Compare commits

1630 Commits

SHA1 Message Date
5e4524de8b refactor: simplify spread usage in various components (#25886) 2025-09-18 12:15:26 +08:00
a984549f01 refactor: simplify array spread usage in various components 2025-09-18 12:10:17 +08:00
afe61c486b del website test (#25883) 2025-09-18 11:36:14 +08:00
b588985074 del website test 2025-09-18 11:33:28 +08:00
2539803337 del website test (#25882) 2025-09-18 11:26:11 +08:00
a570925130 del website test 2025-09-18 11:24:56 +08:00
aa3c8f0657 test(api): fix broken testcontainer tests (#25869) 2025-09-18 11:21:43 +08:00
f1e2ef3762 fix version missing (#25879) 2025-09-18 11:14:58 +08:00
195c52be9b fix version missing 2025-09-18 11:14:04 +08:00
c0a3fc1412 fix version missing 2025-09-18 11:09:02 +08:00
092ced7c66 fix: Fix dependency version display (#25856) 2025-09-18 11:07:02 +08:00
18027b530a Merge branch 'feat/rag-2' into fix/dependency-version 2025-09-18 11:06:26 +08:00
0d9becd060 fix version missing 2025-09-18 10:51:47 +08:00
5956375cec fix: ensure output_schema properties are checked before accessing them in strategy detail, use config, and tool default components 2025-09-18 10:11:15 +08:00
a678dd1a32 WIP: test(api): fix broken tests for WebsiteService 2025-09-18 03:00:57 +08:00
fda15ef018 test(api): Fix testcontainer tests for WorkflowDraftVariableService 2025-09-18 02:58:07 +08:00
7fb1a903ae test(api): fix testcontainer tests for FileService 2025-09-18 02:57:38 +08:00
8f2b53275c fix(api): Remove postgresql_nulls_not_distinct=False in unique indexes
This option generated upgrade / table-creation SQL with a `NULLS
DISTINCT` clause, causing a syntax error when running testcontainer
tests.

The `NULLS DISTINCT` syntax is only supported by PG 15+.
2025-09-18 02:55:19 +08:00
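
For context, a minimal SQLAlchemy 2.0 sketch of the option this commit removes (the table and index names are invented for illustration, not Dify's schema):

```python
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.schema import CreateIndex

metadata = sa.MetaData()

docs = sa.Table(
    "docs",
    metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("tenant_id", sa.String(36)),
    sa.Column("name", sa.String(255), nullable=True),
    # postgresql_nulls_not_distinct=False renders "... NULLS DISTINCT",
    # which PostgreSQL < 15 rejects as a syntax error:
    # sa.Index("uq_docs_name", "tenant_id", "name", unique=True,
    #          postgresql_nulls_not_distinct=False),
    # Omitting the option emits no NULLS [NOT] DISTINCT clause, keeping
    # the generated DDL portable across PostgreSQL versions:
    sa.Index("uq_docs_name", "tenant_id", "name", unique=True),
)

print(CreateIndex(next(iter(docs.indexes))).compile(dialect=postgresql.dialect()))
```
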
87fe8c8a2f fix(api): fix line too long (#25868) 2025-09-18 00:01:04 +08:00
370127b87a fix(api): fix line too long 2025-09-17 23:59:14 +08:00
0d66007ce9 fix(api): simplify parameters in get_signed_file_url_for_plugin function (#25866) 2025-09-17 23:53:42 +08:00
6371cc5028 Apply suggestion from @gemini-code-assist[bot]
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-09-17 23:49:27 +08:00
495562e313 chore(api): fix incorrect assertion message 2025-09-17 23:48:45 +08:00
1a3ac6fc68 Merge branch 'feat/rag-2' into feat/end_user 2025-09-17 23:46:38 +08:00
73e8623f07 fix(api): simplify parameters in get_signed_file_url_for_plugin function 2025-09-17 23:42:32 +08:00
3a427327c4 fix(api): update user retrieval logic in get_user function (#25864) 2025-09-17 23:32:01 +08:00
eed82f7ca7 fix(api): update user retrieval logic in get_user function 2025-09-17 23:23:58 +08:00
e386f350d1 fix mypy (#25862) 2025-09-17 23:21:59 +08:00
42d76dd126 fix mypy 2025-09-17 23:19:57 +08:00
ea38b4bcbe fix mypy 2025-09-17 23:15:03 +08:00
9243fe81de fix mypy (#25859) 2025-09-17 23:08:14 +08:00
41be002ec3 fix(api): fix format, replace .filter with .where (#25858) 2025-09-17 23:06:04 +08:00
8cc6927fed fix mypy 2025-09-17 23:04:03 +08:00
5077f8b299 fix(api): fix format, replace .filter with .where 2025-09-17 22:55:13 +08:00
4fd2f605ba fix(api): fix Optional not defined (#25857) 2025-09-17 22:41:57 +08:00
24fc7d0d6b fix(api): fix Optional not defined 2025-09-17 22:40:24 +08:00
1631f9438d fix style check (#25854) 2025-09-17 22:37:17 +08:00
adf2e33013 chore(api): apply autofix manually (#25853) 2025-09-17 22:37:10 +08:00
55f96a4266 refactor(fetch): convert baseOptions to a function for dynamic request options 2025-09-17 22:37:04 +08:00
6166c26ea6 fix style check 2025-09-17 22:36:18 +08:00
eefcd3ecc4 chore(api): apply autofix manually 2025-09-17 22:34:21 +08:00
efce1b04e0 fix style check 2025-09-17 22:34:11 +08:00
73d4bb596a fix: fix import linting, remove incorrect tests (#25849) 2025-09-17 21:26:35 +08:00
56630c18d4 test(api): remove incorrect tests 2025-09-17 21:21:41 +08:00
6c6f1cf24c fix(api): fix import linting 2025-09-17 21:18:27 +08:00
873a1a952f fix style check (#25840) 2025-09-17 21:00:48 +08:00
f73d6b6a51 fix style check 2025-09-17 20:57:41 +08:00
621a7898d7 fix(api): fix property access 2025-09-17 20:47:46 +08:00
055f7644fb Merge branch 'feat/rag-2' into feat/merge-migrations
# Conflicts:
#	api/services/workflow_service.py
2025-09-17 20:42:33 +08:00
3c9258a02b fix(plugin): fix EndUser id does not match session_id (#25847) 2025-09-17 20:34:29 +08:00
bbf35a6df8 test(api): fix broken tests (#25846) 2025-09-17 20:20:09 +08:00
8352128a27 fix(plugin): fix EndUser id does not match session_id 2025-09-17 20:18:10 +08:00
0ad8cac3a8 Update api/tests/unit_tests/services/test_dataset_service_update_dataset.py
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-17 20:17:40 +08:00
948d6bac97 Update api/tests/unit_tests/services/test_dataset_service_update_dataset.py
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-17 20:17:10 +08:00
396fd728fb test(api): fix broken tests 2025-09-17 20:09:52 +08:00
e6d65fe356 chore: merge feat/queue-based-graph-engine (#25833) 2025-09-17 18:05:25 +08:00
04919195cc Merge remote-tracking branch 'upstream/feat/queue-based-graph-engine' into feat/rag-2 2025-09-17 18:00:48 +08:00
22517ea496 fix indexing_technique setting (#25787) 2025-09-17 17:56:39 +08:00
caf4b16b8e Update api/core/datasource/utils/message_transformer.py
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-09-17 17:51:53 +08:00
69a402ba99 fix style check 2025-09-17 17:34:05 +08:00
a7665da813 fix: Fix web test error (#25836) 2025-09-17 16:51:51 +08:00
ba8bcf3a22 fix: Clean up whitespace in PortalToFollowElem component tests and improve controlled state handling 2025-09-17 16:48:17 +08:00
eaf6a29efa Update the translation for pipeline (#25834) 2025-09-17 16:24:20 +08:00
1c23cbc643 Update the translation for pipeline 2025-09-17 16:20:02 +08:00
96e17f1f2d fix: Fix lint and ts errors (#25829) 2025-09-17 16:09:54 +08:00
9c1b49b7f5 test(api): fix incorrect import in tests 2025-09-17 16:09:36 +08:00
da6b04656d Merge branch 'feat/rag-2' into fix/response-ts-error 2025-09-17 16:07:36 +08:00
253b8dbc0a feat: add i18n type generation scripts and improve code formatting 2025-09-17 16:05:10 +08:00
fbf0e5dd78 merge rag2 migrations (#25831) 2025-09-17 16:00:50 +08:00
f963eb525c fix(api): apply suggestion from gemini 2025-09-17 15:55:38 +08:00
090b079ab4 fix(api): fix errors in migration
- add default value for `is_default_value`.
- use sa.String(20) for `EnumText`
2025-09-17 15:50:08 +08:00
262ffa0eae chore(api): fix circular import caused by type hints 2025-09-17 15:49:35 +08:00
8eb063deb6 merge rag2 migrations 2025-09-17 15:35:46 +08:00
f89d6376dd fix: improve error handling in mail and password authentication 2025-09-17 15:27:10 +08:00
c38a48027a use DifyCoreRepositoryFactory (#25824) 2025-09-17 14:41:07 +08:00
47b1cd83c7 use DifyCoreRepositoryFactory 2025-09-17 14:38:23 +08:00
f0ebd654a0 fix: checklist before publish (#25819) 2025-09-17 13:52:00 +08:00
58a5e5263c Merge branch 'feat/rag-2' into fix/checklist-before-publish 2025-09-17 13:49:32 +08:00
262189c54f fix: checklist before publish 2025-09-17 13:47:11 +08:00
1cf788c43b Merge branch 'main' into feat/queue-based-graph-engine 2025-09-17 12:46:08 +08:00
73a7756350 feat(graph_engine): allow to dumps and loads RSC 2025-09-17 12:45:51 +08:00
5520c80dbf fix: Rename hybridSearchMode to reranking_mode in KnowledgeBase components for consistency (#25789) 2025-09-17 10:24:18 +08:00
409ad3e2b3 Merge branch 'feat/rag-2' into fix/kb-node-rerank-model 2025-09-17 10:16:07 +08:00
7ef2d92143 feat: improve number input (#25794) 2025-09-16 21:51:16 +08:00
4f3abc3ae5 chore: extract regex logic 2025-09-16 21:39:54 +08:00
496f87e028 chore: clean up 2025-09-16 21:29:55 +08:00
5469f6c846 feat: remove leading zeros in number input 2025-09-16 21:25:34 +08:00
926ae7320a feat: Add useDatasetApiAccessUrl hook and update API links in various components for localization support 2025-09-16 21:08:05 +08:00
e3f3d4f980 fix: Rename hybridSearchMode to reranking_mode in KnowledgeBase components for consistency 2025-09-16 19:53:19 +08:00
bd6f8382b2 fix indexing_technique setting 2025-09-16 19:40:50 +08:00
02d15ebd5a feat(graph_engine): support dumps and loads in GraphExecution 2025-09-16 19:38:10 +08:00
3947945a6f Feat/merge main (#25785) 2025-09-16 19:33:22 +08:00
b04d945b3d Merge branch 'main' into feat/rag-2 2025-09-16 19:29:27 +08:00
b37bef44f6 Fix/merge fix (#25781) 2025-09-16 19:14:33 +08:00
8cbfaa2c03 dev/reformat 2025-09-16 19:13:22 +08:00
5133623d42 dev/reformat 2025-09-16 19:06:42 +08:00
d575072735 fix: Remove customContainer prop from PortalToFollowElem and adjust padding in dropdown component (#25779) 2025-09-16 18:40:06 +08:00
73007362b3 fix: Remove customContainer prop from PortalToFollowElem and adjust padding in dropdown component 2025-09-16 18:21:08 +08:00
6be5772a51 dev/reformat 2025-09-16 17:45:21 +08:00
9ed6679966 dev/reformat 2025-09-16 17:34:22 +08:00
98db7d365c dev/reformat 2025-09-16 17:33:56 +08:00
7b9326a411 dev/reformat 2025-09-16 17:08:39 +08:00
8997e49f3a Merge branch 'feat/rag-2' into fix/api-url 2025-09-16 16:46:37 +08:00
3797d53c78 fix: Update type handling in ToolNodeType to ensure proper formatting for unknown types 2025-09-16 16:42:15 +08:00
1c6e57d3df dev/reformat 2025-09-16 16:41:50 +08:00
271da87c84 dev/reformat 2025-09-16 16:27:19 +08:00
6c7c465b17 fix: Update reopening logic for chunk details in Drawer component 2025-09-16 16:17:23 +08:00
5691ceb0ba fix: Correct isMobile prop logic in LinkedAppsPanel and clean up component styles 2025-09-16 16:11:56 +08:00
1bf0dbc5d6 Feat/add dataset service api enable (#25765) 2025-09-16 16:09:57 +08:00
0ec037b803 dev/reformat 2025-09-16 16:08:04 +08:00
05aec66424 fix re-chunk document 2025-09-16 16:05:01 +08:00
11576932c9 feat: Add original_document_id to pipeline settings and update API key mutation paths 2025-09-16 15:57:38 +08:00
610f0414db fix document retry 2025-09-16 15:29:19 +08:00
976b3b5e83 Merge branch 'main' into feat/queue-based-graph-engine 2025-09-16 15:21:36 +08:00
2f1c47406c Merge branch 'feat/rag-2' into feat/add-dataset-service-api-enable
# Conflicts:
#	api/controllers/console/datasets/datasets.py
#	api/controllers/service_api/wraps.py
#	api/services/dataset_service.py
2025-09-16 15:21:23 +08:00
7d8164ab4c Sync with main (#25763) 2025-09-16 15:10:20 +08:00
ffb4f214ea Merge remote-tracking branch 'upstream/feat/rag-2' into feat/rag-2 2025-09-16 15:06:38 +08:00
57972b7920 refactor: Enhance dependency tracking in List components and plugin tag compatibility (#25757) 2025-09-16 15:03:35 +08:00
dd34002db2 Merge remote-tracking branch 'upstream/main' into feat/rag-2 2025-09-16 14:59:35 +08:00
c463f31f56 fix document retry 2025-09-16 14:52:33 +08:00
c4ddc6420a fix document retry 2025-09-16 14:18:26 +08:00
8346506978 fix document retry 2025-09-16 14:14:09 +08:00
b5684f1992 refactor(graph_engine): remove unused parameters from Engine 2025-09-16 14:11:42 +08:00
4ea8fddf1a feat: Add APP_VERSION to headers for marketplace API requests 2025-09-16 13:46:48 +08:00
a061215e42 refactor: Simplify tag label retrieval in hooks and update related components 2025-09-16 13:45:56 +08:00
d790d2bc89 refactor: Enhance dependency tracking in List components for improved loading state management 2025-09-16 13:37:49 +08:00
bd13cf05eb Merge branch 'main' into feat/queue-based-graph-engine 2025-09-16 12:59:26 +08:00
5f263147f9 fix: make mypy happy 2025-09-16 12:51:11 +08:00
b68afdfa64 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-16 12:32:16 +08:00
1e9fce50a1 refactor: Update knowledge pipeline terminology and chunk detail UI/UX (#25749) 2025-09-16 12:16:36 +08:00
aeaad2e843 Merge branch 'fix/styling-issue' of https://github.com/langgenius/dify into fix/styling-issue 2025-09-16 11:52:26 +08:00
713bd7c326 refactor: Simplify drawer component logic by extracting shouldCloseDrawer function for better readability and maintainability 2025-09-16 11:52:07 +08:00
95abf8b44b Update web/app/components/datasets/documents/detail/completed/common/drawer.tsx
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-16 11:46:43 +08:00
6440be9f35 feat: Update documentation links in OnlineDocuments, OnlineDrive, and WebsiteCrawl components for improved localization support 2025-09-16 11:38:29 +08:00
31edc39686 Merge branch 'feat/rag-2' into fix/styling-issue 2025-09-16 11:22:39 +08:00
7c911f06b9 feat: Update help links in useAvailableNodesMetaData hook for improved localization support 2025-09-16 11:21:48 +08:00
6d7b377195 feat(api): add Service API component with related UI elements and remove Access API tab from dataset list (#25693) 2025-09-16 11:01:45 +08:00
4d61189210 Merge branch 'refactor/dataset-service-api' of https://github.com/langgenius/dify into refactor/dataset-service-api 2025-09-16 10:59:46 +08:00
d9839e5ec6 fix(api): correct variable name from 'apiaBaseUrl' to 'apiBaseUrl' in Service API components 2025-09-16 10:59:14 +08:00
21977d464d Merge branch 'feat/rag-2' into refactor/dataset-service-api 2025-09-16 10:54:04 +08:00
8b139087e4 feat: Enhance drawer components with modal and overlay options for improved user experience 2025-09-16 10:45:00 +08:00
da87fce751 feat(graph_engine): dump and load ready queue 2025-09-16 04:19:46 +08:00
d5342927d0 chore: change _outputs type to dict[str, object] 2025-09-16 01:53:25 +08:00
de51b1e658 chore(api): Use uuidv7 to generate PK for new knowledge pipeline models (#25728) 2025-09-15 21:57:55 +08:00
ab24af9c94 feat: Implement reranking model enable/disable functionality in knowledge base panel 2025-09-15 21:42:35 +08:00
70a362ed3b fix priority task 2025-09-15 18:52:01 +08:00
57a311c937 chore(api): Use uuidv7 to generate PK for new models of knowledge pipeline 2025-09-15 18:25:51 +08:00
d869f1405e fix: variable assigner in node metadata (#25716) 2025-09-15 16:08:13 +08:00
754d790c89 [autofix.ci] apply automated fixes (attempt 2/3) 2025-09-15 07:58:44 +00:00
a099a35e51 [autofix.ci] apply automated fixes 2025-09-15 07:56:51 +00:00
58fe02fb3f fix: variable assigner in node metadata 2025-09-15 15:56:08 +08:00
2dd893e60d Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-15 15:54:42 +08:00
b8ee1d4697 Merge branch 'main' into feat/queue-based-graph-engine 2025-09-15 12:21:18 +08:00
01beb59aa7 feat: Add documentation link to step three of dataset creation process 2025-09-15 11:48:50 +08:00
7eb8259e3d fix priority task 2025-09-15 11:44:13 +08:00
93b1c61f5e refactor: Update knowledge pipeline terminology and permissions in the RAG pipeline header 2025-09-15 11:41:39 +08:00
4620bef25c feat(api): add Service API component with related UI elements 2025-09-15 10:27:19 +08:00
b4ef1de30f feat(graph_engine): add ready_queue state persistence to GraphRuntimeState
- Add ReadyQueueState TypedDict for type-safe queue serialization
- Add ready_queue attribute to GraphRuntimeState for initializing with pre-existing queue state
- Update GraphEngine to load ready_queue from GraphRuntimeState on initialization
- Implement proper type hints using ReadyQueueState for better type safety
- Add comprehensive tests for ready_queue loading functionality

The ready_queue is read-only after initialization and allows resuming workflow
execution with a pre-populated queue of nodes ready to execute.
2025-09-15 03:05:10 +08:00
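
A rough sketch of the persistence shape this commit describes (as noted in the bullets above), with simplified fields — the real ReadyQueueState and GraphRuntimeState carry more data than shown here:

```python
from typing import TypedDict


class ReadyQueueState(TypedDict):
    # serialized IDs of nodes that are ready to execute, in order
    items: list[str]


class GraphRuntimeState:
    def __init__(self, ready_queue: ReadyQueueState | None = None) -> None:
        # read-only after initialization: copy defensively
        self._ready_queue: list[str] = list(ready_queue["items"]) if ready_queue else []

    def dump_ready_queue(self) -> ReadyQueueState:
        return {"items": list(self._ready_queue)}


# Resuming a workflow run with a pre-populated queue:
state = GraphRuntimeState(ready_queue={"items": ["node_a", "node_b"]})
assert state.dump_ready_queue() == {"items": ["node_a", "node_b"]}
```
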
c08a60021a add dataset service api enable 2025-09-14 22:06:32 +08:00
815e5568c3 add dataset service api enable 2025-09-14 21:53:32 +08:00
80c32a130f add dataset service api enable 2025-09-14 20:43:49 +08:00
c004988ecd Fix wrong permission logic (#25649) 2025-09-14 18:44:12 +08:00
0d79062dba Fix missing import of DataSourceOauthBinding (#25648) 2025-09-14 18:41:01 +08:00
0f15a2baca [autofix.ci] apply automated fixes 2025-09-13 20:20:53 +00:00
4cdc19fd05 feat(graph_engine): add abstract layer and dump / load methods for ready queue. 2025-09-14 04:19:24 +08:00
efa5f35277 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-14 01:48:06 +08:00
831548132e Merge branch 'feat/rag-2' into fix-permission 2025-09-13 23:42:32 +08:00
ec87474d45 Fix wrong permission logic 2025-09-13 23:40:59 +08:00
8d0139bb21 Merge branch 'feat/rag-2' into fix-miss-import 2025-09-13 23:34:38 +08:00
bfb25ced7a Fix missing import of DataSourceOauthBinding 2025-09-13 23:32:07 +08:00
766fda395b Merge branch 'main' into feat/queue-based-graph-engine 2025-09-13 19:37:52 +08:00
b0e815c3c7 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-13 01:31:17 +08:00
c51cedff7e security(api): enforce privilege validation for dataset-to-pipeline transformation (#25603)
The transformation from classic dataset to knowledge pipeline represents an irreversible
write operation that permanently alters the dataset structure. To prevent unauthorized
modifications, this change implements strict privilege validation in RagPipelineTransformApi.

Only users with editor privileges or dataset operator roles are authorized to execute
this transformation, ensuring proper access control for this critical operation.
2025-09-12 17:12:06 +08:00
32a1a61d65 security(api): enforce privilege validation for dataset-to-pipeline transformation
The transformation from classic dataset to knowledge pipeline represents an irreversible
write operation that permanently alters the dataset structure. To prevent unauthorized
modifications, this change implements strict privilege validation in `RagPipelineTransformApi`.

Only users with editor privileges or dataset operator roles are authorized to execute
this transformation, ensuring proper access control for this critical operation.
2025-09-12 17:07:26 +08:00
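
A hedged sketch of the guard these two commits describe; the role flags and exception below are assumptions, not Dify's actual user model:

```python
class PermissionDenied(Exception):
    pass


def ensure_can_transform_dataset(user) -> None:
    """Gate the irreversible dataset-to-pipeline transformation."""
    # Assumed flags: only editors or dataset operators may proceed.
    if not (getattr(user, "is_editor", False) or getattr(user, "is_dataset_operator", False)):
        raise PermissionDenied("editor privileges or dataset operator role required")
```
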
ad870de554 add dataset service api enable 2025-09-12 15:35:13 +08:00
ac41151571 chore(api): remove unused installed_plugins.jsonl 2025-09-12 10:38:43 +08:00
462ba354a4 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-12 00:21:06 +08:00
3c668e4a5c fix: update test assertions for ToolProviderApiEntity validation
- Fixed test_repack_provider_entity_no_dark_icon to use empty string instead of None for icon_dark field
- Updated test_builtin_provider_to_user_provider_no_credentials assertion to match actual implementation behavior where masked_credentials always contains empty strings for schema fields
2025-09-11 16:41:10 +08:00
c2ad68d59a refactor(workflow): streamline node metadata structure and enhance filtering logic (#25528) 2025-09-11 16:11:34 +08:00
274e7f4f09 refactor(workflow): streamline node metadata structure and enhance filtering logic 2025-09-11 16:02:06 +08:00
872cff7bab chore(iteration_node): convert some Any to object
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-11 15:40:12 +08:00
8fb69429f9 feat(graph_engine): support parallel mode in iteration node
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-11 15:37:46 +08:00
85064bd8cf Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-11 15:13:31 +08:00
ba5df3612b fix: tests
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-11 15:13:18 +08:00
a923ab1ab8 fix: type errors
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-11 15:01:16 +08:00
9458ebe320 fix: Add waiting state to document embedding process (#25478) 2025-09-11 11:00:47 +08:00
5c3d12cfc8 refactor(i18n): clean up code structure and improve readability (#25510) 2025-09-11 11:00:14 +08:00
b146f5d3fa refactor(i18n): clean up code structure and improve readability 2025-09-11 10:53:40 +08:00
b4c1766932 fix: type errors
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-10 21:48:05 +08:00
00a1af8506 refactor(graph_engine): use singledispatch in Node
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-10 20:59:34 +08:00
f925cb5191 fix: Fix typo in embedding process variable name 2025-09-10 16:05:44 +08:00
4559d19d46 fix: Add waiting state to document embedding process 2025-09-10 15:59:13 +08:00
bb6b663ef4 fix(sidebar): eliminate nav link icon jumping and text squashing (#25467) 2025-09-10 15:17:22 +08:00
ce4be13b2c chore: update test file 2025-09-10 14:36:06 +08:00
c68d0231de Merge branch 'feat/rag-2' into fix/dataset-sidebar 2025-09-10 14:12:13 +08:00
2b91fba3e9 refactor(sidebar): extract duplicated icon wrapper logic 2025-09-10 14:11:47 +08:00
23def7d0f9 fix missing user_id in workflow graph (#25466) 2025-09-10 13:54:34 +08:00
463ea3e916 fix: Handle missing dataset avatars and improve routing (#25459) 2025-09-10 13:53:19 +08:00
c295b1d645 fix missing user_id 2025-09-10 13:50:12 +08:00
f56fccee9d fix: workflow knowledge query raise error (#25465) 2025-09-10 13:47:47 +08:00
df420e37d9 fix(sidebar): eliminate nav link icon jumping and text squashing
- Unified layout structure using consistent pl-3 pr-1 padding
- Icon micro-adjustment with -ml-1 for better centering in collapsed state
- Text animation uses max-width instead of width to prevent squashing effect
- Maintains smooth transitions without layout jumping
2025-09-10 13:37:02 +08:00
bc061016b6 refactor: Extract getDatasetLink function into separate callback 2025-09-10 11:11:27 +08:00
92a91d790f refactor: Rename createRouter to createRoute 2025-09-10 11:06:07 +08:00
88563f10df Update datasets.ts 2025-09-10 11:04:02 +08:00
9ae6978a77 refactor: Simplify dataset navigation and avatar display logic 2025-09-10 11:00:49 +08:00
9fdaa14c8d fix: Handle missing dataset avatars and improve routing 2025-09-10 10:51:48 +08:00
b6b98a2c8e Merge branch 'feat/dispatch-method' into feat/queue-based-graph-engine 2025-09-10 03:12:59 +08:00
7e69403dda refactor(graph_engine): use singledispatchmethod in event_handler
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-10 03:12:33 +08:00
9796cede72 fix: add missing type field to node configurations in integration tests
- Added 'type' field to all node data configurations in test files
- Fixed test_code.py: added 'type: code' to all code node configs
- Fixed test_http.py: added 'type: http-request' to all HTTP node configs
- Fixed test_template_transform.py: added 'type: template-transform' to template node config
- Fixed test_tool.py: added 'type: tool' to all tool node configs
- Added setup_code_executor_mock fixture to test_execute_code_scientific_notation

These changes fix the ValueError: 'Node X missing or invalid type information' errors
that were occurring due to changes in the node factory validation requirements.
2025-09-10 02:54:01 +08:00
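
An illustrative node configuration in the shape the bullets above imply; the exact keys are assumptions based on the commit message:

```python
# Before the fix, test configs omitted the "type" discriminator, so the
# node factory raised "Node X missing or invalid type information".
code_node_config = {
    "id": "code_1",
    "data": {
        "type": "code",  # the field this commit adds to every test config
        "title": "Code",
        "code_language": "python3",
        "code": "def main():\n    return {'result': 1}",
    },
}
```
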
836ed1f380 refactor(graph_engine): Move ErrorHandler into a single file package
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-10 02:35:05 +08:00
80f39963f1 chore: add import lint to CI
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-10 02:32:24 +08:00
9cf2b2b231 fix: type errors
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-10 02:22:58 +08:00
2a97a69825 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-10 02:03:45 +08:00
f17c71e08a refactor(graph_engine): Move GraphStateManager to single file package.
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-10 01:55:30 +08:00
d52621fce3 refactor(graph_engine): Merge error strategies into error_handler.py
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-10 01:49:46 +08:00
e060d7c28c refactor(graph_engine): remove Optional
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-10 01:49:15 +08:00
ea5dfe41d5 chore: ignore comment
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-10 01:36:11 +08:00
a23c8fcb1a refactor: move execution limits from engine core to layer
Remove max_execution_time and max_execution_steps from ExecutionContext and GraphEngine since these limits are now handled by ExecutionLimitsLayer. This follows the separation of concerns principle by keeping execution limits as a cross-cutting concern handled by layers rather than embedded in core engine components.

Changes:
- Remove max_execution_time and max_execution_steps from ExecutionContext
- Remove these parameters from GraphEngine.__init__()
- Remove max_execution_time from Dispatcher
- Update workflow_entry.py to no longer pass these parameters
- Update all tests to remove these parameters
2025-09-10 01:32:45 +08:00
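
A minimal sketch of the layered design described above, assuming a layer that observes engine events (the class and method names here are illustrative, not the project's API):

```python
import time


class LimitExceeded(Exception):
    pass


class ExecutionLimitsLayer:
    """Cross-cutting limit checks kept out of the engine core."""

    def __init__(self, max_steps: int, max_seconds: float) -> None:
        self._max_steps = max_steps
        self._deadline = time.monotonic() + max_seconds
        self._steps = 0

    def on_node_finished(self) -> None:
        # Invoked after every node run; the engine itself no longer
        # knows anything about execution limits.
        self._steps += 1
        if self._steps > self._max_steps:
            raise LimitExceeded("max execution steps exceeded")
        if time.monotonic() > self._deadline:
            raise LimitExceeded("max execution time exceeded")
```
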
e0e82fbfaa refactor: extract _run method into smaller focused methods in IterationNode
- Extract iterator variable retrieval and validation logic
- Separate empty iteration handling
- Create dedicated methods for iteration execution and result handling
- Improve type hints and use modern Python syntax
- Enhance code readability and maintainability
2025-09-10 01:15:36 +08:00
65780e96ca fix(dataset-sidebar): eliminate icon flickering during expand/collapse (#25444) 2025-09-09 23:14:59 +08:00
5e1e387c5c Revert "chore: add .serena/ to gitignore"
This reverts commit 4eca134a2a.
2025-09-09 22:55:19 +08:00
4eca134a2a chore: add .serena/ to gitignore 2025-09-09 22:54:10 +08:00
f4ecc293f9 fix(dataset-sidebar): eliminate icon flickering during expand/collapse
- Unified layout structure for both expand/collapse states
- Single AppIcon component with dynamic size props instead of conditional rendering
- Fixed container structure prevents DOM rebuilding and flickering
- Adjusted spacing for collapsed state more button positioning
2025-09-09 22:53:13 +08:00
1c9f40f92a Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-09 22:16:59 +08:00
6ffa2ebabf feat: improve error handling in graph node creation
- Replace ValueError catch with generic Exception
- Use logger.exception for automatic traceback logging
- Abort on node creation failure instead of continuing
2025-09-09 22:16:42 +08:00
48e32e285e fix: migration
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-09 21:41:03 +08:00
e1ecce8d27 fix: Fix RAG pipeline identifier name dash format (#25438) 2025-09-09 18:43:34 +08:00
2d6330f9b3 fix: Fix RAG pipeline identifier name dash format 2025-09-09 18:40:32 +08:00
c79de5d68e feat: Add chunk input field translation and tooltip (#25434) 2025-09-09 18:20:43 +08:00
01416fe6a8 feat: Add chunk input field translation and tooltip 2025-09-09 18:13:36 +08:00
6247fadb37 fix(api): Fix incorrect import of ToolProviderID (#25432) 2025-09-09 17:45:12 +08:00
6637b9efda fix(api): Fix incorrect import of ToolProviderID 2025-09-09 17:42:42 +08:00
95dc1e2fe8 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-09 17:13:16 +08:00
6fe7cf5ebf Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-09 17:11:46 +08:00
2c13d1072d Fix/value content rerender error (#25421) 2025-09-09 17:10:25 +08:00
fc8adb9d6c fix ruff 2025-09-09 17:07:22 +08:00
045d07885d Merge branch 'main' into fix/value-content-rerender-error 2025-09-09 16:40:08 +08:00
7596eb4c2f refactor: clean up unused code and improve Textarea component 2025-09-09 16:24:20 +08:00
a1e8ac4c96 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-09 15:49:09 +08:00
9240ed680e fix: hide export pipeline button (#25343) 2025-09-09 15:44:45 +08:00
daba3af694 refactor: replace useResetDatasetList with useInvalidDatasetList (#25413) 2025-09-09 15:44:40 +08:00
c8614b108a refactor: replace useResetDatasetList with useInvalidDatasetList across multiple components 2025-09-09 15:35:02 +08:00
06d45f0278 Merge branch 'feat/rag-2' into fix/hide-export-button 2025-09-09 15:10:26 +08:00
b46858d87d Merge branch 'main' into feat/queue-based-graph-engine 2025-09-09 13:33:17 +08:00
5ab6838849 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-08 19:55:43 +08:00
2a84832998 Merge branch 'feat/queue-based-graph-engine' into feat/rag-2 2025-09-08 16:44:00 +08:00
ef974e484b fix: handle None env vars
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-08 16:43:47 +08:00
aeaeb28a0b fix: add variable_pool to pipeline WorkflowEntry
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-08 16:17:43 +08:00
be005d0549 Merge branch 'feat/rag-2' into fix/hide-export-button 2025-09-08 15:29:04 +08:00
b865ef54d5 chore: bump version to 2.0.0-beta.2
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-08 15:19:02 +08:00
23cd615489 Merge branch 'feat/queue-based-graph-engine' into feat/rag-2 2025-09-08 14:30:43 +08:00
299141ae01 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-08 13:56:45 +08:00
1427e08cb4 fix: prevent memory leak by revoking object URLs after file download in various components 2025-09-08 11:43:32 +08:00
b48c266908 style: improve pointer event handling in PublishToast and Workflow components (#25339) 2025-09-08 11:20:24 +08:00
f7d4e17ae1 style: improve pointer event handling in PublishToast and Workflow components 2025-09-08 11:06:35 +08:00
18204ac2b0 fix: fix dark mode styling issues (#25338) 2025-09-08 10:38:36 +08:00
092e249bfb style: enhance loading indicator and adjust Markdown component styling for consistency 2025-09-08 10:26:08 +08:00
ed9b6f7bce style: update DSL modal and uploader components for improved UI consistency 2025-09-08 10:09:11 +08:00
c2e67e93ef Fix mem leak of URL.createObjectURL() (#25303) 2025-09-08 09:47:51 +08:00
e5a983a409 fix(datasource): load default fileExtensions for file node earlier (#24696) 2025-09-08 09:41:11 +08:00
cc1d437dc1 fix: correct indentation in TokenBufferMemory get_history_prompt_messages method 2025-09-07 12:48:50 +08:00
7aef0b54e5 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-07 12:34:54 +08:00
1f10c1f474 fix(datasource): load default fileExtensions for file node earlier 2025-09-06 16:11:52 +00:00
a1abe98666 Fix mem leak of URL.createObjectURL 2025-09-06 22:13:18 +08:00
3c28936796 fix: test
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-06 16:21:28 +08:00
81fdc7c54b fix: type errors
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-06 16:09:59 +08:00
abb53f11ad Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-06 16:05:13 +08:00
d9aa0ec046 fix: resolve mypy type errors in http_request and list_operator nodes
- Fix str | bytes union type handling in http_request executor
- Add type guard for boolean filter value in list_operator node
2025-09-05 21:17:18 +08:00
6c3302a192 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-05 21:13:07 +08:00
7ba1f0a046 chore: improve typing
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-05 20:57:11 +08:00
ca1deb8f01 fix: remove notion page selector modal (#25247) 2025-09-05 20:25:54 +08:00
b3026dfb0d Update web/app/components/datasets/documents/index.tsx
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-05 16:14:38 +08:00
f64b055d2e refactor(Documents): Remove ProgressBar component and simplify document loading logic 2025-09-05 16:13:15 +08:00
bebd7c5718 change migration 2025-09-05 15:30:02 +08:00
917c924a94 change migration 2025-09-05 15:29:48 +08:00
447b016e9e refactor(NotionPageSelector): Remove NotionPageSelectorModal component and associated styles 2025-09-05 14:21:41 +08:00
110b6a0863 fix incorrect indent in TokenBufferMemory (#25215) 2025-09-05 14:01:07 +08:00
e3cbe85db4 Fix _logger is not defined (#25235) 2025-09-05 13:59:37 +08:00
aa74d2c322 feat: rag-2 i18n (#25231) 2025-09-05 13:12:40 +08:00
1a81c79852 Correct value knowledge_index to align with NodeType.KNOWLEDGE_INDEX.value (#25180) 2025-09-05 09:54:34 +08:00
2adf5d0eee docs: remove outdated document 2025-09-05 02:09:53 +08:00
103a9a4e67 fix(graph_engine): add type hint for workers_to_remove 2025-09-05 01:59:11 +08:00
15b3443e9e fix(debug_logging_layer): remove access for variable pool 2025-09-05 01:52:19 +08:00
fae6d4f2dd chore: add default value for FILES_URL
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 21:30:12 +08:00
495324b85b Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-04 20:52:02 +08:00
65df8f6c57 refactor(DocumentList): Optimize dataset configuration handling and improve data source type checks 2025-09-04 20:51:56 +08:00
d4ee915058 Merge branch 'feat/queue-based-graph-engine' into feat/rag-2 2025-09-04 20:51:48 +08:00
81e9d6f63a fix: correct type checking for None values in code node output validation
- Fixed isinstance() checks to properly handle None values by checking None separately
- Fixed typo in STRING type validation where 'output_name' was hardcoded as string instead of variable
- Updated error message format to be consistent and more informative
- Updated test assertion to match new error message format
2025-09-04 20:39:37 +08:00
637afe1ab0 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-04 20:36:38 +08:00
df70e28481 chore(docker): Update Docker images to version 2.0.0-beta.1 for API, worker, web, and plugin daemon services 2025-09-04 20:36:23 +08:00
b9394d542c Merge branch 'feat/queue-based-graph-engine' into feat/rag-2
# Conflicts:
#	api/core/app/apps/advanced_chat/generate_task_pipeline.py
#	api/pyproject.toml
#	api/uv.lock
#	docker/docker-compose-template.yaml
#	docker/docker-compose.yaml
#	web/package.json
2025-09-04 20:30:08 +08:00
1fda319ecb chore(docker): Update Docker images to version 2.0.0-beta1 for API, worker, web, and plugin daemon services 2025-09-04 20:24:17 +08:00
9c2943183e test: fix code node
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 20:17:28 +08:00
814cbee28e chore: Update GitHub Actions workflow to include 'feat/rag-2' path for build triggers 2025-09-04 20:11:16 +08:00
443e1fad32 chore: Bump version to 2.0.0-beta1 2025-09-04 20:06:15 +08:00
f6a2a09815 test: fix code node
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 20:04:29 +08:00
e21c133e1d change migration 2025-09-04 19:38:51 +08:00
e229510e73 perf: eliminate lock contention in worker pool by removing callbacks
Remove worker idle/active callbacks that caused severe lock contention.
Instead, use sampling-based monitoring where worker states are queried
on-demand during scaling decisions. This eliminates the performance
bottleneck caused by workers acquiring locks 10+ times per second.

Changes:
- Remove callback parameters from Worker class
- Add properties to expose worker idle state directly
- Update WorkerPool to query worker states without callbacks
- Maintain scaling functionality with better performance
2025-09-04 19:37:31 +08:00
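
A sketch of the sampling approach listed above (names assumed): workers expose idleness as a property, and the pool polls only when making a scaling decision instead of receiving a callback on every state change:

```python
import threading
import time


class Worker(threading.Thread):
    def __init__(self) -> None:
        super().__init__(daemon=True)
        self._last_active = time.monotonic()

    @property
    def is_idle(self) -> bool:
        # Read without a lock: a slightly stale answer is acceptable
        # because the pool only samples during scaling decisions.
        return time.monotonic() - self._last_active > 1.0


class WorkerPool:
    def __init__(self, workers: list[Worker]) -> None:
        self._workers = workers

    def idle_count(self) -> int:
        # On-demand sampling replaces per-event callbacks, avoiding the
        # 10+ lock acquisitions per second mentioned above.
        return sum(w.is_idle for w in self._workers)
```
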
36048d1526 feat(graph_engine): allow to scale down without lock
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 19:32:07 +08:00
aff7ca12b8 fix(code_node): type checking bypass
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 19:25:08 +08:00
8506288180 fix(firecrawl): map markdown content to content field in crawl results 2025-09-04 19:24:54 +08:00
ad9eed2551 fix: disable scaling for performance
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 19:11:22 +08:00
ef64729771 refactor(notion-page-preview): update fetchNotionPagePreview to include pageType parameter for improved API request 2025-09-04 19:02:35 +08:00
771ea54a0b change migration 2025-09-04 18:37:58 +08:00
35ec0d25e8 change migration 2025-09-04 18:06:45 +08:00
07109846e0 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-04 17:48:08 +08:00
2aeaefccec test: fix test 2025-09-04 17:47:36 +08:00
4d63bd2083 refactor(graph_engine): rename SimpleWorkerPool to WorkerPool 2025-09-04 17:47:13 +08:00
952fb16b91 change migration 2025-09-04 17:37:21 +08:00
e9f11b4706 change migration 2025-09-04 17:29:09 +08:00
41b5596441 fix file name 2025-09-04 17:11:13 +08:00
55b936003f refactor(context-menu): conditionally render export option based on pipeline ID 2025-09-04 16:47:21 +08:00
a6b0071ca0 feat: add datasource category handling in marketplace list condition 2025-09-04 16:05:29 +08:00
226f14a20f feat(graph_engine): implement scale down worker
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 15:35:20 +08:00
ebeb17ec96 refactor(form-input-item): enhance type switch logic to include select input handling 2025-09-04 15:34:21 +08:00
313069a63e refactor(workflow): improve loading state rendering and enhance control prompt editor re-rendering logic 2025-09-04 15:27:55 +08:00
64fc0c9073 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-04 14:52:34 +08:00
94f2956e3a fix file name 2025-09-04 14:52:23 +08:00
1bf04b7b74 fix: export workflow image 2025-09-04 14:43:14 +08:00
1e5f6c1475 fix file name 2025-09-04 14:07:58 +08:00
9f3c996be2 fix(pipeline-publish): publish not update dataset 2025-09-04 14:07:42 +08:00
0bcdd0db99 fix(field): add z-index to sticky header for improved layering in workflow nodes 2025-09-04 13:18:14 +08:00
5ecf382180 refactor(chunk-preview): improve key assignment for ChunkContainer components and enhance localFileList handling in Preparation component 2025-09-04 13:03:49 +08:00
2b28aed4e2 [autofix.ci] apply automated fixes 2025-09-04 04:50:21 +00:00
938a845852 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-04 12:48:58 +08:00
04d01c8409 refactor(icons): remove unused Globe01 icon and related files 2025-09-04 12:37:01 +08:00
7efe215d2b fix file name 2025-09-04 12:28:52 +08:00
ead8568bfc fix: some errors reported by basedpyright
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 11:58:54 +08:00
25e3f4da04 fix(pipeline-tracing-log): datasource icon error 2025-09-04 11:44:10 +08:00
68f4d4b97c fix(rag-pipeline-dsl): dsl import session error 2025-09-04 11:03:45 +08:00
c40dfe7060 fix(pipeline): correct API endpoint for stopping workflow runs 2025-09-04 10:39:23 +08:00
647af6fc69 fix file name 2025-09-04 10:36:44 +08:00
5114569017 refactor(document-picker): enhance chunking mode handling and improve parent mode label logic 2025-09-04 09:59:55 +08:00
ed22d04ea0 test: remove outdated test case 2025-09-04 02:42:36 +08:00
04bbf540d9 chore: code format
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 02:33:53 +08:00
657c27ec75 feat(graph_engine): make runtime state read-only in layer
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 02:30:40 +08:00
16e9cd5ac5 feat(graph_runtime_state): prevent to set variable pool after initialized. 2025-09-04 02:20:19 +08:00
61c79b0013 test: correct imported name 2025-09-04 02:15:46 +08:00
8332472944 refactor(graph_engine): rename Layer to GraphEngineLayer
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 02:11:31 +08:00
fe3f03e50a feat: add property-based access control to GraphRuntimeState
- Replace direct field access with private attributes and property decorators
- Implement deep copy protection for mutable objects (dict, LLMUsage)
- Add helper methods: set_output(), get_output(), update_outputs()
- Add increment_node_run_steps() and add_tokens() convenience methods
- Update loop_node and event_handlers to use new accessor methods
- Add comprehensive unit tests for immutability and validation
- Ensure backward compatibility with existing property access patterns
2025-09-04 02:08:58 +08:00
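
Illustrative only: the accessor pattern the bullets above describe, reduced to a single field (the real GraphRuntimeState has many more):

```python
from copy import deepcopy


class GraphRuntimeState:
    def __init__(self) -> None:
        self._outputs: dict[str, object] = {}

    @property
    def outputs(self) -> dict[str, object]:
        # Deep copy so callers cannot mutate internal state in place.
        return deepcopy(self._outputs)

    def set_output(self, key: str, value: object) -> None:
        self._outputs[key] = deepcopy(value)


state = GraphRuntimeState()
state.set_output("answer", {"text": "hi"})
state.outputs["answer"]["text"] = "mutated"  # only changes the copy
assert state.outputs["answer"]["text"] == "hi"
```
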
9c96b23d55 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-04 00:27:08 +08:00
f811855f79 fix(workflow): ensure variable updates only occur for matching selectors in updateNodeVars 2025-09-03 23:28:32 +08:00
c7510d3f54 refactor(panel): simplify outputSchema mapping for improved readability 2025-09-03 23:06:12 +08:00
e3092837e4 fix file name 2025-09-03 21:26:28 +08:00
46731dc8a1 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-03 20:33:01 +08:00
1214942eb7 refactor(datasets): rename 'markdown' to 'content' for consistency across components 2025-09-03 20:32:48 +08:00
dd89c9aa21 fix preview and recommend plugins 2025-09-03 19:51:07 +08:00
5ab3cd7213 fix preview and recommend plugins 2025-09-03 19:42:20 +08:00
49d268d4a3 fix preview and recommend plugins 2025-09-03 19:20:14 +08:00
aa670c8982 fix(datasource): add datasource icon in tracing panel 2025-09-03 18:23:23 +08:00
c8d60f372d fix: node config not displayed correctly in pipeline 2025-09-03 18:21:51 +08:00
c4f0691454 refactor(workflow): update fetchInspectVars calls to accept empty parameters 2025-09-03 18:15:14 +08:00
451948d49c Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-03 18:10:14 +08:00
fd3b55cc16 fix preview and recommend plugins 2025-09-03 18:09:56 +08:00
0212f0de9f fix(api): fix workflow execution 2025-09-03 17:11:43 +08:00
9eb1c15906 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-03 16:51:57 +08:00
110211d966 refactor(uploader): replace YAML icon with pipeline icon and update label text 2025-09-03 16:51:49 +08:00
08d6fcfd52 fix: Markdown UI problems with large data 2025-09-03 16:48:41 +08:00
d755b2885e refactor(workflow): enhance handleExportDSL function and clean up code structure 2025-09-03 16:22:59 +08:00
b4c98daa8d refactor(workflow): update RAG tool suggestions and improve filtering logic 2025-09-03 16:05:55 +08:00
58e598dac8 Merge branch 'main' into feat/rag-2 2025-09-03 15:01:41 +08:00
d4aed3df5c Merge branch 'feat/queue-based-graph-engine' into feat/rag-2
# Conflicts:
#	api/core/memory/token_buffer_memory.py
#	api/core/rag/extractor/notion_extractor.py
#	api/core/repositories/sqlalchemy_workflow_node_execution_repository.py
#	api/core/variables/variables.py
#	api/core/workflow/graph/graph.py
#	api/core/workflow/graph_engine/entities/event.py
#	api/services/dataset_service.py
#	web/app/components/app-sidebar/index.tsx
#	web/app/components/base/tag-management/selector.tsx
#	web/app/components/base/toast/index.tsx
#	web/app/components/datasets/create/website/index.tsx
#	web/app/components/datasets/create/website/jina-reader/base/options-wrap.tsx
#	web/app/components/workflow/header/version-history-button.tsx
#	web/app/components/workflow/hooks/use-inspect-vars-crud-common.ts
#	web/app/components/workflow/hooks/use-workflow-interactions.ts
#	web/app/components/workflow/panel/version-history-panel/index.tsx
#	web/service/base.ts
2025-09-03 15:01:06 +08:00
c422d732d2 fix: tool bool input can choose file 2025-09-03 14:41:29 +08:00
df0fe49fcc fix: one step run schema type file not support 2025-09-03 14:28:13 +08:00
97875d2b55 plugin dependencies select all 2025-09-03 14:27:29 +08:00
8c97937cae Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-03 13:53:43 +08:00
9a79d8941e Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-03 12:59:34 +08:00
9d3772f7d6 feat(workflow): add DisplayContent component for improved variable inspection and integrate schema type handling 2025-09-03 12:59:22 +08:00
d2e341367e fix(api): fix relations of WorkflowNodeExecutionModel not preloaded
`WorkflowNodeExecutionModel.offload_data` should be preloaded to
provide the offloading information for the execution record.

`RagPipelineService.get_node_last_run` does not use
`DifyAPISQLAlchemyWorkflowNodeExecutionRepository`, so its loading
logic is unchanged.

In this commit we migrate to calling
`DifyAPISQLAlchemyWorkflowNodeExecutionRepository` to avoid this issue.
2025-09-03 12:30:20 +08:00
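
A self-contained illustration of the eager-loading idea behind this commit; the two models below are minimal stand-ins for WorkflowNodeExecutionModel and its offload_data relation, not the real schema:

```python
from sqlalchemy import ForeignKey, select
from sqlalchemy.orm import (
    DeclarativeBase,
    Mapped,
    mapped_column,
    relationship,
    selectinload,
)


class Base(DeclarativeBase):
    pass


class NodeExecution(Base):
    __tablename__ = "node_executions"
    id: Mapped[int] = mapped_column(primary_key=True)
    offload_data: Mapped["Offload | None"] = relationship()


class Offload(Base):
    __tablename__ = "offloads"
    id: Mapped[int] = mapped_column(primary_key=True)
    execution_id: Mapped[int] = mapped_column(ForeignKey("node_executions.id"))


# Eager-load the relation so offload_data is populated when the record
# is read, instead of triggering a lazy load later.
stmt = select(NodeExecution).options(selectinload(NodeExecution.offload_data))
```
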
e761f38d26 fix(api): adjust gevent patching 2025-09-03 12:29:39 +08:00
e2ff7fac77 fix(api): fix variable truncation related field not returned. 2025-09-03 12:29:38 +08:00
617dc247f5 fix preview and recommend plugins 2025-09-03 12:15:03 +08:00
f6acff4cce chore: remove unused variables
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-03 12:12:27 +08:00
3fa48cb5cf chore: remove ty-check from Python style check.
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-03 12:05:41 +08:00
b81745aed8 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-03 11:56:05 +08:00
e1b6da21f4 fix preview and recommend plugins 2025-09-03 11:35:41 +08:00
b10d7d5b22 fix: datasource file can not be chosen 2025-09-03 11:31:53 +08:00
8c41d95d03 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-03 11:06:42 +08:00
885c7d26e5 feat: handle choose select 2025-09-03 11:05:15 +08:00
7896eeec5b fix: correct run history tracing sequence 2025-09-03 10:56:23 +08:00
38e24922e1 Merge branch 'feat/tool-rag-tag' into feat/rag-2 2025-09-03 10:27:02 +08:00
d2787dc925 feat: tools add rag tag 2025-09-03 10:25:08 +08:00
706969d812 fix(workflow): enhance variable inspection by integrating schema type handling and refactoring logic 2025-09-03 10:17:38 +08:00
9d004a0971 test: fix test
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-03 02:11:37 +08:00
02fcd08c08 [autofix.ci] apply automated fixes 2025-09-02 17:34:07 +00:00
77a9a73d0d Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-03 01:33:17 +08:00
7623dc14bb fix preview and recommend plugins 2025-09-02 20:12:45 +08:00
1ad46d0962 fix(pipeline): add handling for RAG pipeline variables in workflow state 2025-09-02 20:11:38 +08:00
5e854238b0 fix(workflow): improve formatting and add checkbox type to input variable type mapping 2025-09-02 19:57:18 +08:00
343f1a375f fix preview and recommend plugins 2025-09-02 19:41:29 +08:00
fc4bc08796 feat(workflow): enhance RAG recommended plugins structure and update related components 2025-09-02 19:11:03 +08:00
60da4c9048 feat: set var inspect schema type 2025-09-02 18:44:49 +08:00
32a009654f feat(input-field): add isEditMode prop to InputFieldForm and update handling of variable changes 2025-09-02 18:19:40 +08:00
1522fd50df Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-02 17:25:54 +08:00
00b5772012 feat(portal): add customContainer prop to PortalToFollowElem for flexible rendering 2025-09-02 17:25:47 +08:00
8467494706 fix preview and recommend plugins 2025-09-02 17:02:25 +08:00
a8cd1e2483 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-02 16:44:33 +08:00
7ace7e65e1 fix preview and recommend plugins 2025-09-02 16:44:18 +08:00
273dae6738 fix(header): update boolean form type to checkbox and improve JSX formatting in form input component 2025-09-02 16:26:41 +08:00
1770b93e5b chore(graph_engine): Add a TODO comment in _update_response_outputs in event_handlers
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-02 15:20:03 +08:00
d8ff4aa9ba feat(graph_engine): Handle NodeRunAgentLogEvent
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-02 15:02:07 +08:00
9f8f21bf87 chore: remove backup files
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-02 15:01:58 +08:00
dfb967773b Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-02 11:56:13 +08:00
56fc9088dd add recommended rag plugin endpoint 2025-09-02 11:56:05 +08:00
0b0dc63f29 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-02 11:52:25 +08:00
bf09c3eb10 feat(rag): integrate RAG recommended plugins into workflow component 2025-09-02 10:43:57 +08:00
e5e52ab4b9 refactor(value-content): improve JSX formatting for readability in variable inspect component 2025-09-02 10:19:01 +08:00
826b9d5b21 add recommended rag plugin endpoint 2025-09-01 20:41:14 +08:00
633de2cb47 add recommended rag plugin endpoint 2025-09-01 20:12:27 +08:00
d94e03c72b add recommended rag plugin endpoint 2025-09-01 19:38:28 +08:00
89ec13ec67 fix(api): fix ToolNode._extract_variable_selector_to_variable_mapping
The original selector syntax does not match our current implementation
for injecting user inputs into VariablePool.
2025-09-01 18:11:33 +08:00
1103130f81 add recommended rag plugin endpoint 2025-09-01 18:03:35 +08:00
493dae239f add recommended rag plugin endpoint 2025-09-01 17:08:37 +08:00
eb7b21c7f1 Merge remote-tracking branch 'upstream/feat/rag-2' into feat/rag-2 2025-09-01 17:06:53 +08:00
ad5a2c77c1 Merge remote-tracking branch 'upstream/feat/big-var-value-show-fe' into feat/rag-2 2025-09-01 17:06:23 +08:00
acbf27eb65 chore(api): adjust migration order and date 2025-09-01 15:20:57 +08:00
e2ae89e08d fix(tests): fix broken tests and linter issues 2025-09-01 14:55:35 +08:00
1676207776 add recommended rag plugin endpoint 2025-09-01 14:50:02 +08:00
8c780df8fd Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-01 14:46:08 +08:00
7920714f49 add recommended rag plugin endpoint 2025-09-01 14:45:56 +08:00
8433cf4437 refactor(graph_engine): Merge event_collector and event_emitter into event_manager
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 13:15:58 +08:00
bb5d52539c refactor(graph_engine): Merge branch_handler into edge_processor
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 12:53:06 +08:00
88622f70fb refactor(graph_engine): Move setup methods into __init__
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 12:08:03 +08:00
a41af6f322 refactor: update file upload and download handling for pipeline format 2025-09-01 11:13:11 +08:00
ce3eb0fcbb fix: rag pipeline template 2025-09-01 10:23:08 +08:00
0fdb1b2bc9 refactor(graph_engine): Correct private attributes and private methods naming
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 04:37:23 +08:00
a5cb9d2b73 refactor(graph_engine): inline output_registry into response_coordinator
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 03:59:53 +08:00
64c1234724 refactor(graph_engine): Merge worker management into one WorkerPool
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 03:23:47 +08:00
202fdfcb81 refactor(graph_engine): Remove backward compatibility code
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 02:41:16 +08:00
e2f4c9ba8d refactor(graph_engine): Merge state managers into unified_state_manager
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-01 02:08:08 +08:00
546d75d84d Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-09-01 00:29:28 +08:00
63c035d8a2 chore(api): migrate import for db 2025-08-31 16:46:25 +08:00
e9e7d4f3cd Merge branch 'feat/rag-2' into feat/workflow-draft-var-optimize 2025-08-31 15:17:23 +08:00
c72d847ac7 feat(api): support array[boolean] truncation 2025-08-31 14:32:53 +08:00
982fd9170c WIP: test(api): tests for truncation logic 2025-08-31 13:45:16 +08:00
91fac9b720 WIP: feat(api): draft var cleanup task 2025-08-31 13:45:16 +08:00
a527bd42b5 chore(api): optimize file url signing 2025-08-31 13:45:16 +08:00
621b75b343 feat(api): implement truncation for SSE events 2025-08-31 13:45:15 +08:00
6b9d2e98b9 feat(api): Implement truncation for WorkflowNodeExecution 2025-08-31 13:44:39 +08:00
2fd337e610 feat(api): add WorkflowNodeExecutionOffload model 2025-08-31 13:44:39 +08:00
a7aa17e361 chore(api): remove orphan files in draft var cleanup script 2025-08-31 13:44:38 +08:00
13eb9f7d7d feat(api): implement truncation for draft var 2025-08-31 13:44:38 +08:00
d7db58cabd feat(api): implement VariableTruncator 2025-08-31 13:44:38 +08:00
0cf8a80bdd chore(api): Introduce variable truncation configuration 2025-08-31 13:44:38 +08:00
a10586c8ea fixup! feat(api): Add migration for WorkflowDraftVariableFile 2025-08-31 13:44:38 +08:00
40faa9ce16 refactor(api): Inject db dependency to FileService 2025-08-31 13:44:38 +08:00
58dfae60f0 feat(api): Add migration for WorkflowDraftVariableFile 2025-08-31 13:44:38 +08:00
de08b2310c feat(api): Introduce WorkflowDraftVariableFile model
This model is used to track offloaded variable values for
`WorkflowDraftVariable`.
2025-08-31 13:44:38 +08:00
e9c6038192 docs(api): update docs for UploadFile 2025-08-31 13:44:38 +08:00
a8fe4ea802 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-30 16:36:10 +08:00
82193580de chore: improve typing
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-30 16:35:57 +08:00
1fd27cf3ad Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-30 00:13:45 +08:00
1e9bfd8872 feat(chunk-card-list): implement ChunkCard and QAItem components, refactor ChunkCardList to utilize new structure and types 2025-08-29 23:28:43 +08:00
11d32ca87d test: fix web test
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-29 23:20:28 +08:00
5415d0c6d1 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-29 23:17:30 +08:00
d8af8ae4e6 fix: update workflow service tests for new graph engine
- Update method calls from _handle_node_run_result to _handle_single_step_result
- Add required fields (id, node_id, node_type, start_at) to graph events
- Use proper NodeType enum values instead of strings
- Fix imports to use correct modules (Node instead of BaseNode)
- Ensure event generators return proper generator objects

These tests were failing because the internal implementation changed
with the new graph engine architecture.
2025-08-29 23:04:33 +08:00
04e5d4692f Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-29 22:34:47 +08:00
3aa48efd0a test(test_workflow_service): Use new engine's method.
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-29 22:06:10 +08:00
c2afb84884 fix chunk format 2025-08-29 17:10:18 +08:00
8eb78c04b2 chore(token_buffer_memory): code format
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-29 17:02:51 +08:00
22ee318cf8 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-29 17:01:42 +08:00
3c0adfb48a fix chunk format 2025-08-29 16:27:22 +08:00
b3dbf9fe94 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-29 16:20:54 +08:00
a597ae1a8a feat(block-selector): add RAG tool suggestions component and integrate with existing tools 2025-08-29 16:20:43 +08:00
f2bc4f5d87 fix: resolve type error in node_factory by using type guard for node_type_str 2025-08-29 16:16:58 +08:00
d7d456349d Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-29 16:14:04 +08:00
65215135e5 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-29 14:49:05 +08:00
5cf795f2b8 fix preview run 2025-08-29 14:48:51 +08:00
1c8190f142 feat(search-box): enhance marketplace search box with new triggers and conditional rendering 2025-08-29 14:45:04 +08:00
dce4d0ff80 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-29 13:22:13 +08:00
3dee8064ba feat: enhance typing 2025-08-29 13:17:02 +08:00
d0dd728e6c Merge branch 'main' into feat/rag-2
# Conflicts:
#	api/core/app/entities/queue_entities.py
#	api/core/workflow/graph_engine/entities/event.py
2025-08-29 11:29:51 +08:00
0bb3407385 fix preview run 2025-08-29 11:25:08 +08:00
05c21883f9 fix(pipeline): make kb node deletable (#24731) 2025-08-29 09:19:09 +08:00
5b4335c4b5 fix preview run 2025-08-28 22:02:22 +08:00
39080eed10 fix preview run 2025-08-28 22:01:49 +08:00
1db04aa729 Merge branch 'feat/queue-based-graph-engine' into feat/rag-2 2025-08-28 18:12:49 +08:00
64772fb413 fix preview run 2025-08-28 18:12:27 +08:00
1022e67fb6 fix: adjust padding in search box component for improved layout 2025-08-28 18:02:19 +08:00
a67589d5db feat: add title property to DataSourceDefaultValue type in block-selector 2025-08-28 17:25:26 +08:00
bfbb36756a feat(graph_engine): Add NodeExecutionType.ROOT and auto mark skipped in Graph.init
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 16:41:51 +08:00
9448d49305 feat: add 'rag' tag to plugin constants and translations in multiple languages 2025-08-28 16:21:53 +08:00
169a8edc28 refactor: update import statement style in IfElseNode component 2025-08-28 15:47:39 +08:00
d7e0c5f759 chore: use 'XXX | None' instead of Optional[XXX] in graph.py 2025-08-28 15:45:22 +08:00
9c93a49605 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-28 15:40:11 +08:00
f6ed1c5643 fix: update chunk structure tip messages in English and Chinese translations for clarity 2025-08-28 15:40:02 +08:00
334bc8f1a2 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-28 15:39:36 +08:00
a9f7ee029e fix preview run 2025-08-28 15:39:22 +08:00
c396788128 chore(graph_engine): add final mark to classes
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 15:38:35 +08:00
edd41a30eb feat: add AddChunks icon and related components for knowledge base chunk structure 2025-08-28 15:31:30 +08:00
580201ed2c merge main 2025-08-28 14:57:30 +08:00
e955a603c6 fix: can choose file in add in variable aggregator 2025-08-28 14:19:07 +08:00
740f1c5f2c fix: outdated data caused by tool values not updating 2025-08-28 14:06:28 +08:00
9a13cb5bdf refactor: remove unused state management in usePipelineRun hook 2025-08-28 13:58:03 +08:00
048feb4165 refactor: update local file and online drive state management in create-from-pipeline components 2025-08-28 13:47:20 +08:00
843b14ccc6 refactor: Refactor online drive breadcrumbs navigation 2025-08-28 10:56:51 +08:00
e3a7b1f691 fix: type hints
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 05:24:18 +08:00
8aab7f49c3 chore(graph_engine): Use XXX | None instead of Optional[XXX] 2025-08-28 05:09:33 +08:00
1e12c1cbf2 [autofix.ci] apply automated fixes 2025-08-27 21:00:36 +00:00
affedd6ce4 chore(graph_engine): Use XXX | None instead of Optional[XXX] 2025-08-28 04:59:49 +08:00
ef21097774 refactor(graph_engine): Remove unnecessary check from SkipPropagator
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 04:45:26 +08:00
1d377fe994 refactor(graph_engine): Use _ to mark unused variable in BranchHandler
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 04:44:45 +08:00
c82697f267 refactor(graph_engine): Remove node_id from SkipPropagator.skip_branch_paths
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 04:43:56 +08:00
98b25c0bbc refactor(graph_engine): Convert attrs to private in error_handler
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 04:42:37 +08:00
1cd0792606 chore(graph_events): Improve type hints
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 04:41:48 +08:00
7cbf4093f4 chore(graph_engine): Use TYPE | None instead of Optional
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 04:30:50 +08:00
8129ca7c05 chore(graph_engine): Move error_strategy.py to protocols/
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 04:29:32 +08:00
65617f000d feat(event_collector): Update to use ReadWriteLock 2025-08-28 03:26:42 +08:00
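The event_collector change above suggests swapping a plain mutex for a read/write lock, so many readers can scan collected events while a single writer appends. Python's stdlib has no RW lock; a minimal condition-variable sketch under that assumption (class name and API are hypothetical, not Dify's actual implementation):

```python
import threading


class ReadWriteLock:
    """Many concurrent readers, one exclusive writer; a hypothetical sketch."""

    def __init__(self) -> None:
        self._cond = threading.Condition()
        self._readers = 0

    def acquire_read(self) -> None:
        with self._cond:  # blocks while a writer holds the condition's lock
            self._readers += 1

    def release_read(self) -> None:
        with self._cond:
            self._readers -= 1
            if self._readers == 0:
                self._cond.notify_all()  # wake a waiting writer

    def acquire_write(self) -> None:
        self._cond.acquire()  # keep the lock held: new readers must wait
        while self._readers > 0:
            self._cond.wait()  # releases the lock so in-flight readers can drain

    def release_write(self) -> None:
        self._cond.release()
```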
635eff2e25 test(graph_engine): remove outdated tests
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 02:53:19 +08:00
55085a9ca2 chore(graph_engine): add type hint for event_queue
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-28 02:38:56 +08:00
9dc1e9724e Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-28 02:26:40 +08:00
4403a26f37 merge new graph engine 2025-08-27 20:44:14 +08:00
88abaa840c merge new graph engine 2025-08-27 18:33:38 +08:00
4e2c3bfb05 fix: render error for non-object struct schema 2025-08-27 18:32:04 +08:00
a644153e9f feat: just use chunk type in knowledge base 2025-08-27 18:16:46 +08:00
0316eb6064 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-27 18:09:55 +08:00
6a56649be2 feat(web): add support for Tool block type in BlockIcon component 2025-08-27 18:09:48 +08:00
8a585607c1 fix: incorrect tool schema 2025-08-27 18:07:33 +08:00
c3f66e2901 Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine 2025-08-27 18:05:35 +08:00
90d72f5ddf merge new graph engine 2025-08-27 17:46:46 +08:00
6c8212d509 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-27 17:46:05 +08:00
19d3ba42e5 chore: schema type change 2025-08-27 17:40:05 +08:00
ee144452e2 Merge branch 'main' into feat/rag-2 2025-08-27 17:28:21 +08:00
367b2d0320 refactor(web): streamline data source before run form and enhance run handling logic 2025-08-27 16:56:33 +08:00
bd294ffe0d feat: file schema file replace 2025-08-27 16:07:31 +08:00
392514fa13 Merge branch 'feat/queue-based-graph-engine' into feat/rag-2
# Conflicts:
#	api/commands.py
#	api/core/app/apps/common/workflow_response_converter.py
#	api/core/llm_generator/llm_generator.py
#	api/core/plugin/entities/plugin.py
#	api/core/plugin/impl/tool.py
#	api/core/rag/index_processor/index_processor_base.py
#	api/core/workflow/entities/workflow_execution.py
#	api/core/workflow/entities/workflow_node_execution.py
#	api/core/workflow/enums.py
#	api/core/workflow/graph_engine/entities/graph.py
#	api/core/workflow/graph_engine/graph_engine.py
#	api/core/workflow/nodes/enums.py
#	api/services/dataset_service.py
2025-08-27 16:05:59 +08:00
86e7cb713c [autofix.ci] apply automated fixes 2025-08-27 07:38:26 +00:00
0f29244459 fix: test
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-27 15:37:37 +08:00
48cbf4c78f [autofix.ci] apply automated fixes 2025-08-27 15:33:30 +08:00
8c35663220 feat: queue-based graph engine
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-27 15:33:28 +08:00
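8c35663220 is the branch's core change. The message carries no detail, but the name suggests scheduling node execution from a ready queue instead of recursive graph traversal; a minimal single-threaded sketch under that assumption (the node/edge shapes and the `execute` callback are hypothetical). Roots starting in the ready queue also matches the later NodeExecutionType.ROOT commit:

```python
import queue
from collections import defaultdict


def run_graph(nodes: dict, edges: list[tuple[str, str]], execute) -> None:
    """Kahn-style scheduling: a node runs once all its predecessors finish."""
    indegree: dict[str, int] = defaultdict(int)
    successors: dict[str, list[str]] = defaultdict(list)
    for src, dst in edges:
        indegree[dst] += 1
        successors[src].append(dst)

    ready: queue.Queue[str] = queue.Queue()
    for node_id in nodes:
        if indegree[node_id] == 0:  # root nodes start in the ready queue
            ready.put(node_id)

    done = 0
    while done < len(nodes):
        node_id = ready.get()
        execute(nodes[node_id])  # the real engine emits events to a collector here
        done += 1
        for nxt in successors[node_id]:
            indegree[nxt] -= 1
            if indegree[nxt] == 0:
                ready.put(nxt)
```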
4cb286c765 feat: handle file schema display 2025-08-27 14:46:44 +08:00
ff33d42c55 rag pipeline initial template 2025-08-27 14:39:39 +08:00
e1a2755e3b fix(web): add spinning animation to loader icon in variable inspect trigger for better UX 2025-08-27 13:55:43 +08:00
b1f348fb31 refactor(web): reorganize imports in document index component for improved clarity 2025-08-27 13:48:04 +08:00
5f0bae0ae5 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-27 13:46:10 +08:00
57c242fff3 Update dataset pipeline header text and translations for improved clarity 2025-08-27 13:46:03 +08:00
1814b99c79 fix(web): correct title color in option card in dark mode (#24579) 2025-08-27 11:27:25 +08:00
f24f573731 feat: show struct schema in output 2025-08-27 11:18:22 +08:00
da48e54778 Merge branch 'main' into feat/rag-2 2025-08-27 11:16:27 +08:00
2db699522c feat: compare schema type 2025-08-27 10:48:38 +08:00
5c15fe7b01 fix pipeline export 2025-08-26 18:52:24 +08:00
da9b38f642 pipeline name 2025-08-26 18:26:25 +08:00
918958743f Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-26 17:18:49 +08:00
51c3f1b2e2 add pipeline async run 2025-08-26 17:18:43 +08:00
bdcd9ad9cb refactor(workflow): update output schema handling in tool and data source nodes for improved integration with plugin info 2025-08-26 16:48:18 +08:00
1f5fd13359 refactor(plugin_migration): improve code readability by formatting the install function signature 2025-08-26 16:03:59 +08:00
61f2f8fd31 refactor(schemas): update titles in JSON schemas for consistency and clarity 2025-08-26 16:03:59 +08:00
60fb242f27 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-26 15:52:01 +08:00
85f0d31fab add pipeline async run 2025-08-26 15:51:49 +08:00
a282b6cea4 refactor(tests): remove unused TestPerformance class from schema resolver tests 2025-08-26 15:47:34 +08:00
46019ea927 refactor(resolver): enhance schema reference resolution with improved error handling and caching 2025-08-26 15:47:34 +08:00
7e20273bce refactor(resolver): implement BFS approach for resolving references in Dify schemas 2025-08-26 15:47:34 +08:00
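7e20273bce, together with the caching and error-handling commit just above it, reworks $ref resolution. A minimal BFS sketch of the idea, assuming local `#/definitions/...` references and acyclic schemas; real code would also need cycle and error handling, per 46019ea927:

```python
from collections import deque


def resolve_refs(schema: dict, definitions: dict) -> dict:
    """Resolve local '#/definitions/...' refs breadth-first (hypothetical sketch)."""
    cache: dict[str, dict] = {}  # each definition is expanded at most once
    pending = deque([schema])
    while pending:
        node = pending.popleft()
        while "$ref" in node:  # follow chained refs in place
            name = node.pop("$ref").rsplit("/", 1)[-1]
            node.update(cache.setdefault(name, dict(definitions[name])))
        for value in node.values():
            if isinstance(value, dict):
                pending.append(value)
            elif isinstance(value, list):
                pending.extend(v for v in value if isinstance(v, dict))
    return schema
```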
1d2d0ff49f add pipeline async run 2025-08-26 15:32:39 +08:00
c77bdd1fb3 add pipeline async run 2025-08-26 15:20:40 +08:00
0f3ca1d8f4 fix: ruff 2025-08-26 12:51:40 +08:00
d1be9544fb fix: restful to restx 2025-08-26 12:51:40 +08:00
1692a7bd1b Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-26 12:06:43 +08:00
c3c4ef3a8e add pipeline async run 2025-08-26 12:06:34 +08:00
c911ac8c01 refactor(console): add spec controller import and enhance tool output schema resolution 2025-08-26 11:07:12 +08:00
1bc77204b1 fix(use-initial): handle potential undefined nodeData by providing a fallback object 2025-08-26 10:23:13 +08:00
1ad7b0e852 add pipeline async run 2025-08-25 18:26:05 +08:00
e0e3224413 refactor(option-card): enhance styling and accessibility by updating class names and adding title attribute 2025-08-25 17:46:27 +08:00
f418164648 add pipeline async run 2025-08-25 17:33:49 +08:00
4fc498bd48 fix 2025-08-25 16:55:08 +08:00
14d8788dac refactor: remove unused DataSourceType import and simplify iconType handling in DatasetDetailLayout 2025-08-25 16:43:52 +08:00
101d6504fb fix 2025-08-25 16:14:22 +08:00
d440577913 refactor(operations): remove document download functionality and associated UI elements 2025-08-25 15:32:48 +08:00
85fd97e090 Merge branch 'main' into feat/rag-2 2025-08-25 15:30:18 +08:00
412e4b04f3 fix(use-settings-display): correct translation key for keyword search in settings display 2025-08-25 15:20:59 +08:00
cc1f0d4d8d fix 2025-08-25 13:55:42 +08:00
1b9c817dba feat: add process data truncate 2025-08-25 11:26:57 +08:00
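1b9c817dba and the related commits further down (559d014b29, 3c4b374038, b4e76af4a7, 2391e582f2) all revolve around keeping large process data out of the debug UI while leaving the full value exportable. A sketch of the shape such truncation could take; the limit and field names are assumptions, not Dify's actual values:

```python
MAX_PREVIEW_CHARS = 10_000  # assumed limit, not Dify's actual config value


def truncate_process_data(value: str, limit: int = MAX_PREVIEW_CHARS) -> dict:
    truncated = len(value) > limit
    return {
        "value": value[:limit],
        "truncated": truncated,  # UI offers export of the full data when True
        "full_size": len(value),
    }
```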
b7ed2cade1 initial template 2025-08-22 18:12:24 +08:00
8c44151e6f Merge branch 'main' into feat/rag-2 2025-08-22 17:40:34 +08:00
570b644a7e refactor(header, option-card): improve layout and responsiveness by adjusting flex properties and adding title attributes 2025-08-22 16:49:28 +08:00
5824a2d71c refactor(use-tool-icon): make data parameter optional and update usage in variable inspect components 2025-08-22 16:02:14 +08:00
83cc3b4710 refactor(credential-selector): enhance layout with overflow handling for better UI responsiveness 2025-08-22 14:49:24 +08:00
0ca7c29c47 feat: preview shows large data 2025-08-22 11:35:54 +08:00
2f6c51927e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-22 11:34:13 +08:00
6b7ea9885c support name generate 2025-08-22 11:34:01 +08:00
68ba41e57e chore: fix truncate change problem 2025-08-22 11:08:54 +08:00
8ff4f34773 refactor(create-pipeline): remove unused CreateForm and CreateModal components, streamline dataset creation process 2025-08-22 10:49:23 +08:00
4545f509a5 support name generate 2025-08-21 19:15:16 +08:00
af74019d6e refactor(knowledge-base): update title from 'Question-Answer' to 'Q&A' for improved clarity 2025-08-21 18:13:44 +08:00
d3b6631df8 refactor(retrieval-method): replace all instances of invertedIndex with keyword_search in components and translations 2025-08-21 17:31:25 +08:00
5699adf69c refactor(result-preview): update chunk handling to use GeneralChunkPreview type and extract content 2025-08-21 17:07:20 +08:00
42935997aa refactor(retrieval-method): update retrieval method from invertedIndex to keywordSearch across components 2025-08-21 16:29:23 +08:00
f82f06bdd0 feat: support filtering file vars 2025-08-21 16:11:32 +08:00
c5ee6e09d4 fix: output schema file type 2025-08-21 15:12:53 +08:00
60fafc524c feat(data-source): add LOCAL_FILE_OUTPUT to constants and integrate into panel for local file handling 2025-08-21 14:17:25 +08:00
cecef01bf7 feat(rag-pipeline): enhance result preview with chunk formatting and add configuration for preview chunk limit 2025-08-21 11:04:33 +08:00
dfd33b3d84 Merge branch 'main' into feat/rag-2 2025-08-21 09:43:51 +08:00
3e27e97364 feat(publish): improve success and error notifications for knowledge pipeline publishing with localized messages and enhanced user guidance 2025-08-20 18:41:09 +08:00
9d3198f808 refactor(data-source, before-run-form): enhance data handling and user interface for data source selection, integrating new components and improving state management 2025-08-20 18:21:59 +08:00
449755ada4 refactor(test-run, preparation): restructure test run components, enhance data handling, and improve user experience with new loading states and error handling 2025-08-20 16:40:56 +08:00
8ab3f1212b refactor(billing, chunk-preview): update link target and improve file property handling in document preview 2025-08-20 10:23:33 +08:00
be045a68ee refactor(credential-icon, create-from-pipeline, test-run): improve component structure and enhance data handling for online drive files 2025-08-19 15:24:14 +08:00
d4370a8ca5 chore: alert ui 2025-08-19 15:07:53 +08:00
95f60d89ab refactor(datasets): reorganize document components and enhance operations with download functionality 2025-08-19 15:07:52 +08:00
a1666fe058 Merge branch 'main' into feat/rag-2 2025-08-19 14:59:06 +08:00
ed4bd56146 refactor(create-card, template-card): add TODO comments for direct pipeline dataset creation and improve code organization 2025-08-19 14:41:32 +08:00
64897bc6fe chore: hide mock data 2025-08-19 14:21:53 +08:00
559d014b29 chore: use api return truncate 2025-08-19 14:18:41 +08:00
ad523ef4ad feat(publish): enhance success notification for pipeline template publishing with additional information and links 2025-08-19 14:15:49 +08:00
3c4b374038 feat: export run result data when it is too long 2025-08-19 14:04:00 +08:00
d9cdce3f7a fix(i18n): refine Simplified Chinese translations for dataset and pipeline terminology 2025-08-19 13:10:51 +08:00
865e3ee85b fix(billing): adjust padding and margin for pricing plan item components 2025-08-19 10:43:49 +08:00
07887fb24f fix(i18n): update Simplified Chinese translations for "知识管道" ("knowledge pipe") to "知识流水线" ("knowledge pipeline") 2025-08-19 10:24:52 +08:00
fcdbe3b84a fix: plugin service import 2025-08-19 10:06:27 +08:00
38c51b1011 fix(i18n): highlight "プレミアム" ("Premium") in Japanese billing translations 2025-08-18 21:04:57 +08:00
469ed5a311 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-18 21:00:13 +08:00
56273a2dcf fix(i18n): update Japanese and Simplified Chinese translations for billing terms 2025-08-18 20:59:49 +08:00
6ce013ac52 fix: OAuth client parameter retrieval by verifying plugin status 2025-08-18 20:41:22 +08:00
fb6fe4a32b fix(billing): adjust z-index for noise effects in pricing plans and self-hosted plan items 2025-08-18 20:40:45 +08:00
ae183b348c feat: variable preview 2025-08-18 18:16:48 +08:00
b4e76af4a7 feat: hide string values that are too long 2025-08-18 18:15:10 +08:00
4a5c883988 feat(pipeline): implement footer component for dataset creation and enhance UI with new styles 2025-08-18 16:58:43 +08:00
2391e582f2 feat: show big data in debug view 2025-08-18 16:57:41 +08:00
cd760633cb feat(billing): add noise effects to pricing plans and update rendering logic 2025-08-18 14:31:09 +08:00
ece1330567 feat(billing): add Enterprise plan component and update plan rendering logic 2025-08-18 11:25:48 +08:00
386614951f Merge branch 'main' into feat/rag-2 2025-08-18 11:16:18 +08:00
a2892773f2 fix: dark mode color 2025-08-17 15:25:10 +08:00
e80645bfa1 feat: enhance billing plan components with new SVG assets and update styles for premium and enterprise plans 2025-08-15 22:17:32 +08:00
c7d5ec1520 feat: add new pricing assets and update billing plan components to utilize them 2025-08-15 18:39:59 +08:00
8cf98ba0ce feat: add knowledge pipeline publishing feature and update billing context; refactor popup component for conditional rendering 2025-08-15 18:06:15 +08:00
fb10706c20 feat: refactor billing plans components to improve structure and add self-hosted plan item button; update pricing layout and translations 2025-08-15 16:29:45 +08:00
fb7dc4e0e1 fix file 2025-08-15 14:55:37 +08:00
d558f98aa6 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-15 14:17:46 +08:00
89c7f71199 feat: add Footer component and integrate it into Pricing layout; refactor Header styles and update Plans component structure 2025-08-15 14:16:35 +08:00
ff76adc88a [autofix.ci] apply automated fixes 2025-08-15 04:17:51 +00:00
cfc555d05d feat: add knowledge pipeline creation feature 2025-08-15 12:15:37 +08:00
153d5e8f03 refactor: Change plan-item directory 2025-08-15 11:14:38 +08:00
ac456c1a95 feat: refactor billing plans components and add new PlanItem structure with tooltip support 2025-08-15 11:08:36 +08:00
8e88765261 fix file 2025-08-14 18:17:44 +08:00
5a2618d002 fix file 2025-08-14 18:07:52 +08:00
69a821db02 fix file 2025-08-14 18:01:29 +08:00
46224724a2 feat: add Cloud and SelfHosted components with updated PlanSwitcher integration 2025-08-14 15:54:07 +08:00
8a5bcd11f2 fix: add missing newline at end of JSON files for icon components 2025-08-14 15:21:53 +08:00
a8fbf123e4 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-14 15:12:29 +08:00
8b8d257f78 fix file 2025-08-14 15:12:09 +08:00
02720c9b95 Merge branch 'main' into feat/rag-2 2025-08-14 15:03:48 +08:00
d8a9645e83 feat: Implement billing plan selection UI with plan switcher and range options 2025-08-14 15:01:38 +08:00
72ea3b4d01 fix variable_pool 2025-08-13 17:38:14 +08:00
3797416fe0 fix online drive 2025-08-13 15:45:33 +08:00
f74706a4a5 fix online drive 2025-08-13 15:28:18 +08:00
463ca7043d fix: sync doc styling change 2025-08-13 15:13:03 +08:00
5a6818c817 Merge branch 'main' into feat/rag-2 2025-08-13 15:05:57 +08:00
acde411629 fix: Update breadcrumb styles in Online Drive component for improved visual consistency 2025-08-13 14:28:55 +08:00
4d34891ac0 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-13 11:34:27 +08:00
cf46fba753 feat: Enhance Online Drive component with bucket handling and breadcrumb navigation 2025-08-13 11:34:22 +08:00
fcdbfbda4f add credential id 2025-08-12 17:56:28 +08:00
71d8a0c0b6 refactor: Refactor FileIcon component to use useMemo for file type determination and rename loading state variable for clarity 2025-08-12 16:55:00 +08:00
ae3addb922 add credential id 2025-08-12 15:43:11 +08:00
bd1d7f8652 add credential id 2025-08-12 15:38:26 +08:00
a0006ce968 add credential id 2025-08-12 14:45:45 +08:00
2b7243dbc7 add credential id 2025-08-12 11:13:47 +08:00
22b3933cc3 Merge branch 'main' into feat/rag-2
# Conflicts:
#	api/core/workflow/entities/variable_pool.py
2025-08-12 11:13:04 +08:00
1bc506603a add credential id 2025-08-12 11:10:21 +08:00
54b935f609 fix 2025-08-12 10:49:49 +08:00
cf4a526e7f refactor: replace db.session with session in DatasourceProviderService for consistency 2025-08-11 20:35:46 +08:00
543f80ad5d refactor: replace get_real_credential_by_id with get_datasource_credentials in multiple services for consistency 2025-08-11 20:04:04 +08:00
7f328328fb Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-11 18:37:22 +08:00
9893b8110d refactor: rename selectedFileKeys to selectedFileIds and update related logic in online drive components 2025-08-11 18:37:15 +08:00
087a4fbd14 add credential id 2025-08-11 18:26:41 +08:00
ada0875ac4 fix: update default value for expires_at in DatasourceProvider model 2025-08-11 11:40:51 +08:00
6b07e0e8d6 feat: add expiration for OAuth credentials in datasource provider 2025-08-11 11:25:50 +08:00
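6b07e0e8d6 adds credential expiry and ada0875ac4 fixes its default. A minimal sketch of how such an `expires_at` check could look; treating -1 as "never expires" is an assumption for illustration, not confirmed by the commits:

```python
from datetime import datetime, timezone


def is_credential_expired(expires_at: int) -> bool:
    # Assumed convention: -1 means the credential never expires.
    if expires_at == -1:
        return False
    return datetime.now(timezone.utc).timestamp() >= expires_at
```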
fc779d00df Merge branch 'main' into feat/rag-2 2025-08-11 11:15:58 +08:00
58aca75ee0 fix: json schema 2025-08-08 17:38:01 +08:00
8464ec46e6 fix: json schema 2025-08-08 17:38:01 +08:00
ac7953a32c feat: add credential_id handling in CreateFormPipeline and OnlineDrive components 2025-08-08 14:48:58 +08:00
b21d991fdb feat(rag): pass credentialId to online document preview and wire to data source store 2025-08-08 14:26:38 +08:00
df5a4e5c08 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-08 14:18:45 +08:00
d07ce809be add credential id 2025-08-08 14:18:30 +08:00
ec3cb126a0 feat: add clear data functions for online documents, website crawl, and online drive; integrate credential change handling 2025-08-08 13:49:08 +08:00
184c3c88b7 refactor: Refactor online document and online drive components to handle credential changes 2025-08-08 13:43:01 +08:00
097a6fc1e0 fix: variable 2025-08-07 17:22:29 +08:00
d5f82d0d5f fix: json schema 2025-08-07 16:36:59 +08:00
1b3860d012 Merge branch 'main' into feat/rag-2 2025-08-07 16:27:20 +08:00
5729d38776 feat: add CredentialIcon component and integrate it into credential selector for improved avatar display 2025-08-07 15:34:15 +08:00
9e882122ca fix: json schema 2025-08-07 15:28:42 +08:00
e6f1bc165c add tool file preview 2025-08-07 14:35:04 +08:00
842ced218e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-07 11:13:10 +08:00
ca8f80ee33 notion fix 2025-08-07 11:13:02 +08:00
646b798e9c Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-07 10:10:32 +08:00
e0753359f8 fix: adjust Toast component positioning to top-right corner 2025-08-07 10:10:27 +08:00
1d79c21ae3 feat: implement structured output wrapping for pipeline items 2025-08-06 17:41:07 +08:00
5b433aa2d1 feat: add useFloatingRight hook and integrate it into InputFieldEditorPanel and PreviewPanel for dynamic positioning 2025-08-06 16:59:22 +08:00
218e778099 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-06 15:39:27 +08:00
1567b615dc notion fix 2025-08-06 15:39:22 +08:00
29da2e5c19 feat: enhance output schema descriptions and remove unused constants 2025-08-06 15:12:57 +08:00
facbe02cf7 feat: datasource output schema 2025-08-06 15:12:57 +08:00
2a1e1a8042 feat: datasource output schema 2025-08-06 15:12:57 +08:00
13f38045d4 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-06 15:11:45 +08:00
94a0fb6dc1 notion fix 2025-08-06 15:11:37 +08:00
c5a3bf9b9e refactor: simplify Markdown rendering in ChunkContent component 2025-08-06 14:32:28 +08:00
95982d37a6 refactor: remove unused DataSourceOauthBinding import from dataset_service and document_indexing_sync_task 2025-08-06 14:25:39 +08:00
40f3524cfe Merge branch 'main' into feat/rag-2 2025-08-06 14:23:51 +08:00
da68cf48c4 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-06 11:40:35 +08:00
8717a789b3 feat: dynamically import panel components to improve performance 2025-08-06 11:40:30 +08:00
05e96e56e5 notion fix 2025-08-06 11:13:20 +08:00
6954926df9 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-06 10:33:50 +08:00
ec6fabb222 feat: introduce useInputFieldPanel hook to manage input field panel state and refactor related components 2025-08-06 10:32:57 +08:00
6cae1a2872 fix: rag variable 2025-08-05 18:26:07 +08:00
2f163bad8f transform document 2025-08-05 18:16:24 +08:00
6faa4b107b fix: rag variable 2025-08-05 16:31:14 +08:00
8ab5c47737 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-05 14:20:41 +08:00
0a397ac477 feat: Refactor document context and update chunking mode handling across components 2025-08-05 14:19:28 +08:00
09b5cacbad fix: rag variable 2025-08-05 13:02:51 +08:00
522210bad6 fix: rag variable 2025-08-05 11:39:17 +08:00
0975f5bdc2 merge main 2025-08-05 10:31:12 +08:00
201e4cd64d merge main 2025-08-05 10:30:53 +08:00
1e5317d3f0 feat: Enhance InputFieldPanel to manage preview and edit states more effectively 2025-08-01 16:35:27 +08:00
44d569a7c1 feat: Implement input field management panel 2025-08-01 16:27:53 +08:00
ec501cf664 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-01 14:43:00 +08:00
cd9bfe0df3 feat: Update website crawl provider to use jinaReader and synchronize selection changes 2025-08-01 14:42:54 +08:00
d36501203f Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-01 14:11:18 +08:00
f3f3a99e5b transform document 2025-08-01 14:11:11 +08:00
19a93c6554 feat: Enhance Notion integration by adding credential_id to NotionInfo and updating related functions 2025-08-01 14:04:01 +08:00
383ee368e6 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-01 13:59:19 +08:00
79f30daf03 transform document 2025-08-01 13:59:11 +08:00
e7e531dc06 feat: Refactor WebsiteCrawl component to improve header configuration and remove unused memoization 2025-08-01 11:50:50 +08:00
d8ac78056e fix: open input field modal from var picker 2025-08-01 11:28:30 +08:00
f75a3ef212 feat: Enhance InputFieldDialog and PreviewPanel with improved styling for better layout 2025-07-31 20:24:18 +08:00
c9ab0fb8f6 feat: Update InputFieldDialog styling to allow for flexible growth 2025-07-31 20:18:20 +08:00
2dc71f059c feat: Update FooterTip component and enhance InputFieldDialog layout 2025-07-31 18:38:04 +08:00
18af3dfe5d Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-31 16:29:00 +08:00
0b871abe59 feat: Add credential handling to Notion page selector and related components 2025-07-31 16:28:53 +08:00
82819af55c transform document 2025-07-31 15:59:30 +08:00
9915364740 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-31 15:21:18 +08:00
97136ca8f0 transform document 2025-07-31 15:21:06 +08:00
195bf6621a Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-31 15:19:11 +08:00
8711a57d92 feat: Enhance NotionPageSelector and NotionPageSelectorModal with loading states and credential handling 2025-07-31 15:19:03 +08:00
8e96b9ed77 transform document 2025-07-31 11:51:40 +08:00
5a21da00c5 chore: knowledge base single run 2025-07-31 11:20:46 +08:00
a7a4c8228e Merge branch 'main' into feat/rag-2
# Conflicts:
#	web/app/components/workflow/hooks/use-workflow.ts
2025-07-31 10:30:28 +08:00
1b6a925b34 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-30 17:42:10 +08:00
41363459b5 feat: Update Header component button styling and remove redundant onClick from icon 2025-07-30 17:41:55 +08:00
deceaa38f0 feat: datasource oauth default credentials 2025-07-30 16:36:27 +08:00
f7ec255b3e feat: oauth 2025-07-30 15:55:16 +08:00
4dab128900 feat: oauth 2025-07-30 15:52:59 +08:00
76b4288b34 datasource change authed page 2025-07-30 15:23:04 +08:00
a1c38a2740 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-30 15:19:16 +08:00
f8d7d07c13 feat: Introduce CredentialSelector component and remove WorkspaceSelector 2025-07-30 15:19:10 +08:00
69738794bc feat: support custom before run form 2025-07-30 14:43:29 +08:00
f37109ef39 transform document 2025-07-30 14:34:38 +08:00
875aea1c22 feat: datasource reauthentication 2025-07-30 13:39:04 +08:00
c70a7e832e chore: add single run button to data source 2025-07-29 18:31:35 +08:00
ecba9e44ff transform document 2025-07-29 18:17:56 +08:00
a7d4675831 transform document 2025-07-29 18:12:35 +08:00
21df72a57a transform document 2025-07-29 17:56:28 +08:00
240f6890f1 fix: some lint 2025-07-29 16:29:59 +08:00
27f65150d7 fix: run tool cause page crash because of feature context 2025-07-29 16:19:14 +08:00
e2df3f182d transform document 2025-07-29 16:01:06 +08:00
a996c1d90c merge main 2025-07-29 15:45:01 +08:00
e19a07c2e6 merge main 2025-07-29 15:44:23 +08:00
2c6f88ef82 chore: reload vars 2025-07-29 15:24:26 +08:00
786d121fdf Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-29 15:23:22 +08:00
9cfb531e3b transform document 2025-07-29 15:23:11 +08:00
1c813239c9 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-29 14:52:25 +08:00
fbf3abddf2 feat: Add tooltip for configuration button in header and update translations 2025-07-29 14:52:17 +08:00
e89398f415 add old auth transform 2025-07-29 14:13:50 +08:00
cc911f46f2 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-29 13:46:23 +08:00
c2cbdcd3bf feat: Enhance dataset info and card components with memoization for improved performance 2025-07-29 13:46:17 +08:00
46db1acf98 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-29 11:18:14 +08:00
6d00ffa509 add old auth transform 2025-07-29 11:18:06 +08:00
446301a443 fix: search method 2025-07-29 11:10:16 +08:00
657e813c7f add old auth transform 2025-07-28 19:29:36 +08:00
829e6f0d1a add old auth transform 2025-07-28 19:29:07 +08:00
b0cd4daf54 feat: Add credential selector for online documents and online drive 2025-07-28 16:55:40 +08:00
fc3250678c fix: module not found 2025-07-28 16:36:40 +08:00
a95cf6f8b0 merge main 2025-07-28 16:00:38 +08:00
acae51c309 initial nodes 2025-07-28 15:38:48 +08:00
347cd6befc publish toast 2025-07-28 14:29:05 +08:00
50fed69c0c r2 transform 2025-07-28 14:00:18 +08:00
9dbc887da5 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-25 17:43:23 +08:00
38e6391be4 r2 transform 2025-07-25 17:43:15 +08:00
e1861f5f9c fix: add dataset reset functionality and improve warning message consistency 2025-07-25 17:30:19 +08:00
f887bbedab r2 transform 2025-07-25 17:06:29 +08:00
f4dd22b9cb r2 transform 2025-07-25 15:17:03 +08:00
7f6759e0ac r2 transform 2025-07-25 14:41:39 +08:00
b01c66acbc Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-25 14:10:20 +08:00
ed1bec9344 r2 transform 2025-07-25 14:10:12 +08:00
4fdcd74f52 fix: publish 2025-07-25 12:00:00 +08:00
bb609ee3ca Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-24 17:43:38 +08:00
1938991162 r2 transform 2025-07-24 17:43:26 +08:00
bae2af0c85 Merge branch 'main' into feat/rag-2 2025-07-24 17:40:04 +08:00
3b0be18d47 r2 transform 2025-07-24 17:08:39 +08:00
0417e2f4d9 fix: auth provider 2025-07-24 16:58:25 +08:00
16603952a0 datasource template 2025-07-23 18:20:32 +08:00
5401299e6e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-23 17:02:12 +08:00
98ef4ffb4d r2 transform 2025-07-23 17:02:01 +08:00
761d717656 fix 2025-07-23 16:59:47 +08:00
44a8b4120b pipeline template 2025-07-23 15:34:32 +08:00
53cdee1e2f pipeline template 2025-07-23 10:46:45 +08:00
d76e37b018 add datasource empty node 2025-07-22 16:48:24 +08:00
3b8d96f45c merge main 2025-07-22 13:52:24 +08:00
10657b6bd3 datasource auth 2025-07-22 10:39:19 +08:00
e5c7fd5b14 feat: enforce maximum length for authorization name in datasource authentication 2025-07-21 19:31:58 +08:00
12661ce0ca feat: improve authorization name validation and enhance credential encryption handling 2025-07-21 19:28:00 +08:00
34072371a1 feat: refactor OAuth client retrieval in datasource authentication 2025-07-21 18:55:21 +08:00
666868fa35 feat: remove unused import for CredentialsValidateFailedError in datasource provider service 2025-07-21 18:53:36 +08:00
ba7f0b3004 feat: enhance datasource authentication by improving credential handling and updating API parameters 2025-07-21 18:51:55 +08:00
386d320650 rename auth name 2025-07-21 17:59:13 +08:00
4d36e784b7 merge main 2025-07-21 17:45:26 +08:00
caa2de3344 datasource oauth 2025-07-21 17:41:19 +08:00
039a053027 feat: standardize credential type string for API key in datasource provider service 2025-07-21 17:40:50 +08:00
7141181732 feat: remove unnecessary blank line in datasource authentication setup 2025-07-21 17:00:10 +08:00
17da96bdd8 feat: refactor datasource authentication APIs for improved credential management 2025-07-21 16:43:50 +08:00
57b48f51b5 feat: convert credential form schemas to lists for consistency 2025-07-21 15:51:24 +08:00
af94602d37 feat: add APIs for setting default datasource provider and updating provider name 2025-07-21 15:49:39 +08:00
9c96f1db6c r2 transform 2025-07-21 14:51:40 +08:00
51d7a9b6be feat: mask hidden values in tenant OAuth client retrieval 2025-07-21 14:35:46 +08:00
529eca70bc feat: enhance datasource credential and OAuth schema serialization 2025-07-21 14:31:26 +08:00
ef8d941633 feat: simplify OAuth encrypter retrieval and remove unnecessary validation 2025-07-21 13:48:05 +08:00
e97f03c130 feat: add custom OAuth client setup and enhance datasource provider model with avatar_url 2025-07-21 12:36:02 +08:00
7364d051d2 feat: refactor provider name generation to use incremental naming & enforce unique constraints 2025-07-18 21:34:59 +08:00
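7364d051d2 pairs incremental naming with a unique constraint so concurrent authorizations cannot collide on the same display name. A hedged sketch of the naming side; the function name and " N" suffix format are assumptions:

```python
import re


def next_provider_name(base: str, existing: set[str]) -> str:
    """Smallest free 'base N' suffix; pairs with a DB unique constraint."""
    if base not in existing:
        return base
    taken = {
        int(m.group(1))
        for name in existing
        if (m := re.fullmatch(re.escape(base) + r" (\d+)", name))
    }
    n = 1
    while n in taken:
        n += 1
    return f"{base} {n}"


# next_provider_name("Notion", {"Notion", "Notion 1"}) -> "Notion 2"
```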
23a5ff410e feat: add avatar_url to datasource providers and update OAuth handling 2025-07-18 19:47:59 +08:00
34a6ed74b6 r2 transform 2025-07-18 19:22:31 +08:00
dc359c6442 r2 transform 2025-07-18 19:04:46 +08:00
dd59cea085 migrations 2025-07-18 14:58:10 +08:00
ab775bce26 feat: remove BuiltinDatasourceProvider class and related credential handling 2025-07-18 14:47:08 +08:00
82b531e949 feat: remove tenant_plugin_auto_upgrade_strategies table and adjust datasource_oauth_params 2025-07-18 14:44:01 +08:00
f325662141 feat: refactor DatasourceNode and KnowledgeIndexNode to use _node_data attribute 2025-07-18 14:25:11 +08:00
32fe8313b4 feat: import and extend dayjs relativeTime plugin in multiple components 2025-07-18 14:15:07 +08:00
6ca5bc1063 feat: add datasource OAuth client setup command and refactor related models 2025-07-18 14:11:15 +08:00
f153319a77 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-18 14:05:55 +08:00
0e428afe43 feat: convert VersionHistoryPanel to dynamic import for improved performance 2025-07-18 14:05:34 +08:00
f336245a45 fix: tool node text overflow 2025-07-18 14:05:08 +08:00
5b2c99e183 Merge branch 'main' into feat/rag-2 2025-07-18 14:03:48 +08:00
399866e0ac fix: add 'no-spinner' class to InputNumber component for better styling 2025-07-18 13:42:05 +08:00
633bfc25e0 feat: update provider parameter naming and refactor related logic in datasource_auth.py 2025-07-18 13:13:20 +08:00
0ac5c0bf3e feat: refactor OAuth provider handling and improve provider name generation 2025-07-18 12:47:32 +08:00
9f2a9ad271 fix: update keyboard shortcut and clean up component structure in various files 2025-07-17 18:22:03 +08:00
59f68cd63b fix: ensure default values are handled correctly in InputNumber and related components 2025-07-17 18:15:52 +08:00
3388e83920 Merge remote-tracking branch 'origin/main' into feat/rag-2
# Conflicts:
#	.github/workflows/build-push.yml
#	web/app/components/workflow/nodes/_base/components/workflow-panel/index.tsx
2025-07-17 17:56:56 +08:00
e0e0a7661d merge main 2025-07-17 16:49:59 +08:00
de47b56ca4 merge main 2025-07-17 16:49:22 +08:00
01566035e3 merge main 2025-07-17 16:48:43 +08:00
cc96b7f507 r2 transform 2025-07-17 16:45:30 +08:00
ad7650e724 r2 transform 2025-07-17 16:36:40 +08:00
f79a90fb21 fix agent default 2025-07-17 16:07:58 +08:00
d0c78d079b r2 transform 2025-07-17 15:32:58 +08:00
cc06ce60fd Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-17 15:22:51 +08:00
761ea0eedb r2 transform 2025-07-17 15:22:39 +08:00
c706793847 fix: file upload config 2025-07-17 13:48:23 +08:00
2c52561060 datasource oauth 2025-07-17 11:18:08 +08:00
a39d7e1f85 r2 transform 2025-07-16 19:26:33 +08:00
aaa5b0e295 r2 transform 2025-07-16 18:05:40 +08:00
3bdb40f37b Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-16 14:25:30 +08:00
c660c98b04 r2 transform 2025-07-16 14:25:16 +08:00
e7f31a66be fix: correct notification type for successful dataset conversion 2025-07-16 14:12:33 +08:00
675ff9bd7d r2 transform 2025-07-16 12:00:26 +08:00
6363ecef97 r2 transform 2025-07-16 11:49:59 +08:00
5c0d19e36d fix: improve handleVariableNameBlur logic to prevent setting label when it already exists 2025-07-16 10:33:59 +08:00
e0753ebfd1 fix: update dataset conversion endpoint path for correct API integration 2025-07-16 10:08:46 +08:00
b8e9b97f07 feat: implement dataset conversion to pipeline with success and error notifications 2025-07-16 09:53:11 +08:00
384073f025 r2 transform 2025-07-16 02:02:08 +08:00
2012ea3213 r2 transform 2025-07-16 01:50:37 +08:00
1ad73ccdc8 r2 2025-07-15 17:54:53 +08:00
96484731a2 r2 2025-07-15 16:13:45 +08:00
537e535d9a r2 2025-07-15 15:33:40 +08:00
3a3b60bab5 r2 2025-07-15 15:00:38 +08:00
63111e8050 r2 2025-07-14 18:17:34 +08:00
405139c377 fix: add isRunning prop to ProcessDocuments and related components for better processing state management 2025-07-14 17:45:19 +08:00
a919e3e135 r2 2025-07-14 17:33:08 +08:00
3e5772c50c fix: enhance layout and tooltip handling in Actions component 2025-07-14 16:36:51 +08:00
cb8fab7364 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-14 16:26:03 +08:00
60f3832c65 fix: refactor OptionCard and StepTwo components for improved structure and readability 2025-07-14 16:25:56 +08:00
cf89d06023 fix: single run url error in pipeline 2025-07-14 16:15:52 +08:00
4b9a5a66c1 r2 2025-07-14 16:14:27 +08:00
e095de05c5 feat: pipeline run 2025-07-14 16:04:43 +08:00
82f4b35d52 chore: use flow type instead of whole url 2025-07-14 15:30:04 +08:00
7a9faf909e feat: workflow use common last run 2025-07-14 15:10:35 +08:00
928751a856 r2 2025-07-14 14:11:58 +08:00
d77d86f53b fix: remove unused showWorkflowEmpty prop from Tools component for cleaner code 2025-07-14 14:11:05 +08:00
2a5bab10b8 fix: pass dataSources prop to PortalToFollowElem for improved functionality 2025-07-14 13:48:45 +08:00
6313f819cf fix: add block enumeration and tool icon handling for enhanced workflow functionality 2025-07-14 11:44:13 +08:00
682b65034c Merge branch 'main' into feat/rag-2 2025-07-14 11:17:42 +08:00
adbad0ad33 fix: enhance bucket list initiation check for improved accuracy 2025-07-14 10:06:55 +08:00
ed77877db1 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-11 18:46:29 +08:00
9c4356e9a1 r2 2025-07-11 18:46:22 +08:00
0fdb1fedb0 feat: add PluginDependency component to RagPipelineChildren and WorkflowChildren for enhanced functionality 2025-07-11 18:38:18 +08:00
23a6fe3259 fix: adjust default selection of crawl results based on pipeline status 2025-07-11 18:02:23 +08:00
90d0f12ee9 refactor: update file extension handling in ChunkPreview component to use dynamic extension retrieval 2025-07-11 16:53:03 +08:00
50e16f8362 refactor: optimize state selection in data source components using useShallow for improved performance 2025-07-11 16:41:01 +08:00
9dbb06fccc fix: Fix node panel positioning issue when chat log modal is open 2025-07-11 15:58:26 +08:00
3b70f8ac08 r2 2025-07-11 15:25:58 +08:00
58a3471a5f refactor: update variable utility functions to include isRagVariableVar for enhanced variable validation 2025-07-11 13:51:23 +08:00
3e187ba6aa refactor: update BlockIcon component to handle Tool and DataSource types for conditional rendering 2025-07-10 16:56:59 +08:00
f677f2f71b refactor: update useEffect dependency to include currentPage.page_id for proper content fetching 2025-07-10 16:27:56 +08:00
de6867f875 refactor: update CrawledResult and WebsiteCrawl components to handle showPreview prop and adjust previewIndex logic 2025-07-10 15:43:36 +08:00
c39746181d refactor: update data source store usage in LocalFile and WebsiteCrawl components 2025-07-10 15:24:58 +08:00
15cd9e0b12 refactor: rename selectedFileList to selectedFileKeys across components and update related logic 2025-07-10 15:14:23 +08:00
e66c2badda refactor: update error handling to use DataSourceNodeErrorResponse in OnlineDocuments and WebsiteCrawl components 2025-07-10 13:55:24 +08:00
6030ae9d0f refactor: integrate currentNodeIdRef into data source store and update related components 2025-07-10 12:02:54 +08:00
42fd40500a refactor: remove isTruncated state and update related logic to use mutable refs 2025-07-10 10:39:01 +08:00
611bc728d0 fix: update hover background color for disabled and active NavLink states 2025-07-10 10:21:45 +08:00
e2a141b3bb Merge branch 'main' into feat/rag-2 2025-07-10 10:14:12 +08:00
8b97551f1a r2 2025-07-09 18:50:13 +08:00
966e6e03fc style: Update component attributes to use single quotes and adjust z-index in ContentDialog 2025-07-09 18:34:21 +08:00
775983b04b Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-09 18:21:01 +08:00
a2e0bc26c6 feat: Add conversion functionality to Knowledge Pipeline with UI and translations 2025-07-09 18:20:52 +08:00
bd33b9ffec r2 2025-07-09 17:34:42 +08:00
b538eee5dd r2 2025-07-09 17:28:52 +08:00
7c6bdb9ec9 feat: Enhance operations with pause and resume functionality 2025-07-09 16:05:42 +08:00
258c965bd0 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-09 15:55:31 +08:00
9e44f2b805 Merge branch 'main' into feat/rag-2 2025-07-09 15:54:57 +08:00
9dcba51225 r2 2025-07-09 15:48:08 +08:00
e7d394f160 feat: Add DatasetSidebarDropdown component and integrate ExtraInfo for dataset details 2025-07-09 15:13:02 +08:00
dfe3c2caa1 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-09 14:45:03 +08:00
4ea4d227c6 refactor: Remove unused file support types logic from CreateFormPipeline component 2025-07-09 14:44:56 +08:00
b5e4ce6c68 r2 2025-07-09 14:27:49 +08:00
4a8061d14c fix: Integrate dataset list reset functionality in dropdown and step two components 2025-07-09 13:54:49 +08:00
59c3305dcc feat: Enhance dataset dropdown functionality with export and delete options 2025-07-09 13:42:24 +08:00
8fc15c83d0 feat: Refactor dataset info components and add export pipeline functionality 2025-07-09 10:45:50 +08:00
a0942399cd Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-08 20:04:26 +08:00
1c85dada53 feat: Implement sidebar toggle functionality with keyboard shortcuts and improve translations 2025-07-08 20:04:15 +08:00
bc1a517a97 r2 2025-07-08 17:16:10 +08:00
b3431ab0c4 feat: Refactor online drive components to improve file retrieval and selection logic 2025-07-08 16:46:59 +08:00
073a0974a4 fix: Update breadcrumb click handling to close dropdown and adjust prefix slicing logic 2025-07-08 15:40:16 +08:00
e911a4e719 fix: Update button styles and improve file size validation in breadcrumb and item components 2025-07-08 15:28:22 +08:00
5e2b60664f fix: Improve item selection logic and reset selected file list on folder open 2025-07-08 14:26:55 +08:00
b36f36d242 feat: Enhance CreateFormPipeline with file selection and validation for online documents and drives 2025-07-08 14:14:50 +08:00
6332fe795e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-08 14:05:45 +08:00
db886ae3fb r2 2025-07-08 14:04:44 +08:00
467cd2c4c1 fix: Update onlineDrive file check to use selectedFileList for better validation 2025-07-07 16:34:43 +08:00
d3ca50626d feat: Integrate useOnlineDrive hook and enhance datasource handling in CreateFormPipeline 2025-07-07 16:30:15 +08:00
13f168ed1c refactor: Refactor Online Drive components to improve state management and add truncation support 2025-07-07 15:51:59 +08:00
83c8219942 feat: Enhance file item component with support for disabled state and file type validation 2025-07-07 14:36:09 +08:00
a30b92d6b1 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-04 19:15:52 +08:00
1bd664e655 feat: Implement Dropdown and Menu components for breadcrumb navigation in Online Drive 2025-07-04 19:15:37 +08:00
1fb59adba9 r2 2025-07-04 19:09:40 +08:00
1b3888a13e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-04 18:49:50 +08:00
9c6eb95700 r2 2025-07-04 18:49:37 +08:00
1ff608dfa9 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-04 18:24:59 +08:00
933ad0649c feat: Add subItems mapping to Panel component for enhanced item details 2025-07-04 18:24:52 +08:00
a8b676ade0 r2 2025-07-04 18:05:58 +08:00
0d9991ec88 feat: Add ONLINE_DRIVE_OUTPUT and integrate into DataSource components for online drive support 2025-07-04 18:04:47 +08:00
e67a19b26b refactor: Enhance Bucket and Breadcrumbs components; improve event handling and add button for bucket name 2025-07-04 17:55:30 +08:00
d44af3ec46 refactor: Restructure breadcrumbs component; introduce Bucket and BreadcrumbItem components for improved navigation 2025-07-04 16:44:21 +08:00
9ce0c69687 refactor: Update event handling in Checkbox and Radio components; optimize Online Drive file filtering 2025-07-04 15:30:08 +08:00
2ecbcd6a7f refactor: Add loading state and bucket handling to Online Drive components 2025-07-04 15:14:19 +08:00
d3b17ea567 fix: Update key property for onlineDrive datasource handling in TestRunPanel 2025-07-04 14:11:23 +08:00
a4f7d373b5 refactor: Replace useDataSourceStore with useDataSourceStoreWithSelector for improved state selection across components 2025-07-04 14:03:04 +08:00
334f0c905a feat: Enhance Online Drive file handling with selection and folder opening functionality 2025-07-04 13:42:36 +08:00
44c2efcfe4 r2 2025-07-03 18:56:42 +08:00
f2960989c1 refactor: Refactor data source components 2025-07-03 18:34:54 +08:00
816b49483a Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-03 15:15:14 +08:00
798d0880d6 r2 2025-07-03 15:15:01 +08:00
cf4f652105 fix: Enhance data source handling by adding error response type and updating local file and online document slices 2025-07-03 14:58:58 +08:00
76c418c0b7 r2 2025-07-03 14:03:06 +08:00
7c5893db91 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	web/app/components/workflow-app/components/workflow-main.tsx
#	web/app/components/workflow/constants.ts
#	web/app/components/workflow/header/run-and-history.tsx
#	web/app/components/workflow/hooks-store/store.ts
#	web/app/components/workflow/hooks/use-nodes-interactions.ts
#	web/app/components/workflow/hooks/use-workflow-interactions.ts
#	web/app/components/workflow/hooks/use-workflow.ts
#	web/app/components/workflow/nodes/_base/components/panel-operator/panel-operator-popup.tsx
#	web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx
#	web/app/components/workflow/nodes/code/use-config.ts
#	web/app/components/workflow/nodes/llm/default.ts
#	web/app/components/workflow/panel/index.tsx
#	web/app/components/workflow/panel/version-history-panel/index.tsx
#	web/app/components/workflow/store/workflow/index.ts
#	web/app/components/workflow/types.ts
#	web/config/index.ts
#	web/types/workflow.ts
2025-07-03 11:40:54 +08:00
2dd1f41ad3 feat: Implement data source store with slices for local files, online documents, website crawls, and online drives 2025-07-03 10:31:29 +08:00
ddde576b4a refactor: Update notification messages in CreateFromDSLModal and CreateFromScratchModal for dataset creation 2025-07-03 10:17:55 +08:00
38d895ab5f r2 2025-07-02 18:46:36 +08:00
a6ff9b224b r2 2025-07-02 18:20:41 +08:00
832bef053f Merge branch 'main' into feat/r2
# Conflicts:
#	docker/docker-compose.middleware.yaml
#	web/app/components/workflow-app/components/workflow-main.tsx
#	web/app/components/workflow-app/hooks/index.ts
#	web/app/components/workflow/hooks-store/store.ts
#	web/app/components/workflow/hooks/index.ts
#	web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx
2025-07-02 18:20:05 +08:00
81b07dc3be r2 2025-07-02 18:15:23 +08:00
a52bf6211a merge main 2025-07-02 18:07:09 +08:00
d7b0ccd6f7 feat: Add name field to data source credentials update function in usePipeline service 2025-07-02 15:08:20 +08:00
68d59ee8b3 refactor: Refactor useOnlineDocument hook 2025-07-02 14:56:29 +08:00
e23d7e39ec Merge branch 'feat/datasource' into feat/r2
# Conflicts:
#	api/services/rag_pipeline/rag_pipeline.py
#	web/app/components/workflow/constants.ts
#	web/app/components/workflow/header/run-and-history.tsx
#	web/app/components/workflow/hooks/use-nodes-interactions.ts
#	web/app/components/workflow/hooks/use-workflow-interactions.ts
#	web/app/components/workflow/hooks/use-workflow.ts
#	web/app/components/workflow/index.tsx
#	web/app/components/workflow/nodes/_base/components/panel-operator/panel-operator-popup.tsx
#	web/app/components/workflow/nodes/_base/panel.tsx
#	web/app/components/workflow/nodes/code/use-config.ts
#	web/app/components/workflow/nodes/llm/default.ts
#	web/app/components/workflow/panel/index.tsx
#	web/app/components/workflow/panel/version-history-panel/index.tsx
#	web/app/components/workflow/store/workflow/index.ts
#	web/app/components/workflow/types.ts
#	web/config/index.ts
#	web/types/workflow.ts
2025-07-02 14:01:59 +08:00
0284e7556e refactor: Refactor useDatasourceIcon hook and enhance dataset node rendering with AppIcon component 2025-07-02 13:48:11 +08:00
9f14b5db9a r2 2025-07-02 11:55:21 +08:00
39d3f58082 r2 2025-07-02 11:33:00 +08:00
5d7a533ada fix: Improve layout by adding overflow handling in CreateFromPipeline and List components 2025-07-01 17:46:41 +08:00
0db7967e5f refactor: Add useOnlineDrive hook and integrate it into CreateFormPipeline and TestRunPanel components 2025-07-01 16:54:44 +08:00
a81dc49ad2 feat: Refactor OnlineDocuments and PageSelector components to enhance state management and integrate new Actions component 2025-07-01 16:32:21 +08:00
f33b6c0c73 add online drive 2025-07-01 16:08:54 +08:00
a4eddd7dc2 r2 2025-07-01 15:16:33 +08:00
c993a05da7 Merge remote-tracking branch 'origin/feat/r2' into feat/r2 2025-07-01 14:23:58 +08:00
f44f0fa34c r2 2025-07-01 14:23:46 +08:00
2d0d448667 feat: Update selection handling to support multiple choice in OnlineDocuments and PageSelector components 2025-07-01 14:14:28 +08:00
bfcf09b684 feat(datasource): fix datasource icon 2025-07-01 14:04:09 +08:00
cdbba1400c feat(datasource): update fetch_datasource_provider 2025-07-01 11:57:06 +08:00
55d7d7ef76 fix: Update default value handling for number input in useInitialData hook 2025-07-01 11:24:30 +08:00
7b473bb5c9 feat: Integrate OnlineDrive component into CreateFormPipeline and update related components 2025-06-30 18:31:52 +08:00
ff511c6f31 refactor: Remove unused variables and simplify next button logic in TestRunPanel 2025-06-30 17:54:33 +08:00
310102bebd feat: Add SearchMenu icon and integrate it into the file list component with empty state handling 2025-06-30 17:31:27 +08:00
1f5c32525f datasource page 2025-06-30 16:16:54 +08:00
618ad4c291 r2 2025-06-30 15:36:20 +08:00
ada632f9f5 feat: Enhance input field handling by adding allVariableNames prop and localizing error messages 2025-06-30 15:28:55 +08:00
4c82c9d029 feat: Add Online Drive file management components and enhance file icon handling 2025-06-30 14:19:14 +08:00
42655a3b1f fix: checklist 2025-06-30 14:11:26 +08:00
1449ed86c4 feat: rename online driver to online drive and update related classes and methods :) 2025-06-27 20:11:28 +08:00
94674e99ab datasource page add marketplace 2025-06-27 16:44:23 +08:00
eee72101f4 feat(online_driver): add online driver plugin, support browsing and downloading 2025-06-27 16:41:39 +08:00
93eabef58a refactor: Refactor OnlineDocuments component and remove OnlineDocumentSelector 2025-06-27 16:29:30 +08:00
5248fcca56 feat: implement support for single and multiple choice in crawled result items 2025-06-27 15:56:38 +08:00
264b95e572 feat: separate input fields into datasource and global categories in RAG pipeline 2025-06-27 15:23:04 +08:00
8f2ad89027 datasource page 2025-06-27 15:01:33 +08:00
18b1a9cb2e Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-26 15:28:19 +08:00
25fef5d757 merge main 2025-06-26 15:21:24 +08:00
2a25ca2b2c feat: enhance online drive connection UI and add localization for connection status in dataset pipeline 2025-06-26 14:24:50 +08:00
3a9c79b09a feat: refactor data source handling and integrate OnlineDrive component in TestRunPanel 2025-06-26 13:46:12 +08:00
025b55ef3b feat: update tooltip text for test run mode in English and Chinese translations for clarity 2025-06-26 10:17:48 +08:00
cf7574bd10 feat: add FooterTips component and integrate it into TestRunPanel; extend DatasourceType enum with onlineDrive 2025-06-26 10:16:37 +08:00
efccbe4039 r2 2025-06-25 17:32:26 +08:00
c7cec120a6 feat: update variable validation regex for consistency in ExternalDataToolModal and schema 2025-06-25 17:07:31 +08:00
7d7fd18e65 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-25 16:16:41 +08:00
c6ae9628af feat: refactor input variable handling and configurations in pipeline processing components 2025-06-25 16:15:59 +08:00
4631575c12 feat: support choosing current node vars 2025-06-25 16:06:08 +08:00
a4f4fea0a5 fix: note node delete 2025-06-25 16:01:45 +08:00
540096a8d8 Merge branch 'main' into feat/r2
# Conflicts:
#	api/core/plugin/impl/oauth.py
#	api/core/workflow/entities/variable_pool.py
#	api/models/workflow.py
#	api/services/dataset_service.py
2025-06-25 14:35:23 +08:00
7b7cdad1d8 r2 2025-06-25 13:28:08 +08:00
261b7cabc8 feat: enhance OnlineDocumentPreview with datasourceNodeId and implement preview functionality 2025-06-25 11:36:56 +08:00
ccd346d1da feat: add handling for RAG pipeline variables in node interactions 2025-06-25 10:40:48 +08:00
a866cbc6d7 feat: implement usePipeline hook for managing pipeline variables and refactor input field handling 2025-06-25 10:11:26 +08:00
6aba39a2dd feat(datasource): add datasource content preview api 2025-06-24 17:43:25 +08:00
8f4a0d4a22 variable picker 2025-06-24 17:27:06 +08:00
49bb15fae1 feat(datasource): add datasource content preview api 2025-06-24 17:14:31 +08:00
e165f4a102 feat(datasource): add datasource content preview api 2025-06-24 17:14:16 +08:00
1c51bef3cb fix: standardize capitalization in translation keys and remove unused group property in FieldListContainer 2025-06-24 14:25:58 +08:00
c31754e6cd fix: create pipeline from customized 2025-06-24 11:12:39 +08:00
83cc484c24 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-23 17:12:26 +08:00
1ff9c07a92 fix notion dataset rule not found 2025-06-23 17:12:08 +08:00
b25b284d7f Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-23 16:41:13 +08:00
2414dbb5f8 feat: clear selected IDs on document deletion action in DocumentList component 2025-06-23 16:38:19 +08:00
916a8c76e7 fix: rename currentDocuments to currentDocument for consistency in online documents handling 2025-06-23 16:31:09 +08:00
9783832223 Merge branch 'feat/datasource' into deploy/rag-dev 2025-06-23 16:12:03 +08:00
b77081a19e feat(datasource): update datasource icon 2025-06-23 15:57:37 +08:00
896906ae77 feat: refactor layout structure in PipelineSettings component for improved responsiveness 2025-06-23 15:57:02 +08:00
2365a3a5fc Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-23 15:39:00 +08:00
dd792210f6 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-23 15:38:34 +08:00
6ba4a4c165 feat: enhance website crawl functionality with state management and result handling 2025-06-23 15:38:24 +08:00
0a6dbf6ee2 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-23 15:21:15 +08:00
ca0979dd43 feat(datasource): update fetch_datasource_provider 2025-06-23 15:18:15 +08:00
0762e5ae50 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-23 15:03:32 +08:00
48f53f3b9b workflow dependency 2025-06-23 15:02:57 +08:00
af64f29e87 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-23 13:59:39 +08:00
b9f59e3a75 Merge branch 'main' into feat/rag-pipeline 2025-06-23 13:59:05 +08:00
b12a8eeb90 feat(datasource): change datasource result type to event-stream 2025-06-20 10:09:47 +08:00
e551cf65c9 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-19 15:58:51 +08:00
3899211c41 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-19 15:58:01 +08:00
335e1e3602 feat: enhance pipeline settings with execution log and processing capabilities 2025-06-19 15:57:49 +08:00
725fc72c6f Merge branch 'feat/r2' into deploy/rag-dev 2025-06-19 15:31:03 +08:00
b618f3bd9e r2 2025-06-19 15:30:46 +08:00
95ba55af4d fix: import dsl sync rag variables 2025-06-19 15:04:26 +08:00
f4e1ea9011 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-19 14:30:05 +08:00
3d0e288e85 r2 2025-06-19 14:29:39 +08:00
9620d6bcd8 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	web/i18n/zh-Hans/app.ts
2025-06-19 13:32:49 +08:00
f7fbded8b9 Merge branch 'main' into feat/r2 2025-06-19 13:32:07 +08:00
0c5706b3f6 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-19 11:10:43 +08:00
82d0a70cb4 feat(datasource): change datasource result type to event-stream 2025-06-19 11:10:24 +08:00
55516c4e57 fix: add type checks for workspace roles in DatasetsLayout component 2025-06-19 10:56:03 +08:00
cc2cd85ff5 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-19 10:46:15 +08:00
6ec742539a r2 2025-06-19 10:45:59 +08:00
09e0a54070 r2 2025-06-19 10:38:10 +08:00
5d25199f42 refactor: update layout for creation title and content in StepThree component 2025-06-19 09:36:04 +08:00
387826674c Merge branch 'main' into feat/rag-pipeline 2025-06-19 09:34:09 +08:00
02ae479636 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-18 18:04:23 +08:00
a103324f25 refactor: enhance UI components with new icons and improved styling in billing and dataset processes 2025-06-18 18:03:43 +08:00
643efc5d85 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-18 17:11:52 +08:00
43e5798e13 feat(datasource): change datasource result type to event-stream 2025-06-18 16:27:10 +08:00
8aca70cd50 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-18 16:05:08 +08:00
2cf980026e feat(datasource): change datasource result type to event-stream 2025-06-18 16:04:47 +08:00
224111081b feat(datasource): change datasource result type to event-stream 2025-06-18 16:04:40 +08:00
4dc6cad588 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-18 15:08:05 +08:00
f85e6a0dea feat: implement SSE for data source node processing and completion events, replacing previous run methods 2025-06-18 15:06:50 +08:00
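f85e6a0dea and the recurring "change datasource result type to event-stream" commits nearby move datasource runs onto Server-Sent Events. A minimal Flask sketch of the pattern, with a hypothetical route and event names; Dify's real payloads are not shown in this log:

```python
import json
import time

from flask import Flask, Response

app = Flask(__name__)


@app.route("/datasource/run")
def run_datasource() -> Response:
    def generate():
        for page in ("page-1", "page-2"):  # stand-ins for real fetch/crawl steps
            yield f"data: {json.dumps({'event': 'processing', 'page': page})}\n\n"
            time.sleep(0.1)
        yield f"data: {json.dumps({'event': 'completed'})}\n\n"

    return Response(generate(), mimetype="text/event-stream")
```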
4b3a54633f refactor: streamline dataset detail fetching and improve dataset list handling across components 2025-06-18 15:05:21 +08:00
6f67a34349 r2 qa index 2025-06-18 14:37:18 +08:00
e51d308312 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-18 13:47:36 +08:00
379c92bd82 Merge branch 'main' into feat/rag-pipeline 2025-06-18 13:47:06 +08:00
fa9f0ebfb1 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-18 11:06:37 +08:00
ac917bb56d r2 2025-06-18 11:05:52 +08:00
f7a4e5d1a6 Merge branch 'main' into feat/r2 2025-06-18 10:57:44 +08:00
515d34bbfb Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 19:07:38 +08:00
66de2e1f0a Merge remote-tracking branch 'origin/feat/r2' into feat/r2
# Conflicts:
#	api/core/workflow/graph_engine/entities/event.py
#	api/services/rag_pipeline/rag_pipeline.py
2025-06-17 19:07:15 +08:00
7f7ea92a45 r2 2025-06-17 19:06:17 +08:00
a014345688 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 18:29:24 +08:00
cf66d111ba feat(datasource): change datasource result type to event-stream 2025-06-17 18:29:02 +08:00
2d01b1a808 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 18:24:52 +08:00
739ebf2117 feat(datasource): change datasource result type to event-stream 2025-06-17 18:24:09 +08:00
176b844cd5 refactor: consolidate DialogWrapper component usage and improve prop handling across input fields 2025-06-17 18:20:30 +08:00
8fc6684ab1 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 18:12:36 +08:00
7c41f71248 r2 2025-06-17 18:11:38 +08:00
2c2bfb4f54 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-17 17:45:50 +08:00
3164f90327 merge main 2025-06-17 17:44:08 +08:00
90ac52482c test: add unit tests for ActionButton component with various states and sizes 2025-06-17 16:48:52 +08:00
879ac940dd Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-17 16:30:37 +08:00
796797d12b feat: centralize variable type mapping by introducing VAR_TYPE_MAP in pipeline model 2025-06-17 16:28:50 +08:00
7ac0f0c08c feat: enhance processing components by adding runDisabled state and fetching indicators 2025-06-17 16:13:49 +08:00
5cc6a2bf33 refactor: update toast notification handling and improve context usage in DocumentDetail 2025-06-17 14:41:06 +08:00
2db0b19044 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 14:05:15 +08:00
1d2ee9020c r2 2025-06-17 14:04:55 +08:00
f2538bf381 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 13:56:00 +08:00
f37e28a368 feat(datasource): Comment out the datasource_file_manager. 2025-06-17 13:54:25 +08:00
c5976f5a09 feat(datasource): change datasource result type to event-stream 2025-06-17 13:51:41 +08:00
64a9181ee4 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-17 11:30:38 +08:00
33cd32382f feat: add indexing status batch and process rule hooks; refactor Notion page preview types 2025-06-17 11:29:56 +08:00
9456c59290 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-17 10:14:20 +08:00
ce0bd421ae Merge branch 'main' into feat/rag-pipeline 2025-06-17 10:13:31 +08:00
f9d04c6975 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-16 18:26:01 +08:00
ecb07a5d0d feat: enhance field list functionality by adding chosen and selected properties to SortableItem 2025-06-16 18:25:30 +08:00
a165ba2059 merge main 2025-06-16 15:43:57 +08:00
12fd2903d8 fix 2025-06-16 15:41:27 +08:00
0a2c569b3b fix: replace useGetDocLanguage with useDocLink for consistent documentation linking 2025-06-16 14:58:52 +08:00
9ab0d5fe60 Merge branch 'main' into feat/rag-pipeline 2025-06-16 14:25:58 +08:00
1d71fd5b56 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-16 14:09:04 +08:00
b277acc298 Merge branch 'main' into feat/r2 2025-06-16 14:08:02 +08:00
8d47d8ce4f Merge remote-tracking branch 'origin/feat/r2' into feat/r2
# Conflicts:
#	api/core/datasource/website_crawl/website_crawl_plugin.py
#	api/services/rag_pipeline/rag_pipeline.py
2025-06-16 13:50:33 +08:00
41fef8a21f r2 2025-06-16 13:48:43 +08:00
b853a42e37 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-16 11:45:24 +08:00
1633626d23 Merge branch 'main' into feat/rag-pipeline 2025-06-16 11:44:42 +08:00
6c7a40c571 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-16 10:16:00 +08:00
abb2ed66e7 merge main 2025-06-16 10:15:24 +08:00
5ae78f79b0 datasource dark theme 2025-06-16 10:00:14 +08:00
e3b3a6d040 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 18:32:56 +08:00
6622ce6ad8 fix: update formData construction in convertToInputFieldFormData for improved handling of optional fields
fix: adjust z-index value in DialogWrapper for proper stacking context
2025-06-13 18:32:36 +08:00
5ccb8d9736 feat: online document 2025-06-13 18:22:15 +08:00
55906c8375 fix: remove unused billing plan logic from CreateFromDSLModal component 2025-06-13 18:01:01 +08:00
0908f310fc feat: webcrawl 2025-06-13 17:47:51 +08:00
58842898e1 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-13 16:42:27 +08:00
1c17c8fa36 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 16:39:40 +08:00
26aff400e4 node default configs 2025-06-13 16:38:54 +08:00
4b11d29ede fix: update VAR_TYPE_MAP and initialData handling in useConfigurations for improved variable processing 2025-06-13 15:57:16 +08:00
b2b95412b9 r2 2025-06-13 15:04:22 +08:00
5c228bca4f feat: replace TypeIcon with AppIcon in SelectDataSet component for improved icon display 2025-06-13 15:02:31 +08:00
7bd2509ad5 Update deploy-dev.yml 2025-06-13 14:50:38 +08:00
2a5d70d9e1 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 14:44:53 +08:00
b0107f4128 fix: update z-index values for DialogWrapper components to ensure proper stacking context 2025-06-13 14:44:32 +08:00
dc3c5362e4 Merge branch 'deploy/rag-dev' of https://github.com/langgenius/dify into deploy/rag-dev 2025-06-13 14:43:58 +08:00
1d106c3660 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 14:43:46 +08:00
fcb2fa04e7 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 14:43:38 +08:00
55bff10f0d fix 2025-06-13 14:43:02 +08:00
45c9b77e82 fix: match var reg 2025-06-13 14:42:35 +08:00
767860e76b Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 11:44:22 +08:00
80f656f79a fix: adjust layout and visibility conditions in CreateFormPipeline and ChunkPreview components 2025-06-13 11:38:26 +08:00
c891eb28fc fix: vars display error in prompt editor 2025-06-13 11:12:11 +08:00
b9fa3f54e9 refactor: refactor component imports and enhance layout for better responsiveness in dataset previews 2025-06-13 10:54:31 +08:00
4d2f904d72 feat: enhance WorkflowPreview and TemplateCard components with additional styling and className prop 2025-06-13 10:14:45 +08:00
26b7911177 Merge branch 'main' into feat/rag-pipeline 2025-06-13 09:49:03 +08:00
dd91edf70b Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-12 18:11:19 +08:00
d994e6b6c7 feat: replace data source icons with AppIcon component in Item and DatasetItem 2025-06-12 18:11:00 +08:00
aba48bde0b feat: update SettingsModal to integrate keyword number handling and refactor index method logic 2025-06-12 18:06:00 +08:00
3e5d9884cb test: add unit tests for AppIcon component with various rendering scenarios 2025-06-12 17:33:28 +08:00
faadad62ff Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-12 17:01:41 +08:00
406d70e4a3 feat: integrate resetDatasetList hook into CreateOptions and TemplateCard components 2025-06-12 16:59:33 +08:00
f17f256b2b Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-12 16:25:04 +08:00
b367f48de6 add datasource category 2025-06-12 16:24:21 +08:00
dee7b6eb22 Update deploy-dev.yml 2025-06-12 16:19:12 +08:00
6f17200dec refactor: update dataset handling to use runtime_mode instead of pipeline_id 2025-06-12 15:57:07 +08:00
d3dbfbe8b3 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-12 15:18:49 +08:00
b1f250862f Merge branch 'main' into feat/rag-pipeline 2025-06-12 15:18:19 +08:00
141d6b1abf feat: implement document settings and pipeline settings components with localization support 2025-06-12 15:13:15 +08:00
a7eb534761 add datasource category 2025-06-12 15:05:36 +08:00
808f792f55 fix: update isPending condition and add indexing technique checks for segment detail and new segment modal 2025-06-12 11:11:03 +08:00
346d066128 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 18:12:17 +08:00
5c41922b8a fix: update file extension for downloaded DSL files and refine mutation keys for template operations 2025-06-11 18:11:38 +08:00
9c3e3b00d0 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 18:07:20 +08:00
da3a3ce165 r2 2025-06-11 18:07:06 +08:00
b525bc2b81 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 18:03:53 +08:00
14dc3e8642 r2 2025-06-11 18:03:21 +08:00
e52c905aa5 refactor: improve layout-main component structure and readability 2025-06-11 17:46:50 +08:00
7b9a3c1084 fix: update translation keys for document availability messages in English and Chinese 2025-06-11 17:42:45 +08:00
ce8ddae11e Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-11 17:30:45 +08:00
4e8184bc56 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	api/models/dataset.py
2025-06-11 17:30:30 +08:00
9eb8597957 r2 2025-06-11 17:29:14 +08:00
cde584046d Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 17:19:08 +08:00
b7f9d7e94a Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-11 17:17:15 +08:00
88817bf974 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 17:17:10 +08:00
92e6c52c0e refactor: update handleUseTemplate to use callback for dataset creation and improve error handling; change HTTP method for dependency check 2025-06-11 17:17:09 +08:00
309dfe8829 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 17:16:52 +08:00
1d8b390584 Merge branch 'main' into feat/r2
# Conflicts:
#	docker/docker-compose.middleware.yaml
2025-06-11 17:16:27 +08:00
7dea7f77ac Merge branch 'main' into feat/rag-pipeline 2025-06-11 17:12:38 +08:00
4d9b15e519 fix 2025-06-11 17:11:57 +08:00
45a708f17e Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 17:10:42 +08:00
5f08a9314c r2 2025-06-11 17:10:20 +08:00
5802b2b437 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 16:39:18 +08:00
f995436eec feat: implement chunk structure card and related hooks for dataset creation; update translations and refactor pipeline template fetching 2025-06-11 16:38:42 +08:00
25f0c61e65 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 16:36:56 +08:00
66fa68fa18 r2 2025-06-11 16:36:36 +08:00
3e5781c6f1 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 14:03:50 +08:00
a6f7560d2f r2 2025-06-11 14:03:32 +08:00
45c76c1d68 refactor: rename icon property to icon_info in UpdateTemplateInfoRequest and related components 2025-06-11 13:39:07 +08:00
14d5af468c Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 13:12:41 +08:00
874e1bc41d r2 2025-06-11 13:12:18 +08:00
d2ae695b3b Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 11:55:46 +08:00
6ecdac6344 pipeline preview 2025-06-11 11:51:19 +08:00
3c2ce07f38 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 11:30:14 +08:00
5c58b11b22 refactor: standardize pipeline template properties and improve related components 2025-06-11 11:25:08 +08:00
be92122f17 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 11:21:37 +08:00
2972a06f16 r2 2025-06-11 11:21:17 +08:00
caa275fdbd refactor: remove unused websiteCrawlJobId state and related props from useWebsiteCrawl and CreateFormPipeline components; update loading and file preview components for consistent width 2025-06-11 10:50:03 +08:00
5dbda7f4c5 merge main 2025-06-11 10:41:50 +08:00
0564651f6f publish as customized pipeline 2025-06-11 10:40:49 +08:00
eff8108f1c refactor: update dataset model and improve batch action component 2025-06-11 10:24:07 +08:00
127a77d807 r2 2025-06-10 19:22:08 +08:00
265842223c Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 18:20:44 +08:00
95a24156de r2 2025-06-10 18:20:32 +08:00
80ca5b3356 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 18:16:45 +08:00
e934503fa0 r2 2025-06-10 18:16:30 +08:00
442bcd18c0 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-10 17:59:27 +08:00
aeb1d1946c r2 2025-06-10 17:59:14 +08:00
12f2913e08 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 17:40:08 +08:00
0aeeee49f7 fix: draft sync 2025-06-10 17:39:20 +08:00
eb7479b1ea Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 17:12:12 +08:00
80b219707e r2 2025-06-10 17:11:49 +08:00
65ac022245 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-10 16:06:58 +08:00
6e6090d5a9 test(SegmentedControl): add test cases 2025-06-10 16:06:42 +08:00
58b5daeef3 r2 2025-06-10 15:56:28 +08:00
33fd1fa79d Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 15:40:52 +08:00
978118f770 fix: datasource 2025-06-10 15:40:15 +08:00
a2610b22cc Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 15:20:19 +08:00
f4789d750d publish as pipeline 2025-06-10 15:19:47 +08:00
176f9ea2f4 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 15:10:52 +08:00
5e71f7c825 publish as pipeline 2025-06-10 15:10:13 +08:00
7624edd32d r2 2025-06-10 14:56:18 +08:00
7b79354849 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 14:54:27 +08:00
a7ff2ab470 r2 2025-06-10 14:53:07 +08:00
d3eedaf0ec feat(i18n): add new translation entries for local file, website crawl, and online document 2025-06-10 14:22:09 +08:00
bcb0496bf4 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 14:13:32 +08:00
4d967544f3 r2 2025-06-10 14:13:10 +08:00
c18ee4be50 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 11:45:19 +08:00
65873aa411 r2 2025-06-10 11:44:52 +08:00
b95256d624 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 11:00:11 +08:00
c0d3452494 r2 2025-06-10 10:59:44 +08:00
c91456de1b fix(ChunkStructure): add disabled prop to OptionCard component 2025-06-10 10:43:40 +08:00
e1ce156433 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 10:16:49 +08:00
9e19ed4e67 knowledge base node checklist 2025-06-10 10:16:13 +08:00
ba383b1b0d Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 10:00:48 +08:00
ad3d9cf782 r2 2025-06-10 10:00:20 +08:00
69053332e4 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 09:47:36 +08:00
5b4d04b348 Merge branch 'main' into feat/rag-pipeline 2025-06-10 09:38:06 +08:00
47664f8fd3 r2 2025-06-09 14:00:34 +08:00
8d8f21addd Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 18:52:57 +08:00
9b9640b3db refactor: remove job ID handling from website crawl components and update related hooks 2025-06-06 18:52:32 +08:00
83ba61203b Merge branch 'feat/r2' into deploy/rag-dev 2025-06-06 17:47:47 +08:00
fcbd5febeb r2 2025-06-06 17:47:06 +08:00
b8813e199f Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 17:27:14 +08:00
2322496552 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	docker/docker-compose.middleware.yaml
2025-06-06 17:15:24 +08:00
21a3509bef r2 2025-06-06 17:14:43 +08:00
3e2f12b065 refactor: update website crawl handling and improve parameter naming in pipeline processing 2025-06-06 17:00:34 +08:00
55e20d189a Update deploy-dev.yml 2025-06-06 16:16:44 +08:00
1aa13bd20d r2 2025-06-06 16:05:49 +08:00
cc2dd052df Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-06 16:03:46 +08:00
4ffdf68a20 r2 2025-06-06 16:03:35 +08:00
547bd3cc1b refactor: rename cancel editor handler and improve variable name validation in field list 2025-06-06 15:54:55 +08:00
f3e9761c75 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 15:35:53 +08:00
83ca59e0f1 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-06 15:35:50 +08:00
d725aa8791 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-06 15:35:31 +08:00
cc8ee0ff69 dsl 2025-06-06 15:35:19 +08:00
4a249c40b1 feat: enhance input field configurations with blur listeners and update translations for display name 2025-06-06 15:35:19 +08:00
04e4a1e3aa Merge branch 'feat/r2' into deploy/rag-dev 2025-06-06 15:20:06 +08:00
d2d5fc62ae r2 2025-06-06 15:19:53 +08:00
52460f6929 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-06 15:06:59 +08:00
06dfc32e0f Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	docker/docker-compose.middleware.yaml
2025-06-06 15:06:47 +08:00
0ca38d8215 r2 2025-06-06 15:06:26 +08:00
3da6becad3 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 15:02:00 +08:00
f9d0a7bdc8 Merge branch 'main' into feat/rag-pipeline 2025-06-06 15:01:27 +08:00
e961722597 dsl 2025-06-06 15:00:37 +08:00
2ddd2616ec confirm publish 2025-06-06 14:24:02 +08:00
a82a9fb9d4 fix: update condition for handling datasource selection in DataSourceOptions 2025-06-06 14:24:02 +08:00
3fce6f2581 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	api/services/rag_pipeline/rag_pipeline.py
2025-06-06 14:23:05 +08:00
3db864561e confirm publish 2025-06-06 14:22:15 +08:00
d2750f1a02 r2 2025-06-06 14:22:00 +08:00
30a50c5cc8 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-06 12:08:20 +08:00
0ff746ebf6 r2 2025-06-06 12:08:09 +08:00
5193fa2118 fix: update condition for handling datasource selection in DataSourceOptions 2025-06-06 11:43:00 +08:00
9a0dc82e6a Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 10:55:43 +08:00
8e4165defe datasource 2025-06-06 10:55:13 +08:00
d917bc8ed0 Merge branch 'deploy/rag-dev' of https://github.com/langgenius/dify into deploy/rag-dev 2025-06-06 10:53:23 +08:00
ef7bd262c5 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 10:52:53 +08:00
d3e29ffa74 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-06 10:40:57 +08:00
70432952fd r2 2025-06-06 10:40:06 +08:00
cf2ef93ad5 Merge branch 'main' into feat/rag-pipeline 2025-06-06 10:10:53 +08:00
cbf0864edc refactor: refactor online documents handling and update related components 2025-06-06 10:08:19 +08:00
bce2bdd0de Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-05 18:28:58 +08:00
82e7c8a2f9 refactor: update datasource handling and improve documentation properties in pipeline components 2025-06-05 18:28:48 +08:00
2acdb0a4ea fix: var type error when calculating var type in data source type 2025-06-05 17:40:49 +08:00
350ea6be6e fix: correct spacing and formatting in variable utility functions 2025-06-05 17:32:03 +08:00
4664174ef3 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-05 16:51:55 +08:00
f0413f359a datasource 2025-06-05 16:51:19 +08:00
53b32c8b22 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 16:44:02 +08:00
b8ef1d9585 r2 2025-06-05 16:43:47 +08:00
90ca98ff3a fix: show rag vars in node 2025-06-05 16:41:04 +08:00
d4a1d045f8 fix: to new var format 2025-06-05 16:36:41 +08:00
91fefa0e37 refactor: improve layout and event handling in Header and FieldItem components 2025-06-05 15:44:18 +08:00
067ec17539 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 15:29:37 +08:00
c084b57933 r2 2025-06-05 15:28:44 +08:00
876be7e6e9 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-05 15:07:59 +08:00
468bfdfed9 datasource 2025-06-05 15:07:29 +08:00
82d817f612 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 14:56:19 +08:00
9e84a5321d r2 2025-06-05 14:55:09 +08:00
d77e27ac05 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 14:10:07 +08:00
8a86a2c817 r2 2025-06-05 14:09:50 +08:00
fdc4c36b77 refactor: replace useStore with useDatasetDetailContextWithSelector for pipeline ID retrieval 2025-06-05 14:05:27 +08:00
52c118f5b8 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-05 11:46:19 +08:00
5d7c7023c3 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 11:46:07 +08:00
3e0a10b7ed r2 2025-06-05 11:45:53 +08:00
84f5272f72 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-05 11:40:47 +08:00
6286f368f1 refactor: replace ImagePlus icon with RiImageCircleAiLine and improve tab button styling 2025-06-05 11:21:17 +08:00
cb2ca0b533 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 11:12:32 +08:00
5fe5da7c1d r2 2025-06-05 11:12:06 +08:00
c83370f701 refactor: simplify workflow draft synchronization in InputFieldDialog 2025-06-05 11:07:28 +08:00
7506867fb9 Merge branch 'main' into feat/rag-pipeline 2025-06-05 10:29:18 +08:00
842136959b feat: update data source handling and improve processing parameters integration 2025-06-05 10:24:25 +08:00
4c2cc98ebc Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-04 18:37:25 +08:00
44b9f49ab1 feat: enhance field item interaction and add preprocessing parameters hooks 2025-06-04 18:37:19 +08:00
f7f7952951 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-04 18:10:03 +08:00
a7fa5044e3 datasource 2025-06-04 18:09:31 +08:00
eb84134706 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 17:39:46 +08:00
fbca9010f3 r2 2025-06-04 17:39:31 +08:00
0bf0c7dbe8 feat: var type to inner 2025-06-04 17:34:22 +08:00
e071bd63e6 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 17:29:55 +08:00
8a147a00e8 r2 2025-06-04 17:29:39 +08:00
c9a4c66b07 fix: update input type from 'number-input' to 'number' for consistency 2025-06-04 16:58:08 +08:00
edec654b68 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 16:51:52 +08:00
a82ab1d152 r2 2025-06-04 16:51:23 +08:00
9934eac15c refactor: refactor data source handling and add form for document processing 2025-06-04 16:50:27 +08:00
c155afac29 chore: rename rag var spelling error 2025-06-04 16:43:24 +08:00
7080c9f279 fix: show rag vars names 2025-06-04 16:29:36 +08:00
e41699cbc8 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 16:23:34 +08:00
133193e7d0 r2 2025-06-04 16:23:12 +08:00
9d6371e0a3 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-04 15:48:58 +08:00
dfe091789c datasource 2025-06-04 15:48:29 +08:00
4c9bf78363 knowledge base node checklist 2025-06-04 15:18:03 +08:00
b95ecaf8a3 Update build-push.yml 2025-06-04 15:17:39 +08:00
7a0e8108ae Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 15:16:47 +08:00
3afd5e73c9 feat: enhance input field dialog with preview functionality and global inputs 2025-06-04 15:16:02 +08:00
c09c8c6e5b r2 2025-06-04 15:12:05 +08:00
cab491795a chore: some special to some fns 2025-06-04 14:54:11 +08:00
e290ddc3e5 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-04 14:50:16 +08:00
db154e33b7 Merge branch 'main' into feat/rag-pipeline 2025-06-04 14:48:10 +08:00
32f9004b5f merge feat/rag-pipeline 2025-06-04 11:43:38 +08:00
225402280e datasource auth 2025-06-04 11:39:31 +08:00
abcca11479 r2 2025-06-03 19:10:40 +08:00
9cdd2cbb27 r2 2025-06-03 19:02:57 +08:00
309fffd1e4 Merge branch 'main' into feat/r2
# Conflicts:
#	api/core/repositories/sqlalchemy_workflow_node_execution_repository.py
#	api/core/workflow/entities/node_entities.py
#	api/core/workflow/enums.py
2025-06-03 18:56:49 +08:00
0a9f50e85f Merge branch 'main' into feat/rag-pipeline 2025-06-03 18:44:53 +08:00
ed1d71f4d0 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-03 18:33:07 +08:00
7039ec33b9 refactor: update retrieval search method from invertedIndex to keywordSearch 2025-06-03 18:33:01 +08:00
025dc7c781 feat: can show rag vars 2025-06-03 18:32:52 +08:00
4130c50643 r2 2025-06-03 18:32:39 +08:00
7b7f8ef51d r2 2025-06-03 18:12:24 +08:00
bad451d5ec Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-03 17:42:53 +08:00
87c15062e6 feat: enhance document processing with embedding and rule detail components 2025-06-03 17:42:40 +08:00
573cd15e77 r2 2025-06-03 16:52:21 +08:00
ab1730bbaa r2 2025-06-03 16:51:21 +08:00
163bae3aaf input rag variable 2025-06-03 16:07:58 +08:00
270edd43ab r2 2025-06-03 15:53:17 +08:00
b8f3b23b1a r2 2025-06-03 15:51:31 +08:00
b9c6496fea datasource default value & publish sync draft 2025-06-03 14:42:17 +08:00
0486aa3445 r2 2025-06-03 13:30:51 +08:00
5fb771218c fix: update types and improve data handling in pipeline components 2025-06-03 10:14:48 +08:00
3fb02a7933 r2 2025-05-30 17:28:09 +08:00
898495b5c4 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-30 15:54:46 +08:00
08624878cf fix: update ChunkStructureEnum values for consistency with model naming 2025-05-30 15:53:32 +08:00
6fe473f0fa knowledge base node 2025-05-30 15:45:58 +08:00
11cf23e5fc Merge remote-tracking branch 'origin/feat/r2' into feat/r2 2025-05-30 15:42:56 +08:00
631768ea1d r2 2025-05-30 15:42:36 +08:00
e1d658b482 Update build-push.yml 2025-05-30 15:26:51 +08:00
1274aaed5d fix: add dataset mutation context to Popup component 2025-05-30 15:17:19 +08:00
9be036e0ca Merge branch 'main' into feat/rag-pipeline 2025-05-30 15:10:16 +08:00
7284569c5f Update build-push.yml 2025-05-30 01:02:33 +08:00
976b465e76 r2 2025-05-30 00:55:06 +08:00
804e55824d r2 2025-05-30 00:37:36 +08:00
69529fb16d r2 2025-05-30 00:37:27 +08:00
cb5cfb2dae r2 2025-05-30 00:03:43 +08:00
a826879cf7 Merge branch 'main' into feat/r2 2025-05-29 23:04:38 +08:00
e7c48c0b69 r2 2025-05-29 23:04:04 +08:00
558a280fc8 datasource type 2025-05-29 18:21:29 +08:00
2158c03231 fix: update button disabled state to reflect publishedAt status in Popup component 2025-05-29 17:53:08 +08:00
a61f1f8eb0 refactor: replace anchor tag with Link component for navigation in Actions 2025-05-29 17:42:00 +08:00
9f724c19db refactor: refactor navigation components to use Link for improved routing 2025-05-29 17:33:04 +08:00
4ae936b263 refactor: refactor navigation handling in dataset components to use button elements 2025-05-29 15:48:54 +08:00
80875a109a feat: add logic to handle navigation based on pipeline status in DatasetCard 2025-05-29 15:22:29 +08:00
121e54f3e3 plugins page 2025-05-29 15:18:27 +08:00
1c2c4b62f8 run & tracing 2025-05-29 14:31:35 +08:00
9176790adf feat: enhance dataset detail layout with button disable logic based on pipeline status 2025-05-29 14:06:12 +08:00
6ff6525d1d test run 2025-05-29 11:30:42 +08:00
71ce505631 data source panel 2025-05-29 11:03:22 +08:00
11dfe3713f refactor: enhance document upload flow with step indicators and file preview handling 2025-05-29 10:18:11 +08:00
a025db137d Merge branch 'main' into feat/r2 2025-05-29 09:54:28 +08:00
797d044714 r2 2025-05-29 09:53:42 +08:00
c4169f8aa0 Merge branch 'main' into feat/rag-pipeline 2025-05-29 09:39:36 +08:00
3005419573 feat: implement document upload steps and enhance test run panel with new hooks and components 2025-05-28 18:34:26 +08:00
7f59ffe7af r2 2025-05-28 17:56:04 +08:00
cc7ad5ac97 feat: add input field variables change sync 2025-05-28 16:38:49 +08:00
769b5e185a workflow init config staletime 2025-05-28 15:36:10 +08:00
9e763c9e87 feat: enhance file uploader and test run panel with batch upload limits and tooltips 2025-05-28 14:51:10 +08:00
b9214ca76b knowledge base default data 2025-05-28 13:57:24 +08:00
29d2f2339b refactor: enhance document preview functionality and refactor form handling 2025-05-28 13:44:37 +08:00
5ac1e3584d Merge branch 'main' into feat/rag-pipeline 2025-05-28 11:01:56 +08:00
dd0cf6fadc refactor: streamline data source type handling and improve FieldList props 2025-05-28 10:07:12 +08:00
b320ebe2ba datasource variables 2025-05-27 18:44:29 +08:00
377093b776 fix: conditionally render FieldListContainer based on inputFields length 2025-05-27 18:19:10 +08:00
70119a054a fix: add is_preview flag to datasource submission and improve dataset card rendering logic 2025-05-27 17:54:39 +08:00
69d1e3ec7d input field in datasource 2025-05-27 17:42:30 +08:00
365157c37d refactor: enhance action button logic to include workflow running state 2025-05-27 15:43:10 +08:00
4bc0a1bd37 knowledge base node init 2025-05-27 15:28:35 +08:00
d6640f2adf refactor: streamline input field data conversion and enhance datasource component 2025-05-27 15:25:35 +08:00
987f845e79 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-27 14:42:12 +08:00
84daf49047 node meta data 2025-05-27 14:40:56 +08:00
31e183ef0d refactor: enhance datasource handling by adding fileExtensions support 2025-05-27 14:39:52 +08:00
754a1d1197 refactor: add DatasourceIcon component and update related hooks and options 2025-05-27 14:17:55 +08:00
049a6de4b3 refactor: update data source handling and replace icon implementation 2025-05-27 13:52:43 +08:00
6bd28cadc4 datasource icon 2025-05-27 13:42:13 +08:00
3b9a0b1d25 datasource icon 2025-05-27 11:27:25 +08:00
db963a638c Merge branch 'main' into feat/rag-pipeline 2025-05-27 11:03:49 +08:00
dcb4c9e84a refactor: refactor datasource type handling 2025-05-27 11:01:38 +08:00
5fc2bc58a9 r2 2025-05-27 00:01:23 +08:00
d333645e09 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-26 17:48:59 +08:00
2812c774c6 fix(i18n): Update economy index method description to include keyword count 2025-05-26 17:48:54 +08:00
e2f3f0ae4c datasource 2025-05-26 17:47:03 +08:00
83ca7f8deb feat: add datasource support to PluginDeclaration and PluginCategory 2025-05-26 17:32:25 +08:00
e6c6fa8ed8 tool icon 2025-05-26 17:28:16 +08:00
678d6ffe2b r2 2025-05-26 17:00:16 +08:00
cef77a3717 datasource icon 2025-05-26 16:41:50 +08:00
28726b6cf3 block selector 2025-05-26 16:33:08 +08:00
ef0e41de07 r2 2025-05-26 16:02:11 +08:00
dc2b63b832 Merge branch 'main' into feat/rag-pipeline 2025-05-26 15:58:15 +08:00
0478fc9649 datasource node variable 2025-05-26 15:57:34 +08:00
1b07e612d2 r2 2025-05-26 15:49:37 +08:00
38cce3f62a r2 2025-05-26 14:52:09 +08:00
35be8721b9 Merge branch 'main' into feat/r2 2025-05-26 14:50:33 +08:00
665ffbdc10 r2 2025-05-26 14:49:59 +08:00
b5f88c77a3 datasource list 2025-05-26 14:13:59 +08:00
324c0d7b4c refactor: improve layout and styling in TestRunPanel and FilePreview components 2025-05-26 14:06:04 +08:00
13e3f17493 refactor: standardize terminology by renaming 'data-source' to 'datasource' across components and translations 2025-05-26 10:50:39 +08:00
841bd35ebb Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-26 09:49:15 +08:00
ccefd41606 refactor: rename InputType to InputTypeEnum and update related usages for consistency 2025-05-26 09:48:17 +08:00
ec1c4efca9 r2 2025-05-25 23:09:01 +08:00
0f10852b6b Merge remote-tracking branch 'origin/feat/r2' into feat/r2 2025-05-23 19:30:59 +08:00
6d547447d3 r2 2025-05-23 19:30:48 +08:00
6123f1ab21 refactor: reorganize imports and fix datasource endpoint URL 2025-05-23 19:22:50 +08:00
e7370766bd datasource 2025-05-23 18:19:28 +08:00
db4958be05 fix: fix modal handling in InputFieldEditor 2025-05-23 17:52:00 +08:00
a15bf8e8fe remove output schema 2025-05-23 17:35:26 +08:00
70d2c78176 r2 2025-05-23 17:13:09 +08:00
42fcda3dc8 r2 2025-05-23 17:11:56 +08:00
ac049d938e Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-23 16:57:46 +08:00
3af61f4b5d refactor: update input type mappings and enums for consistency across components 2025-05-23 16:57:27 +08:00
e19adbbbc5 datasource 2025-05-23 16:27:49 +08:00
64d997fdb0 r2 2025-05-23 15:55:41 +08:00
a49942b949 fix: rename first_step_parameters 2025-05-23 15:12:31 +08:00
4460d96e58 feat: add oauth schema 2025-05-23 15:11:40 +08:00
a7d5f2f53b apply ruff 2025-05-23 15:10:56 +08:00
c9bf99a1e2 refactor: update input variable types and initial data handling in pipeline components 2025-05-23 15:10:20 +08:00
4300ebc8aa fix: remove provide type 2025-05-23 15:10:16 +08:00
720ce79901 checklist & datasource icon 2025-05-23 14:26:06 +08:00
693107a6c8 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-23 13:54:59 +08:00
583db24ee7 refactor: update CustomActions type to use structured props across form components 2025-05-23 13:54:49 +08:00
7d92574e02 datasource panel 2025-05-23 11:51:17 +08:00
5aaa06c8b0 refactor: integrate routing for document creation in Popup component 2025-05-23 11:19:57 +08:00
52b773770b refactor: update datasource handling in InputFieldDialog and Datasource components 2025-05-23 11:07:48 +08:00
23adc7d8a8 datasource 2025-05-23 10:47:31 +08:00
e3708bfa85 refactor: enhance ChunkPreview with form handling and preview functionality 2025-05-23 10:29:59 +08:00
7d65e9980c Merge branch 'main' into feat/rag-pipeline 2025-05-23 09:35:08 +08:00
b93d26ee9f Merge remote-tracking branch 'origin/feat/r2' into feat/r2
# Conflicts:
#	api/core/datasource/entities/datasource_entities.py
2025-05-23 00:06:51 +08:00
b82b26bba5 r2 2025-05-23 00:05:57 +08:00
21c24977d8 refactor: enhance document processing UI and functionality with new components and translations 2025-05-22 23:05:58 +08:00
fe435c23c3 i18n 2025-05-22 17:44:07 +08:00
ead1209f98 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-22 17:41:18 +08:00
3994bb1771 refactor: refactor document processing components and update translations 2025-05-22 17:39:39 +08:00
327690e4a7 merge main 2025-05-22 16:45:13 +08:00
c2a7e0e986 version panel 2025-05-22 16:43:30 +08:00
faf6b9ea03 refactor: refactor preview components 2025-05-22 14:49:40 +08:00
3bfc602561 refactor: update datasource entity structure and parameter handling
- Renamed and split parameters in DatasourceEntity into first_step_parameters and second_step_parameters.
- Updated validation methods for new parameter structure.
- Adjusted datasource_node to reference first_step_parameters.
- Cleaned up unused imports and improved type hints in workflow.py.
2025-05-21 20:36:26 +08:00
5fa2aca2c8 feat: add oauth schema to datasource 2025-05-21 20:29:59 +08:00
69a60101fe Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-21 16:37:08 +08:00
b18519b824 refactor: add create-from-pipeline page and associated components for document processing 2025-05-21 16:37:02 +08:00
0d01025254 parallel check 2025-05-21 16:34:41 +08:00
eef1542cb3 use available nodes 2025-05-21 15:51:05 +08:00
9aef4b6d6b refactor: Notion component and add NotionPageSelector for improved page selection 2025-05-21 14:02:37 +08:00
7dba83754f Merge branch 'main' into feat/rag-pipeline 2025-05-21 13:42:28 +08:00
e2585bc778 Merge branch 'main' into feat/rag-pipeline 2025-05-21 11:27:50 +08:00
cc6e2558ef Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-21 10:53:29 +08:00
20343facad refactor: website data source components and hooks 2025-05-21 10:53:18 +08:00
eff123a11c checklist 2025-05-20 16:52:45 +08:00
9bafd3a226 r2 2025-05-20 15:41:10 +08:00
82be119fec Merge branch 'main' into feat/r2 2025-05-20 15:18:52 +08:00
a64df507f6 r2 2025-05-20 15:18:33 +08:00
cf73faf174 feat: add FileUploaderField and TextAreaField components; enhance BaseField to support file inputs 2025-05-20 15:09:30 +08:00
ba52bf27c1 r2 2025-05-20 14:57:26 +08:00
55f4177b01 merge main 2025-05-20 14:03:54 +08:00
14a9052d60 refactor: update variable naming for consistency and improve data source handling in pipeline components 2025-05-20 11:42:22 +08:00
314a2f9be8 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-19 18:21:29 +08:00
8eee344fbb fix: correct hover state logic and refactor environment variable handling in FieldItem and usePipelineInit 2025-05-19 18:21:17 +08:00
0e0a266142 merge main 2025-05-19 18:11:45 +08:00
7bce35913d i18n 2025-05-19 18:09:12 +08:00
7898dbd5bf Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-19 16:26:24 +08:00
bd1073ff1a refactor: update input variable types to use PipelineInputVarType and simplify form data handling 2025-05-19 16:26:13 +08:00
1bbd572593 option card 2025-05-19 15:59:04 +08:00
5199297f61 run and history 2025-05-19 14:23:40 +08:00
c5a2f43ceb refactor: replace BuiltinToolManageService with RagPipelineManageService for datasource management and remove unused datasource engine and related code 2025-05-16 18:42:07 +08:00
8d4ced227e fix: update click handler logic in OptionCard 2025-05-16 17:54:41 +08:00
f481075f8f pipeline sync draft 2025-05-16 17:48:01 +08:00
836cf6453e pipeline sync draft 2025-05-16 17:48:01 +08:00
8bea88c8cc r2 2025-05-16 17:22:17 +08:00
4b7274f9a5 fix: update link in Form component and correct endpoint for related apps query 2025-05-16 16:49:43 +08:00
7de5585da6 refactor: replace SWR with custom hooks for dataset detail and related apps; update context usage in components 2025-05-16 16:32:25 +08:00
87dc80f6fa fix: add cursor pointer and hover effect to MemberItem; adjust padding in PermissionItem 2025-05-16 15:52:28 +08:00
a008c04331 refactor: standardize naming for load more handlers and navigation items across components 2025-05-16 15:43:28 +08:00
56b66b8a57 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-16 15:15:00 +08:00
35a7add4e9 refactor: refactor pipeline-related components and services to use template terminology 2025-05-16 15:14:50 +08:00
f1fe143962 add i18n 2025-05-16 14:53:39 +08:00
9e72afee3c r2 2025-05-16 14:00:35 +08:00
613b94a6e6 r2 2025-05-16 13:45:47 +08:00
7b0d38f7d3 r2 2025-05-16 12:02:35 +08:00
4ff971c8a3 r2 2025-05-16 11:26:56 +08:00
019ef74bf2 refactor: replace Container with List, update DatasetCard z-index, and implement useDatasetList for data fetching 2025-05-16 10:50:31 +08:00
2670557258 merge main 2025-05-16 10:09:24 +08:00
93ac6d37e9 r2 2025-05-15 16:44:55 +08:00
e710a8402c r2 2025-05-15 16:07:17 +08:00
360f8a3375 Merge branch 'main' into feat/r2 2025-05-15 15:15:23 +08:00
818eb46a8b r2 2025-05-15 15:14:52 +08:00
f5c297708b Merge branch 'main' into feat/rag-pipeline 2025-05-15 14:52:54 +08:00
bf8324f7f7 tag filter 2025-05-15 14:52:00 +08:00
b730d153ea Merge branch 'main' into feat/rag-pipeline 2025-05-15 10:27:47 +08:00
11977596c9 merge main 2025-05-15 10:14:40 +08:00
612dca8b7d feat: add WorkflowPreview component to Details and define graph structure in PipelineTemplateByIdResponse 2025-05-14 18:20:29 +08:00
53018289d4 workflow preview 2025-05-14 18:02:58 +08:00
958ff44707 refactor: simplify import DSL confirmation request structure 2025-05-14 16:27:59 +08:00
d910770b3c feat: add dataset_id to DSL import responses and update routing logic in CreateFromDSLModal 2025-05-14 16:00:17 +08:00
5a8f10520f feat: refactor template card actions and details to standardize prop names; add create modal for dataset creation 2025-05-14 15:53:17 +08:00
df928772c0 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-14 14:49:10 +08:00
b713218cab feat: add DSL modal header and tab components; enhance pipeline import functionality 2025-05-14 14:49:01 +08:00
9ea2123e7f component add readonly 2025-05-14 11:14:49 +08:00
de0cb06f8c feat: implement create dataset pipeline forms and modals 2025-05-14 10:48:54 +08:00
cfb6d59513 Merge branch 'main' into feat/rag-pipeline 2025-05-13 18:38:26 +08:00
4c30d1c1eb feat: Enhance InputFieldDialog and workflow hooks to handle ragPipelineVariables 2025-05-13 16:33:38 +08:00
5bb02c79cc Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-13 16:18:07 +08:00
0a891e5392 feat: Update retrieval method configuration to use new OptionCard component and improve layout 2025-05-13 16:17:59 +08:00
f6978ce6b1 fix: pipeline init 2025-05-13 16:02:36 +08:00
4d68aadc1c Refactor: Replace IndexMethodRadio with IndexMethod component, add keyword number functionality, and update related translations 2025-05-13 15:35:21 +08:00
cef6463847 feat: Enhance dataset settings with chunk structure and icon selection 2025-05-13 11:07:31 +08:00
39b8331f81 merge main 2025-05-09 18:20:56 +08:00
212d4c5899 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-09 16:35:14 +08:00
97ec855df4 feat: enhance input field management with internationalization support and improved state handling 2025-05-09 16:35:09 +08:00
d83b9b70e3 fix: import 2025-05-09 16:25:34 +08:00
b51c18c2cf pipeline init 2025-05-09 15:53:31 +08:00
7e31da7882 refactor: update data source handling and improve internationalization support in test run panel 2025-05-09 12:56:57 +08:00
d9ed61287d Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-08 18:29:58 +08:00
6024dbe98d refactor: simplify type definitions in form components and update related configurations 2025-05-08 18:29:49 +08:00
13ce6317f1 pipeline header 2025-05-08 18:27:44 +08:00
0099f2296d Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-08 16:26:37 +08:00
2d93bc6725 refactor: update DatasetInfo component layout and styling for better responsiveness 2025-05-08 16:26:30 +08:00
cb52f9ecc5 pipeline header 2025-05-08 15:32:19 +08:00
1fbeb3a21a refactor: enhance dataset components with new icons and improve layout structure 2025-05-08 13:48:14 +08:00
38f1a42ce8 refactor: remove unused icon components and update imports in dataset components 2025-05-08 11:15:27 +08:00
3d11af2dd6 refactor: update imports for knowledge and pipeline components 2025-05-08 10:44:26 +08:00
d1fd5db7f8 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-08 09:52:53 +08:00
c240cf3bb1 refactor: dataset creation to support pipeline datasets, update related types and hooks 2025-05-08 09:42:02 +08:00
bbbcd68258 portal element 2025-05-07 18:14:26 +08:00
7ce9710229 feat: add pipeline template details and import functionality, enhance dataset pipeline management 2025-05-07 18:09:38 +08:00
3f7f21ce70 show test run panel 2025-05-07 17:31:06 +08:00
fa8ab4ea04 rag pipeline 2025-05-07 16:30:24 +08:00
3f1363503b r2 2025-05-07 16:19:09 +08:00
3f52f491d7 feat: knowledge base node 2025-05-07 15:08:37 +08:00
e86a3fc672 feat: Enhance dataset pipeline creation and management with new export and delete functionalities, improved internationalization, and refactor for better clarity 2025-05-07 14:29:01 +08:00
6f77f67427 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-07 11:30:20 +08:00
4025cd0b46 feat: Refactor dataset pipeline creation components and add internationalization support 2025-05-07 11:30:13 +08:00
3bbb22750c merge main 2025-05-06 18:28:44 +08:00
d196872059 merge main 2025-05-06 17:31:48 +08:00
a478d95950 feat: knowledge base node 2025-05-06 17:25:18 +08:00
12c060b795 feat: enhance dataset icon handling by making icon background and URL optional 2025-05-06 16:58:37 +08:00
c480c3d881 feat: enhance dataset detail layout with new icon structure and additional document count display 2025-05-06 16:37:21 +08:00
a998022c12 r2 2025-05-06 16:18:34 +08:00
a25cc4e8af r2 2025-05-06 13:56:13 +08:00
b4bccf5fef feat: Add External Knowledge Base and Pipeline icons, update DatasetCard component 2025-05-06 11:58:53 +08:00
14ad34af71 feat: enhance dataset creation UI with new pipeline list and edit functionality 2025-05-03 17:16:00 +08:00
7ed398267f Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-03 13:43:43 +08:00
fc9556e057 feat: add dataset creation components and functionality 2025-05-03 13:43:37 +08:00
acf6872a50 fix: variable selector 2025-04-30 16:55:00 +08:00
e689f21a60 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-04-30 14:16:25 +08:00
a7f9259e27 feat: new Dataset list 2025-04-30 14:16:13 +08:00
a46b4e3616 Merge branch 'main' into feat/rag-pipeline 2025-04-29 16:27:49 +08:00
e7e12c1d2e fix: node selector 2025-04-29 16:26:53 +08:00
66176c4d71 fix: node default 2025-04-29 16:14:20 +08:00
2613a380b6 fix: Correct link path for creating datasets and optimize Link component with memoization 2025-04-29 15:47:29 +08:00
9392ce259f feat: Refactor dataset components and update translations for new dataset creation options 2025-04-29 15:44:32 +08:00
d1287f08b4 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-04-29 10:44:37 +08:00
7ee8472a5f feat: Add SegmentedControl component with styling and option handling 2025-04-29 10:44:03 +08:00
cdb615deeb knowledge base node 2025-04-28 18:37:18 +08:00
abbba1d004 knowledge base node 2025-04-28 18:37:17 +08:00
3c386c63a6 Merge remote-tracking branch 'origin/feat/r2' into feat/r2 2025-04-28 16:19:29 +08:00
49d1846e63 r2 2025-04-28 16:19:12 +08:00
53f2882077 feat: Implement document processing component with configuration and action handling 2025-04-28 15:55:24 +08:00
8f07e088f5 feat: Add JinaReader and WaterCrawl components with configurations and schema handling 2025-04-28 14:33:01 +08:00
f71b0eccb2 Refactor: dataset creation components and implement Firecrawl functionality 2025-04-28 13:33:16 +08:00
5b89d36ea1 feat: Update Zod schema generation for file types and upload methods to use new constants 2025-04-27 20:44:05 +08:00
7c3af74b0d feat: Update useConfigurations and useHiddenConfigurations to use InputVarType constants for type values 2025-04-27 20:34:25 +08:00
d1d83f8a2a feat: Enhance form components with hidden fields and popup properties for improved configuration 2025-04-27 20:17:50 +08:00
839fe12087 feat: Update OptionsField to use correct Options type and enhance Zod schema generation for options and select input types 2025-04-27 18:45:22 +08:00
fd8ee9f53e Refactor input field form components and schema 2025-04-27 15:29:11 +08:00
c2d02f8f4d Merge branch 'main' into feat/r2 2025-04-27 14:31:19 +08:00
8367ae85de feat: Replace BaseVarType with BaseFieldType for consistent field type usage across components 2025-04-27 10:16:16 +08:00
d1f0e6e5c2 feat: Implement Zod schema generation for form validation and update form component usage 2025-04-27 09:56:48 +08:00
7deb44f864 feat: Add additional field components to form hook for enhanced functionality 2025-04-26 22:43:51 +08:00
d12e9b81e3 feat: Introduce new form field components and enhance existing ones with label options 2025-04-26 21:50:21 +08:00
b1fbaaed95 refactor: Simplify type checks for form field rendering and correct comment grammar 2025-04-25 18:39:05 +08:00
3f8b0b937c Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-04-25 18:13:57 +08:00
734c62998f feat: Implement dynamic form field rendering and replace SubmitButton with Actions component 2025-04-25 18:13:52 +08:00
4792ca1813 knowledge base node 2025-04-25 17:24:47 +08:00
d4007ae073 r2 2025-04-25 15:49:36 +08:00
389f15f8e3 r2 2025-04-25 14:56:22 +08:00
9437145218 r2 2025-04-25 13:42:57 +08:00
076924bbd6 rag pipeline main 2025-04-25 11:32:17 +08:00
97cf6b2d65 refactor workflow 2025-04-25 11:04:14 +08:00
f317ef2fe2 feat: Refactor NotionConnector integration and add Header component for improved UI in NotionPageSelector 2025-04-24 21:26:54 +08:00
f7de55364f chore: refactor workflow 2025-04-24 16:29:58 +08:00
de30e9278c feat: Refactor Notion and LocalFile components to remove unused VectorSpaceFull prop and improve step indicator logic 2025-04-24 15:47:22 +08:00
b9ab1555fb r2 2025-04-24 15:42:30 +08:00
44b9ce0951 feat: Implement Notion connector and related components for data source selection in the RAG pipeline 2025-04-24 15:32:04 +08:00
d768094376 feat: Refactor file upload configuration and validation logic 2025-04-24 13:46:50 +08:00
93f83086c1 feat: add CustomSelectField component and integrate with input field form 2025-04-23 22:16:19 +08:00
8d9c252811 block selector & data source node 2025-04-22 16:46:33 +08:00
c7f4b41920 r2 2025-04-22 16:08:58 +08:00
efb27eb443 feat: enhance FieldList component with sorting and dynamic input field management 2025-04-22 12:56:22 +08:00
5b8c43052e feat: implement input field dialog and related components for rag pipeline 2025-04-22 11:29:03 +08:00
e04ae927b6 Merge branch 'main' into feat/rag-pipeline 2025-04-22 10:14:28 +08:00
ac68d62d1c Merge branch 'main' into feat/rag-pipeline 2025-04-21 18:07:15 +08:00
caa17b8fe9 rag pipeline store 2025-04-21 17:52:34 +08:00
cd1562ee24 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-04-21 16:58:28 +08:00
47af1a9c42 feat: add InputField component and integrate into RagPipeline panel 2025-04-21 16:58:22 +08:00
0cd6a427af add publisher 2025-04-21 14:41:41 +08:00
51165408ed feat: implement input field form with file upload settings and validation 2025-04-21 09:53:35 +08:00
a2dc38f90a feat: add rag pipeline store slice 2025-04-18 15:51:40 +08:00
a36436b585 feat: add rag pipeline store slice 2025-04-18 15:46:54 +08:00
2d87823fc6 init rag pipeline 2025-04-18 14:56:34 +08:00
d238da9826 Merge branch 'main' into feat/rag-pipeline 2025-04-18 14:00:58 +08:00
6eef5990c9 feat: enhance form components with additional props for validation and tooltips; add OptionsField component 2025-04-18 11:32:23 +08:00
5c4bf2a9e4 r2 2025-04-17 15:07:23 +08:00
0345eb4659 feat: add new form components including CheckboxField, NumberInputField, SelectField, TextField, and SubmitButton with updated input sizes 2025-04-17 13:33:33 +08:00
71f78e0d33 feat: replace existing page content with DemoForm component for improved layout 2025-04-16 14:16:56 +08:00
942648e9e9 feat: implement form components including CheckboxField, SelectField, TextField, and SubmitButton with validation 2025-04-16 14:16:32 +08:00
d841581679 feat: add IndeterminateIcon component and update Checkbox to support indeterminate state
refactor: remove mixed state handling and update related styles
fix: update useCallback dependencies for better performance
2025-04-15 17:30:18 +08:00
9f8e05d9f0 r2 2025-04-14 18:17:17 +08:00
3340775052 r2 2025-04-14 11:10:44 +08:00
9987774471 r2 2025-04-10 18:00:22 +08:00
1950 changed files with 30336 additions and 89522 deletions

View File

@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/devcontainers/python:3.12-bookworm
+FROM mcr.microsoft.com/devcontainers/python:3.12
 
 RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
     && apt-get -y install libgmp-dev libmpfr-dev libmpc-dev

View File

@@ -11,7 +11,7 @@
     "nodeGypDependencies": true,
     "version": "lts"
   },
-  "ghcr.io/devcontainers-extra/features/npm-package:1": {
+  "ghcr.io/devcontainers-contrib/features/npm-package:1": {
     "package": "typescript",
     "version": "latest"
   },

View File

@@ -1,16 +1,15 @@
 #!/bin/bash
 
-WORKSPACE_ROOT=$(pwd)
 corepack enable
 cd web && pnpm install
 pipx install uv
 
-echo "alias start-api=\"cd $WORKSPACE_ROOT/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug\"" >> ~/.bashrc
-echo "alias start-worker=\"cd $WORKSPACE_ROOT/api && uv run python -m celery -A app.celery worker -P threads -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage\"" >> ~/.bashrc
-echo "alias start-web=\"cd $WORKSPACE_ROOT/web && pnpm dev\"" >> ~/.bashrc
-echo "alias start-web-prod=\"cd $WORKSPACE_ROOT/web && pnpm build && pnpm start\"" >> ~/.bashrc
-echo "alias start-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d\"" >> ~/.bashrc
-echo "alias stop-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down\"" >> ~/.bashrc
+echo 'alias start-api="cd /workspaces/dify/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
+echo 'alias start-worker="cd /workspaces/dify/api && uv run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage"' >> ~/.bashrc
+echo 'alias start-web="cd /workspaces/dify/web && pnpm dev"' >> ~/.bashrc
+echo 'alias start-web-prod="cd /workspaces/dify/web && pnpm build && pnpm start"' >> ~/.bashrc
+echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d"' >> ~/.bashrc
+echo 'alias stop-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down"' >> ~/.bashrc
 
 source /home/vscode/.bashrc
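The quoting difference between the removed and added alias lines is the functional change here: inside double quotes, `$WORKSPACE_ROOT` expands at setup time, so the alias written to `~/.bashrc` carries whatever path the script was run from; inside single quotes nothing expands, so the hardcoded `/workspaces/dify` path is written literally. A minimal sketch of the two behaviors (paths are illustrative only):

```bash
#!/bin/bash
WORKSPACE_ROOT=$(pwd)   # e.g. /workspaces/dify

# Double quotes: $WORKSPACE_ROOT expands now; .bashrc receives the resolved path.
echo "alias start-web=\"cd $WORKSPACE_ROOT/web && pnpm dev\"" >> ~/.bashrc

# Single quotes: no expansion; .bashrc receives the literal text between the quotes.
echo 'alias start-web="cd /workspaces/dify/web && pnpm dev"' >> ~/.bashrc
```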

View File

@@ -1,8 +1,5 @@
 blank_issues_enabled: false
 contact_links:
-  - name: "\U0001F510 Security Vulnerabilities"
-    url: "https://github.com/langgenius/dify/security/advisories/new"
-    about: Report security vulnerabilities through GitHub Security Advisories to ensure responsible disclosure. 💡 Please do not report security vulnerabilities in public issues.
   - name: "\U0001F4A1 Model Providers & Plugins"
     url: "https://github.com/langgenius/dify-official-plugins/issues/new/choose"
     about: Report issues with official plugins or model providers, you will need to provide the plugin version and other relevant details.

View File

@@ -39,11 +39,25 @@ jobs:
       - name: Install dependencies
         run: uv sync --project api --dev
 
       - name: Run Unit tests
         run: |
           uv run --project api bash dev/pytest/pytest_unit_tests.sh
 
+      - name: Run pyrefly check
+        run: |
+          cd api
+          uv add --dev pyrefly
+          uv run pyrefly check || true
+
+      - name: Coverage Summary
+        run: |
+          set -x
+          # Extract coverage percentage and create a summary
+          TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])')
+
+          # Create a detailed coverage summary
+          echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY
+          echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY
+          uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY
+
       - name: Run dify config tests
         run: uv run --project api dev/pytest/pytest_config_tests.py
@@ -79,19 +93,3 @@ jobs:
       - name: Run TestContainers
         run: uv run --project api bash dev/pytest/pytest_testcontainers.sh
-
-      - name: Run Unit tests
-        run: |
-          uv run --project api bash dev/pytest/pytest_unit_tests.sh
-
-      - name: Coverage Summary
-        run: |
-          set -x
-          # Extract coverage percentage and create a summary
-          TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])')
-
-          # Create a detailed coverage summary
-          echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY
-          echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY
-          uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY
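The Coverage Summary step assumes a `coverage.json` file already exists when it runs; coverage.py emits one via its `json` report command. A minimal local sketch of the same extraction, where the `coverage run` invocation is an assumption rather than something shown in this workflow:

```bash
# Generate coverage data, then the JSON report the workflow step parses (assumed invocation).
uv run --project api coverage run -m pytest tests/unit_tests/
uv run --project api coverage json -o coverage.json

# Identical one-liner to the workflow's TOTAL_COVERAGE extraction.
python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])'
```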

View File

@@ -15,12 +15,10 @@ jobs:
       # Use uv to ensure we have the same ruff version in CI and locally.
       - uses: astral-sh/setup-uv@v6
         with:
-          python-version: "3.11"
+          python-version: "3.12"
       - run: |
           cd api
           uv sync --dev
-          # fmt first to avoid line too long
-          uv run ruff format ..
           # Fix lint errors
           uv run ruff check --fix .
           # Format code
@@ -30,8 +28,6 @@ jobs:
         run: |
           uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all
           uvx --from ast-grep-cli sg --pattern 'session.query($WHATEVER).filter($HERE)' --rewrite 'session.query($WHATEVER).where($HERE)' -l py --update-all
-          uvx --from ast-grep-cli sg -p '$A = db.Column($$$B)' -r '$A = mapped_column($$$B)' -l py --update-all
-          uvx --from ast-grep-cli sg -p '$A : $T = db.Column($$$B)' -r '$A : $T = mapped_column($$$B)' -l py --update-all
           # Convert Optional[T] to T | None (ignoring quoted types)
           cat > /tmp/optional-rule.yml << 'EOF'
           id: convert-optional-to-union
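The heredoc is truncated in this diff, so the body of the ast-grep rule is not shown. As a hypothetical sketch only, a rule file with that id would follow ast-grep's `id`/`language`/`rule`/`fix` layout; the `pattern` and `fix` values below are assumptions, not the repository's actual rule:

```bash
# Hypothetical reconstruction of the truncated rule file (actual body not in this diff).
cat > /tmp/optional-rule.yml << 'EOF'
id: convert-optional-to-union
language: python
rule:
  pattern: Optional[$T]
fix: $T | None
EOF
# ast-grep applies YAML rule files via `sg scan`; --update-all rewrites matches in place.
uvx --from ast-grep-cli sg scan --rule /tmp/optional-rule.yml --update-all .
```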

View File

@@ -4,10 +4,12 @@ on:
   push:
     branches:
       - "main"
-      - "deploy/**"
+      - "deploy/dev"
       - "deploy/enterprise"
       - "build/**"
      - "release/e-*"
       - "hotfix/**"
+      - "deploy/rag-dev"
+      - "feat/rag-2"
   tags:
     - "*"

View File

@@ -4,7 +4,7 @@ on:
   workflow_run:
     workflows: ["Build and Push API & Web"]
     branches:
-      - "deploy/dev"
+      - "deploy/rag-dev"
     types:
       - completed
@@ -13,12 +13,12 @@ jobs:
     runs-on: ubuntu-latest
     if: |
       github.event.workflow_run.conclusion == 'success' &&
-      github.event.workflow_run.head_branch == 'deploy/dev'
+      github.event.workflow_run.head_branch == 'deploy/rag-dev'
     steps:
       - name: Deploy to server
         uses: appleboy/ssh-action@v0.1.8
         with:
-          host: ${{ secrets.SSH_HOST }}
+          host: ${{ secrets.RAG_SSH_HOST }}
           username: ${{ secrets.SSH_USER }}
           key: ${{ secrets.SSH_PRIVATE_KEY }}
           script: |

View File

@@ -1,4 +1,4 @@
-name: Deploy Trigger Dev
+name: Deploy RAG Dev
 
 permissions:
   contents: read
@@ -7,7 +7,7 @@ on:
   workflow_run:
     workflows: ["Build and Push API & Web"]
     branches:
-      - "deploy/trigger-dev"
+      - "deploy/rag-dev"
     types:
       - completed
@@ -16,12 +16,12 @@ jobs:
     runs-on: ubuntu-latest
     if: |
       github.event.workflow_run.conclusion == 'success' &&
-      github.event.workflow_run.head_branch == 'deploy/trigger-dev'
+      github.event.workflow_run.head_branch == 'deploy/rag-dev'
     steps:
      - name: Deploy to server
         uses: appleboy/ssh-action@v0.1.8
         with:
-          host: ${{ secrets.TRIGGER_SSH_HOST }}
+          host: ${{ secrets.RAG_SSH_HOST }}
           username: ${{ secrets.SSH_USER }}
           key: ${{ secrets.SSH_PRIVATE_KEY }}
           script: |

View File

@@ -1,7 +1,6 @@
 #!/bin/bash
 
 yq eval '.services.weaviate.ports += ["8080:8080"]' -i docker/docker-compose.yaml
-yq eval '.services.weaviate.ports += ["50051:50051"]' -i docker/docker-compose.yaml
 yq eval '.services.qdrant.ports += ["6333:6333"]' -i docker/docker-compose.yaml
 yq eval '.services.chroma.ports += ["8000:8000"]' -i docker/docker-compose.yaml
 yq eval '.services["milvus-standalone"].ports += ["19530:19530"]' -i docker/docker-compose.yaml
@@ -14,4 +13,4 @@ yq eval '.services.tidb.ports += ["4000:4000"]' -i docker/tidb/docker-compose.ya
 yq eval '.services.oceanbase.ports += ["2881:2881"]' -i docker/docker-compose.yaml
 yq eval '.services.opengauss.ports += ["6600:6600"]' -i docker/docker-compose.yaml
 
-echo "Ports exposed for sandbox, weaviate (HTTP 8080, gRPC 50051), tidb, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase, opengauss"
+echo "Ports exposed for sandbox, weaviate, tidb, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase, opengauss"

View File

@@ -102,11 +102,7 @@ jobs:
         working-directory: ./web
         run: |
           pnpm run lint
-      - name: Web type check
-        if: steps.changed-files.outputs.any_changed == 'true'
-        working-directory: ./web
-        run: pnpm run type-check
+          pnpm run eslint
 
   docker-compose-template:
     name: Docker Compose Template

.gitignore
View File

@@ -97,7 +97,6 @@ __pypackages__/
 # Celery stuff
 celerybeat-schedule
 celerybeat-schedule.db
-celerybeat.pid
 
 # SageMath parsed files
@ -231,8 +230,4 @@ api/.env.backup
# Benchmark
scripts/stress-test/setup/config/
scripts/stress-test/reports/
# mcp
.playwright-mcp/
.serena/
scripts/stress-test/reports/

View File

@ -8,7 +8,8 @@
"module": "flask",
"env": {
"FLASK_APP": "app.py",
"FLASK_ENV": "development"
"FLASK_ENV": "development",
"GEVENT_SUPPORT": "True"
},
"args": [
"run",
@ -27,7 +28,9 @@
"type": "debugpy",
"request": "launch",
"module": "celery",
"env": {},
"env": {
"GEVENT_SUPPORT": "True"
},
"args": [
"-A",
"app.celery",
@ -37,7 +40,7 @@
"-c",
"1",
"-Q",
"dataset,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,priority_pipeline,pipeline",
"dataset,generation,mail,ops_trace",
"--loglevel",
"INFO"
],
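Outside the debugger, the Celery launch configuration above corresponds to roughly the following shell command. Treat it as a sketch: the hunk elides part of the `args` array, so the `worker` subcommand is assumed from context, and `GEVENT_SUPPORT=True` is the env var debugpy reads to trace gevent greenlets:

```bash
# Approximate shell equivalent of the debugpy Celery config (sketch;
# the worker subcommand and arg order are assumed, not shown in the hunk).
GEVENT_SUPPORT=True uv run --project api celery -A app.celery worker \
  -c 1 -Q dataset,generation,mail,ops_trace --loglevel INFO
```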

View File

@ -4,51 +4,84 @@
Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management.
The codebase is split into:
The codebase consists of:
- **Backend API** (`/api`): Python Flask application organized with Domain-Driven Design
- **Frontend Web** (`/web`): Next.js 15 application using TypeScript and React 19
- **Backend API** (`/api`): Python Flask application with Domain-Driven Design architecture
- **Frontend Web** (`/web`): Next.js 15 application with TypeScript and React 19
- **Docker deployment** (`/docker`): Containerized deployment configurations
## Backend Workflow
## Development Commands
- Run backend CLI commands through `uv run --project api <command>`.
### Backend (API)
- Before submission, all backend modifications must pass local checks: `make lint`, `make type-check`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh`.
All Python commands must be prefixed with `uv run --project api`:
- Use Makefile targets for linting and formatting; `make lint` and `make type-check` cover the required checks.
```bash
# Start development servers
./dev/start-api # Start API server
./dev/start-worker # Start Celery worker
- Integration tests are CI-only and are not expected to run in the local environment.
# Run tests
uv run --project api pytest # Run all tests
uv run --project api pytest tests/unit_tests/ # Unit tests only
uv run --project api pytest tests/integration_tests/ # Integration tests
## Frontend Workflow
# Code quality
./dev/reformat # Run all formatters and linters
uv run --project api ruff check --fix ./ # Fix linting issues
uv run --project api ruff format ./ # Format code
uv run --directory api basedpyright # Type checking
```
### Frontend (Web)
```bash
cd web
pnpm lint
pnpm lint:fix
pnpm test
pnpm lint # Run ESLint
pnpm eslint-fix # Fix ESLint issues
pnpm test # Run Jest tests
```
## Testing & Quality Practices
## Testing Guidelines
- Follow TDD: red → green → refactor.
- Use `pytest` for backend tests with Arrange-Act-Assert structure.
- Enforce strong typing; avoid `Any` and prefer explicit type annotations.
- Write self-documenting code; only add comments that explain intent.
### Backend Testing
## Language Style
- Use `pytest` for all backend tests
- Write tests first (TDD approach)
- Test structure: Arrange-Act-Assert
- **Python**: Keep type hints on functions and attributes, and implement relevant special methods (e.g., `__repr__`, `__str__`).
- **TypeScript**: Use the strict config, lean on ESLint + Prettier workflows, and avoid `any` types.
## Code Style Requirements
## General Practices
### Python
- Prefer editing existing files; add new documentation only when requested.
- Inject dependencies through constructors and preserve clean architecture boundaries.
- Handle errors with domain-specific exceptions at the correct layer.
- Use type hints for all functions and class attributes
- No `Any` types unless absolutely necessary
- Implement special methods (`__repr__`, `__str__`) appropriately
## Project Conventions
### TypeScript/JavaScript
- Backend architecture adheres to DDD and Clean Architecture principles.
- Async work runs through Celery with Redis as the broker.
- Frontend user-facing strings must use `web/i18n/en-US/`; avoid hardcoded text.
- Strict TypeScript configuration
- ESLint with Prettier integration
- Avoid `any` type
## Important Notes
- **Environment Variables**: Always use UV for Python commands: `uv run --project api <command>`
- **Comments**: Only write meaningful comments that explain "why", not "what"
- **File Creation**: Always prefer editing existing files over creating new ones
- **Documentation**: Don't create documentation files unless explicitly requested
- **Code Quality**: Always run `./dev/reformat` before committing backend changes
## Common Development Tasks
### Adding a New API Endpoint
1. Create controller in `/api/controllers/`
1. Add service logic in `/api/services/`
1. Update routes in controller's `__init__.py`
1. Write tests in `/api/tests/`
## Project-Specific Conventions
- All async tasks use Celery with Redis as broker
- **Internationalization**: Frontend supports multiple languages with English (`web/i18n/en-US/`) as the source. All user-facing text must use i18n keys, no hardcoded strings. Edit corresponding module files in `en-US/` directory for translations.
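One side of this diff condenses the backend pre-submission gate into three local checks. Run as a single sequence from the repository root (a sketch; it assumes the Makefile targets and test script path quoted above):

```bash
# Backend pre-submission gate, exactly as listed in the guidance above.
make lint
make type-check
uv run --project api --dev dev/pytest/pytest_unit_tests.sh
```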

View File

@ -6,7 +6,7 @@
本指南和 Dify 一样在不断完善中。如果有任何滞后于项目实际情况的地方,恳请谅解,我们也欢迎任何改进建议。
关于许可证,请花一分钟阅读我们简短的[许可和贡献者协议](../../LICENSE)。同时也请遵循社区[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。
关于许可证,请花一分钟阅读我们简短的[许可和贡献者协议](./LICENSE)。同时也请遵循社区[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。
## 开始之前

View File

@ -6,7 +6,7 @@ Wir müssen wendig sein und schnell liefern, aber wir möchten auch sicherstellen
Dieser Leitfaden ist, wie Dify selbst, in ständiger Entwicklung. Wir sind dankbar für Ihr Verständnis, falls er manchmal hinter dem eigentlichen Projekt zurückbleibt, und begrüßen jedes Feedback zur Verbesserung.
Bitte nehmen Sie sich einen Moment Zeit, um unsere [Lizenz- und Mitwirkungsvereinbarung](../../LICENSE) zu lesen. Die Community hält sich außerdem an den [Verhaltenskodex](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
Bitte nehmen Sie sich einen Moment Zeit, um unsere [Lizenz- und Mitwirkungsvereinbarung](./LICENSE) zu lesen. Die Community hält sich außerdem an den [Verhaltenskodex](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
## Bevor Sie loslegen

View File

@ -6,7 +6,7 @@ Necesitamos ser ágiles y enviar rápidamente dado donde estamos, pero también
Esta guía, como Dify mismo, es un trabajo en constante progreso. Agradecemos mucho tu comprensión si a veces se queda atrás del proyecto real, y damos la bienvenida a cualquier comentario para que podamos mejorar.
En términos de licencia, por favor tómate un minuto para leer nuestro breve [Acuerdo de Licencia y Colaborador](../../LICENSE). La comunidad también se adhiere al [código de conducta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
En términos de licencia, por favor tómate un minuto para leer nuestro breve [Acuerdo de Licencia y Colaborador](./LICENSE). La comunidad también se adhiere al [código de conducta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
## Antes de empezar

View File

@ -6,7 +6,7 @@ Nous devons être agiles et livrer rapidement compte tenu de notre position, mais
Ce guide, comme Dify lui-même, est un travail en constante évolution. Nous apprécions grandement votre compréhension si parfois il est en retard par rapport au projet réel, et nous accueillons tout commentaire pour nous aider à nous améliorer.
En termes de licence, veuillez prendre une minute pour lire notre bref [Accord de Licence et de Contributeur](../../LICENSE). La communauté adhère également au [code de conduite](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
En termes de licence, veuillez prendre une minute pour lire notre bref [Accord de Licence et de Contributeur](./LICENSE). La communauté adhère également au [code de conduite](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
## Avant de vous lancer

View File

@ -6,7 +6,7 @@ Difyに貢献しようとお考えですか素晴らしいですね。私たち
このガイドは、Dify自体と同様に、常に進化し続けています。実際のプロジェクトの進行状況と多少のずれが生じる場合もございますが、ご理解いただけますと幸いです。改善のためのフィードバックも歓迎いたします。
ライセンスについては、[ライセンスと貢献者同意書](../../LICENSE)をご一読ください。また、コミュニティは[行動規範](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)に従っています。
ライセンスについては、[ライセンスと貢献者同意書](./LICENSE)をご一読ください。また、コミュニティは[行動規範](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)に従っています。
## 始める前に

View File

@ -6,7 +6,7 @@ Dify에 기여하려고 하시는군요 - 정말 멋집니다, 당신이 무엇
이 가이드는 Dify 자체와 마찬가지로 끊임없이 진행 중인 작업입니다. 때로는 실제 프로젝트보다 뒤처질 수 있다는 점을 이해해 주시면 감사하겠으며, 개선을 위한 피드백은 언제든지 환영합니다.
라이센스 측면에서, 간략한 [라이센스 및 기여자 동의서](../../LICENSE)를 읽어보는 시간을 가져주세요. 커뮤니티는 또한 [행동 강령](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)을 준수합니다.
라이센스 측면에서, 간략한 [라이센스 및 기여자 동의서](./LICENSE)를 읽어보는 시간을 가져주세요. 커뮤니티는 또한 [행동 강령](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)을 준수합니다.
## 시작하기 전에

View File

@ -6,7 +6,7 @@ Precisamos ser ágeis e entregar rapidamente considerando onde estamos, mas também
Este guia, como o próprio Dify, é um trabalho em constante evolução. Agradecemos muito a sua compreensão se às vezes ele ficar atrasado em relação ao projeto real, e damos as boas-vindas a qualquer feedback para que possamos melhorar.
Em termos de licenciamento, por favor, dedique um minuto para ler nosso breve [Acordo de Licença e Contribuidor](../../LICENSE). A comunidade também adere ao [código de conduta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
Em termos de licenciamento, por favor, dedique um minuto para ler nosso breve [Acordo de Licença e Contribuidor](./LICENSE). A comunidade também adere ao [código de conduta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
## Antes de começar

View File

@ -6,7 +6,7 @@ Bulunduğumuz noktada çevik olmamız ve hızlı hareket etmemiz gerekiyor, ancak
Bu rehber, Dify'ın kendisi gibi, sürekli gelişen bir çalışmadır. Bazen gerçek projenin gerisinde kalırsa anlayışınız için çok minnettarız ve gelişmemize yardımcı olacak her türlü geri bildirimi memnuniyetle karşılıyoruz.
Lisanslama konusunda, lütfen kısa [Lisans ve Katkıda Bulunan Anlaşmamızı](../../LICENSE) okumak için bir dakikanızı ayırın. Topluluk ayrıca [davranış kurallarına](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md) da uyar.
Lisanslama konusunda, lütfen kısa [Lisans ve Katkıda Bulunan Anlaşmamızı](./LICENSE) okumak için bir dakikanızı ayırın. Topluluk ayrıca [davranış kurallarına](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md) da uyar.
## Başlamadan Önce

View File

@ -6,7 +6,7 @@
這份指南與 Dify 一樣,都在持續完善中。如果指南內容有落後於實際專案的情況,還請見諒,也歡迎提供改進建議。
關於授權部分,請花點時間閱讀我們簡短的[授權和貢獻者協議](../../LICENSE)。社群也需遵守[行為準則](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。
關於授權部分,請花點時間閱讀我們簡短的[授權和貢獻者協議](./LICENSE)。社群也需遵守[行為準則](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。
## 開始之前

View File

@ -6,7 +6,7 @@ Chúng tôi cần phải nhanh nhẹn và triển khai nhanh chóng, nhưng cũng
Hướng dẫn này, giống như Dify, đang được phát triển liên tục. Chúng tôi rất cảm kích sự thông cảm của bạn nếu đôi khi nó chưa theo kịp dự án thực tế, và hoan nghênh mọi phản hồi để cải thiện.
Về giấy phép, vui lòng dành chút thời gian đọc [Thỏa thuận Cấp phép và Người đóng góp](../../LICENSE) ngắn gọn của chúng tôi. Cộng đồng cũng tuân theo [quy tắc ứng xử](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
Về giấy phép, vui lòng dành chút thời gian đọc [Thỏa thuận Cấp phép và Người đóng góp](./LICENSE) ngắn gọn của chúng tôi. Cộng đồng cũng tuân theo [quy tắc ứng xử](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
## Trước khi bắt đầu

View File

@ -26,6 +26,7 @@ prepare-web:
@echo "🌐 Setting up web environment..."
@cp -n web/.env.example web/.env 2>/dev/null || echo "Web .env already exists"
@cd web && pnpm install
@cd web && pnpm build
@echo "✅ Web environment prepared (not started)"
# Step 3: Prepare API environment
@ -60,9 +61,8 @@ check:
@echo "✅ Code check complete"
lint:
@echo "🔧 Running ruff format, check with fixes, and import linter..."
@uv run --project api --dev sh -c 'ruff format ./api && ruff check --fix ./api'
@uv run --directory api --dev lint-imports
@echo "🔧 Running ruff format and check with fixes..."
@uv run --directory api --dev sh -c 'ruff format ./api && ruff check --fix ./api'
@echo "✅ Linting complete"
type-check:

View File

@ -40,18 +40,18 @@
<p align="center">
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./docs/zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="./docs/zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./docs/ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./docs/es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./docs/fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./docs/tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./docs/ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./docs/ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./docs/tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./docs/vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./docs/de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="./docs/bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./README_TW.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_DE.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
Dify is an open-source platform for developing LLM applications. Its intuitive interface combines agentic AI workflows, RAG pipelines, agent capabilities, model management, observability features, and more—allowing you to quickly move from prototype to production.
@ -63,7 +63,7 @@ Dify is an open-source platform for developing LLM applications. Its intuitive interface
> - CPU >= 2 Core
> - RAM >= 4 GiB
<br/>
</br>
The easiest way to start the Dify server is through [Docker Compose](docker/docker-compose.yaml). Before running Dify with the following commands, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
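For reference alongside that paragraph, the stock quick start is only a few commands; a minimal sketch, assuming the default layout where `docker/.env.example` is copied to `.env` (as the Advanced Setup notes below also imply):

```bash
# Minimal quick start (sketch): bring up the full stack with defaults.
cd docker
cp .env.example .env
docker compose up -d
```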
@ -109,15 +109,15 @@ All of Dify's offerings come with corresponding APIs, so you could effortlessly
## Using Dify
- **Cloud <br/>**
- **Cloud </br>**
We host a [Dify Cloud](https://dify.ai) service for anyone to try with zero setup. It provides all the capabilities of the self-deployed version, and includes 200 free GPT-4 calls in the sandbox plan.
- **Self-hosting Dify Community Edition<br/>**
- **Self-hosting Dify Community Edition</br>**
Quickly get Dify running in your environment with this [starter guide](#quick-start).
Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.
- **Dify for enterprise / organizations<br/>**
We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) to discuss enterprise needs. <br/>
- **Dify for enterprise / organizations</br>**
We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) to discuss enterprise needs. </br>
> For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding.
@ -129,18 +129,8 @@ Star Dify on GitHub and be instantly notified of new releases.
## Advanced Setup
### Custom configurations
If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
### Metrics Monitoring with Grafana
Import the dashboard to Grafana, using Dify's PostgreSQL database as data source, to monitor metrics in granularity of apps, tenants, messages, and more.
- [Grafana Dashboard by @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard)
### Deployment with Kubernetes
If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes.
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)

View File

@ -1,4 +1,4 @@
![cover-v5-optimized](../../images/GitHub_README_if.png)
![cover-v5-optimized](./images/GitHub_README_if.png)
<p align="center">
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
@ -35,19 +35,17 @@
</p>
<p align="center">
<a href="../../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="../zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="../zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="../ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="../es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="../fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="../tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="../ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="../ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="../tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="../vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="../de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="../bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
<div style="text-align: right;">
@ -99,7 +97,7 @@
</br>
أسهل طريقة لبدء تشغيل خادم Dify هي تشغيل ملف [docker-compose.yml](../../docker/docker-compose.yaml) الخاص بنا. قبل تشغيل أمر التثبيت، تأكد من تثبيت [Docker](https://docs.docker.com/get-docker/) و [Docker Compose](https://docs.docker.com/compose/install/) على جهازك:
أسهل طريقة لبدء تشغيل خادم Dify هي تشغيل ملف [docker-compose.yml](docker/docker-compose.yaml) الخاص بنا. قبل تشغيل أمر التثبيت، تأكد من تثبيت [Docker](https://docs.docker.com/get-docker/) و [Docker Compose](https://docs.docker.com/compose/install/) على جهازك:
```bash
cd docker
@ -113,15 +111,7 @@ docker compose up -d
## الخطوات التالية
إذا كنت بحاجة إلى تخصيص الإعدادات، فيرجى الرجوع إلى التعليقات في ملف [.env.example](../../docker/.env.example) وتحديث القيم المقابلة في ملف `.env`. بالإضافة إلى ذلك، قد تحتاج إلى إجراء تعديلات على ملف `docker-compose.yaml` نفسه، مثل تغيير إصدارات الصور أو تعيينات المنافذ أو نقاط تحميل وحدات التخزين، بناءً على بيئة النشر ومتطلباتك الخاصة. بعد إجراء أي تغييرات، يرجى إعادة تشغيل `docker-compose up -d`. يمكنك العثور على قائمة كاملة بمتغيرات البيئة المتاحة [هنا](https://docs.dify.ai/getting-started/install-self-hosted/environments).
### مراقبة المقاييس باستخدام Grafana
استيراد لوحة التحكم إلى Grafana، باستخدام قاعدة بيانات PostgreSQL الخاصة بـ Dify كمصدر للبيانات، لمراقبة المقاييس بدقة للتطبيقات والمستأجرين والرسائل وغير ذلك.
- [لوحة تحكم Grafana بواسطة @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard)
### النشر باستخدام Kubernetes
إذا كنت بحاجة إلى تخصيص الإعدادات، فيرجى الرجوع إلى التعليقات في ملف [.env.example](docker/.env.example) وتحديث القيم المقابلة في ملف `.env`. بالإضافة إلى ذلك، قد تحتاج إلى إجراء تعديلات على ملف `docker-compose.yaml` نفسه، مثل تغيير إصدارات الصور أو تعيينات المنافذ أو نقاط تحميل وحدات التخزين، بناءً على بيئة النشر ومتطلباتك الخاصة. بعد إجراء أي تغييرات، يرجى إعادة تشغيل `docker-compose up -d`. يمكنك العثور على قائمة كاملة بمتغيرات البيئة المتاحة [هنا](https://docs.dify.ai/getting-started/install-self-hosted/environments).
يوجد مجتمع خاص بـ [Helm Charts](https://helm.sh/) وملفات YAML التي تسمح بتنفيذ Dify على Kubernetes للنظام من الإيجابيات العلوية.
@ -195,4 +185,12 @@ docker compose up -d
## الرخصة
هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](../../LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية.
هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية.
## الكشف عن الأمان
لحماية خصوصيتك، يرجى تجنب نشر مشكلات الأمان على GitHub. بدلاً من ذلك، أرسل أسئلتك إلى <security@dify.ai> وسنقدم لك إجابة أكثر تفصيلاً.
## الرخصة
هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية.

View File

@ -1,4 +1,4 @@
![cover-v5-optimized](../../images/GitHub_README_if.png)
![cover-v5-optimized](./images/GitHub_README_if.png)
<p align="center">
📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">ডিফাই ওয়ার্কফ্লো ফাইল আপলোড পরিচিতি: গুগল নোটবুক-এলএম পডকাস্ট পুনর্নির্মাণ</a>
@ -39,19 +39,18 @@
</p>
<p align="center">
<a href="../../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="../zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="../zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="../ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="../es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="../fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="../tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="../ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="../ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="../tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="../vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="../de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="../bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_DE.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
ডিফাই একটি ওপেন-সোর্স LLM অ্যাপ ডেভেলপমেন্ট প্ল্যাটফর্ম। এটি ইন্টুইটিভ ইন্টারফেস, এজেন্টিক AI ওয়ার্কফ্লো, RAG পাইপলাইন, এজেন্ট ক্যাপাবিলিটি, মডেল ম্যানেজমেন্ট, মনিটরিং সুবিধা এবং আরও অনেক কিছু একত্রিত করে, যা দ্রুত প্রোটোটাইপ থেকে প্রোডাকশন পর্যন্ত নিয়ে যেতে সহায়তা করে।
@ -65,7 +64,7 @@
</br>
ডিফাই সার্ভার চালু করার সবচেয়ে সহজ উপায় [docker compose](../../docker/docker-compose.yaml) মাধ্যমে। নিম্নলিখিত কমান্ডগুলো ব্যবহার করে ডিফাই চালানোর আগে, নিশ্চিত করুন যে আপনার মেশিনে [Docker](https://docs.docker.com/get-docker/) এবং [Docker Compose](https://docs.docker.com/compose/install/) ইনস্টল করা আছে :
ডিফাই সার্ভার চালু করার সবচেয়ে সহজ উপায় [docker compose](docker/docker-compose.yaml) মাধ্যমে। নিম্নলিখিত কমান্ডগুলো ব্যবহার করে ডিফাই চালানোর আগে, নিশ্চিত করুন যে আপনার মেশিনে [Docker](https://docs.docker.com/get-docker/) এবং [Docker Compose](https://docs.docker.com/compose/install/) ইনস্টল করা আছে :
```bash
cd dify
@ -129,17 +128,9 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন
## Advanced Setup
যদি আপনার কনফিগারেশনটি কাস্টমাইজ করার প্রয়োজন হয়, তাহলে অনুগ্রহ করে আমাদের [.env.example](../../docker/.env.example) ফাইল দেখুন এবং আপনার `.env` ফাইলে সংশ্লিষ্ট মানগুলি আপডেট করুন। এছাড়াও, আপনার নির্দিষ্ট এনভায়রনমেন্ট এবং প্রয়োজনীয়তার উপর ভিত্তি করে আপনাকে `docker-compose.yaml` ফাইলে সমন্বয় করতে হতে পারে, যেমন ইমেজ ভার্সন পরিবর্তন করা, পোর্ট ম্যাপিং করা, অথবা ভলিউম মাউন্ট করা।
যদি আপনার কনফিগারেশনটি কাস্টমাইজ করার প্রয়োজন হয়, তাহলে অনুগ্রহ করে আমাদের [.env.example](docker/.env.example) ফাইল দেখুন এবং আপনার `.env` ফাইলে সংশ্লিষ্ট মানগুলি আপডেট করুন। এছাড়াও, আপনার নির্দিষ্ট এনভায়রনমেন্ট এবং প্রয়োজনীয়তার উপর ভিত্তি করে আপনাকে `docker-compose.yaml` ফাইলে সমন্বয় করতে হতে পারে, যেমন ইমেজ ভার্সন পরিবর্তন করা, পোর্ট ম্যাপিং করা, অথবা ভলিউম মাউন্ট করা।
যেকোনো পরিবর্তন করার পর, অনুগ্রহ করে `docker-compose up -d` পুনরায় চালান। ভেরিয়েবলের সম্পূর্ণ তালিকা [এখানে] (https://docs.dify.ai/getting-started/install-self-hosted/environments) খুঁজে পেতে পারেন।
### Grafana দিয়ে মেট্রিক্স মনিটরিং
Dify-এর PostgreSQL ডাটাবেসকে ডেটা সোর্স হিসাবে ব্যবহার করে, অ্যাপ, টেন্যান্ট, মেসেজ ইত্যাদির গ্র্যানুলারিটিতে মেট্রিক্স মনিটর করার জন্য Grafana-তে ড্যাশবোর্ড ইম্পোর্ট করুন।
- [@bowenliang123 কর্তৃক Grafana ড্যাশবোর্ড](https://github.com/bowenliang123/dify-grafana-dashboard)
### Kubernetes এর সাথে ডেপ্লয়মেন্ট
যদি আপনি একটি হাইলি এভেইলেবল সেটআপ কনফিগার করতে চান, তাহলে কমিউনিটি [Helm Charts](https://helm.sh/) এবং YAML ফাইল রয়েছে যা Dify কে Kubernetes-এ ডিপ্লয় করার প্রক্রিয়া বর্ণনা করে।
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
@ -184,7 +175,7 @@ Dify-এর PostgreSQL ডাটাবেসকে ডেটা সোর্স
## Contributing
যারা কোড অবদান রাখতে চান, তাদের জন্য আমাদের [অবদান নির্দেশিকা](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) দেখুন
যারা কোড অবদান রাখতে চান, তাদের জন্য আমাদের [অবদান নির্দেশিকা] দেখুন (https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)
একই সাথে, সোশ্যাল মিডিয়া এবং ইভেন্ট এবং কনফারেন্সে এটি শেয়ার করে Dify কে সমর্থন করুন।
> আমরা ম্যান্ডারিন বা ইংরেজি ছাড়া অন্য ভাষায় Dify অনুবাদ করতে সাহায্য করার জন্য অবদানকারীদের খুঁজছি। আপনি যদি সাহায্য করতে আগ্রহী হন, তাহলে আরও তথ্যের জন্য [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) দেখুন এবং আমাদের [ডিসকর্ড কমিউনিটি সার্ভার](https://discord.gg/8Tpq4AcN9c) এর `গ্লোবাল-ইউজারস` চ্যানেলে আমাদের একটি মন্তব্য করুন।
@ -212,4 +203,4 @@ Dify-এর PostgreSQL ডাটাবেসকে ডেটা সোর্স
## লাইসেন্স
এই রিপোজিটরিটি [ডিফাই ওপেন সোর্স লাইসেন্স](../../LICENSE) এর অধিনে , যা মূলত অ্যাপাচি ২., তবে কিছু অতিরিক্ত বিধিনিষেধ রয়েছে।
এই রিপোজিটরিটি [ডিফাই ওপেন সোর্স লাইসেন্স](LICENSE) এর অধিনে , যা মূলত অ্যাপাচি ২., তবে কিছু অতিরিক্ত বিধিনিষেধ রয়েছে।

View File

@ -1,4 +1,4 @@
![cover-v5-optimized](../../images/GitHub_README_if.png)
![cover-v5-optimized](./images/GitHub_README_if.png)
<div align="center">
<a href="https://cloud.dify.ai">Dify 云服务</a> ·
@ -35,19 +35,17 @@
</p>
<div align="center">
<a href="../../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="../zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="../zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="../ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="../es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="../fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="../tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="../ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="../ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="../tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="../vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="../de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="../bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</div>
#
@ -113,7 +111,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI
### 快速启动
启动 Dify 服务器的最简单方法是运行我们的 [docker-compose.yml](../../docker/docker-compose.yaml) 文件。在运行安装命令之前,请确保您的机器上安装了 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/)
启动 Dify 服务器的最简单方法是运行我们的 [docker-compose.yml](docker/docker-compose.yaml) 文件。在运行安装命令之前,请确保您的机器上安装了 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/)
```bash
cd docker
@ -125,13 +123,7 @@ docker compose up -d
### 自定义配置
如果您需要自定义配置,请参考 [.env.example](../../docker/.env.example) 文件中的注释,并更新 `.env` 文件中对应的值。此外,您可能需要根据您的具体部署环境和需求对 `docker-compose.yaml` 文件本身进行调整,例如更改镜像版本、端口映射或卷挂载。完成任何更改后,请重新运行 `docker-compose up -d`。您可以在[此处](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用环境变量的完整列表。
### 使用 Grafana 进行指标监控
将仪表板导入 Grafana使用 Dify 的 PostgreSQL 数据库作为数据源,以监控应用、租户、消息等粒度的指标。
- [由 @bowenliang123 提供的 Grafana 仪表板](https://github.com/bowenliang123/dify-grafana-dashboard)
如果您需要自定义配置,请参考 [.env.example](docker/.env.example) 文件中的注释,并更新 `.env` 文件中对应的值。此外,您可能需要根据您的具体部署环境和需求对 `docker-compose.yaml` 文件本身进行调整,例如更改镜像版本、端口映射或卷挂载。完成任何更改后,请重新运行 `docker-compose up -d`。您可以在[此处](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用环境变量的完整列表。
#### 使用 Helm Chart 或 Kubernetes 资源清单YAML部署
@ -188,7 +180,7 @@ docker compose up -d
## Contributing
对于那些想要贡献代码的人,请参阅我们的[贡献指南](./CONTRIBUTING.md)。
对于那些想要贡献代码的人,请参阅我们的[贡献指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_CN.md)。
同时,请考虑通过社交媒体、活动和会议来支持 Dify 的分享。
> 我们正在寻找贡献者来帮助将 Dify 翻译成除了中文和英文之外的其他语言。如果您有兴趣帮助,请参阅我们的[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)获取更多信息,并在我们的[Discord 社区服务器](https://discord.gg/8Tpq4AcN9c)的`global-users`频道中留言。
@ -204,7 +196,7 @@ docker compose up -d
我们欢迎您为 Dify 做出贡献,以帮助改善 Dify。包括提交代码、问题、新想法或分享您基于 Dify 创建的有趣且有用的 AI 应用程序。同时,我们也欢迎您在不同的活动、会议和社交媒体上分享 Dify。
- [GitHub Discussion](https://github.com/langgenius/dify/discussions). 👉:分享您的应用程序并与社区交流。
- [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](./CONTRIBUTING.md)。
- [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](CONTRIBUTING.md)。
- [电子邮件支持](mailto:hello@dify.ai?subject=%5BGitHub%5DQuestions%20About%20Dify)。👉:关于使用 Dify.AI 的问题。
- [Discord](https://discord.gg/FngNHpbcY7)。👉:分享您的应用程序并与社区交流。
- [X(Twitter)](https://twitter.com/dify_ai)。👉:分享您的应用程序并与社区交流。
@ -216,4 +208,4 @@ docker compose up -d
## License
本仓库遵循 [Dify Open Source License](../../LICENSE) 开源协议,该许可证本质上是 Apache 2.0,但有一些额外的限制。
本仓库遵循 [Dify Open Source License](LICENSE) 开源协议,该许可证本质上是 Apache 2.0,但有一些额外的限制。

View File

@ -1,4 +1,4 @@
![cover-v5-optimized](../../images/GitHub_README_if.png)
![cover-v5-optimized](./images/GitHub_README_if.png)
<p align="center">
📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">Einführung in Dify Workflow File Upload: Google NotebookLM Podcast nachbilden</a>
@ -39,19 +39,18 @@
</p>
<p align="center">
<a href="../../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="../zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="../zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="../ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="../es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="../fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="../tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="../ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="../ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="../tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="../vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="../de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="../bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_DE.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
Dify ist eine Open-Source-Plattform zur Entwicklung von LLM-Anwendungen. Ihre intuitive Benutzeroberfläche vereint agentenbasierte KI-Workflows, RAG-Pipelines, Agentenfunktionen, Modellverwaltung, Überwachungsfunktionen und mehr, sodass Sie schnell von einem Prototyp in die Produktion übergehen können.
@ -65,7 +64,7 @@ Dify ist eine Open-Source-Plattform zur Entwicklung von LLM-Anwendungen. Ihre intuitive
</br>
Der einfachste Weg, den Dify-Server zu starten, ist über [docker compose](../../docker/docker-compose.yaml). Stellen Sie vor dem Ausführen von Dify mit den folgenden Befehlen sicher, dass [Docker](https://docs.docker.com/get-docker/) und [Docker Compose](https://docs.docker.com/compose/install/) auf Ihrem System installiert sind:
Der einfachste Weg, den Dify-Server zu starten, ist über [docker compose](docker/docker-compose.yaml). Stellen Sie vor dem Ausführen von Dify mit den folgenden Befehlen sicher, dass [Docker](https://docs.docker.com/get-docker/) und [Docker Compose](https://docs.docker.com/compose/install/) auf Ihrem System installiert sind:
```bash
cd dify
@ -128,15 +127,7 @@ Star Dify auf GitHub und lassen Sie sich sofort über neue Releases benachrichtigen
## Erweiterte Einstellungen
Falls Sie die Konfiguration anpassen müssen, lesen Sie bitte die Kommentare in unserer [.env.example](../../docker/.env.example)-Datei und aktualisieren Sie die entsprechenden Werte in Ihrer `.env`-Datei. Zusätzlich müssen Sie eventuell Anpassungen an der `docker-compose.yaml`-Datei vornehmen, wie zum Beispiel das Ändern von Image-Versionen, Portzuordnungen oder Volumen-Mounts, je nach Ihrer spezifischen Einsatzumgebung und Ihren Anforderungen. Nachdem Sie Änderungen vorgenommen haben, starten Sie `docker-compose up -d` erneut. Eine vollständige Liste der verfügbaren Umgebungsvariablen finden Sie [hier](https://docs.dify.ai/getting-started/install-self-hosted/environments).
### Metriküberwachung mit Grafana
Importieren Sie das Dashboard in Grafana, wobei Sie die PostgreSQL-Datenbank von Dify als Datenquelle verwenden, um Metriken in der Granularität von Apps, Mandanten, Nachrichten und mehr zu überwachen.
- [Grafana-Dashboard von @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard)
### Bereitstellung mit Kubernetes
Falls Sie die Konfiguration anpassen müssen, lesen Sie bitte die Kommentare in unserer [.env.example](docker/.env.example)-Datei und aktualisieren Sie die entsprechenden Werte in Ihrer `.env`-Datei. Zusätzlich müssen Sie eventuell Anpassungen an der `docker-compose.yaml`-Datei vornehmen, wie zum Beispiel das Ändern von Image-Versionen, Portzuordnungen oder Volumen-Mounts, je nach Ihrer spezifischen Einsatzumgebung und Ihren Anforderungen. Nachdem Sie Änderungen vorgenommen haben, starten Sie `docker-compose up -d` erneut. Eine vollständige Liste der verfügbaren Umgebungsvariablen finden Sie [hier](https://docs.dify.ai/getting-started/install-self-hosted/environments).
Falls Sie eine hochverfügbare Konfiguration einrichten möchten, gibt es von der Community bereitgestellte [Helm Charts](https://helm.sh/) und YAML-Dateien, die es ermöglichen, Dify auf Kubernetes bereitzustellen.
@ -182,14 +173,14 @@ Stellen Sie Dify mit einem Klick in AKS bereit, indem Sie [Azure Devops Pipeline
## Contributing
Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](./CONTRIBUTING.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren.
Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_DE.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren.
> Wir suchen Mitwirkende, die dabei helfen, Dify in weitere Sprachen zu übersetzen außer Mandarin oder Englisch. Wenn Sie Interesse an einer Mitarbeit haben, lesen Sie bitte die [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) für weitere Informationen und hinterlassen Sie einen Kommentar im `global-users`-Kanal unseres [Discord Community Servers](https://discord.gg/8Tpq4AcN9c).
## Gemeinschaft & Kontakt
- [GitHub Discussion](https://github.com/langgenius/dify/discussions). Am besten geeignet für: den Austausch von Feedback und das Stellen von Fragen.
- [GitHub Issues](https://github.com/langgenius/dify/issues). Am besten für: Fehler, auf die Sie bei der Verwendung von Dify.AI stoßen, und Funktionsvorschläge. Siehe unseren [Contribution Guide](./CONTRIBUTING.md).
- [GitHub Issues](https://github.com/langgenius/dify/issues). Am besten für: Fehler, auf die Sie bei der Verwendung von Dify.AI stoßen, und Funktionsvorschläge. Siehe unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
- [Discord](https://discord.gg/FngNHpbcY7). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community.
- [X(Twitter)](https://twitter.com/dify_ai). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community.
@ -209,4 +200,4 @@ Um Ihre Privatsphäre zu schützen, vermeiden Sie es bitte, Sicherheitsprobleme
## Lizenz
Dieses Repository steht unter der [Dify Open Source License](../../LICENSE), die im Wesentlichen Apache 2.0 mit einigen zusätzlichen Einschränkungen ist.
Dieses Repository steht unter der [Dify Open Source License](LICENSE), die im Wesentlichen Apache 2.0 mit einigen zusätzlichen Einschränkungen ist.

View File

@ -1,4 +1,4 @@
![cover-v5-optimized](../../images/GitHub_README_if.png)
![cover-v5-optimized](./images/GitHub_README_if.png)
<p align="center">
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
@ -35,19 +35,17 @@
</p>
<p align="center">
<a href="../../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="../zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="../zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="../ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="../es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="../fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="../tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="../ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="../ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="../tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="../vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="../de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="../bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
#
@ -110,7 +108,7 @@ Dale estrella a Dify en GitHub y serás notificado instantáneamente de las nuevas
</br>
La forma más fácil de iniciar el servidor de Dify es ejecutar nuestro archivo [docker-compose.yml](../../docker/docker-compose.yaml). Antes de ejecutar el comando de instalación, asegúrate de que [Docker](https://docs.docker.com/get-docker/) y [Docker Compose](https://docs.docker.com/compose/install/) estén instalados en tu máquina:
La forma más fácil de iniciar el servidor de Dify es ejecutar nuestro archivo [docker-compose.yml](docker/docker-compose.yaml). Antes de ejecutar el comando de instalación, asegúrate de que [Docker](https://docs.docker.com/get-docker/) y [Docker Compose](https://docs.docker.com/compose/install/) estén instalados en tu máquina:
```bash
cd docker
@ -124,18 +122,10 @@ Después de ejecutarlo, puedes acceder al panel de control de Dify en tu navegador
## Próximos pasos
Si necesita personalizar la configuración, consulte los comentarios en nuestro archivo [.env.example](../../docker/.env.example) y actualice los valores correspondientes en su archivo `.env`. Además, es posible que deba realizar ajustes en el propio archivo `docker-compose.yaml`, como cambiar las versiones de las imágenes, las asignaciones de puertos o los montajes de volúmenes, según su entorno de implementación y requisitos específicos. Después de realizar cualquier cambio, vuelva a ejecutar `docker-compose up -d`. Puede encontrar la lista completa de variables de entorno disponibles [aquí](https://docs.dify.ai/getting-started/install-self-hosted/environments).
Si necesita personalizar la configuración, consulte los comentarios en nuestro archivo [.env.example](docker/.env.example) y actualice los valores correspondientes en su archivo `.env`. Además, es posible que deba realizar ajustes en el propio archivo `docker-compose.yaml`, como cambiar las versiones de las imágenes, las asignaciones de puertos o los montajes de volúmenes, según su entorno de implementación y requisitos específicos. Después de realizar cualquier cambio, vuelva a ejecutar `docker-compose up -d`. Puede encontrar la lista completa de variables de entorno disponibles [aquí](https://docs.dify.ai/getting-started/install-self-hosted/environments).
. Después de realizar los cambios, ejecuta `docker-compose up -d` nuevamente. Puedes ver la lista completa de variables de entorno [aquí](https://docs.dify.ai/getting-started/install-self-hosted/environments).
### Monitorización de Métricas con Grafana
Importe el panel a Grafana, utilizando la base de datos PostgreSQL de Dify como fuente de datos, para monitorizar métricas en granularidad de aplicaciones, inquilinos, mensajes y más.
- [Panel de Grafana por @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard)
### Implementación con Kubernetes
Si desea configurar una configuración de alta disponibilidad, la comunidad proporciona [Gráficos Helm](https://helm.sh/) y archivos YAML, a través de los cuales puede desplegar Dify en Kubernetes.
- [Gráfico Helm por @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
@ -180,7 +170,7 @@ Implementa Dify en AKS con un clic usando [Azure Devops Pipeline Helm Chart by @
## Contribuir
Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](./CONTRIBUTING.md).
Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_ES.md).
Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en eventos y conferencias.
> Estamos buscando colaboradores para ayudar con la traducción de Dify a idiomas que no sean el mandarín o el inglés. Si estás interesado en ayudar, consulta el [README de i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para obtener más información y déjanos un comentario en el canal `global-users` de nuestro [Servidor de Comunidad en Discord](https://discord.gg/8Tpq4AcN9c).
@ -194,7 +184,7 @@ Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en
## Comunidad y Contacto
- [Discusión en GitHub](https://github.com/langgenius/dify/discussions). Lo mejor para: compartir comentarios y hacer preguntas.
- [Reporte de problemas en GitHub](https://github.com/langgenius/dify/issues). Lo mejor para: errores que encuentres usando Dify.AI y propuestas de características. Consulta nuestra [Guía de contribución](./CONTRIBUTING.md).
- [Reporte de problemas en GitHub](https://github.com/langgenius/dify/issues). Lo mejor para: errores que encuentres usando Dify.AI y propuestas de características. Consulta nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
- [Discord](https://discord.gg/FngNHpbcY7). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad.
- [X(Twitter)](https://twitter.com/dify_ai). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad.
@ -208,4 +198,12 @@ Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En
## Licencia
Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](../../LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales.
Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales.
## Divulgación de Seguridad
Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En su lugar, envía tus preguntas a security@dify.ai y te proporcionaremos una respuesta más detallada.
## Licencia
Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales.

View File

@ -1,4 +1,4 @@
![cover-v5-optimized](../../images/GitHub_README_if.png)
![cover-v5-optimized](./images/GitHub_README_if.png)
<p align="center">
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
@ -35,19 +35,17 @@
</p>
<p align="center">
<a href="../../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="../zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="../zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="../ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="../es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="../fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="../tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="../ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="../ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="../tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="../vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="../de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="../bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
#
@ -110,7 +108,7 @@ Mettez une étoile à Dify sur GitHub et soyez instantanément informé des nouv
</br>
La manière la plus simple de démarrer le serveur Dify est d'exécuter notre fichier [docker-compose.yml](../../docker/docker-compose.yaml). Avant d'exécuter la commande d'installation, assurez-vous que [Docker](https://docs.docker.com/get-docker/) et [Docker Compose](https://docs.docker.com/compose/install/) sont installés sur votre machine:
La manière la plus simple de démarrer le serveur Dify est d'exécuter notre fichier [docker-compose.yml](docker/docker-compose.yaml). Avant d'exécuter la commande d'installation, assurez-vous que [Docker](https://docs.docker.com/get-docker/) et [Docker Compose](https://docs.docker.com/compose/install/) sont installés sur votre machine:
```bash
cd docker
@ -124,15 +122,7 @@ Après l'exécution, vous pouvez accéder au tableau de bord Dify dans votre nav
## Prochaines étapes
Si vous devez personnaliser la configuration, veuillez vous référer aux commentaires dans notre fichier [.env.example](../../docker/.env.example) et mettre à jour les valeurs correspondantes dans votre fichier `.env`. De plus, vous devrez peut-être apporter des modifications au fichier `docker-compose.yaml` lui-même, comme changer les versions d'image, les mappages de ports ou les montages de volumes, en fonction de votre environnement de déploiement et de vos exigences spécifiques. Après avoir effectué des modifications, veuillez réexécuter `docker-compose up -d`. Vous pouvez trouver la liste complète des variables d'environnement disponibles [ici](https://docs.dify.ai/getting-started/install-self-hosted/environments).
### Surveillance des Métriques avec Grafana
Importez le tableau de bord dans Grafana, en utilisant la base de données PostgreSQL de Dify comme source de données, pour surveiller les métriques avec une granularité d'applications, de locataires, de messages et plus.
- [Tableau de bord Grafana par @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard)
### Déploiement avec Kubernetes
Si vous devez personnaliser la configuration, veuillez vous référer aux commentaires dans notre fichier [.env.example](docker/.env.example) et mettre à jour les valeurs correspondantes dans votre fichier `.env`. De plus, vous devrez peut-être apporter des modifications au fichier `docker-compose.yaml` lui-même, comme changer les versions d'image, les mappages de ports ou les montages de volumes, en fonction de votre environnement de déploiement et de vos exigences spécifiques. Après avoir effectué des modifications, veuillez réexécuter `docker-compose up -d`. Vous pouvez trouver la liste complète des variables d'environnement disponibles [ici](https://docs.dify.ai/getting-started/install-self-hosted/environments).
Si vous souhaitez configurer une configuration haute disponibilité, la communauté fournit des [Helm Charts](https://helm.sh/) et des fichiers YAML, à travers lesquels vous pouvez déployer Dify sur Kubernetes.
@ -178,7 +168,7 @@ Déployez Dify sur AKS en un clic en utilisant [Azure Devops Pipeline Helm Chart
## Contribuer
Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](./CONTRIBUTING.md).
Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_FR.md).
Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur les réseaux sociaux et lors d'événements et de conférences.
> Nous recherchons des contributeurs pour aider à traduire Dify dans des langues autres que le mandarin ou l'anglais. Si vous êtes intéressé à aider, veuillez consulter le [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) pour plus d'informations, et laissez-nous un commentaire dans le canal `global-users` de notre [Serveur communautaire Discord](https://discord.gg/8Tpq4AcN9c).
@ -192,7 +182,7 @@ Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur le
## Communauté & Contact
- [Discussion GitHub](https://github.com/langgenius/dify/discussions). Meilleur pour: partager des commentaires et poser des questions.
- [Problèmes GitHub](https://github.com/langgenius/dify/issues). Meilleur pour: les bogues que vous rencontrez en utilisant Dify.AI et les propositions de fonctionnalités. Consultez notre [Guide de contribution](./CONTRIBUTING.md).
- [Problèmes GitHub](https://github.com/langgenius/dify/issues). Meilleur pour: les bogues que vous rencontrez en utilisant Dify.AI et les propositions de fonctionnalités. Consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
- [Discord](https://discord.gg/FngNHpbcY7). Meilleur pour: partager vos applications et passer du temps avec la communauté.
- [X(Twitter)](https://twitter.com/dify_ai). Meilleur pour: partager vos applications et passer du temps avec la communauté.
@ -206,4 +196,12 @@ Pour protéger votre vie privée, veuillez éviter de publier des problèmes de
## Licence
Ce référentiel est disponible sous la [Licence open source Dify](../../LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires.
Ce référentiel est disponible sous la [Licence open source Dify](LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires.
## Divulgation de sécurité
Pour protéger votre vie privée, veuillez éviter de publier des problèmes de sécurité sur GitHub. Au lieu de cela, envoyez vos questions à security@dify.ai et nous vous fournirons une réponse plus détaillée.
## Licence
Ce référentiel est disponible sous la [Licence open source Dify](LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires.

View File

@ -1,4 +1,4 @@
![cover-v5-optimized](../../images/GitHub_README_if.png)
![cover-v5-optimized](./images/GitHub_README_if.png)
<p align="center">
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
@ -35,19 +35,17 @@
</p>
<p align="center">
<a href="../../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="../zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="../zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="../ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="../es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="../fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="../tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="../ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="../ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="../tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="../vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="../de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="../bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
#
@ -111,7 +109,7 @@ GitHub上でDifyにスターを付けることで、Difyに関する新しいニ
</br>
Difyサーバーを起動する最も簡単な方法は、[docker-compose.yml](../../docker/docker-compose.yaml)ファイルを実行することです。インストールコマンドを実行する前に、マシンに[Docker](https://docs.docker.com/get-docker/)と[Docker Compose](https://docs.docker.com/compose/install/)がインストールされていることを確認してください。
Difyサーバーを起動する最も簡単な方法は、[docker-compose.yml](docker/docker-compose.yaml)ファイルを実行することです。インストールコマンドを実行する前に、マシンに[Docker](https://docs.docker.com/get-docker/)と[Docker Compose](https://docs.docker.com/compose/install/)がインストールされていることを確認してください。
```bash
cd docker
@ -125,15 +123,7 @@ docker compose up -d
## 次のステップ
設定をカスタマイズする必要がある場合は、[.env.example](../../docker/.env.example) ファイルのコメントを参照し、`.env` ファイルの対応する値を更新してください。さらに、デプロイ環境や要件に応じて、`docker-compose.yaml` ファイル自体を調整する必要がある場合があります。たとえば、イメージのバージョン、ポートのマッピング、ボリュームのマウントなどを変更します。変更を加えた後は、`docker-compose up -d` を再実行してください。利用可能な環境変数の全一覧は、[こちら](https://docs.dify.ai/getting-started/install-self-hosted/environments)で確認できます。
### Grafanaを使用したメトリクス監視
Grafanaにダッシュボードをインポートし、DifyのPostgreSQLデータベースをデータソースとして使用して、アプリ、テナント、メッセージなどの粒度でメトリクスを監視します。
- [@bowenliang123によるGrafanaダッシュボード](https://github.com/bowenliang123/dify-grafana-dashboard)
### Kubernetesでのデプロイ
設定をカスタマイズする必要がある場合は、[.env.example](docker/.env.example) ファイルのコメントを参照し、`.env` ファイルの対応する値を更新してください。さらに、デプロイ環境や要件に応じて、`docker-compose.yaml` ファイル自体を調整する必要がある場合があります。たとえば、イメージのバージョン、ポートのマッピング、ボリュームのマウントなどを変更します。変更を加えた後は、`docker-compose up -d` を再実行してください。利用可能な環境変数の全一覧は、[こちら](https://docs.dify.ai/getting-started/install-self-hosted/environments)で確認できます。
高可用性設定を設定する必要がある場合、コミュニティは[Helm Charts](https://helm.sh/)とYAMLファイルにより、DifyをKubernetesにデプロイすることができます。
@ -179,7 +169,7 @@ Grafanaにダッシュボードをインポートし、DifyのPostgreSQLデー
## 貢献
コードに貢献したい方は、[Contribution Guide](./CONTRIBUTING.md)を参照してください。
コードに貢献したい方は、[Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_JA.md)を参照してください。
同時に、DifyをSNSやイベント、カンファレンスで共有してサポートしていただけると幸いです。
> Difyを英語または中国語以外の言語に翻訳してくれる貢献者を募集しています。興味がある場合は、詳細については[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)を参照してください。また、[Discordコミュニティサーバー](https://discord.gg/8Tpq4AcN9c)の`global-users`チャンネルにコメントを残してください。
@ -193,10 +183,10 @@ Grafanaにダッシュボードをインポートし、DifyのPostgreSQLデー
## コミュニティ & お問い合わせ
- [GitHub Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。
- [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](./CONTRIBUTING.md)を参照してください
- [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](CONTRIBUTING_JA.md)を参照してください
- [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。
- [X(Twitter)](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。
## ライセンス
このリポジトリは、Dify Open Source License にいくつかの追加制限を加えた[Difyオープンソースライセンス](../../LICENSE)の下で利用可能です。
このリポジトリは、Dify Open Source License にいくつかの追加制限を加えた[Difyオープンソースライセンス](LICENSE)の下で利用可能です。

View File

@ -1,4 +1,4 @@
![cover-v5-optimized](../../images/GitHub_README_if.png)
![cover-v5-optimized](./images/GitHub_README_if.png)
<p align="center">
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
@ -35,19 +35,17 @@
</p>
<p align="center">
<a href="../../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="../zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="../zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="../ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="../es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="../fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="../tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="../ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="../ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="../tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="../vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="../de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="../bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
#
@ -110,7 +108,7 @@ Star Dify on GitHub and be instantly notified of new releases.
</br>
The easiest way to start the Dify server is to run our [docker-compose.yml](../../docker/docker-compose.yaml) file. Before running the installation command, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
The easiest way to start the Dify server is to run our [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
```bash
cd docker
@ -124,7 +122,7 @@ After running, you can access the Dify dashboard in your browser at [http://loca
## Next steps
If you need to customize the configuration, please refer to the comments in our [.env.example](../../docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
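For illustration, a minimal customization pass over the defaults might look like the sketch below; `EXPOSE_NGINX_PORT` is used here as an assumed example variable, so check `.env.example` for the authoritative names:
```bash
cd docker
cp .env.example .env
# hypothetical override: expose the web UI on port 8080 instead of the default
echo "EXPOSE_NGINX_PORT=8080" >> .env
# re-create the containers so the new values take effect
docker compose up -d
```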
If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes.
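As a rough sketch of what a chart-based install could look like (the repository URL and chart name below are placeholders, not an official distribution; consult the community chart's own README):
```bash
# add the community chart repository (placeholder URL)
helm repo add dify-community https://example.com/charts
helm repo update
# install into a dedicated namespace, overriding values as needed
helm install dify dify-community/dify --namespace dify --create-namespace
```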
@ -183,7 +181,10 @@ At the same time, please consider supporting Dify by sharing it on social media
## Community & Contact
- [GitHub Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
- [GitHub Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
- [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
- [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
- [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
@ -198,4 +199,4 @@ To protect your privacy, please avoid posting security issues on GitHub. Instead
## License
This repository is available under the [Dify Open Source License](../../LICENSE), which is essentially Apache 2.0 with a few additional restrictions.
This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.

View File

@ -1,4 +1,4 @@
![cover-v5-optimized](../../images/GitHub_README_if.png)
![cover-v5-optimized](./images/GitHub_README_if.png)
<p align="center">
<a href="https://cloud.dify.ai">Dify 클라우드</a> ·
@ -35,19 +35,17 @@
</p>
<p align="center">
<a href="../../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="../zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="../zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="../ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="../es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="../fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="../tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="../ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="../ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="../tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="../vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="../de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="../bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
Dify는 오픈 소스 LLM 앱 개발 플랫폼입니다. 직관적인 인터페이스를 통해 AI 워크플로우, RAG 파이프라인, 에이전트 기능, 모델 관리, 관찰 기능 등을 결합하여 프로토타입에서 프로덕션까지 빠르게 전환할 수 있습니다. 주요 기능 목록은 다음과 같습니다:</br> </br>
@ -104,7 +102,7 @@ GitHub에서 Dify에 별표를 찍어 새로운 릴리스를 즉시 알림 받
</br>
Dify 서버를 시작하는 가장 쉬운 방법은 [docker-compose.yml](../../docker/docker-compose.yaml) 파일을 실행하는 것입니다. 설치 명령을 실행하기 전에 [Docker](https://docs.docker.com/get-docker/) 및 [Docker Compose](https://docs.docker.com/compose/install/)가 머신에 설치되어 있는지 확인하세요.
Dify 서버를 시작하는 가장 쉬운 방법은 [docker-compose.yml](docker/docker-compose.yaml) 파일을 실행하는 것입니다. 설치 명령을 실행하기 전에 [Docker](https://docs.docker.com/get-docker/) 및 [Docker Compose](https://docs.docker.com/compose/install/)가 머신에 설치되어 있는지 확인하세요.
```bash
cd docker
@ -118,15 +116,7 @@ docker compose up -d
## 다음 단계
구성을 사용자 정의해야 하는 경우 [.env.example](../../docker/.env.example) 파일의 주석을 참조하고 `.env` 파일에서 해당 값을 업데이트하십시오. 또한 특정 배포 환경 및 요구 사항에 따라 `docker-compose.yaml` 파일 자체를 조정해야 할 수도 있습니다. 예를 들어 이미지 버전, 포트 매핑 또는 볼륨 마운트를 변경합니다. 변경 한 후 `docker-compose up -d`를 다시 실행하십시오. 사용 가능한 환경 변수의 전체 목록은 [여기](https://docs.dify.ai/getting-started/install-self-hosted/environments)에서 찾을 수 있습니다.
### Grafana를 사용한 메트릭 모니터링
Dify의 PostgreSQL 데이터베이스를 데이터 소스로 사용하여 앱, 테넌트, 메시지 등에 대한 세분화된 메트릭을 모니터링하기 위해 대시보드를 Grafana로 가져옵니다.
- [@bowenliang123의 Grafana 대시보드](https://github.com/bowenliang123/dify-grafana-dashboard)
### Kubernetes를 통한 배포
구성을 사용자 정의해야 하는 경우 [.env.example](docker/.env.example) 파일의 주석을 참조하고 `.env` 파일에서 해당 값을 업데이트하십시오. 또한 특정 배포 환경 및 요구 사항에 따라 `docker-compose.yaml` 파일 자체를 조정해야 할 수도 있습니다. 예를 들어 이미지 버전, 포트 매핑 또는 볼륨 마운트를 변경합니다. 변경 한 후 `docker-compose up -d`를 다시 실행하십시오. 사용 가능한 환경 변수의 전체 목록은 [여기](https://docs.dify.ai/getting-started/install-self-hosted/environments)에서 찾을 수 있습니다.
Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했다는 커뮤니티가 제공하는 [Helm Charts](https://helm.sh/)와 YAML 파일이 존재합니다.
@ -172,7 +162,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했
## 기여
코드에 기여하고 싶은 분들은 [기여 가이드](./CONTRIBUTING.md)를 참조하세요.
코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_KR.md)를 참조하세요.
동시에 Dify를 소셜 미디어와 행사 및 컨퍼런스에 공유하여 지원하는 것을 고려해 주시기 바랍니다.
> 우리는 Dify를 중국어나 영어 이외의 언어로 번역하는 데 도움을 줄 수 있는 기여자를 찾고 있습니다. 도움을 주고 싶으시다면 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)에서 더 많은 정보를 확인하시고 [Discord 커뮤니티 서버](https://discord.gg/8Tpq4AcN9c)의 `global-users` 채널에 댓글을 남겨주세요.
@ -186,7 +176,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했
## 커뮤니티 & 연락처
- [GitHub 토론](https://github.com/langgenius/dify/discussions). 피드백 공유 및 질문하기에 적합합니다.
- [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](./CONTRIBUTING.md)를 참조하세요.
- [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요.
- [디스코드](https://discord.gg/FngNHpbcY7). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다.
- [트위터](https://twitter.com/dify_ai). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다.
@ -200,4 +190,4 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했
## 라이선스
이 저장소는 기본적으로 몇 가지 추가 제한 사항이 있는 Apache 2.0인 [Dify 오픈 소스 라이선스](../../LICENSE)에 따라 사용할 수 있습니다.
이 저장소는 기본적으로 몇 가지 추가 제한 사항이 있는 Apache 2.0인 [Dify 오픈 소스 라이선스](LICENSE)에 따라 사용할 수 있습니다.

View File

@ -1,4 +1,4 @@
![cover-v5-optimized](../../images/GitHub_README_if.png)
![cover-v5-optimized](./images/GitHub_README_if.png)
<p align="center">
📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">Introduzindo o Dify Workflow com Upload de Arquivo: Recrie o Podcast Google NotebookLM</a>
@ -39,20 +39,18 @@
</p>
<p align="center">
<a href="../../README.md"><img alt="README em Inglês" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="../zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="../zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="../ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="../es-ES/README.md"><img alt="README em Espanhol" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="../fr-FR/README.md"><img alt="README em Francês" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="../tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="../ko-KR/README.md"><img alt="README em Coreano" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="../ar-SA/README.md"><img alt="README em Árabe" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="../tr-TR/README.md"><img alt="README em Turco" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="../vi-VN/README.md"><img alt="README em Vietnamita" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="../pt-BR/README.md"><img alt="README em Português - BR" src="https://img.shields.io/badge/Portugu%C3%AAs-BR?style=flat&label=BR&color=d9d9d9"></a>
<a href="../de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="../bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./README.md"><img alt="README em Inglês" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README em Espanhol" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README em Francês" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README em Coreano" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README em Árabe" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="README em Turco" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README em Vietnamita" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_PT.md"><img alt="README em Português - BR" src="https://img.shields.io/badge/Portugu%C3%AAs-BR?style=flat&label=BR&color=d9d9d9"></a>
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
Dify é uma plataforma de desenvolvimento de aplicativos LLM de código aberto. Sua interface intuitiva combina workflow de IA, pipeline RAG, capacidades de agente, gerenciamento de modelos, recursos de observabilidade e muito mais, permitindo que você vá rapidamente do protótipo à produção. Aqui está uma lista das principais funcionalidades:
@ -110,7 +108,7 @@ Dê uma estrela no Dify no GitHub e seja notificado imediatamente sobre novos la
</br>
A maneira mais fácil de iniciar o servidor Dify é executar nosso arquivo [docker-compose.yml](../../docker/docker-compose.yaml). Antes de rodar o comando de instalação, certifique-se de que o [Docker](https://docs.docker.com/get-docker/) e o [Docker Compose](https://docs.docker.com/compose/install/) estão instalados na sua máquina:
A maneira mais fácil de iniciar o servidor Dify é executar nosso arquivo [docker-compose.yml](docker/docker-compose.yaml). Antes de rodar o comando de instalação, certifique-se de que o [Docker](https://docs.docker.com/get-docker/) e o [Docker Compose](https://docs.docker.com/compose/install/) estão instalados na sua máquina:
```bash
cd docker
@ -124,15 +122,7 @@ Após a execução, você pode acessar o painel do Dify no navegador em [http://
## Próximos passos
Se precisar personalizar a configuração, consulte os comentários no nosso arquivo [.env.example](../../docker/.env.example) e atualize os valores correspondentes no seu arquivo `.env`. Além disso, talvez seja necessário fazer ajustes no próprio arquivo `docker-compose.yaml`, como alterar versões de imagem, mapeamentos de portas ou montagens de volumes, com base no seu ambiente de implantação específico e nas suas necessidades. Após fazer quaisquer alterações, execute novamente `docker-compose up -d`. Você pode encontrar a lista completa de variáveis de ambiente disponíveis [aqui](https://docs.dify.ai/getting-started/install-self-hosted/environments).
### Monitoramento de Métricas com Grafana
Importe o dashboard para o Grafana, usando o banco de dados PostgreSQL do Dify como fonte de dados, para monitorar métricas na granularidade de aplicativos, inquilinos, mensagens e muito mais.
- [Dashboard do Grafana por @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard)
### Implantação com Kubernetes
Se precisar personalizar a configuração, consulte os comentários no nosso arquivo [.env.example](docker/.env.example) e atualize os valores correspondentes no seu arquivo `.env`. Além disso, talvez seja necessário fazer ajustes no próprio arquivo `docker-compose.yaml`, como alterar versões de imagem, mapeamentos de portas ou montagens de volumes, com base no seu ambiente de implantação específico e nas suas necessidades. Após fazer quaisquer alterações, execute novamente `docker-compose up -d`. Você pode encontrar a lista completa de variáveis de ambiente disponíveis [aqui](https://docs.dify.ai/getting-started/install-self-hosted/environments).
Se deseja configurar uma instalação de alta disponibilidade, há [Helm Charts](https://helm.sh/) e arquivos YAML contribuídos pela comunidade que permitem a implantação do Dify no Kubernetes.
@ -178,7 +168,7 @@ Implante o Dify no AKS com um clique usando [Azure Devops Pipeline Helm Chart by
## Contribuindo
Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](./CONTRIBUTING.md).
Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_PT.md).
Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em eventos e conferências.
> Estamos buscando contribuidores para ajudar na tradução do Dify para idiomas além de Mandarim e Inglês. Se você tiver interesse em ajudar, consulte o [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para mais informações e deixe-nos um comentário no canal `global-users` em nosso [Servidor da Comunidade no Discord](https://discord.gg/8Tpq4AcN9c).
@ -192,7 +182,7 @@ Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em
## Comunidade e contato
- [Discussões no GitHub](https://github.com/langgenius/dify/discussions). Melhor para: compartilhar feedback e fazer perguntas.
- [Problemas no GitHub](https://github.com/langgenius/dify/issues). Melhor para: relatar bugs encontrados no Dify.AI e propor novos recursos. Veja nosso [Guia de Contribuição](./CONTRIBUTING.md).
- [Problemas no GitHub](https://github.com/langgenius/dify/issues). Melhor para: relatar bugs encontrados no Dify.AI e propor novos recursos. Veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
- [Discord](https://discord.gg/FngNHpbcY7). Melhor para: compartilhar suas aplicações e interagir com a comunidade.
- [X(Twitter)](https://twitter.com/dify_ai). Melhor para: compartilhar suas aplicações e interagir com a comunidade.
@ -206,4 +196,4 @@ Para proteger sua privacidade, evite postar problemas de segurança no GitHub. E
## Licença
Este repositório está disponível sob a [Licença de Código Aberto Dify](../../LICENSE), que é essencialmente Apache 2.0 com algumas restrições adicionais.
Este repositório está disponível sob a [Licença de Código Aberto Dify](LICENSE), que é essencialmente Apache 2.0 com algumas restrições adicionais.

View File

@ -1,4 +1,4 @@
![cover-v5-optimized](../../images/GitHub_README_if.png)
![cover-v5-optimized](./images/GitHub_README_if.png)
<p align="center">
📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">Predstavljamo nalaganje datotek Dify Workflow: znova ustvarite Google NotebookLM Podcast</a>
@ -36,20 +36,18 @@
</p>
<p align="center">
<a href="../../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="../zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="../zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="../ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="../es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="../fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="../tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="../ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="../ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="../tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="../vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="../sl-SI/README.md"><img alt="README Slovenščina" src="https://img.shields.io/badge/Sloven%C5%A1%C4%8Dina-d9d9d9"></a>
<a href="../de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="../bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_SI.md"><img alt="README Slovenščina" src="https://img.shields.io/badge/Sloven%C5%A1%C4%8Dina-d9d9d9"></a>
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
Dify je odprtokodna platforma za razvoj aplikacij LLM. Njegov intuitivni vmesnik združuje agentski potek dela z umetno inteligenco, cevovod RAG, zmogljivosti agentov, upravljanje modelov, funkcije opazovanja in več, kar vam omogoča hiter prehod od prototipa do proizvodnje.
@ -128,14 +126,6 @@ Star Dify on GitHub and be instantly notified of new releases.
Če morate prilagoditi konfiguracijo, si oglejte komentarje v naši datoteki .env.example in posodobite ustrezne vrednosti v svoji .env datoteki. Poleg tega boste morda morali prilagoditi samo datoteko docker-compose.yaml, na primer spremeniti različice slike, preslikave vrat ali namestitve nosilca, glede na vaše specifično okolje in zahteve za uvajanje. Po kakršnih koli spremembah ponovno zaženite docker-compose up -d. Celoten seznam razpoložljivih spremenljivk okolja najdete tukaj.
### Spremljanje metrik z Grafana
Uvoz nadzorne plošče v Grafana, z uporabo Difyjeve PostgreSQL baze podatkov kot vir podatkov, za spremljanje metrike glede na podrobnost aplikacij, najemnikov, sporočil in drugega.
- [Nadzorna plošča Grafana avtorja @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard)
### Namestitev s Kubernetes
Če želite konfigurirati visoko razpoložljivo nastavitev, so na voljo Helm Charts in datoteke YAML, ki jih prispeva skupnost, ki omogočajo uvedbo Difyja v Kubernetes.
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
@ -179,7 +169,7 @@ Z enim klikom namestite Dify v AKS z uporabo [Azure Devops Pipeline Helm Chart b
## Prispevam
Za tiste, ki bi radi prispevali kodo, si oglejte naš [vodnik za prispevke](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). Hkrati vas prosimo, da podprete Dify tako, da ga delite na družbenih medijih ter na dogodkih in konferencah.
Za tiste, ki bi radi prispevali kodo, si oglejte naš vodnik za prispevke. Hkrati vas prosimo, da podprete Dify tako, da ga delite na družbenih medijih ter na dogodkih in konferencah.
> Iščemo sodelavce za pomoč pri prevajanju Difyja v jezike, ki niso mandarinščina ali angleščina. Če želite pomagati, si oglejte i18n README za več informacij in nam pustite komentar v kanalu global-users našega strežnika skupnosti Discord.
@ -206,4 +196,4 @@ Zaradi zaščite vaše zasebnosti se izogibajte objavljanju varnostnih vprašanj
## Licenca
To skladišče je na voljo pod [odprtokodno licenco Dify](../../LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami.
To skladišče je na voljo pod [odprtokodno licenco Dify](LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami.

View File

@ -1,4 +1,4 @@
![cover-v5-optimized](../../images/GitHub_README_if.png)
![cover-v5-optimized](./images/GitHub_README_if.png)
<p align="center">
<a href="https://cloud.dify.ai">Dify Bulut</a> ·
@ -35,19 +35,17 @@
</p>
<p align="center">
<a href="../../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="../zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="../zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="../ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="../es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="../fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="../tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="../ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="../ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="../tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="../vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="../de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="../bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
Dify, açık kaynaklı bir LLM uygulama geliştirme platformudur. Sezgisel arayüzü, AI iş akışı, RAG pipeline'ı, ajan yetenekleri, model yönetimi, gözlemlenebilirlik özellikleri ve daha fazlasını birleştirerek, prototipten üretime hızlıca geçmenizi sağlar. İşte temel özelliklerin bir listesi:
@ -104,7 +102,7 @@ GitHub'da Dify'a yıldız verin ve yeni sürümlerden anında haberdar olun.
> - RAM >= 4GB
</br>
Dify sunucusunu başlatmanın en kolay yolu, [docker-compose.yml](../../docker/docker-compose.yaml) dosyamızı çalıştırmaktır. Kurulum komutunu çalıştırmadan önce, makinenizde [Docker](https://docs.docker.com/get-docker/) ve [Docker Compose](https://docs.docker.com/compose/install/)'un kurulu olduğundan emin olun:
Dify sunucusunu başlatmanın en kolay yolu, [docker-compose.yml](docker/docker-compose.yaml) dosyamızı çalıştırmaktır. Kurulum komutunu çalıştırmadan önce, makinenizde [Docker](https://docs.docker.com/get-docker/) ve [Docker Compose](https://docs.docker.com/compose/install/)'un kurulu olduğundan emin olun:
```bash
cd docker
@ -118,15 +116,7 @@ docker compose up -d
## Sonraki adımlar
Yapılandırmayı özelleştirmeniz gerekiyorsa, lütfen [.env.example](../../docker/.env.example) dosyamızdaki yorumlara bakın ve `.env` dosyanızdaki ilgili değerleri güncelleyin. Ayrıca, spesifik dağıtım ortamınıza ve gereksinimlerinize bağlı olarak `docker-compose.yaml` dosyasının kendisinde de, imaj sürümlerini, port eşlemelerini veya hacim bağlantılarını değiştirmek gibi ayarlamalar yapmanız gerekebilir. Herhangi bir değişiklik yaptıktan sonra, lütfen `docker-compose up -d` komutunu tekrar çalıştırın. Kullanılabilir tüm ortam değişkenlerinin tam listesini [burada](https://docs.dify.ai/getting-started/install-self-hosted/environments) bulabilirsiniz.
### Grafana ile Metrik İzleme
Uygulamalar, kiracılar, mesajlar ve daha fazlasının granularitesinde metrikleri izlemek için Dify'nin PostgreSQL veritabanını veri kaynağı olarak kullanarak panoyu Grafana'ya aktarın.
- [@bowenliang123 tarafından Grafana Panosu](https://github.com/bowenliang123/dify-grafana-dashboard)
### Kubernetes ile Dağıtım
Yapılandırmayı özelleştirmeniz gerekiyorsa, lütfen [.env.example](docker/.env.example) dosyamızdaki yorumlara bakın ve `.env` dosyanızdaki ilgili değerleri güncelleyin. Ayrıca, spesifik dağıtım ortamınıza ve gereksinimlerinize bağlı olarak `docker-compose.yaml` dosyasının kendisinde de, imaj sürümlerini, port eşlemelerini veya hacim bağlantılarını değiştirmek gibi ayarlamalar yapmanız gerekebilir. Herhangi bir değişiklik yaptıktan sonra, lütfen `docker-compose up -d` komutunu tekrar çalıştırın. Kullanılabilir tüm ortam değişkenlerinin tam listesini [burada](https://docs.dify.ai/getting-started/install-self-hosted/environments) bulabilirsiniz.
Yüksek kullanılabilirliğe sahip bir kurulum yapılandırmak isterseniz, Dify'ın Kubernetes üzerine dağıtılmasına olanak tanıyan topluluk katkılı [Helm Charts](https://helm.sh/) ve YAML dosyaları mevcuttur.
@ -171,7 +161,7 @@ Dify'ı bulut platformuna tek tıklamayla dağıtın [terraform](https://www.ter
## Katkıda Bulunma
Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](./CONTRIBUTING.md) bakabilirsiniz.
Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_TR.md) bakabilirsiniz.
Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda paylaşarak desteklemeyi düşünün.
> Dify'ı Mandarin veya İngilizce dışındaki dillere çevirmemize yardımcı olacak katkıda bulunanlara ihtiyacımız var. Yardımcı olmakla ilgileniyorsanız, lütfen daha fazla bilgi için [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) dosyasına bakın ve [Discord Topluluk Sunucumuzdaki](https://discord.gg/8Tpq4AcN9c) `global-users` kanalında bize bir yorum bırakın.
@ -185,7 +175,7 @@ Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda p
## Topluluk & iletişim
- [GitHub Tartışmaları](https://github.com/langgenius/dify/discussions). En uygun: geri bildirim paylaşmak ve soru sormak için.
- [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](./CONTRIBUTING.md) bakın.
- [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakın.
- [Discord](https://discord.gg/FngNHpbcY7). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.
- [X(Twitter)](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.
@ -199,4 +189,4 @@ Gizliliğinizi korumak için, lütfen güvenlik sorunlarını GitHub'da paylaşm
## Lisans
Bu depo, temel olarak Apache 2.0 lisansı ve birkaç ek kısıtlama içeren [Dify Açık Kaynak Lisansı](../../LICENSE) altında kullanıma sunulmuştur.
Bu depo, temel olarak Apache 2.0 lisansı ve birkaç ek kısıtlama içeren [Dify Açık Kaynak Lisansı](LICENSE) altında kullanıma sunulmuştur.

View File

@ -1,4 +1,4 @@
![cover-v5-optimized](../../images/GitHub_README_if.png)
![cover-v5-optimized](./images/GitHub_README_if.png)
<p align="center">
📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">介紹 Dify 工作流程檔案上傳功能:重現 Google NotebookLM Podcast</a>
@ -39,18 +39,18 @@
</p>
<p align="center">
<a href="../../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="../zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="../zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="../ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="../es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="../fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="../tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="../ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="../ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="../tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="../vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="../de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_TW.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_DE.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
</p>
Dify 是一個開源的 LLM 應用程式開發平台。其直觀的界面結合了智能代理工作流程、RAG 管道、代理功能、模型管理、可觀察性功能等,讓您能夠快速從原型進展到生產環境。
@ -64,7 +64,7 @@ Dify 是一個開源的 LLM 應用程式開發平台。其直觀的界面結合
</br>
啟動 Dify 伺服器最簡單的方式是透過 [docker compose](../../docker/docker-compose.yaml)。在使用以下命令運行 Dify 之前,請確保您的機器已安裝 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/)
啟動 Dify 伺服器最簡單的方式是透過 [docker compose](docker/docker-compose.yaml)。在使用以下命令運行 Dify 之前,請確保您的機器已安裝 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/)
```bash
cd dify
@ -128,15 +128,7 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify
## 進階設定
如果您需要自定義配置,請參考我們的 [.env.example](../../docker/.env.example) 文件中的註釋,並在您的 `.env` 文件中更新相應的值。此外,根據您特定的部署環境和需求,您可能需要調整 `docker-compose.yaml` 文件本身,例如更改映像版本、端口映射或卷掛載。進行任何更改後,請重新運行 `docker-compose up -d`。您可以在[這裡](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用環境變數的完整列表。
### 使用 Grafana 進行指標監控
將儀表板匯入 Grafana,使用 Dify 的 PostgreSQL 資料庫作為資料來源,以監控應用程式、租戶、訊息等顆粒度的指標。
- [由 @bowenliang123 提供的 Grafana 儀表板](https://github.com/bowenliang123/dify-grafana-dashboard)
### 使用 Kubernetes 部署
如果您需要自定義配置,請參考我們的 [.env.example](docker/.env.example) 文件中的註釋,並在您的 `.env` 文件中更新相應的值。此外,根據您特定的部署環境和需求,您可能需要調整 `docker-compose.yaml` 文件本身,例如更改映像版本、端口映射或卷掛載。進行任何更改後,請重新運行 `docker-compose up -d`。您可以在[這裡](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用環境變數的完整列表。
如果您想配置高可用性設置,社區貢獻的 [Helm Charts](https://helm.sh/) 和 Kubernetes 資源清單(YAML)允許在 Kubernetes 上部署 Dify。
@ -181,7 +173,7 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify
## 貢獻
對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](./CONTRIBUTING.md)。
對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_TW.md)。
同時,也請考慮透過在社群媒體和各種活動與會議上分享 Dify 來支持我們。
> 我們正在尋找貢獻者協助將 Dify 翻譯成中文和英文以外的語言。如果您有興趣幫忙,請查看 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) 獲取更多資訊,並在我們的 [Discord 社群伺服器](https://discord.gg/8Tpq4AcN9c) 的 `global-users` 頻道留言給我們。
@ -189,7 +181,7 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify
## 社群與聯絡方式
- [GitHub Discussion](https://github.com/langgenius/dify/discussions):最適合分享反饋和提問。
- [GitHub Issues](https://github.com/langgenius/dify/issues):最適合報告使用 Dify.AI 時遇到的問題和提出功能建議。請參閱我們的[貢獻指南](./CONTRIBUTING.md)。
- [GitHub Issues](https://github.com/langgenius/dify/issues):最適合報告使用 Dify.AI 時遇到的問題和提出功能建議。請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。
- [Discord](https://discord.gg/FngNHpbcY7):最適合分享您的應用程式並與社群互動。
- [X(Twitter)](https://twitter.com/dify_ai):最適合分享您的應用程式並與社群互動。
@ -209,4 +201,4 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify
## 授權條款
本代碼庫採用 [Dify 開源授權](../../LICENSE),這基本上是 Apache 2.0 授權加上一些額外限制條款。
本代碼庫採用 [Dify 開源授權](LICENSE),這基本上是 Apache 2.0 授權加上一些額外限制條款。

View File

@ -1,4 +1,4 @@
![cover-v5-optimized](../../images/GitHub_README_if.png)
![cover-v5-optimized](./images/GitHub_README_if.png)
<p align="center">
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
@ -35,19 +35,17 @@
</p>
<p align="center">
<a href="../../README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="../zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="../zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="../ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="../es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="../fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="../tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="../ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="../ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="../tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="../vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="../de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="../bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
Dify là một nền tảng phát triển ứng dụng LLM mã nguồn mở. Giao diện trực quan kết hợp quy trình làm việc AI, mô hình RAG, khả năng tác nhân, quản lý mô hình, tính năng quan sát và hơn thế nữa, cho phép bạn nhanh chóng chuyển từ nguyên mẫu sang sản phẩm. Đây là danh sách các tính năng cốt lõi:
@ -105,7 +103,7 @@ Yêu thích Dify trên GitHub và được thông báo ngay lập tức về cá
</br>
Cách dễ nhất để khởi động máy chủ Dify là chạy tệp [docker-compose.yml](../../docker/docker-compose.yaml) của chúng tôi. Trước khi chạy lệnh cài đặt, hãy đảm bảo rằng [Docker](https://docs.docker.com/get-docker/) và [Docker Compose](https://docs.docker.com/compose/install/) đã được cài đặt trên máy của bạn:
Cách dễ nhất để khởi động máy chủ Dify là chạy tệp [docker-compose.yml](docker/docker-compose.yaml) của chúng tôi. Trước khi chạy lệnh cài đặt, hãy đảm bảo rằng [Docker](https://docs.docker.com/get-docker/) và [Docker Compose](https://docs.docker.com/compose/install/) đã được cài đặt trên máy của bạn:
```bash
cd docker
@ -119,15 +117,7 @@ Sau khi chạy, bạn có thể truy cập bảng điều khiển Dify trong tr
## Các bước tiếp theo
Nếu bạn cần tùy chỉnh cấu hình, vui lòng tham khảo các nhận xét trong tệp [.env.example](../../docker/.env.example) của chúng tôi và cập nhật các giá trị tương ứng trong tệp `.env` của bạn. Ngoài ra, bạn có thể cần điều chỉnh tệp `docker-compose.yaml`, chẳng hạn như thay đổi phiên bản hình ảnh, ánh xạ cổng hoặc gắn kết khối lượng, dựa trên môi trường triển khai cụ thể và yêu cầu của bạn. Sau khi thực hiện bất kỳ thay đổi nào, vui lòng chạy lại `docker-compose up -d`. Bạn có thể tìm thấy danh sách đầy đủ các biến môi trường có sẵn [tại đây](https://docs.dify.ai/getting-started/install-self-hosted/environments).
### Monitoring Metrics with Grafana
Import the dashboard into Grafana, using Dify's PostgreSQL database as the data source, to monitor metrics at the granularity of apps, tenants, messages, and more; a sample panel query is sketched after the link below.
- [Grafana dashboard by @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard)
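A Grafana panel over this data source ultimately boils down to a SQL query; here is a minimal sketch run via `psql` (it assumes Dify's default schema, i.e. a `messages` table with `app_id` and `created_at` columns, and `$DATABASE_URL` is a placeholder connection string):
```bash
# Messages per app per day over the last 30 days, straight from Dify's database.
# The `messages` table and its columns are assumed from Dify's default schema.
psql "$DATABASE_URL" -c "
  SELECT date_trunc('day', created_at) AS day,
         app_id,
         count(*) AS message_count
  FROM messages
  WHERE created_at >= now() - interval '30 days'
  GROUP BY 1, 2
  ORDER BY 1;"
```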
### Deploying with Kubernetes
If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you may need to adjust the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
If you want to set up a highly available installation, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files that allow Dify to be deployed on Kubernetes.
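In Helm terms this is the usual add-repo-and-install flow (a sketch only; the repository URL and chart name below are placeholders for whichever community chart you choose):
```bash
# Placeholder repository URL and chart name; substitute the community chart you picked.
helm repo add dify-community https://example.org/dify-helm-charts
helm repo update
helm install dify dify-community/dify --namespace dify --create-namespace
```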
@@ -172,7 +162,7 @@ Deploy Dify to AKS with just one click using [Azure De
## Contributing
For those who'd like to contribute code, see our [Contribution Guide](./CONTRIBUTING.md).
For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_VI.md).
At the same time, please consider supporting Dify by sharing it on social media and at events and conferences.
> We are looking for contributors to help translate Dify into languages other than Chinese or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).
@@ -186,7 +176,7 @@ Deploy Dify to AKS with just one click using [Azure De
## Community & contact
- [GitHub Discussions](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
- [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](./CONTRIBUTING.md).
- [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
- [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
- [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
@@ -200,4 +190,4 @@ Deploy Dify to AKS with just one click using [Azure De
## License
This repository is available under the [Dify Open Source License](../../LICENSE), which is essentially Apache 2.0 with a few additional restrictions.
This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.

View File

@@ -76,7 +76,6 @@ DB_HOST=localhost
DB_PORT=5432
DB_DATABASE=dify
SQLALCHEMY_POOL_PRE_PING=true
SQLALCHEMY_POOL_TIMEOUT=30
# Storage configuration
# use for store upload files, private keys...
@@ -156,9 +155,6 @@ SUPABASE_URL=your-server-url
# CORS configuration
WEB_API_CORS_ALLOW_ORIGINS=http://localhost:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,*
# Set COOKIE_DOMAIN when the console frontend and API are on different subdomains.
# Provide the registrable domain (e.g. example.com); leading dots are optional.
COOKIE_DOMAIN=
# Vector database configuration
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`.
@@ -307,8 +303,6 @@ BAIDU_VECTOR_DB_API_KEY=dify
BAIDU_VECTOR_DB_DATABASE=dify
BAIDU_VECTOR_DB_SHARD=1
BAIDU_VECTOR_DB_REPLICAS=3
BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER=DEFAULT_ANALYZER
BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE=COARSE_MODE
# Upstash configuration
UPSTASH_VECTOR_URL=your-server-url
@@ -346,15 +340,6 @@ OCEANBASE_VECTOR_DATABASE=test
OCEANBASE_MEMORY_LIMIT=6G
OCEANBASE_ENABLE_HYBRID_SEARCH=false
# AlibabaCloud MySQL Vector configuration
ALIBABACLOUD_MYSQL_HOST=127.0.0.1
ALIBABACLOUD_MYSQL_PORT=3306
ALIBABACLOUD_MYSQL_USER=root
ALIBABACLOUD_MYSQL_PASSWORD=root
ALIBABACLOUD_MYSQL_DATABASE=dify
ALIBABACLOUD_MYSQL_MAX_CONNECTION=5
ALIBABACLOUD_MYSQL_HNSW_M=6
# openGauss configuration
OPENGAUSS_HOST=127.0.0.1
OPENGAUSS_PORT=6600
@@ -371,12 +356,6 @@ UPLOAD_IMAGE_FILE_SIZE_LIMIT=10
UPLOAD_VIDEO_FILE_SIZE_LIMIT=100
UPLOAD_AUDIO_FILE_SIZE_LIMIT=50
# Comma-separated list of file extensions blocked from upload for security reasons.
# Extensions should be lowercase without dots (e.g., exe,bat,sh,dll).
# Empty by default to allow all file types.
# Recommended: exe,bat,cmd,com,scr,vbs,ps1,msi,dll
UPLOAD_FILE_EXTENSION_BLACKLIST=
# Model configuration
MULTIMODAL_SEND_FORMAT=base64
PROMPT_GENERATION_MAX_TOKENS=512
@@ -426,9 +405,6 @@ SSRF_DEFAULT_TIME_OUT=5
SSRF_DEFAULT_CONNECT_TIME_OUT=5
SSRF_DEFAULT_READ_TIME_OUT=5
SSRF_DEFAULT_WRITE_TIME_OUT=5
SSRF_POOL_MAX_CONNECTIONS=100
SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20
SSRF_POOL_KEEPALIVE_EXPIRY=5.0
BATCH_UPLOAD_LIMIT=10
KEYWORD_DATA_SOURCE_TYPE=database
@@ -439,17 +415,10 @@ WORKFLOW_FILE_UPLOAD_LIMIT=10
# CODE EXECUTION CONFIGURATION
CODE_EXECUTION_ENDPOINT=http://127.0.0.1:8194
CODE_EXECUTION_API_KEY=dify-sandbox
CODE_EXECUTION_SSL_VERIFY=True
CODE_EXECUTION_POOL_MAX_CONNECTIONS=100
CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20
CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0
CODE_EXECUTION_CONNECT_TIMEOUT=10
CODE_EXECUTION_READ_TIMEOUT=60
CODE_EXECUTION_WRITE_TIMEOUT=10
CODE_MAX_NUMBER=9223372036854775807
CODE_MIN_NUMBER=-9223372036854775808
CODE_MAX_STRING_LENGTH=400000
TEMPLATE_TRANSFORM_MAX_LENGTH=400000
CODE_MAX_STRING_LENGTH=80000
TEMPLATE_TRANSFORM_MAX_LENGTH=80000
CODE_MAX_STRING_ARRAY_LENGTH=30
CODE_MAX_OBJECT_ARRAY_LENGTH=30
CODE_MAX_NUMBER_ARRAY_LENGTH=1000
@@ -489,6 +458,7 @@ INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
WORKFLOW_MAX_EXECUTION_STEPS=500
WORKFLOW_MAX_EXECUTION_TIME=1200
WORKFLOW_CALL_MAX_DEPTH=5
WORKFLOW_PARALLEL_DEPTH_LIMIT=3
MAX_VARIABLE_SIZE=204800
# GraphEngine Worker Pool Configuration
@@ -614,6 +584,3 @@ SWAGGER_UI_PATH=/swagger-ui.html
# Whether to encrypt dataset IDs when exporting DSL files (default: true)
# Set to false to export dataset IDs as plain text for easier cross-environment import
DSL_EXPORT_ENCRYPT_DATASET_ID=true
# Maximum number of segments for dataset segments API (0 for unlimited)
DATASET_MAX_SEGMENTS_PER_REQUEST=0

View File

@@ -30,7 +30,6 @@ select = [
"RUF022", # unsorted-dunder-all
"S506", # unsafe-yaml-load
"SIM", # flake8-simplify rules
"T201", # print-found
"TRY400", # error-instead-of-exception
"TRY401", # verbose-log-message
"UP", # pyupgrade rules
@@ -81,6 +80,7 @@ ignore = [
"SIM113", # enumerate-for-loop
"SIM117", # multiple-with-statements
"SIM210", # if-expr-with-true-false
"UP038", # deprecated and not recommended by Ruff, https://docs.astral.sh/ruff/rules/non-pep604-isinstance/
]
[lint.per-file-ignores]
@@ -91,18 +91,11 @@ ignore = [
"configs/*" = [
"N802", # invalid-function-name
]
"core/model_runtime/callbacks/base_callback.py" = [
"T201",
]
"core/workflow/callbacks/workflow_logging_callback.py" = [
"T201",
]
"libs/gmpy2_pkcs10aep_cipher.py" = [
"N803", # invalid-argument-name
]
"tests/*" = [
"F811", # redefined-while-unused
"T201", # allow print in tests
]
[lint.pyflakes]

View File

@@ -54,7 +54,7 @@
"--loglevel",
"DEBUG",
"-Q",
"dataset,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,priority_pipeline,pipeline"
"dataset,generation,mail,ops_trace,app_deletion"
]
}
]

View File

@@ -15,11 +15,7 @@ FROM base AS packages
# RUN sed -i 's@deb.debian.org@mirrors.aliyun.com@g' /etc/apt/sources.list.d/debian.sources
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
# basic environment
g++ \
# for building gmpy2
libmpfr-dev libmpc-dev
&& apt-get install -y --no-install-recommends gcc g++ libc-dev libffi-dev libgmp-dev libmpfr-dev libmpc-dev
# Install Python dependencies
COPY pyproject.toml uv.lock ./
@@ -53,9 +49,7 @@ RUN \
# Install dependencies
&& apt-get install -y --no-install-recommends \
# basic environment
curl nodejs \
# for gmpy2 \
libgmp-dev libmpfr-dev libmpc-dev \
curl nodejs libgmp-dev libmpfr-dev libmpc-dev \
# For Security
expat libldap-2.5-0 perl libsqlite3-0 zlib1g \
# install fonts to support the use of tools like pypdfium2

View File

@@ -80,10 +80,10 @@
1. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service.
```bash
uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q dataset,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,priority_pipeline,pipeline
uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation
```
Additionally, if you want to debug the celery scheduled tasks, you can run the following command in another terminal to start the beat service:
Addition, if you want to debug the celery scheduled tasks, you can use the following command in another terminal:
```bash
uv run celery -A app.celery beat

View File

@@ -13,12 +13,23 @@ if is_db_command():
app = create_migrations_app()
else:
# Gunicorn and Celery handle monkey patching automatically in production by
# specifying the `gevent` worker class. Manual monkey patching is not required here.
# It seems that JetBrains Python debugger does not work well with gevent,
# so we need to disable gevent in debug mode.
# If you are using debugpy and set GEVENT_SUPPORT=True, you can debug with gevent.
# if (flask_debug := os.environ.get("FLASK_DEBUG", "0")) and flask_debug.lower() in {"false", "0", "no"}:
# from gevent import monkey
#
# See `api/docker/entrypoint.sh` (lines 33 and 47) for details.
# # gevent
# monkey.patch_all()
#
# For third-party library patching, refer to `gunicorn.conf.py` and `celery_entrypoint.py`.
# from grpc.experimental import gevent as grpc_gevent # type: ignore
#
# # grpc gevent
# grpc_gevent.init_gevent()
# import psycogreen.gevent # type: ignore
#
# psycogreen.gevent.patch_psycopg()
from app_factory import create_app

View File

@@ -1,11 +1,20 @@
import logging
import psycogreen.gevent as pscycogreen_gevent # type: ignore
from grpc.experimental import gevent as grpc_gevent # type: ignore
_logger = logging.getLogger(__name__)
def _log(message: str):
print(message, flush=True)
# grpc gevent
grpc_gevent.init_gevent()
print("gRPC patched with gevent.", flush=True) # noqa: T201
_log("gRPC patched with gevent.")
pscycogreen_gevent.patch_psycopg()
print("psycopg2 patched with gevent.", flush=True) # noqa: T201
_log("psycopg2 patched with gevent.")
from app import app, celery

View File

@@ -10,7 +10,6 @@ from flask import current_app
from pydantic import TypeAdapter
from sqlalchemy import select
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import sessionmaker
from configs import dify_config
from constants.languages import languages
@@ -26,15 +25,13 @@ from events.app_event import app_was_created
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from extensions.ext_storage import storage
from extensions.storage.opendal_storage import OpenDALStorage
from extensions.storage.storage_type import StorageType
from libs.helper import email as email_validate
from libs.password import hash_password, password_pattern, valid_password
from libs.rsa import generate_key_pair
from models import Tenant
from models.dataset import Dataset, DatasetCollectionBinding, DatasetMetadata, DatasetMetadataBinding, DocumentSegment
from models.dataset import Document as DatasetDocument
from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation, UploadFile
from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation
from models.oauth import DatasourceOauthParamConfig, DatasourceProvider
from models.provider import Provider, ProviderModel
from models.provider_ids import DatasourceProviderID, ToolProviderID
@@ -62,30 +59,31 @@ def reset_password(email, new_password, password_confirm):
if str(new_password).strip() != str(password_confirm).strip():
click.echo(click.style("Passwords do not match.", fg="red"))
return
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
account = session.query(Account).where(Account.email == email).one_or_none()
if not account:
click.echo(click.style(f"Account not found for email: {email}", fg="red"))
return
account = db.session.query(Account).where(Account.email == email).one_or_none()
try:
valid_password(new_password)
except:
click.echo(click.style(f"Invalid password. Must match {password_pattern}", fg="red"))
return
if not account:
click.echo(click.style(f"Account not found for email: {email}", fg="red"))
return
# generate password salt
salt = secrets.token_bytes(16)
base64_salt = base64.b64encode(salt).decode()
try:
valid_password(new_password)
except:
click.echo(click.style(f"Invalid password. Must match {password_pattern}", fg="red"))
return
# encrypt password with salt
password_hashed = hash_password(new_password, salt)
base64_password_hashed = base64.b64encode(password_hashed).decode()
account.password = base64_password_hashed
account.password_salt = base64_salt
AccountService.reset_login_error_rate_limit(email)
click.echo(click.style("Password reset successfully.", fg="green"))
# generate password salt
salt = secrets.token_bytes(16)
base64_salt = base64.b64encode(salt).decode()
# encrypt password with salt
password_hashed = hash_password(new_password, salt)
base64_password_hashed = base64.b64encode(password_hashed).decode()
account.password = base64_password_hashed
account.password_salt = base64_salt
db.session.commit()
AccountService.reset_login_error_rate_limit(email)
click.echo(click.style("Password reset successfully.", fg="green"))
@click.command("reset-email", help="Reset the account email.")
@@ -100,21 +98,22 @@ def reset_email(email, new_email, email_confirm):
if str(new_email).strip() != str(email_confirm).strip():
click.echo(click.style("New emails do not match.", fg="red"))
return
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
account = session.query(Account).where(Account.email == email).one_or_none()
if not account:
click.echo(click.style(f"Account not found for email: {email}", fg="red"))
return
account = db.session.query(Account).where(Account.email == email).one_or_none()
try:
email_validate(new_email)
except:
click.echo(click.style(f"Invalid email: {new_email}", fg="red"))
return
if not account:
click.echo(click.style(f"Account not found for email: {email}", fg="red"))
return
account.email = new_email
click.echo(click.style("Email updated successfully.", fg="green"))
try:
email_validate(new_email)
except:
click.echo(click.style(f"Invalid email: {new_email}", fg="red"))
return
account.email = new_email
db.session.commit()
click.echo(click.style("Email updated successfully.", fg="green"))
@click.command(
@@ -138,24 +137,25 @@ def reset_encrypt_key_pair():
if dify_config.EDITION != "SELF_HOSTED":
click.echo(click.style("This command is only for SELF_HOSTED installations.", fg="red"))
return
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
tenants = session.query(Tenant).all()
for tenant in tenants:
if not tenant:
click.echo(click.style("No workspaces found. Run /install first.", fg="red"))
return
tenant.encrypt_public_key = generate_key_pair(tenant.id)
tenants = db.session.query(Tenant).all()
for tenant in tenants:
if not tenant:
click.echo(click.style("No workspaces found. Run /install first.", fg="red"))
return
session.query(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete()
session.query(ProviderModel).where(ProviderModel.tenant_id == tenant.id).delete()
tenant.encrypt_public_key = generate_key_pair(tenant.id)
click.echo(
click.style(
f"Congratulations! The asymmetric key pair of workspace {tenant.id} has been reset.",
fg="green",
)
db.session.query(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete()
db.session.query(ProviderModel).where(ProviderModel.tenant_id == tenant.id).delete()
db.session.commit()
click.echo(
click.style(
f"Congratulations! The asymmetric key pair of workspace {tenant.id} has been reset.",
fg="green",
)
)
@click.command("vdb-migrate", help="Migrate vector db.")
@@ -180,15 +180,14 @@ def migrate_annotation_vector_database():
try:
# get apps info
per_page = 50
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
apps = (
session.query(App)
.where(App.status == "normal")
.order_by(App.created_at.desc())
.limit(per_page)
.offset((page - 1) * per_page)
.all()
)
apps = (
db.session.query(App)
.where(App.status == "normal")
.order_by(App.created_at.desc())
.limit(per_page)
.offset((page - 1) * per_page)
.all()
)
if not apps:
break
except SQLAlchemyError:
@@ -202,27 +201,26 @@
)
try:
click.echo(f"Creating app annotation index: {app.id}")
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
app_annotation_setting = (
session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app.id).first()
)
app_annotation_setting = (
db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app.id).first()
)
if not app_annotation_setting:
skipped_count = skipped_count + 1
click.echo(f"App annotation setting disabled: {app.id}")
continue
# get dataset_collection_binding info
dataset_collection_binding = (
session.query(DatasetCollectionBinding)
.where(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id)
.first()
)
if not dataset_collection_binding:
click.echo(f"App annotation collection binding not found: {app.id}")
continue
annotations = session.scalars(
select(MessageAnnotation).where(MessageAnnotation.app_id == app.id)
).all()
if not app_annotation_setting:
skipped_count = skipped_count + 1
click.echo(f"App annotation setting disabled: {app.id}")
continue
# get dataset_collection_binding info
dataset_collection_binding = (
db.session.query(DatasetCollectionBinding)
.where(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id)
.first()
)
if not dataset_collection_binding:
click.echo(f"App annotation collection binding not found: {app.id}")
continue
annotations = db.session.scalars(
select(MessageAnnotation).where(MessageAnnotation.app_id == app.id)
).all()
dataset = Dataset(
id=app.id,
tenant_id=app.tenant_id,
@@ -321,8 +319,6 @@ def migrate_knowledge_vector_database():
)
datasets = db.paginate(select=stmt, page=page, per_page=50, max_per_page=50, error_out=False)
if not datasets.items:
break
except SQLAlchemyError:
raise
@@ -741,18 +737,18 @@ where sites.id is null limit 1000"""
try:
app = db.session.query(App).where(App.id == app_id).first()
if not app:
logger.info("App %s not found", app_id)
print(f"App {app_id} not found")
continue
tenant = app.tenant
if tenant:
accounts = tenant.get_accounts()
if not accounts:
logger.info("Fix failed for app %s", app.id)
print(f"Fix failed for app {app.id}")
continue
account = accounts[0]
logger.info("Fixing missing site for app %s", app.id)
print(f"Fixing missing site for app {app.id}")
app_was_created.send(app, account=account)
except Exception:
failed_app_ids.append(app_id)
@@ -1450,52 +1446,41 @@ def transform_datasource_credentials():
notion_credentials_tenant_mapping[tenant_id] = []
notion_credentials_tenant_mapping[tenant_id].append(notion_credential)
for tenant_id, notion_tenant_credentials in notion_credentials_tenant_mapping.items():
tenant = db.session.query(Tenant).filter_by(id=tenant_id).first()
if not tenant:
continue
try:
# check notion plugin is installed
installed_plugins = installer_manager.list_plugins(tenant_id)
installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
if notion_plugin_id not in installed_plugins_ids:
if notion_plugin_unique_identifier:
# install notion plugin
PluginService.install_from_marketplace_pkg(tenant_id, [notion_plugin_unique_identifier])
auth_count = 0
for notion_tenant_credential in notion_tenant_credentials:
auth_count += 1
# get credential oauth params
access_token = notion_tenant_credential.access_token
# notion info
notion_info = notion_tenant_credential.source_info
workspace_id = notion_info.get("workspace_id")
workspace_name = notion_info.get("workspace_name")
workspace_icon = notion_info.get("workspace_icon")
new_credentials = {
"integration_secret": encrypter.encrypt_token(tenant_id, access_token),
"workspace_id": workspace_id,
"workspace_name": workspace_name,
"workspace_icon": workspace_icon,
}
datasource_provider = DatasourceProvider(
provider="notion_datasource",
tenant_id=tenant_id,
plugin_id=notion_plugin_id,
auth_type=oauth_credential_type.value,
encrypted_credentials=new_credentials,
name=f"Auth {auth_count}",
avatar_url=workspace_icon or "default",
is_default=False,
)
db.session.add(datasource_provider)
deal_notion_count += 1
except Exception as e:
click.echo(
click.style(
f"Error transforming notion credentials: {str(e)}, tenant_id: {tenant_id}", fg="red"
)
# check notion plugin is installed
installed_plugins = installer_manager.list_plugins(tenant_id)
installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
if notion_plugin_id not in installed_plugins_ids:
if notion_plugin_unique_identifier:
# install notion plugin
PluginService.install_from_marketplace_pkg(tenant_id, [notion_plugin_unique_identifier])
auth_count = 0
for notion_tenant_credential in notion_tenant_credentials:
auth_count += 1
# get credential oauth params
access_token = notion_tenant_credential.access_token
# notion info
notion_info = notion_tenant_credential.source_info
workspace_id = notion_info.get("workspace_id")
workspace_name = notion_info.get("workspace_name")
workspace_icon = notion_info.get("workspace_icon")
new_credentials = {
"integration_secret": encrypter.encrypt_token(tenant_id, access_token),
"workspace_id": workspace_id,
"workspace_name": workspace_name,
"workspace_icon": workspace_icon,
}
datasource_provider = DatasourceProvider(
provider="notion_datasource",
tenant_id=tenant_id,
plugin_id=notion_plugin_id,
auth_type=oauth_credential_type.value,
encrypted_credentials=new_credentials,
name=f"Auth {auth_count}",
avatar_url=workspace_icon or "default",
is_default=False,
)
continue
db.session.add(datasource_provider)
deal_notion_count += 1
db.session.commit()
# deal firecrawl credentials
deal_firecrawl_count = 0
@@ -1508,56 +1493,37 @@ def transform_datasource_credentials():
firecrawl_credentials_tenant_mapping[tenant_id] = []
firecrawl_credentials_tenant_mapping[tenant_id].append(firecrawl_credential)
for tenant_id, firecrawl_tenant_credentials in firecrawl_credentials_tenant_mapping.items():
tenant = db.session.query(Tenant).filter_by(id=tenant_id).first()
if not tenant:
continue
try:
# check firecrawl plugin is installed
installed_plugins = installer_manager.list_plugins(tenant_id)
installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
if firecrawl_plugin_id not in installed_plugins_ids:
if firecrawl_plugin_unique_identifier:
# install firecrawl plugin
PluginService.install_from_marketplace_pkg(tenant_id, [firecrawl_plugin_unique_identifier])
# check firecrawl plugin is installed
installed_plugins = installer_manager.list_plugins(tenant_id)
installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
if firecrawl_plugin_id not in installed_plugins_ids:
if firecrawl_plugin_unique_identifier:
# install firecrawl plugin
PluginService.install_from_marketplace_pkg(tenant_id, [firecrawl_plugin_unique_identifier])
auth_count = 0
for firecrawl_tenant_credential in firecrawl_tenant_credentials:
auth_count += 1
if not firecrawl_tenant_credential.credentials:
click.echo(
click.style(
f"Skipping firecrawl credential for tenant {tenant_id} due to missing credentials.",
fg="yellow",
)
)
continue
# get credential api key
credentials_json = json.loads(firecrawl_tenant_credential.credentials)
api_key = credentials_json.get("config", {}).get("api_key")
base_url = credentials_json.get("config", {}).get("base_url")
new_credentials = {
"firecrawl_api_key": api_key,
"base_url": base_url,
}
datasource_provider = DatasourceProvider(
provider="firecrawl",
tenant_id=tenant_id,
plugin_id=firecrawl_plugin_id,
auth_type=api_key_credential_type.value,
encrypted_credentials=new_credentials,
name=f"Auth {auth_count}",
avatar_url="default",
is_default=False,
)
db.session.add(datasource_provider)
deal_firecrawl_count += 1
except Exception as e:
click.echo(
click.style(
f"Error transforming firecrawl credentials: {str(e)}, tenant_id: {tenant_id}", fg="red"
)
auth_count = 0
for firecrawl_tenant_credential in firecrawl_tenant_credentials:
auth_count += 1
# get credential api key
credentials_json = json.loads(firecrawl_tenant_credential.credentials)
api_key = credentials_json.get("config", {}).get("api_key")
base_url = credentials_json.get("config", {}).get("base_url")
new_credentials = {
"firecrawl_api_key": api_key,
"base_url": base_url,
}
datasource_provider = DatasourceProvider(
provider="firecrawl",
tenant_id=tenant_id,
plugin_id=firecrawl_plugin_id,
auth_type=api_key_credential_type.value,
encrypted_credentials=new_credentials,
name=f"Auth {auth_count}",
avatar_url="default",
is_default=False,
)
continue
db.session.add(datasource_provider)
deal_firecrawl_count += 1
db.session.commit()
# deal jina credentials
deal_jina_count = 0
@@ -1570,53 +1536,36 @@ def transform_datasource_credentials():
jina_credentials_tenant_mapping[tenant_id] = []
jina_credentials_tenant_mapping[tenant_id].append(jina_credential)
for tenant_id, jina_tenant_credentials in jina_credentials_tenant_mapping.items():
tenant = db.session.query(Tenant).filter_by(id=tenant_id).first()
if not tenant:
continue
try:
# check jina plugin is installed
installed_plugins = installer_manager.list_plugins(tenant_id)
installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
if jina_plugin_id not in installed_plugins_ids:
if jina_plugin_unique_identifier:
# install jina plugin
logger.debug("Installing Jina plugin %s", jina_plugin_unique_identifier)
PluginService.install_from_marketplace_pkg(tenant_id, [jina_plugin_unique_identifier])
# check jina plugin is installed
installed_plugins = installer_manager.list_plugins(tenant_id)
installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
if jina_plugin_id not in installed_plugins_ids:
if jina_plugin_unique_identifier:
# install jina plugin
print(jina_plugin_unique_identifier)
PluginService.install_from_marketplace_pkg(tenant_id, [jina_plugin_unique_identifier])
auth_count = 0
for jina_tenant_credential in jina_tenant_credentials:
auth_count += 1
if not jina_tenant_credential.credentials:
click.echo(
click.style(
f"Skipping jina credential for tenant {tenant_id} due to missing credentials.",
fg="yellow",
)
)
continue
# get credential api key
credentials_json = json.loads(jina_tenant_credential.credentials)
api_key = credentials_json.get("config", {}).get("api_key")
new_credentials = {
"integration_secret": api_key,
}
datasource_provider = DatasourceProvider(
provider="jinareader",
tenant_id=tenant_id,
plugin_id=jina_plugin_id,
auth_type=api_key_credential_type.value,
encrypted_credentials=new_credentials,
name=f"Auth {auth_count}",
avatar_url="default",
is_default=False,
)
db.session.add(datasource_provider)
deal_jina_count += 1
except Exception as e:
click.echo(
click.style(f"Error transforming jina credentials: {str(e)}, tenant_id: {tenant_id}", fg="red")
auth_count = 0
for jina_tenant_credential in jina_tenant_credentials:
auth_count += 1
# get credential api key
credentials_json = json.loads(jina_tenant_credential.credentials)
api_key = credentials_json.get("config", {}).get("api_key")
new_credentials = {
"integration_secret": api_key,
}
datasource_provider = DatasourceProvider(
provider="jina",
tenant_id=tenant_id,
plugin_id=jina_plugin_id,
auth_type=api_key_credential_type.value,
encrypted_credentials=new_credentials,
name=f"Auth {auth_count}",
avatar_url="default",
is_default=False,
)
continue
db.session.add(datasource_provider)
deal_jina_count += 1
db.session.commit()
except Exception as e:
click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red"))
@@ -1648,197 +1597,3 @@ def install_rag_pipeline_plugins(input_file, output_file, workers):
workers,
)
click.echo(click.style("Installing rag pipeline plugins successfully", fg="green"))
@click.command(
"migrate-oss",
help="Migrate files from Local or OpenDAL source to a cloud OSS storage (destination must NOT be local/opendal).",
)
@click.option(
"--path",
"paths",
multiple=True,
help="Storage path prefixes to migrate (repeatable). Defaults: privkeys, upload_files, image_files,"
" tools, website_files, keyword_files, ops_trace",
)
@click.option(
"--source",
type=click.Choice(["local", "opendal"], case_sensitive=False),
default="opendal",
show_default=True,
help="Source storage type to read from",
)
@click.option("--overwrite", is_flag=True, default=False, help="Overwrite destination if file already exists")
@click.option("--dry-run", is_flag=True, default=False, help="Show what would be migrated without uploading")
@click.option("-f", "--force", is_flag=True, help="Skip confirmation and run without prompts")
@click.option(
"--update-db/--no-update-db",
default=True,
help="Update upload_files.storage_type from source type to current storage after migration",
)
def migrate_oss(
paths: tuple[str, ...],
source: str,
overwrite: bool,
dry_run: bool,
force: bool,
update_db: bool,
):
"""
Copy all files under selected prefixes from a source storage
(Local filesystem or OpenDAL-backed) into the currently configured
destination storage backend, then optionally update DB records.
Expected usage: set STORAGE_TYPE (and its credentials) to your target backend.
"""
# Ensure target storage is not local/opendal
if dify_config.STORAGE_TYPE in (StorageType.LOCAL, StorageType.OPENDAL):
click.echo(
click.style(
"Target STORAGE_TYPE must be a cloud OSS (not 'local' or 'opendal').\n"
"Please set STORAGE_TYPE to one of: s3, aliyun-oss, azure-blob, google-storage, tencent-cos, \n"
"volcengine-tos, supabase, oci-storage, huawei-obs, baidu-obs, clickzetta-volume.",
fg="red",
)
)
return
# Default paths if none specified
default_paths = ("privkeys", "upload_files", "image_files", "tools", "website_files", "keyword_files", "ops_trace")
path_list = list(paths) if paths else list(default_paths)
is_source_local = source.lower() == "local"
click.echo(click.style("Preparing migration to target storage.", fg="yellow"))
click.echo(click.style(f"Target storage type: {dify_config.STORAGE_TYPE}", fg="white"))
if is_source_local:
src_root = dify_config.STORAGE_LOCAL_PATH
click.echo(click.style(f"Source: local fs, root: {src_root}", fg="white"))
else:
click.echo(click.style(f"Source: opendal scheme={dify_config.OPENDAL_SCHEME}", fg="white"))
click.echo(click.style(f"Paths to migrate: {', '.join(path_list)}", fg="white"))
click.echo("")
if not force:
click.confirm("Proceed with migration?", abort=True)
# Instantiate source storage
try:
if is_source_local:
src_root = dify_config.STORAGE_LOCAL_PATH
source_storage = OpenDALStorage(scheme="fs", root=src_root)
else:
source_storage = OpenDALStorage(scheme=dify_config.OPENDAL_SCHEME)
except Exception as e:
click.echo(click.style(f"Failed to initialize source storage: {str(e)}", fg="red"))
return
total_files = 0
copied_files = 0
skipped_files = 0
errored_files = 0
copied_upload_file_keys: list[str] = []
for prefix in path_list:
click.echo(click.style(f"Scanning source path: {prefix}", fg="white"))
try:
keys = source_storage.scan(path=prefix, files=True, directories=False)
except FileNotFoundError:
click.echo(click.style(f" -> Skipping missing path: {prefix}", fg="yellow"))
continue
except NotImplementedError:
click.echo(click.style(" -> Source storage does not support scanning.", fg="red"))
return
except Exception as e:
click.echo(click.style(f" -> Error scanning '{prefix}': {str(e)}", fg="red"))
continue
click.echo(click.style(f"Found {len(keys)} files under {prefix}", fg="white"))
for key in keys:
total_files += 1
# check destination existence
if not overwrite:
try:
if storage.exists(key):
skipped_files += 1
continue
except Exception as e:
# existence check failures should not block migration attempt
# but should be surfaced to user as a warning for visibility
click.echo(
click.style(
f" -> Warning: failed target existence check for {key}: {str(e)}",
fg="yellow",
)
)
if dry_run:
copied_files += 1
continue
# read from source and write to destination
try:
data = source_storage.load_once(key)
except FileNotFoundError:
errored_files += 1
click.echo(click.style(f" -> Missing on source: {key}", fg="yellow"))
continue
except Exception as e:
errored_files += 1
click.echo(click.style(f" -> Error reading {key}: {str(e)}", fg="red"))
continue
try:
storage.save(key, data)
copied_files += 1
if prefix == "upload_files":
copied_upload_file_keys.append(key)
except Exception as e:
errored_files += 1
click.echo(click.style(f" -> Error writing {key} to target: {str(e)}", fg="red"))
continue
click.echo("")
click.echo(click.style("Migration summary:", fg="yellow"))
click.echo(click.style(f" Total: {total_files}", fg="white"))
click.echo(click.style(f" Copied: {copied_files}", fg="green"))
click.echo(click.style(f" Skipped: {skipped_files}", fg="white"))
if errored_files:
click.echo(click.style(f" Errors: {errored_files}", fg="red"))
if dry_run:
click.echo(click.style("Dry-run complete. No changes were made.", fg="green"))
return
if errored_files:
click.echo(
click.style(
"Some files failed to migrate. Review errors above before updating DB records.",
fg="yellow",
)
)
if update_db and not force:
if not click.confirm("Proceed to update DB storage_type despite errors?", default=False):
update_db = False
# Optionally update DB records for upload_files.storage_type (only for successfully copied upload_files)
if update_db:
if not copied_upload_file_keys:
click.echo(click.style("No upload_files copied. Skipping DB storage_type update.", fg="yellow"))
else:
try:
source_storage_type = StorageType.LOCAL if is_source_local else StorageType.OPENDAL
updated = (
db.session.query(UploadFile)
.where(
UploadFile.storage_type == source_storage_type,
UploadFile.key.in_(copied_upload_file_keys),
)
.update({UploadFile.storage_type: dify_config.STORAGE_TYPE}, synchronize_session=False)
)
db.session.commit()
click.echo(click.style(f"Updated storage_type for {updated} upload_files records.", fg="green"))
except Exception as e:
db.session.rollback()
click.echo(click.style(f"Failed to update DB storage_type: {str(e)}", fg="red"))

View File

@@ -1,4 +1,3 @@
from enum import StrEnum
from typing import Literal
from pydantic import (
@@ -113,21 +112,6 @@ class CodeExecutionSandboxConfig(BaseSettings):
default=10.0,
)
CODE_EXECUTION_POOL_MAX_CONNECTIONS: PositiveInt = Field(
description="Maximum number of concurrent connections for the code execution HTTP client",
default=100,
)
CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS: PositiveInt = Field(
description="Maximum number of persistent keep-alive connections for the code execution HTTP client",
default=20,
)
CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY: PositiveFloat | None = Field(
description="Keep-alive expiry in seconds for idle connections (set to None to disable)",
default=5.0,
)
CODE_MAX_NUMBER: PositiveInt = Field(
description="Maximum allowed numeric value in code execution",
default=9223372036854775807,
@@ -150,7 +134,7 @@ class CodeExecutionSandboxConfig(BaseSettings):
CODE_MAX_STRING_LENGTH: PositiveInt = Field(
description="Maximum allowed length for strings in code execution",
default=400_000,
default=80000,
)
CODE_MAX_STRING_ARRAY_LENGTH: PositiveInt = Field(
@@ -168,11 +152,6 @@ class CodeExecutionSandboxConfig(BaseSettings):
default=1000,
)
CODE_EXECUTION_SSL_VERIFY: bool = Field(
description="Enable or disable SSL verification for code execution requests",
default=True,
)
class PluginConfig(BaseSettings):
"""
@@ -189,11 +168,6 @@ class PluginConfig(BaseSettings):
default="plugin-api-key",
)
PLUGIN_DAEMON_TIMEOUT: PositiveFloat | None = Field(
description="Timeout in seconds for requests to the plugin daemon (set to None to disable)",
default=300.0,
)
INNER_API_KEY_FOR_PLUGIN: str = Field(description="Inner api key for plugin", default="inner-api-key")
PLUGIN_REMOTE_INSTALL_HOST: str = Field(
@@ -331,42 +305,12 @@ class FileUploadConfig(BaseSettings):
default=10,
)
inner_UPLOAD_FILE_EXTENSION_BLACKLIST: str = Field(
description=(
"Comma-separated list of file extensions that are blocked from upload. "
"Extensions should be lowercase without dots (e.g., 'exe,bat,sh,dll'). "
"Empty by default to allow all file types."
),
validation_alias=AliasChoices("UPLOAD_FILE_EXTENSION_BLACKLIST"),
default="",
)
@computed_field # type: ignore[misc]
@property
def UPLOAD_FILE_EXTENSION_BLACKLIST(self) -> set[str]:
"""
Parse and return the blacklist as a set of lowercase extensions.
Returns an empty set if no blacklist is configured.
"""
if not self.inner_UPLOAD_FILE_EXTENSION_BLACKLIST:
return set()
return {
ext.strip().lower().strip(".")
for ext in self.inner_UPLOAD_FILE_EXTENSION_BLACKLIST.split(",")
if ext.strip()
}
class HttpConfig(BaseSettings):
"""
HTTP-related configurations for the application
"""
COOKIE_DOMAIN: str = Field(
description="Explicit cookie domain for console/service cookies when sharing across subdomains",
default="",
)
API_COMPRESSION_ENABLED: bool = Field(
description="Enable or disable gzip compression for HTTP responses",
default=False,
@@ -397,11 +341,11 @@
)
HTTP_REQUEST_MAX_READ_TIMEOUT: int = Field(
ge=1, description="Maximum read timeout in seconds for HTTP requests", default=600
ge=1, description="Maximum read timeout in seconds for HTTP requests", default=60
)
HTTP_REQUEST_MAX_WRITE_TIMEOUT: int = Field(
ge=1, description="Maximum write timeout in seconds for HTTP requests", default=600
ge=1, description="Maximum write timeout in seconds for HTTP requests", default=20
)
HTTP_REQUEST_NODE_MAX_BINARY_SIZE: PositiveInt = Field(
@@ -459,21 +403,6 @@
default=5,
)
SSRF_POOL_MAX_CONNECTIONS: PositiveInt = Field(
description="Maximum number of concurrent connections for the SSRF HTTP client",
default=100,
)
SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS: PositiveInt = Field(
description="Maximum number of persistent keep-alive connections for the SSRF HTTP client",
default=20,
)
SSRF_POOL_KEEPALIVE_EXPIRY: PositiveFloat | None = Field(
description="Keep-alive expiry in seconds for idle SSRF connections (set to None to disable)",
default=5.0,
)
RESPECT_XFORWARD_HEADERS_ENABLED: bool = Field(
description="Enable handling of X-Forwarded-For, X-Forwarded-Proto, and X-Forwarded-Port headers"
" when the app is behind a single trusted reverse proxy.",
@@ -578,7 +507,7 @@ class UpdateConfig(BaseSettings):
class WorkflowVariableTruncationConfig(BaseSettings):
WORKFLOW_VARIABLE_TRUNCATION_MAX_SIZE: PositiveInt = Field(
# 1000 KiB
# 100KB
1024_000,
description="Maximum size for variable to trigger final truncation.",
)
@@ -612,16 +541,16 @@ class WorkflowConfig(BaseSettings):
default=5,
)
WORKFLOW_PARALLEL_DEPTH_LIMIT: PositiveInt = Field(
description="Maximum allowed depth for nested parallel executions",
default=3,
)
MAX_VARIABLE_SIZE: PositiveInt = Field(
description="Maximum size in bytes for a single variable in workflows. Default to 200 KB.",
default=200 * 1024,
)
TEMPLATE_TRANSFORM_MAX_LENGTH: PositiveInt = Field(
description="Maximum number of characters allowed in Template Transform node output",
default=400_000,
)
# GraphEngine Worker Pool Configuration
GRAPH_ENGINE_MIN_WORKERS: PositiveInt = Field(
description="Minimum number of workers per GraphEngine instance",
@@ -782,35 +711,11 @@ class ToolConfig(BaseSettings):
)
class TemplateMode(StrEnum):
# unsafe mode allows flexible operations in templates, but may cause security vulnerabilities
UNSAFE = "unsafe"
# sandbox mode restricts some unsafe operations like accessing __class__.
# however, it is still not 100% safe, for example, cpu exploitation can happen.
SANDBOX = "sandbox"
# templating is disabled
DISABLED = "disabled"
class MailConfig(BaseSettings):
"""
Configuration for email services
"""
MAIL_TEMPLATING_MODE: TemplateMode = Field(
description="Template mode for email services",
default=TemplateMode.SANDBOX,
)
MAIL_TEMPLATING_TIMEOUT: int = Field(
description="""
Timeout for email templating in seconds. Used to prevent infinite loops in malicious templates.
Only available in sandbox mode.""",
default=3,
)
MAIL_TYPE: str | None = Field(
description="Email service provider type ('smtp' or 'resend' or 'sendGrid), default to None.",
default=None,
@@ -945,11 +850,6 @@ class DataSetConfig(BaseSettings):
default=True,
)
DATASET_MAX_SEGMENTS_PER_REQUEST: NonNegativeInt = Field(
description="Maximum number of segments for dataset segments API (0 for unlimited)",
default=0,
)
class WorkspaceConfig(BaseSettings):
"""

View File

@@ -18,7 +18,6 @@ from .storage.opendal_storage_config import OpenDALStorageConfig
from .storage.supabase_storage_config import SupabaseStorageConfig
from .storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
from .storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
from .vdb.alibabacloud_mysql_config import AlibabaCloudMySQLConfig
from .vdb.analyticdb_config import AnalyticdbConfig
from .vdb.baidu_vector_config import BaiduVectorDBConfig
from .vdb.chroma_config import ChromaConfig
@@ -145,7 +144,7 @@ class DatabaseConfig(BaseSettings):
default="postgresql",
)
@computed_field # type: ignore[prop-decorator]
@computed_field # type: ignore[misc]
@property
def SQLALCHEMY_DATABASE_URI(self) -> str:
db_extras = (
@@ -188,17 +187,12 @@ class DatabaseConfig(BaseSettings):
default=False,
)
SQLALCHEMY_POOL_TIMEOUT: NonNegativeInt = Field(
description="Number of seconds to wait for a connection from the pool before raising a timeout error.",
default=30,
)
RETRIEVAL_SERVICE_EXECUTORS: NonNegativeInt = Field(
description="Number of processes for the retrieval service, default to CPU cores.",
default=os.cpu_count() or 1,
)
@computed_field # type: ignore[prop-decorator]
@computed_field # type: ignore[misc]
@property
def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]:
# Parse DB_EXTRAS for 'options'
@@ -222,7 +216,6 @@ class DatabaseConfig(BaseSettings):
"connect_args": connect_args,
"pool_use_lifo": self.SQLALCHEMY_POOL_USE_LIFO,
"pool_reset_on_return": None,
"pool_timeout": self.SQLALCHEMY_POOL_TIMEOUT,
}
@@ -331,7 +324,6 @@ class MiddlewareConfig(
ClickzettaConfig,
HuaweiCloudConfig,
MilvusConfig,
AlibabaCloudMySQLConfig,
MyScaleConfig,
OpenSearchConfig,
OracleConfig,

View File

@@ -1,54 +0,0 @@
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
class AlibabaCloudMySQLConfig(BaseSettings):
"""
Configuration settings for AlibabaCloud MySQL vector database
"""
ALIBABACLOUD_MYSQL_HOST: str = Field(
description="Hostname or IP address of the AlibabaCloud MySQL server (e.g., 'localhost' or 'mysql.aliyun.com')",
default="localhost",
)
ALIBABACLOUD_MYSQL_PORT: PositiveInt = Field(
description="Port number on which the AlibabaCloud MySQL server is listening (default is 3306)",
default=3306,
)
ALIBABACLOUD_MYSQL_USER: str = Field(
description="Username for authenticating with AlibabaCloud MySQL (default is 'root')",
default="root",
)
ALIBABACLOUD_MYSQL_PASSWORD: str = Field(
description="Password for authenticating with AlibabaCloud MySQL (default is an empty string)",
default="",
)
ALIBABACLOUD_MYSQL_DATABASE: str = Field(
description="Name of the AlibabaCloud MySQL database to connect to (default is 'dify')",
default="dify",
)
ALIBABACLOUD_MYSQL_MAX_CONNECTION: PositiveInt = Field(
description="Maximum number of connections in the connection pool",
default=5,
)
ALIBABACLOUD_MYSQL_CHARSET: str = Field(
description="Character set for AlibabaCloud MySQL connection (default is 'utf8mb4')",
default="utf8mb4",
)
ALIBABACLOUD_MYSQL_DISTANCE_FUNCTION: str = Field(
description="Distance function used for vector similarity search in AlibabaCloud MySQL "
"(e.g., 'cosine', 'euclidean')",
default="cosine",
)
ALIBABACLOUD_MYSQL_HNSW_M: PositiveInt = Field(
description="Maximum number of connections per layer for HNSW vector index (default is 6, range: 3-200)",
default=6,
)

View File

@@ -41,13 +41,3 @@ class BaiduVectorDBConfig(BaseSettings):
description="Number of replicas for the Baidu Vector Database (default is 3)",
default=3,
)
BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER: str = Field(
description="Analyzer type for inverted index in Baidu Vector Database (default is DEFAULT_ANALYZER)",
default="DEFAULT_ANALYZER",
)
BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE: str = Field(
description="Parser mode for inverted index in Baidu Vector Database (default is COARSE_MODE)",
default="COARSE_MODE",
)

View File

@@ -37,15 +37,3 @@ class OceanBaseVectorConfig(BaseSettings):
"with older versions",
default=False,
)
OCEANBASE_FULLTEXT_PARSER: str | None = Field(
description=(
"Fulltext parser to use for text indexing. "
"Built-in options: 'ngram' (N-gram tokenizer for English/numbers), "
"'beng' (Basic English tokenizer), 'space' (Space-based tokenizer), "
"'ngram2' (Improved N-gram tokenizer), 'ik' (Chinese tokenizer). "
"External plugins (require installation): 'japanese_ftparser' (Japanese tokenizer), "
"'thai_ftparser' (Thai tokenizer). Default is 'ik'"
),
default="ik",
)

View File

@@ -1,24 +1,23 @@
from enum import StrEnum
from enum import Enum
from typing import Literal
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
class AuthMethod(StrEnum):
"""
Authentication method for OpenSearch
"""
BASIC = "basic"
AWS_MANAGED_IAM = "aws_managed_iam"
class OpenSearchConfig(BaseSettings):
"""
Configuration settings for OpenSearch
"""
class AuthMethod(Enum):
"""
Authentication method for OpenSearch
"""
BASIC = "basic"
AWS_MANAGED_IAM = "aws_managed_iam"
OPENSEARCH_HOST: str | None = Field(
description="Hostname or IP address of the OpenSearch server (e.g., 'localhost' or 'opensearch.example.com')",
default=None,

View File

@@ -22,11 +22,6 @@ class WeaviateConfig(BaseSettings):
default=True,
)
WEAVIATE_GRPC_ENDPOINT: str | None = Field(
description="URL of the Weaviate gRPC server (e.g., 'grpc://localhost:50051' or 'grpcs://weaviate.example.com:443')",
default=None,
)
WEAVIATE_BATCH_SIZE: PositiveInt = Field(
description="Number of objects to be processed in a single batch operation (default is 100)",
default=100,

View File

@@ -5,7 +5,7 @@ import logging
import os
import time
import httpx
import requests
logger = logging.getLogger(__name__)
@@ -30,10 +30,10 @@ class NacosHttpClient:
params = {}
try:
self._inject_auth_info(headers, params)
response = httpx.request(method, url="http://" + self.server + url, headers=headers, params=params)
response = requests.request(method, url="http://" + self.server + url, headers=headers, params=params)
response.raise_for_status()
return response.text
except httpx.RequestError as e:
except requests.RequestException as e:
return f"Request to Nacos failed: {e}"
def _inject_auth_info(self, headers: dict[str, str], params: dict[str, str], module: str = "config") -> None:
@@ -78,7 +78,7 @@ class NacosHttpClient:
params = {"username": self.username, "password": self.password}
url = "http://" + self.server + "/nacos/v1/auth/login"
try:
resp = httpx.request("POST", url, headers=None, params=params)
resp = requests.request("POST", url, headers=None, params=params)
resp.raise_for_status()
response_data = resp.json()
self.token = response_data.get("accessToken")

View File

@@ -1,5 +1,4 @@
from configs import dify_config
from libs.collection_utils import convert_to_lower_and_upper_set
HIDDEN_VALUE = "[__HIDDEN__]"
UNKNOWN_VALUE = "[__UNKNOWN__]"
@@ -7,39 +6,24 @@ UUID_NIL = "00000000-0000-0000-0000-000000000000"
DEFAULT_FILE_NUMBER_LIMITS = 3
IMAGE_EXTENSIONS = convert_to_lower_and_upper_set({"jpg", "jpeg", "png", "webp", "gif", "svg"})
IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"]
IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS])
VIDEO_EXTENSIONS = convert_to_lower_and_upper_set({"mp4", "mov", "mpeg", "webm"})
VIDEO_EXTENSIONS = ["mp4", "mov", "mpeg", "webm"]
VIDEO_EXTENSIONS.extend([ext.upper() for ext in VIDEO_EXTENSIONS])
AUDIO_EXTENSIONS = convert_to_lower_and_upper_set({"mp3", "m4a", "wav", "amr", "mpga"})
AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "amr", "mpga"]
AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS])
_doc_extensions: set[str]
_doc_extensions: list[str]
if dify_config.ETL_TYPE == "Unstructured":
_doc_extensions = {
"txt",
"markdown",
"md",
"mdx",
"pdf",
"html",
"htm",
"xlsx",
"xls",
"vtt",
"properties",
"doc",
"docx",
"csv",
"eml",
"msg",
"pptx",
"xml",
"epub",
}
_doc_extensions = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "vtt", "properties"]
_doc_extensions.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
if dify_config.UNSTRUCTURED_API_URL:
_doc_extensions.add("ppt")
_doc_extensions.append("ppt")
else:
_doc_extensions = {
_doc_extensions = [
"txt",
"markdown",
"md",
@@ -53,18 +37,5 @@ else:
"csv",
"vtt",
"properties",
}
DOCUMENT_EXTENSIONS: set[str] = convert_to_lower_and_upper_set(_doc_extensions)
# console
COOKIE_NAME_ACCESS_TOKEN = "access_token"
COOKIE_NAME_REFRESH_TOKEN = "refresh_token"
COOKIE_NAME_CSRF_TOKEN = "csrf_token"
# webapp
COOKIE_NAME_WEBAPP_ACCESS_TOKEN = "webapp_access_token"
COOKIE_NAME_PASSPORT = "passport"
HEADER_NAME_CSRF_TOKEN = "X-CSRF-Token"
HEADER_NAME_APP_CODE = "X-App-Code"
HEADER_NAME_PASSPORT = "X-App-Passport"
]
DOCUMENT_EXTENSIONS = _doc_extensions + [ext.upper() for ext in _doc_extensions]

View File

@@ -31,9 +31,3 @@ def supported_language(lang):
error = f"{lang} is not a valid language."
raise ValueError(error)
def get_valid_language(lang: str | None) -> str:
if lang and lang in languages:
return lang
return languages[0]

View File

@@ -25,12 +25,6 @@ class UnsupportedFileTypeError(BaseHTTPException):
code = 415
class BlockedFileExtensionError(BaseHTTPException):
error_code = "file_extension_blocked"
description = "The file extension is blocked for security reasons."
code = 400
class TooManyFilesError(BaseHTTPException):
error_code = "too_many_files"
description = "Only one file is allowed."

View File

@@ -24,7 +24,7 @@ except ImportError:
)
else:
warnings.warn("To use python-magic guess MIMETYPE, you need to install `libmagic`", stacklevel=2)
magic = None # type: ignore[assignment]
magic = None # type: ignore
from pydantic import BaseModel

View File

@@ -1,10 +1,31 @@
from importlib import import_module
from flask import Blueprint
from flask_restx import Namespace
from libs.external_api import ExternalApi
from .app.app_import import AppImportApi, AppImportCheckDependenciesApi, AppImportConfirmApi
from .explore.audio import ChatAudioApi, ChatTextApi
from .explore.completion import ChatApi, ChatStopApi, CompletionApi, CompletionStopApi
from .explore.conversation import (
ConversationApi,
ConversationListApi,
ConversationPinApi,
ConversationRenameApi,
ConversationUnPinApi,
)
from .explore.message import (
MessageFeedbackApi,
MessageListApi,
MessageMoreLikeThisApi,
MessageSuggestedQuestionApi,
)
from .explore.workflow import (
InstalledAppWorkflowRunApi,
InstalledAppWorkflowTaskStopApi,
)
from .files import FileApi, FilePreviewApi, FileSupportTypeApi
from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi
bp = Blueprint("console", __name__, url_prefix="/console/api")
api = ExternalApi(
@@ -14,23 +35,23 @@ api = ExternalApi(
description="Console management APIs for app configuration, monitoring, and administration",
)
# Create namespace
console_ns = Namespace("console", description="Console management API operations", path="/")
RESOURCE_MODULES = (
"controllers.console.app.app_import",
"controllers.console.explore.audio",
"controllers.console.explore.completion",
"controllers.console.explore.conversation",
"controllers.console.explore.message",
"controllers.console.explore.workflow",
"controllers.console.files",
"controllers.console.remote_files",
)
# File
api.add_resource(FileApi, "/files/upload")
api.add_resource(FilePreviewApi, "/files/<uuid:file_id>/preview")
api.add_resource(FileSupportTypeApi, "/files/support-type")
for module_name in RESOURCE_MODULES:
import_module(module_name)
# Remote files
api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>")
api.add_resource(RemoteFileUploadApi, "/remote-files/upload")
# Import App
api.add_resource(AppImportApi, "/apps/imports")
api.add_resource(AppImportConfirmApi, "/apps/imports/<string:import_id>/confirm")
api.add_resource(AppImportCheckDependenciesApi, "/apps/imports/<string:app_id>/check-dependencies")
# Ensure resource modules are imported so route decorators are evaluated.
# Import other controllers
from . import (
admin,
@@ -129,6 +150,77 @@ from .workspace import (
workspace,
)
# Explore Audio
api.add_resource(ChatAudioApi, "/installed-apps/<uuid:installed_app_id>/audio-to-text", endpoint="installed_app_audio")
api.add_resource(ChatTextApi, "/installed-apps/<uuid:installed_app_id>/text-to-audio", endpoint="installed_app_text")
# Explore Completion
api.add_resource(
CompletionApi, "/installed-apps/<uuid:installed_app_id>/completion-messages", endpoint="installed_app_completion"
)
api.add_resource(
CompletionStopApi,
"/installed-apps/<uuid:installed_app_id>/completion-messages/<string:task_id>/stop",
endpoint="installed_app_stop_completion",
)
api.add_resource(
ChatApi, "/installed-apps/<uuid:installed_app_id>/chat-messages", endpoint="installed_app_chat_completion"
)
api.add_resource(
ChatStopApi,
"/installed-apps/<uuid:installed_app_id>/chat-messages/<string:task_id>/stop",
endpoint="installed_app_stop_chat_completion",
)
# Explore Conversation
api.add_resource(
ConversationRenameApi,
"/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/name",
endpoint="installed_app_conversation_rename",
)
api.add_resource(
ConversationListApi, "/installed-apps/<uuid:installed_app_id>/conversations", endpoint="installed_app_conversations"
)
api.add_resource(
ConversationApi,
"/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>",
endpoint="installed_app_conversation",
)
api.add_resource(
ConversationPinApi,
"/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/pin",
endpoint="installed_app_conversation_pin",
)
api.add_resource(
ConversationUnPinApi,
"/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/unpin",
endpoint="installed_app_conversation_unpin",
)
# Explore Message
api.add_resource(MessageListApi, "/installed-apps/<uuid:installed_app_id>/messages", endpoint="installed_app_messages")
api.add_resource(
MessageFeedbackApi,
"/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/feedbacks",
endpoint="installed_app_message_feedback",
)
api.add_resource(
MessageMoreLikeThisApi,
"/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/more-like-this",
endpoint="installed_app_more_like_this",
)
api.add_resource(
MessageSuggestedQuestionApi,
"/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/suggested-questions",
endpoint="installed_app_suggested_question",
)
# Explore Workflow
api.add_resource(InstalledAppWorkflowRunApi, "/installed-apps/<uuid:installed_app_id>/workflows/run")
api.add_resource(
InstalledAppWorkflowTaskStopApi, "/installed-apps/<uuid:installed_app_id>/workflows/tasks/<string:task_id>/stop"
)
api.add_namespace(console_ns)
__all__ = [

View File

@@ -15,7 +15,6 @@ from constants.languages import supported_language
from controllers.console import api, console_ns
from controllers.console.wraps import only_edition_cloud
from extensions.ext_database import db
from libs.token import extract_access_token
from models.model import App, InstalledApp, RecommendedApp
@@ -25,9 +24,19 @@ def admin_required(view: Callable[P, R]):
if not dify_config.ADMIN_API_KEY:
raise Unauthorized("API key is invalid.")
auth_token = extract_access_token(request)
if not auth_token:
auth_header = request.headers.get("Authorization")
if auth_header is None:
raise Unauthorized("Authorization header is missing.")
if " " not in auth_header:
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
auth_scheme, auth_token = auth_header.split(None, 1)
auth_scheme = auth_scheme.lower()
if auth_scheme != "bearer":
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
if auth_token != dify_config.ADMIN_API_KEY:
raise Unauthorized("API key is invalid.")
@@ -61,17 +70,15 @@ class InsertExploreAppListApi(Resource):
@only_edition_cloud
@admin_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("app_id", type=str, required=True, nullable=False, location="json")
.add_argument("desc", type=str, location="json")
.add_argument("copyright", type=str, location="json")
.add_argument("privacy_policy", type=str, location="json")
.add_argument("custom_disclaimer", type=str, location="json")
.add_argument("language", type=supported_language, required=True, nullable=False, location="json")
.add_argument("category", type=str, required=True, nullable=False, location="json")
.add_argument("position", type=int, required=True, nullable=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("app_id", type=str, required=True, nullable=False, location="json")
parser.add_argument("desc", type=str, location="json")
parser.add_argument("copyright", type=str, location="json")
parser.add_argument("privacy_policy", type=str, location="json")
parser.add_argument("custom_disclaimer", type=str, location="json")
parser.add_argument("language", type=supported_language, required=True, nullable=False, location="json")
parser.add_argument("category", type=str, required=True, nullable=False, location="json")
parser.add_argument("position", type=int, required=True, nullable=False, location="json")
args = parser.parse_args()
app = db.session.execute(select(App).where(App.id == args["app_id"])).scalar_one_or_none()
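Both parser styles shown in this hunk are behaviorally identical: `RequestParser.add_argument` returns the parser itself, so the fluent chain and the statement-per-argument form build the same argument list. A minimal illustration:

```python
from flask_restx import reqparse

# Fluent style: add_argument returns the parser, so calls can chain.
chained = reqparse.RequestParser().add_argument(
    "app_id", type=str, required=True, nullable=False, location="json"
)

# Statement style: one add_argument call per line.
stepwise = reqparse.RequestParser()
stepwise.add_argument("app_id", type=str, required=True, nullable=False, location="json")

# Both parsers validate an incoming JSON payload identically.
```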

View File

@@ -1,4 +1,5 @@
import flask_restx
from flask_login import current_user
from flask_restx import Resource, fields, marshal_with
from flask_restx._http import HTTPStatus
from sqlalchemy import select
@@ -7,12 +8,12 @@ from werkzeug.exceptions import Forbidden
from extensions.ext_database import db
from libs.helper import TimestampField
from libs.login import current_account_with_tenant, login_required
from libs.login import login_required
from models.dataset import Dataset
from models.model import ApiToken, App
from . import api, console_ns
from .wraps import account_initialization_required, edit_permission_required, setup_required
from .wraps import account_initialization_required, setup_required
api_key_fields = {
"id": fields.String,
@@ -56,9 +57,7 @@ class BaseApiKeyListResource(Resource):
def get(self, resource_id):
assert self.resource_id_field is not None, "resource_id_field must be set"
resource_id = str(resource_id)
_, current_tenant_id = current_account_with_tenant()
_get_resource(resource_id, current_tenant_id, self.resource_model)
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
keys = db.session.scalars(
select(ApiToken).where(
ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id
@@ -67,12 +66,13 @@ class BaseApiKeyListResource(Resource):
return {"items": keys}
@marshal_with(api_key_fields)
@edit_permission_required
def post(self, resource_id):
assert self.resource_id_field is not None, "resource_id_field must be set"
resource_id = str(resource_id)
_, current_tenant_id = current_account_with_tenant()
_get_resource(resource_id, current_tenant_id, self.resource_model)
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
if not current_user.is_editor:
raise Forbidden()
current_key_count = (
db.session.query(ApiToken)
.where(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id)
@@ -89,7 +89,7 @@ class BaseApiKeyListResource(Resource):
key = ApiToken.generate_api_key(self.token_prefix or "", 24)
api_token = ApiToken()
setattr(api_token, self.resource_id_field, resource_id)
api_token.tenant_id = current_tenant_id
api_token.tenant_id = current_user.current_tenant_id
api_token.token = key
api_token.type = self.resource_type
db.session.add(api_token)
@@ -108,8 +108,7 @@ class BaseApiKeyResource(Resource):
assert self.resource_id_field is not None, "resource_id_field must be set"
resource_id = str(resource_id)
api_key_id = str(api_key_id)
current_user, current_tenant_id = current_account_with_tenant()
_get_resource(resource_id, current_tenant_id, self.resource_model)
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
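This file swaps `current_account_with_tenant()` for direct `current_user.current_tenant_id` access. Judging from call sites such as `current_user, current_tenant_id = current_account_with_tenant()`, the helper returns the authenticated account together with its active tenant id; a hypothetical sketch of its shape — the real helper in `libs.login` may differ:

```python
from flask_login import current_user
from werkzeug.exceptions import Unauthorized

from models import Account


def current_account_with_tenant() -> tuple[Account, str]:
    """Sketch: return (account, tenant_id), failing fast when either is missing."""
    if not isinstance(current_user, Account):
        raise Unauthorized("Console endpoints require an account session.")
    if current_user.current_tenant_id is None:
        raise Unauthorized("Account has no active workspace.")
    return current_user, current_user.current_tenant_id
```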
@@ -153,6 +152,11 @@ class AppApiKeyListResource(BaseApiKeyListResource):
"""Create a new API key for an app"""
return super().post(resource_id)
def after_request(self, resp):
resp.headers["Access-Control-Allow-Origin"] = "*"
resp.headers["Access-Control-Allow-Credentials"] = "true"
return resp
resource_type = "app"
resource_model = App
resource_id_field = "app_id"
@@ -169,6 +173,11 @@ class AppApiKeyResource(BaseApiKeyResource):
"""Delete an API key for an app"""
return super().delete(resource_id, api_key_id)
def after_request(self, resp):
resp.headers["Access-Control-Allow-Origin"] = "*"
resp.headers["Access-Control-Allow-Credentials"] = "true"
return resp
resource_type = "app"
resource_model = App
resource_id_field = "app_id"
@@ -193,6 +202,11 @@ class DatasetApiKeyListResource(BaseApiKeyListResource):
"""Create a new API key for a dataset"""
return super().post(resource_id)
def after_request(self, resp):
resp.headers["Access-Control-Allow-Origin"] = "*"
resp.headers["Access-Control-Allow-Credentials"] = "true"
return resp
resource_type = "dataset"
resource_model = Dataset
resource_id_field = "dataset_id"
@@ -209,6 +223,11 @@ class DatasetApiKeyResource(BaseApiKeyResource):
"""Delete an API key for a dataset"""
return super().delete(resource_id, api_key_id)
def after_request(self, resp):
resp.headers["Access-Control-Allow-Origin"] = "*"
resp.headers["Access-Control-Allow-Credentials"] = "true"
return resp
resource_type = "dataset"
resource_model = Dataset
resource_id_field = "dataset_id"

View File

@@ -25,13 +25,11 @@ class AdvancedPromptTemplateList(Resource):
@login_required
@account_initialization_required
def get(self):
parser = (
reqparse.RequestParser()
.add_argument("app_mode", type=str, required=True, location="args")
.add_argument("model_mode", type=str, required=True, location="args")
.add_argument("has_context", type=str, required=False, default="true", location="args")
.add_argument("model_name", type=str, required=True, location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("app_mode", type=str, required=True, location="args")
parser.add_argument("model_mode", type=str, required=True, location="args")
parser.add_argument("has_context", type=str, required=False, default="true", location="args")
parser.add_argument("model_name", type=str, required=True, location="args")
args = parser.parse_args()
return AdvancedPromptTemplateService.get_prompt(args)

View File

@@ -27,11 +27,9 @@ class AgentLogApi(Resource):
@get_app_model(mode=[AppMode.AGENT_CHAT])
def get(self, app_model):
"""Get agent logs"""
parser = (
reqparse.RequestParser()
.add_argument("message_id", type=uuid_value, required=True, location="args")
.add_argument("conversation_id", type=uuid_value, required=True, location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("message_id", type=uuid_value, required=True, location="args")
parser.add_argument("conversation_id", type=uuid_value, required=True, location="args")
args = parser.parse_args()

View File

@@ -1,14 +1,15 @@
from typing import Literal
from flask import request
from flask_login import current_user
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
from werkzeug.exceptions import Forbidden
from controllers.common.errors import NoFileUploadedError, TooManyFilesError
from controllers.console import api, console_ns
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_resource_check,
edit_permission_required,
setup_required,
)
from extensions.ext_redis import redis_client
@@ -16,7 +17,6 @@ from fields.annotation_fields import (
annotation_fields,
annotation_hit_history_fields,
)
from libs.helper import uuid_value
from libs.login import login_required
from services.annotation_service import AppAnnotationService
@@ -42,15 +42,15 @@ class AnnotationReplyActionApi(Resource):
@login_required
@account_initialization_required
@cloud_edition_billing_resource_check("annotation")
@edit_permission_required
def post(self, app_id, action: Literal["enable", "disable"]):
if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
parser = (
reqparse.RequestParser()
.add_argument("score_threshold", required=True, type=float, location="json")
.add_argument("embedding_provider_name", required=True, type=str, location="json")
.add_argument("embedding_model_name", required=True, type=str, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("score_threshold", required=True, type=float, location="json")
parser.add_argument("embedding_provider_name", required=True, type=str, location="json")
parser.add_argument("embedding_model_name", required=True, type=str, location="json")
args = parser.parse_args()
if action == "enable":
result = AppAnnotationService.enable_app_annotation(args, app_id)
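Throughout this file, the inline editor check (`if not current_user.is_editor: raise Forbidden()`) trades places with the `edit_permission_required` decorator imported from `controllers.console.wraps`. A hypothetical sketch of what such a decorator can look like, inferred from the inline checks it corresponds to — the real implementation may differ:

```python
from functools import wraps

from flask_login import current_user
from werkzeug.exceptions import Forbidden


def edit_permission_required(view):
    """Sketch: reject requests from accounts without editor rights."""

    @wraps(view)
    def decorated(*args, **kwargs):
        if not current_user.is_editor:
            raise Forbidden()
        return view(*args, **kwargs)

    return decorated
```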
@@ -69,8 +69,10 @@ class AppAnnotationSettingDetailApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
def get(self, app_id):
if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
result = AppAnnotationService.get_app_annotation_setting_by_app_id(app_id)
return result, 200
@@ -96,12 +98,15 @@ class AppAnnotationSettingUpdateApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
def post(self, app_id, annotation_setting_id):
if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
annotation_setting_id = str(annotation_setting_id)
parser = reqparse.RequestParser().add_argument("score_threshold", required=True, type=float, location="json")
parser = reqparse.RequestParser()
parser.add_argument("score_threshold", required=True, type=float, location="json")
args = parser.parse_args()
result = AppAnnotationService.update_app_annotation_setting(app_id, annotation_setting_id, args)
@@ -119,8 +124,10 @@ class AnnotationReplyActionStatusApi(Resource):
@login_required
@account_initialization_required
@cloud_edition_billing_resource_check("annotation")
@edit_permission_required
def get(self, app_id, job_id, action):
if not current_user.is_editor:
raise Forbidden()
job_id = str(job_id)
app_annotation_job_key = f"{action}_app_annotation_job_{str(job_id)}"
cache_result = redis_client.get(app_annotation_job_key)
@@ -152,8 +159,10 @@ class AnnotationApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
def get(self, app_id):
if not current_user.is_editor:
raise Forbidden()
page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int)
keyword = request.args.get("keyword", default="", type=str)
@@ -176,10 +185,8 @@
api.model(
"CreateAnnotationRequest",
{
"message_id": fields.String(description="Message ID (optional)"),
"question": fields.String(description="Question text (required when message_id not provided)"),
"answer": fields.String(description="Answer text (use 'answer' or 'content')"),
"content": fields.String(description="Content text (use 'answer' or 'content')"),
"question": fields.String(required=True, description="Question text"),
"answer": fields.String(required=True, description="Answer text"),
"annotation_reply": fields.Raw(description="Annotation reply data"),
},
)
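A hedged note on the `api.model` change above: in flask-restx, `required=True` on model fields primarily shapes the generated Swagger document; unless a handler opts in via `@api.expect(model, validate=True)` (or the `RESTX_VALIDATE` config), runtime enforcement comes from the `reqparse` parser in the method body, as the handler below does. A minimal sketch of the two layers:

```python
from flask import Flask
from flask_restx import Api, fields, reqparse

app = Flask(__name__)
api = Api(app)

# Documentation layer: marks the fields required in Swagger when the
# model is referenced through @api.expect(...).
create_annotation_request = api.model(
    "CreateAnnotationRequest",
    {
        "question": fields.String(required=True, description="Question text"),
        "answer": fields.String(required=True, description="Answer text"),
    },
)

# Enforcement layer: parse_args() aborts with HTTP 400 when a required
# JSON field is missing from the request body.
parser = reqparse.RequestParser()
parser.add_argument("question", required=True, type=str, location="json")
parser.add_argument("answer", required=True, type=str, location="json")
```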
@@ -191,26 +198,25 @@ class AnnotationApi(Resource):
@account_initialization_required
@cloud_edition_billing_resource_check("annotation")
@marshal_with(annotation_fields)
@edit_permission_required
def post(self, app_id):
if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
parser = (
reqparse.RequestParser()
.add_argument("message_id", required=False, type=uuid_value, location="json")
.add_argument("question", required=False, type=str, location="json")
.add_argument("answer", required=False, type=str, location="json")
.add_argument("content", required=False, type=str, location="json")
.add_argument("annotation_reply", required=False, type=dict, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("question", required=True, type=str, location="json")
parser.add_argument("answer", required=True, type=str, location="json")
args = parser.parse_args()
annotation = AppAnnotationService.up_insert_app_annotation_from_message(args, app_id)
annotation = AppAnnotationService.insert_app_annotation_directly(args, app_id)
return annotation
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
def delete(self, app_id):
if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
# Use request.args.getlist to get annotation_ids array directly
@@ -243,8 +249,10 @@ class AnnotationExportApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
def get(self, app_id):
if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
annotation_list = AppAnnotationService.export_annotation_list_by_app_id(app_id)
response = {"data": marshal(annotation_list, annotation_fields)}
@@ -263,16 +271,16 @@ class AnnotationUpdateDeleteApi(Resource):
@login_required
@account_initialization_required
@cloud_edition_billing_resource_check("annotation")
@edit_permission_required
@marshal_with(annotation_fields)
def post(self, app_id, annotation_id):
if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
annotation_id = str(annotation_id)
parser = (
reqparse.RequestParser()
.add_argument("question", required=True, type=str, location="json")
.add_argument("answer", required=True, type=str, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("question", required=True, type=str, location="json")
parser.add_argument("answer", required=True, type=str, location="json")
args = parser.parse_args()
annotation = AppAnnotationService.update_app_annotation_directly(args, app_id, annotation_id)
return annotation
@@ -280,8 +288,10 @@ class AnnotationUpdateDeleteApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
def delete(self, app_id, annotation_id):
if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
annotation_id = str(annotation_id)
AppAnnotationService.delete_app_annotation(app_id, annotation_id)
@@ -300,8 +310,10 @@ class AnnotationBatchImportApi(Resource):
@login_required
@account_initialization_required
@cloud_edition_billing_resource_check("annotation")
@edit_permission_required
def post(self, app_id):
if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
# check file
if "file" not in request.files:
@@ -329,8 +341,10 @@ class AnnotationBatchImportStatusApi(Resource):
@login_required
@account_initialization_required
@cloud_edition_billing_resource_check("annotation")
@edit_permission_required
def get(self, app_id, job_id):
if not current_user.is_editor:
raise Forbidden()
job_id = str(job_id)
indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}"
cache_result = redis_client.get(indexing_cache_key)
@@ -362,8 +376,10 @@ class AnnotationHitHistoryListApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
def get(self, app_id, annotation_id):
if not current_user.is_editor:
raise Forbidden()
page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int)
app_id = str(app_id)

View File

@@ -1,5 +1,7 @@
import uuid
from typing import cast
from flask_login import current_user
from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse
from sqlalchemy import select
from sqlalchemy.orm import Session
@@ -10,16 +12,14 @@ from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_resource_check,
edit_permission_required,
enterprise_license_required,
setup_required,
)
from core.ops.ops_trace_manager import OpsTraceManager
from extensions.ext_database import db
from fields.app_fields import app_detail_fields, app_detail_fields_with_site, app_pagination_fields
from libs.login import current_account_with_tenant, login_required
from libs.validators import validate_description_length
from models import App
from libs.login import login_required
from models import Account, App
from services.app_dsl_service import AppDslService, ImportMode
from services.app_service import AppService
from services.enterprise.enterprise_service import EnterpriseService
@@ -28,6 +28,12 @@ from services.feature_service import FeatureService
ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "completion"]
def _validate_description_length(description):
if description and len(description) > 400:
raise ValueError("Description cannot exceed 400 characters.")
return description
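`_validate_description_length` works because reqparse treats the `type=` callable as a validator: each supplied value is passed through it, and a raised `ValueError` is converted into an HTTP 400 response carrying the message. A small usage sketch:

```python
from flask_restx import reqparse


def _validate_description_length(description):
    if description and len(description) > 400:
        raise ValueError("Description cannot exceed 400 characters.")
    return description


parser = reqparse.RequestParser()
# On a value longer than 400 characters, parse_args() aborts the request
# with HTTP 400 and surfaces the ValueError message to the client.
parser.add_argument("description", type=_validate_description_length, location="json")
```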
@console_ns.route("/apps")
class AppListApi(Resource):
@api.doc("list_apps")
@@ -55,7 +61,6 @@ class AppListApi(Resource):
@enterprise_license_required
def get(self):
"""Get app list"""
current_user, current_tenant_id = current_account_with_tenant()
def uuid_list(value):
try:
@@ -63,36 +68,34 @@
except ValueError:
abort(400, message="Invalid UUID format in tag_ids.")
parser = (
reqparse.RequestParser()
.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
.add_argument(
"mode",
type=str,
choices=[
"completion",
"chat",
"advanced-chat",
"workflow",
"agent-chat",
"channel",
"all",
],
default="all",
location="args",
required=False,
)
.add_argument("name", type=str, location="args", required=False)
.add_argument("tag_ids", type=uuid_list, location="args", required=False)
.add_argument("is_created_by_me", type=inputs.boolean, location="args", required=False)
parser = reqparse.RequestParser()
parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
parser.add_argument(
"mode",
type=str,
choices=[
"completion",
"chat",
"advanced-chat",
"workflow",
"agent-chat",
"channel",
"all",
],
default="all",
location="args",
required=False,
)
parser.add_argument("name", type=str, location="args", required=False)
parser.add_argument("tag_ids", type=uuid_list, location="args", required=False)
parser.add_argument("is_created_by_me", type=inputs.boolean, location="args", required=False)
args = parser.parse_args()
# get app list
app_service = AppService()
app_pagination = app_service.get_paginate_apps(current_user.id, current_tenant_id, args)
app_pagination = app_service.get_paginate_apps(current_user.id, current_user.current_tenant_id, args)
if not app_pagination:
return {"data": [], "total": 0, "page": 1, "limit": 20, "has_more": False}
@@ -131,26 +134,30 @@ class AppListApi(Resource):
@account_initialization_required
@marshal_with(app_detail_fields)
@cloud_edition_billing_resource_check("apps")
@edit_permission_required
def post(self):
"""Create app"""
current_user, current_tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("name", type=str, required=True, location="json")
.add_argument("description", type=validate_description_length, location="json")
.add_argument("mode", type=str, choices=ALLOW_CREATE_APP_MODES, location="json")
.add_argument("icon_type", type=str, location="json")
.add_argument("icon", type=str, location="json")
.add_argument("icon_background", type=str, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, location="json")
parser.add_argument("description", type=_validate_description_length, location="json")
parser.add_argument("mode", type=str, choices=ALLOW_CREATE_APP_MODES, location="json")
parser.add_argument("icon_type", type=str, location="json")
parser.add_argument("icon", type=str, location="json")
parser.add_argument("icon_background", type=str, location="json")
args = parser.parse_args()
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
if "mode" not in args or args["mode"] is None:
raise BadRequest("mode is required")
app_service = AppService()
app = app_service.create_app(current_tenant_id, args, current_user)
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
if current_user.current_tenant_id is None:
raise ValueError("current_user.current_tenant_id cannot be None")
app = app_service.create_app(current_user.current_tenant_id, args, current_user)
return app, 201
@@ -203,20 +210,21 @@ class AppApi(Resource):
@login_required
@account_initialization_required
@get_app_model
@edit_permission_required
@marshal_with(app_detail_fields_with_site)
def put(self, app_model):
"""Update app"""
parser = (
reqparse.RequestParser()
.add_argument("name", type=str, required=True, nullable=False, location="json")
.add_argument("description", type=validate_description_length, location="json")
.add_argument("icon_type", type=str, location="json")
.add_argument("icon", type=str, location="json")
.add_argument("icon_background", type=str, location="json")
.add_argument("use_icon_as_answer_icon", type=bool, location="json")
.add_argument("max_active_requests", type=int, location="json")
)
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, nullable=False, location="json")
parser.add_argument("description", type=_validate_description_length, location="json")
parser.add_argument("icon_type", type=str, location="json")
parser.add_argument("icon", type=str, location="json")
parser.add_argument("icon_background", type=str, location="json")
parser.add_argument("use_icon_as_answer_icon", type=bool, location="json")
parser.add_argument("max_active_requests", type=int, location="json")
args = parser.parse_args()
app_service = AppService()
@@ -245,9 +253,12 @@ class AppApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
def delete(self, app_model):
"""Delete app"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
app_service = AppService()
app_service.delete_app(app_model)
@@ -277,29 +288,28 @@ class AppCopyApi(Resource):
@login_required
@account_initialization_required
@get_app_model
@edit_permission_required
@marshal_with(app_detail_fields_with_site)
def post(self, app_model):
"""Copy app"""
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
if not current_user.is_editor:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("name", type=str, location="json")
.add_argument("description", type=validate_description_length, location="json")
.add_argument("icon_type", type=str, location="json")
.add_argument("icon", type=str, location="json")
.add_argument("icon_background", type=str, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, location="json")
parser.add_argument("description", type=_validate_description_length, location="json")
parser.add_argument("icon_type", type=str, location="json")
parser.add_argument("icon", type=str, location="json")
parser.add_argument("icon_background", type=str, location="json")
args = parser.parse_args()
with Session(db.engine) as session:
import_service = AppDslService(session)
yaml_content = import_service.export_dsl(app_model=app_model, include_secret=True)
account = cast(Account, current_user)
result = import_service.import_app(
account=current_user,
import_mode=ImportMode.YAML_CONTENT,
account=account,
import_mode=ImportMode.YAML_CONTENT.value,
yaml_content=yaml_content,
name=args.get("name"),
description=args.get("description"),
@@ -335,15 +345,16 @@ class AppExportApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
def get(self, app_model):
"""Export app"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
# Add include_secret params
parser = (
reqparse.RequestParser()
.add_argument("include_secret", type=inputs.boolean, default=False, location="args")
.add_argument("workflow_id", type=str, location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("include_secret", type=inputs.boolean, default=False, location="args")
parser.add_argument("workflow_id", type=str, location="args")
args = parser.parse_args()
return {
@@ -365,9 +376,13 @@ class AppNameApi(Resource):
@account_initialization_required
@get_app_model
@marshal_with(app_detail_fields)
@edit_permission_required
def post(self, app_model):
parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json")
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, location="json")
args = parser.parse_args()
app_service = AppService()
@@ -398,13 +413,14 @@ class AppIconApi(Resource):
@account_initialization_required
@get_app_model
@marshal_with(app_detail_fields)
@edit_permission_required
def post(self, app_model):
parser = (
reqparse.RequestParser()
.add_argument("icon", type=str, location="json")
.add_argument("icon_background", type=str, location="json")
)
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("icon", type=str, location="json")
parser.add_argument("icon_background", type=str, location="json")
args = parser.parse_args()
app_service = AppService()
@@ -430,9 +446,13 @@ class AppSiteStatus(Resource):
@account_initialization_required
@get_app_model
@marshal_with(app_detail_fields)
@edit_permission_required
def post(self, app_model):
parser = reqparse.RequestParser().add_argument("enable_site", type=bool, required=True, location="json")
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("enable_site", type=bool, required=True, location="json")
args = parser.parse_args()
app_service = AppService()
@@ -460,11 +480,11 @@ class AppApiStatus(Resource):
@marshal_with(app_detail_fields)
def post(self, app_model):
# The role of the current user in the ta table must be admin or owner
current_user, _ = current_account_with_tenant()
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser().add_argument("enable_api", type=bool, required=True, location="json")
parser = reqparse.RequestParser()
parser.add_argument("enable_api", type=bool, required=True, location="json")
args = parser.parse_args()
app_service = AppService()
@@ -505,14 +525,13 @@ class AppTraceApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
def post(self, app_id):
# add app trace
parser = (
reqparse.RequestParser()
.add_argument("enabled", type=bool, required=True, location="json")
.add_argument("tracing_provider", type=str, required=True, location="json")
)
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("enabled", type=bool, required=True, location="json")
parser.add_argument("tracing_provider", type=str, required=True, location="json")
args = parser.parse_args()
OpsTraceManager.update_app_tracing_config(

View File

@@ -1,54 +1,54 @@
from typing import cast
from flask_login import current_user
from flask_restx import Resource, marshal_with, reqparse
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_resource_check,
edit_permission_required,
setup_required,
)
from extensions.ext_database import db
from fields.app_fields import app_import_check_dependencies_fields, app_import_fields
from libs.login import current_account_with_tenant, login_required
from libs.login import login_required
from models import Account
from models.model import App
from services.app_dsl_service import AppDslService, ImportStatus
from services.enterprise.enterprise_service import EnterpriseService
from services.feature_service import FeatureService
from .. import console_ns
@console_ns.route("/apps/imports")
class AppImportApi(Resource):
@setup_required
@login_required
@account_initialization_required
@marshal_with(app_import_fields)
@cloud_edition_billing_resource_check("apps")
@edit_permission_required
def post(self):
# Check user role first
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("mode", type=str, required=True, location="json")
.add_argument("yaml_content", type=str, location="json")
.add_argument("yaml_url", type=str, location="json")
.add_argument("name", type=str, location="json")
.add_argument("description", type=str, location="json")
.add_argument("icon_type", type=str, location="json")
.add_argument("icon", type=str, location="json")
.add_argument("icon_background", type=str, location="json")
.add_argument("app_id", type=str, location="json")
)
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("mode", type=str, required=True, location="json")
parser.add_argument("yaml_content", type=str, location="json")
parser.add_argument("yaml_url", type=str, location="json")
parser.add_argument("name", type=str, location="json")
parser.add_argument("description", type=str, location="json")
parser.add_argument("icon_type", type=str, location="json")
parser.add_argument("icon", type=str, location="json")
parser.add_argument("icon_background", type=str, location="json")
parser.add_argument("app_id", type=str, location="json")
args = parser.parse_args()
# Create service with session
with Session(db.engine) as session:
import_service = AppDslService(session)
# Import app
account = current_user
account = cast(Account, current_user)
result = import_service.import_app(
account=account,
import_mode=args["mode"],
@@ -67,47 +67,47 @@ class AppImportApi(Resource):
EnterpriseService.WebAppAuth.update_app_access_mode(result.app_id, "private")
# Return appropriate status code based on result
status = result.status
if status == ImportStatus.FAILED:
if status == ImportStatus.FAILED.value:
return result.model_dump(mode="json"), 400
elif status == ImportStatus.PENDING:
elif status == ImportStatus.PENDING.value:
return result.model_dump(mode="json"), 202
return result.model_dump(mode="json"), 200
@console_ns.route("/apps/imports/<string:import_id>/confirm")
class AppImportConfirmApi(Resource):
@setup_required
@login_required
@account_initialization_required
@marshal_with(app_import_fields)
@edit_permission_required
def post(self, import_id):
# Check user role first
current_user, _ = current_account_with_tenant()
if not current_user.is_editor:
raise Forbidden()
# Create service with session
with Session(db.engine) as session:
import_service = AppDslService(session)
# Confirm import
account = current_user
account = cast(Account, current_user)
result = import_service.confirm_import(import_id=import_id, account=account)
session.commit()
# Return appropriate status code based on result
if result.status == ImportStatus.FAILED:
if result.status == ImportStatus.FAILED.value:
return result.model_dump(mode="json"), 400
return result.model_dump(mode="json"), 200
@console_ns.route("/apps/imports/<string:app_id>/check-dependencies")
class AppImportCheckDependenciesApi(Resource):
@setup_required
@login_required
@get_app_model
@account_initialization_required
@marshal_with(app_import_check_dependencies_fields)
@edit_permission_required
def get(self, app_model: App):
if not current_user.is_editor:
raise Forbidden()
with Session(db.engine) as session:
import_service = AppDslService(session)
result = import_service.check_dependencies(app_model=app_model)
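The status comparisons in this file switch between the enum member (`ImportStatus.FAILED`) and its raw value (`ImportStatus.FAILED.value`). Which form is safe depends on how `ImportStatus` is declared, which is not shown here: a plain `Enum` member never compares equal to its underlying string, while a `StrEnum` (Python 3.11+) member does. A minimal illustration:

```python
from enum import Enum, StrEnum


class PlainStatus(Enum):
    FAILED = "failed"


class StrStatus(StrEnum):
    FAILED = "failed"


assert "failed" != PlainStatus.FAILED        # plain Enum: member != raw string
assert "failed" == PlainStatus.FAILED.value  # compare against .value instead
assert "failed" == StrStatus.FAILED          # StrEnum members ARE strings
```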

View File

@@ -111,13 +111,11 @@ class ChatMessageTextApi(Resource):
@account_initialization_required
def post(self, app_model: App):
try:
parser = (
reqparse.RequestParser()
.add_argument("message_id", type=str, location="json")
.add_argument("text", type=str, location="json")
.add_argument("voice", type=str, location="json")
.add_argument("streaming", type=bool, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("message_id", type=str, location="json")
parser.add_argument("text", type=str, location="json")
parser.add_argument("voice", type=str, location="json")
parser.add_argument("streaming", type=bool, location="json")
args = parser.parse_args()
message_id = args.get("message_id", None)
@@ -168,7 +166,8 @@ class TextModesApi(Resource):
@account_initialization_required
def get(self, app_model):
try:
parser = reqparse.RequestParser().add_argument("language", type=str, required=True, location="args")
parser = reqparse.RequestParser()
parser.add_argument("language", type=str, required=True, location="args")
args = parser.parse_args()
response = AudioService.transcript_tts_voices(

View File

@@ -2,7 +2,7 @@ import logging
from flask import request
from flask_restx import Resource, fields, reqparse
from werkzeug.exceptions import InternalServerError, NotFound
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from controllers.console import api, console_ns
@@ -15,7 +15,7 @@ from controllers.console.app.error import (
ProviderQuotaExceededError,
)
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom
@@ -64,15 +64,13 @@ class CompletionMessageApi(Resource):
@account_initialization_required
@get_app_model(mode=AppMode.COMPLETION)
def post(self, app_model):
parser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, location="json")
.add_argument("query", type=str, location="json", default="")
.add_argument("files", type=list, required=False, location="json")
.add_argument("model_config", type=dict, required=True, location="json")
.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
.add_argument("retriever_from", type=str, required=False, default="dev", location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, location="json")
parser.add_argument("query", type=str, location="json", default="")
parser.add_argument("files", type=list, required=False, location="json")
parser.add_argument("model_config", type=dict, required=True, location="json")
parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json")
args = parser.parse_args()
streaming = args["response_mode"] != "blocking"
@@ -153,19 +151,22 @@ class ChatMessageApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT])
@edit_permission_required
def post(self, app_model):
parser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, location="json")
.add_argument("query", type=str, required=True, location="json")
.add_argument("files", type=list, required=False, location="json")
.add_argument("model_config", type=dict, required=True, location="json")
.add_argument("conversation_id", type=uuid_value, location="json")
.add_argument("parent_message_id", type=uuid_value, required=False, location="json")
.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
.add_argument("retriever_from", type=str, required=False, default="dev", location="json")
)
if not isinstance(current_user, Account):
raise Forbidden()
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, location="json")
parser.add_argument("query", type=str, required=True, location="json")
parser.add_argument("files", type=list, required=False, location="json")
parser.add_argument("model_config", type=dict, required=True, location="json")
parser.add_argument("conversation_id", type=uuid_value, location="json")
parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json")
parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json")
args = parser.parse_args()
streaming = args["response_mode"] != "blocking"

View File

@@ -1,14 +1,16 @@
import sqlalchemy as sa
from flask import abort
from datetime import datetime
import pytz # pip install pytz
from flask_login import current_user
from flask_restx import Resource, marshal_with, reqparse
from flask_restx.inputs import int_range
from sqlalchemy import func, or_
from sqlalchemy.orm import joinedload
from werkzeug.exceptions import NotFound
from werkzeug.exceptions import Forbidden, NotFound
from controllers.console import api, console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from controllers.console.wraps import account_initialization_required, setup_required
from core.app.entities.app_invoke_entities import InvokeFrom
from extensions.ext_database import db
from fields.conversation_fields import (
@@ -17,10 +19,10 @@ from fields.conversation_fields import (
conversation_pagination_fields,
conversation_with_summary_pagination_fields,
)
from libs.datetime_utils import naive_utc_now, parse_time_range
from libs.datetime_utils import naive_utc_now
from libs.helper import DatetimeString
from libs.login import current_account_with_tenant, login_required
from models import Conversation, EndUser, Message, MessageAnnotation
from libs.login import login_required
from models import Account, Conversation, EndUser, Message, MessageAnnotation
from models.model import AppMode
from services.conversation_service import ConversationService
from services.errors.conversation import ConversationNotExistsError
@@ -54,27 +56,21 @@ class CompletionConversationApi(Resource):
@account_initialization_required
@get_app_model(mode=AppMode.COMPLETION)
@marshal_with(conversation_pagination_fields)
@edit_permission_required
def get(self, app_model):
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("keyword", type=str, location="args")
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument(
"annotation_status",
type=str,
choices=["annotated", "not_annotated", "all"],
default="all",
location="args",
)
.add_argument("page", type=int_range(1, 99999), default=1, location="args")
.add_argument("limit", type=int_range(1, 100), default=20, location="args")
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("keyword", type=str, location="args")
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument(
"annotation_status", type=str, choices=["annotated", "not_annotated", "all"], default="all", location="args"
)
parser.add_argument("page", type=int_range(1, 99999), default=1, location="args")
parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
args = parser.parse_args()
query = sa.select(Conversation).where(
query = db.select(Conversation).where(
Conversation.app_id == app_model.id, Conversation.mode == "completion", Conversation.is_deleted.is_(False)
)
@@ -87,18 +83,25 @@
)
account = current_user
assert account.timezone is not None
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
try:
start_datetime_utc, end_datetime_utc = parse_time_range(args["start"], args["end"], account.timezone)
except ValueError as e:
abort(400, description=str(e))
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
if start_datetime_utc:
query = query.where(Conversation.created_at >= start_datetime_utc)
if end_datetime_utc:
end_datetime_utc = end_datetime_utc.replace(second=59)
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=59)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
query = query.where(Conversation.created_at < end_datetime_utc)
# FIXME, the type ignore in this file
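Both conversation listings in this file delegate their timezone math to `parse_time_range` from `libs.datetime_utils`. Judging from the manual code on the other side of the hunk, the helper parses the `%Y-%m-%d %H:%M` strings in the account's timezone and converts them to UTC, raising `ValueError` (surfaced as an HTTP 400 through `abort`) on malformed input. A hypothetical sketch; the real helper may differ:

```python
from datetime import datetime

import pytz


def parse_time_range(
    start: str | None, end: str | None, tz_name: str
) -> tuple[datetime | None, datetime | None]:
    """Sketch: convert '%Y-%m-%d %H:%M' strings from tz_name to UTC."""
    tz = pytz.timezone(tz_name)

    def to_utc(value: str | None) -> datetime | None:
        if not value:
            return None
        # strptime raises ValueError on malformed input, matching the
        # try/except ValueError at the call sites in this file.
        naive = datetime.strptime(value, "%Y-%m-%d %H:%M").replace(second=0)
        return tz.localize(naive).astimezone(pytz.utc)

    return to_utc(start), to_utc(end)
```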
@@ -133,8 +136,9 @@ class CompletionConversationDetailApi(Resource):
@account_initialization_required
@get_app_model(mode=AppMode.COMPLETION)
@marshal_with(conversation_message_detail_fields)
@edit_permission_required
def get(self, app_model, conversation_id):
if not current_user.is_editor:
raise Forbidden()
conversation_id = str(conversation_id)
return _get_conversation(app_model, conversation_id)
@@ -149,12 +153,14 @@ class CompletionConversationDetailApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=AppMode.COMPLETION)
@edit_permission_required
def delete(self, app_model, conversation_id):
current_user, _ = current_account_with_tenant()
if not current_user.is_editor:
raise Forbidden()
conversation_id = str(conversation_id)
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
ConversationService.delete(app_model, conversation_id, current_user)
except ConversationNotExistsError:
raise NotFound("Conversation Not Exists.")
@@ -199,32 +205,26 @@ class ChatConversationApi(Resource):
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@marshal_with(conversation_with_summary_pagination_fields)
@edit_permission_required
def get(self, app_model):
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("keyword", type=str, location="args")
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument(
"annotation_status",
type=str,
choices=["annotated", "not_annotated", "all"],
default="all",
location="args",
)
.add_argument("message_count_gte", type=int_range(1, 99999), required=False, location="args")
.add_argument("page", type=int_range(1, 99999), required=False, default=1, location="args")
.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
.add_argument(
"sort_by",
type=str,
choices=["created_at", "-created_at", "updated_at", "-updated_at"],
required=False,
default="-updated_at",
location="args",
)
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("keyword", type=str, location="args")
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument(
"annotation_status", type=str, choices=["annotated", "not_annotated", "all"], default="all", location="args"
)
parser.add_argument("message_count_gte", type=int_range(1, 99999), required=False, location="args")
parser.add_argument("page", type=int_range(1, 99999), required=False, default=1, location="args")
parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
parser.add_argument(
"sort_by",
type=str,
choices=["created_at", "-created_at", "updated_at", "-updated_at"],
required=False,
default="-updated_at",
location="args",
)
args = parser.parse_args()
@@ -236,7 +236,7 @@ class ChatConversationApi(Resource):
.subquery()
)
query = sa.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.is_deleted.is_(False))
query = db.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.is_deleted.is_(False))
if args["keyword"]:
keyword_filter = f"%{args['keyword']}%"
@@ -259,22 +259,29 @@
)
account = current_user
assert account.timezone is not None
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
try:
start_datetime_utc, end_datetime_utc = parse_time_range(args["start"], args["end"], account.timezone)
except ValueError as e:
abort(400, description=str(e))
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
if start_datetime_utc:
match args["sort_by"]:
case "updated_at" | "-updated_at":
query = query.where(Conversation.updated_at >= start_datetime_utc)
case "created_at" | "-created_at" | _:
query = query.where(Conversation.created_at >= start_datetime_utc)
if end_datetime_utc:
end_datetime_utc = end_datetime_utc.replace(second=59)
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=59)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
match args["sort_by"]:
case "updated_at" | "-updated_at":
query = query.where(Conversation.updated_at <= end_datetime_utc)
@@ -301,7 +308,7 @@
)
if app_model.mode == AppMode.ADVANCED_CHAT:
query = query.where(Conversation.invoke_from != InvokeFrom.DEBUGGER)
query = query.where(Conversation.invoke_from != InvokeFrom.DEBUGGER.value)
match args["sort_by"]:
case "created_at":
@@ -333,8 +340,9 @@ class ChatConversationDetailApi(Resource):
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@marshal_with(conversation_detail_fields)
@edit_permission_required
def get(self, app_model, conversation_id):
if not current_user.is_editor:
raise Forbidden()
conversation_id = str(conversation_id)
return _get_conversation(app_model, conversation_id)
@@ -349,12 +357,14 @@ class ChatConversationDetailApi(Resource):
@login_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@account_initialization_required
@edit_permission_required
def delete(self, app_model, conversation_id):
current_user, _ = current_account_with_tenant()
if not current_user.is_editor:
raise Forbidden()
conversation_id = str(conversation_id)
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
ConversationService.delete(app_model, conversation_id, current_user)
except ConversationNotExistsError:
raise NotFound("Conversation Not Exists.")
@@ -363,7 +373,6 @@ class ChatConversationDetailApi(Resource):
def _get_conversation(app_model, conversation_id):
current_user, _ = current_account_with_tenant()
conversation = (
db.session.query(Conversation)
.where(Conversation.id == conversation_id, Conversation.app_id == app_model.id)

View File

@@ -29,7 +29,8 @@ class ConversationVariablesApi(Resource):
@get_app_model(mode=AppMode.ADVANCED_CHAT)
@marshal_with(paginated_conversation_variable_fields)
def get(self, app_model):
parser = reqparse.RequestParser().add_argument("conversation_id", type=str, location="args")
parser = reqparse.RequestParser()
parser.add_argument("conversation_id", type=str, location="args")
args = parser.parse_args()
stmt = (

View File

@@ -1,5 +1,6 @@
from collections.abc import Sequence
from flask_login import current_user
from flask_restx import Resource, fields, reqparse
from controllers.console import api, console_ns
@@ -16,7 +17,7 @@ from core.helper.code_executor.python3.python3_code_provider import Python3CodeP
from core.llm_generator.llm_generator import LLMGenerator
from core.model_runtime.errors.invoke import InvokeError
from extensions.ext_database import db
from libs.login import current_account_with_tenant, login_required
from libs.login import login_required
from models import App
from services.workflow_service import WorkflowService
@@ -42,18 +43,16 @@ class RuleGenerateApi(Resource):
@login_required
@account_initialization_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("instruction", type=str, required=True, nullable=False, location="json")
.add_argument("model_config", type=dict, required=True, nullable=False, location="json")
.add_argument("no_variable", type=bool, required=True, default=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("instruction", type=str, required=True, nullable=False, location="json")
parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json")
parser.add_argument("no_variable", type=bool, required=True, default=False, location="json")
args = parser.parse_args()
_, current_tenant_id = current_account_with_tenant()
account = current_user
try:
rules = LLMGenerator.generate_rule_config(
tenant_id=current_tenant_id,
tenant_id=account.current_tenant_id,
instruction=args["instruction"],
model_config=args["model_config"],
no_variable=args["no_variable"],
@@ -94,19 +93,17 @@ class RuleCodeGenerateApi(Resource):
@login_required
@account_initialization_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("instruction", type=str, required=True, nullable=False, location="json")
.add_argument("model_config", type=dict, required=True, nullable=False, location="json")
.add_argument("no_variable", type=bool, required=True, default=False, location="json")
.add_argument("code_language", type=str, required=False, default="javascript", location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("instruction", type=str, required=True, nullable=False, location="json")
parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json")
parser.add_argument("no_variable", type=bool, required=True, default=False, location="json")
parser.add_argument("code_language", type=str, required=False, default="javascript", location="json")
args = parser.parse_args()
_, current_tenant_id = current_account_with_tenant()
account = current_user
try:
code_result = LLMGenerator.generate_code(
tenant_id=current_tenant_id,
tenant_id=account.current_tenant_id,
instruction=args["instruction"],
model_config=args["model_config"],
code_language=args["code_language"],
@@ -143,17 +140,15 @@ class RuleStructuredOutputGenerateApi(Resource):
@login_required
@account_initialization_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("instruction", type=str, required=True, nullable=False, location="json")
.add_argument("model_config", type=dict, required=True, nullable=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("instruction", type=str, required=True, nullable=False, location="json")
parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json")
args = parser.parse_args()
_, current_tenant_id = current_account_with_tenant()
account = current_user
try:
structured_output = LLMGenerator.generate_structured_output(
tenant_id=current_tenant_id,
tenant_id=account.current_tenant_id,
instruction=args["instruction"],
model_config=args["model_config"],
)
@@ -194,18 +189,15 @@ class InstructionGenerateApi(Resource):
@login_required
@account_initialization_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("flow_id", type=str, required=True, default="", location="json")
.add_argument("node_id", type=str, required=False, default="", location="json")
.add_argument("current", type=str, required=False, default="", location="json")
.add_argument("language", type=str, required=False, default="javascript", location="json")
.add_argument("instruction", type=str, required=True, nullable=False, location="json")
.add_argument("model_config", type=dict, required=True, nullable=False, location="json")
.add_argument("ideal_output", type=str, required=False, default="", location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("flow_id", type=str, required=True, default="", location="json")
parser.add_argument("node_id", type=str, required=False, default="", location="json")
parser.add_argument("current", type=str, required=False, default="", location="json")
parser.add_argument("language", type=str, required=False, default="javascript", location="json")
parser.add_argument("instruction", type=str, required=True, nullable=False, location="json")
parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json")
parser.add_argument("ideal_output", type=str, required=False, default="", location="json")
args = parser.parse_args()
_, current_tenant_id = current_account_with_tenant()
code_template = (
Python3CodeProvider.get_default_code()
if args["language"] == "python"
@@ -230,21 +222,21 @@
match node_type:
case "llm":
return LLMGenerator.generate_rule_config(
current_tenant_id,
current_user.current_tenant_id,
instruction=args["instruction"],
model_config=args["model_config"],
no_variable=True,
)
case "agent":
return LLMGenerator.generate_rule_config(
current_tenant_id,
current_user.current_tenant_id,
instruction=args["instruction"],
model_config=args["model_config"],
no_variable=True,
)
case "code":
return LLMGenerator.generate_code(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
instruction=args["instruction"],
model_config=args["model_config"],
code_language=args["language"],
@@ -253,7 +245,7 @@
return {"error": f"invalid node type: {node_type}"}
if args["node_id"] == "" and args["current"] != "": # For legacy app without a workflow
return LLMGenerator.instruction_modify_legacy(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
flow_id=args["flow_id"],
current=args["current"],
instruction=args["instruction"],
@ -262,7 +254,7 @@ class InstructionGenerateApi(Resource):
)
if args["node_id"] != "" and args["current"] != "": # For workflow node
return LLMGenerator.instruction_modify_workflow(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
flow_id=args["flow_id"],
node_id=args["node_id"],
current=args["current"],
@@ -301,7 +293,8 @@ class InstructionGenerationTemplateApi(Resource):
@login_required
@account_initialization_required
def post(self):
parser = reqparse.RequestParser().add_argument("type", type=str, required=True, default=False, location="json")
parser = reqparse.RequestParser()
parser.add_argument("type", type=str, required=True, default=False, location="json")
args = parser.parse_args()
match args["type"]:
case "prompt":

View File

@@ -1,15 +1,16 @@
import json
from enum import StrEnum
from flask_login import current_user
from flask_restx import Resource, fields, marshal_with, reqparse
from werkzeug.exceptions import NotFound
from controllers.console import api, console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from fields.app_fields import app_server_fields
from libs.login import current_account_with_tenant, login_required
from libs.login import login_required
from models.model import AppMCPServer
@ -24,9 +25,9 @@ class AppMCPServerController(Resource):
@api.doc(description="Get MCP server configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "MCP server configuration retrieved successfully", app_server_fields)
@setup_required
@login_required
@account_initialization_required
@setup_required
@get_app_model
@marshal_with(app_server_fields)
def get(self, app_model):
@ -47,19 +48,17 @@ class AppMCPServerController(Resource):
)
@api.response(201, "MCP server configuration created successfully", app_server_fields)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@get_app_model
@login_required
@setup_required
@marshal_with(app_server_fields)
@edit_permission_required
def post(self, app_model):
_, current_tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("description", type=str, required=False, location="json")
.add_argument("parameters", type=dict, required=True, location="json")
)
if not current_user.is_editor:
raise NotFound()
parser = reqparse.RequestParser()
parser.add_argument("description", type=str, required=False, location="json")
parser.add_argument("parameters", type=dict, required=True, location="json")
args = parser.parse_args()
description = args.get("description")
@ -72,7 +71,7 @@ class AppMCPServerController(Resource):
parameters=json.dumps(args["parameters"], ensure_ascii=False),
status=AppMCPServerStatus.ACTIVE,
app_id=app_model.id,
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
server_code=AppMCPServer.generate_server_code(16),
)
db.session.add(server)
@ -96,20 +95,19 @@ class AppMCPServerController(Resource):
@api.response(200, "MCP server configuration updated successfully", app_server_fields)
@api.response(403, "Insufficient permissions")
@api.response(404, "Server not found")
@get_app_model
@login_required
@setup_required
@login_required
@account_initialization_required
@get_app_model
@marshal_with(app_server_fields)
@edit_permission_required
def put(self, app_model):
parser = (
reqparse.RequestParser()
.add_argument("id", type=str, required=True, location="json")
.add_argument("description", type=str, required=False, location="json")
.add_argument("parameters", type=dict, required=True, location="json")
.add_argument("status", type=str, required=False, location="json")
)
if not current_user.is_editor:
raise NotFound()
parser = reqparse.RequestParser()
parser.add_argument("id", type=str, required=True, location="json")
parser.add_argument("description", type=str, required=False, location="json")
parser.add_argument("parameters", type=dict, required=True, location="json")
parser.add_argument("status", type=str, required=False, location="json")
args = parser.parse_args()
server = db.session.query(AppMCPServer).where(AppMCPServer.id == args["id"]).first()
if not server:
@ -144,13 +142,13 @@ class AppMCPServerRefreshController(Resource):
@login_required
@account_initialization_required
@marshal_with(app_server_fields)
@edit_permission_required
def get(self, server_id):
_, current_tenant_id = current_account_with_tenant()
if not current_user.is_editor:
raise NotFound()
server = (
db.session.query(AppMCPServer)
.where(AppMCPServer.id == server_id)
.where(AppMCPServer.tenant_id == current_tenant_id)
.where(AppMCPServer.tenant_id == current_user.current_tenant_id)
.first()
)
if not server:

View File
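Several handlers above drop inline role checks (current_user.is_editor followed by raise NotFound()) in favor of a shared edit_permission_required decorator from controllers.console.wraps. Its body is not part of this diff, so the following is only a sketch, assuming it simply guards the wrapped view:

from functools import wraps

from flask_login import current_user
from werkzeug.exceptions import Forbidden


def edit_permission_required(view):
    # Sketch only: the real decorator lives in controllers.console.wraps
    # and may differ in detail (e.g. which exception it raises).
    @wraps(view)
    def decorated(*args, **kwargs):
        # Reject principals whose tenant role lacks edit rights (editor/admin/owner).
        if not getattr(current_user, "has_edit_permission", False):
            raise Forbidden()
        return view(*args, **kwargs)

    return decorated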

@ -3,7 +3,7 @@ import logging
from flask_restx import Resource, fields, marshal_with, reqparse
from flask_restx.inputs import int_range
from sqlalchemy import exists, select
from werkzeug.exceptions import InternalServerError, NotFound
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
from controllers.console import api, console_ns
from controllers.console.app.error import (
@ -16,18 +16,20 @@ from controllers.console.app.wraps import get_app_model
from controllers.console.explore.error import AppSuggestedQuestionsAfterAnswerDisabledError
from controllers.console.wraps import (
account_initialization_required,
edit_permission_required,
cloud_edition_billing_resource_check,
setup_required,
)
from core.app.entities.app_invoke_entities import InvokeFrom
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError
from extensions.ext_database import db
from fields.conversation_fields import message_detail_fields
from fields.conversation_fields import annotation_fields, message_detail_fields
from libs.helper import uuid_value
from libs.infinite_scroll_pagination import InfiniteScrollPagination
from libs.login import current_account_with_tenant, login_required
from libs.login import current_user, login_required
from models.account import Account
from models.model import AppMode, Conversation, Message, MessageAnnotation, MessageFeedback
from services.annotation_service import AppAnnotationService
from services.errors.conversation import ConversationNotExistsError
from services.errors.message import MessageNotExistsError, SuggestedQuestionsAfterAnswerDisabledError
from services.message_service import MessageService
@ -54,19 +56,16 @@ class ChatMessageListApi(Resource):
)
@api.response(200, "Success", message_infinite_scroll_pagination_fields)
@api.response(404, "Conversation not found")
@login_required
@account_initialization_required
@setup_required
@login_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@account_initialization_required
@marshal_with(message_infinite_scroll_pagination_fields)
@edit_permission_required
def get(self, app_model):
parser = (
reqparse.RequestParser()
.add_argument("conversation_id", required=True, type=uuid_value, location="args")
.add_argument("first_id", type=uuid_value, location="args")
.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("conversation_id", required=True, type=uuid_value, location="args")
parser.add_argument("first_id", type=uuid_value, location="args")
parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
args = parser.parse_args()
conversation = (
@ -152,13 +151,12 @@ class MessageFeedbackApi(Resource):
@login_required
@account_initialization_required
def post(self, app_model):
current_user, _ = current_account_with_tenant()
if current_user is None:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("message_id", required=True, type=uuid_value, location="json")
.add_argument("rating", type=str, choices=["like", "dislike", None], location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("message_id", required=True, type=uuid_value, location="json")
parser.add_argument("rating", type=str, choices=["like", "dislike", None], location="json")
args = parser.parse_args()
message_id = str(args["message_id"])
@ -192,6 +190,47 @@ class MessageFeedbackApi(Resource):
return {"result": "success"}
@console_ns.route("/apps/<uuid:app_id>/annotations")
class MessageAnnotationApi(Resource):
@api.doc("create_message_annotation")
@api.doc(description="Create message annotation")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"MessageAnnotationRequest",
{
"message_id": fields.String(description="Message ID"),
"question": fields.String(required=True, description="Question text"),
"answer": fields.String(required=True, description="Answer text"),
"annotation_reply": fields.Raw(description="Annotation reply"),
},
)
)
@api.response(200, "Annotation created successfully", annotation_fields)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@cloud_edition_billing_resource_check("annotation")
@get_app_model
@marshal_with(annotation_fields)
def post(self, app_model):
if not isinstance(current_user, Account):
raise Forbidden()
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("message_id", required=False, type=uuid_value, location="json")
parser.add_argument("question", required=True, type=str, location="json")
parser.add_argument("answer", required=True, type=str, location="json")
parser.add_argument("annotation_reply", required=False, type=dict, location="json")
args = parser.parse_args()
annotation = AppAnnotationService.up_insert_app_annotation_from_message(args, app_model.id)
return annotation
@console_ns.route("/apps/<uuid:app_id>/annotations/count")
class MessageAnnotationCountApi(Resource):
@api.doc("get_annotation_count")
@ -228,7 +267,6 @@ class MessageSuggestedQuestionApi(Resource):
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
def get(self, app_model, message_id):
current_user, _ = current_account_with_tenant()
message_id = str(message_id)
try:
@ -263,12 +301,12 @@ class MessageApi(Resource):
@api.doc(params={"app_id": "Application ID", "message_id": "Message ID"})
@api.response(200, "Message retrieved successfully", message_detail_fields)
@api.response(404, "Message not found")
@get_app_model
@setup_required
@login_required
@account_initialization_required
@get_app_model
@marshal_with(message_detail_fields)
def get(self, app_model, message_id: str):
def get(self, app_model, message_id):
message_id = str(message_id)
message = db.session.query(Message).where(Message.id == message_id, Message.app_id == app_model.id).first()

View File
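These hunks also trade flask_login.current_user for a current_account_with_tenant() helper from libs.login that callers unpack as (account, tenant_id). Its implementation is not shown in this diff; assuming it enforces the same invariants the inline code checks (an Account principal with a loaded tenant), the contract would be roughly:

from flask_login import current_user
from werkzeug.exceptions import Forbidden

from models.account import Account  # repo module, assumed importable


def current_account_with_tenant():
    # Assumed contract: return (account, tenant_id), rejecting non-Account
    # principals (e.g. end users) and accounts without a current tenant.
    if not isinstance(current_user, Account):
        raise Forbidden()
    if current_user.current_tenant_id is None:
        raise Forbidden()
    return current_user, current_user.current_tenant_id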

@ -2,6 +2,7 @@ import json
from typing import cast
from flask import request
from flask_login import current_user
from flask_restx import Resource, fields
from werkzeug.exceptions import Forbidden
@ -13,8 +14,8 @@ from core.tools.tool_manager import ToolManager
from core.tools.utils.configuration import ToolParameterConfigurationManager
from events.app_event import app_model_config_was_updated
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.login import current_account_with_tenant, login_required
from libs.login import login_required
from models.account import Account
from models.model import AppMode, AppModelConfig
from services.app_model_config_service import AppModelConfigService
@ -52,14 +53,16 @@ class ModelConfigResource(Resource):
@get_app_model(mode=[AppMode.AGENT_CHAT, AppMode.CHAT, AppMode.COMPLETION])
def post(self, app_model):
"""Modify app model config"""
current_user, current_tenant_id = current_account_with_tenant()
if not isinstance(current_user, Account):
raise Forbidden()
if not current_user.has_edit_permission:
raise Forbidden()
assert current_user.current_tenant_id is not None, "The tenant information should be loaded."
# validate config
model_configuration = AppModelConfigService.validate_configuration(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
config=cast(dict, request.json),
app_mode=AppMode.value_of(app_model.mode),
)
@ -87,16 +90,16 @@ class ModelConfigResource(Resource):
if not isinstance(tool, dict) or len(tool.keys()) <= 3:
continue
agent_tool_entity = AgentToolEntity.model_validate(tool)
agent_tool_entity = AgentToolEntity(**tool)
# get tool
try:
tool_runtime = ToolManager.get_agent_tool_runtime(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
app_id=app_model.id,
agent_tool=agent_tool_entity,
)
manager = ToolParameterConfigurationManager(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
tool_runtime=tool_runtime,
provider_name=agent_tool_entity.provider_id,
provider_type=agent_tool_entity.provider_type,
@ -121,7 +124,7 @@ class ModelConfigResource(Resource):
# encrypt agent tool parameters if it's secret-input
agent_mode = new_app_model_config.agent_mode_dict
for tool in agent_mode.get("tools") or []:
agent_tool_entity = AgentToolEntity.model_validate(tool)
agent_tool_entity = AgentToolEntity(**tool)
# get tool
key = f"{agent_tool_entity.provider_id}.{agent_tool_entity.provider_type}.{agent_tool_entity.tool_name}"
@ -130,7 +133,7 @@ class ModelConfigResource(Resource):
else:
try:
tool_runtime = ToolManager.get_agent_tool_runtime(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
app_id=app_model.id,
agent_tool=agent_tool_entity,
)
@ -138,7 +141,7 @@ class ModelConfigResource(Resource):
continue
manager = ToolParameterConfigurationManager(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
tool_runtime=tool_runtime,
provider_name=agent_tool_entity.provider_id,
provider_type=agent_tool_entity.provider_type,
@ -169,8 +172,6 @@ class ModelConfigResource(Resource):
db.session.flush()
app_model.app_model_config_id = new_app_model_config.id
app_model.updated_by = current_user.id
app_model.updated_at = naive_utc_now()
db.session.commit()
app_model_config_was_updated.send(app_model, app_model_config=new_app_model_config)

View File
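One pair of lines above toggles between AgentToolEntity.model_validate(tool) and AgentToolEntity(**tool). With Pydantic v2 both build a model from a mapping, but model_validate validates the object directly (and accepts any mapping), while ** unpacking requires a plain dict of keyword arguments. A small sketch with a hypothetical stand-in model:

from pydantic import BaseModel


class ToolRef(BaseModel):  # hypothetical stand-in for AgentToolEntity
    provider_id: str
    provider_type: str
    tool_name: str


raw = {"provider_id": "p1", "provider_type": "builtin", "tool_name": "search"}

a = ToolRef.model_validate(raw)  # Pydantic v2 API: validate the mapping directly
b = ToolRef(**raw)               # equivalent for a plain dict of field values
assert a == b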

@ -30,7 +30,8 @@ class TraceAppConfigApi(Resource):
@login_required
@account_initialization_required
def get(self, app_id):
parser = reqparse.RequestParser().add_argument("tracing_provider", type=str, required=True, location="args")
parser = reqparse.RequestParser()
parser.add_argument("tracing_provider", type=str, required=True, location="args")
args = parser.parse_args()
try:
@ -62,11 +63,9 @@ class TraceAppConfigApi(Resource):
@account_initialization_required
def post(self, app_id):
"""Create a new trace app configuration"""
parser = (
reqparse.RequestParser()
.add_argument("tracing_provider", type=str, required=True, location="json")
.add_argument("tracing_config", type=dict, required=True, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("tracing_provider", type=str, required=True, location="json")
parser.add_argument("tracing_config", type=dict, required=True, location="json")
args = parser.parse_args()
try:
@ -100,11 +99,9 @@ class TraceAppConfigApi(Resource):
@account_initialization_required
def patch(self, app_id):
"""Update an existing trace app configuration"""
parser = (
reqparse.RequestParser()
.add_argument("tracing_provider", type=str, required=True, location="json")
.add_argument("tracing_config", type=dict, required=True, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("tracing_provider", type=str, required=True, location="json")
parser.add_argument("tracing_config", type=dict, required=True, location="json")
args = parser.parse_args()
try:
@ -132,7 +129,8 @@ class TraceAppConfigApi(Resource):
@account_initialization_required
def delete(self, app_id):
"""Delete an existing trace app configuration"""
parser = reqparse.RequestParser().add_argument("tracing_provider", type=str, required=True, location="args")
parser = reqparse.RequestParser()
parser.add_argument("tracing_provider", type=str, required=True, location="args")
args = parser.parse_args()
try:

View File

@ -1,3 +1,4 @@
from flask_login import current_user
from flask_restx import Resource, fields, marshal_with, reqparse
from werkzeug.exceptions import Forbidden, NotFound
@ -8,36 +9,30 @@ from controllers.console.wraps import account_initialization_required, setup_req
from extensions.ext_database import db
from fields.app_fields import app_site_fields
from libs.datetime_utils import naive_utc_now
from libs.login import current_account_with_tenant, login_required
from models import Site
from libs.login import login_required
from models import Account, Site
def parse_app_site_args():
parser = (
reqparse.RequestParser()
.add_argument("title", type=str, required=False, location="json")
.add_argument("icon_type", type=str, required=False, location="json")
.add_argument("icon", type=str, required=False, location="json")
.add_argument("icon_background", type=str, required=False, location="json")
.add_argument("description", type=str, required=False, location="json")
.add_argument("default_language", type=supported_language, required=False, location="json")
.add_argument("chat_color_theme", type=str, required=False, location="json")
.add_argument("chat_color_theme_inverted", type=bool, required=False, location="json")
.add_argument("customize_domain", type=str, required=False, location="json")
.add_argument("copyright", type=str, required=False, location="json")
.add_argument("privacy_policy", type=str, required=False, location="json")
.add_argument("custom_disclaimer", type=str, required=False, location="json")
.add_argument(
"customize_token_strategy",
type=str,
choices=["must", "allow", "not_allow"],
required=False,
location="json",
)
.add_argument("prompt_public", type=bool, required=False, location="json")
.add_argument("show_workflow_steps", type=bool, required=False, location="json")
.add_argument("use_icon_as_answer_icon", type=bool, required=False, location="json")
parser = reqparse.RequestParser()
parser.add_argument("title", type=str, required=False, location="json")
parser.add_argument("icon_type", type=str, required=False, location="json")
parser.add_argument("icon", type=str, required=False, location="json")
parser.add_argument("icon_background", type=str, required=False, location="json")
parser.add_argument("description", type=str, required=False, location="json")
parser.add_argument("default_language", type=supported_language, required=False, location="json")
parser.add_argument("chat_color_theme", type=str, required=False, location="json")
parser.add_argument("chat_color_theme_inverted", type=bool, required=False, location="json")
parser.add_argument("customize_domain", type=str, required=False, location="json")
parser.add_argument("copyright", type=str, required=False, location="json")
parser.add_argument("privacy_policy", type=str, required=False, location="json")
parser.add_argument("custom_disclaimer", type=str, required=False, location="json")
parser.add_argument(
"customize_token_strategy", type=str, choices=["must", "allow", "not_allow"], required=False, location="json"
)
parser.add_argument("prompt_public", type=bool, required=False, location="json")
parser.add_argument("show_workflow_steps", type=bool, required=False, location="json")
parser.add_argument("use_icon_as_answer_icon", type=bool, required=False, location="json")
return parser.parse_args()
@ -81,10 +76,9 @@ class AppSite(Resource):
@marshal_with(app_site_fields)
def post(self, app_model):
args = parse_app_site_args()
current_user, _ = current_account_with_tenant()
# The role of the current user in the ta table must be editor, admin, or owner
if not current_user.has_edit_permission:
if not current_user.is_editor:
raise Forbidden()
site = db.session.query(Site).where(Site.app_id == app_model.id).first()
@ -113,6 +107,8 @@ class AppSite(Resource):
if value is not None:
setattr(site, attr_name, value)
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
site.updated_by = current_user.id
site.updated_at = naive_utc_now()
db.session.commit()
@ -135,8 +131,6 @@ class AppSiteAccessTokenReset(Resource):
@marshal_with(app_site_fields)
def post(self, app_model):
# The role of the current user in the ta table must be admin or owner
current_user, _ = current_account_with_tenant()
if not current_user.is_admin_or_owner:
raise Forbidden()
@ -146,6 +140,8 @@ class AppSiteAccessTokenReset(Resource):
raise NotFound
site.code = Site.generate_code(16)
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
site.updated_by = current_user.id
site.updated_at = naive_utc_now()
db.session.commit()

View File

@ -1,7 +1,10 @@
from datetime import datetime
from decimal import Decimal
import pytz
import sqlalchemy as sa
from flask import abort, jsonify
from flask import jsonify
from flask_login import current_user
from flask_restx import Resource, fields, reqparse
from controllers.console import api, console_ns
@ -9,9 +12,8 @@ from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.app.entities.app_invoke_entities import InvokeFrom
from extensions.ext_database import db
from libs.datetime_utils import parse_time_range
from libs.helper import DatetimeString
from libs.login import current_account_with_tenant, login_required
from libs.login import login_required
from models import AppMode, Message
@ -35,13 +37,11 @@ class DailyMessageStatistic(Resource):
@login_required
@account_initialization_required
def get(self, app_model):
account, _ = current_account_with_tenant()
account = current_user
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
sql_query = """SELECT
@ -50,21 +50,29 @@ class DailyMessageStatistic(Resource):
FROM
messages
WHERE
app_id = :app_id
AND invoke_from != :invoke_from"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER}
assert account.timezone is not None
app_id = :app_id"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id}
try:
start_datetime_utc, end_datetime_utc = parse_time_range(args["start"], args["end"], account.timezone)
except ValueError as e:
abort(400, description=str(e))
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
if start_datetime_utc:
sql_query += " AND created_at >= :start"
arg_dict["start"] = start_datetime_utc
if end_datetime_utc:
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
sql_query += " AND created_at < :end"
arg_dict["end"] = end_datetime_utc
@ -100,20 +108,15 @@ class DailyConversationStatistic(Resource):
@login_required
@account_initialization_required
def get(self, app_model):
account, _ = current_account_with_tenant()
account = current_user
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
assert account.timezone is not None
try:
start_datetime_utc, end_datetime_utc = parse_time_range(args["start"], args["end"], account.timezone)
except ValueError as e:
abort(400, description=str(e))
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
stmt = (
sa.select(
@ -123,13 +126,21 @@ class DailyConversationStatistic(Resource):
sa.func.count(sa.distinct(Message.conversation_id)).label("conversation_count"),
)
.select_from(Message)
.where(Message.app_id == app_model.id, Message.invoke_from != InvokeFrom.DEBUGGER)
.where(Message.app_id == app_model.id, Message.invoke_from != InvokeFrom.DEBUGGER.value)
)
if start_datetime_utc:
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
stmt = stmt.where(Message.created_at >= start_datetime_utc)
if end_datetime_utc:
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
stmt = stmt.where(Message.created_at < end_datetime_utc)
stmt = stmt.group_by("date").order_by("date")
@ -163,13 +174,11 @@ class DailyTerminalsStatistic(Resource):
@login_required
@account_initialization_required
def get(self, app_model):
account, _ = current_account_with_tenant()
account = current_user
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
sql_query = """SELECT
@ -178,21 +187,29 @@ class DailyTerminalsStatistic(Resource):
FROM
messages
WHERE
app_id = :app_id
AND invoke_from != :invoke_from"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER}
assert account.timezone is not None
app_id = :app_id"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id}
try:
start_datetime_utc, end_datetime_utc = parse_time_range(args["start"], args["end"], account.timezone)
except ValueError as e:
abort(400, description=str(e))
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
if start_datetime_utc:
sql_query += " AND created_at >= :start"
arg_dict["start"] = start_datetime_utc
if end_datetime_utc:
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
sql_query += " AND created_at < :end"
arg_dict["end"] = end_datetime_utc
@ -228,13 +245,11 @@ class DailyTokenCostStatistic(Resource):
@login_required
@account_initialization_required
def get(self, app_model):
account, _ = current_account_with_tenant()
account = current_user
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
sql_query = """SELECT
@ -244,21 +259,29 @@ class DailyTokenCostStatistic(Resource):
FROM
messages
WHERE
app_id = :app_id
AND invoke_from != :invoke_from"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER}
assert account.timezone is not None
app_id = :app_id"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id}
try:
start_datetime_utc, end_datetime_utc = parse_time_range(args["start"], args["end"], account.timezone)
except ValueError as e:
abort(400, description=str(e))
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
if start_datetime_utc:
sql_query += " AND created_at >= :start"
arg_dict["start"] = start_datetime_utc
if end_datetime_utc:
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
sql_query += " AND created_at < :end"
arg_dict["end"] = end_datetime_utc
@ -296,13 +319,11 @@ class AverageSessionInteractionStatistic(Resource):
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
def get(self, app_model):
account, _ = current_account_with_tenant()
account = current_user
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
sql_query = """SELECT
@ -319,21 +340,29 @@ FROM
messages m
ON c.id = m.conversation_id
WHERE
c.app_id = :app_id
AND m.invoke_from != :invoke_from"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER}
assert account.timezone is not None
c.app_id = :app_id"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id}
try:
start_datetime_utc, end_datetime_utc = parse_time_range(args["start"], args["end"], account.timezone)
except ValueError as e:
abort(400, description=str(e))
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
if start_datetime_utc:
sql_query += " AND c.created_at >= :start"
arg_dict["start"] = start_datetime_utc
if end_datetime_utc:
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
sql_query += " AND c.created_at < :end"
arg_dict["end"] = end_datetime_utc
@ -380,13 +409,11 @@ class UserSatisfactionRateStatistic(Resource):
@login_required
@account_initialization_required
def get(self, app_model):
account, _ = current_account_with_tenant()
account = current_user
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
sql_query = """SELECT
@ -399,21 +426,29 @@ LEFT JOIN
message_feedbacks mf
ON mf.message_id=m.id AND mf.rating='like'
WHERE
m.app_id = :app_id
AND m.invoke_from != :invoke_from"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER}
assert account.timezone is not None
m.app_id = :app_id"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id}
try:
start_datetime_utc, end_datetime_utc = parse_time_range(args["start"], args["end"], account.timezone)
except ValueError as e:
abort(400, description=str(e))
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
if start_datetime_utc:
sql_query += " AND m.created_at >= :start"
arg_dict["start"] = start_datetime_utc
if end_datetime_utc:
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
sql_query += " AND m.created_at < :end"
arg_dict["end"] = end_datetime_utc
@ -454,13 +489,11 @@ class AverageResponseTimeStatistic(Resource):
@account_initialization_required
@get_app_model(mode=AppMode.COMPLETION)
def get(self, app_model):
account, _ = current_account_with_tenant()
account = current_user
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
sql_query = """SELECT
@ -469,21 +502,29 @@ class AverageResponseTimeStatistic(Resource):
FROM
messages
WHERE
app_id = :app_id
AND invoke_from != :invoke_from"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER}
assert account.timezone is not None
app_id = :app_id"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id}
try:
start_datetime_utc, end_datetime_utc = parse_time_range(args["start"], args["end"], account.timezone)
except ValueError as e:
abort(400, description=str(e))
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
if start_datetime_utc:
sql_query += " AND created_at >= :start"
arg_dict["start"] = start_datetime_utc
if end_datetime_utc:
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
sql_query += " AND created_at < :end"
arg_dict["end"] = end_datetime_utc
@ -519,13 +560,11 @@ class TokensPerSecondStatistic(Resource):
@login_required
@account_initialization_required
def get(self, app_model):
account, _ = current_account_with_tenant()
account = current_user
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
sql_query = """SELECT
@ -537,21 +576,29 @@ class TokensPerSecondStatistic(Resource):
FROM
messages
WHERE
app_id = :app_id
AND invoke_from != :invoke_from"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER}
assert account.timezone is not None
app_id = :app_id"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id}
try:
start_datetime_utc, end_datetime_utc = parse_time_range(args["start"], args["end"], account.timezone)
except ValueError as e:
abort(400, description=str(e))
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
if start_datetime_utc:
sql_query += " AND created_at >= :start"
arg_dict["start"] = start_datetime_utc
if end_datetime_utc:
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
sql_query += " AND created_at < :end"
arg_dict["end"] = end_datetime_utc

View File
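Every statistics endpoint above replaces the duplicated strptime/pytz boilerplate with a single parse_time_range helper from libs.datetime_utils. Its body is outside this diff; assuming it mirrors the deleted inline code (local "YYYY-MM-DD HH:MM" strings, seconds zeroed, converted to UTC, ValueError on bad input), it would look roughly like:

from datetime import datetime

import pytz


def parse_time_range(start: str | None, end: str | None, tz_name: str):
    # Assumed shape of libs.datetime_utils.parse_time_range, reconstructed
    # from the inline code it replaces in the hunks above.
    tz = pytz.timezone(tz_name)

    def to_utc(value: str | None):
        if not value:
            return None
        local = tz.localize(datetime.strptime(value, "%Y-%m-%d %H:%M").replace(second=0))
        return local.astimezone(pytz.utc)

    start_utc, end_utc = to_utc(start), to_utc(end)
    if start_utc and end_utc and start_utc > end_utc:
        raise ValueError("start must not be later than end")
    return start_utc, end_utc

The callers then wrap the call in try/except ValueError and abort(400, ...), as the new code above does.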

@ -9,10 +9,11 @@ from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from configs import dify_config
from controllers.console import api, console_ns
from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
from core.app.app_config.features.file_upload.manager import FileUploadConfigManager
from core.app.apps.base_app_queue_manager import AppQueueManager
@ -25,10 +26,10 @@ from factories import file_factory, variable_factory
from fields.workflow_fields import workflow_fields, workflow_pagination_fields
from fields.workflow_run_fields import workflow_run_node_execution_fields
from libs import helper
from libs.datetime_utils import naive_utc_now
from libs.helper import TimestampField, uuid_value
from libs.login import current_account_with_tenant, login_required
from libs.login import current_user, login_required
from models import App
from models.account import Account
from models.model import AppMode
from models.workflow import Workflow
from services.app_generate_service import AppGenerateService
@ -69,11 +70,15 @@ class DraftWorkflowApi(Resource):
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@marshal_with(workflow_fields)
@edit_permission_required
def get(self, app_model: App):
"""
Get draft workflow
"""
# The role of the current user in the ta table must be admin, owner, or editor
assert isinstance(current_user, Account)
if not current_user.has_edit_permission:
raise Forbidden()
# fetch draft workflow by app_model
workflow_service = WorkflowService()
workflow = workflow_service.get_draft_workflow(app_model=app_model)
@ -102,38 +107,27 @@ class DraftWorkflowApi(Resource):
},
)
)
@api.response(
200,
"Draft workflow synced successfully",
api.model(
"SyncDraftWorkflowResponse",
{
"result": fields.String,
"hash": fields.String,
"updated_at": fields.String,
},
),
)
@api.response(200, "Draft workflow synced successfully", workflow_fields)
@api.response(400, "Invalid workflow configuration")
@api.response(403, "Permission denied")
@edit_permission_required
def post(self, app_model: App):
"""
Sync draft workflow
"""
current_user, _ = current_account_with_tenant()
# The role of the current user in the ta table must be admin, owner, or editor
assert isinstance(current_user, Account)
if not current_user.has_edit_permission:
raise Forbidden()
content_type = request.headers.get("Content-Type", "")
if "application/json" in content_type:
parser = (
reqparse.RequestParser()
.add_argument("graph", type=dict, required=True, nullable=False, location="json")
.add_argument("features", type=dict, required=True, nullable=False, location="json")
.add_argument("hash", type=str, required=False, location="json")
.add_argument("environment_variables", type=list, required=True, location="json")
.add_argument("conversation_variables", type=list, required=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("graph", type=dict, required=True, nullable=False, location="json")
parser.add_argument("features", type=dict, required=True, nullable=False, location="json")
parser.add_argument("hash", type=str, required=False, location="json")
parser.add_argument("environment_variables", type=list, required=True, location="json")
parser.add_argument("conversation_variables", type=list, required=False, location="json")
args = parser.parse_args()
elif "text/plain" in content_type:
try:
@ -155,6 +149,10 @@ class DraftWorkflowApi(Resource):
return {"message": "Invalid JSON data"}, 400
else:
abort(415)
if not isinstance(current_user, Account):
raise Forbidden()
workflow_service = WorkflowService()
try:
@ -208,21 +206,24 @@ class AdvancedChatDraftWorkflowRunApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT])
@edit_permission_required
def post(self, app_model: App):
"""
Run draft workflow
"""
current_user, _ = current_account_with_tenant()
# The role of the current user in the ta table must be admin, owner, or editor
assert isinstance(current_user, Account)
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, location="json")
.add_argument("query", type=str, required=True, location="json", default="")
.add_argument("files", type=list, location="json")
.add_argument("conversation_id", type=uuid_value, location="json")
.add_argument("parent_message_id", type=uuid_value, required=False, location="json")
)
if not isinstance(current_user, Account):
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, location="json")
parser.add_argument("query", type=str, required=True, location="json", default="")
parser.add_argument("files", type=list, location="json")
parser.add_argument("conversation_id", type=uuid_value, location="json")
parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json")
args = parser.parse_args()
@ -270,13 +271,18 @@ class AdvancedChatDraftRunIterationNodeApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT])
@edit_permission_required
def post(self, app_model: App, node_id: str):
"""
Run draft workflow iteration node
"""
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json")
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, location="json")
args = parser.parse_args()
try:
@ -317,13 +323,18 @@ class WorkflowDraftRunIterationNodeApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.WORKFLOW])
@edit_permission_required
def post(self, app_model: App, node_id: str):
"""
Run draft workflow iteration node
"""
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json")
# The role of the current user in the ta table must be admin, owner, or editor
if not isinstance(current_user, Account):
raise Forbidden()
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, location="json")
args = parser.parse_args()
try:
@ -364,13 +375,19 @@ class AdvancedChatDraftRunLoopNodeApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT])
@edit_permission_required
def post(self, app_model: App, node_id: str):
"""
Run draft workflow loop node
"""
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json")
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, location="json")
args = parser.parse_args()
try:
@ -411,13 +428,19 @@ class WorkflowDraftRunLoopNodeApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.WORKFLOW])
@edit_permission_required
def post(self, app_model: App, node_id: str):
"""
Run draft workflow loop node
"""
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json")
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, location="json")
args = parser.parse_args()
try:
@ -457,17 +480,20 @@ class DraftWorkflowRunApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.WORKFLOW])
@edit_permission_required
def post(self, app_model: App):
"""
Run draft workflow
"""
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("files", type=list, required=False, location="json")
)
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
parser.add_argument("files", type=list, required=False, location="json")
args = parser.parse_args()
external_trace_id = get_external_trace_id(request)
@ -500,11 +526,17 @@ class WorkflowTaskStopApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@edit_permission_required
def post(self, app_model: App, task_id: str):
"""
Stop workflow task
"""
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.has_edit_permission:
raise Forbidden()
# Stop using both mechanisms for backward compatibility
# Legacy stop flag mechanism (without user check)
AppQueueManager.set_stop_flag_no_user_check(task_id)
@ -536,18 +568,21 @@ class DraftWorkflowNodeRunApi(Resource):
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@marshal_with(workflow_run_node_execution_fields)
@edit_permission_required
def post(self, app_model: App, node_id: str):
"""
Run draft workflow node
"""
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("query", type=str, required=False, location="json", default="")
.add_argument("files", type=list, location="json", default=[])
)
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
parser.add_argument("query", type=str, required=False, location="json", default="")
parser.add_argument("files", type=list, location="json", default=[])
args = parser.parse_args()
user_inputs = args.get("inputs")
@ -587,11 +622,17 @@ class PublishedWorkflowApi(Resource):
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@marshal_with(workflow_fields)
@edit_permission_required
def get(self, app_model: App):
"""
Get published workflow
"""
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.has_edit_permission:
raise Forbidden()
# fetch published workflow by app_model
workflow_service = WorkflowService()
workflow = workflow_service.get_published_workflow(app_model=app_model)
@ -603,17 +644,19 @@ class PublishedWorkflowApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@edit_permission_required
def post(self, app_model: App):
"""
Publish workflow
"""
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("marked_name", type=str, required=False, default="", location="json")
.add_argument("marked_comment", type=str, required=False, default="", location="json")
)
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("marked_name", type=str, required=False, default="", location="json")
parser.add_argument("marked_comment", type=str, required=False, default="", location="json")
args = parser.parse_args()
# Validate name and comment length
@ -632,12 +675,8 @@ class PublishedWorkflowApi(Resource):
marked_comment=args.marked_comment or "",
)
# Update app_model within the same session to ensure atomicity
app_model_in_session = session.get(App, app_model.id)
if app_model_in_session:
app_model_in_session.workflow_id = workflow.id
app_model_in_session.updated_by = current_user.id
app_model_in_session.updated_at = naive_utc_now()
app_model.workflow_id = workflow.id
db.session.commit() # NOTE: this is necessary to update app_model.workflow_id
workflow_created_at = TimestampField().format(workflow.created_at)
@ -659,11 +698,17 @@ class DefaultBlockConfigsApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@edit_permission_required
def get(self, app_model: App):
"""
Get default block config
"""
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.has_edit_permission:
raise Forbidden()
# Get default block configs
workflow_service = WorkflowService()
return workflow_service.get_default_block_configs()
@ -680,12 +725,18 @@ class DefaultBlockConfigApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@edit_permission_required
def get(self, app_model: App, block_type: str):
"""
Get default block config
"""
parser = reqparse.RequestParser().add_argument("q", type=str, location="args")
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("q", type=str, location="args")
args = parser.parse_args()
q = args.get("q")
@ -714,23 +765,24 @@ class ConvertToWorkflowApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.COMPLETION])
@edit_permission_required
def post(self, app_model: App):
"""
Convert basic mode of chatbot app to workflow mode
Convert expert mode of chatbot app to workflow mode
Convert Completion App to Workflow App
"""
current_user, _ = current_account_with_tenant()
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.has_edit_permission:
raise Forbidden()
if request.data:
parser = (
reqparse.RequestParser()
.add_argument("name", type=str, required=False, nullable=True, location="json")
.add_argument("icon_type", type=str, required=False, nullable=True, location="json")
.add_argument("icon", type=str, required=False, nullable=True, location="json")
.add_argument("icon_background", type=str, required=False, nullable=True, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=False, nullable=True, location="json")
parser.add_argument("icon_type", type=str, required=False, nullable=True, location="json")
parser.add_argument("icon", type=str, required=False, nullable=True, location="json")
parser.add_argument("icon_background", type=str, required=False, nullable=True, location="json")
args = parser.parse_args()
else:
args = {}
@ -745,6 +797,24 @@ class ConvertToWorkflowApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/config")
class WorkflowConfigApi(Resource):
"""Resource for workflow configuration."""
@api.doc("get_workflow_config")
@api.doc(description="Get workflow configuration")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Workflow configuration retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
def get(self, app_model: App):
return {
"parallel_depth_limit": dify_config.WORKFLOW_PARALLEL_DEPTH_LIMIT,
}
@console_ns.route("/apps/<uuid:app_id>/workflows")
class PublishedAllWorkflowApi(Resource):
@api.doc("get_all_published_workflows")
@ -756,20 +826,21 @@ class PublishedAllWorkflowApi(Resource):
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@marshal_with(workflow_pagination_fields)
@edit_permission_required
def get(self, app_model: App):
"""
Get published workflows
"""
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
.add_argument("user_id", type=str, required=False, location="args")
.add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args")
)
if not isinstance(current_user, Account):
raise Forbidden()
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
parser.add_argument("user_id", type=str, required=False, location="args")
parser.add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args")
args = parser.parse_args()
page = int(args.get("page", 1))
limit = int(args.get("limit", 10))
@ -822,17 +893,19 @@ class WorkflowByIdApi(Resource):
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@marshal_with(workflow_fields)
@edit_permission_required
def patch(self, app_model: App, workflow_id: str):
"""
Update workflow attributes
"""
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("marked_name", type=str, required=False, location="json")
.add_argument("marked_comment", type=str, required=False, location="json")
)
if not isinstance(current_user, Account):
raise Forbidden()
# Check permission
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("marked_name", type=str, required=False, location="json")
parser.add_argument("marked_comment", type=str, required=False, location="json")
args = parser.parse_args()
# Validate name and comment length
@ -875,11 +948,16 @@ class WorkflowByIdApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@edit_permission_required
def delete(self, app_model: App, workflow_id: str):
"""
Delete workflow
"""
if not isinstance(current_user, Account):
raise Forbidden()
# Check permission
if not current_user.has_edit_permission:
raise Forbidden()
workflow_service = WorkflowService()
# Create a session and manage the transaction

View File
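In the publish-workflow hunk above, the new code stops mutating the request-scoped app_model and committing db.session; instead it re-fetches the App through the service's own session (session.get(App, app_model.id)) and updates that instance, so the workflow insert and the app update land in one transaction. A minimal sketch of the pattern, assuming the repo's App and naive_utc_now names:

from sqlalchemy.orm import Session

from libs.datetime_utils import naive_utc_now  # repo helper, assumed importable
from models import App


def link_published_workflow(session: Session, workflow, app_id: str):
    # Update the App row via the same session that owns the transaction,
    # so the workflow insert and the app update commit atomically.
    app_in_session = session.get(App, app_id)
    if app_in_session:
        app_in_session.workflow_id = workflow.id
        app_in_session.updated_at = naive_utc_now()
    session.commit()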

@ -42,35 +42,33 @@ class WorkflowAppLogApi(Resource):
"""
Get workflow app logs
"""
parser = (
reqparse.RequestParser()
.add_argument("keyword", type=str, location="args")
.add_argument(
"status", type=str, choices=["succeeded", "failed", "stopped", "partial-succeeded"], location="args"
)
.add_argument(
"created_at__before", type=str, location="args", help="Filter logs created before this timestamp"
)
.add_argument(
"created_at__after", type=str, location="args", help="Filter logs created after this timestamp"
)
.add_argument(
"created_by_end_user_session_id",
type=str,
location="args",
required=False,
default=None,
)
.add_argument(
"created_by_account",
type=str,
location="args",
required=False,
default=None,
)
.add_argument("page", type=int_range(1, 99999), default=1, location="args")
.add_argument("limit", type=int_range(1, 100), default=20, location="args")
parser = reqparse.RequestParser()
parser.add_argument("keyword", type=str, location="args")
parser.add_argument(
"status", type=str, choices=["succeeded", "failed", "stopped", "partial-succeeded"], location="args"
)
parser.add_argument(
"created_at__before", type=str, location="args", help="Filter logs created before this timestamp"
)
parser.add_argument(
"created_at__after", type=str, location="args", help="Filter logs created after this timestamp"
)
parser.add_argument(
"created_by_end_user_session_id",
type=str,
location="args",
required=False,
default=None,
)
parser.add_argument(
"created_by_account",
type=str,
location="args",
required=False,
default=None,
)
parser.add_argument("page", type=int_range(1, 99999), default=1, location="args")
parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
args = parser.parse_args()
args.status = WorkflowExecutionStatus(args.status) if args.status else None

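Both parser styles in this hunk build the identical parser: flask-restx's `RequestParser.add_argument` returns the parser itself, so the fluent chain and the statement-per-argument form are interchangeable. A small standalone check:

```python
from flask_restx import reqparse
from flask_restx.inputs import int_range

# Fluent style: add_argument returns the parser, so calls chain.
chained = (
    reqparse.RequestParser()
    .add_argument("page", type=int_range(1, 99999), default=1, location="args")
    .add_argument("limit", type=int_range(1, 100), default=20, location="args")
)

# Statement-per-argument style: same arguments, same behavior.
flat = reqparse.RequestParser()
flat.add_argument("page", type=int_range(1, 99999), default=1, location="args")
flat.add_argument("limit", type=int_range(1, 100), default=20, location="args")

assert [a.name for a in chained.args] == [a.name for a in flat.args]
```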
View File

@ -22,7 +22,8 @@ from extensions.ext_database import db
from factories.file_factory import build_from_mapping, build_from_mappings
from factories.variable_factory import build_segment_with_type
from libs.login import current_user, login_required
from models import Account, App, AppMode
from models import App, AppMode
from models.account import Account
from models.workflow import WorkflowDraftVariable
from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService
from services.workflow_service import WorkflowService
@ -57,18 +58,16 @@ def _serialize_var_value(variable: WorkflowDraftVariable):
def _create_pagination_parser():
parser = (
reqparse.RequestParser()
.add_argument(
"page",
type=inputs.int_range(1, 100_000),
required=False,
default=1,
location="args",
help="the page of data requested",
)
.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
parser = reqparse.RequestParser()
parser.add_argument(
"page",
type=inputs.int_range(1, 100_000),
required=False,
default=1,
location="args",
help="the page of data requested",
)
parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
return parser
@ -321,11 +320,10 @@ class VariableApi(Resource):
# "upload_file_id": "1602650a-4fe4-423c-85a2-af76c083e3c4"
# }
parser = (
reqparse.RequestParser()
.add_argument(self._PATCH_NAME_FIELD, type=str, required=False, nullable=True, location="json")
.add_argument(self._PATCH_VALUE_FIELD, type=lambda x: x, required=False, nullable=True, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument(self._PATCH_NAME_FIELD, type=str, required=False, nullable=True, location="json")
# Parse 'value' field as-is to maintain its original data structure
parser.add_argument(self._PATCH_VALUE_FIELD, type=lambda x: x, required=False, nullable=True, location="json")
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),

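The identity `type=lambda x: x` on the value field is deliberate: reqparse passes every JSON value through its `type` callable, so `type=str` would coerce a mapping like the upload-file example above into a string, while the identity function keeps the structure intact. A quick demonstration (payload values are hypothetical):

```python
from flask import Flask
from flask_restx import reqparse

app = Flask(__name__)

parser = reqparse.RequestParser()
parser.add_argument("name", type=str, location="json")
# Identity callable: dicts, lists, and scalars pass through unchanged.
parser.add_argument("value", type=lambda x: x, location="json")

with app.test_request_context(
    json={"name": "file_var", "value": {"upload_file_id": "1602650a-4fe4-423c-85a2-af76c083e3c4"}}
):
    args = parser.parse_args()
    # The nested mapping survives parsing as a real dict.
    assert args["value"]["upload_file_id"].startswith("1602650a")
```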
View File

@ -1,5 +1,6 @@
from typing import cast
from flask_login import current_user
from flask_restx import Resource, marshal_with, reqparse
from flask_restx.inputs import int_range
@ -8,81 +9,15 @@ from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from fields.workflow_run_fields import (
advanced_chat_workflow_run_pagination_fields,
workflow_run_count_fields,
workflow_run_detail_fields,
workflow_run_node_execution_list_fields,
workflow_run_pagination_fields,
)
from libs.custom_inputs import time_duration
from libs.helper import uuid_value
from libs.login import current_user, login_required
from models import Account, App, AppMode, EndUser, WorkflowRunTriggeredFrom
from libs.login import login_required
from models import Account, App, AppMode, EndUser
from services.workflow_run_service import WorkflowRunService
# Workflow run status choices for filtering
WORKFLOW_RUN_STATUS_CHOICES = ["running", "succeeded", "failed", "stopped", "partial-succeeded"]
def _parse_workflow_run_list_args():
"""
Parse common arguments for workflow run list endpoints.
Returns:
Parsed arguments containing last_id, limit, status, and triggered_from filters
"""
parser = reqparse.RequestParser()
parser.add_argument("last_id", type=uuid_value, location="args")
parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
parser.add_argument(
"status",
type=str,
choices=WORKFLOW_RUN_STATUS_CHOICES,
location="args",
required=False,
)
parser.add_argument(
"triggered_from",
type=str,
choices=["debugging", "app-run"],
location="args",
required=False,
help="Filter by trigger source: debugging or app-run",
)
return parser.parse_args()
def _parse_workflow_run_count_args():
"""
Parse common arguments for workflow run count endpoints.
Returns:
Parsed arguments containing status, time_range, and triggered_from filters
"""
parser = reqparse.RequestParser()
parser.add_argument(
"status",
type=str,
choices=WORKFLOW_RUN_STATUS_CHOICES,
location="args",
required=False,
)
parser.add_argument(
"time_range",
type=time_duration,
location="args",
required=False,
help="Time range filter (e.g., 7d, 4h, 30m, 30s)",
)
parser.add_argument(
"triggered_from",
type=str,
choices=["debugging", "app-run"],
location="args",
required=False,
help="Filter by trigger source: debugging or app-run",
)
return parser.parse_args()
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs")
class AdvancedChatAppWorkflowRunListApi(Resource):
@ -90,8 +25,6 @@ class AdvancedChatAppWorkflowRunListApi(Resource):
@api.doc(description="Get advanced chat workflow run list")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
@api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"})
@api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"})
@api.response(200, "Workflow runs retrieved successfully", advanced_chat_workflow_run_pagination_fields)
@setup_required
@login_required
@ -102,64 +35,13 @@ class AdvancedChatAppWorkflowRunListApi(Resource):
"""
Get advanced chat app workflow run list
"""
args = _parse_workflow_run_list_args()
# Default to DEBUGGING if not specified
triggered_from = (
WorkflowRunTriggeredFrom(args.get("triggered_from"))
if args.get("triggered_from")
else WorkflowRunTriggeredFrom.DEBUGGING
)
parser = reqparse.RequestParser()
parser.add_argument("last_id", type=uuid_value, location="args")
parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
args = parser.parse_args()
workflow_run_service = WorkflowRunService()
result = workflow_run_service.get_paginate_advanced_chat_workflow_runs(
app_model=app_model, args=args, triggered_from=triggered_from
)
return result
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs/count")
class AdvancedChatAppWorkflowRunCountApi(Resource):
@api.doc("get_advanced_chat_workflow_runs_count")
@api.doc(description="Get advanced chat workflow runs count statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"})
@api.doc(
params={
"time_range": (
"Filter by time range (optional): e.g., 7d (7 days), 4h (4 hours), "
"30m (30 minutes), 30s (30 seconds). Filters by created_at field."
)
}
)
@api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"})
@api.response(200, "Workflow runs count retrieved successfully", workflow_run_count_fields)
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT])
@marshal_with(workflow_run_count_fields)
def get(self, app_model: App):
"""
Get advanced chat workflow runs count statistics
"""
args = _parse_workflow_run_count_args()
# Default to DEBUGGING if not specified
triggered_from = (
WorkflowRunTriggeredFrom(args.get("triggered_from"))
if args.get("triggered_from")
else WorkflowRunTriggeredFrom.DEBUGGING
)
workflow_run_service = WorkflowRunService()
result = workflow_run_service.get_workflow_runs_count(
app_model=app_model,
status=args.get("status"),
time_range=args.get("time_range"),
triggered_from=triggered_from,
)
result = workflow_run_service.get_paginate_advanced_chat_workflow_runs(app_model=app_model, args=args)
return result
@ -170,8 +52,6 @@ class WorkflowRunListApi(Resource):
@api.doc(description="Get workflow run list")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
@api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"})
@api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"})
@api.response(200, "Workflow runs retrieved successfully", workflow_run_pagination_fields)
@setup_required
@login_required
@ -182,64 +62,13 @@ class WorkflowRunListApi(Resource):
"""
Get workflow run list
"""
args = _parse_workflow_run_list_args()
# Default to DEBUGGING for workflow if not specified (backward compatibility)
triggered_from = (
WorkflowRunTriggeredFrom(args.get("triggered_from"))
if args.get("triggered_from")
else WorkflowRunTriggeredFrom.DEBUGGING
)
parser = reqparse.RequestParser()
parser.add_argument("last_id", type=uuid_value, location="args")
parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
args = parser.parse_args()
workflow_run_service = WorkflowRunService()
result = workflow_run_service.get_paginate_workflow_runs(
app_model=app_model, args=args, triggered_from=triggered_from
)
return result
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/count")
class WorkflowRunCountApi(Resource):
@api.doc("get_workflow_runs_count")
@api.doc(description="Get workflow runs count statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"})
@api.doc(
params={
"time_range": (
"Filter by time range (optional): e.g., 7d (7 days), 4h (4 hours), "
"30m (30 minutes), 30s (30 seconds). Filters by created_at field."
)
}
)
@api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"})
@api.response(200, "Workflow runs count retrieved successfully", workflow_run_count_fields)
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@marshal_with(workflow_run_count_fields)
def get(self, app_model: App):
"""
Get workflow runs count statistics
"""
args = _parse_workflow_run_count_args()
# Default to DEBUGGING for workflow if not specified (backward compatibility)
triggered_from = (
WorkflowRunTriggeredFrom(args.get("triggered_from"))
if args.get("triggered_from")
else WorkflowRunTriggeredFrom.DEBUGGING
)
workflow_run_service = WorkflowRunService()
result = workflow_run_service.get_workflow_runs_count(
app_model=app_model,
status=args.get("status"),
time_range=args.get("time_range"),
triggered_from=triggered_from,
)
result = workflow_run_service.get_paginate_workflow_runs(app_model=app_model, args=args)
return result

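The count endpoints accept a `time_range` string such as `7d`, `4h`, `30m`, or `30s` through `libs.custom_inputs.time_duration`, whose source is not included in this compare. A plausible sketch matching the documented examples (the regex and error message are assumptions):

```python
import re
from datetime import timedelta

# Suffix -> timedelta keyword, per the documented examples (7d, 4h, 30m, 30s).
_UNITS = {"d": "days", "h": "hours", "m": "minutes", "s": "seconds"}


def time_duration(value: str) -> timedelta:
    """Parse strings like '7d' or '30m' into a timedelta.

    Hypothetical sketch; the repository's libs.custom_inputs.time_duration
    may validate differently.
    """
    match = re.fullmatch(r"(\d+)([dhms])", value.strip())
    if not match:
        raise ValueError(f"Invalid time range: {value!r} (expected e.g. 7d, 4h, 30m, 30s)")
    amount, unit = match.groups()
    return timedelta(**{_UNITS[unit]: int(amount)})


assert time_duration("7d") == timedelta(days=7)
assert time_duration("30m") == timedelta(minutes=30)
```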
View File

@ -1,26 +1,24 @@
from flask import abort, jsonify
from datetime import datetime
from decimal import Decimal
import pytz
import sqlalchemy as sa
from flask import jsonify
from flask_login import current_user
from flask_restx import Resource, reqparse
from sqlalchemy.orm import sessionmaker
from controllers.console import api, console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from libs.datetime_utils import parse_time_range
from libs.helper import DatetimeString
from libs.login import current_account_with_tenant, login_required
from libs.login import login_required
from models.enums import WorkflowRunTriggeredFrom
from models.model import AppMode
from repositories.factory import DifyAPIRepositoryFactory
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/daily-conversations")
class WorkflowDailyRunsStatistic(Resource):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker)
@api.doc("get_workflow_daily_runs_statistic")
@api.doc(description="Get workflow daily runs statistics")
@api.doc(params={"app_id": "Application ID"})
@ -31,41 +29,64 @@ class WorkflowDailyRunsStatistic(Resource):
@login_required
@account_initialization_required
def get(self, app_model):
account, _ = current_account_with_tenant()
account = current_user
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
assert account.timezone is not None
sql_query = """SELECT
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
COUNT(id) AS runs
FROM
workflow_runs
WHERE
app_id = :app_id
AND triggered_from = :triggered_from"""
arg_dict = {
"tz": account.timezone,
"app_id": app_model.id,
"triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value,
}
try:
start_date, end_date = parse_time_range(args["start"], args["end"], account.timezone)
except ValueError as e:
abort(400, description=str(e))
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
response_data = self._workflow_run_repo.get_daily_runs_statistics(
tenant_id=app_model.tenant_id,
app_id=app_model.id,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
start_date=start_date,
end_date=end_date,
timezone=account.timezone,
)
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
sql_query += " AND created_at >= :start"
arg_dict["start"] = start_datetime_utc
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
sql_query += " AND created_at < :end"
arg_dict["end"] = end_datetime_utc
sql_query += " GROUP BY date ORDER BY date"
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "runs": i.runs})
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/daily-terminals")
class WorkflowDailyTerminalsStatistic(Resource):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker)
@api.doc("get_workflow_daily_terminals_statistic")
@api.doc(description="Get workflow daily terminals statistics")
@api.doc(params={"app_id": "Application ID"})
@ -76,41 +97,64 @@ class WorkflowDailyTerminalsStatistic(Resource):
@login_required
@account_initialization_required
def get(self, app_model):
account, _ = current_account_with_tenant()
account = current_user
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
assert account.timezone is not None
sql_query = """SELECT
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
COUNT(DISTINCT workflow_runs.created_by) AS terminal_count
FROM
workflow_runs
WHERE
app_id = :app_id
AND triggered_from = :triggered_from"""
arg_dict = {
"tz": account.timezone,
"app_id": app_model.id,
"triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value,
}
try:
start_date, end_date = parse_time_range(args["start"], args["end"], account.timezone)
except ValueError as e:
abort(400, description=str(e))
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
response_data = self._workflow_run_repo.get_daily_terminals_statistics(
tenant_id=app_model.tenant_id,
app_id=app_model.id,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
start_date=start_date,
end_date=end_date,
timezone=account.timezone,
)
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
sql_query += " AND created_at >= :start"
arg_dict["start"] = start_datetime_utc
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
sql_query += " AND created_at < :end"
arg_dict["end"] = end_datetime_utc
sql_query += " GROUP BY date ORDER BY date"
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "terminal_count": i.terminal_count})
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/token-costs")
class WorkflowDailyTokenCostStatistic(Resource):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker)
@api.doc("get_workflow_daily_token_cost_statistic")
@api.doc(description="Get workflow daily token cost statistics")
@api.doc(params={"app_id": "Application ID"})
@ -121,41 +165,69 @@ class WorkflowDailyTokenCostStatistic(Resource):
@login_required
@account_initialization_required
def get(self, app_model):
account, _ = current_account_with_tenant()
account = current_user
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
assert account.timezone is not None
sql_query = """SELECT
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
SUM(workflow_runs.total_tokens) AS token_count
FROM
workflow_runs
WHERE
app_id = :app_id
AND triggered_from = :triggered_from"""
arg_dict = {
"tz": account.timezone,
"app_id": app_model.id,
"triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value,
}
try:
start_date, end_date = parse_time_range(args["start"], args["end"], account.timezone)
except ValueError as e:
abort(400, description=str(e))
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
response_data = self._workflow_run_repo.get_daily_token_cost_statistics(
tenant_id=app_model.tenant_id,
app_id=app_model.id,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
start_date=start_date,
end_date=end_date,
timezone=account.timezone,
)
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
sql_query += " AND created_at >= :start"
arg_dict["start"] = start_datetime_utc
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
sql_query += " AND created_at < :end"
arg_dict["end"] = end_datetime_utc
sql_query += " GROUP BY date ORDER BY date"
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append(
{
"date": str(i.date),
"token_count": i.token_count,
}
)
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/average-app-interactions")
class WorkflowAverageAppInteractionStatistic(Resource):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker)
@api.doc("get_workflow_average_app_interaction_statistic")
@api.doc(description="Get workflow average app interaction statistics")
@api.doc(params={"app_id": "Application ID"})
@ -166,29 +238,74 @@ class WorkflowAverageAppInteractionStatistic(Resource):
@account_initialization_required
@get_app_model(mode=[AppMode.WORKFLOW])
def get(self, app_model):
account, _ = current_account_with_tenant()
account = current_user
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
assert account.timezone is not None
sql_query = """SELECT
AVG(sub.interactions) AS interactions,
sub.date
FROM
(
SELECT
DATE(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
c.created_by,
COUNT(c.id) AS interactions
FROM
workflow_runs c
WHERE
c.app_id = :app_id
AND c.triggered_from = :triggered_from
{{start}}
{{end}}
GROUP BY
date, c.created_by
) sub
GROUP BY
sub.date"""
arg_dict = {
"tz": account.timezone,
"app_id": app_model.id,
"triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value,
}
try:
start_date, end_date = parse_time_range(args["start"], args["end"], account.timezone)
except ValueError as e:
abort(400, description=str(e))
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
response_data = self._workflow_run_repo.get_average_app_interaction_statistics(
tenant_id=app_model.tenant_id,
app_id=app_model.id,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
start_date=start_date,
end_date=end_date,
timezone=account.timezone,
)
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
sql_query = sql_query.replace("{{start}}", " AND c.created_at >= :start")
arg_dict["start"] = start_datetime_utc
else:
sql_query = sql_query.replace("{{start}}", "")
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
sql_query = sql_query.replace("{{end}}", " AND c.created_at < :end")
arg_dict["end"] = end_datetime_utc
else:
sql_query = sql_query.replace("{{end}}", "")
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append(
{"date": str(i.date), "interactions": float(i.interactions.quantize(Decimal("0.01")))}
)
return jsonify({"data": response_data})

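Every statistics endpoint above repeats the same conversion that `libs.datetime_utils.parse_time_range` centralizes: parse `%Y-%m-%d %H:%M`, zero out the seconds, localize to the account's timezone, convert to UTC, and use the results as `>= start` / `< end` bounds. The helper itself is not shown in this compare; a sketch reproducing that inline behavior (the start/end ordering check is an assumption, consistent with the `except ValueError` in each caller):

```python
from datetime import datetime

import pytz


def parse_time_range(
    start: str | None, end: str | None, tz_name: str
) -> tuple[datetime | None, datetime | None]:
    """Convert '%Y-%m-%d %H:%M' strings in the user's timezone to UTC.

    Sketch of the behavior the removed inline code implemented; the
    repository's helper may differ in its validation details.
    """
    tz = pytz.timezone(tz_name)

    def to_utc(value: str | None) -> datetime | None:
        if not value:
            return None
        naive = datetime.strptime(value, "%Y-%m-%d %H:%M").replace(second=0)
        return tz.localize(naive).astimezone(pytz.utc)

    start_dt, end_dt = to_utc(start), to_utc(end)
    if start_dt and end_dt and start_dt > end_dt:
        raise ValueError("start must not be later than end")
    return start_dt, end_dt
```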
View File

@ -4,29 +4,28 @@ from typing import ParamSpec, TypeVar, Union
from controllers.console.app.error import AppNotFoundError
from extensions.ext_database import db
from libs.login import current_account_with_tenant
from libs.login import current_user
from models import App, AppMode
from models.account import Account
P = ParamSpec("P")
R = TypeVar("R")
P1 = ParamSpec("P1")
R1 = TypeVar("R1")
def _load_app_model(app_id: str) -> App | None:
_, current_tenant_id = current_account_with_tenant()
assert isinstance(current_user, Account)
app_model = (
db.session.query(App)
.where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
.where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
.first()
)
return app_model
def get_app_model(view: Callable[P, R] | None = None, *, mode: Union[AppMode, list[AppMode], None] = None):
def decorator(view_func: Callable[P1, R1]):
def decorator(view_func: Callable[P, R]):
@wraps(view_func)
def decorated_view(*args: P1.args, **kwargs: P1.kwargs):
def decorated_view(*args: P.args, **kwargs: P.kwargs):
if not kwargs.get("app_id"):
raise ValueError("missing app_id in path parameters")

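One side of this hunk types the inner `decorator` with fresh `P1`/`R1` variables while the other reuses the module-level `P`/`R`; either way, `ParamSpec` keeps the decorated view's exact signature visible to type checkers. A toy, self-contained version of the pattern:

```python
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")


def log_calls(view_func: Callable[P, R]) -> Callable[P, R]:
    """Toy signature-preserving decorator: type checkers see the wrapped
    function with exactly the original parameters and return type."""

    @wraps(view_func)
    def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
        print(f"calling {view_func.__name__}")
        return view_func(*args, **kwargs)

    return decorated


@log_calls
def fetch_app(app_id: str, *, strict: bool = True) -> str:
    return f"app:{app_id} strict={strict}"


print(fetch_app("123", strict=False))  # type-checks and runs
```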
View File

@ -7,14 +7,18 @@ from controllers.console.error import AlreadyActivateError
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.helper import StrLen, email, extract_remote_ip, timezone
from models import AccountStatus
from models.account import AccountStatus
from services.account_service import AccountService, RegisterService
active_check_parser = (
reqparse.RequestParser()
.add_argument("workspace_id", type=str, required=False, nullable=True, location="args", help="Workspace ID")
.add_argument("email", type=email, required=False, nullable=True, location="args", help="Email address")
.add_argument("token", type=str, required=True, nullable=False, location="args", help="Activation token")
active_check_parser = reqparse.RequestParser()
active_check_parser.add_argument(
"workspace_id", type=str, required=False, nullable=True, location="args", help="Workspace ID"
)
active_check_parser.add_argument(
"email", type=email, required=False, nullable=True, location="args", help="Email address"
)
active_check_parser.add_argument(
"token", type=str, required=True, nullable=False, location="args", help="Activation token"
)
@ -56,15 +60,15 @@ class ActivateCheckApi(Resource):
return {"is_valid": False}
active_parser = (
reqparse.RequestParser()
.add_argument("workspace_id", type=str, required=False, nullable=True, location="json")
.add_argument("email", type=email, required=False, nullable=True, location="json")
.add_argument("token", type=str, required=True, nullable=False, location="json")
.add_argument("name", type=StrLen(30), required=True, nullable=False, location="json")
.add_argument("interface_language", type=supported_language, required=True, nullable=False, location="json")
.add_argument("timezone", type=timezone, required=True, nullable=False, location="json")
active_parser = reqparse.RequestParser()
active_parser.add_argument("workspace_id", type=str, required=False, nullable=True, location="json")
active_parser.add_argument("email", type=email, required=False, nullable=True, location="json")
active_parser.add_argument("token", type=str, required=True, nullable=False, location="json")
active_parser.add_argument("name", type=StrLen(30), required=True, nullable=False, location="json")
active_parser.add_argument(
"interface_language", type=supported_language, required=True, nullable=False, location="json"
)
active_parser.add_argument("timezone", type=timezone, required=True, nullable=False, location="json")
@console_ns.route("/activate")
@ -99,7 +103,7 @@ class ActivateApi(Resource):
account.interface_language = args["interface_language"]
account.timezone = args["timezone"]
account.interface_theme = "light"
account.status = AccountStatus.ACTIVE
account.status = AccountStatus.ACTIVE.value
account.initialized_at = naive_utc_now()
db.session.commit()

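The `AccountStatus.ACTIVE` vs `AccountStatus.ACTIVE.value` toggle is harmless at the Python level if `AccountStatus` is a `str`-based enum, which its interchangeable use across this compare suggests; only the database layer cares whether a raw string reaches a plain `String` column. A quick check of that assumption (member values are illustrative):

```python
from enum import Enum


class AccountStatus(str, Enum):  # assumed str mixin, mirroring models.account
    PENDING = "pending"
    ACTIVE = "active"
    BANNED = "banned"


status_in_db = "active"  # what a plain String column would store

# With a str-mixin enum both comparisons hold, so assigning the member or
# its .value is interchangeable in Python code.
assert status_in_db == AccountStatus.ACTIVE
assert status_in_db == AccountStatus.ACTIVE.value
assert AccountStatus.ACTIVE.value == "active"
```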
View File

@ -1,22 +1,21 @@
from flask_login import current_user
from flask_restx import Resource, reqparse
from werkzeug.exceptions import Forbidden
from controllers.console import console_ns
from controllers.console import api
from controllers.console.auth.error import ApiKeyAuthFailedError
from libs.login import current_account_with_tenant, login_required
from libs.login import login_required
from services.auth.api_key_auth_service import ApiKeyAuthService
from ..wraps import account_initialization_required, setup_required
@console_ns.route("/api-key-auth/data-source")
class ApiKeyAuthDataSource(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self):
_, current_tenant_id = current_account_with_tenant()
data_source_api_key_bindings = ApiKeyAuthService.get_provider_auth_list(current_tenant_id)
data_source_api_key_bindings = ApiKeyAuthService.get_provider_auth_list(current_user.current_tenant_id)
if data_source_api_key_bindings:
return {
"sources": [
@ -34,44 +33,41 @@ class ApiKeyAuthDataSource(Resource):
return {"sources": []}
@console_ns.route("/api-key-auth/data-source/binding")
class ApiKeyAuthDataSourceBinding(Resource):
@setup_required
@login_required
@account_initialization_required
def post(self):
# The role of the current user in the table must be admin or owner
current_user, current_tenant_id = current_account_with_tenant()
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("category", type=str, required=True, nullable=False, location="json")
.add_argument("provider", type=str, required=True, nullable=False, location="json")
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("category", type=str, required=True, nullable=False, location="json")
parser.add_argument("provider", type=str, required=True, nullable=False, location="json")
parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
args = parser.parse_args()
ApiKeyAuthService.validate_api_key_auth_args(args)
try:
ApiKeyAuthService.create_provider_auth(current_tenant_id, args)
ApiKeyAuthService.create_provider_auth(current_user.current_tenant_id, args)
except Exception as e:
raise ApiKeyAuthFailedError(str(e))
return {"result": "success"}, 200
@console_ns.route("/api-key-auth/data-source/<uuid:binding_id>")
class ApiKeyAuthDataSourceBindingDelete(Resource):
@setup_required
@login_required
@account_initialization_required
def delete(self, binding_id):
# The role of the current user in the table must be admin or owner
current_user, current_tenant_id = current_account_with_tenant()
if not current_user.is_admin_or_owner:
raise Forbidden()
ApiKeyAuthService.delete_provider_auth(current_tenant_id, binding_id)
ApiKeyAuthService.delete_provider_auth(current_user.current_tenant_id, binding_id)
return {"result": "success"}, 204
api.add_resource(ApiKeyAuthDataSource, "/api-key-auth/data-source")
api.add_resource(ApiKeyAuthDataSourceBinding, "/api-key-auth/data-source/binding")
api.add_resource(ApiKeyAuthDataSourceBindingDelete, "/api-key-auth/data-source/<uuid:binding_id>")

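Several hunks in this compare toggle between direct `flask_login.current_user` access and `current_user, current_tenant_id = current_account_with_tenant()`. The helper's body is not part of this diff; a sketch consistent with how it is called (it returns a typed `Account` plus its current tenant id, presumably failing fast when either is missing):

```python
from flask_login import current_user
from werkzeug.exceptions import Unauthorized

from models.account import Account


def current_account_with_tenant() -> tuple[Account, str]:
    """Hypothetical sketch of libs.login.current_account_with_tenant.

    Callers in this diff unpack (account, tenant_id), so the helper
    plausibly narrows current_user to Account and asserts a tenant.
    """
    if not isinstance(current_user, Account):
        raise Unauthorized("Authenticated account required")
    tenant_id = current_user.current_tenant_id
    if tenant_id is None:
        raise Unauthorized("Account has no active workspace")
    return current_user, tenant_id
```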
View File

@ -1,13 +1,14 @@
import logging
import httpx
import requests
from flask import current_app, redirect, request
from flask_login import current_user
from flask_restx import Resource, fields
from werkzeug.exceptions import Forbidden
from configs import dify_config
from controllers.console import api, console_ns
from libs.login import current_account_with_tenant, login_required
from libs.login import login_required
from libs.oauth_data_source import NotionOAuth
from ..wraps import account_initialization_required, setup_required
@ -44,7 +45,6 @@ class OAuthDataSource(Resource):
@api.response(403, "Admin privileges required")
def get(self, provider: str):
# The role of the current user in the table must be admin or owner
current_user, _ = current_account_with_tenant()
if not current_user.is_admin_or_owner:
raise Forbidden()
OAUTH_DATASOURCE_PROVIDERS = get_oauth_providers()
@ -119,7 +119,7 @@ class OAuthDataSourceBinding(Resource):
return {"error": "Invalid code"}, 400
try:
oauth_provider.get_access_token(code)
except httpx.HTTPStatusError as e:
except requests.HTTPError as e:
logger.exception(
"An error occurred during the OAuthCallback process with %s: %s", provider, e.response.text
)
@ -152,7 +152,7 @@ class OAuthDataSourceSync(Resource):
return {"error": "Invalid provider"}, 400
try:
oauth_provider.sync_data_source(binding_id)
except httpx.HTTPStatusError as e:
except requests.HTTPError as e:
logger.exception(
"An error occurred during the OAuthCallback process with %s: %s", provider, e.response.text
)

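The OAuth handlers swap between `requests.HTTPError` and `httpx.HTTPStatusError`; both are raised by the respective `raise_for_status()` and both carry a `.response`, so the handler bodies stay nearly identical. A side-by-side sketch against a placeholder URL:

```python
import httpx
import requests

URL = "https://example.com/oauth/token"  # placeholder endpoint


def fetch_with_requests() -> str:
    try:
        resp = requests.post(URL, timeout=10)
        resp.raise_for_status()  # raises requests.HTTPError on 4xx/5xx
        return resp.text
    except requests.HTTPError as e:
        return f"failed: {e.response.text}"


def fetch_with_httpx() -> str:
    try:
        resp = httpx.post(URL, timeout=10)
        resp.raise_for_status()  # raises httpx.HTTPStatusError on 4xx/5xx
        return resp.text
    except httpx.HTTPStatusError as e:
        return f"failed: {e.response.text}"
```

One subtlety: the broader `requests.RequestException` may carry `response=None` (hence the `if e.response` guard seen later in this compare), whereas `requests.HTTPError` raised by `raise_for_status()` always has a response attached.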
View File

@ -5,7 +5,7 @@ from sqlalchemy.orm import Session
from configs import dify_config
from constants.languages import languages
from controllers.console import console_ns
from controllers.console import api
from controllers.console.auth.error import (
EmailAlreadyInUseError,
EmailCodeError,
@ -19,23 +19,20 @@ from controllers.console.wraps import email_password_login_enabled, email_regist
from extensions.ext_database import db
from libs.helper import email, extract_remote_ip
from libs.password import valid_password
from models import Account
from models.account import Account
from services.account_service import AccountService
from services.billing_service import BillingService
from services.errors.account import AccountNotFoundError, AccountRegisterError
@console_ns.route("/email-register/send-email")
class EmailRegisterSendEmailApi(Resource):
@setup_required
@email_password_login_enabled
@email_register_enabled
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("email", type=email, required=True, location="json")
.add_argument("language", type=str, required=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("email", type=email, required=True, location="json")
parser.add_argument("language", type=str, required=False, location="json")
args = parser.parse_args()
ip_address = extract_remote_ip(request)
@ -55,18 +52,15 @@ class EmailRegisterSendEmailApi(Resource):
return {"result": "success", "data": token}
@console_ns.route("/email-register/validity")
class EmailRegisterCheckApi(Resource):
@setup_required
@email_password_login_enabled
@email_register_enabled
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("email", type=str, required=True, location="json")
.add_argument("code", type=str, required=True, location="json")
.add_argument("token", type=str, required=True, nullable=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("email", type=str, required=True, location="json")
parser.add_argument("code", type=str, required=True, location="json")
parser.add_argument("token", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
user_email = args["email"]
@ -98,18 +92,15 @@ class EmailRegisterCheckApi(Resource):
return {"is_valid": True, "email": token_data.get("email"), "token": new_token}
@console_ns.route("/email-register")
class EmailRegisterResetApi(Resource):
@setup_required
@email_password_login_enabled
@email_register_enabled
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("token", type=str, required=True, nullable=False, location="json")
.add_argument("new_password", type=valid_password, required=True, nullable=False, location="json")
.add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("token", type=str, required=True, nullable=False, location="json")
parser.add_argument("new_password", type=valid_password, required=True, nullable=False, location="json")
parser.add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json")
args = parser.parse_args()
# Validate passwords match
@ -157,3 +148,8 @@ class EmailRegisterResetApi(Resource):
raise AccountInFreezeError()
return account
api.add_resource(EmailRegisterSendEmailApi, "/email-register/send-email")
api.add_resource(EmailRegisterCheckApi, "/email-register/validity")
api.add_resource(EmailRegisterResetApi, "/email-register")

View File

@ -20,7 +20,7 @@ from events.tenant_event import tenant_was_created
from extensions.ext_database import db
from libs.helper import email, extract_remote_ip
from libs.password import hash_password, valid_password
from models import Account
from models.account import Account
from services.account_service import AccountService, TenantService
from services.feature_service import FeatureService
@ -54,11 +54,9 @@ class ForgotPasswordSendEmailApi(Resource):
@setup_required
@email_password_login_enabled
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("email", type=email, required=True, location="json")
.add_argument("language", type=str, required=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("email", type=email, required=True, location="json")
parser.add_argument("language", type=str, required=False, location="json")
args = parser.parse_args()
ip_address = extract_remote_ip(request)
@ -113,12 +111,10 @@ class ForgotPasswordCheckApi(Resource):
@setup_required
@email_password_login_enabled
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("email", type=str, required=True, location="json")
.add_argument("code", type=str, required=True, location="json")
.add_argument("token", type=str, required=True, nullable=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("email", type=str, required=True, location="json")
parser.add_argument("code", type=str, required=True, location="json")
parser.add_argument("token", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
user_email = args["email"]
@ -173,12 +169,10 @@ class ForgotPasswordResetApi(Resource):
@setup_required
@email_password_login_enabled
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("token", type=str, required=True, nullable=False, location="json")
.add_argument("new_password", type=valid_password, required=True, nullable=False, location="json")
.add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("token", type=str, required=True, nullable=False, location="json")
parser.add_argument("new_password", type=valid_password, required=True, nullable=False, location="json")
parser.add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json")
args = parser.parse_args()
# Validate passwords match
@ -227,3 +221,8 @@ class ForgotPasswordResetApi(Resource):
TenantService.create_tenant_member(tenant, account, role="owner")
account.current_tenant = tenant
tenant_was_created.send(tenant)
api.add_resource(ForgotPasswordSendEmailApi, "/forgot-password")
api.add_resource(ForgotPasswordCheckApi, "/forgot-password/validity")
api.add_resource(ForgotPasswordResetApi, "/forgot-password/resets")

View File

@ -1,11 +1,13 @@
from typing import cast
import flask_login
from flask import make_response, request
from flask import request
from flask_restx import Resource, reqparse
import services
from configs import dify_config
from constants.languages import get_valid_language
from controllers.console import console_ns
from constants.languages import languages
from controllers.console import api
from controllers.console.auth.error import (
AuthenticationFailedError,
EmailCodeError,
@ -24,16 +26,7 @@ from controllers.console.error import (
from controllers.console.wraps import email_password_login_enabled, setup_required
from events.tenant_event import tenant_was_created
from libs.helper import email, extract_remote_ip
from libs.login import current_account_with_tenant
from libs.token import (
clear_access_token_from_cookie,
clear_csrf_token_from_cookie,
clear_refresh_token_from_cookie,
extract_refresh_token,
set_access_token_to_cookie,
set_csrf_token_to_cookie,
set_refresh_token_to_cookie,
)
from models.account import Account
from services.account_service import AccountService, RegisterService, TenantService
from services.billing_service import BillingService
from services.errors.account import AccountRegisterError
@ -41,7 +34,6 @@ from services.errors.workspace import WorkSpaceNotAllowedCreateError, Workspaces
from services.feature_service import FeatureService
@console_ns.route("/login")
class LoginApi(Resource):
"""Resource for user login."""
@ -49,13 +41,11 @@ class LoginApi(Resource):
@email_password_login_enabled
def post(self):
"""Authenticate user and login."""
parser = (
reqparse.RequestParser()
.add_argument("email", type=email, required=True, location="json")
.add_argument("password", type=str, required=True, location="json")
.add_argument("remember_me", type=bool, required=False, default=False, location="json")
.add_argument("invite_token", type=str, required=False, default=None, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("email", type=email, required=True, location="json")
parser.add_argument("password", type=str, required=True, location="json")
parser.add_argument("remember_me", type=bool, required=False, default=False, location="json")
parser.add_argument("invite_token", type=str, required=False, default=None, location="json")
args = parser.parse_args()
if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(args["email"]):
@ -98,48 +88,27 @@ class LoginApi(Resource):
token_pair = AccountService.login(account=account, ip_address=extract_remote_ip(request))
AccountService.reset_login_error_rate_limit(args["email"])
# Create response with cookies instead of returning tokens in body
response = make_response({"result": "success"})
set_access_token_to_cookie(request, response, token_pair.access_token)
set_refresh_token_to_cookie(request, response, token_pair.refresh_token)
set_csrf_token_to_cookie(request, response, token_pair.csrf_token)
return response
return {"result": "success", "data": token_pair.model_dump()}
@console_ns.route("/logout")
class LogoutApi(Resource):
@setup_required
def post(self):
current_user, _ = current_account_with_tenant()
account = current_user
def get(self):
account = cast(Account, flask_login.current_user)
if isinstance(account, flask_login.AnonymousUserMixin):
response = make_response({"result": "success"})
else:
AccountService.logout(account=account)
flask_login.logout_user()
response = make_response({"result": "success"})
# Clear cookies on logout
clear_access_token_from_cookie(response)
clear_refresh_token_from_cookie(response)
clear_csrf_token_from_cookie(response)
return response
return {"result": "success"}
AccountService.logout(account=account)
flask_login.logout_user()
return {"result": "success"}
@console_ns.route("/reset-password")
class ResetPasswordSendEmailApi(Resource):
@setup_required
@email_password_login_enabled
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("email", type=email, required=True, location="json")
.add_argument("language", type=str, required=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("email", type=email, required=True, location="json")
parser.add_argument("language", type=str, required=False, location="json")
args = parser.parse_args()
if args["language"] is not None and args["language"] == "zh-Hans":
@ -161,15 +130,12 @@ class ResetPasswordSendEmailApi(Resource):
return {"result": "success", "data": token}
@console_ns.route("/email-code-login")
class EmailCodeLoginSendEmailApi(Resource):
@setup_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("email", type=email, required=True, location="json")
.add_argument("language", type=str, required=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("email", type=email, required=True, location="json")
parser.add_argument("language", type=str, required=False, location="json")
args = parser.parse_args()
ip_address = extract_remote_ip(request)
@ -196,21 +162,16 @@ class EmailCodeLoginSendEmailApi(Resource):
return {"result": "success", "data": token}
@console_ns.route("/email-code-login/validity")
class EmailCodeLoginApi(Resource):
@setup_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("email", type=str, required=True, location="json")
.add_argument("code", type=str, required=True, location="json")
.add_argument("token", type=str, required=True, location="json")
.add_argument("language", type=str, required=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("email", type=str, required=True, location="json")
parser.add_argument("code", type=str, required=True, location="json")
parser.add_argument("token", type=str, required=True, location="json")
args = parser.parse_args()
user_email = args["email"]
language = args["language"]
token_data = AccountService.get_email_code_login_data(args["token"])
if token_data is None:
@ -244,9 +205,7 @@ class EmailCodeLoginApi(Resource):
if account is None:
try:
account = AccountService.create_account_and_tenant(
email=user_email,
name=user_email,
interface_language=get_valid_language(language),
email=user_email, name=user_email, interface_language=languages[0]
)
except WorkSpaceNotAllowedCreateError:
raise NotAllowedCreateWorkspace()
@ -256,36 +215,25 @@ class EmailCodeLoginApi(Resource):
raise WorkspacesLimitExceeded()
token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))
AccountService.reset_login_error_rate_limit(args["email"])
# Create response with cookies instead of returning tokens in body
response = make_response({"result": "success"})
set_csrf_token_to_cookie(request, response, token_pair.csrf_token)
# Set HTTP-only secure cookies for tokens
set_access_token_to_cookie(request, response, token_pair.access_token)
set_refresh_token_to_cookie(request, response, token_pair.refresh_token)
return response
return {"result": "success", "data": token_pair.model_dump()}
@console_ns.route("/refresh-token")
class RefreshTokenApi(Resource):
def post(self):
# Get refresh token from cookie instead of request body
refresh_token = extract_refresh_token(request)
if not refresh_token:
return {"result": "fail", "message": "No refresh token provided"}, 401
parser = reqparse.RequestParser()
parser.add_argument("refresh_token", type=str, required=True, location="json")
args = parser.parse_args()
try:
new_token_pair = AccountService.refresh_token(refresh_token)
# Create response with new cookies
response = make_response({"result": "success"})
# Update cookies with new tokens
set_csrf_token_to_cookie(request, response, new_token_pair.csrf_token)
set_access_token_to_cookie(request, response, new_token_pair.access_token)
set_refresh_token_to_cookie(request, response, new_token_pair.refresh_token)
return response
new_token_pair = AccountService.refresh_token(args["refresh_token"])
return {"result": "success", "data": new_token_pair.model_dump()}
except Exception as e:
return {"result": "fail", "message": str(e)}, 401
return {"result": "fail", "data": str(e)}, 401
api.add_resource(LoginApi, "/login")
api.add_resource(LogoutApi, "/logout")
api.add_resource(EmailCodeLoginSendEmailApi, "/email-code-login")
api.add_resource(EmailCodeLoginApi, "/email-code-login/validity")
api.add_resource(ResetPasswordSendEmailApi, "/reset-password")
api.add_resource(RefreshTokenApi, "/refresh-token")

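One side of these login hunks returns tokens in the JSON body while the other sets them as cookies via helpers from `libs.token` (`set_access_token_to_cookie`, `clear_access_token_from_cookie`, and friends). Those helpers are not shown in this compare; a plausible sketch, assuming HTTP-only cookies keyed by fixed names:

```python
from flask import Request, Response

ACCESS_TOKEN_COOKIE = "access_token"  # assumed cookie name


def set_access_token_to_cookie(request: Request, response: Response, token: str) -> None:
    """Hypothetical sketch of libs.token.set_access_token_to_cookie."""
    response.set_cookie(
        ACCESS_TOKEN_COOKIE,
        token,
        httponly=True,  # keep the token out of reach of page scripts
        secure=request.is_secure,
        samesite="Lax",
    )


def clear_access_token_from_cookie(response: Response) -> None:
    """Hypothetical sketch: expire the cookie on logout."""
    response.delete_cookie(ACCESS_TOKEN_COOKIE)
```

The CSRF token presumably lands in a cookie the client can read, since it must be echoed back with mutating requests, while the access and refresh tokens stay HTTP-only.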
View File

@ -1,6 +1,6 @@
import logging
import httpx
import requests
from flask import current_app, redirect, request
from flask_restx import Resource
from sqlalchemy import select
@ -14,12 +14,8 @@ from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.helper import extract_remote_ip
from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo
from libs.token import (
set_access_token_to_cookie,
set_csrf_token_to_cookie,
set_refresh_token_to_cookie,
)
from models import Account, AccountStatus
from models import Account
from models.account import AccountStatus
from services.account_service import AccountService, RegisterService, TenantService
from services.billing_service import BillingService
from services.errors.account import AccountNotFoundError, AccountRegisterError
@ -105,10 +101,8 @@ class OAuthCallback(Resource):
try:
token = oauth_provider.get_access_token(code)
user_info = oauth_provider.get_user_info(token)
except httpx.RequestError as e:
error_text = str(e)
if isinstance(e, httpx.HTTPStatusError):
error_text = e.response.text
except requests.RequestException as e:
error_text = e.response.text if e.response else str(e)
logger.exception("An error occurred during the OAuth process with %s: %s", provider, error_text)
return {"error": "OAuth process failed"}, 400
@ -134,11 +128,11 @@ class OAuthCallback(Resource):
return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message={e.description}")
# Check account status
if account.status == AccountStatus.BANNED:
if account.status == AccountStatus.BANNED.value:
return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Account is banned.")
if account.status == AccountStatus.PENDING:
account.status = AccountStatus.ACTIVE
if account.status == AccountStatus.PENDING.value:
account.status = AccountStatus.ACTIVE.value
account.initialized_at = naive_utc_now()
db.session.commit()
@ -157,12 +151,9 @@ class OAuthCallback(Resource):
ip_address=extract_remote_ip(request),
)
response = redirect(f"{dify_config.CONSOLE_WEB_URL}")
set_access_token_to_cookie(request, response, token_pair.access_token)
set_refresh_token_to_cookie(request, response, token_pair.refresh_token)
set_csrf_token_to_cookie(request, response, token_pair.csrf_token)
return response
return redirect(
f"{dify_config.CONSOLE_WEB_URL}?access_token={token_pair.access_token}&refresh_token={token_pair.refresh_token}"
)
def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Account | None:

View File

@ -1,19 +1,20 @@
from collections.abc import Callable
from functools import wraps
from typing import Concatenate, ParamSpec, TypeVar
from typing import Concatenate, ParamSpec, TypeVar, cast
import flask_login
from flask import jsonify, request
from flask_restx import Resource, reqparse
from werkzeug.exceptions import BadRequest, NotFound
from controllers.console.wraps import account_initialization_required, setup_required
from core.model_runtime.utils.encoders import jsonable_encoder
from libs.login import current_account_with_tenant, login_required
from models import Account
from libs.login import login_required
from models.account import Account
from models.model import OAuthProviderApp
from services.oauth_server import OAUTH_ACCESS_TOKEN_EXPIRES_IN, OAuthGrantType, OAuthServerService
from .. import console_ns
from .. import api
P = ParamSpec("P")
R = TypeVar("R")
@ -23,7 +24,8 @@ T = TypeVar("T")
def oauth_server_client_id_required(view: Callable[Concatenate[T, OAuthProviderApp, P], R]):
@wraps(view)
def decorated(self: T, *args: P.args, **kwargs: P.kwargs):
parser = reqparse.RequestParser().add_argument("client_id", type=str, required=True, location="json")
parser = reqparse.RequestParser()
parser.add_argument("client_id", type=str, required=True, location="json")
parsed_args = parser.parse_args()
client_id = parsed_args.get("client_id")
if not client_id:
@ -84,12 +86,12 @@ def oauth_server_access_token_required(view: Callable[Concatenate[T, OAuthProvid
return decorated
@console_ns.route("/oauth/provider")
class OAuthServerAppApi(Resource):
@setup_required
@oauth_server_client_id_required
def post(self, oauth_provider_app: OAuthProviderApp):
parser = reqparse.RequestParser().add_argument("redirect_uri", type=str, required=True, location="json")
parser = reqparse.RequestParser()
parser.add_argument("redirect_uri", type=str, required=True, location="json")
parsed_args = parser.parse_args()
redirect_uri = parsed_args.get("redirect_uri")
@ -106,15 +108,13 @@ class OAuthServerAppApi(Resource):
)
@console_ns.route("/oauth/provider/authorize")
class OAuthServerUserAuthorizeApi(Resource):
@setup_required
@login_required
@account_initialization_required
@oauth_server_client_id_required
def post(self, oauth_provider_app: OAuthProviderApp):
current_user, _ = current_account_with_tenant()
account = current_user
account = cast(Account, flask_login.current_user)
user_account_id = account.id
code = OAuthServerService.sign_oauth_authorization_code(oauth_provider_app.client_id, user_account_id)
@ -125,19 +125,16 @@ class OAuthServerUserAuthorizeApi(Resource):
)
@console_ns.route("/oauth/provider/token")
class OAuthServerUserTokenApi(Resource):
@setup_required
@oauth_server_client_id_required
def post(self, oauth_provider_app: OAuthProviderApp):
parser = (
reqparse.RequestParser()
.add_argument("grant_type", type=str, required=True, location="json")
.add_argument("code", type=str, required=False, location="json")
.add_argument("client_secret", type=str, required=False, location="json")
.add_argument("redirect_uri", type=str, required=False, location="json")
.add_argument("refresh_token", type=str, required=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("grant_type", type=str, required=True, location="json")
parser.add_argument("code", type=str, required=False, location="json")
parser.add_argument("client_secret", type=str, required=False, location="json")
parser.add_argument("redirect_uri", type=str, required=False, location="json")
parser.add_argument("refresh_token", type=str, required=False, location="json")
parsed_args = parser.parse_args()
try:
@ -183,7 +180,6 @@ class OAuthServerUserTokenApi(Resource):
)
@console_ns.route("/oauth/provider/account")
class OAuthServerUserAccountApi(Resource):
@setup_required
@oauth_server_client_id_required
@ -198,3 +194,9 @@ class OAuthServerUserAccountApi(Resource):
"timezone": account.timezone,
}
)
api.add_resource(OAuthServerAppApi, "/oauth/provider")
api.add_resource(OAuthServerUserAuthorizeApi, "/oauth/provider/authorize")
api.add_resource(OAuthServerUserTokenApi, "/oauth/provider/token")
api.add_resource(OAuthServerUserAccountApi, "/oauth/provider/account")

View File

@ -1,43 +1,42 @@
from flask_restx import Resource, reqparse
from controllers.console import console_ns
from controllers.console import api
from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required
from enums.cloud_plan import CloudPlan
from libs.login import current_account_with_tenant, login_required
from libs.login import current_user, login_required
from models.model import Account
from services.billing_service import BillingService
@console_ns.route("/billing/subscription")
class Subscription(Resource):
@setup_required
@login_required
@account_initialization_required
@only_edition_cloud
def get(self):
current_user, current_tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument(
"plan",
type=str,
required=True,
location="args",
choices=[CloudPlan.PROFESSIONAL, CloudPlan.TEAM],
)
.add_argument("interval", type=str, required=True, location="args", choices=["month", "year"])
)
parser = reqparse.RequestParser()
parser.add_argument("plan", type=str, required=True, location="args", choices=["professional", "team"])
parser.add_argument("interval", type=str, required=True, location="args", choices=["month", "year"])
args = parser.parse_args()
assert isinstance(current_user, Account)
BillingService.is_tenant_owner_or_admin(current_user)
return BillingService.get_subscription(args["plan"], args["interval"], current_user.email, current_tenant_id)
assert current_user.current_tenant_id is not None
return BillingService.get_subscription(
args["plan"], args["interval"], current_user.email, current_user.current_tenant_id
)
@console_ns.route("/billing/invoices")
class Invoices(Resource):
@setup_required
@login_required
@account_initialization_required
@only_edition_cloud
def get(self):
current_user, current_tenant_id = current_account_with_tenant()
assert isinstance(current_user, Account)
BillingService.is_tenant_owner_or_admin(current_user)
return BillingService.get_invoices(current_user.email, current_tenant_id)
assert current_user.current_tenant_id is not None
return BillingService.get_invoices(current_user.email, current_user.current_tenant_id)
api.add_resource(Subscription, "/billing/subscription")
api.add_resource(Invoices, "/billing/invoices")

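The subscription endpoint's `choices` move between string literals `["professional", "team"]` and enum members `[CloudPlan.PROFESSIONAL, CloudPlan.TEAM]`; the enum form only satisfies reqparse's membership check if `CloudPlan` is a `str`-based enum whose members compare equal to the raw query values. A small demonstration of that assumption:

```python
from enum import Enum


class CloudPlan(str, Enum):  # assumed str mixin, mirroring enums.cloud_plan
    PROFESSIONAL = "professional"
    TEAM = "team"


# reqparse validates with `value in choices`; str-mixin members compare
# equal to their raw strings, so query input like "team" passes.
assert "team" in [CloudPlan.PROFESSIONAL, CloudPlan.TEAM]
assert "enterprise" not in [CloudPlan.PROFESSIONAL, CloudPlan.TEAM]
```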
View File

@ -1,31 +1,35 @@
from flask import request
from flask_login import current_user
from flask_restx import Resource, reqparse
from libs.helper import extract_remote_ip
from libs.login import current_account_with_tenant, login_required
from libs.login import login_required
from services.billing_service import BillingService
from .. import console_ns
from .. import api
from ..wraps import account_initialization_required, only_edition_cloud, setup_required
@console_ns.route("/compliance/download")
class ComplianceApi(Resource):
@setup_required
@login_required
@account_initialization_required
@only_edition_cloud
def get(self):
current_user, current_tenant_id = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("doc_name", type=str, required=True, location="args")
parser = reqparse.RequestParser()
parser.add_argument("doc_name", type=str, required=True, location="args")
args = parser.parse_args()
ip_address = extract_remote_ip(request)
device_info = request.headers.get("User-Agent", "Unknown device")
return BillingService.get_compliance_download_link(
doc_name=args.doc_name,
account_id=current_user.id,
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
ip=ip_address,
device_info=device_info,
)
api.add_resource(ComplianceApi, "/compliance/download")


@ -3,45 +3,40 @@ from collections.abc import Generator
from typing import cast
from flask import request
from flask_login import current_user
from flask_restx import Resource, marshal_with, reqparse
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound
from controllers.console import console_ns
from controllers.console import api
from controllers.console.wraps import account_initialization_required, setup_required
from core.datasource.entities.datasource_entities import DatasourceProviderType, OnlineDocumentPagesMessage
from core.datasource.online_document.online_document_plugin import OnlineDocumentDatasourcePlugin
from core.indexing_runner import IndexingRunner
from core.rag.extractor.entity.datasource_type import DatasourceType
from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo
from core.rag.extractor.entity.extract_setting import ExtractSetting
from core.rag.extractor.notion_extractor import NotionExtractor
from extensions.ext_database import db
from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields
from libs.datetime_utils import naive_utc_now
from libs.login import current_account_with_tenant, login_required
from libs.login import login_required
from models import DataSourceOauthBinding, Document
from services.dataset_service import DatasetService, DocumentService
from services.datasource_provider_service import DatasourceProviderService
from tasks.document_indexing_sync_task import document_indexing_sync_task
@console_ns.route(
"/data-source/integrates",
"/data-source/integrates/<uuid:binding_id>/<string:action>",
)
class DataSourceApi(Resource):
@setup_required
@login_required
@account_initialization_required
@marshal_with(integrate_list_fields)
def get(self):
_, current_tenant_id = current_account_with_tenant()
# get workspace data source integrates
data_source_integrates = db.session.scalars(
select(DataSourceOauthBinding).where(
DataSourceOauthBinding.tenant_id == current_tenant_id,
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
DataSourceOauthBinding.disabled == False,
)
).all()
@ -114,22 +109,19 @@ class DataSourceApi(Resource):
return {"result": "success"}, 200
@console_ns.route("/notion/pre-import/pages")
class DataSourceNotionListApi(Resource):
@setup_required
@login_required
@account_initialization_required
@marshal_with(integrate_notion_info_list_fields)
def get(self):
current_user, current_tenant_id = current_account_with_tenant()
dataset_id = request.args.get("dataset_id", default=None, type=str)
credential_id = request.args.get("credential_id", default=None, type=str)
if not credential_id:
raise ValueError("Credential id is required.")
datasource_provider_service = DatasourceProviderService()
credential = datasource_provider_service.get_datasource_credentials(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
credential_id=credential_id,
provider="notion_datasource",
plugin_id="langgenius/notion_datasource",
@ -149,7 +141,7 @@ class DataSourceNotionListApi(Resource):
documents = session.scalars(
select(Document).filter_by(
dataset_id=dataset_id,
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
data_source_type="notion_import",
enabled=True,
)
@ -164,7 +156,7 @@ class DataSourceNotionListApi(Resource):
datasource_runtime = DatasourceManager.get_datasource_runtime(
provider_id="langgenius/notion_datasource/notion_datasource",
datasource_name="notion_datasource",
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
datasource_type=DatasourceProviderType.ONLINE_DOCUMENT,
)
datasource_provider_service = DatasourceProviderService()
@ -204,23 +196,17 @@ class DataSourceNotionListApi(Resource):
return {"notion_info": {**workspace_info, "pages": pages}}, 200
@console_ns.route(
"/notion/workspaces/<uuid:workspace_id>/pages/<uuid:page_id>/<string:page_type>/preview",
"/datasets/notion-indexing-estimate",
)
class DataSourceNotionApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self, workspace_id, page_id, page_type):
_, current_tenant_id = current_account_with_tenant()
credential_id = request.args.get("credential_id", default=None, type=str)
if not credential_id:
raise ValueError("Credential id is required.")
datasource_provider_service = DatasourceProviderService()
credential = datasource_provider_service.get_datasource_credentials(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
credential_id=credential_id,
provider="notion_datasource",
plugin_id="langgenius/notion_datasource",
@ -234,7 +220,7 @@ class DataSourceNotionApi(Resource):
notion_obj_id=page_id,
notion_page_type=page_type,
notion_access_token=credential.get("integration_secret"),
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
)
text_docs = extractor.extract()
@ -244,14 +230,12 @@ class DataSourceNotionApi(Resource):
@login_required
@account_initialization_required
def post(self):
_, current_tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("notion_info_list", type=list, required=True, nullable=True, location="json")
.add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
.add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json")
parser = reqparse.RequestParser()
parser.add_argument("notion_info_list", type=list, required=True, nullable=True, location="json")
parser.add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
parser.add_argument(
"doc_language", type=str, default="English", required=False, nullable=False, location="json"
)
args = parser.parse_args()
# validate args
@ -263,22 +247,20 @@ class DataSourceNotionApi(Resource):
credential_id = notion_info.get("credential_id")
for page in notion_info["pages"]:
extract_setting = ExtractSetting(
datasource_type=DatasourceType.NOTION,
notion_info=NotionInfo.model_validate(
{
"credential_id": credential_id,
"notion_workspace_id": workspace_id,
"notion_obj_id": page["page_id"],
"notion_page_type": page["type"],
"tenant_id": current_tenant_id,
}
),
datasource_type=DatasourceType.NOTION.value,
notion_info={
"credential_id": credential_id,
"notion_workspace_id": workspace_id,
"notion_obj_id": page["page_id"],
"notion_page_type": page["type"],
"tenant_id": current_user.current_tenant_id,
},
document_model=args["doc_form"],
)
extract_settings.append(extract_setting)
indexing_runner = IndexingRunner()
response = indexing_runner.indexing_estimate(
current_tenant_id,
current_user.current_tenant_id,
extract_settings,
args["process_rule"],
args["doc_form"],
@ -287,7 +269,6 @@ class DataSourceNotionApi(Resource):
return response.model_dump(), 200
@console_ns.route("/datasets/<uuid:dataset_id>/notion/sync")
class DataSourceNotionDatasetSyncApi(Resource):
@setup_required
@login_required
@ -304,7 +285,6 @@ class DataSourceNotionDatasetSyncApi(Resource):
return {"result": "success"}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/notion/sync")
class DataSourceNotionDocumentSyncApi(Resource):
@setup_required
@login_required
@ -321,3 +301,16 @@ class DataSourceNotionDocumentSyncApi(Resource):
raise NotFound("Document not found.")
document_indexing_sync_task.delay(dataset_id_str, document_id_str)
return {"result": "success"}, 200
api.add_resource(DataSourceApi, "/data-source/integrates", "/data-source/integrates/<uuid:binding_id>/<string:action>")
api.add_resource(DataSourceNotionListApi, "/notion/pre-import/pages")
api.add_resource(
DataSourceNotionApi,
"/notion/workspaces/<uuid:workspace_id>/pages/<uuid:page_id>/<string:page_type>/preview",
"/datasets/notion-indexing-estimate",
)
api.add_resource(DataSourceNotionDatasetSyncApi, "/datasets/<uuid:dataset_id>/notion/sync")
api.add_resource(
DataSourceNotionDocumentSyncApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/notion/sync"
)
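The parser rewrites here and in the files below are behavior-preserving: flask_restx's reqparse.RequestParser.add_argument returns the parser itself, so the chained and the sequential forms build identical parsers. A minimal sketch:

from flask_restx import reqparse

# Chained form: every add_argument call returns the parser, so calls compose.
chained = (
    reqparse.RequestParser()
    .add_argument("doc_form", type=str, default="text_model", location="json")
    .add_argument("doc_language", type=str, default="English", location="json")
)

# Sequential form: the same parser, built one statement at a time.
sequential = reqparse.RequestParser()
sequential.add_argument("doc_form", type=str, default="text_model", location="json")
sequential.add_argument("doc_language", type=str, default="English", location="json")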


@ -1,6 +1,6 @@
from typing import Any, cast
import flask_restx
from flask import request
from flask_login import current_user
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
from sqlalchemy import select
from werkzeug.exceptions import Forbidden, NotFound
@ -23,97 +23,29 @@ from core.model_runtime.entities.model_entities import ModelType
from core.provider_manager import ProviderManager
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.extractor.entity.datasource_type import DatasourceType
from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo
from core.rag.extractor.entity.extract_setting import ExtractSetting
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from extensions.ext_database import db
from fields.app_fields import related_app_list
from fields.dataset_fields import dataset_detail_fields, dataset_query_detail_fields
from fields.document_fields import document_status_fields
from libs.login import current_account_with_tenant, login_required
from libs.validators import validate_description_length
from libs.login import login_required
from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile
from models.dataset import DatasetPermissionEnum
from models.provider_ids import ModelProviderID
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
def _validate_name(name: str) -> str:
def _validate_name(name):
if not name or len(name) < 1 or len(name) > 40:
raise ValueError("Name must be between 1 to 40 characters.")
return name
def _get_retrieval_methods_by_vector_type(vector_type: str | None, is_mock: bool = False) -> dict[str, list[str]]:
"""
Get supported retrieval methods based on vector database type.
Args:
vector_type: Vector database type, can be None
is_mock: Whether this is a Mock API, affects MILVUS handling
Returns:
Dictionary containing supported retrieval methods
Raises:
ValueError: If vector_type is None or unsupported
"""
if vector_type is None:
raise ValueError("Vector store type is not configured.")
# Define vector database types that only support semantic search
semantic_only_types = {
VectorType.RELYT,
VectorType.TIDB_VECTOR,
VectorType.CHROMA,
VectorType.PGVECTO_RS,
VectorType.VIKINGDB,
VectorType.UPSTASH,
}
# Define vector database types that support all retrieval methods
full_search_types = {
VectorType.QDRANT,
VectorType.WEAVIATE,
VectorType.OPENSEARCH,
VectorType.ANALYTICDB,
VectorType.MYSCALE,
VectorType.ORACLE,
VectorType.ELASTICSEARCH,
VectorType.ELASTICSEARCH_JA,
VectorType.PGVECTOR,
VectorType.VASTBASE,
VectorType.TIDB_ON_QDRANT,
VectorType.LINDORM,
VectorType.COUCHBASE,
VectorType.OPENGAUSS,
VectorType.OCEANBASE,
VectorType.TABLESTORE,
VectorType.HUAWEI_CLOUD,
VectorType.TENCENT,
VectorType.MATRIXONE,
VectorType.CLICKZETTA,
VectorType.BAIDU,
VectorType.ALIBABACLOUD_MYSQL,
}
semantic_methods = {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]}
full_methods = {
"retrieval_method": [
RetrievalMethod.SEMANTIC_SEARCH.value,
RetrievalMethod.FULL_TEXT_SEARCH.value,
RetrievalMethod.HYBRID_SEARCH.value,
]
}
if vector_type == VectorType.MILVUS:
return semantic_methods if is_mock else full_methods
if vector_type in semantic_only_types:
return semantic_methods
elif vector_type in full_search_types:
return full_methods
else:
raise ValueError(f"Unsupported vector db type {vector_type}.")
def _validate_description_length(description):
if description and len(description) > 400:
raise ValueError("Description cannot exceed 400 characters.")
return description
@console_ns.route("/datasets")
@ -136,7 +68,6 @@ class DatasetListApi(Resource):
@account_initialization_required
@enterprise_license_required
def get(self):
current_user, current_tenant_id = current_account_with_tenant()
page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int)
ids = request.args.getlist("ids")
@ -145,15 +76,15 @@ class DatasetListApi(Resource):
tag_ids = request.args.getlist("tag_ids")
include_all = request.args.get("include_all", default="false").lower() == "true"
if ids:
datasets, total = DatasetService.get_datasets_by_ids(ids, current_tenant_id)
datasets, total = DatasetService.get_datasets_by_ids(ids, current_user.current_tenant_id)
else:
datasets, total = DatasetService.get_datasets(
page, limit, current_tenant_id, current_user, search, tag_ids, include_all
page, limit, current_user.current_tenant_id, current_user, search, tag_ids, include_all
)
# check embedding setting
provider_manager = ProviderManager()
configurations = provider_manager.get_configurations(tenant_id=current_tenant_id)
configurations = provider_manager.get_configurations(tenant_id=current_user.current_tenant_id)
embedding_models = configurations.get_models(model_type=ModelType.TEXT_EMBEDDING, only_active=True)
@ -161,7 +92,7 @@ class DatasetListApi(Resource):
for embedding_model in embedding_models:
model_names.append(f"{embedding_model.model}:{embedding_model.provider.provider}")
data = cast(list[dict[str, Any]], marshal(datasets, dataset_detail_fields))
data = marshal(datasets, dataset_detail_fields)
for item in data:
# convert embedding_model_provider to plugin standard format
if item["indexing_technique"] == "high_quality" and item["embedding_model_provider"]:
@ -206,53 +137,50 @@ class DatasetListApi(Resource):
@account_initialization_required
@cloud_edition_billing_rate_limit_check("knowledge")
def post(self):
parser = (
reqparse.RequestParser()
.add_argument(
"name",
nullable=False,
required=True,
help="type is required. Name must be between 1 to 40 characters.",
type=_validate_name,
)
.add_argument(
"description",
type=validate_description_length,
nullable=True,
required=False,
default="",
)
.add_argument(
"indexing_technique",
type=str,
location="json",
choices=Dataset.INDEXING_TECHNIQUE_LIST,
nullable=True,
help="Invalid indexing technique.",
)
.add_argument(
"external_knowledge_api_id",
type=str,
nullable=True,
required=False,
)
.add_argument(
"provider",
type=str,
nullable=True,
choices=Dataset.PROVIDER_LIST,
required=False,
default="vendor",
)
.add_argument(
"external_knowledge_id",
type=str,
nullable=True,
required=False,
)
parser = reqparse.RequestParser()
parser.add_argument(
"name",
nullable=False,
required=True,
help="type is required. Name must be between 1 to 40 characters.",
type=_validate_name,
)
parser.add_argument(
"description",
type=_validate_description_length,
nullable=True,
required=False,
default="",
)
parser.add_argument(
"indexing_technique",
type=str,
location="json",
choices=Dataset.INDEXING_TECHNIQUE_LIST,
nullable=True,
help="Invalid indexing technique.",
)
parser.add_argument(
"external_knowledge_api_id",
type=str,
nullable=True,
required=False,
)
parser.add_argument(
"provider",
type=str,
nullable=True,
choices=Dataset.PROVIDER_LIST,
required=False,
default="vendor",
)
parser.add_argument(
"external_knowledge_id",
type=str,
nullable=True,
required=False,
)
args = parser.parse_args()
current_user, current_tenant_id = current_account_with_tenant()
# The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
if not current_user.is_dataset_editor:
@ -260,7 +188,7 @@ class DatasetListApi(Resource):
try:
dataset = DatasetService.create_empty_dataset(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
name=args["name"],
description=args["description"],
indexing_technique=args["indexing_technique"],
@ -288,7 +216,6 @@ class DatasetApi(Resource):
@login_required
@account_initialization_required
def get(self, dataset_id):
current_user, current_tenant_id = current_account_with_tenant()
dataset_id_str = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id_str)
if dataset is None:
@ -297,7 +224,7 @@ class DatasetApi(Resource):
DatasetService.check_dataset_permission(dataset, current_user)
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields))
data = marshal(dataset, dataset_detail_fields)
if dataset.indexing_technique == "high_quality":
if dataset.embedding_model_provider:
provider_id = ModelProviderID(dataset.embedding_model_provider)
@ -308,7 +235,7 @@ class DatasetApi(Resource):
# check embedding setting
provider_manager = ProviderManager()
configurations = provider_manager.get_configurations(tenant_id=current_tenant_id)
configurations = provider_manager.get_configurations(tenant_id=current_user.current_tenant_id)
embedding_models = configurations.get_models(model_type=ModelType.TEXT_EMBEDDING, only_active=True)
@ -354,76 +281,73 @@ class DatasetApi(Resource):
if dataset is None:
raise NotFound("Dataset not found.")
parser = (
reqparse.RequestParser()
.add_argument(
"name",
nullable=False,
help="type is required. Name must be between 1 to 40 characters.",
type=_validate_name,
)
.add_argument("description", location="json", store_missing=False, type=validate_description_length)
.add_argument(
"indexing_technique",
type=str,
location="json",
choices=Dataset.INDEXING_TECHNIQUE_LIST,
nullable=True,
help="Invalid indexing technique.",
)
.add_argument(
"permission",
type=str,
location="json",
choices=(
DatasetPermissionEnum.ONLY_ME,
DatasetPermissionEnum.ALL_TEAM,
DatasetPermissionEnum.PARTIAL_TEAM,
),
help="Invalid permission.",
)
.add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.")
.add_argument(
"embedding_model_provider", type=str, location="json", help="Invalid embedding model provider."
)
.add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.")
.add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.")
.add_argument(
"external_retrieval_model",
type=dict,
required=False,
nullable=True,
location="json",
help="Invalid external retrieval model.",
)
.add_argument(
"external_knowledge_id",
type=str,
required=False,
nullable=True,
location="json",
help="Invalid external knowledge id.",
)
.add_argument(
"external_knowledge_api_id",
type=str,
required=False,
nullable=True,
location="json",
help="Invalid external knowledge api id.",
)
.add_argument(
"icon_info",
type=dict,
required=False,
nullable=True,
location="json",
help="Invalid icon info.",
)
parser = reqparse.RequestParser()
parser.add_argument(
"name",
nullable=False,
help="type is required. Name must be between 1 to 40 characters.",
type=_validate_name,
)
parser.add_argument("description", location="json", store_missing=False, type=_validate_description_length)
parser.add_argument(
"indexing_technique",
type=str,
location="json",
choices=Dataset.INDEXING_TECHNIQUE_LIST,
nullable=True,
help="Invalid indexing technique.",
)
parser.add_argument(
"permission",
type=str,
location="json",
choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM),
help="Invalid permission.",
)
parser.add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.")
parser.add_argument(
"embedding_model_provider", type=str, location="json", help="Invalid embedding model provider."
)
parser.add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.")
parser.add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.")
parser.add_argument(
"external_retrieval_model",
type=dict,
required=False,
nullable=True,
location="json",
help="Invalid external retrieval model.",
)
parser.add_argument(
"external_knowledge_id",
type=str,
required=False,
nullable=True,
location="json",
help="Invalid external knowledge id.",
)
parser.add_argument(
"external_knowledge_api_id",
type=str,
required=False,
nullable=True,
location="json",
help="Invalid external knowledge api id.",
)
parser.add_argument(
"icon_info",
type=dict,
required=False,
nullable=True,
location="json",
help="Invalid icon info.",
)
args = parser.parse_args()
data = request.get_json()
current_user, current_tenant_id = current_account_with_tenant()
# check embedding model setting
if (
@ -445,8 +369,8 @@ class DatasetApi(Resource):
if dataset is None:
raise NotFound("Dataset not found.")
result_data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields))
tenant_id = current_tenant_id
result_data = marshal(dataset, dataset_detail_fields)
tenant_id = current_user.current_tenant_id
if data.get("partial_member_list") and data.get("permission") == "partial_members":
DatasetPermissionService.update_partial_member_list(
@ -470,9 +394,9 @@ class DatasetApi(Resource):
@cloud_edition_billing_rate_limit_check("knowledge")
def delete(self, dataset_id):
dataset_id_str = str(dataset_id)
current_user, _ = current_account_with_tenant()
if not (current_user.has_edit_permission or current_user.is_dataset_operator):
# The role of the current user in the ta table must be admin, owner, or editor
if not (current_user.is_editor or current_user.is_dataset_operator):
raise Forbidden()
try:
@ -511,7 +435,6 @@ class DatasetQueryApi(Resource):
@login_required
@account_initialization_required
def get(self, dataset_id):
current_user, _ = current_account_with_tenant()
dataset_id_str = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id_str)
if dataset is None:
@ -546,31 +469,32 @@ class DatasetIndexingEstimateApi(Resource):
@login_required
@account_initialization_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("info_list", type=dict, required=True, nullable=True, location="json")
.add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
.add_argument(
"indexing_technique",
type=str,
required=True,
choices=Dataset.INDEXING_TECHNIQUE_LIST,
nullable=True,
location="json",
)
.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
.add_argument("dataset_id", type=str, required=False, nullable=False, location="json")
.add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json")
parser = reqparse.RequestParser()
parser.add_argument("info_list", type=dict, required=True, nullable=True, location="json")
parser.add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
parser.add_argument(
"indexing_technique",
type=str,
required=True,
choices=Dataset.INDEXING_TECHNIQUE_LIST,
nullable=True,
location="json",
)
parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
parser.add_argument("dataset_id", type=str, required=False, nullable=False, location="json")
parser.add_argument(
"doc_language", type=str, default="English", required=False, nullable=False, location="json"
)
args = parser.parse_args()
_, current_tenant_id = current_account_with_tenant()
# validate args
DocumentService.estimate_args_validate(args)
extract_settings = []
if args["info_list"]["data_source_type"] == "upload_file":
file_ids = args["info_list"]["file_info_list"]["file_ids"]
file_details = db.session.scalars(
select(UploadFile).where(UploadFile.tenant_id == current_tenant_id, UploadFile.id.in_(file_ids))
select(UploadFile).where(
UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id.in_(file_ids)
)
).all()
if file_details is None:
@ -579,7 +503,7 @@ class DatasetIndexingEstimateApi(Resource):
if file_details:
for file_detail in file_details:
extract_setting = ExtractSetting(
datasource_type=DatasourceType.FILE,
datasource_type=DatasourceType.FILE.value,
upload_file=file_detail,
document_model=args["doc_form"],
)
@ -591,16 +515,14 @@ class DatasetIndexingEstimateApi(Resource):
credential_id = notion_info.get("credential_id")
for page in notion_info["pages"]:
extract_setting = ExtractSetting(
datasource_type=DatasourceType.NOTION,
notion_info=NotionInfo.model_validate(
{
"credential_id": credential_id,
"notion_workspace_id": workspace_id,
"notion_obj_id": page["page_id"],
"notion_page_type": page["type"],
"tenant_id": current_tenant_id,
}
),
datasource_type=DatasourceType.NOTION.value,
notion_info={
"credential_id": credential_id,
"notion_workspace_id": workspace_id,
"notion_obj_id": page["page_id"],
"notion_page_type": page["type"],
"tenant_id": current_user.current_tenant_id,
},
document_model=args["doc_form"],
)
extract_settings.append(extract_setting)
@ -608,17 +530,15 @@ class DatasetIndexingEstimateApi(Resource):
website_info_list = args["info_list"]["website_info_list"]
for url in website_info_list["urls"]:
extract_setting = ExtractSetting(
datasource_type=DatasourceType.WEBSITE,
website_info=WebsiteInfo.model_validate(
{
"provider": website_info_list["provider"],
"job_id": website_info_list["job_id"],
"url": url,
"tenant_id": current_tenant_id,
"mode": "crawl",
"only_main_content": website_info_list["only_main_content"],
}
),
datasource_type=DatasourceType.WEBSITE.value,
website_info={
"provider": website_info_list["provider"],
"job_id": website_info_list["job_id"],
"url": url,
"tenant_id": current_user.current_tenant_id,
"mode": "crawl",
"only_main_content": website_info_list["only_main_content"],
},
document_model=args["doc_form"],
)
extract_settings.append(extract_setting)
@ -627,7 +547,7 @@ class DatasetIndexingEstimateApi(Resource):
indexing_runner = IndexingRunner()
try:
response = indexing_runner.indexing_estimate(
current_tenant_id,
current_user.current_tenant_id,
extract_settings,
args["process_rule"],
args["doc_form"],
@ -658,7 +578,6 @@ class DatasetRelatedAppListApi(Resource):
@account_initialization_required
@marshal_with(related_app_list)
def get(self, dataset_id):
current_user, _ = current_account_with_tenant()
dataset_id_str = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id_str)
if dataset is None:
@ -690,10 +609,11 @@ class DatasetIndexingStatusApi(Resource):
@login_required
@account_initialization_required
def get(self, dataset_id):
_, current_tenant_id = current_account_with_tenant()
dataset_id = str(dataset_id)
documents = db.session.scalars(
select(Document).where(Document.dataset_id == dataset_id, Document.tenant_id == current_tenant_id)
select(Document).where(
Document.dataset_id == dataset_id, Document.tenant_id == current_user.current_tenant_id
)
).all()
documents_status = []
for document in documents:
@ -745,9 +665,10 @@ class DatasetApiKeyApi(Resource):
@account_initialization_required
@marshal_with(api_key_list)
def get(self):
_, current_tenant_id = current_account_with_tenant()
keys = db.session.scalars(
select(ApiToken).where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_tenant_id)
select(ApiToken).where(
ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id
)
).all()
return {"items": keys}
@ -757,18 +678,17 @@ class DatasetApiKeyApi(Resource):
@marshal_with(api_key_fields)
def post(self):
# The role of the current user in the ta table must be admin or owner
current_user, current_tenant_id = current_account_with_tenant()
if not current_user.is_admin_or_owner:
raise Forbidden()
current_key_count = (
db.session.query(ApiToken)
.where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_tenant_id)
.where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id)
.count()
)
if current_key_count >= self.max_keys:
api.abort(
flask_restx.abort(
400,
message=f"Cannot create more than {self.max_keys} API keys for this resource type.",
code="max_keys_exceeded",
@ -776,7 +696,7 @@ class DatasetApiKeyApi(Resource):
key = ApiToken.generate_api_key(self.token_prefix, 24)
api_token = ApiToken()
api_token.tenant_id = current_tenant_id
api_token.tenant_id = current_user.current_tenant_id
api_token.token = key
api_token.type = self.resource_type
db.session.add(api_token)
@ -796,7 +716,6 @@ class DatasetApiDeleteApi(Resource):
@login_required
@account_initialization_required
def delete(self, api_key_id):
current_user, current_tenant_id = current_account_with_tenant()
api_key_id = str(api_key_id)
# The role of the current user in the ta table must be admin or owner
@ -806,7 +725,7 @@ class DatasetApiDeleteApi(Resource):
key = (
db.session.query(ApiToken)
.where(
ApiToken.tenant_id == current_tenant_id,
ApiToken.tenant_id == current_user.current_tenant_id,
ApiToken.type == self.resource_type,
ApiToken.id == api_key_id,
)
@ -814,7 +733,7 @@ class DatasetApiDeleteApi(Resource):
)
if key is None:
api.abort(404, message="API key not found")
flask_restx.abort(404, message="API key not found")
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
db.session.commit()
@ -857,7 +776,49 @@ class DatasetRetrievalSettingApi(Resource):
@account_initialization_required
def get(self):
vector_type = dify_config.VECTOR_STORE
return _get_retrieval_methods_by_vector_type(vector_type, is_mock=False)
match vector_type:
case (
VectorType.RELYT
| VectorType.TIDB_VECTOR
| VectorType.CHROMA
| VectorType.PGVECTO_RS
| VectorType.BAIDU
| VectorType.VIKINGDB
| VectorType.UPSTASH
):
return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]}
case (
VectorType.QDRANT
| VectorType.WEAVIATE
| VectorType.OPENSEARCH
| VectorType.ANALYTICDB
| VectorType.MYSCALE
| VectorType.ORACLE
| VectorType.ELASTICSEARCH
| VectorType.ELASTICSEARCH_JA
| VectorType.PGVECTOR
| VectorType.VASTBASE
| VectorType.TIDB_ON_QDRANT
| VectorType.LINDORM
| VectorType.COUCHBASE
| VectorType.MILVUS
| VectorType.OPENGAUSS
| VectorType.OCEANBASE
| VectorType.TABLESTORE
| VectorType.HUAWEI_CLOUD
| VectorType.TENCENT
| VectorType.MATRIXONE
| VectorType.CLICKZETTA
):
return {
"retrieval_method": [
RetrievalMethod.SEMANTIC_SEARCH.value,
RetrievalMethod.FULL_TEXT_SEARCH.value,
RetrievalMethod.HYBRID_SEARCH.value,
]
}
case _:
raise ValueError(f"Unsupported vector db type {vector_type}.")
@console_ns.route("/datasets/retrieval-setting/<string:vector_type>")
@ -870,7 +831,48 @@ class DatasetRetrievalSettingMockApi(Resource):
@login_required
@account_initialization_required
def get(self, vector_type):
return _get_retrieval_methods_by_vector_type(vector_type, is_mock=True)
match vector_type:
case (
VectorType.MILVUS
| VectorType.RELYT
| VectorType.TIDB_VECTOR
| VectorType.CHROMA
| VectorType.PGVECTO_RS
| VectorType.BAIDU
| VectorType.VIKINGDB
| VectorType.UPSTASH
):
return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]}
case (
VectorType.QDRANT
| VectorType.WEAVIATE
| VectorType.OPENSEARCH
| VectorType.ANALYTICDB
| VectorType.MYSCALE
| VectorType.ORACLE
| VectorType.ELASTICSEARCH
| VectorType.ELASTICSEARCH_JA
| VectorType.COUCHBASE
| VectorType.PGVECTOR
| VectorType.VASTBASE
| VectorType.LINDORM
| VectorType.OPENGAUSS
| VectorType.OCEANBASE
| VectorType.TABLESTORE
| VectorType.TENCENT
| VectorType.HUAWEI_CLOUD
| VectorType.MATRIXONE
| VectorType.CLICKZETTA
):
return {
"retrieval_method": [
RetrievalMethod.SEMANTIC_SEARCH.value,
RetrievalMethod.FULL_TEXT_SEARCH.value,
RetrievalMethod.HYBRID_SEARCH.value,
]
}
case _:
raise ValueError(f"Unsupported vector db type {vector_type}.")
@console_ns.route("/datasets/<uuid:dataset_id>/error-docs")
@ -905,7 +907,6 @@ class DatasetPermissionUserListApi(Resource):
@login_required
@account_initialization_required
def get(self, dataset_id):
current_user, _ = current_account_with_tenant()
dataset_id_str = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id_str)
if dataset is None:
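The retrieval-setting hunks above replace a shared helper with inline match statements whose cases join many VectorType members with |. That is a PEP 634 or-pattern over value patterns; a self-contained sketch with illustrative enum members (the real enum and the RetrievalMethod values live in the repo):

from enum import Enum

class VectorTypeSketch(Enum):  # illustrative stand-in for VectorType
    CHROMA = "chroma"
    QDRANT = "qdrant"
    MILVUS = "milvus"

def retrieval_methods(vector_type: VectorTypeSketch) -> dict[str, list[str]]:
    match vector_type:
        case VectorTypeSketch.CHROMA:
            return {"retrieval_method": ["semantic_search"]}
        case VectorTypeSketch.QDRANT | VectorTypeSketch.MILVUS:
            # or-pattern: the case matches if any alternative matches
            return {"retrieval_method": ["semantic_search", "full_text_search", "hybrid_search"]}
        case _:
            raise ValueError(f"Unsupported vector db type {vector_type}.")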


@ -4,8 +4,8 @@ from argparse import ArgumentTypeError
from collections.abc import Sequence
from typing import Literal, cast
import sqlalchemy as sa
from flask import request
from flask_login import current_user
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
from sqlalchemy import asc, desc, select
from werkzeug.exceptions import Forbidden, NotFound
@ -43,7 +43,7 @@ from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.errors.invoke import InvokeAuthorizationError
from core.plugin.impl.exc import PluginDaemonClientSideError
from core.rag.extractor.entity.datasource_type import DatasourceType
from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo
from core.rag.extractor.entity.extract_setting import ExtractSetting
from extensions.ext_database import db
from fields.document_fields import (
dataset_and_document_fields,
@ -52,7 +52,7 @@ from fields.document_fields import (
document_with_segments_fields,
)
from libs.datetime_utils import naive_utc_now
from libs.login import current_account_with_tenant, login_required
from libs.login import login_required
from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile
from models.dataset import DocumentPipelineExecutionLog
from services.dataset_service import DatasetService, DocumentService
@ -63,7 +63,6 @@ logger = logging.getLogger(__name__)
class DocumentResource(Resource):
def get_document(self, dataset_id: str, document_id: str) -> Document:
current_user, current_tenant_id = current_account_with_tenant()
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
raise NotFound("Dataset not found.")
@ -78,13 +77,12 @@ class DocumentResource(Resource):
if not document:
raise NotFound("Document not found.")
if document.tenant_id != current_tenant_id:
if document.tenant_id != current_user.current_tenant_id:
raise Forbidden("No permission.")
return document
def get_batch_documents(self, dataset_id: str, batch: str) -> Sequence[Document]:
current_user, _ = current_account_with_tenant()
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
raise NotFound("Dataset not found.")
@ -112,7 +110,6 @@ class GetProcessRuleApi(Resource):
@login_required
@account_initialization_required
def get(self):
current_user, _ = current_account_with_tenant()
req_data = request.args
document_id = req_data.get("document_id")
@ -169,7 +166,6 @@ class DatasetDocumentListApi(Resource):
@login_required
@account_initialization_required
def get(self, dataset_id):
current_user, current_tenant_id = current_account_with_tenant()
dataset_id = str(dataset_id)
page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int)
@ -201,7 +197,7 @@ class DatasetDocumentListApi(Resource):
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
query = select(Document).filter_by(dataset_id=str(dataset_id), tenant_id=current_tenant_id)
query = select(Document).filter_by(dataset_id=str(dataset_id), tenant_id=current_user.current_tenant_id)
if search:
search = f"%{search}%"
@ -215,13 +211,13 @@ class DatasetDocumentListApi(Resource):
if sort == "hit_count":
sub_query = (
sa.select(DocumentSegment.document_id, sa.func.sum(DocumentSegment.hit_count).label("total_hit_count"))
db.select(DocumentSegment.document_id, db.func.sum(DocumentSegment.hit_count).label("total_hit_count"))
.group_by(DocumentSegment.document_id)
.subquery()
)
query = query.outerjoin(sub_query, sub_query.c.document_id == Document.id).order_by(
sort_logic(sa.func.coalesce(sub_query.c.total_hit_count, 0)),
sort_logic(db.func.coalesce(sub_query.c.total_hit_count, 0)),
sort_logic(Document.position),
)
elif sort == "created_at":
@ -275,7 +271,6 @@ class DatasetDocumentListApi(Resource):
@cloud_edition_billing_resource_check("vector_space")
@cloud_edition_billing_rate_limit_check("knowledge")
def post(self, dataset_id):
current_user, _ = current_account_with_tenant()
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
@ -292,23 +287,23 @@ class DatasetDocumentListApi(Resource):
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
parser = (
reqparse.RequestParser()
.add_argument(
"indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json"
)
.add_argument("data_source", type=dict, required=False, location="json")
.add_argument("process_rule", type=dict, required=False, location="json")
.add_argument("duplicate", type=bool, default=True, nullable=False, location="json")
.add_argument("original_document_id", type=str, required=False, location="json")
.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
.add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
.add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json")
parser = reqparse.RequestParser()
parser.add_argument(
"indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json"
)
parser.add_argument("data_source", type=dict, required=False, location="json")
parser.add_argument("process_rule", type=dict, required=False, location="json")
parser.add_argument("duplicate", type=bool, default=True, nullable=False, location="json")
parser.add_argument("original_document_id", type=str, required=False, location="json")
parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
parser.add_argument(
"doc_language", type=str, default="English", required=False, nullable=False, location="json"
)
args = parser.parse_args()
knowledge_config = KnowledgeConfig.model_validate(args)
knowledge_config = KnowledgeConfig(**args)
if not dataset.indexing_technique and not knowledge_config.indexing_technique:
raise ValueError("indexing_technique is required.")
@ -375,38 +370,37 @@ class DatasetInitApi(Resource):
@cloud_edition_billing_rate_limit_check("knowledge")
def post(self):
# The role of the current user in the ta table must be admin, owner, dataset_operator, or editor
current_user, current_tenant_id = current_account_with_tenant()
if not current_user.is_dataset_editor:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument(
"indexing_technique",
type=str,
choices=Dataset.INDEXING_TECHNIQUE_LIST,
required=True,
nullable=False,
location="json",
)
.add_argument("data_source", type=dict, required=True, nullable=True, location="json")
.add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
.add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json")
.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
.add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
parser = reqparse.RequestParser()
parser.add_argument(
"indexing_technique",
type=str,
choices=Dataset.INDEXING_TECHNIQUE_LIST,
required=True,
nullable=False,
location="json",
)
parser.add_argument("data_source", type=dict, required=True, nullable=True, location="json")
parser.add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
parser.add_argument(
"doc_language", type=str, default="English", required=False, nullable=False, location="json"
)
parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
args = parser.parse_args()
knowledge_config = KnowledgeConfig.model_validate(args)
knowledge_config = KnowledgeConfig(**args)
if knowledge_config.indexing_technique == "high_quality":
if knowledge_config.embedding_model is None or knowledge_config.embedding_model_provider is None:
raise ValueError("embedding model and embedding model provider are required for high quality indexing.")
try:
model_manager = ModelManager()
model_manager.get_model_instance(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
provider=args["embedding_model_provider"],
model_type=ModelType.TEXT_EMBEDDING,
model=args["embedding_model"],
@ -423,9 +417,7 @@ class DatasetInitApi(Resource):
try:
dataset, documents, batch = DocumentService.save_document_without_dataset_id(
tenant_id=current_tenant_id,
knowledge_config=knowledge_config,
account=current_user,
tenant_id=current_user.current_tenant_id, knowledge_config=knowledge_config, account=current_user
)
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
@ -451,7 +443,6 @@ class DocumentIndexingEstimateApi(DocumentResource):
@login_required
@account_initialization_required
def get(self, dataset_id, document_id):
_, current_tenant_id = current_account_with_tenant()
dataset_id = str(dataset_id)
document_id = str(document_id)
document = self.get_document(dataset_id, document_id)
@ -460,7 +451,7 @@ class DocumentIndexingEstimateApi(DocumentResource):
raise DocumentAlreadyFinishedError()
data_process_rule = document.dataset_process_rule
data_process_rule_dict = data_process_rule.to_dict() if data_process_rule else {}
data_process_rule_dict = data_process_rule.to_dict()
response = {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []}
@ -480,14 +471,14 @@ class DocumentIndexingEstimateApi(DocumentResource):
raise NotFound("File not found.")
extract_setting = ExtractSetting(
datasource_type=DatasourceType.FILE, upload_file=file, document_model=document.doc_form
datasource_type=DatasourceType.FILE.value, upload_file=file, document_model=document.doc_form
)
indexing_runner = IndexingRunner()
try:
estimate_response = indexing_runner.indexing_estimate(
current_tenant_id,
current_user.current_tenant_id,
[extract_setting],
data_process_rule_dict,
document.doc_form,
@ -516,14 +507,13 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
@login_required
@account_initialization_required
def get(self, dataset_id, batch):
_, current_tenant_id = current_account_with_tenant()
dataset_id = str(dataset_id)
batch = str(batch)
documents = self.get_batch_documents(dataset_id, batch)
if not documents:
return {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []}, 200
data_process_rule = documents[0].dataset_process_rule
data_process_rule_dict = data_process_rule.to_dict() if data_process_rule else {}
data_process_rule_dict = data_process_rule.to_dict()
extract_settings = []
for document in documents:
if document.indexing_status in {"completed", "error"}:
@ -536,7 +526,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
file_id = data_source_info["upload_file_id"]
file_detail = (
db.session.query(UploadFile)
.where(UploadFile.tenant_id == current_tenant_id, UploadFile.id == file_id)
.where(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id == file_id)
.first()
)
@ -544,7 +534,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
raise NotFound("File not found.")
extract_setting = ExtractSetting(
datasource_type=DatasourceType.FILE, upload_file=file_detail, document_model=document.doc_form
datasource_type=DatasourceType.FILE.value, upload_file=file_detail, document_model=document.doc_form
)
extract_settings.append(extract_setting)
@ -552,16 +542,14 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
if not data_source_info:
continue
extract_setting = ExtractSetting(
datasource_type=DatasourceType.NOTION,
notion_info=NotionInfo.model_validate(
{
"credential_id": data_source_info["credential_id"],
"notion_workspace_id": data_source_info["notion_workspace_id"],
"notion_obj_id": data_source_info["notion_page_id"],
"notion_page_type": data_source_info["type"],
"tenant_id": current_tenant_id,
}
),
datasource_type=DatasourceType.NOTION.value,
notion_info={
"credential_id": data_source_info["credential_id"],
"notion_workspace_id": data_source_info["notion_workspace_id"],
"notion_obj_id": data_source_info["notion_page_id"],
"notion_page_type": data_source_info["type"],
"tenant_id": current_user.current_tenant_id,
},
document_model=document.doc_form,
)
extract_settings.append(extract_setting)
@ -569,17 +557,15 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
if not data_source_info:
continue
extract_setting = ExtractSetting(
datasource_type=DatasourceType.WEBSITE,
website_info=WebsiteInfo.model_validate(
{
"provider": data_source_info["provider"],
"job_id": data_source_info["job_id"],
"url": data_source_info["url"],
"tenant_id": current_tenant_id,
"mode": data_source_info["mode"],
"only_main_content": data_source_info["only_main_content"],
}
),
datasource_type=DatasourceType.WEBSITE.value,
website_info={
"provider": data_source_info["provider"],
"job_id": data_source_info["job_id"],
"url": data_source_info["url"],
"tenant_id": current_user.current_tenant_id,
"mode": data_source_info["mode"],
"only_main_content": data_source_info["only_main_content"],
},
document_model=document.doc_form,
)
extract_settings.append(extract_setting)
@ -589,7 +575,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
indexing_runner = IndexingRunner()
try:
response = indexing_runner.indexing_estimate(
current_tenant_id,
current_user.current_tenant_id,
extract_settings,
data_process_rule_dict,
document.doc_form,
@ -746,7 +732,7 @@ class DocumentApi(DocumentResource):
"name": document.name,
"created_from": document.created_from,
"created_by": document.created_by,
"created_at": int(document.created_at.timestamp()),
"created_at": document.created_at.timestamp(),
"tokens": document.tokens,
"indexing_status": document.indexing_status,
"completed_at": int(document.completed_at.timestamp()) if document.completed_at else None,
@ -766,7 +752,7 @@ class DocumentApi(DocumentResource):
}
else:
dataset_process_rules = DatasetService.get_process_rules(dataset_id)
document_process_rules = document.dataset_process_rule.to_dict() if document.dataset_process_rule else {}
document_process_rules = document.dataset_process_rule.to_dict()
data_source_info = document.data_source_detail_dict
response = {
"id": document.id,
@ -779,7 +765,7 @@ class DocumentApi(DocumentResource):
"name": document.name,
"created_from": document.created_from,
"created_by": document.created_by,
"created_at": int(document.created_at.timestamp()),
"created_at": document.created_at.timestamp(),
"tokens": document.tokens,
"indexing_status": document.indexing_status,
"completed_at": int(document.completed_at.timestamp()) if document.completed_at else None,
@ -840,7 +826,6 @@ class DocumentProcessingApi(DocumentResource):
@account_initialization_required
@cloud_edition_billing_rate_limit_check("knowledge")
def patch(self, dataset_id, document_id, action: Literal["pause", "resume"]):
current_user, _ = current_account_with_tenant()
dataset_id = str(dataset_id)
document_id = str(document_id)
document = self.get_document(dataset_id, document_id)
@ -891,7 +876,6 @@ class DocumentMetadataApi(DocumentResource):
@login_required
@account_initialization_required
def put(self, dataset_id, document_id):
current_user, _ = current_account_with_tenant()
dataset_id = str(dataset_id)
document_id = str(document_id)
document = self.get_document(dataset_id, document_id)
@ -939,7 +923,6 @@ class DocumentStatusApi(DocumentResource):
@cloud_edition_billing_resource_check("vector_space")
@cloud_edition_billing_rate_limit_check("knowledge")
def patch(self, dataset_id, action: Literal["enable", "disable", "archive", "un_archive"]):
current_user, _ = current_account_with_tenant()
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
if dataset is None:
@ -1043,9 +1026,8 @@ class DocumentRetryApi(DocumentResource):
def post(self, dataset_id):
"""retry document."""
parser = reqparse.RequestParser().add_argument(
"document_ids", type=list, required=True, nullable=False, location="json"
)
parser = reqparse.RequestParser()
parser.add_argument("document_ids", type=list, required=True, nullable=False, location="json")
args = parser.parse_args()
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
@ -1087,14 +1069,12 @@ class DocumentRenameApi(DocumentResource):
@marshal_with(document_fields)
def post(self, dataset_id, document_id):
# The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
current_user, _ = current_account_with_tenant()
if not current_user.is_dataset_editor:
raise Forbidden()
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
raise NotFound("Dataset not found.")
DatasetService.check_dataset_operator_permission(current_user, dataset)
parser = reqparse.RequestParser().add_argument("name", type=str, required=True, nullable=False, location="json")
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
try:
@ -1112,7 +1092,6 @@ class WebsiteDocumentSyncApi(DocumentResource):
@account_initialization_required
def get(self, dataset_id, document_id):
"""sync website document."""
_, current_tenant_id = current_account_with_tenant()
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
@ -1121,7 +1100,7 @@ class WebsiteDocumentSyncApi(DocumentResource):
document = DocumentService.get_document(dataset.id, document_id)
if not document:
raise NotFound("Document not found.")
if document.tenant_id != current_tenant_id:
if document.tenant_id != current_user.current_tenant_id:
raise Forbidden("No permission.")
if document.data_source_type != "website_crawl":
raise ValueError("Document is not a website document.")
@ -1134,7 +1113,6 @@ class WebsiteDocumentSyncApi(DocumentResource):
return {"result": "success"}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/pipeline-execution-log")
class DocumentPipelineExecutionLogApi(DocumentResource):
@setup_required
@login_required
@ -1168,3 +1146,29 @@ class DocumentPipelineExecutionLogApi(DocumentResource):
"input_data": log.input_data,
"datasource_node_id": log.datasource_node_id,
}, 200
api.add_resource(GetProcessRuleApi, "/datasets/process-rule")
api.add_resource(DatasetDocumentListApi, "/datasets/<uuid:dataset_id>/documents")
api.add_resource(DatasetInitApi, "/datasets/init")
api.add_resource(
DocumentIndexingEstimateApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-estimate"
)
api.add_resource(DocumentBatchIndexingEstimateApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-estimate")
api.add_resource(DocumentBatchIndexingStatusApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-status")
api.add_resource(DocumentIndexingStatusApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-status")
api.add_resource(DocumentApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
api.add_resource(
DocumentProcessingApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/<string:action>"
)
api.add_resource(DocumentMetadataApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/metadata")
api.add_resource(DocumentStatusApi, "/datasets/<uuid:dataset_id>/documents/status/<string:action>/batch")
api.add_resource(DocumentPauseApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/pause")
api.add_resource(DocumentRecoverApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/resume")
api.add_resource(DocumentRetryApi, "/datasets/<uuid:dataset_id>/retry")
api.add_resource(DocumentRenameApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/rename")
api.add_resource(WebsiteDocumentSyncApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/website-sync")
api.add_resource(
DocumentPipelineExecutionLogApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/pipeline-execution-log"
)
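One subtle swap in this file is KnowledgeConfig.model_validate(args) versus KnowledgeConfig(**args). For a plain dict of known fields, the two Pydantic v2 spellings produce equal, fully validated instances; a sketch with a hypothetical stand-in model (KnowledgeConfig's real fields live in the repo):

from pydantic import BaseModel

class KnowledgeConfigSketch(BaseModel):  # hypothetical stand-in
    indexing_technique: str | None = None
    doc_form: str = "text_model"

args = {"indexing_technique": "high_quality", "doc_form": "text_model"}

validated = KnowledgeConfigSketch.model_validate(args)  # mapping in, validated model out
unpacked = KnowledgeConfigSketch(**args)                # keyword form, same validation
assert validated == unpacked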


@ -1,12 +1,13 @@
import uuid
from flask import request
from flask_login import current_user
from flask_restx import Resource, marshal, reqparse
from sqlalchemy import select
from werkzeug.exceptions import Forbidden, NotFound
import services
from controllers.console import console_ns
from controllers.console import api
from controllers.console.app.error import ProviderNotInitializeError
from controllers.console.datasets.error import (
ChildChunkDeleteIndexError,
@ -26,7 +27,7 @@ from core.model_runtime.entities.model_entities import ModelType
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from fields.segment_fields import child_chunk_fields, segment_fields
from libs.login import current_account_with_tenant, login_required
from libs.login import login_required
from models.dataset import ChildChunk, DocumentSegment
from models.model import UploadFile
from services.dataset_service import DatasetService, DocumentService, SegmentService
@ -36,14 +37,11 @@ from services.errors.chunk import ChildChunkIndexingError as ChildChunkIndexingS
from tasks.batch_create_segment_to_index_task import batch_create_segment_to_index_task
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments")
class DatasetDocumentSegmentListApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self, dataset_id, document_id):
current_user, current_tenant_id = current_account_with_tenant()
dataset_id = str(dataset_id)
document_id = str(document_id)
dataset = DatasetService.get_dataset(dataset_id)
@ -60,15 +58,13 @@ class DatasetDocumentSegmentListApi(Resource):
if not document:
raise NotFound("Document not found.")
parser = (
reqparse.RequestParser()
.add_argument("limit", type=int, default=20, location="args")
.add_argument("status", type=str, action="append", default=[], location="args")
.add_argument("hit_count_gte", type=int, default=None, location="args")
.add_argument("enabled", type=str, default="all", location="args")
.add_argument("keyword", type=str, default=None, location="args")
.add_argument("page", type=int, default=1, location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("limit", type=int, default=20, location="args")
parser.add_argument("status", type=str, action="append", default=[], location="args")
parser.add_argument("hit_count_gte", type=int, default=None, location="args")
parser.add_argument("enabled", type=str, default="all", location="args")
parser.add_argument("keyword", type=str, default=None, location="args")
parser.add_argument("page", type=int, default=1, location="args")
args = parser.parse_args()
@ -82,7 +78,7 @@ class DatasetDocumentSegmentListApi(Resource):
select(DocumentSegment)
.where(
DocumentSegment.document_id == str(document_id),
DocumentSegment.tenant_id == current_tenant_id,
DocumentSegment.tenant_id == current_user.current_tenant_id,
)
.order_by(DocumentSegment.position.asc())
)
@ -118,8 +114,6 @@ class DatasetDocumentSegmentListApi(Resource):
@account_initialization_required
@cloud_edition_billing_rate_limit_check("knowledge")
def delete(self, dataset_id, document_id):
current_user, _ = current_account_with_tenant()
# check dataset
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
@ -145,7 +139,6 @@ class DatasetDocumentSegmentListApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segment/<string:action>")
class DatasetDocumentSegmentApi(Resource):
@setup_required
@login_required
@@ -153,8 +146,6 @@ class DatasetDocumentSegmentApi(Resource):
@cloud_edition_billing_resource_check("vector_space")
@cloud_edition_billing_rate_limit_check("knowledge")
def patch(self, dataset_id, document_id, action):
current_user, current_tenant_id = current_account_with_tenant()
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
@@ -178,7 +169,7 @@ class DatasetDocumentSegmentApi(Resource):
try:
model_manager = ModelManager()
model_manager.get_model_instance(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
provider=dataset.embedding_model_provider,
model_type=ModelType.TEXT_EMBEDDING,
model=dataset.embedding_model,
@@ -202,7 +193,6 @@ class DatasetDocumentSegmentApi(Resource):
return {"result": "success"}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segment")
class DatasetDocumentSegmentAddApi(Resource):
@setup_required
@login_required
@@ -211,8 +201,6 @@ class DatasetDocumentSegmentAddApi(Resource):
@cloud_edition_billing_knowledge_limit_check("add_segment")
@cloud_edition_billing_rate_limit_check("knowledge")
def post(self, dataset_id, document_id):
current_user, current_tenant_id = current_account_with_tenant()
# check dataset
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
@@ -230,7 +218,7 @@ class DatasetDocumentSegmentAddApi(Resource):
try:
model_manager = ModelManager()
model_manager.get_model_instance(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
provider=dataset.embedding_model_provider,
model_type=ModelType.TEXT_EMBEDDING,
model=dataset.embedding_model,
@@ -246,19 +234,16 @@ class DatasetDocumentSegmentAddApi(Resource):
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
# validate args
parser = (
reqparse.RequestParser()
.add_argument("content", type=str, required=True, nullable=False, location="json")
.add_argument("answer", type=str, required=False, nullable=True, location="json")
.add_argument("keywords", type=list, required=False, nullable=True, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("content", type=str, required=True, nullable=False, location="json")
parser.add_argument("answer", type=str, required=False, nullable=True, location="json")
parser.add_argument("keywords", type=list, required=False, nullable=True, location="json")
args = parser.parse_args()
SegmentService.segment_create_args_validate(args, document)
segment = SegmentService.create_segment(args, document, dataset)
return {"data": marshal(segment, segment_fields), "doc_form": document.doc_form}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>")
class DatasetDocumentSegmentUpdateApi(Resource):
@setup_required
@login_required
@@ -266,8 +251,6 @@ class DatasetDocumentSegmentUpdateApi(Resource):
@cloud_edition_billing_resource_check("vector_space")
@cloud_edition_billing_rate_limit_check("knowledge")
def patch(self, dataset_id, document_id, segment_id):
current_user, current_tenant_id = current_account_with_tenant()
# check dataset
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
@@ -285,7 +268,7 @@ class DatasetDocumentSegmentUpdateApi(Resource):
try:
model_manager = ModelManager()
model_manager.get_model_instance(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
provider=dataset.embedding_model_provider,
model_type=ModelType.TEXT_EMBEDDING,
model=dataset.embedding_model,
@@ -300,7 +283,7 @@ class DatasetDocumentSegmentUpdateApi(Resource):
segment_id = str(segment_id)
segment = (
db.session.query(DocumentSegment)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment:
@@ -313,18 +296,16 @@ class DatasetDocumentSegmentUpdateApi(Resource):
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
# validate args
parser = (
reqparse.RequestParser()
.add_argument("content", type=str, required=True, nullable=False, location="json")
.add_argument("answer", type=str, required=False, nullable=True, location="json")
.add_argument("keywords", type=list, required=False, nullable=True, location="json")
.add_argument(
"regenerate_child_chunks", type=bool, required=False, nullable=True, default=False, location="json"
)
parser = reqparse.RequestParser()
parser.add_argument("content", type=str, required=True, nullable=False, location="json")
parser.add_argument("answer", type=str, required=False, nullable=True, location="json")
parser.add_argument("keywords", type=list, required=False, nullable=True, location="json")
parser.add_argument(
"regenerate_child_chunks", type=bool, required=False, nullable=True, default=False, location="json"
)
args = parser.parse_args()
SegmentService.segment_create_args_validate(args, document)
segment = SegmentService.update_segment(SegmentUpdateArgs.model_validate(args), segment, document, dataset)
segment = SegmentService.update_segment(SegmentUpdateArgs(**args), segment, document, dataset)
return {"data": marshal(segment, segment_fields), "doc_form": document.doc_form}, 200
@setup_required
@@ -332,8 +313,6 @@ class DatasetDocumentSegmentUpdateApi(Resource):
@account_initialization_required
@cloud_edition_billing_rate_limit_check("knowledge")
def delete(self, dataset_id, document_id, segment_id):
current_user, current_tenant_id = current_account_with_tenant()
# check dataset
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
@@ -350,7 +329,7 @@ class DatasetDocumentSegmentUpdateApi(Resource):
segment_id = str(segment_id)
segment = (
db.session.query(DocumentSegment)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment:
@@ -366,10 +345,6 @@ class DatasetDocumentSegmentUpdateApi(Resource):
return {"result": "success"}, 204
@console_ns.route(
"/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/batch_import",
"/datasets/batch_import_status/<uuid:job_id>",
)
class DatasetDocumentSegmentBatchImportApi(Resource):
@setup_required
@login_required
@@ -378,8 +353,6 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
@cloud_edition_billing_knowledge_limit_check("add_segment")
@cloud_edition_billing_rate_limit_check("knowledge")
def post(self, dataset_id, document_id):
current_user, current_tenant_id = current_account_with_tenant()
# check dataset
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
@@ -391,9 +364,8 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
if not document:
raise NotFound("Document not found.")
parser = reqparse.RequestParser().add_argument(
"upload_file_id", type=str, required=True, nullable=False, location="json"
)
parser = reqparse.RequestParser()
parser.add_argument("upload_file_id", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
upload_file_id = args["upload_file_id"]
@@ -412,12 +384,7 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
# send batch add segments task
redis_client.setnx(indexing_cache_key, "waiting")
batch_create_segment_to_index_task.delay(
str(job_id),
upload_file_id,
dataset_id,
document_id,
current_tenant_id,
current_user.id,
str(job_id), upload_file_id, dataset_id, document_id, current_user.current_tenant_id, current_user.id
)
except Exception as e:
return {"error": str(e)}, 500
@@ -426,9 +393,7 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self, job_id=None, dataset_id=None, document_id=None):
if job_id is None:
raise NotFound("The job does not exist.")
def get(self, job_id):
job_id = str(job_id)
indexing_cache_key = f"segment_batch_import_{job_id}"
cache_result = redis_client.get(indexing_cache_key)
@@ -438,7 +403,6 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
return {"job_id": job_id, "job_status": cache_result.decode()}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>/child_chunks")
class ChildChunkAddApi(Resource):
@setup_required
@login_required
@@ -447,8 +411,6 @@ class ChildChunkAddApi(Resource):
@cloud_edition_billing_knowledge_limit_check("add_segment")
@cloud_edition_billing_rate_limit_check("knowledge")
def post(self, dataset_id, document_id, segment_id):
current_user, current_tenant_id = current_account_with_tenant()
# check dataset
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
@@ -463,7 +425,7 @@ class ChildChunkAddApi(Resource):
segment_id = str(segment_id)
segment = (
db.session.query(DocumentSegment)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment:
@@ -475,7 +437,7 @@ class ChildChunkAddApi(Resource):
try:
model_manager = ModelManager()
model_manager.get_model_instance(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
provider=dataset.embedding_model_provider,
model_type=ModelType.TEXT_EMBEDDING,
model=dataset.embedding_model,
@@ -491,13 +453,11 @@ class ChildChunkAddApi(Resource):
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
# validate args
parser = reqparse.RequestParser().add_argument(
"content", type=str, required=True, nullable=False, location="json"
)
parser = reqparse.RequestParser()
parser.add_argument("content", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
try:
content = args["content"]
child_chunk = SegmentService.create_child_chunk(content, segment, document, dataset)
child_chunk = SegmentService.create_child_chunk(args.get("content"), segment, document, dataset)
except ChildChunkIndexingServiceError as e:
raise ChildChunkIndexingError(str(e))
return {"data": marshal(child_chunk, child_chunk_fields)}, 200
@@ -506,8 +466,6 @@ class ChildChunkAddApi(Resource):
@login_required
@account_initialization_required
def get(self, dataset_id, document_id, segment_id):
_, current_tenant_id = current_account_with_tenant()
# check dataset
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
@@ -524,17 +482,15 @@ class ChildChunkAddApi(Resource):
segment_id = str(segment_id)
segment = (
db.session.query(DocumentSegment)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment:
raise NotFound("Segment not found.")
parser = (
reqparse.RequestParser()
.add_argument("limit", type=int, default=20, location="args")
.add_argument("keyword", type=str, default=None, location="args")
.add_argument("page", type=int, default=1, location="args")
)
parser = reqparse.RequestParser()
parser.add_argument("limit", type=int, default=20, location="args")
parser.add_argument("keyword", type=str, default=None, location="args")
parser.add_argument("page", type=int, default=1, location="args")
args = parser.parse_args()
@@ -557,8 +513,6 @@ class ChildChunkAddApi(Resource):
@cloud_edition_billing_resource_check("vector_space")
@cloud_edition_billing_rate_limit_check("knowledge")
def patch(self, dataset_id, document_id, segment_id):
current_user, current_tenant_id = current_account_with_tenant()
# check dataset
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
@@ -575,7 +529,7 @@ class ChildChunkAddApi(Resource):
segment_id = str(segment_id)
segment = (
db.session.query(DocumentSegment)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment:
@@ -588,30 +542,23 @@ class ChildChunkAddApi(Resource):
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
# validate args
parser = reqparse.RequestParser().add_argument(
"chunks", type=list, required=True, nullable=False, location="json"
)
parser = reqparse.RequestParser()
parser.add_argument("chunks", type=list, required=True, nullable=False, location="json")
args = parser.parse_args()
try:
chunks_data = args["chunks"]
chunks = [ChildChunkUpdateArgs.model_validate(chunk) for chunk in chunks_data]
chunks = [ChildChunkUpdateArgs(**chunk) for chunk in args.get("chunks")]
child_chunks = SegmentService.update_child_chunks(chunks, segment, document, dataset)
except ChildChunkIndexingServiceError as e:
raise ChildChunkIndexingError(str(e))
return {"data": marshal(child_chunks, child_chunk_fields)}, 200
@console_ns.route(
"/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>/child_chunks/<uuid:child_chunk_id>"
)
class ChildChunkUpdateApi(Resource):
@setup_required
@login_required
@account_initialization_required
@cloud_edition_billing_rate_limit_check("knowledge")
def delete(self, dataset_id, document_id, segment_id, child_chunk_id):
current_user, current_tenant_id = current_account_with_tenant()
# check dataset
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
@@ -628,7 +575,7 @@ class ChildChunkUpdateApi(Resource):
segment_id = str(segment_id)
segment = (
db.session.query(DocumentSegment)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment:
@@ -639,7 +586,7 @@ class ChildChunkUpdateApi(Resource):
db.session.query(ChildChunk)
.where(
ChildChunk.id == str(child_chunk_id),
ChildChunk.tenant_id == current_tenant_id,
ChildChunk.tenant_id == current_user.current_tenant_id,
ChildChunk.segment_id == segment.id,
ChildChunk.document_id == document_id,
)
@@ -666,8 +613,6 @@ class ChildChunkUpdateApi(Resource):
@cloud_edition_billing_resource_check("vector_space")
@cloud_edition_billing_rate_limit_check("knowledge")
def patch(self, dataset_id, document_id, segment_id, child_chunk_id):
current_user, current_tenant_id = current_account_with_tenant()
# check dataset
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
@@ -684,7 +629,7 @@ class ChildChunkUpdateApi(Resource):
segment_id = str(segment_id)
segment = (
db.session.query(DocumentSegment)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment:
@@ -695,7 +640,7 @@ class ChildChunkUpdateApi(Resource):
db.session.query(ChildChunk)
.where(
ChildChunk.id == str(child_chunk_id),
ChildChunk.tenant_id == current_tenant_id,
ChildChunk.tenant_id == current_user.current_tenant_id,
ChildChunk.segment_id == segment.id,
ChildChunk.document_id == document_id,
)
@@ -711,13 +656,37 @@ class ChildChunkUpdateApi(Resource):
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
# validate args
parser = reqparse.RequestParser().add_argument(
"content", type=str, required=True, nullable=False, location="json"
)
parser = reqparse.RequestParser()
parser.add_argument("content", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
try:
content = args["content"]
child_chunk = SegmentService.update_child_chunk(content, child_chunk, segment, document, dataset)
child_chunk = SegmentService.update_child_chunk(
args.get("content"), child_chunk, segment, document, dataset
)
except ChildChunkIndexingServiceError as e:
raise ChildChunkIndexingError(str(e))
return {"data": marshal(child_chunk, child_chunk_fields)}, 200
api.add_resource(DatasetDocumentSegmentListApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments")
api.add_resource(
DatasetDocumentSegmentApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segment/<string:action>"
)
api.add_resource(DatasetDocumentSegmentAddApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segment")
api.add_resource(
DatasetDocumentSegmentUpdateApi,
"/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>",
)
api.add_resource(
DatasetDocumentSegmentBatchImportApi,
"/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/batch_import",
"/datasets/batch_import_status/<uuid:job_id>",
)
api.add_resource(
ChildChunkAddApi,
"/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>/child_chunks",
)
api.add_resource(
ChildChunkUpdateApi,
"/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>/child_chunks/<uuid:child_chunk_id>",
)
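This file trades between the two flask-restx registration styles: binding a route with a namespace decorator at class definition, or calling `api.add_resource` afterwards, which also makes it easy to map one resource to several URLs, as the batch-import resource does. A minimal self-contained sketch (names are illustrative):

from flask import Flask
from flask_restx import Api, Namespace, Resource

flask_app = Flask(__name__)
api = Api(flask_app)
console_ns = Namespace("console", path="/console")

@console_ns.route("/segments")  # decorator style: bound at class definition
class SegmentList(Resource):
    def get(self):
        return {"data": []}

class BatchImportStatus(Resource):
    # job_id defaults to None because the first URL below supplies no job_id,
    # mirroring the defaulted signature on one side of this diff.
    def get(self, job_id=None):
        return {"job_id": job_id}

# add_resource style: bound after the fact, multiple URLs per resource.
api.add_resource(
    BatchImportStatus,
    "/segments/batch_import",
    "/batch_import_status/<string:job_id>",
)
api.add_namespace(console_ns)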

View File

@@ -1,4 +1,5 @@
from flask import request
from flask_login import current_user
from flask_restx import Resource, fields, marshal, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
@@ -7,14 +8,14 @@ from controllers.console import api, console_ns
from controllers.console.datasets.error import DatasetNameDuplicateError
from controllers.console.wraps import account_initialization_required, setup_required
from fields.dataset_fields import dataset_detail_fields
from libs.login import current_account_with_tenant, login_required
from libs.login import login_required
from services.dataset_service import DatasetService
from services.external_knowledge_service import ExternalDatasetService
from services.hit_testing_service import HitTestingService
from services.knowledge_service import ExternalDatasetTestService
def _validate_name(name: str) -> str:
def _validate_name(name):
if not name or len(name) < 1 or len(name) > 100:
raise ValueError("Name must be between 1 to 100 characters.")
return name
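`_validate_name` works because reqparse accepts any callable as `type=`: the raw value is passed through it, and a raised `ValueError` becomes a 400 response carrying the argument's `help` text. A minimal sketch:

from flask_restx import reqparse

def validate_name(name: str) -> str:
    if not name or not (1 <= len(name) <= 100):
        raise ValueError("Name must be between 1 and 100 characters.")
    return name

parser = reqparse.RequestParser()
parser.add_argument(
    "name",
    type=validate_name,  # callable validator; ValueError -> HTTP 400
    required=True,
    nullable=False,
    help="Name is required. Name must be between 1 and 100 characters.",
)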
@@ -36,13 +37,12 @@ class ExternalApiTemplateListApi(Resource):
@login_required
@account_initialization_required
def get(self):
_, current_tenant_id = current_account_with_tenant()
page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int)
search = request.args.get("keyword", default=None, type=str)
external_knowledge_apis, total = ExternalDatasetService.get_external_knowledge_apis(
page, limit, current_tenant_id, search
page, limit, current_user.current_tenant_id, search
)
response = {
"data": [item.to_dict() for item in external_knowledge_apis],
@@ -57,23 +57,20 @@ class ExternalApiTemplateListApi(Resource):
@login_required
@account_initialization_required
def post(self):
current_user, current_tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument(
"name",
nullable=False,
required=True,
help="Name is required. Name must be between 1 to 100 characters.",
type=_validate_name,
)
.add_argument(
"settings",
type=dict,
location="json",
nullable=False,
required=True,
)
parser = reqparse.RequestParser()
parser.add_argument(
"name",
nullable=False,
required=True,
help="Name is required. Name must be between 1 to 100 characters.",
type=_validate_name,
)
parser.add_argument(
"settings",
type=dict,
location="json",
nullable=False,
required=True,
)
args = parser.parse_args()
@@ -85,7 +82,7 @@ class ExternalApiTemplateListApi(Resource):
try:
external_knowledge_api = ExternalDatasetService.create_external_knowledge_api(
tenant_id=current_tenant_id, user_id=current_user.id, args=args
tenant_id=current_user.current_tenant_id, user_id=current_user.id, args=args
)
except services.errors.dataset.DatasetNameDuplicateError:
raise DatasetNameDuplicateError()
@@ -115,31 +112,28 @@ class ExternalApiTemplateApi(Resource):
@login_required
@account_initialization_required
def patch(self, external_knowledge_api_id):
current_user, current_tenant_id = current_account_with_tenant()
external_knowledge_api_id = str(external_knowledge_api_id)
parser = (
reqparse.RequestParser()
.add_argument(
"name",
nullable=False,
required=True,
help="type is required. Name must be between 1 to 100 characters.",
type=_validate_name,
)
.add_argument(
"settings",
type=dict,
location="json",
nullable=False,
required=True,
)
parser = reqparse.RequestParser()
parser.add_argument(
"name",
nullable=False,
required=True,
help="type is required. Name must be between 1 to 100 characters.",
type=_validate_name,
)
parser.add_argument(
"settings",
type=dict,
location="json",
nullable=False,
required=True,
)
args = parser.parse_args()
ExternalDatasetService.validate_api_list(args["settings"])
external_knowledge_api = ExternalDatasetService.update_external_knowledge_api(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
user_id=current_user.id,
external_knowledge_api_id=external_knowledge_api_id,
args=args,
@@ -151,13 +145,13 @@ class ExternalApiTemplateApi(Resource):
@login_required
@account_initialization_required
def delete(self, external_knowledge_api_id):
current_user, current_tenant_id = current_account_with_tenant()
external_knowledge_api_id = str(external_knowledge_api_id)
if not (current_user.has_edit_permission or current_user.is_dataset_operator):
# The role of the current user in the ta table must be admin, owner, or editor
if not (current_user.is_editor or current_user.is_dataset_operator):
raise Forbidden()
ExternalDatasetService.delete_external_knowledge_api(current_tenant_id, external_knowledge_api_id)
ExternalDatasetService.delete_external_knowledge_api(current_user.current_tenant_id, external_knowledge_api_id)
return {"result": "success"}, 204
@@ -202,24 +196,21 @@ class ExternalDatasetCreateApi(Resource):
@account_initialization_required
def post(self):
# The role of the current user in the ta table must be admin, owner, or editor
current_user, current_tenant_id = current_account_with_tenant()
if not current_user.has_edit_permission:
if not current_user.is_editor:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("external_knowledge_api_id", type=str, required=True, nullable=False, location="json")
.add_argument("external_knowledge_id", type=str, required=True, nullable=False, location="json")
.add_argument(
"name",
nullable=False,
required=True,
help="name is required. Name must be between 1 to 100 characters.",
type=_validate_name,
)
.add_argument("description", type=str, required=False, nullable=True, location="json")
.add_argument("external_retrieval_model", type=dict, required=False, location="json")
parser = reqparse.RequestParser()
parser.add_argument("external_knowledge_api_id", type=str, required=True, nullable=False, location="json")
parser.add_argument("external_knowledge_id", type=str, required=True, nullable=False, location="json")
parser.add_argument(
"name",
nullable=False,
required=True,
help="name is required. Name must be between 1 to 100 characters.",
type=_validate_name,
)
parser.add_argument("description", type=str, required=False, nullable=True, location="json")
parser.add_argument("external_retrieval_model", type=dict, required=False, location="json")
args = parser.parse_args()
@@ -229,7 +220,7 @@ class ExternalDatasetCreateApi(Resource):
try:
dataset = ExternalDatasetService.create_external_dataset(
tenant_id=current_tenant_id,
tenant_id=current_user.current_tenant_id,
user_id=current_user.id,
args=args,
)
@@ -261,7 +252,6 @@ class ExternalKnowledgeHitTestingApi(Resource):
@login_required
@account_initialization_required
def post(self, dataset_id):
current_user, _ = current_account_with_tenant()
dataset_id_str = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id_str)
if dataset is None:
@@ -272,12 +262,10 @@ class ExternalKnowledgeHitTestingApi(Resource):
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
parser = (
reqparse.RequestParser()
.add_argument("query", type=str, location="json")
.add_argument("external_retrieval_model", type=dict, required=False, location="json")
.add_argument("metadata_filtering_conditions", type=dict, required=False, location="json")
)
parser = reqparse.RequestParser()
parser.add_argument("query", type=str, location="json")
parser.add_argument("external_retrieval_model", type=dict, required=False, location="json")
parser.add_argument("metadata_filtering_conditions", type=dict, required=False, location="json")
args = parser.parse_args()
HitTestingService.hit_testing_args_check(args)
@@ -313,17 +301,15 @@ class BedrockRetrievalApi(Resource):
)
@api.response(200, "Bedrock retrieval test completed")
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("retrieval_setting", nullable=False, required=True, type=dict, location="json")
.add_argument(
"query",
nullable=False,
required=True,
type=str,
)
.add_argument("knowledge_id", nullable=False, required=True, type=str)
parser = reqparse.RequestParser()
parser.add_argument("retrieval_setting", nullable=False, required=True, type=dict, location="json")
parser.add_argument(
"query",
nullable=False,
required=True,
type=str,
)
parser.add_argument("knowledge_id", nullable=False, required=True, type=str)
args = parser.parse_args()
# Call the knowledge retrieval service
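`@api.response` here only documents the expected status code in the generated Swagger spec; it does not change the handler's behavior. A self-contained sketch of a documented retrieval-test endpoint (the route and response shape are illustrative):

from flask import Flask
from flask_restx import Api, Resource, reqparse

app = Flask(__name__)
api = Api(app)

parser = reqparse.RequestParser()
parser.add_argument("retrieval_setting", type=dict, required=True, nullable=False, location="json")
parser.add_argument("query", type=str, required=True, nullable=False)
parser.add_argument("knowledge_id", type=str, required=True, nullable=False)

@api.route("/retrieval-test")
class RetrievalTest(Resource):
    @api.response(200, "Retrieval test completed")  # documentation only
    def post(self):
        args = parser.parse_args()
        return {"query": args["query"], "knowledge_id": args["knowledge_id"]}, 200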

Some files were not shown because too many files have changed in this diff.