Compare commits

1267 Commits

SHA1 Message Date
fae6d4f2dd chore: add default value for FILES_URL
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-09-04 21:30:12 +08:00
495324b85b Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-04 20:52:02 +08:00
65df8f6c57 refactor(DocumentList): Optimize dataset configuration handling and improve data source type checks 2025-09-04 20:51:56 +08:00
d4ee915058 Merge branch 'feat/queue-based-graph-engine' into feat/rag-2 2025-09-04 20:51:48 +08:00
637afe1ab0 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-04 20:36:38 +08:00
df70e28481 chore(docker): Update Docker images to version 2.0.0-beta.1 for API, worker, web, and plugin daemon services 2025-09-04 20:36:23 +08:00
b9394d542c Merge branch 'feat/queue-based-graph-engine' into feat/rag-2
# Conflicts:
#	api/core/app/apps/advanced_chat/generate_task_pipeline.py
#	api/pyproject.toml
#	api/uv.lock
#	docker/docker-compose-template.yaml
#	docker/docker-compose.yaml
#	web/package.json
2025-09-04 20:30:08 +08:00
1fda319ecb chore(docker): Update Docker images to version 2.0.0-beta1 for API, worker, web, and plugin daemon services 2025-09-04 20:24:17 +08:00
814cbee28e chore: Update GitHub Actions workflow to include 'feat/rag-2' path for build triggers 2025-09-04 20:11:16 +08:00
443e1fad32 chore: Bump version to 2.0.0-beta1 2025-09-04 20:06:15 +08:00
e21c133e1d change migration 2025-09-04 19:38:51 +08:00
8506288180 fix(firecrawl): map markdown content to content field in crawl results 2025-09-04 19:24:54 +08:00
ef64729771 refactor(notion-page-preview): update fetchNotionPagePreview to include pageType parameter for improved API request 2025-09-04 19:02:35 +08:00
771ea54a0b change migration 2025-09-04 18:37:58 +08:00
35ec0d25e8 change migration 2025-09-04 18:06:45 +08:00
952fb16b91 change migration 2025-09-04 17:37:21 +08:00
e9f11b4706 change migration 2025-09-04 17:29:09 +08:00
41b5596441 fix file name 2025-09-04 17:11:13 +08:00
55b936003f refactor(context-menu): conditionally render export option based on pipeline ID 2025-09-04 16:47:21 +08:00
a6b0071ca0 feat: add datasource category handling in marketplace list condition 2025-09-04 16:05:29 +08:00
ebeb17ec96 refactor(form-input-item): enhance type switch logic to include select input handling 2025-09-04 15:34:21 +08:00
313069a63e refactor(workflow): improve loading state rendering and enhance control prompt editor re-rendering logic 2025-09-04 15:27:55 +08:00
64fc0c9073 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-04 14:52:34 +08:00
94f2956e3a fix file name 2025-09-04 14:52:23 +08:00
1bf04b7b74 fix: export workflow image 2025-09-04 14:43:14 +08:00
1e5f6c1475 fix file name 2025-09-04 14:07:58 +08:00
9f3c996be2 fix(pipeline-publish): publish does not update dataset 2025-09-04 14:07:42 +08:00
0bcdd0db99 fix(field): add z-index to sticky header for improved layering in workflow nodes 2025-09-04 13:18:14 +08:00
5ecf382180 refactor(chunk-preview): improve key assignment for ChunkContainer components and enhance localFileList handling in Preparation component 2025-09-04 13:03:49 +08:00
04d01c8409 refactor(icons): remove unused Globe01 icon and related files 2025-09-04 12:37:01 +08:00
7efe215d2b fix file name 2025-09-04 12:28:52 +08:00
25e3f4da04 fix(pipeline-tracing-log): datasource icon error 2025-09-04 11:44:10 +08:00
68f4d4b97c fix(rag-pipeline-dsl): dsl import session error 2025-09-04 11:03:45 +08:00
c40dfe7060 fix(pipeline): correct API endpoint for stopping workflow runs 2025-09-04 10:39:23 +08:00
647af6fc69 fix file name 2025-09-04 10:36:44 +08:00
5114569017 refactor(document-picker): enhance chunking mode handling and improve parent mode label logic 2025-09-04 09:59:55 +08:00
f811855f79 fix(workflow): ensure variable updates only occur for matching selectors in updateNodeVars 2025-09-03 23:28:32 +08:00
c7510d3f54 refactor(panel): simplify outputSchema mapping for improved readability 2025-09-03 23:06:12 +08:00
e3092837e4 fix file name 2025-09-03 21:26:28 +08:00
46731dc8a1 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-03 20:33:01 +08:00
1214942eb7 refactor(datasets): rename 'markdown' to 'content' for consistency across components 2025-09-03 20:32:48 +08:00
dd89c9aa21 fix preview and recommend plugins 2025-09-03 19:51:07 +08:00
5ab3cd7213 fix preview and recommend plugins 2025-09-03 19:42:20 +08:00
49d268d4a3 fix preview and recommend plugins 2025-09-03 19:20:14 +08:00
aa670c8982 fix(datasource): add datasource icon in tracing panel 2025-09-03 18:23:23 +08:00
c8d60f372d fix: node config not displayed correctly in pipeline 2025-09-03 18:21:51 +08:00
c4f0691454 refactor(workflow): update fetchInspectVars calls to accept empty parameters 2025-09-03 18:15:14 +08:00
451948d49c Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-03 18:10:14 +08:00
fd3b55cc16 fix preview and recommend plugins 2025-09-03 18:09:56 +08:00
0212f0de9f fix(api): fix workflow execution 2025-09-03 17:11:43 +08:00
9eb1c15906 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-03 16:51:57 +08:00
110211d966 refactor(uploader): replace YAML icon with pipeline icon and update label text 2025-09-03 16:51:49 +08:00
08d6fcfd52 fix: large data markdown ui problems 2025-09-03 16:48:41 +08:00
d755b2885e refactor(workflow): enhance handleExportDSL function and clean up code structure 2025-09-03 16:22:59 +08:00
b4c98daa8d refactor(workflow): update RAG tool suggestions and improve filtering logic 2025-09-03 16:05:55 +08:00
58e598dac8 Merge branch 'main' into feat/rag-2 2025-09-03 15:01:41 +08:00
d4aed3df5c Merge branch 'feat/queue-based-graph-engine' into feat/rag-2
# Conflicts:
#	api/core/memory/token_buffer_memory.py
#	api/core/rag/extractor/notion_extractor.py
#	api/core/repositories/sqlalchemy_workflow_node_execution_repository.py
#	api/core/variables/variables.py
#	api/core/workflow/graph/graph.py
#	api/core/workflow/graph_engine/entities/event.py
#	api/services/dataset_service.py
#	web/app/components/app-sidebar/index.tsx
#	web/app/components/base/tag-management/selector.tsx
#	web/app/components/base/toast/index.tsx
#	web/app/components/datasets/create/website/index.tsx
#	web/app/components/datasets/create/website/jina-reader/base/options-wrap.tsx
#	web/app/components/workflow/header/version-history-button.tsx
#	web/app/components/workflow/hooks/use-inspect-vars-crud-common.ts
#	web/app/components/workflow/hooks/use-workflow-interactions.ts
#	web/app/components/workflow/panel/version-history-panel/index.tsx
#	web/service/base.ts
2025-09-03 15:01:06 +08:00
c422d732d2 fix: tool bool input can choose file 2025-09-03 14:41:29 +08:00
df0fe49fcc fix: one step run does not support schema type file 2025-09-03 14:28:13 +08:00
97875d2b55 plugin dependencies select all 2025-09-03 14:27:29 +08:00
9a79d8941e Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-03 12:59:34 +08:00
9d3772f7d6 feat(workflow): add DisplayContent component for improved variable inspection and integrate schema type handling 2025-09-03 12:59:22 +08:00
d2e341367e fix(api): fix relations of WorkflowNodeExecutionModel not preloaded
`WorkflowNodeExecutionModel.offload_data` should be preloaded to
provide information about the offloading of the execution record.

`RagPipelineService.get_node_last_run` does not use
`DifyAPISQLAlchemyWorkflowNodeExecutionRepository`, so its loading
logic is unchanged.

In this commit we migrate to calling
`DifyAPISQLAlchemyWorkflowNodeExecutionRepository` to avoid this issue.
2025-09-03 12:30:20 +08:00
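The commit above is about eager loading of a relation. A minimal, self-contained sketch of the general SQLAlchemy pattern — the model names (`NodeExecution`, `ExecutionOffload`) and the `storage_key` field are assumptions for illustration, not Dify's actual models:

```python
from typing import Optional

from sqlalchemy import ForeignKey, String, create_engine, select
from sqlalchemy.orm import (
    DeclarativeBase,
    Mapped,
    Session,
    mapped_column,
    relationship,
    selectinload,
)


class Base(DeclarativeBase):
    pass


class NodeExecution(Base):
    __tablename__ = "node_execution"
    id: Mapped[int] = mapped_column(primary_key=True)
    # One-to-one link to the offloaded payload record.
    offload_data: Mapped[Optional["ExecutionOffload"]] = relationship(
        back_populates="execution"
    )


class ExecutionOffload(Base):
    __tablename__ = "execution_offload"
    id: Mapped[int] = mapped_column(primary_key=True)
    execution_id: Mapped[int] = mapped_column(ForeignKey("node_execution.id"))
    storage_key: Mapped[str] = mapped_column(String(255))
    execution: Mapped[NodeExecution] = relationship(back_populates="offload_data")


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(ExecutionOffload(execution=NodeExecution(), storage_key="s3://bucket/key"))
    session.commit()

with Session(engine) as session:
    # selectinload() fetches the relation eagerly in the same query plan, so
    # reading offload_data later cannot trigger a lazy load on a detached
    # instance -- the class of problem the commit above describes.
    stmt = select(NodeExecution).options(selectinload(NodeExecution.offload_data))
    execution = session.scalars(stmt).one()

print(execution.offload_data.storage_key)  # already loaded: "s3://bucket/key"
```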
e761f38d26 fix(api): adjust gevent patching 2025-09-03 12:29:39 +08:00
e2ff7fac77 fix(api): fix variable truncation-related field not returned 2025-09-03 12:29:38 +08:00
617dc247f5 fix preview and recommend plugins 2025-09-03 12:15:03 +08:00
e1b6da21f4 fix preview and recommend plugins 2025-09-03 11:35:41 +08:00
b10d7d5b22 fix: datasource file can not be chosen 2025-09-03 11:31:53 +08:00
885c7d26e5 feat: handle choose select 2025-09-03 11:05:15 +08:00
7896eeec5b fix: correct run history tracing sequence 2025-09-03 10:56:23 +08:00
38e24922e1 Merge branch 'feat/tool-rag-tag' into feat/rag-2 2025-09-03 10:27:02 +08:00
d2787dc925 feat: tools add rag tag 2025-09-03 10:25:08 +08:00
706969d812 fix(workflow): enhance variable inspection by integrating schema type handling and refactoring logic 2025-09-03 10:17:38 +08:00
7623dc14bb fix preview and recommend plugins 2025-09-02 20:12:45 +08:00
1ad46d0962 fix(pipeline): add handling for RAG pipeline variables in workflow state 2025-09-02 20:11:38 +08:00
5e854238b0 fix(workflow): improve formatting and add checkbox type to input variable type mapping 2025-09-02 19:57:18 +08:00
343f1a375f fix preview and recommend plugins 2025-09-02 19:41:29 +08:00
fc4bc08796 feat(workflow): enhance RAG recommended plugins structure and update related components 2025-09-02 19:11:03 +08:00
60da4c9048 feat: set var inspect schema type 2025-09-02 18:44:49 +08:00
32a009654f feat(input-field): add isEditMode prop to InputFieldForm and update handling of variable changes 2025-09-02 18:19:40 +08:00
1522fd50df Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-09-02 17:25:54 +08:00
00b5772012 feat(portal): add customContainer prop to PortalToFollowElem for flexible rendering 2025-09-02 17:25:47 +08:00
8467494706 fix preview and recommend plugins 2025-09-02 17:02:25 +08:00
a8cd1e2483 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-02 16:44:33 +08:00
7ace7e65e1 fix preview and recommend plugins 2025-09-02 16:44:18 +08:00
273dae6738 fix(header): update boolean form type to checkbox and improve JSX formatting in form input component 2025-09-02 16:26:41 +08:00
dfb967773b Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-02 11:56:13 +08:00
56fc9088dd add recommended rag plugin endpoint 2025-09-02 11:56:05 +08:00
bf09c3eb10 feat(rag): integrate RAG recommended plugins into workflow component 2025-09-02 10:43:57 +08:00
e5e52ab4b9 refactor(value-content): improve JSX formatting for readability in variable inspect component 2025-09-02 10:19:01 +08:00
826b9d5b21 add recommended rag plugin endpoint 2025-09-01 20:41:14 +08:00
633de2cb47 add recommended rag plugin endpoint 2025-09-01 20:12:27 +08:00
d94e03c72b add recommended rag plugin endpoint 2025-09-01 19:38:28 +08:00
89ec13ec67 fix(api): fix ToolNode._extract_variable_selector_to_variable_mapping
The original selector syntax does not match our current implementation
for injecting user inputs into VariablePool.
2025-09-01 18:11:33 +08:00
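To illustrate the selector idea referenced above, a hedged sketch: the `VariablePool` class here, the two-segment selector shape, and the `sys` node id are assumptions for this example, not Dify's real implementation. The point is that a selector-to-variable mapping must emit keys in exactly the shape the pool uses when injecting user inputs.

```python
from typing import Any


class VariablePool:
    """Toy pool keyed by selector paths like ["node_id", "variable_name"]."""

    def __init__(self) -> None:
        self._store: dict[tuple[str, ...], Any] = {}

    def add(self, selector: list[str], value: Any) -> None:
        self._store[tuple(selector)] = value

    def get(self, selector: list[str]) -> Any | None:
        return self._store.get(tuple(selector))


pool = VariablePool()
# User inputs injected under a reserved node id, e.g. ["sys", "query"].
pool.add(["sys", "query"], "hello")

# A mapping built with the same two-segment selector shape resolves
# correctly; a differently shaped key (e.g. a bare name) would miss.
mapping = {"query": ["sys", "query"]}
print(pool.get(mapping["query"]))  # -> "hello"
```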
1103130f81 add recommended rag plugin endpoint 2025-09-01 18:03:35 +08:00
493dae239f add recommended rag plugin endpoint 2025-09-01 17:08:37 +08:00
eb7b21c7f1 Merge remote-tracking branch 'upstream/feat/rag-2' into feat/rag-2 2025-09-01 17:06:53 +08:00
ad5a2c77c1 Merge remote-tracking branch 'upstream/feat/big-var-value-show-fe' into feat/rag-2 2025-09-01 17:06:23 +08:00
acbf27eb65 chore(api): adjust migration order and date 2025-09-01 15:20:57 +08:00
e2ae89e08d fix(tests): fix broken tests and linter issues 2025-09-01 14:55:35 +08:00
1676207776 add recommended rag plugin endpoint 2025-09-01 14:50:02 +08:00
8c780df8fd Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-09-01 14:46:08 +08:00
7920714f49 add recommended rag plugin endpoint 2025-09-01 14:45:56 +08:00
a41af6f322 refactor: update file upload and download handling for pipeline format 2025-09-01 11:13:11 +08:00
ce3eb0fcbb fix: rag pipeline template 2025-09-01 10:23:08 +08:00
63c035d8a2 chore(api): migrate import for db 2025-08-31 16:46:25 +08:00
e9e7d4f3cd Merge branch 'feat/rag-2' into feat/workflow-draft-var-optimize 2025-08-31 15:17:23 +08:00
c72d847ac7 feat(api): support array[boolean] truncation 2025-08-31 14:32:53 +08:00
982fd9170c WIP: test(api): tests for truncation logic 2025-08-31 13:45:16 +08:00
91fac9b720 WIP: feat(api): draft var cleanup task 2025-08-31 13:45:16 +08:00
a527bd42b5 chore(api): optimize file url signing 2025-08-31 13:45:16 +08:00
621b75b343 feat(api): implement truncation for SSE events 2025-08-31 13:45:15 +08:00
6b9d2e98b9 feat(api): Implement truncation for WorkflowNodeExecution 2025-08-31 13:44:39 +08:00
2fd337e610 feat(api): add WorkflowNodeExecutionOffload model 2025-08-31 13:44:39 +08:00
a7aa17e361 chore(api): remove orphan files in draft var cleanup script 2025-08-31 13:44:38 +08:00
13eb9f7d7d feat(api): implement truncation for draft var 2025-08-31 13:44:38 +08:00
d7db58cabd feat(api): implement VariableTruncator 2025-08-31 13:44:38 +08:00
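The commit above names a `VariableTruncator` but not its shape. A minimal sketch under assumed limits — `max_string_length` and `max_array_items` are hypothetical parameters, not the actual configuration keys: values over a configured size are cut to a preview and flagged as truncated.

```python
from dataclasses import dataclass
from typing import Any


@dataclass
class TruncationResult:
    value: Any
    truncated: bool


class VariableTruncator:
    def __init__(self, max_string_length: int = 1024, max_array_items: int = 20) -> None:
        self.max_string_length = max_string_length
        self.max_array_items = max_array_items

    def truncate(self, value: Any) -> TruncationResult:
        # Strings are cut to a prefix; arrays keep only the leading items.
        if isinstance(value, str) and len(value) > self.max_string_length:
            return TruncationResult(value[: self.max_string_length], True)
        if isinstance(value, list) and len(value) > self.max_array_items:
            return TruncationResult(value[: self.max_array_items], True)
        return TruncationResult(value, False)


truncator = VariableTruncator(max_string_length=8)
print(truncator.truncate("a very long string"))
# -> TruncationResult(value='a very l', truncated=True)
```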
0cf8a80bdd chore(api): Introduce variable truncation configuration 2025-08-31 13:44:38 +08:00
a10586c8ea fixup! feat(api): Add migration for WorkflowDraftVariableFile 2025-08-31 13:44:38 +08:00
40faa9ce16 refactor(api): Inject db dependency to FileService 2025-08-31 13:44:38 +08:00
58dfae60f0 feat(api): Add migration for WorkflowDraftVariableFile 2025-08-31 13:44:38 +08:00
de08b2310c feat(api): Introduce WorkflowDraftVariableFile model
This model is used to track offloaded variable values for
`WorkflowDraftVariable`.
2025-08-31 13:44:38 +08:00
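A minimal sketch of what such an offload-tracking table might look like — the fields (`variable_id`, `upload_file_id`, `size`) are assumptions for illustration, not the actual migration: a side table records where an oversized draft-variable value was offloaded to, so the main variable row can keep only a truncated preview.

```python
from sqlalchemy import Integer, String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class WorkflowDraftVariableFile(Base):
    __tablename__ = "workflow_draft_variable_files"
    id: Mapped[int] = mapped_column(primary_key=True)
    # The draft variable whose full value was offloaded.
    variable_id: Mapped[str] = mapped_column(String(36), index=True)
    # Reference to the stored file holding the full value.
    upload_file_id: Mapped[str] = mapped_column(String(36))
    # Original size in bytes, so the UI can report what was truncated.
    size: Mapped[int] = mapped_column(Integer)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)  # stand-in for the Alembic migration
```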
e9c6038192 docs(api): update docs for UploadFile 2025-08-31 13:44:38 +08:00
1e9bfd8872 feat(chunk-card-list): implement ChunkCard and QAItem components, refactor ChunkCardList to utilize new structure and types 2025-08-29 23:28:43 +08:00
c2afb84884 fix chunk format 2025-08-29 17:10:18 +08:00
3c0adfb48a fix chunk format 2025-08-29 16:27:22 +08:00
b3dbf9fe94 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-29 16:20:54 +08:00
a597ae1a8a feat(block-selector): add RAG tool suggestions component and integrate with existing tools 2025-08-29 16:20:43 +08:00
65215135e5 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-29 14:49:05 +08:00
5cf795f2b8 fix preview run 2025-08-29 14:48:51 +08:00
1c8190f142 feat(search-box): enhance marketplace search box with new triggers and conditional rendering 2025-08-29 14:45:04 +08:00
d0dd728e6c Merge branch 'main' into feat/rag-2
# Conflicts:
#	api/core/app/entities/queue_entities.py
#	api/core/workflow/graph_engine/entities/event.py
2025-08-29 11:29:51 +08:00
0bb3407385 fix preview run 2025-08-29 11:25:08 +08:00
05c21883f9 fix(pipeline): make kb node deletable (#24731) 2025-08-29 09:19:09 +08:00
5b4335c4b5 fix preview run 2025-08-28 22:02:22 +08:00
39080eed10 fix preview run 2025-08-28 22:01:49 +08:00
1db04aa729 Merge branch 'feat/queue-based-graph-engine' into feat/rag-2 2025-08-28 18:12:49 +08:00
64772fb413 fix preview run 2025-08-28 18:12:27 +08:00
1022e67fb6 fix: adjust padding in search box component for improved layout 2025-08-28 18:02:19 +08:00
a67589d5db feat: add title property to DataSourceDefaultValue type in block-selector 2025-08-28 17:25:26 +08:00
9448d49305 feat: add 'rag' tag to plugin constants and translations in multiple languages 2025-08-28 16:21:53 +08:00
169a8edc28 refactor: update import statement style in IfElseNode component 2025-08-28 15:47:39 +08:00
9c93a49605 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-28 15:40:11 +08:00
f6ed1c5643 fix: update chunk structure tip messages in English and Chinese translations for clarity 2025-08-28 15:40:02 +08:00
334bc8f1a2 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-28 15:39:36 +08:00
a9f7ee029e fix preview run 2025-08-28 15:39:22 +08:00
edd41a30eb feat: add AddChunks icon and related components for knowledge base chunk structure 2025-08-28 15:31:30 +08:00
580201ed2c merge main 2025-08-28 14:57:30 +08:00
e955a603c6 fix: file can be chosen when adding a variable in variable aggregator 2025-08-28 14:19:07 +08:00
740f1c5f2c fix: tool values not updating caused outdated data 2025-08-28 14:06:28 +08:00
9a13cb5bdf refactor: remove unused state management in usePipelineRun hook 2025-08-28 13:58:03 +08:00
048feb4165 refactor: update local file and online drive state management in create-from-pipeline components 2025-08-28 13:47:20 +08:00
843b14ccc6 refactor: Refactor online drive breadcrumbs navigation 2025-08-28 10:56:51 +08:00
4403a26f37 merge new graph engine 2025-08-27 20:44:14 +08:00
88abaa840c merge new graph engine 2025-08-27 18:33:38 +08:00
4e2c3bfb05 fix: render error for non-object struct schema 2025-08-27 18:32:04 +08:00
a644153e9f feat: just use chunk type in knowledge base 2025-08-27 18:16:46 +08:00
0316eb6064 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-27 18:09:55 +08:00
6a56649be2 feat(web): add support for Tool block type in BlockIcon component 2025-08-27 18:09:48 +08:00
8a585607c1 fix: tool schema not right 2025-08-27 18:07:33 +08:00
90d72f5ddf merge new graph engine 2025-08-27 17:46:46 +08:00
6c8212d509 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-27 17:46:05 +08:00
19d3ba42e5 chore: schema type change 2025-08-27 17:40:05 +08:00
ee144452e2 Merge branch 'main' into feat/rag-2 2025-08-27 17:28:21 +08:00
367b2d0320 refactor(web): streamline data source before run form and enhance run handling logic 2025-08-27 16:56:33 +08:00
bd294ffe0d feat: file schema file replace 2025-08-27 16:07:31 +08:00
392514fa13 Merge branch 'feat/queue-based-graph-engine' into feat/rag-2
# Conflicts:
#	api/commands.py
#	api/core/app/apps/common/workflow_response_converter.py
#	api/core/llm_generator/llm_generator.py
#	api/core/plugin/entities/plugin.py
#	api/core/plugin/impl/tool.py
#	api/core/rag/index_processor/index_processor_base.py
#	api/core/workflow/entities/workflow_execution.py
#	api/core/workflow/entities/workflow_node_execution.py
#	api/core/workflow/enums.py
#	api/core/workflow/graph_engine/entities/graph.py
#	api/core/workflow/graph_engine/graph_engine.py
#	api/core/workflow/nodes/enums.py
#	api/services/dataset_service.py
2025-08-27 16:05:59 +08:00
4cb286c765 feat: handle file schema display 2025-08-27 14:46:44 +08:00
ff33d42c55 rag pipeline initial template 2025-08-27 14:39:39 +08:00
e1a2755e3b fix(web): add spinning animation to loader icon in variable inspect trigger for better UX 2025-08-27 13:55:43 +08:00
b1f348fb31 refactor(web): reorganize imports in document index component for improved clarity 2025-08-27 13:48:04 +08:00
5f0bae0ae5 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-27 13:46:10 +08:00
57c242fff3 Update dataset pipeline header text and translations for improved clarity 2025-08-27 13:46:03 +08:00
1814b99c79 fix(web): correct title color in option card in dark mode (#24579) 2025-08-27 11:27:25 +08:00
f24f573731 feat: show struct schema in output 2025-08-27 11:18:22 +08:00
da48e54778 Merge branch 'main' into feat/rag-2 2025-08-27 11:16:27 +08:00
2db699522c feat: compare schema type 2025-08-27 10:48:38 +08:00
5c15fe7b01 fix pipeline export 2025-08-26 18:52:24 +08:00
da9b38f642 pipeline name 2025-08-26 18:26:25 +08:00
918958743f Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-26 17:18:49 +08:00
51c3f1b2e2 add pipeline async run 2025-08-26 17:18:43 +08:00
bdcd9ad9cb refactor(workflow): update output schema handling in tool and data source nodes for improved integration with plugin info 2025-08-26 16:48:18 +08:00
1f5fd13359 refactor(plugin_migration): improve code readability by formatting the install function signature 2025-08-26 16:03:59 +08:00
61f2f8fd31 refactor(schemas): update titles in JSON schemas for consistency and clarity 2025-08-26 16:03:59 +08:00
60fb242f27 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-26 15:52:01 +08:00
85f0d31fab add pipeline async run 2025-08-26 15:51:49 +08:00
a282b6cea4 refactor(tests): remove unused TestPerformance class from schema resolver tests 2025-08-26 15:47:34 +08:00
46019ea927 refactor(resolver): enhance schema reference resolution with improved error handling and caching 2025-08-26 15:47:34 +08:00
7e20273bce refactor(resolver): implement BFS approach for resolving references in Dify schemas 2025-08-26 15:47:34 +08:00
1d2d0ff49f add pipeline async run 2025-08-26 15:32:39 +08:00
c77bdd1fb3 add pipeline async run 2025-08-26 15:20:40 +08:00
0f3ca1d8f4 fix: ruff 2025-08-26 12:51:40 +08:00
d1be9544fb fix: restful to restx 2025-08-26 12:51:40 +08:00
1692a7bd1b Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-26 12:06:43 +08:00
c3c4ef3a8e add pipeline async run 2025-08-26 12:06:34 +08:00
c911ac8c01 refactor(console): add spec controller import and enhance tool output schema resolution 2025-08-26 11:07:12 +08:00
1bc77204b1 fix(use-initial): handle potential undefined nodeData by providing a fallback object 2025-08-26 10:23:13 +08:00
1ad7b0e852 add pipeline async run 2025-08-25 18:26:05 +08:00
e0e3224413 refactor(option-card): enhance styling and accessibility by updating class names and adding title attribute 2025-08-25 17:46:27 +08:00
f418164648 add pipeline async run 2025-08-25 17:33:49 +08:00
4fc498bd48 fix 2025-08-25 16:55:08 +08:00
14d8788dac refactor: remove unused DataSourceType import and simplify iconType handling in DatasetDetailLayout 2025-08-25 16:43:52 +08:00
101d6504fb fix 2025-08-25 16:14:22 +08:00
d440577913 refactor(operations): remove document download functionality and associated UI elements 2025-08-25 15:32:48 +08:00
85fd97e090 Merge branch 'main' into feat/rag-2 2025-08-25 15:30:18 +08:00
412e4b04f3 fix(use-settings-display): correct translation key for keyword search in settings display 2025-08-25 15:20:59 +08:00
cc1f0d4d8d fix 2025-08-25 13:55:42 +08:00
1b9c817dba feat: add process data truncation 2025-08-25 11:26:57 +08:00
b7ed2cade1 initial template 2025-08-22 18:12:24 +08:00
8c44151e6f Merge branch 'main' into feat/rag-2 2025-08-22 17:40:34 +08:00
570b644a7e refactor(header, option-card): improve layout and responsiveness by adjusting flex properties and adding title attributes 2025-08-22 16:49:28 +08:00
5824a2d71c refactor(use-tool-icon): make data parameter optional and update usage in variable inspect components 2025-08-22 16:02:14 +08:00
83cc3b4710 refactor(credential-selector): enhance layout with overflow handling for better UI responsiveness 2025-08-22 14:49:24 +08:00
0ca7c29c47 feat: preview shows large data 2025-08-22 11:35:54 +08:00
2f6c51927e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-22 11:34:13 +08:00
6b7ea9885c support name generation 2025-08-22 11:34:01 +08:00
68ba41e57e chore: fix truncate change problem 2025-08-22 11:08:54 +08:00
8ff4f34773 refactor(create-pipeline): remove unused CreateForm and CreateModal components, streamline dataset creation process 2025-08-22 10:49:23 +08:00
4545f509a5 support name generation 2025-08-21 19:15:16 +08:00
af74019d6e refactor(knowledge-base): update title from 'Question-Answer' to 'Q&A' for improved clarity 2025-08-21 18:13:44 +08:00
d3b6631df8 refactor(retrieval-method): replace all instances of invertedIndex with keyword_search in components and translations 2025-08-21 17:31:25 +08:00
5699adf69c refactor(result-preview): update chunk handling to use GeneralChunkPreview type and extract content 2025-08-21 17:07:20 +08:00
42935997aa refactor(retrieval-method): update retrieval method from invertedIndex to keywordSearch across components 2025-08-21 16:29:23 +08:00
f82f06bdd0 feat: support filter file vars 2025-08-21 16:11:32 +08:00
c5ee6e09d4 fix: output schema file type 2025-08-21 15:12:53 +08:00
60fafc524c feat(data-source): add LOCAL_FILE_OUTPUT to constants and integrate into panel for local file handling 2025-08-21 14:17:25 +08:00
cecef01bf7 feat(rag-pipeline): enhance result preview with chunk formatting and add configuration for preview chunk limit 2025-08-21 11:04:33 +08:00
dfd33b3d84 Merge branch 'main' into feat/rag-2 2025-08-21 09:43:51 +08:00
3e27e97364 feat(publish): improve success and error notifications for knowledge pipeline publishing with localized messages and enhanced user guidance 2025-08-20 18:41:09 +08:00
9d3198f808 refactor(data-source, before-run-form): enhance data handling and user interface for data source selection, integrating new components and improving state management 2025-08-20 18:21:59 +08:00
449755ada4 refactor(test-run, preparation): restructure test run components, enhance data handling, and improve user experience with new loading states and error handling 2025-08-20 16:40:56 +08:00
8ab3f1212b refactor(billing, chunk-preview): update link target and improve file property handling in document preview 2025-08-20 10:23:33 +08:00
be045a68ee refactor(credential-icon, create-from-pipeline, test-run): improve component structure and enhance data handling for online drive files 2025-08-19 15:24:14 +08:00
d4370a8ca5 chore: alert ui 2025-08-19 15:07:53 +08:00
95f60d89ab refactor(datasets): reorganize document components and enhance operations with download functionality 2025-08-19 15:07:52 +08:00
a1666fe058 Merge branch 'main' into feat/rag-2 2025-08-19 14:59:06 +08:00
ed4bd56146 refactor(create-card, template-card): add TODO comments for direct pipeline dataset creation and improve code organization 2025-08-19 14:41:32 +08:00
64897bc6fe chore: hide mock data 2025-08-19 14:21:53 +08:00
559d014b29 chore: use api return truncate 2025-08-19 14:18:41 +08:00
ad523ef4ad feat(publish): enhance success notification for pipeline template publishing with additional information and links 2025-08-19 14:15:49 +08:00
3c4b374038 feat: export run result data when it is too long 2025-08-19 14:04:00 +08:00
d9cdce3f7a fix(i18n): refine Simplified Chinese translations for dataset and pipeline terminology 2025-08-19 13:10:51 +08:00
865e3ee85b fix(billing): adjust padding and margin for pricing plan item components 2025-08-19 10:43:49 +08:00
07887fb24f fix(i18n): update Simplified Chinese translations for "知识管道" to "知识流水线" 2025-08-19 10:24:52 +08:00
fcdbe3b84a fix: plugin service import 2025-08-19 10:06:27 +08:00
38c51b1011 fix(i18n): highlight "プレミアム" in Japanese billing translations 2025-08-18 21:04:57 +08:00
469ed5a311 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-18 21:00:13 +08:00
56273a2dcf fix(i18n): update Japanese and Simplified Chinese translations for billing terms 2025-08-18 20:59:49 +08:00
6ce013ac52 fix: OAuth client parameter retrieval by verifying plugin status 2025-08-18 20:41:22 +08:00
fb6fe4a32b fix(billing): adjust z-index for noise effects in pricing plans and self-hosted plan items 2025-08-18 20:40:45 +08:00
ae183b348c feat: variable preview 2025-08-18 18:16:48 +08:00
b4e76af4a7 feat: hide string type values that are too long 2025-08-18 18:15:10 +08:00
4a5c883988 feat(pipeline): implement footer component for dataset creation and enhance UI with new styles 2025-08-18 16:58:43 +08:00
2391e582f2 feat: debug show big data 2025-08-18 16:57:41 +08:00
cd760633cb feat(billing): add noise effects to pricing plans and update rendering logic 2025-08-18 14:31:09 +08:00
ece1330567 feat(billing): add Enterprise plan component and update plan rendering logic 2025-08-18 11:25:48 +08:00
386614951f Merge branch 'main' into feat/rag-2 2025-08-18 11:16:18 +08:00
a2892773f2 fix: dark mode color 2025-08-17 15:25:10 +08:00
e80645bfa1 feat: enhance billing plan components with new SVG assets and update styles for premium and enterprise plans 2025-08-15 22:17:32 +08:00
c7d5ec1520 feat: add new pricing assets and update billing plan components to utilize them 2025-08-15 18:39:59 +08:00
8cf98ba0ce feat: add knowledge pipeline publishing feature and update billing context; refactor popup component for conditional rendering 2025-08-15 18:06:15 +08:00
fb10706c20 feat: refactor billing plans components to improve structure and add self-hosted plan item button; update pricing layout and translations 2025-08-15 16:29:45 +08:00
fb7dc4e0e1 fix file 2025-08-15 14:55:37 +08:00
d558f98aa6 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-15 14:17:46 +08:00
89c7f71199 feat: add Footer component and integrate it into Pricing layout; refactor Header styles and update Plans component structure 2025-08-15 14:16:35 +08:00
ff76adc88a [autofix.ci] apply automated fixes 2025-08-15 04:17:51 +00:00
cfc555d05d feat: add knowledge pipeline creation feature 2025-08-15 12:15:37 +08:00
153d5e8f03 refactor: Change plan-item directory 2025-08-15 11:14:38 +08:00
ac456c1a95 feat: refactor billing plans components and add new PlanItem structure with tooltip support 2025-08-15 11:08:36 +08:00
8e88765261 fix file 2025-08-14 18:17:44 +08:00
5a2618d002 fix file 2025-08-14 18:07:52 +08:00
69a821db02 fix file 2025-08-14 18:01:29 +08:00
46224724a2 feat: add Cloud and SelfHosted components with updated PlanSwitcher integration 2025-08-14 15:54:07 +08:00
8a5bcd11f2 fix: add missing newline at end of JSON files for icon components 2025-08-14 15:21:53 +08:00
a8fbf123e4 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-14 15:12:29 +08:00
8b8d257f78 fix file 2025-08-14 15:12:09 +08:00
02720c9b95 Merge branch 'main' into feat/rag-2 2025-08-14 15:03:48 +08:00
d8a9645e83 feat: Implement billing plan selection UI with plan switcher and range options 2025-08-14 15:01:38 +08:00
72ea3b4d01 fix variable_pool 2025-08-13 17:38:14 +08:00
3797416fe0 fix online drive 2025-08-13 15:45:33 +08:00
f74706a4a5 fix online drive 2025-08-13 15:28:18 +08:00
463ca7043d fix: sync doc styling change 2025-08-13 15:13:03 +08:00
5a6818c817 Merge branch 'main' into feat/rag-2 2025-08-13 15:05:57 +08:00
acde411629 fix: Update breadcrumb styles in Online Drive component for improved visual consistency 2025-08-13 14:28:55 +08:00
4d34891ac0 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-13 11:34:27 +08:00
cf46fba753 feat: Enhance Online Drive component with bucket handling and breadcrumb navigation 2025-08-13 11:34:22 +08:00
fcdbfbda4f add credential id 2025-08-12 17:56:28 +08:00
71d8a0c0b6 refactor: Refactor FileIcon component to use useMemo for file type determination and rename loading state variable for clarity 2025-08-12 16:55:00 +08:00
ae3addb922 add credential id 2025-08-12 15:43:11 +08:00
bd1d7f8652 add credential id 2025-08-12 15:38:26 +08:00
a0006ce968 add credential id 2025-08-12 14:45:45 +08:00
2b7243dbc7 add credential id 2025-08-12 11:13:47 +08:00
22b3933cc3 Merge branch 'main' into feat/rag-2
# Conflicts:
#	api/core/workflow/entities/variable_pool.py
2025-08-12 11:13:04 +08:00
1bc506603a add credential id 2025-08-12 11:10:21 +08:00
54b935f609 fix 2025-08-12 10:49:49 +08:00
cf4a526e7f refactor: replace db.session with session in DatasourceProviderService for consistency 2025-08-11 20:35:46 +08:00
543f80ad5d refactor: replace get_real_credential_by_id with get_datasource_credentials in multiple services for consistency 2025-08-11 20:04:04 +08:00
7f328328fb Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-11 18:37:22 +08:00
9893b8110d refactor: rename selectedFileKeys to selectedFileIds and update related logic in online drive components 2025-08-11 18:37:15 +08:00
087a4fbd14 add credential id 2025-08-11 18:26:41 +08:00
ada0875ac4 fix: update default value for expires_at in DatasourceProvider model 2025-08-11 11:40:51 +08:00
6b07e0e8d6 feat: add expiration for OAuth credentials in datasource provider 2025-08-11 11:25:50 +08:00
fc779d00df Merge branch 'main' into feat/rag-2 2025-08-11 11:15:58 +08:00
58aca75ee0 fix: json schema 2025-08-08 17:38:01 +08:00
8464ec46e6 fix: json schema 2025-08-08 17:38:01 +08:00
ac7953a32c feat: add credential_id handling in CreateFormPipeline and OnlineDrive components 2025-08-08 14:48:58 +08:00
b21d991fdb feat(rag): pass credentialId to online document preview and wire to data source store 2025-08-08 14:26:38 +08:00
df5a4e5c08 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-08 14:18:45 +08:00
d07ce809be add credential id 2025-08-08 14:18:30 +08:00
ec3cb126a0 feat: add clear data functions for online documents, website crawl, and online drive; integrate credential change handling 2025-08-08 13:49:08 +08:00
184c3c88b7 refactor: Refactor online document and online drive components to handle credential changes 2025-08-08 13:43:01 +08:00
097a6fc1e0 fix: variable 2025-08-07 17:22:29 +08:00
d5f82d0d5f fix: json schema 2025-08-07 16:36:59 +08:00
1b3860d012 Merge branch 'main' into feat/rag-2 2025-08-07 16:27:20 +08:00
5729d38776 feat: add CredentialIcon component and integrate it into credential selector for improved avatar display 2025-08-07 15:34:15 +08:00
9e882122ca fix: json schema 2025-08-07 15:28:42 +08:00
e6f1bc165c add tool file preview 2025-08-07 14:35:04 +08:00
842ced218e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-07 11:13:10 +08:00
ca8f80ee33 notion fix 2025-08-07 11:13:02 +08:00
646b798e9c Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-07 10:10:32 +08:00
e0753359f8 fix: adjust Toast component positioning to top-right corner 2025-08-07 10:10:27 +08:00
1d79c21ae3 feat: implement structured output wrapping for pipeline items 2025-08-06 17:41:07 +08:00
5b433aa2d1 feat: add useFloatingRight hook and integrate it into InputFieldEditorPanel and PreviewPanel for dynamic positioning 2025-08-06 16:59:22 +08:00
218e778099 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-06 15:39:27 +08:00
1567b615dc notion fix 2025-08-06 15:39:22 +08:00
29da2e5c19 feat: enhance output schema descriptions and remove unused constants 2025-08-06 15:12:57 +08:00
facbe02cf7 feat: datasource output schema 2025-08-06 15:12:57 +08:00
2a1e1a8042 feat: datasource output schema 2025-08-06 15:12:57 +08:00
13f38045d4 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-06 15:11:45 +08:00
94a0fb6dc1 notion fix 2025-08-06 15:11:37 +08:00
c5a3bf9b9e refactor: simplify Markdown rendering in ChunkContent component 2025-08-06 14:32:28 +08:00
95982d37a6 refactor: remove unused DataSourceOauthBinding import from dataset_service and document_indexing_sync_task 2025-08-06 14:25:39 +08:00
40f3524cfe Merge branch 'main' into feat/rag-2 2025-08-06 14:23:51 +08:00
da68cf48c4 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-06 11:40:35 +08:00
8717a789b3 feat: dynamically import panel components to improve performance 2025-08-06 11:40:30 +08:00
05e96e56e5 notion fix 2025-08-06 11:13:20 +08:00
6954926df9 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-06 10:33:50 +08:00
ec6fabb222 feat: introduce useInputFieldPanel hook to manage input field panel state and refactor related components 2025-08-06 10:32:57 +08:00
6cae1a2872 fix: rag variable 2025-08-05 18:26:07 +08:00
2f163bad8f transform document 2025-08-05 18:16:24 +08:00
6faa4b107b fix: rag variable 2025-08-05 16:31:14 +08:00
8ab5c47737 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-05 14:20:41 +08:00
0a397ac477 feat: Refactor document context and update chunking mode handling across components 2025-08-05 14:19:28 +08:00
09b5cacbad fix: rag variable 2025-08-05 13:02:51 +08:00
522210bad6 fix: rag variable 2025-08-05 11:39:17 +08:00
0975f5bdc2 merge main 2025-08-05 10:31:12 +08:00
201e4cd64d merge main 2025-08-05 10:30:53 +08:00
1e5317d3f0 feat: Enhance InputFieldPanel to manage preview and edit states more effectively 2025-08-01 16:35:27 +08:00
44d569a7c1 feat: Implement input field management panel 2025-08-01 16:27:53 +08:00
ec501cf664 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-08-01 14:43:00 +08:00
cd9bfe0df3 feat: Update website crawl provider to use jinaReader and synchronize selection changes 2025-08-01 14:42:54 +08:00
d36501203f Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-01 14:11:18 +08:00
f3f3a99e5b transform document 2025-08-01 14:11:11 +08:00
19a93c6554 feat: Enhance Notion integration by adding credential_id to NotionInfo and updating related functions 2025-08-01 14:04:01 +08:00
383ee368e6 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-08-01 13:59:19 +08:00
79f30daf03 transform document 2025-08-01 13:59:11 +08:00
e7e531dc06 feat: Refactor WebsiteCrawl component to improve header configuration and remove unused memoization 2025-08-01 11:50:50 +08:00
d8ac78056e fix: open input field modal from var picker 2025-08-01 11:28:30 +08:00
f75a3ef212 feat: Enhance InputFieldDialog and PreviewPanel with improved styling for better layout 2025-07-31 20:24:18 +08:00
c9ab0fb8f6 feat: Update InputFieldDialog styling to allow for flexible growth 2025-07-31 20:18:20 +08:00
2dc71f059c feat: Update FooterTip component and enhance InputFieldDialog layout 2025-07-31 18:38:04 +08:00
18af3dfe5d Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-31 16:29:00 +08:00
0b871abe59 feat: Add credential handling to Notion page selector and related components 2025-07-31 16:28:53 +08:00
82819af55c transform document 2025-07-31 15:59:30 +08:00
9915364740 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-31 15:21:18 +08:00
97136ca8f0 transform document 2025-07-31 15:21:06 +08:00
195bf6621a Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-31 15:19:11 +08:00
8711a57d92 feat: Enhance NotionPageSelector and NotionPageSelectorModal with loading states and credential handling 2025-07-31 15:19:03 +08:00
8e96b9ed77 transform document 2025-07-31 11:51:40 +08:00
5a21da00c5 chore: knowledge base single run 2025-07-31 11:20:46 +08:00
a7a4c8228e Merge branch 'main' into feat/rag-2
# Conflicts:
#	web/app/components/workflow/hooks/use-workflow.ts
2025-07-31 10:30:28 +08:00
1b6a925b34 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-30 17:42:10 +08:00
41363459b5 feat: Update Header component button styling and remove redundant onClick from icon 2025-07-30 17:41:55 +08:00
deceaa38f0 feat: datasource oauth default credentials 2025-07-30 16:36:27 +08:00
f7ec255b3e feat: oauth 2025-07-30 15:55:16 +08:00
4dab128900 feat: oauth 2025-07-30 15:52:59 +08:00
76b4288b34 datasource change authed page 2025-07-30 15:23:04 +08:00
a1c38a2740 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-30 15:19:16 +08:00
f8d7d07c13 feat: Introduce CredentialSelector component and remove WorkspaceSelector 2025-07-30 15:19:10 +08:00
69738794bc feat: support custom before run form 2025-07-30 14:43:29 +08:00
f37109ef39 transform document 2025-07-30 14:34:38 +08:00
875aea1c22 feat: datasource reauthentication 2025-07-30 13:39:04 +08:00
c70a7e832e chore: add single run button to data source 2025-07-29 18:31:35 +08:00
ecba9e44ff transform document 2025-07-29 18:17:56 +08:00
a7d4675831 transform document 2025-07-29 18:12:35 +08:00
21df72a57a transform document 2025-07-29 17:56:28 +08:00
240f6890f1 fix: some lint 2025-07-29 16:29:59 +08:00
27f65150d7 fix: run tool cause page crash because of feature context 2025-07-29 16:19:14 +08:00
e2df3f182d transform document 2025-07-29 16:01:06 +08:00
a996c1d90c merge main 2025-07-29 15:45:01 +08:00
e19a07c2e6 merge main 2025-07-29 15:44:23 +08:00
2c6f88ef82 chore: reload vars 2025-07-29 15:24:26 +08:00
786d121fdf Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-29 15:23:22 +08:00
9cfb531e3b transform document 2025-07-29 15:23:11 +08:00
1c813239c9 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-29 14:52:25 +08:00
fbf3abddf2 feat: Add tooltip for configuration button in header and update translations 2025-07-29 14:52:17 +08:00
e89398f415 add old auth transform 2025-07-29 14:13:50 +08:00
cc911f46f2 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-29 13:46:23 +08:00
c2cbdcd3bf feat: Enhance dataset info and card components with memoization for improved performance 2025-07-29 13:46:17 +08:00
46db1acf98 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-29 11:18:14 +08:00
6d00ffa509 add old auth transform 2025-07-29 11:18:06 +08:00
446301a443 fix: search method 2025-07-29 11:10:16 +08:00
657e813c7f add old auth transform 2025-07-28 19:29:36 +08:00
829e6f0d1a add old auth transform 2025-07-28 19:29:07 +08:00
b0cd4daf54 feat: Add credential selector for online documents and online drive 2025-07-28 16:55:40 +08:00
fc3250678c fix: module not found 2025-07-28 16:36:40 +08:00
a95cf6f8b0 merge main 2025-07-28 16:00:38 +08:00
acae51c309 initial nodes 2025-07-28 15:38:48 +08:00
347cd6befc publish toast 2025-07-28 14:29:05 +08:00
50fed69c0c r2 transform 2025-07-28 14:00:18 +08:00
9dbc887da5 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-25 17:43:23 +08:00
38e6391be4 r2 transform 2025-07-25 17:43:15 +08:00
e1861f5f9c fix: add dataset reset functionality and improve warning message consistency 2025-07-25 17:30:19 +08:00
f887bbedab r2 transform 2025-07-25 17:06:29 +08:00
f4dd22b9cb r2 transform 2025-07-25 15:17:03 +08:00
7f6759e0ac r2 transform 2025-07-25 14:41:39 +08:00
b01c66acbc Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-25 14:10:20 +08:00
ed1bec9344 r2 transform 2025-07-25 14:10:12 +08:00
4fdcd74f52 fix: publish 2025-07-25 12:00:00 +08:00
bb609ee3ca Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-24 17:43:38 +08:00
1938991162 r2 transform 2025-07-24 17:43:26 +08:00
bae2af0c85 Merge branch 'main' into feat/rag-2 2025-07-24 17:40:04 +08:00
3b0be18d47 r2 transform 2025-07-24 17:08:39 +08:00
0417e2f4d9 fix: auth provider 2025-07-24 16:58:25 +08:00
16603952a0 datasource template 2025-07-23 18:20:32 +08:00
5401299e6e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-23 17:02:12 +08:00
98ef4ffb4d r2 transform 2025-07-23 17:02:01 +08:00
761d717656 fix 2025-07-23 16:59:47 +08:00
44a8b4120b pipeline template 2025-07-23 15:34:32 +08:00
53cdee1e2f pipeline template 2025-07-23 10:46:45 +08:00
d76e37b018 add datasource empty node 2025-07-22 16:48:24 +08:00
3b8d96f45c merge main 2025-07-22 13:52:24 +08:00
10657b6bd3 datasource auth 2025-07-22 10:39:19 +08:00
e5c7fd5b14 feat: enforce maximum length for authorization name in datasource authentication 2025-07-21 19:31:58 +08:00
12661ce0ca feat: improve authorization name validation and enhance credential encryption handling 2025-07-21 19:28:00 +08:00
34072371a1 feat: refactor OAuth client retrieval in datasource authentication 2025-07-21 18:55:21 +08:00
666868fa35 feat: remove unused import for CredentialsValidateFailedError in datasource provider service 2025-07-21 18:53:36 +08:00
ba7f0b3004 feat: enhance datasource authentication by improving credential handling and updating API parameters 2025-07-21 18:51:55 +08:00
386d320650 rename auth name 2025-07-21 17:59:13 +08:00
4d36e784b7 merge main 2025-07-21 17:45:26 +08:00
caa2de3344 datasource oauth 2025-07-21 17:41:19 +08:00
039a053027 feat: standardize credential type string for API key in datasource provider service 2025-07-21 17:40:50 +08:00
7141181732 feat: remove unnecessary blank line in datasource authentication setup 2025-07-21 17:00:10 +08:00
17da96bdd8 feat: refactor datasource authentication APIs for improved credential management 2025-07-21 16:43:50 +08:00
57b48f51b5 feat: convert credential form schemas to lists for consistency 2025-07-21 15:51:24 +08:00
af94602d37 feat: add APIs for setting default datasource provider and updating provider name 2025-07-21 15:49:39 +08:00
9c96f1db6c r2 transform 2025-07-21 14:51:40 +08:00
51d7a9b6be feat: mask hidden values in tenant OAuth client retrieval 2025-07-21 14:35:46 +08:00
529eca70bc feat: enhance datasource credential and OAuth schema serialization 2025-07-21 14:31:26 +08:00
ef8d941633 feat: simplify OAuth encrypter retrieval and remove unnecessary validation 2025-07-21 13:48:05 +08:00
e97f03c130 feat: add custom OAuth client setup and enhance datasource provider model with avatar_url 2025-07-21 12:36:02 +08:00
7364d051d2 feat: refactor provider name generation to use incremental naming & enforce unique constraints 2025-07-18 21:34:59 +08:00
23a5ff410e feat: add avatar_url to datasource providers and update OAuth handling 2025-07-18 19:47:59 +08:00
34a6ed74b6 r2 transform 2025-07-18 19:22:31 +08:00
dc359c6442 r2 transform 2025-07-18 19:04:46 +08:00
dd59cea085 migrations 2025-07-18 14:58:10 +08:00
ab775bce26 feat: remove BuiltinDatasourceProvider class and related credential handling 2025-07-18 14:47:08 +08:00
82b531e949 feat: remove tenant_plugin_auto_upgrade_strategies table and adjust datasource_oauth_params 2025-07-18 14:44:01 +08:00
f325662141 feat: refactor DatasourceNode and KnowledgeIndexNode to use _node_data attribute 2025-07-18 14:25:11 +08:00
32fe8313b4 feat: import and extend dayjs relativeTime plugin in multiple components 2025-07-18 14:15:07 +08:00
6ca5bc1063 feat: add datasource OAuth client setup command and refactor related models 2025-07-18 14:11:15 +08:00
f153319a77 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-18 14:05:55 +08:00
0e428afe43 feat: convert VersionHistoryPanel to dynamic import for improved performance 2025-07-18 14:05:34 +08:00
f336245a45 fix: tool node text overflow 2025-07-18 14:05:08 +08:00
5b2c99e183 Merge branch 'main' into feat/rag-2 2025-07-18 14:03:48 +08:00
399866e0ac fix: add 'no-spinner' class to InputNumber component for better styling 2025-07-18 13:42:05 +08:00
633bfc25e0 feat: update provider parameter naming and refactor related logic in datasource_auth.py 2025-07-18 13:13:20 +08:00
0ac5c0bf3e feat: refactor OAuth provider handling and improve provider name generation 2025-07-18 12:47:32 +08:00
9f2a9ad271 fix: update keyboard shortcut and clean up component structure in various files 2025-07-17 18:22:03 +08:00
59f68cd63b fix: ensure default values are handled correctly in InputNumber and related components 2025-07-17 18:15:52 +08:00
3388e83920 Merge remote-tracking branch 'origin/main' into feat/rag-2
# Conflicts:
#	.github/workflows/build-push.yml
#	web/app/components/workflow/nodes/_base/components/workflow-panel/index.tsx
2025-07-17 17:56:56 +08:00
e0e0a7661d merge main 2025-07-17 16:49:59 +08:00
de47b56ca4 merge main 2025-07-17 16:49:22 +08:00
01566035e3 merge main 2025-07-17 16:48:43 +08:00
cc96b7f507 r2 transform 2025-07-17 16:45:30 +08:00
ad7650e724 r2 transform 2025-07-17 16:36:40 +08:00
f79a90fb21 fix agent default 2025-07-17 16:07:58 +08:00
d0c78d079b r2 transform 2025-07-17 15:32:58 +08:00
cc06ce60fd Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-17 15:22:51 +08:00
761ea0eedb r2 transform 2025-07-17 15:22:39 +08:00
c706793847 fix: file upload config 2025-07-17 13:48:23 +08:00
2c52561060 datasource oauth 2025-07-17 11:18:08 +08:00
a39d7e1f85 r2 transform 2025-07-16 19:26:33 +08:00
aaa5b0e295 r2 transform 2025-07-16 18:05:40 +08:00
3bdb40f37b Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-16 14:25:30 +08:00
c660c98b04 r2 transform 2025-07-16 14:25:16 +08:00
e7f31a66be fix: correct notification type for successful dataset conversion 2025-07-16 14:12:33 +08:00
675ff9bd7d r2 transform 2025-07-16 12:00:26 +08:00
6363ecef97 r2 transform 2025-07-16 11:49:59 +08:00
5c0d19e36d fix: improve handleVariableNameBlur logic to prevent setting label when it already exists 2025-07-16 10:33:59 +08:00
e0753ebfd1 fix: update dataset conversion endpoint path for correct API integration 2025-07-16 10:08:46 +08:00
b8e9b97f07 feat: implement dataset conversion to pipeline with success and error notifications 2025-07-16 09:53:11 +08:00
384073f025 r2 transform 2025-07-16 02:02:08 +08:00
2012ea3213 r2 transform 2025-07-16 01:50:37 +08:00
1ad73ccdc8 r2 2025-07-15 17:54:53 +08:00
96484731a2 r2 2025-07-15 16:13:45 +08:00
537e535d9a r2 2025-07-15 15:33:40 +08:00
3a3b60bab5 r2 2025-07-15 15:00:38 +08:00
63111e8050 r2 2025-07-14 18:17:34 +08:00
405139c377 fix: add isRunning prop to ProcessDocuments and related components for better processing state management 2025-07-14 17:45:19 +08:00
a919e3e135 r2 2025-07-14 17:33:08 +08:00
3e5772c50c fix: enhance layout and tooltip handling in Actions component 2025-07-14 16:36:51 +08:00
cb8fab7364 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-14 16:26:03 +08:00
60f3832c65 fix: refactor OptionCard and StepTwo components for improved structure and readability 2025-07-14 16:25:56 +08:00
cf89d06023 fix: single run url error in pipeline 2025-07-14 16:15:52 +08:00
4b9a5a66c1 r2 2025-07-14 16:14:27 +08:00
e095de05c5 feat: pipeline run 2025-07-14 16:04:43 +08:00
82f4b35d52 chore: use flow type instead of whole url 2025-07-14 15:30:04 +08:00
7a9faf909e feat: workflow use common last run 2025-07-14 15:10:35 +08:00
928751a856 r2 2025-07-14 14:11:58 +08:00
d77d86f53b fix: remove unused showWorkflowEmpty prop from Tools component for cleaner code 2025-07-14 14:11:05 +08:00
2a5bab10b8 fix: pass dataSources prop to PortalToFollowElem for improved functionality 2025-07-14 13:48:45 +08:00
6313f819cf fix: add block enumeration and tool icon handling for enhanced workflow functionality 2025-07-14 11:44:13 +08:00
682b65034c Merge branch 'main' into feat/rag-2 2025-07-14 11:17:42 +08:00
adbad0ad33 fix: enhance bucket list initiation check for improved accuracy 2025-07-14 10:06:55 +08:00
ed77877db1 Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-11 18:46:29 +08:00
9c4356e9a1 r2 2025-07-11 18:46:22 +08:00
0fdb1fedb0 feat: add PluginDependency component to RagPipelineChildren and WorkflowChildren for enhanced functionality 2025-07-11 18:38:18 +08:00
23a6fe3259 fix: adjust default selection of crawl results based on pipeline status 2025-07-11 18:02:23 +08:00
90d0f12ee9 refactor: update file extension handling in ChunkPreview component to use dynamic extension retrieval 2025-07-11 16:53:03 +08:00
50e16f8362 refactor: optimize state selection in data source components using useShallow for improved performance 2025-07-11 16:41:01 +08:00
9dbb06fccc fix: Fix node panel positioning issue when chat log modal is open 2025-07-11 15:58:26 +08:00
3b70f8ac08 r2 2025-07-11 15:25:58 +08:00
58a3471a5f refactor: update variable utility functions to include isRagVariableVar for enhanced variable validation 2025-07-11 13:51:23 +08:00
3e187ba6aa refactor: update BlockIcon component to handle Tool and DataSource types for conditional rendering 2025-07-10 16:56:59 +08:00
f677f2f71b refactor: update useEffect dependency to include currentPage.page_id for proper content fetching 2025-07-10 16:27:56 +08:00
de6867f875 refactor: update CrawledResult and WebsiteCrawl components to handle showPreview prop and adjust previewIndex logic 2025-07-10 15:43:36 +08:00
c39746181d refactor: update data source store usage in LocalFile and WebsiteCrawl components 2025-07-10 15:24:58 +08:00
15cd9e0b12 refactor: rename selectedFileList to selectedFileKeys across components and update related logic 2025-07-10 15:14:23 +08:00
e66c2badda refactor: update error handling to use DataSourceNodeErrorResponse in OnlineDocuments and WebsiteCrawl components 2025-07-10 13:55:24 +08:00
6030ae9d0f refactor: integrate currentNodeIdRef into data source store and update related components 2025-07-10 12:02:54 +08:00
42fd40500a refactor: remove isTruncated state and update related logic to use mutable refs 2025-07-10 10:39:01 +08:00
611bc728d0 fix: update hover background color for disabled and active NavLink states 2025-07-10 10:21:45 +08:00
e2a141b3bb Merge branch 'main' into feat/rag-2 2025-07-10 10:14:12 +08:00
8b97551f1a r2 2025-07-09 18:50:13 +08:00
966e6e03fc style: Update component attributes to use single quotes and adjust z-index in ContentDialog 2025-07-09 18:34:21 +08:00
775983b04b Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-09 18:21:01 +08:00
a2e0bc26c6 feat: Add conversion functionality to Knowledge Pipeline with UI and translations 2025-07-09 18:20:52 +08:00
bd33b9ffec r2 2025-07-09 17:34:42 +08:00
b538eee5dd r2 2025-07-09 17:28:52 +08:00
7c6bdb9ec9 feat: Enhance operations with pause and resume functionality 2025-07-09 16:05:42 +08:00
258c965bd0 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-09 15:55:31 +08:00
9e44f2b805 Merge branch 'main' into feat/rag-2 2025-07-09 15:54:57 +08:00
9dcba51225 r2 2025-07-09 15:48:08 +08:00
e7d394f160 feat: Add DatasetSidebarDropdown component and integrate ExtraInfo for dataset details 2025-07-09 15:13:02 +08:00
dfe3c2caa1 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-09 14:45:03 +08:00
4ea4d227c6 refactor: Remove unused file support types logic from CreateFormPipeline component 2025-07-09 14:44:56 +08:00
b5e4ce6c68 r2 2025-07-09 14:27:49 +08:00
4a8061d14c fix: Integrate dataset list reset functionality in dropdown and step two components 2025-07-09 13:54:49 +08:00
59c3305dcc feat: Enhance dataset dropdown functionality with export and delete options 2025-07-09 13:42:24 +08:00
8fc15c83d0 feat: Refactor dataset info components and add export pipeline functionality 2025-07-09 10:45:50 +08:00
a0942399cd Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-08 20:04:26 +08:00
1c85dada53 feat: Implement sidebar toggle functionality with keyboard shortcuts and improve translations 2025-07-08 20:04:15 +08:00
bc1a517a97 r2 2025-07-08 17:16:10 +08:00
b3431ab0c4 feat: Refactor online drive components to improve file retrieval and selection logic 2025-07-08 16:46:59 +08:00
073a0974a4 fix: Update breadcrumb click handling to close dropdown and adjust prefix slicing logic 2025-07-08 15:40:16 +08:00
e911a4e719 fix: Update button styles and improve file size validation in breadcrumb and item components 2025-07-08 15:28:22 +08:00
5e2b60664f fix: Improve item selection logic and reset selected file list on folder open 2025-07-08 14:26:55 +08:00
b36f36d242 feat: Enhance CreateFormPipeline with file selection and validation for online documents and drives 2025-07-08 14:14:50 +08:00
6332fe795e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-08 14:05:45 +08:00
db886ae3fb r2 2025-07-08 14:04:44 +08:00
467cd2c4c1 fix: Update onlineDrive file check to use selectedFileList for better validation 2025-07-07 16:34:43 +08:00
d3ca50626d feat: Integrate useOnlineDrive hook and enhance datasource handling in CreateFormPipeline 2025-07-07 16:30:15 +08:00
13f168ed1c refactor: Refactor Online Drive components to improve state management and add truncation support 2025-07-07 15:51:59 +08:00
83c8219942 feat: Enhance file item component with support for disabled state and file type validation 2025-07-07 14:36:09 +08:00
a30b92d6b1 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-04 19:15:52 +08:00
1bd664e655 feat: Implement Dropdown and Menu components for breadcrumb navigation in Online Drive 2025-07-04 19:15:37 +08:00
1fb59adba9 r2 2025-07-04 19:09:40 +08:00
1b3888a13e Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-04 18:49:50 +08:00
9c6eb95700 r2 2025-07-04 18:49:37 +08:00
1ff608dfa9 Merge branch 'feat/rag-2' of https://github.com/langgenius/dify into feat/rag-2 2025-07-04 18:24:59 +08:00
933ad0649c feat: Add subItems mapping to Panel component for enhanced item details 2025-07-04 18:24:52 +08:00
a8b676ade0 r2 2025-07-04 18:05:58 +08:00
0d9991ec88 feat: Add ONLINE_DRIVE_OUTPUT and integrate into DataSource components for online drive support 2025-07-04 18:04:47 +08:00
e67a19b26b refactor: Enhance Bucket and Breadcrumbs components; improve event handling and add button for bucket name 2025-07-04 17:55:30 +08:00
d44af3ec46 refactor: Restructure breadcrumbs component; introduce Bucket and BreadcrumbItem components for improved navigation 2025-07-04 16:44:21 +08:00
9ce0c69687 refactor: Update event handling in Checkbox and Radio components; optimize Online Drive file filtering 2025-07-04 15:30:08 +08:00
2ecbcd6a7f refactor: Add loading state and bucket handling to Online Drive components 2025-07-04 15:14:19 +08:00
d3b17ea567 fix: Update key property for onlineDrive datasource handling in TestRunPanel 2025-07-04 14:11:23 +08:00
a4f7d373b5 refactor: Replace useDataSourceStore with useDataSourceStoreWithSelector for improved state selection across components 2025-07-04 14:03:04 +08:00
334f0c905a feat: Enhance Online Drive file handling with selection and folder opening functionality 2025-07-04 13:42:36 +08:00
44c2efcfe4 r2 2025-07-03 18:56:42 +08:00
f2960989c1 refactor: Refactor data source components 2025-07-03 18:34:54 +08:00
816b49483a Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2 2025-07-03 15:15:14 +08:00
798d0880d6 r2 2025-07-03 15:15:01 +08:00
cf4f652105 fix: Enhance data source handling by adding error response type and updating local file and online document slices 2025-07-03 14:58:58 +08:00
76c418c0b7 r2 2025-07-03 14:03:06 +08:00
7c5893db91 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	web/app/components/workflow-app/components/workflow-main.tsx
#	web/app/components/workflow/constants.ts
#	web/app/components/workflow/header/run-and-history.tsx
#	web/app/components/workflow/hooks-store/store.ts
#	web/app/components/workflow/hooks/use-nodes-interactions.ts
#	web/app/components/workflow/hooks/use-workflow-interactions.ts
#	web/app/components/workflow/hooks/use-workflow.ts
#	web/app/components/workflow/nodes/_base/components/panel-operator/panel-operator-popup.tsx
#	web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx
#	web/app/components/workflow/nodes/code/use-config.ts
#	web/app/components/workflow/nodes/llm/default.ts
#	web/app/components/workflow/panel/index.tsx
#	web/app/components/workflow/panel/version-history-panel/index.tsx
#	web/app/components/workflow/store/workflow/index.ts
#	web/app/components/workflow/types.ts
#	web/config/index.ts
#	web/types/workflow.ts
2025-07-03 11:40:54 +08:00
2dd1f41ad3 feat: Implement data source store with slices for local files, online documents, website crawls, and online drives 2025-07-03 10:31:29 +08:00
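The per-source store structure referenced here follows zustand's documented slices pattern: each data source contributes an independent slice composed into one store. A reduced sketch with two of the four slices (the slice shapes are assumptions):

```ts
import { create, type StateCreator } from 'zustand'

type LocalFileSlice = {
  localFileList: File[]
  setLocalFileList: (files: File[]) => void
}

type OnlineDriveSlice = {
  selectedFileIds: string[]
  selectFile: (id: string) => void
}

type DataSourceStore = LocalFileSlice & OnlineDriveSlice

const createLocalFileSlice: StateCreator<DataSourceStore, [], [], LocalFileSlice> = set => ({
  localFileList: [],
  setLocalFileList: files => set({ localFileList: files }),
})

const createOnlineDriveSlice: StateCreator<DataSourceStore, [], [], OnlineDriveSlice> = set => ({
  selectedFileIds: [],
  selectFile: id =>
    set(state => ({ selectedFileIds: [...state.selectedFileIds, id] })),
})

// One store, assembled from independent slices per data source.
export const useDataSourceStore = create<DataSourceStore>()((...args) => ({
  ...createLocalFileSlice(...args),
  ...createOnlineDriveSlice(...args),
}))
```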
ddde576b4a refactor: Update notification messages in CreateFromDSLModal and CreateFromScratchModal for dataset creation 2025-07-03 10:17:55 +08:00
38d895ab5f r2 2025-07-02 18:46:36 +08:00
a6ff9b224b r2 2025-07-02 18:20:41 +08:00
832bef053f Merge branch 'main' into feat/r2
# Conflicts:
#	docker/docker-compose.middleware.yaml
#	web/app/components/workflow-app/components/workflow-main.tsx
#	web/app/components/workflow-app/hooks/index.ts
#	web/app/components/workflow/hooks-store/store.ts
#	web/app/components/workflow/hooks/index.ts
#	web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx
2025-07-02 18:20:05 +08:00
81b07dc3be r2 2025-07-02 18:15:23 +08:00
a52bf6211a merge main 2025-07-02 18:07:09 +08:00
d7b0ccd6f7 feat: Add name field to data source credentials update function in usePipeline service 2025-07-02 15:08:20 +08:00
68d59ee8b3 refactor: Refactor useOnlineDocument hook 2025-07-02 14:56:29 +08:00
e23d7e39ec Merge branch 'feat/datasource' into feat/r2
# Conflicts:
#	api/services/rag_pipeline/rag_pipeline.py
#	web/app/components/workflow/constants.ts
#	web/app/components/workflow/header/run-and-history.tsx
#	web/app/components/workflow/hooks/use-nodes-interactions.ts
#	web/app/components/workflow/hooks/use-workflow-interactions.ts
#	web/app/components/workflow/hooks/use-workflow.ts
#	web/app/components/workflow/index.tsx
#	web/app/components/workflow/nodes/_base/components/panel-operator/panel-operator-popup.tsx
#	web/app/components/workflow/nodes/_base/panel.tsx
#	web/app/components/workflow/nodes/code/use-config.ts
#	web/app/components/workflow/nodes/llm/default.ts
#	web/app/components/workflow/panel/index.tsx
#	web/app/components/workflow/panel/version-history-panel/index.tsx
#	web/app/components/workflow/store/workflow/index.ts
#	web/app/components/workflow/types.ts
#	web/config/index.ts
#	web/types/workflow.ts
2025-07-02 14:01:59 +08:00
0284e7556e refactor: Refactor useDatasourceIcon hook and enhance dataset node rendering with AppIcon component 2025-07-02 13:48:11 +08:00
9f14b5db9a r2 2025-07-02 11:55:21 +08:00
39d3f58082 r2 2025-07-02 11:33:00 +08:00
5d7a533ada fix: Improve layout by adding overflow handling in CreateFromPipeline and List components 2025-07-01 17:46:41 +08:00
0db7967e5f refactor: Add useOnlineDrive hook and integrate it into CreateFormPipeline and TestRunPanel components 2025-07-01 16:54:44 +08:00
a81dc49ad2 feat: Refactor OnlineDocuments and PageSelector components to enhance state management and integrate new Actions component 2025-07-01 16:32:21 +08:00
f33b6c0c73 add online drive 2025-07-01 16:08:54 +08:00
a4eddd7dc2 r2 2025-07-01 15:16:33 +08:00
c993a05da7 Merge remote-tracking branch 'origin/feat/r2' into feat/r2 2025-07-01 14:23:58 +08:00
f44f0fa34c r2 2025-07-01 14:23:46 +08:00
2d0d448667 feat: Update selection handling to support multiple choice in OnlineDocuments and PageSelector components 2025-07-01 14:14:28 +08:00
bfcf09b684 feat(datasource): fix datasource icon 2025-07-01 14:04:09 +08:00
cdbba1400c feat(datasource): update fetch_datasource_provider 2025-07-01 11:57:06 +08:00
55d7d7ef76 fix: Update default value handling for number input in useInitialData hook 2025-07-01 11:24:30 +08:00
7b473bb5c9 feat: Integrate OnlineDrive component into CreateFormPipeline and update related components 2025-06-30 18:31:52 +08:00
ff511c6f31 refactor: Remove unused variables and simplify next button logic in TestRunPanel 2025-06-30 17:54:33 +08:00
310102bebd feat: Add SearchMenu icon and integrate it into the file list component with empty state handling 2025-06-30 17:31:27 +08:00
1f5c32525f datasource page 2025-06-30 16:16:54 +08:00
618ad4c291 r2 2025-06-30 15:36:20 +08:00
ada632f9f5 feat: Enhance input field handling by adding allVariableNames prop and localizing error messages 2025-06-30 15:28:55 +08:00
4c82c9d029 feat: Add Online Drive file management components and enhance file icon handling 2025-06-30 14:19:14 +08:00
42655a3b1f fix: checklist 2025-06-30 14:11:26 +08:00
1449ed86c4 feat: rename online driver to online drive and update related classes and methods :) 2025-06-27 20:11:28 +08:00
94674e99ab datasource page add marketplace 2025-06-27 16:44:23 +08:00
eee72101f4 feat(online_driver): add online driver plugin, support browsing and downloading 2025-06-27 16:41:39 +08:00
93eabef58a refactor: Refactor OnlineDocuments component and remove OnlineDocumentSelector 2025-06-27 16:29:30 +08:00
5248fcca56 feat: implement support for single and multiple choice in crawled result items 2025-06-27 15:56:38 +08:00
264b95e572 feat: separate input fields into datasource and global categories in RAG pipeline 2025-06-27 15:23:04 +08:00
8f2ad89027 datasource page 2025-06-27 15:01:33 +08:00
18b1a9cb2e Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-26 15:28:19 +08:00
25fef5d757 merge main 2025-06-26 15:21:24 +08:00
2a25ca2b2c feat: enhance online drive connection UI and add localization for connection status in dataset pipeline 2025-06-26 14:24:50 +08:00
3a9c79b09a feat: refactor data source handling and integrate OnlineDrive component in TestRunPanel 2025-06-26 13:46:12 +08:00
025b55ef3b feat: update tooltip text for test run mode in English and Chinese translations for clarity 2025-06-26 10:17:48 +08:00
cf7574bd10 feat: add FooterTips component and integrate it into TestRunPanel; extend DatasourceType enum with onlineDrive 2025-06-26 10:16:37 +08:00
efccbe4039 r2 2025-06-25 17:32:26 +08:00
c7cec120a6 feat: update variable validation regex for consistency in ExternalDataToolModal and schema 2025-06-25 17:07:31 +08:00
7d7fd18e65 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-25 16:16:41 +08:00
c6ae9628af feat: refactor input variable handling and configurations in pipeline processing components 2025-06-25 16:15:59 +08:00
4631575c12 feat: can support choose current node var 2025-06-25 16:06:08 +08:00
a4f4fea0a5 fix: note node delete 2025-06-25 16:01:45 +08:00
540096a8d8 Merge branch 'main' into feat/r2
# Conflicts:
#	api/core/plugin/impl/oauth.py
#	api/core/workflow/entities/variable_pool.py
#	api/models/workflow.py
#	api/services/dataset_service.py
2025-06-25 14:35:23 +08:00
7b7cdad1d8 r2 2025-06-25 13:28:08 +08:00
261b7cabc8 feat: enhance OnlineDocumentPreview with datasourceNodeId and implement preview functionality 2025-06-25 11:36:56 +08:00
ccd346d1da feat: add handling for RAG pipeline variables in node interactions 2025-06-25 10:40:48 +08:00
a866cbc6d7 feat: implement usePipeline hook for managing pipeline variables and refactor input field handling 2025-06-25 10:11:26 +08:00
6aba39a2dd feat(datasource): add datasource content preview api 2025-06-24 17:43:25 +08:00
8f4a0d4a22 variable picker 2025-06-24 17:27:06 +08:00
49bb15fae1 feat(datasource): add datasource content preview api 2025-06-24 17:14:31 +08:00
e165f4a102 feat(datasource): add datasource content preview api 2025-06-24 17:14:16 +08:00
1c51bef3cb fix: standardize capitalization in translation keys and remove unused group property in FieldListContainer 2025-06-24 14:25:58 +08:00
c31754e6cd fix: create pipeline from customized 2025-06-24 11:12:39 +08:00
83cc484c24 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-23 17:12:26 +08:00
1ff9c07a92 fix notion dataset rule not found 2025-06-23 17:12:08 +08:00
b25b284d7f Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-23 16:41:13 +08:00
2414dbb5f8 feat: clear selected IDs on document deletion action in DocumentList component 2025-06-23 16:38:19 +08:00
916a8c76e7 fix: rename currentDocuments to currentDocument for consistency in online documents handling 2025-06-23 16:31:09 +08:00
9783832223 Merge branch 'feat/datasource' into deploy/rag-dev 2025-06-23 16:12:03 +08:00
b77081a19e feat(datasource): update datasource icon 2025-06-23 15:57:37 +08:00
896906ae77 feat: refactor layout structure in PipelineSettings component for improved responsiveness 2025-06-23 15:57:02 +08:00
2365a3a5fc Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-23 15:39:00 +08:00
dd792210f6 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-23 15:38:34 +08:00
6ba4a4c165 feat: enhance website crawl functionality with state management and result handling 2025-06-23 15:38:24 +08:00
0a6dbf6ee2 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-23 15:21:15 +08:00
ca0979dd43 feat(datasource): update fetch_datasource_provider 2025-06-23 15:18:15 +08:00
0762e5ae50 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-23 15:03:32 +08:00
48f53f3b9b workflow dependency 2025-06-23 15:02:57 +08:00
af64f29e87 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-23 13:59:39 +08:00
b9f59e3a75 Merge branch 'main' into feat/rag-pipeline 2025-06-23 13:59:05 +08:00
b12a8eeb90 feat(datasource): change datasource result type to event-stream 2025-06-20 10:09:47 +08:00
e551cf65c9 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-19 15:58:51 +08:00
3899211c41 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-19 15:58:01 +08:00
335e1e3602 feat: enhance pipeline settings with execution log and processing capabilities 2025-06-19 15:57:49 +08:00
725fc72c6f Merge branch 'feat/r2' into deploy/rag-dev 2025-06-19 15:31:03 +08:00
b618f3bd9e r2 2025-06-19 15:30:46 +08:00
95ba55af4d fix: import dsl sync rag variables 2025-06-19 15:04:26 +08:00
f4e1ea9011 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-19 14:30:05 +08:00
3d0e288e85 r2 2025-06-19 14:29:39 +08:00
9620d6bcd8 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	web/i18n/zh-Hans/app.ts
2025-06-19 13:32:49 +08:00
f7fbded8b9 Merge branch 'main' into feat/r2 2025-06-19 13:32:07 +08:00
0c5706b3f6 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-19 11:10:43 +08:00
82d0a70cb4 feat(datasource): change datasource result type to event-stream 2025-06-19 11:10:24 +08:00
55516c4e57 fix: add type checks for workspace roles in DatasetsLayout component 2025-06-19 10:56:03 +08:00
cc2cd85ff5 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-19 10:46:15 +08:00
6ec742539a r2 2025-06-19 10:45:59 +08:00
09e0a54070 r2 2025-06-19 10:38:10 +08:00
5d25199f42 refactor: update layout for creation title and content in StepThree component 2025-06-19 09:36:04 +08:00
387826674c Merge branch 'main' into feat/rag-pipeline 2025-06-19 09:34:09 +08:00
02ae479636 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-18 18:04:23 +08:00
a103324f25 refactor: enhance UI components with new icons and improved styling in billing and dataset processes 2025-06-18 18:03:43 +08:00
643efc5d85 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-18 17:11:52 +08:00
43e5798e13 feat(datasource): change datasource result type to event-stream 2025-06-18 16:27:10 +08:00
8aca70cd50 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-18 16:05:08 +08:00
2cf980026e feat(datasource): change datasource result type to event-stream 2025-06-18 16:04:47 +08:00
224111081b feat(datasource): change datasource result type to event-stream 2025-06-18 16:04:40 +08:00
4dc6cad588 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-18 15:08:05 +08:00
f85e6a0dea feat: implement SSE for data source node processing and completion events, replacing previous run methods 2025-06-18 15:06:50 +08:00
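Several commits in this range move datasource results from a one-shot response to an event-stream, with this one adding processing and completion events over SSE. A minimal client-side sketch of consuming such a stream; the endpoint, event names, and payload shape are assumptions:

```ts
// Hypothetical payloads for the processing / completion events.
type DataSourceEvent =
  | { event: 'datasource_processing'; data: { progress: number } }
  | { event: 'datasource_completed'; data: { result: unknown } }

export async function runDataSourceNode(
  url: string,
  onEvent: (e: DataSourceEvent) => void,
): Promise<void> {
  const res = await fetch(url, { headers: { Accept: 'text/event-stream' } })
  if (!res.ok || !res.body)
    throw new Error(`stream request failed: ${res.status}`)

  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let buffer = ''
  for (;;) {
    const { done, value } = await reader.read()
    if (done) break
    buffer += decoder.decode(value, { stream: true })
    // SSE messages are delimited by a blank line.
    const messages = buffer.split('\n\n')
    buffer = messages.pop() ?? ''
    for (const message of messages) {
      const dataLine = message
        .split('\n')
        .find(line => line.startsWith('data: '))
      if (dataLine)
        onEvent(JSON.parse(dataLine.slice('data: '.length)))
    }
  }
}
```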
4b3a54633f refactor: streamline dataset detail fetching and improve dataset list handling across components 2025-06-18 15:05:21 +08:00
6f67a34349 r2 qa index 2025-06-18 14:37:18 +08:00
e51d308312 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-18 13:47:36 +08:00
379c92bd82 Merge branch 'main' into feat/rag-pipeline 2025-06-18 13:47:06 +08:00
fa9f0ebfb1 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-18 11:06:37 +08:00
ac917bb56d r2 2025-06-18 11:05:52 +08:00
f7a4e5d1a6 Merge branch 'main' into feat/r2 2025-06-18 10:57:44 +08:00
515d34bbfb Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 19:07:38 +08:00
66de2e1f0a Merge remote-tracking branch 'origin/feat/r2' into feat/r2
# Conflicts:
#	api/core/workflow/graph_engine/entities/event.py
#	api/services/rag_pipeline/rag_pipeline.py
2025-06-17 19:07:15 +08:00
7f7ea92a45 r2 2025-06-17 19:06:17 +08:00
a014345688 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 18:29:24 +08:00
cf66d111ba feat(datasource): change datasource result type to event-stream 2025-06-17 18:29:02 +08:00
2d01b1a808 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 18:24:52 +08:00
739ebf2117 feat(datasource): change datasource result type to event-stream 2025-06-17 18:24:09 +08:00
176b844cd5 refactor: consolidate DialogWrapper component usage and improve prop handling across input fields 2025-06-17 18:20:30 +08:00
8fc6684ab1 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 18:12:36 +08:00
7c41f71248 r2 2025-06-17 18:11:38 +08:00
2c2bfb4f54 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-17 17:45:50 +08:00
3164f90327 merge main 2025-06-17 17:44:08 +08:00
90ac52482c test: add unit tests for ActionButton component with various states and sizes 2025-06-17 16:48:52 +08:00
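A sketch of the kind of test this commit adds, using React Testing Library with jest-dom matchers; the ActionButton import path and props are hypothetical:

```tsx
import '@testing-library/jest-dom'
import { render, screen } from '@testing-library/react'
import ActionButton from './action-button' // hypothetical path

describe('ActionButton', () => {
  it('renders its children', () => {
    render(<ActionButton>Delete</ActionButton>)
    expect(screen.getByRole('button')).toHaveTextContent('Delete')
  })

  it('can be disabled', () => {
    // Assumes the component forwards `disabled` to the underlying button.
    render(<ActionButton disabled>Delete</ActionButton>)
    expect(screen.getByRole('button')).toBeDisabled()
  })
})
```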
879ac940dd Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-17 16:30:37 +08:00
796797d12b feat: centralize variable type mapping by introducing VAR_TYPE_MAP in pipeline model 2025-06-17 16:28:50 +08:00
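Centralizing the mapping means one lookup table replaces per-component switch statements. An illustrative sketch of such a map; the enum members and values are assumptions, not the exact Dify definitions:

```ts
enum PipelineInputVarType {
  textInput = 'text-input',
  paragraph = 'paragraph',
  select = 'select',
  number = 'number',
  singleFile = 'file',
  multiFiles = 'file-list',
}

enum VarType {
  string = 'string',
  number = 'number',
  file = 'file',
  arrayFile = 'array[file]',
}

// Single source of truth: input field type -> workflow variable type.
export const VAR_TYPE_MAP: Record<PipelineInputVarType, VarType> = {
  [PipelineInputVarType.textInput]: VarType.string,
  [PipelineInputVarType.paragraph]: VarType.string,
  [PipelineInputVarType.select]: VarType.string,
  [PipelineInputVarType.number]: VarType.number,
  [PipelineInputVarType.singleFile]: VarType.file,
  [PipelineInputVarType.multiFiles]: VarType.arrayFile,
}
```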
7ac0f0c08c feat: enhance processing components by adding runDisabled state and fetching indicators 2025-06-17 16:13:49 +08:00
5cc6a2bf33 refactor: update toast notification handling and improve context usage in DocumentDetail 2025-06-17 14:41:06 +08:00
2db0b19044 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 14:05:15 +08:00
1d2ee9020c r2 2025-06-17 14:04:55 +08:00
f2538bf381 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-17 13:56:00 +08:00
f37e28a368 feat(datasource): Comment out the datasource_file_manager. 2025-06-17 13:54:25 +08:00
c5976f5a09 feat(datasource): change datasource result type to event-stream 2025-06-17 13:51:41 +08:00
64a9181ee4 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-17 11:30:38 +08:00
33cd32382f feat: add indexing status batch and process rule hooks; refactor Notion page preview types 2025-06-17 11:29:56 +08:00
9456c59290 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-17 10:14:20 +08:00
ce0bd421ae Merge branch 'main' into feat/rag-pipeline 2025-06-17 10:13:31 +08:00
f9d04c6975 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-16 18:26:01 +08:00
ecb07a5d0d feat: enhance field list functionality by adding chosen and selected properties to SortableItem 2025-06-16 18:25:30 +08:00
a165ba2059 merge main 2025-06-16 15:43:57 +08:00
12fd2903d8 fix 2025-06-16 15:41:27 +08:00
0a2c569b3b fix: replace useGetDocLanguage with useDocLink for consistent documentation linking 2025-06-16 14:58:52 +08:00
9ab0d5fe60 Merge branch 'main' into feat/rag-pipeline 2025-06-16 14:25:58 +08:00
1d71fd5b56 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-16 14:09:04 +08:00
b277acc298 Merge branch 'main' into feat/r2 2025-06-16 14:08:02 +08:00
8d47d8ce4f Merge remote-tracking branch 'origin/feat/r2' into feat/r2
# Conflicts:
#	api/core/datasource/website_crawl/website_crawl_plugin.py
#	api/services/rag_pipeline/rag_pipeline.py
2025-06-16 13:50:33 +08:00
41fef8a21f r2 2025-06-16 13:48:43 +08:00
b853a42e37 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-16 11:45:24 +08:00
1633626d23 Merge branch 'main' into feat/rag-pipeline 2025-06-16 11:44:42 +08:00
6c7a40c571 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-16 10:16:00 +08:00
abb2ed66e7 merge main 2025-06-16 10:15:24 +08:00
5ae78f79b0 datasource dark theme 2025-06-16 10:00:14 +08:00
e3b3a6d040 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 18:32:56 +08:00
6622ce6ad8 fix: update formData construction in convertToInputFieldFormData for improved handling of optional fields
fix: adjust z-index value in DialogWrapper for proper stacking context
2025-06-13 18:32:36 +08:00
5ccb8d9736 feat: online document 2025-06-13 18:22:15 +08:00
55906c8375 fix: remove unused billing plan logic from CreateFromDSLModal component 2025-06-13 18:01:01 +08:00
0908f310fc feat: webcrawl 2025-06-13 17:47:51 +08:00
58842898e1 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-13 16:42:27 +08:00
1c17c8fa36 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 16:39:40 +08:00
26aff400e4 node default configs 2025-06-13 16:38:54 +08:00
4b11d29ede fix: update VAR_TYPE_MAP and initialData handling in useConfigurations for improved variable processing 2025-06-13 15:57:16 +08:00
b2b95412b9 r2 2025-06-13 15:04:22 +08:00
5c228bca4f feat: replace TypeIcon with AppIcon in SelectDataSet component for improved icon display 2025-06-13 15:02:31 +08:00
7bd2509ad5 Update deploy-dev.yml 2025-06-13 14:50:38 +08:00
2a5d70d9e1 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 14:44:53 +08:00
b0107f4128 fix: update z-index values for DialogWrapper components to ensure proper stacking context 2025-06-13 14:44:32 +08:00
dc3c5362e4 Merge branch 'deploy/rag-dev' of https://github.com/langgenius/dify into deploy/rag-dev 2025-06-13 14:43:58 +08:00
1d106c3660 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 14:43:46 +08:00
fcb2fa04e7 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 14:43:38 +08:00
55bff10f0d fix 2025-06-13 14:43:02 +08:00
45c9b77e82 fix: match var regex 2025-06-13 14:42:35 +08:00
767860e76b Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-13 11:44:22 +08:00
80f656f79a fix: adjust layout and visibility conditions in CreateFormPipeline and ChunkPreview components 2025-06-13 11:38:26 +08:00
c891eb28fc fix: error showing vars in prompt editor 2025-06-13 11:12:11 +08:00
b9fa3f54e9 refactor: refactor component imports and enhance layout for better responsiveness in dataset previews 2025-06-13 10:54:31 +08:00
4d2f904d72 feat: enhance WorkflowPreview and TemplateCard components with additional styling and className prop 2025-06-13 10:14:45 +08:00
26b7911177 Merge branch 'main' into feat/rag-pipeline 2025-06-13 09:49:03 +08:00
dd91edf70b Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-12 18:11:19 +08:00
d994e6b6c7 feat: replace data source icons with AppIcon component in Item and DatasetItem 2025-06-12 18:11:00 +08:00
aba48bde0b feat: update SettingsModal to integrate keyword number handling and refactor index method logic 2025-06-12 18:06:00 +08:00
3e5d9884cb test: add unit tests for AppIcon component with various rendering scenarios 2025-06-12 17:33:28 +08:00
faadad62ff Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-12 17:01:41 +08:00
406d70e4a3 feat: integrate resetDatasetList hook into CreateOptions and TemplateCard components 2025-06-12 16:59:33 +08:00
f17f256b2b Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-12 16:25:04 +08:00
b367f48de6 add datasource category 2025-06-12 16:24:21 +08:00
dee7b6eb22 Update deploy-dev.yml 2025-06-12 16:19:12 +08:00
6f17200dec refactor: update dataset handling to use runtime_mode instead of pipeline_id 2025-06-12 15:57:07 +08:00
d3dbfbe8b3 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-12 15:18:49 +08:00
b1f250862f Merge branch 'main' into feat/rag-pipeline 2025-06-12 15:18:19 +08:00
141d6b1abf feat: implement document settings and pipeline settings components with localization support 2025-06-12 15:13:15 +08:00
a7eb534761 add datasource category 2025-06-12 15:05:36 +08:00
808f792f55 fix: update isPending condition and add indexing technique checks for segment detail and new segment modal 2025-06-12 11:11:03 +08:00
346d066128 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 18:12:17 +08:00
5c41922b8a fix: update file extension for downloaded DSL files and refine mutation keys for template operations 2025-06-11 18:11:38 +08:00
9c3e3b00d0 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 18:07:20 +08:00
da3a3ce165 r2 2025-06-11 18:07:06 +08:00
b525bc2b81 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 18:03:53 +08:00
14dc3e8642 r2 2025-06-11 18:03:21 +08:00
e52c905aa5 refactor: improve layout-main component structure and readability 2025-06-11 17:46:50 +08:00
7b9a3c1084 fix: update translation keys for document availability messages in English and Chinese 2025-06-11 17:42:45 +08:00
ce8ddae11e Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-11 17:30:45 +08:00
4e8184bc56 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	api/models/dataset.py
2025-06-11 17:30:30 +08:00
9eb8597957 r2 2025-06-11 17:29:14 +08:00
cde584046d Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 17:19:08 +08:00
b7f9d7e94a Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-11 17:17:15 +08:00
88817bf974 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 17:17:10 +08:00
92e6c52c0e refactor: update handleUseTemplate to use callback for dataset creation and improve error handling; change HTTP method for dependency check 2025-06-11 17:17:09 +08:00
309dfe8829 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 17:16:52 +08:00
1d8b390584 Merge branch 'main' into feat/r2
# Conflicts:
#	docker/docker-compose.middleware.yaml
2025-06-11 17:16:27 +08:00
7dea7f77ac Merge branch 'main' into feat/rag-pipeline 2025-06-11 17:12:38 +08:00
4d9b15e519 fix 2025-06-11 17:11:57 +08:00
45a708f17e Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 17:10:42 +08:00
5f08a9314c r2 2025-06-11 17:10:20 +08:00
5802b2b437 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 16:39:18 +08:00
f995436eec feat: implement chunk structure card and related hooks for dataset creation; update translations and refactor pipeline template fetching 2025-06-11 16:38:42 +08:00
25f0c61e65 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 16:36:56 +08:00
66fa68fa18 r2 2025-06-11 16:36:36 +08:00
3e5781c6f1 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 14:03:50 +08:00
a6f7560d2f r2 2025-06-11 14:03:32 +08:00
45c76c1d68 refactor: rename icon property to icon_info in UpdateTemplateInfoRequest and related components 2025-06-11 13:39:07 +08:00
14d5af468c Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 13:12:41 +08:00
874e1bc41d r2 2025-06-11 13:12:18 +08:00
d2ae695b3b Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 11:55:46 +08:00
6ecdac6344 pipeline preview 2025-06-11 11:51:19 +08:00
3c2ce07f38 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-11 11:30:14 +08:00
5c58b11b22 refactor: standardize pipeline template properties and improve related components 2025-06-11 11:25:08 +08:00
be92122f17 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-11 11:21:37 +08:00
2972a06f16 r2 2025-06-11 11:21:17 +08:00
caa275fdbd refactor: remove unused websiteCrawlJobId state and related props from useWebsiteCrawl and CreateFormPipeline components; update loading and file preview components for consistent width 2025-06-11 10:50:03 +08:00
5dbda7f4c5 merge main 2025-06-11 10:41:50 +08:00
0564651f6f publish as customized pipeline 2025-06-11 10:40:49 +08:00
eff8108f1c refactor: update dataset model and improve batch action component 2025-06-11 10:24:07 +08:00
127a77d807 r2 2025-06-10 19:22:08 +08:00
265842223c Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 18:20:44 +08:00
95a24156de r2 2025-06-10 18:20:32 +08:00
80ca5b3356 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 18:16:45 +08:00
e934503fa0 r2 2025-06-10 18:16:30 +08:00
442bcd18c0 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-10 17:59:27 +08:00
aeb1d1946c r2 2025-06-10 17:59:14 +08:00
12f2913e08 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 17:40:08 +08:00
0aeeee49f7 fix: draft sync 2025-06-10 17:39:20 +08:00
eb7479b1ea Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 17:12:12 +08:00
80b219707e r2 2025-06-10 17:11:49 +08:00
65ac022245 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-10 16:06:58 +08:00
6e6090d5a9 test(SegmentedControl): add test cases 2025-06-10 16:06:42 +08:00
58b5daeef3 r2 2025-06-10 15:56:28 +08:00
33fd1fa79d Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 15:40:52 +08:00
978118f770 fix: datasource 2025-06-10 15:40:15 +08:00
a2610b22cc Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 15:20:19 +08:00
f4789d750d publish as pipeline 2025-06-10 15:19:47 +08:00
176f9ea2f4 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 15:10:52 +08:00
5e71f7c825 publish as pipeline 2025-06-10 15:10:13 +08:00
7624edd32d r2 2025-06-10 14:56:18 +08:00
7b79354849 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 14:54:27 +08:00
a7ff2ab470 r2 2025-06-10 14:53:07 +08:00
d3eedaf0ec feat(i18n): add new translation entries for local file, website crawl, and online document 2025-06-10 14:22:09 +08:00
bcb0496bf4 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 14:13:32 +08:00
4d967544f3 r2 2025-06-10 14:13:10 +08:00
c18ee4be50 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 11:45:19 +08:00
65873aa411 r2 2025-06-10 11:44:52 +08:00
b95256d624 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 11:00:11 +08:00
c0d3452494 r2 2025-06-10 10:59:44 +08:00
c91456de1b fix(ChunkStructure): add disabled prop to OptionCard component 2025-06-10 10:43:40 +08:00
e1ce156433 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 10:16:49 +08:00
9e19ed4e67 knowledge base node checklist 2025-06-10 10:16:13 +08:00
ba383b1b0d Merge branch 'feat/r2' into deploy/rag-dev 2025-06-10 10:00:48 +08:00
ad3d9cf782 r2 2025-06-10 10:00:20 +08:00
69053332e4 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-10 09:47:36 +08:00
5b4d04b348 Merge branch 'main' into feat/rag-pipeline 2025-06-10 09:38:06 +08:00
47664f8fd3 r2 2025-06-09 14:00:34 +08:00
8d8f21addd Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 18:52:57 +08:00
9b9640b3db refactor: remove job ID handling from website crawl components and update related hooks 2025-06-06 18:52:32 +08:00
83ba61203b Merge branch 'feat/r2' into deploy/rag-dev 2025-06-06 17:47:47 +08:00
fcbd5febeb r2 2025-06-06 17:47:06 +08:00
b8813e199f Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 17:27:14 +08:00
2322496552 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	docker/docker-compose.middleware.yaml
2025-06-06 17:15:24 +08:00
21a3509bef r2 2025-06-06 17:14:43 +08:00
3e2f12b065 refactor: update website crawl handling and improve parameter naming in pipeline processing 2025-06-06 17:00:34 +08:00
55e20d189a Update deploy-dev.yml 2025-06-06 16:16:44 +08:00
1aa13bd20d r2 2025-06-06 16:05:49 +08:00
cc2dd052df Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-06 16:03:46 +08:00
4ffdf68a20 r2 2025-06-06 16:03:35 +08:00
547bd3cc1b refactor: rename cancel editor handler and improve variable name validation in field list 2025-06-06 15:54:55 +08:00
f3e9761c75 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 15:35:53 +08:00
83ca59e0f1 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-06 15:35:50 +08:00
d725aa8791 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-06 15:35:31 +08:00
cc8ee0ff69 dsl 2025-06-06 15:35:19 +08:00
4a249c40b1 feat: enhance input field configurations with blur listeners and update translations for display name 2025-06-06 15:35:19 +08:00
04e4a1e3aa Merge branch 'feat/r2' into deploy/rag-dev 2025-06-06 15:20:06 +08:00
d2d5fc62ae r2 2025-06-06 15:19:53 +08:00
52460f6929 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-06 15:06:59 +08:00
06dfc32e0f Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	docker/docker-compose.middleware.yaml
2025-06-06 15:06:47 +08:00
0ca38d8215 r2 2025-06-06 15:06:26 +08:00
3da6becad3 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 15:02:00 +08:00
f9d0a7bdc8 Merge branch 'main' into feat/rag-pipeline 2025-06-06 15:01:27 +08:00
e961722597 dsl 2025-06-06 15:00:37 +08:00
2ddd2616ec confirm publish 2025-06-06 14:24:02 +08:00
a82a9fb9d4 fix: update condition for handling datasource selection in DataSourceOptions 2025-06-06 14:24:02 +08:00
3fce6f2581 Merge branch 'feat/r2' into deploy/rag-dev
# Conflicts:
#	api/services/rag_pipeline/rag_pipeline.py
2025-06-06 14:23:05 +08:00
3db864561e confirm publish 2025-06-06 14:22:15 +08:00
d2750f1a02 r2 2025-06-06 14:22:00 +08:00
30a50c5cc8 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-06 12:08:20 +08:00
0ff746ebf6 r2 2025-06-06 12:08:09 +08:00
5193fa2118 fix: update condition for handling datasource selection in DataSourceOptions 2025-06-06 11:43:00 +08:00
9a0dc82e6a Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 10:55:43 +08:00
8e4165defe datasource 2025-06-06 10:55:13 +08:00
d917bc8ed0 Merge branch 'deploy/rag-dev' of https://github.com/langgenius/dify into deploy/rag-dev 2025-06-06 10:53:23 +08:00
ef7bd262c5 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-06 10:52:53 +08:00
d3e29ffa74 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-06 10:40:57 +08:00
70432952fd r2 2025-06-06 10:40:06 +08:00
cf2ef93ad5 Merge branch 'main' into feat/rag-pipeline 2025-06-06 10:10:53 +08:00
cbf0864edc refactor: refactor online documents handling and update related components 2025-06-06 10:08:19 +08:00
bce2bdd0de Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-05 18:28:58 +08:00
82e7c8a2f9 refactor: update datasource handling and improve documentation properties in pipeline components 2025-06-05 18:28:48 +08:00
2acdb0a4ea fix: var type error when calculating var type for data source type 2025-06-05 17:40:49 +08:00
350ea6be6e fix: correct spacing and formatting in variable utility functions 2025-06-05 17:32:03 +08:00
4664174ef3 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-05 16:51:55 +08:00
f0413f359a datasource 2025-06-05 16:51:19 +08:00
53b32c8b22 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 16:44:02 +08:00
b8ef1d9585 r2 2025-06-05 16:43:47 +08:00
90ca98ff3a fix: show rag vars in node 2025-06-05 16:41:04 +08:00
d4a1d045f8 fix: convert to new var format 2025-06-05 16:36:41 +08:00
91fefa0e37 refactor: improve layout and event handling in Header and FieldItem components 2025-06-05 15:44:18 +08:00
067ec17539 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 15:29:37 +08:00
c084b57933 r2 2025-06-05 15:28:44 +08:00
876be7e6e9 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-05 15:07:59 +08:00
468bfdfed9 datasource 2025-06-05 15:07:29 +08:00
82d817f612 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 14:56:19 +08:00
9e84a5321d r2 2025-06-05 14:55:09 +08:00
d77e27ac05 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 14:10:07 +08:00
8a86a2c817 r2 2025-06-05 14:09:50 +08:00
fdc4c36b77 refactor: replace useStore with useDatasetDetailContextWithSelector for pipeline ID retrieval 2025-06-05 14:05:27 +08:00
52c118f5b8 Merge remote-tracking branch 'origin/deploy/rag-dev' into deploy/rag-dev 2025-06-05 11:46:19 +08:00
5d7c7023c3 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 11:46:07 +08:00
3e0a10b7ed r2 2025-06-05 11:45:53 +08:00
84f5272f72 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-05 11:40:47 +08:00
6286f368f1 refactor: replace ImagePlus icon with RiImageCircleAiLine and improve tab button styling 2025-06-05 11:21:17 +08:00
cb2ca0b533 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-05 11:12:32 +08:00
5fe5da7c1d r2 2025-06-05 11:12:06 +08:00
c83370f701 refactor: simplify workflow draft synchronization in InputFieldDialog 2025-06-05 11:07:28 +08:00
7506867fb9 Merge branch 'main' into feat/rag-pipeline 2025-06-05 10:29:18 +08:00
842136959b feat: update data source handling and improve processing parameters integration 2025-06-05 10:24:25 +08:00
4c2cc98ebc Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-04 18:37:25 +08:00
44b9f49ab1 feat: enhance field item interaction and add preprocessing parameters hooks 2025-06-04 18:37:19 +08:00
f7f7952951 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-04 18:10:03 +08:00
a7fa5044e3 datasource 2025-06-04 18:09:31 +08:00
eb84134706 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 17:39:46 +08:00
fbca9010f3 r2 2025-06-04 17:39:31 +08:00
0bf0c7dbe8 feat: var type to inner 2025-06-04 17:34:22 +08:00
e071bd63e6 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 17:29:55 +08:00
8a147a00e8 r2 2025-06-04 17:29:39 +08:00
c9a4c66b07 fix: update input type from 'number-input' to 'number' for consistency 2025-06-04 16:58:08 +08:00
edec654b68 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 16:51:52 +08:00
a82ab1d152 r2 2025-06-04 16:51:23 +08:00
9934eac15c refactor: refactor data source handling and add form for document processing 2025-06-04 16:50:27 +08:00
c155afac29 chore: rename misspelled rag var 2025-06-04 16:43:24 +08:00
7080c9f279 fix: show rag var names 2025-06-04 16:29:36 +08:00
e41699cbc8 Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 16:23:34 +08:00
133193e7d0 r2 2025-06-04 16:23:12 +08:00
9d6371e0a3 Merge branch 'feat/rag-pipeline' into deploy/rag-dev 2025-06-04 15:48:58 +08:00
dfe091789c datasource 2025-06-04 15:48:29 +08:00
4c9bf78363 knowledge base node checklist 2025-06-04 15:18:03 +08:00
b95ecaf8a3 Update build-push.yml 2025-06-04 15:17:39 +08:00
7a0e8108ae Merge branch 'feat/r2' into deploy/rag-dev 2025-06-04 15:16:47 +08:00
3afd5e73c9 feat: enhance input field dialog with preview functionality and global inputs 2025-06-04 15:16:02 +08:00
c09c8c6e5b r2 2025-06-04 15:12:05 +08:00
cab491795a chore: some special to some fns 2025-06-04 14:54:11 +08:00
e290ddc3e5 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-04 14:50:16 +08:00
db154e33b7 Merge branch 'main' into feat/rag-pipeline 2025-06-04 14:48:10 +08:00
32f9004b5f merge feat/rag-pipeline 2025-06-04 11:43:38 +08:00
225402280e datasource auth 2025-06-04 11:39:31 +08:00
abcca11479 r2 2025-06-03 19:10:40 +08:00
9cdd2cbb27 r2 2025-06-03 19:02:57 +08:00
309fffd1e4 Merge branch 'main' into feat/r2
# Conflicts:
#	api/core/repositories/sqlalchemy_workflow_node_execution_repository.py
#	api/core/workflow/entities/node_entities.py
#	api/core/workflow/enums.py
2025-06-03 18:56:49 +08:00
0a9f50e85f Merge branch 'main' into feat/rag-pipeline 2025-06-03 18:44:53 +08:00
ed1d71f4d0 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-03 18:33:07 +08:00
7039ec33b9 refactor: update retrieval search method from invertedIndex to keywordSearch 2025-06-03 18:33:01 +08:00
025dc7c781 feat: can show rag vars 2025-06-03 18:32:52 +08:00
4130c50643 r2 2025-06-03 18:32:39 +08:00
7b7f8ef51d r2 2025-06-03 18:12:24 +08:00
bad451d5ec Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-06-03 17:42:53 +08:00
87c15062e6 feat: enhance document processing with embedding and rule detail components 2025-06-03 17:42:40 +08:00
573cd15e77 r2 2025-06-03 16:52:21 +08:00
ab1730bbaa r2 2025-06-03 16:51:21 +08:00
163bae3aaf input rag variable 2025-06-03 16:07:58 +08:00
270edd43ab r2 2025-06-03 15:53:17 +08:00
b8f3b23b1a r2 2025-06-03 15:51:31 +08:00
b9c6496fea datasource default value & publish sync draft 2025-06-03 14:42:17 +08:00
0486aa3445 r2 2025-06-03 13:30:51 +08:00
5fb771218c fix: update types and improve data handling in pipeline components 2025-06-03 10:14:48 +08:00
3fb02a7933 r2 2025-05-30 17:28:09 +08:00
898495b5c4 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-30 15:54:46 +08:00
08624878cf fix: update ChunkStructureEnum values for consistency with model naming 2025-05-30 15:53:32 +08:00
6fe473f0fa knowledge base node 2025-05-30 15:45:58 +08:00
11cf23e5fc Merge remote-tracking branch 'origin/feat/r2' into feat/r2 2025-05-30 15:42:56 +08:00
631768ea1d r2 2025-05-30 15:42:36 +08:00
e1d658b482 Update build-push.yml 2025-05-30 15:26:51 +08:00
1274aaed5d fix: add dataset mutation context to Popup component 2025-05-30 15:17:19 +08:00
9be036e0ca Merge branch 'main' into feat/rag-pipeline 2025-05-30 15:10:16 +08:00
7284569c5f Update build-push.yml 2025-05-30 01:02:33 +08:00
976b465e76 r2 2025-05-30 00:55:06 +08:00
804e55824d r2 2025-05-30 00:37:36 +08:00
69529fb16d r2 2025-05-30 00:37:27 +08:00
cb5cfb2dae r2 2025-05-30 00:03:43 +08:00
a826879cf7 Merge branch 'main' into feat/r2 2025-05-29 23:04:38 +08:00
e7c48c0b69 r2 2025-05-29 23:04:04 +08:00
558a280fc8 datasource type 2025-05-29 18:21:29 +08:00
2158c03231 fix: update button disabled state to reflect publishedAt status in Popup component 2025-05-29 17:53:08 +08:00
a61f1f8eb0 refactor: replace anchor tag with Link component for navigation in Actions 2025-05-29 17:42:00 +08:00
9f724c19db refactor: refactor navigation components to use Link for improved routing 2025-05-29 17:33:04 +08:00
4ae936b263 refactor: refactor navigation handling in dataset components to use button elements 2025-05-29 15:48:54 +08:00
80875a109a feat: add logic to handle navigation based on pipeline status in DatasetCard 2025-05-29 15:22:29 +08:00
121e54f3e3 plugins page 2025-05-29 15:18:27 +08:00
1c2c4b62f8 run & tracing 2025-05-29 14:31:35 +08:00
9176790adf feat: enhance dataset detail layout with button disable logic based on pipeline status 2025-05-29 14:06:12 +08:00
6ff6525d1d test run 2025-05-29 11:30:42 +08:00
71ce505631 data source panel 2025-05-29 11:03:22 +08:00
11dfe3713f refactor: enhance document upload flow with step indicators and file preview handling 2025-05-29 10:18:11 +08:00
a025db137d Merge branch 'main' into feat/r2 2025-05-29 09:54:28 +08:00
797d044714 r2 2025-05-29 09:53:42 +08:00
c4169f8aa0 Merge branch 'main' into feat/rag-pipeline 2025-05-29 09:39:36 +08:00
3005419573 feat: implement document upload steps and enhance test run panel with new hooks and components 2025-05-28 18:34:26 +08:00
7f59ffe7af r2 2025-05-28 17:56:04 +08:00
cc7ad5ac97 feat: add input field variables change sync 2025-05-28 16:38:49 +08:00
769b5e185a workflow init config staletime 2025-05-28 15:36:10 +08:00
9e763c9e87 feat: enhance file uploader and test run panel with batch upload limits and tooltips 2025-05-28 14:51:10 +08:00
b9214ca76b knowledge base default data 2025-05-28 13:57:24 +08:00
29d2f2339b refactor: enhance document preview functionality and refactor form handling 2025-05-28 13:44:37 +08:00
5ac1e3584d Merge branch 'main' into feat/rag-pipeline 2025-05-28 11:01:56 +08:00
dd0cf6fadc refactor: streamline data source type handling and improve FieldList props 2025-05-28 10:07:12 +08:00
b320ebe2ba datasource variables 2025-05-27 18:44:29 +08:00
377093b776 fix: conditionally render FieldListContainer based on inputFields length 2025-05-27 18:19:10 +08:00
70119a054a fix: add is_preview flag to datasource submission and improve dataset card rendering logic 2025-05-27 17:54:39 +08:00
69d1e3ec7d input field in datasource 2025-05-27 17:42:30 +08:00
365157c37d refactor: enhance action button logic to include workflow running state 2025-05-27 15:43:10 +08:00
4bc0a1bd37 knowledge base node init 2025-05-27 15:28:35 +08:00
d6640f2adf refactor: streamline input field data conversion and enhance datasource component 2025-05-27 15:25:35 +08:00
987f845e79 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-27 14:42:12 +08:00
84daf49047 node meta data 2025-05-27 14:40:56 +08:00
31e183ef0d refactor: enhance datasource handling by adding fileExtensions support 2025-05-27 14:39:52 +08:00
754a1d1197 refactor: add DatasourceIcon component and update related hooks and options 2025-05-27 14:17:55 +08:00
049a6de4b3 refactor: update data source handling and replace icon implementation 2025-05-27 13:52:43 +08:00
6bd28cadc4 datasource icon 2025-05-27 13:42:13 +08:00
3b9a0b1d25 datasource icon 2025-05-27 11:27:25 +08:00
db963a638c Merge branch 'main' into feat/rag-pipeline 2025-05-27 11:03:49 +08:00
dcb4c9e84a refactor: refactor datasource type handling 2025-05-27 11:01:38 +08:00
5fc2bc58a9 r2 2025-05-27 00:01:23 +08:00
d333645e09 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-26 17:48:59 +08:00
2812c774c6 fix(i18n): Update economy index method description to include keyword count 2025-05-26 17:48:54 +08:00
e2f3f0ae4c datasource 2025-05-26 17:47:03 +08:00
83ca7f8deb feat: add datasource support to PluginDeclaration and PluginCategory 2025-05-26 17:32:25 +08:00
e6c6fa8ed8 tool icon 2025-05-26 17:28:16 +08:00
678d6ffe2b r2 2025-05-26 17:00:16 +08:00
cef77a3717 datasource icon 2025-05-26 16:41:50 +08:00
28726b6cf3 block selector 2025-05-26 16:33:08 +08:00
ef0e41de07 r2 2025-05-26 16:02:11 +08:00
dc2b63b832 Merge branch 'main' into feat/rag-pipeline 2025-05-26 15:58:15 +08:00
0478fc9649 datasource node variable 2025-05-26 15:57:34 +08:00
1b07e612d2 r2 2025-05-26 15:49:37 +08:00
38cce3f62a r2 2025-05-26 14:52:09 +08:00
35be8721b9 Merge branch 'main' into feat/r2 2025-05-26 14:50:33 +08:00
665ffbdc10 r2 2025-05-26 14:49:59 +08:00
b5f88c77a3 datasource list 2025-05-26 14:13:59 +08:00
324c0d7b4c refactor: improve layout and styling in TestRunPanel and FilePreview components 2025-05-26 14:06:04 +08:00
13e3f17493 refactor: standardize terminology by renaming 'data-source' to 'datasource' across components and translations 2025-05-26 10:50:39 +08:00
841bd35ebb Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-26 09:49:15 +08:00
ccefd41606 refactor: rename InputType to InputTypeEnum and update related usages for consistency 2025-05-26 09:48:17 +08:00
ec1c4efca9 r2 2025-05-25 23:09:01 +08:00
0f10852b6b Merge remote-tracking branch 'origin/feat/r2' into feat/r2 2025-05-23 19:30:59 +08:00
6d547447d3 r2 2025-05-23 19:30:48 +08:00
6123f1ab21 refactor: reorganize imports and fix datasource endpoint URL 2025-05-23 19:22:50 +08:00
e7370766bd datasource 2025-05-23 18:19:28 +08:00
db4958be05 fix: fix modal handling in InputFieldEditor 2025-05-23 17:52:00 +08:00
a15bf8e8fe remove output schema 2025-05-23 17:35:26 +08:00
70d2c78176 r2 2025-05-23 17:13:09 +08:00
42fcda3dc8 r2 2025-05-23 17:11:56 +08:00
ac049d938e Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-23 16:57:46 +08:00
3af61f4b5d refactor: update input type mappings and enums for consistency across components 2025-05-23 16:57:27 +08:00
e19adbbbc5 datasource 2025-05-23 16:27:49 +08:00
64d997fdb0 r2 2025-05-23 15:55:41 +08:00
a49942b949 fix: rename first_step_parameters 2025-05-23 15:12:31 +08:00
4460d96e58 feat: add oauth schema 2025-05-23 15:11:40 +08:00
a7d5f2f53b apply ruff 2025-05-23 15:10:56 +08:00
c9bf99a1e2 refactor: update input variable types and initial data handling in pipeline components 2025-05-23 15:10:20 +08:00
4300ebc8aa fix: remove provider type 2025-05-23 15:10:16 +08:00
720ce79901 checklist & datasource icon 2025-05-23 14:26:06 +08:00
693107a6c8 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-23 13:54:59 +08:00
583db24ee7 refactor: update CustomActions type to use structured props across form components 2025-05-23 13:54:49 +08:00
7d92574e02 datasource panel 2025-05-23 11:51:17 +08:00
5aaa06c8b0 refactor: integrate routing for document creation in Popup component 2025-05-23 11:19:57 +08:00
52b773770b refactor: update datasource handling in InputFieldDialog and Datasource components 2025-05-23 11:07:48 +08:00
23adc7d8a8 datasource 2025-05-23 10:47:31 +08:00
e3708bfa85 refactor: enhance ChunkPreview with form handling and preview functionality 2025-05-23 10:29:59 +08:00
7d65e9980c Merge branch 'main' into feat/rag-pipeline 2025-05-23 09:35:08 +08:00
b93d26ee9f Merge remote-tracking branch 'origin/feat/r2' into feat/r2
# Conflicts:
#	api/core/datasource/entities/datasource_entities.py
2025-05-23 00:06:51 +08:00
b82b26bba5 r2 2025-05-23 00:05:57 +08:00
21c24977d8 refactor: enhance document processing UI and functionality with new components and translations 2025-05-22 23:05:58 +08:00
fe435c23c3 i18n 2025-05-22 17:44:07 +08:00
ead1209f98 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-22 17:41:18 +08:00
3994bb1771 refactor: refactor document processing components and update translations 2025-05-22 17:39:39 +08:00
327690e4a7 merge main 2025-05-22 16:45:13 +08:00
c2a7e0e986 version panel 2025-05-22 16:43:30 +08:00
faf6b9ea03 refactor: refactor preview components 2025-05-22 14:49:40 +08:00
3bfc602561 refactor: update datasource entity structure and parameter handling
- Renamed and split parameters in DatasourceEntity into first_step_parameters and second_step_parameters.
- Updated validation methods for new parameter structure.
- Adjusted datasource_node to reference first_step_parameters.
- Cleaned up unused imports and improved type hints in workflow.py.
2025-05-21 20:36:26 +08:00
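The entry above splits one flat parameter list into per-step lists. As a schematic only (the real entity is a Python pydantic model in api/; this TypeScript rendering and its field names are assumptions):

```ts
type DatasourceParameter = {
  name: string
  type: string
  required: boolean
}

// Before: a single list covering both steps (browse, then download).
type DatasourceEntityBefore = {
  identity: { name: string; provider: string }
  parameters: DatasourceParameter[]
}

// After: parameters are split by step, so the datasource node can
// validate and reference each step's inputs independently.
type DatasourceEntityAfter = {
  identity: { name: string; provider: string }
  first_step_parameters: DatasourceParameter[]
  second_step_parameters: DatasourceParameter[]
}
```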
5fa2aca2c8 feat: add oauth schema to datasource 2025-05-21 20:29:59 +08:00
69a60101fe Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-21 16:37:08 +08:00
b18519b824 refactor: add create-from-pipeline page and associated components for document processing 2025-05-21 16:37:02 +08:00
0d01025254 parallel check 2025-05-21 16:34:41 +08:00
eef1542cb3 use available nodes 2025-05-21 15:51:05 +08:00
9aef4b6d6b refactor: Notion component and add NotionPageSelector for improved page selection 2025-05-21 14:02:37 +08:00
7dba83754f Merge branch 'main' into feat/rag-pipeline 2025-05-21 13:42:28 +08:00
e2585bc778 Merge branch 'main' into feat/rag-pipeline 2025-05-21 11:27:50 +08:00
cc6e2558ef Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-21 10:53:29 +08:00
20343facad refactor: website data source components and hooks 2025-05-21 10:53:18 +08:00
eff123a11c checklist 2025-05-20 16:52:45 +08:00
9bafd3a226 r2 2025-05-20 15:41:10 +08:00
82be119fec Merge branch 'main' into feat/r2 2025-05-20 15:18:52 +08:00
a64df507f6 r2 2025-05-20 15:18:33 +08:00
cf73faf174 feat: add FileUploaderField and TextAreaField components; enhance BaseField to support file inputs 2025-05-20 15:09:30 +08:00
ba52bf27c1 r2 2025-05-20 14:57:26 +08:00
55f4177b01 merge main 2025-05-20 14:03:54 +08:00
14a9052d60 refactor: update variable naming for consistency and improve data source handling in pipeline components 2025-05-20 11:42:22 +08:00
314a2f9be8 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-19 18:21:29 +08:00
8eee344fbb fix: correct hover state logic and refactor environment variable handling in FieldItem and usePipelineInit 2025-05-19 18:21:17 +08:00
0e0a266142 merge main 2025-05-19 18:11:45 +08:00
7bce35913d i18n 2025-05-19 18:09:12 +08:00
7898dbd5bf Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-19 16:26:24 +08:00
bd1073ff1a refactor: update input variable types to use PipelineInputVarType and simplify form data handling 2025-05-19 16:26:13 +08:00
1bbd572593 option card 2025-05-19 15:59:04 +08:00
5199297f61 run and history 2025-05-19 14:23:40 +08:00
c5a2f43ceb refactor: replace BuiltinToolManageService with RagPipelineManageService for datasource management and remove unused datasource engine and related code 2025-05-16 18:42:07 +08:00
8d4ced227e fix: update click handler logic in OptionCard 2025-05-16 17:54:41 +08:00
f481075f8f pipeline sync draft 2025-05-16 17:48:01 +08:00
836cf6453e pipeline sync draft 2025-05-16 17:48:01 +08:00
8bea88c8cc r2 2025-05-16 17:22:17 +08:00
4b7274f9a5 fix: update link in Form component and correct endpoint for related apps query 2025-05-16 16:49:43 +08:00
7de5585da6 refactor: replace SWR with custom hooks for dataset detail and related apps; update context usage in components 2025-05-16 16:32:25 +08:00
87dc80f6fa fix: add cursor pointer and hover effect to MemberItem; adjust padding in PermissionItem 2025-05-16 15:52:28 +08:00
a008c04331 refactor: standardize naming for load more handlers and navigation items across components 2025-05-16 15:43:28 +08:00
56b66b8a57 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-16 15:15:00 +08:00
35a7add4e9 refactor: refactor pipeline-related components and services to use template terminology 2025-05-16 15:14:50 +08:00
f1fe143962 add i18n 2025-05-16 14:53:39 +08:00
9e72afee3c r2 2025-05-16 14:00:35 +08:00
613b94a6e6 r2 2025-05-16 13:45:47 +08:00
7b0d38f7d3 r2 2025-05-16 12:02:35 +08:00
4ff971c8a3 r2 2025-05-16 11:26:56 +08:00
019ef74bf2 refactor: replace Container with List, update DatasetCard z-index, and implement useDatasetList for data fetching 2025-05-16 10:50:31 +08:00
2670557258 merge main 2025-05-16 10:09:24 +08:00
93ac6d37e9 r2 2025-05-15 16:44:55 +08:00
e710a8402c r2 2025-05-15 16:07:17 +08:00
360f8a3375 Merge branch 'main' into feat/r2 2025-05-15 15:15:23 +08:00
818eb46a8b r2 2025-05-15 15:14:52 +08:00
f5c297708b Merge branch 'main' into feat/rag-pipeline 2025-05-15 14:52:54 +08:00
bf8324f7f7 tag filter 2025-05-15 14:52:00 +08:00
b730d153ea Merge branch 'main' into feat/rag-pipeline 2025-05-15 10:27:47 +08:00
11977596c9 merge main 2025-05-15 10:14:40 +08:00
612dca8b7d feat: add WorkflowPreview component to Details and define graph structure in PipelineTemplateByIdResponse 2025-05-14 18:20:29 +08:00
53018289d4 workflow preview 2025-05-14 18:02:58 +08:00
958ff44707 refactor: simplify import DSL confirmation request structure 2025-05-14 16:27:59 +08:00
d910770b3c feat: add dataset_id to DSL import responses and update routing logic in CreateFromDSLModal 2025-05-14 16:00:17 +08:00
5a8f10520f feat: refactor template card actions and details to standardize prop names; add create modal for dataset creation 2025-05-14 15:53:17 +08:00
df928772c0 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-14 14:49:10 +08:00
b713218cab feat: add DSL modal header and tab components; enhance pipeline import functionality 2025-05-14 14:49:01 +08:00
9ea2123e7f component add readonly 2025-05-14 11:14:49 +08:00
de0cb06f8c feat: implement create dataset pipeline forms and modals 2025-05-14 10:48:54 +08:00
cfb6d59513 Merge branch 'main' into feat/rag-pipeline 2025-05-13 18:38:26 +08:00
4c30d1c1eb feat: Enhance InputFieldDialog and workflow hooks to handle ragPipelineVariables 2025-05-13 16:33:38 +08:00
5bb02c79cc Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-13 16:18:07 +08:00
0a891e5392 feat: Update retrieval method configuration to use new OptionCard component and improve layout 2025-05-13 16:17:59 +08:00
f6978ce6b1 fix: pipeline init 2025-05-13 16:02:36 +08:00
4d68aadc1c Refactor: Replace IndexMethodRadio with IndexMethod component, add keyword number functionality, and update related translations 2025-05-13 15:35:21 +08:00
cef6463847 feat: Enhance dataset settings with chunk structure and icon selection 2025-05-13 11:07:31 +08:00
39b8331f81 merge main 2025-05-09 18:20:56 +08:00
212d4c5899 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-09 16:35:14 +08:00
97ec855df4 feat: enhance input field management with internationalization support and improved state handling 2025-05-09 16:35:09 +08:00
d83b9b70e3 fix: import 2025-05-09 16:25:34 +08:00
b51c18c2cf pipeline init 2025-05-09 15:53:31 +08:00
7e31da7882 refactor: update data source handling and improve internationalization support in test run panel 2025-05-09 12:56:57 +08:00
d9ed61287d Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-08 18:29:58 +08:00
6024dbe98d refactor: simplify type definitions in form components and update related configurations 2025-05-08 18:29:49 +08:00
13ce6317f1 pipeline header 2025-05-08 18:27:44 +08:00
0099f2296d Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-08 16:26:37 +08:00
2d93bc6725 refactor: update DatasetInfo component layout and styling for better responsiveness 2025-05-08 16:26:30 +08:00
cb52f9ecc5 pipeline header 2025-05-08 15:32:19 +08:00
1fbeb3a21a refactor: enhance dataset components with new icons and improve layout structure 2025-05-08 13:48:14 +08:00
38f1a42ce8 refactor: remove unused icon components and update imports in dataset components 2025-05-08 11:15:27 +08:00
3d11af2dd6 refactor: update imports for knowledge and pipeline components 2025-05-08 10:44:26 +08:00
d1fd5db7f8 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-08 09:52:53 +08:00
c240cf3bb1 refactor: dataset creation to support pipeline datasets, update related types and hooks 2025-05-08 09:42:02 +08:00
bbbcd68258 portal element 2025-05-07 18:14:26 +08:00
7ce9710229 feat: add pipeline template details and import functionality, enhance dataset pipeline management 2025-05-07 18:09:38 +08:00
3f7f21ce70 show test run panel 2025-05-07 17:31:06 +08:00
fa8ab4ea04 rag pipeline 2025-05-07 16:30:24 +08:00
3f1363503b r2 2025-05-07 16:19:09 +08:00
3f52f491d7 feat: knowledge base node 2025-05-07 15:08:37 +08:00
e86a3fc672 feat: Enhance dataset pipeline creation and management with new export and delete functionalities, improved internationalization, and refactor for better clarity 2025-05-07 14:29:01 +08:00
6f77f67427 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-07 11:30:20 +08:00
4025cd0b46 feat: Refactor dataset pipeline creation components and add internationalization support 2025-05-07 11:30:13 +08:00
3bbb22750c merge main 2025-05-06 18:28:44 +08:00
d196872059 merge main 2025-05-06 17:31:48 +08:00
a478d95950 feat: knowledge base node 2025-05-06 17:25:18 +08:00
12c060b795 feat: enhance dataset icon handling by making icon background and URL optional 2025-05-06 16:58:37 +08:00
c480c3d881 feat: enhance dataset detail layout with new icon structure and additional document count display 2025-05-06 16:37:21 +08:00
a998022c12 r2 2025-05-06 16:18:34 +08:00
a25cc4e8af r2 2025-05-06 13:56:13 +08:00
b4bccf5fef feat: Add External Knowledge Base and Pipeline icons, update DatasetCard component 2025-05-06 11:58:53 +08:00
14ad34af71 feat: enhance dataset creation UI with new pipeline list and edit functionality 2025-05-03 17:16:00 +08:00
7ed398267f Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-05-03 13:43:43 +08:00
fc9556e057 feat: add dataset creation components and functionality 2025-05-03 13:43:37 +08:00
acf6872a50 fix: variable selector 2025-04-30 16:55:00 +08:00
e689f21a60 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-04-30 14:16:25 +08:00
a7f9259e27 feat: new Dataset list 2025-04-30 14:16:13 +08:00
a46b4e3616 Merge branch 'main' into feat/rag-pipeline 2025-04-29 16:27:49 +08:00
e7e12c1d2e fix: node selector 2025-04-29 16:26:53 +08:00
66176c4d71 fix: node default 2025-04-29 16:14:20 +08:00
2613a380b6 fix: Correct link path for creating datasets and optimize Link component with memoization 2025-04-29 15:47:29 +08:00
9392ce259f feat: Refactor dataset components and update translations for new dataset creation options 2025-04-29 15:44:32 +08:00
d1287f08b4 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-04-29 10:44:37 +08:00
7ee8472a5f feat: Add SegmentedControl component with styling and option handling 2025-04-29 10:44:03 +08:00
cdb615deeb knowledge base node 2025-04-28 18:37:18 +08:00
abbba1d004 knowledge base node 2025-04-28 18:37:17 +08:00
3c386c63a6 Merge remote-tracking branch 'origin/feat/r2' into feat/r2 2025-04-28 16:19:29 +08:00
49d1846e63 r2 2025-04-28 16:19:12 +08:00
53f2882077 feat: Implement document processing component with configuration and action handling 2025-04-28 15:55:24 +08:00
8f07e088f5 feat: Add JinaReader and WaterCrawl components with configurations and schema handling 2025-04-28 14:33:01 +08:00
f71b0eccb2 Refactor: dataset creation components and implement Firecrawl functionality 2025-04-28 13:33:16 +08:00
5b89d36ea1 feat: Update Zod schema generation for file types and upload methods to use new constants 2025-04-27 20:44:05 +08:00
7c3af74b0d feat: Update useConfigurations and useHiddenConfigurations to use InputVarType constants for type values 2025-04-27 20:34:25 +08:00
d1d83f8a2a feat: Enhance form components with hidden fields and popup properties for improved configuration 2025-04-27 20:17:50 +08:00
839fe12087 feat: Update OptionsField to use correct Options type and enhance Zod schema generation for options and select input types 2025-04-27 18:45:22 +08:00
fd8ee9f53e Refactor input field form components and schema 2025-04-27 15:29:11 +08:00
c2d02f8f4d Merge branch 'main' into feat/r2 2025-04-27 14:31:19 +08:00
8367ae85de feat: Replace BaseVarType with BaseFieldType for consistent field type usage across components 2025-04-27 10:16:16 +08:00
d1f0e6e5c2 feat: Implement Zod schema generation for form validation and update form component usage 2025-04-27 09:56:48 +08:00
7deb44f864 feat: Add additional field components to form hook for enhanced functionality 2025-04-26 22:43:51 +08:00
d12e9b81e3 feat: Introduce new form field components and enhance existing ones with label options 2025-04-26 21:50:21 +08:00
b1fbaaed95 refactor: Simplify type checks for form field rendering and correct comment grammar 2025-04-25 18:39:05 +08:00
3f8b0b937c Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-04-25 18:13:57 +08:00
734c62998f feat: Implement dynamic form field rendering and replace SubmitButton with Actions component 2025-04-25 18:13:52 +08:00
4792ca1813 knowledge base node 2025-04-25 17:24:47 +08:00
d4007ae073 r2 2025-04-25 15:49:36 +08:00
389f15f8e3 r2 2025-04-25 14:56:22 +08:00
9437145218 r2 2025-04-25 13:42:57 +08:00
076924bbd6 rag pipeline main 2025-04-25 11:32:17 +08:00
97cf6b2d65 refactor workflow 2025-04-25 11:04:14 +08:00
f317ef2fe2 feat: Refactor NotionConnector integration and add Header component for improved UI in NotionPageSelector 2025-04-24 21:26:54 +08:00
f7de55364f chore: refactor workflow 2025-04-24 16:29:58 +08:00
de30e9278c feat: Refactor Notion and LocalFile components to remove unused VectorSpaceFull prop and improve step indicator logic 2025-04-24 15:47:22 +08:00
b9ab1555fb r2 2025-04-24 15:42:30 +08:00
44b9ce0951 feat: Implement Notion connector and related components for data source selection in the RAG pipeline 2025-04-24 15:32:04 +08:00
d768094376 feat: Refactor file upload configuration and validation logic 2025-04-24 13:46:50 +08:00
93f83086c1 feat: add CustomSelectField component and integrate with input field form 2025-04-23 22:16:19 +08:00
8d9c252811 block selector & data source node 2025-04-22 16:46:33 +08:00
c7f4b41920 r2 2025-04-22 16:08:58 +08:00
efb27eb443 feat: enhance FieldList component with sorting and dynamic input field management 2025-04-22 12:56:22 +08:00
5b8c43052e feat: implement input field dialog and related components for rag pipeline 2025-04-22 11:29:03 +08:00
e04ae927b6 Merge branch 'main' into feat/rag-pipeline 2025-04-22 10:14:28 +08:00
ac68d62d1c Merge branch 'main' into feat/rag-pipeline 2025-04-21 18:07:15 +08:00
caa17b8fe9 rag pipeline store 2025-04-21 17:52:34 +08:00
cd1562ee24 Merge branch 'feat/rag-pipeline' of https://github.com/langgenius/dify into feat/rag-pipeline 2025-04-21 16:58:28 +08:00
47af1a9c42 feat: add InputField component and integrate into RagPipeline panel 2025-04-21 16:58:22 +08:00
0cd6a427af add publisher 2025-04-21 14:41:41 +08:00
51165408ed feat: implement input field form with file upload settings and validation 2025-04-21 09:53:35 +08:00
a2dc38f90a feat: add rag pipeline store slice 2025-04-18 15:51:40 +08:00
a36436b585 feat: add rag pipeline store slice 2025-04-18 15:46:54 +08:00
2d87823fc6 init rag pipeline 2025-04-18 14:56:34 +08:00
d238da9826 Merge branch 'main' into feat/rag-pipeline 2025-04-18 14:00:58 +08:00
6eef5990c9 feat: enhance form components with additional props for validation and tooltips; add OptionsField component 2025-04-18 11:32:23 +08:00
5c4bf2a9e4 r2 2025-04-17 15:07:23 +08:00
0345eb4659 feat: add new form components including CheckboxField, NumberInputField, SelectField, TextField, and SubmitButton with updated input sizes 2025-04-17 13:33:33 +08:00
71f78e0d33 feat: replace existing page content with DemoForm component for improved layout 2025-04-16 14:16:56 +08:00
942648e9e9 feat: implement form components including CheckboxField, SelectField, TextField, and SubmitButton with validation 2025-04-16 14:16:32 +08:00
d841581679 feat: add IndeterminateIcon component and update Checkbox to support indeterminate state
refactor: remove mixed state handling and update related styles
fix: update useCallback dependencies for better performance
2025-04-15 17:30:18 +08:00
9f8e05d9f0 r2 2025-04-14 18:17:17 +08:00
3340775052 r2 2025-04-14 11:10:44 +08:00
9987774471 r2 2025-04-10 18:00:22 +08:00
2575 changed files with 82335 additions and 42987 deletions

View File

@ -1,12 +0,0 @@
version: 2
updates:
- package-ecosystem: "npm"
directory: "/web"
schedule:
interval: "weekly"
open-pull-requests-limit: 2
- package-ecosystem: "uv"
directory: "/api"
schedule:
interval: "weekly"
open-pull-requests-limit: 2

View File

@ -20,60 +20,14 @@ jobs:
cd api
uv sync --dev
# Fix lint errors
uv run ruff check --fix .
uv run ruff check --fix-only .
# Format code
uv run ruff format ..
uv run ruff format .
- name: ast-grep
run: |
uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all
uvx --from ast-grep-cli sg --pattern 'session.query($WHATEVER).filter($HERE)' --rewrite 'session.query($WHATEVER).where($HERE)' -l py --update-all
# Convert Optional[T] to T | None (ignoring quoted types)
cat > /tmp/optional-rule.yml << 'EOF'
id: convert-optional-to-union
language: python
rule:
kind: generic_type
all:
- has:
kind: identifier
pattern: Optional
- has:
kind: type_parameter
has:
kind: type
pattern: $T
fix: $T | None
EOF
uvx --from ast-grep-cli sg scan --inline-rules "$(cat /tmp/optional-rule.yml)" --update-all
# Fix forward references that were incorrectly converted (Python doesn't support "Type" | None syntax)
find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
find . -name "*.py.bak" -type f -delete
- name: mdformat
run: |
uvx mdformat .
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
package_json_file: web/package.json
run_install: false
- name: Setup NodeJS
uses: actions/setup-node@v4
with:
node-version: 22
cache: pnpm
cache-dependency-path: ./web/package.json
- name: Web dependencies
working-directory: ./web
run: pnpm install --frozen-lockfile
- name: oxlint
working-directory: ./web
run: |
pnpx oxlint --fix
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
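
The inline ast-grep rule above rewrites `Optional[T]` to `T | None`, and the follow-up `sed` pass restores quoted forward references, since a quoted name unioned with `None` is not valid at runtime. A minimal before/after sketch, with hypothetical names:

```python
from typing import Optional

class User:
    pass

def find_user_id(name: str) -> Optional[int]:  # the shape the rule matches
    return None

def find_user_id_new(name: str) -> int | None:  # the shape it produces
    return None

def owner() -> Optional["User"]:  # quoted forward reference: reverted by sed,
    return None                   # because "User" | None raises TypeError
```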

View File

@ -8,6 +8,8 @@ on:
- "deploy/enterprise"
- "build/**"
- "release/e-*"
- "deploy/rag-dev"
- "feat/rag-2"
tags:
- "*"

View File

@ -4,7 +4,7 @@ on:
workflow_run:
workflows: ["Build and Push API & Web"]
branches:
- "deploy/dev"
- "deploy/rag-dev"
types:
- completed
@ -12,12 +12,13 @@ jobs:
deploy:
runs-on: ubuntu-latest
if: |
github.event.workflow_run.conclusion == 'success'
github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.head_branch == 'deploy/rag-dev'
steps:
- name: Deploy to server
uses: appleboy/ssh-action@v0.1.8
with:
host: ${{ secrets.SSH_HOST }}
host: ${{ secrets.RAG_SSH_HOST }}
username: ${{ secrets.SSH_USER }}
key: ${{ secrets.SSH_PRIVATE_KEY }}
script: |

View File

@ -19,23 +19,11 @@ jobs:
github.event.workflow_run.head_branch == 'deploy/enterprise'
steps:
- name: trigger deployments
env:
DEV_ENV_ADDRS: ${{ vars.DEV_ENV_ADDRS }}
DEPLOY_SECRET: ${{ secrets.DEPLOY_SECRET }}
run: |
IFS=',' read -ra ENDPOINTS <<< "${DEV_ENV_ADDRS:-}"
BODY='{"project":"dify-api","tag":"deploy-enterprise"}'
for ENDPOINT in "${ENDPOINTS[@]}"; do
ENDPOINT="$(echo "$ENDPOINT" | xargs)"
[ -z "$ENDPOINT" ] && continue
API_SIGNATURE=$(printf '%s' "$BODY" | openssl dgst -sha256 -hmac "$DEPLOY_SECRET" | awk '{print "sha256="$2}')
curl -sSf -X POST \
-H "Content-Type: application/json" \
-H "X-Hub-Signature-256: $API_SIGNATURE" \
-d "$BODY" \
"$ENDPOINT"
done
- name: Deploy to server
uses: appleboy/ssh-action@v0.1.8
with:
host: ${{ secrets.ENTERPRISE_SSH_HOST }}
username: ${{ secrets.ENTERPRISE_SSH_USER }}
password: ${{ secrets.ENTERPRISE_SSH_PASSWORD }}
script: |
${{ vars.ENTERPRISE_SSH_SCRIPT || secrets.ENTERPRISE_SSH_SCRIPT }}
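
One side of this hunk signs the webhook body with HMAC-SHA256 via `openssl` and sends it as `X-Hub-Signature-256`. The same signature computed in Python, as a sketch with a placeholder secret:

```python
import hashlib
import hmac

def sign_body(body: bytes, secret: str) -> str:
    # Same output shape as: openssl dgst -sha256 -hmac "$DEPLOY_SECRET"
    digest = hmac.new(secret.encode(), body, hashlib.sha256).hexdigest()
    return f"sha256={digest}"

body = b'{"project":"dify-api","tag":"deploy-enterprise"}'
print(sign_body(body, "placeholder-secret"))  # value for X-Hub-Signature-256
```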

View File

@ -43,18 +43,10 @@ jobs:
if: steps.changed-files.outputs.any_changed == 'true'
run: uv sync --project api --dev
- name: Run Import Linter
if: steps.changed-files.outputs.any_changed == 'true'
run: uv run --directory api --dev lint-imports
- name: Run Basedpyright Checks
if: steps.changed-files.outputs.any_changed == 'true'
run: dev/basedpyright-check
- name: Run Mypy Type Checks
if: steps.changed-files.outputs.any_changed == 'true'
run: uv --directory api run mypy --exclude-gitignore --exclude 'tests/' --exclude 'migrations/' --check-untyped-defs --disable-error-code=import-untyped .
- name: Dotenv check
if: steps.changed-files.outputs.any_changed == 'true'
run: uv run --project api dotenv-linter ./api/.env.example ./web/.env.example

12
.gitignore vendored
View File

@ -198,7 +198,6 @@ sdks/python-client/dify_client.egg-info
!.vscode/launch.json.template
!.vscode/README.md
api/.vscode
web/.vscode
# vscode Code History Extension
.history
@ -216,18 +215,7 @@ mise.toml
# Next.js build output
.next/
# PWA generated files
web/public/sw.js
web/public/sw.js.map
web/public/workbox-*.js
web/public/workbox-*.js.map
web/public/fallback-*.js
# AI Assistant
.roo/
api/.env.backup
/clickzetta
# Benchmark
scripts/stress-test/setup/config/
scripts/stress-test/reports/

View File

@ -1,87 +0,0 @@
# AGENTS.md
## Project Overview
Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management.
The codebase consists of:
- **Backend API** (`/api`): Python Flask application with Domain-Driven Design architecture
- **Frontend Web** (`/web`): Next.js 15 application with TypeScript and React 19
- **Docker deployment** (`/docker`): Containerized deployment configurations
## Development Commands
### Backend (API)
All Python commands must be prefixed with `uv run --project api`:
```bash
# Start development servers
./dev/start-api # Start API server
./dev/start-worker # Start Celery worker
# Run tests
uv run --project api pytest # Run all tests
uv run --project api pytest tests/unit_tests/ # Unit tests only
uv run --project api pytest tests/integration_tests/ # Integration tests
# Code quality
./dev/reformat # Run all formatters and linters
uv run --project api ruff check --fix ./ # Fix linting issues
uv run --project api ruff format ./ # Format code
uv run --directory api basedpyright # Type checking
```
### Frontend (Web)
```bash
cd web
pnpm lint # Run ESLint
pnpm eslint-fix # Fix ESLint issues
pnpm test # Run Jest tests
```
## Testing Guidelines
### Backend Testing
- Use `pytest` for all backend tests
- Write tests first (TDD approach)
- Test structure: Arrange-Act-Assert
## Code Style Requirements
### Python
- Use type hints for all functions and class attributes
- No `Any` types unless absolutely necessary
- Implement special methods (`__repr__`, `__str__`) appropriately
### TypeScript/JavaScript
- Strict TypeScript configuration
- ESLint with Prettier integration
- Avoid `any` type
## Important Notes
- **Environment Variables**: Always use UV for Python commands: `uv run --project api <command>`
- **Comments**: Only write meaningful comments that explain "why", not "what"
- **File Creation**: Always prefer editing existing files over creating new ones
- **Documentation**: Don't create documentation files unless explicitly requested
- **Code Quality**: Always run `./dev/reformat` before committing backend changes
## Common Development Tasks
### Adding a New API Endpoint
1. Create controller in `/api/controllers/`
1. Add service logic in `/api/services/`
1. Update routes in controller's `__init__.py`
1. Write tests in `/api/tests/`
## Project-Specific Conventions
- All async tasks use Celery with Redis as broker
- **Internationalization**: Frontend supports multiple languages with English (`web/i18n/en-US/`) as the source. All user-facing text must use i18n keys, no hardcoded strings. Edit corresponding module files in `en-US/` directory for translations.

1
AGENTS.md Symbolic link
View File

@ -0,0 +1 @@
CLAUDE.md

View File

@ -1 +0,0 @@
AGENTS.md

89
CLAUDE.md Normal file
View File

@ -0,0 +1,89 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Project Overview
Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management.
The codebase consists of:
- **Backend API** (`/api`): Python Flask application with Domain-Driven Design architecture
- **Frontend Web** (`/web`): Next.js 15 application with TypeScript and React 19
- **Docker deployment** (`/docker`): Containerized deployment configurations
## Development Commands
### Backend (API)
All Python commands must be prefixed with `uv run --project api`:
```bash
# Start development servers
./dev/start-api # Start API server
./dev/start-worker # Start Celery worker
# Run tests
uv run --project api pytest # Run all tests
uv run --project api pytest tests/unit_tests/ # Unit tests only
uv run --project api pytest tests/integration_tests/ # Integration tests
# Code quality
./dev/reformat # Run all formatters and linters
uv run --project api ruff check --fix ./ # Fix linting issues
uv run --project api ruff format ./ # Format code
uv run --directory api basedpyright # Type checking
```
### Frontend (Web)
```bash
cd web
pnpm lint # Run ESLint
pnpm eslint-fix # Fix ESLint issues
pnpm test # Run Jest tests
```
## Testing Guidelines
### Backend Testing
- Use `pytest` for all backend tests
- Write tests first (TDD approach)
- Test structure: Arrange-Act-Assert
## Code Style Requirements
### Python
- Use type hints for all functions and class attributes
- No `Any` types unless absolutely necessary
- Implement special methods (`__repr__`, `__str__`) appropriately
### TypeScript/JavaScript
- Strict TypeScript configuration
- ESLint with Prettier integration
- Avoid `any` type
## Important Notes
- **Environment Variables**: Always use UV for Python commands: `uv run --project api <command>`
- **Comments**: Only write meaningful comments that explain "why", not "what"
- **File Creation**: Always prefer editing existing files over creating new ones
- **Documentation**: Don't create documentation files unless explicitly requested
- **Code Quality**: Always run `./dev/reformat` before committing backend changes
## Common Development Tasks
### Adding a New API Endpoint
1. Create controller in `/api/controllers/`
1. Add service logic in `/api/services/`
1. Update routes in controller's `__init__.py`
1. Write tests in `/api/tests/`
## Project-Specific Conventions
- All async tasks use Celery with Redis as broker
- **Internationalization**: Frontend supports multiple languages with English (`web/i18n/en-US/`) as the source. All user-facing text must use i18n keys, no hardcoded strings. Edit corresponding module files in `en-US/` directory for translations.
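
The testing guideline above calls for Arrange-Act-Assert; a minimal pytest sketch of that shape, with a hypothetical function under test standing in for real service code:

```python
def normalize_language(code: str) -> str:
    # Hypothetical helper; real code would live under /api/services/.
    return code.strip().lower() or "en-us"

def test_normalize_language_defaults_to_en_us():
    # Arrange
    raw = "  "
    # Act
    result = normalize_language(raw)
    # Assert
    assert result == "en-us"
```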

View File

@ -4,13 +4,10 @@ WEB_IMAGE=$(DOCKER_REGISTRY)/dify-web
API_IMAGE=$(DOCKER_REGISTRY)/dify-api
VERSION=latest
# Default target - show help
.DEFAULT_GOAL := help
# Backend Development Environment Setup
.PHONY: dev-setup prepare-docker prepare-web prepare-api
# Dev setup target
# Default dev setup target
dev-setup: prepare-docker prepare-web prepare-api
@echo "✅ Backend development environment setup complete!"
@ -49,27 +46,6 @@ dev-clean:
@rm -rf api/storage
@echo "✅ Cleanup complete"
# Backend Code Quality Commands
format:
@echo "🎨 Running ruff format..."
@uv run --project api --dev ruff format ./api
@echo "✅ Code formatting complete"
check:
@echo "🔍 Running ruff check..."
@uv run --project api --dev ruff check ./api
@echo "✅ Code check complete"
lint:
@echo "🔧 Running ruff format and check with fixes..."
@uv run --directory api --dev sh -c 'ruff format ./api && ruff check --fix ./api'
@echo "✅ Linting complete"
type-check:
@echo "📝 Running type check with basedpyright..."
@uv run --directory api --dev basedpyright
@echo "✅ Type check complete"
# Build Docker images
build-web:
@echo "Building web Docker image: $(WEB_IMAGE):$(VERSION)..."
@ -114,12 +90,6 @@ help:
@echo " make prepare-api - Set up API environment"
@echo " make dev-clean - Stop Docker middleware containers"
@echo ""
@echo "Backend Code Quality:"
@echo " make format - Format code with ruff"
@echo " make check - Check code with ruff"
@echo " make lint - Format and fix code with ruff"
@echo " make type-check - Run type checking with basedpyright"
@echo ""
@echo "Docker Build Targets:"
@echo " make build-web - Build web Docker image"
@echo " make build-api - Build API Docker image"
@ -128,4 +98,4 @@ help:
@echo " make build-push-all - Build and push all Docker images"
# Phony targets
.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help format check lint type-check
.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help

View File

@ -328,7 +328,7 @@ MATRIXONE_DATABASE=dify
LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070
LINDORM_USERNAME=admin
LINDORM_PASSWORD=admin
LINDORM_USING_UGC=True
USING_UGC_INDEX=False
LINDORM_QUERY_TIMEOUT=1
# OceanBase Vector configuration
@ -540,7 +540,6 @@ ENDPOINT_URL_TEMPLATE=http://localhost:5002/e/{hook_id}
# Reset password token expiry minutes
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES=5
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
@ -580,7 +579,3 @@ QUEUE_MONITOR_INTERVAL=30
# Swagger UI configuration
SWAGGER_UI_ENABLED=true
SWAGGER_UI_PATH=/swagger-ui.html
# Whether to encrypt dataset IDs when exporting DSL files (default: true)
# Set to false to export dataset IDs as plain text for easier cross-environment import
DSL_EXPORT_ENCRYPT_DATASET_ID=true

View File

@ -22,15 +22,14 @@ containers =
ignore_imports =
core.workflow.nodes.base.node -> core.workflow.graph_events
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_events
core.workflow.nodes.loop.loop_node -> core.workflow.graph_events
core.workflow.nodes.node_factory -> core.workflow.graph
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_engine
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_engine.command_channels
core.workflow.nodes.loop.loop_node -> core.workflow.graph_events
core.workflow.nodes.loop.loop_node -> core.workflow.graph_engine
core.workflow.nodes.loop.loop_node -> core.workflow.graph
core.workflow.nodes.loop.loop_node -> core.workflow.graph_engine.command_channels
core.workflow.nodes.node_factory -> core.workflow.graph
[importlinter:contract:rsc]
name = RSC
@ -58,9 +57,9 @@ layers =
orchestration
command_processing
event_management
error_handler
error_handling
graph_traversal
graph_state_manager
state_management
worker_management
domain
containers =
@ -87,6 +86,14 @@ forbidden_modules =
core.workflow.graph_engine.command_processing
core.workflow.graph_engine.event_management
[importlinter:contract:error-handling-strategies]
name = Error Handling Strategies
type = independence
modules =
core.workflow.graph_engine.error_handling.abort_strategy
core.workflow.graph_engine.error_handling.retry_strategy
core.workflow.graph_engine.error_handling.fail_branch_strategy
core.workflow.graph_engine.error_handling.default_value_strategy
[importlinter:contract:graph-traversal-components]
name = Graph Traversal Components

View File

@ -5,7 +5,7 @@ line-length = 120
quote-style = "double"
[lint]
preview = true
preview = false
select = [
"B", # flake8-bugbear rules
"C4", # flake8-comprehensions
@ -45,7 +45,6 @@ select = [
"G001", # don't use str format to logging messages
"G003", # don't use + in logging messages
"G004", # don't use f-strings to format logging messages
"UP042", # use StrEnum
]
ignore = [
@ -65,7 +64,6 @@ ignore = [
"B006", # mutable-argument-default
"B007", # unused-loop-control-variable
"B026", # star-arg-unpacking-after-keyword-arg
"B901", # allow return in yield
"B903", # class-as-data-structure
"B904", # raise-without-from-inside-except
"B905", # zip-without-explicit-strict

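For context on the `UP042` entry toggled in this hunk: the rule targets the `class Foo(str, Enum)` idiom and suggests `StrEnum` instead. An illustrative sketch, not project code:

```python
from enum import Enum, StrEnum  # StrEnum requires Python 3.11+

class ColorOld(str, Enum):  # the pattern UP042 flags
    RED = "red"

class ColorNew(StrEnum):    # the suggested replacement
    RED = "red"

assert ColorOld.RED == "red"
assert ColorNew.RED == "red"
```
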
View File

@ -1,4 +1,3 @@
import os
import sys
@ -17,20 +16,20 @@ else:
# It seems that JetBrains Python debugger does not work well with gevent,
# so we need to disable gevent in debug mode.
# If you are using debugpy and set GEVENT_SUPPORT=True, you can debug with gevent.
if (flask_debug := os.environ.get("FLASK_DEBUG", "0")) and flask_debug.lower() in {"false", "0", "no"}:
from gevent import monkey
# if (flask_debug := os.environ.get("FLASK_DEBUG", "0")) and flask_debug.lower() in {"false", "0", "no"}:
# from gevent import monkey
#
# # gevent
# monkey.patch_all()
#
# from grpc.experimental import gevent as grpc_gevent # type: ignore
#
# # grpc gevent
# grpc_gevent.init_gevent()
# gevent
monkey.patch_all()
from grpc.experimental import gevent as grpc_gevent # type: ignore
# grpc gevent
grpc_gevent.init_gevent()
import psycogreen.gevent # type: ignore
psycogreen.gevent.patch_psycopg()
# import psycogreen.gevent # type: ignore
#
# psycogreen.gevent.patch_psycopg()
from app_factory import create_app
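
A condensed sketch of the guard being commented out above, which only enabled gevent patching when `FLASK_DEBUG` was unset or falsy:

```python
import os

def gevent_patching_enabled() -> bool:
    # Patch only outside debug mode: per the comment in app.py, the
    # JetBrains debugger does not work well with gevent.
    flask_debug = os.environ.get("FLASK_DEBUG", "0")
    return bool(flask_debug) and flask_debug.lower() in {"false", "0", "no"}

if gevent_patching_enabled():
    # monkey.patch_all(), grpc_gevent.init_gevent() and psycogreen
    # patching would run here, as in the original file.
    pass
```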

View File

@ -25,9 +25,6 @@ def create_flask_app_with_configs() -> DifyApp:
# add an unique identifier to each request
RecyclableContextVar.increment_thread_recycles()
# Capture the decorator's return value to avoid pyright reportUnusedFunction
_ = before_request
return dify_app

22
api/celery_entrypoint.py Normal file
View File

@ -0,0 +1,22 @@
import logging
import psycogreen.gevent as psycogreen_gevent # type: ignore
from grpc.experimental import gevent as grpc_gevent # type: ignore
_logger = logging.getLogger(__name__)
def _log(message: str):
print(message, flush=True)
# grpc gevent
grpc_gevent.init_gevent()
_log("gRPC patched with gevent.")
psycogreen_gevent.patch_psycopg()
_log("psycopg2 patched with gevent.")
from app import app, celery
__all__ = ["app", "celery"]

View File

@ -1,9 +1,8 @@
import base64
import json
import logging
import operator
import secrets
from typing import Any
from typing import Any, Optional
import click
import sqlalchemy as sa
@ -14,10 +13,14 @@ from sqlalchemy.exc import SQLAlchemyError
from configs import dify_config
from constants.languages import languages
from core.helper import encrypter
from core.plugin.entities.plugin import PluginInstallationSource
from core.plugin.impl.plugin import PluginInstaller
from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.index_processor.constant.built_in_field import BuiltInField
from core.rag.models.document import Document
from core.tools.entities.tool_entities import CredentialType
from core.tools.utils.system_oauth_encryption import encrypt_system_oauth_params
from events.app_event import app_was_created
from extensions.ext_database import db
@ -30,13 +33,16 @@ from models import Tenant
from models.dataset import Dataset, DatasetCollectionBinding, DatasetMetadata, DatasetMetadataBinding, DocumentSegment
from models.dataset import Document as DatasetDocument
from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation
from models.oauth import DatasourceOauthParamConfig, DatasourceProvider
from models.provider import Provider, ProviderModel
from models.provider_ids import ToolProviderID
from models.provider_ids import DatasourceProviderID, ToolProviderID
from models.source import DataSourceApiKeyAuthBinding, DataSourceOauthBinding
from models.tools import ToolOAuthSystemClient
from services.account_service import AccountService, RegisterService, TenantService
from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpiredLogs
from services.plugin.data_migration import PluginDataMigration
from services.plugin.plugin_migration import PluginMigration
from services.plugin.plugin_service import PluginService
from tasks.remove_app_and_related_data_task import delete_draft_variables_batch
logger = logging.getLogger(__name__)
@ -213,9 +219,7 @@ def migrate_annotation_vector_database():
if not dataset_collection_binding:
click.echo(f"App annotation collection binding not found: {app.id}")
continue
annotations = db.session.scalars(
select(MessageAnnotation).where(MessageAnnotation.app_id == app.id)
).all()
annotations = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app.id).all()
dataset = Dataset(
id=app.id,
tenant_id=app.tenant_id,
@ -370,25 +374,29 @@ def migrate_knowledge_vector_database():
)
raise e
dataset_documents = db.session.scalars(
select(DatasetDocument).where(
dataset_documents = (
db.session.query(DatasetDocument)
.where(
DatasetDocument.dataset_id == dataset.id,
DatasetDocument.indexing_status == "completed",
DatasetDocument.enabled == True,
DatasetDocument.archived == False,
)
).all()
.all()
)
documents = []
segments_count = 0
for dataset_document in dataset_documents:
segments = db.session.scalars(
select(DocumentSegment).where(
segments = (
db.session.query(DocumentSegment)
.where(
DocumentSegment.document_id == dataset_document.id,
DocumentSegment.status == "completed",
DocumentSegment.enabled == True,
)
).all()
.all()
)
for segment in segments:
document = Document(
@ -478,12 +486,12 @@ def convert_to_agent_apps():
click.echo(f"Converting app: {app.id}")
try:
app.mode = AppMode.AGENT_CHAT
app.mode = AppMode.AGENT_CHAT.value
db.session.commit()
# update conversation mode to agent
db.session.query(Conversation).where(Conversation.app_id == app.id).update(
{Conversation.mode: AppMode.AGENT_CHAT}
{Conversation.mode: AppMode.AGENT_CHAT.value}
)
db.session.commit()
@ -510,7 +518,7 @@ def add_qdrant_index(field: str):
from qdrant_client.http.exceptions import UnexpectedResponse
from qdrant_client.http.models import PayloadSchemaType
from core.rag.datasource.vdb.qdrant.qdrant_vector import PathQdrantParams, QdrantConfig
from core.rag.datasource.vdb.qdrant.qdrant_vector import QdrantConfig
for binding in bindings:
if dify_config.QDRANT_URL is None:
@ -524,21 +532,7 @@ def add_qdrant_index(field: str):
prefer_grpc=dify_config.QDRANT_GRPC_ENABLED,
)
try:
params = qdrant_config.to_qdrant_params()
# Check the type before using
if isinstance(params, PathQdrantParams):
# PathQdrantParams case
client = qdrant_client.QdrantClient(path=params.path)
else:
# UrlQdrantParams case - params is UrlQdrantParams
client = qdrant_client.QdrantClient(
url=params.url,
api_key=params.api_key,
timeout=int(params.timeout),
verify=params.verify,
grpc_port=params.grpc_port,
prefer_grpc=params.prefer_grpc,
)
client = qdrant_client.QdrantClient(**qdrant_config.to_qdrant_params())
# create payload index
client.create_payload_index(binding.collection_name, field, field_schema=PayloadSchemaType.KEYWORD)
create_count += 1
@ -640,7 +634,7 @@ def old_metadata_migration():
@click.option("--email", prompt=True, help="Tenant account email.")
@click.option("--name", prompt=True, help="Workspace name.")
@click.option("--language", prompt=True, help="Account language, default: en-US.")
def create_tenant(email: str, language: str | None = None, name: str | None = None):
def create_tenant(email: str, language: Optional[str] = None, name: Optional[str] = None):
"""
Create tenant account
"""
@ -954,7 +948,7 @@ def clear_orphaned_file_records(force: bool):
click.echo(click.style("- Deleting orphaned message_files records", fg="white"))
query = "DELETE FROM message_files WHERE id IN :ids"
with db.engine.begin() as conn:
conn.execute(sa.text(query), {"ids": tuple(record["id"] for record in orphaned_message_files)})
conn.execute(sa.text(query), {"ids": tuple([record["id"] for record in orphaned_message_files])})
click.echo(
click.style(f"Removed {len(orphaned_message_files)} orphaned message_files records.", fg="green")
)
@ -1246,15 +1240,17 @@ def _find_orphaned_draft_variables(batch_size: int = 1000) -> list[str]:
def _count_orphaned_draft_variables() -> dict[str, Any]:
"""
Count orphaned draft variables by app.
Count orphaned draft variables by app, including associated file counts.
Returns:
Dictionary with statistics about orphaned variables
Dictionary with statistics about orphaned variables and files
"""
query = """
# Count orphaned variables by app
variables_query = """
SELECT
wdv.app_id,
COUNT(*) as variable_count
COUNT(*) as variable_count,
COUNT(wdv.file_id) as file_count
FROM workflow_draft_variables AS wdv
WHERE NOT EXISTS(
SELECT 1 FROM apps WHERE apps.id = wdv.app_id
@ -1264,14 +1260,21 @@ def _count_orphaned_draft_variables() -> dict[str, Any]:
"""
with db.engine.connect() as conn:
result = conn.execute(sa.text(query))
orphaned_by_app = {row[0]: row[1] for row in result}
result = conn.execute(sa.text(variables_query))
orphaned_by_app = {}
total_files = 0
total_orphaned = sum(orphaned_by_app.values())
for row in result:
app_id, variable_count, file_count = row
orphaned_by_app[app_id] = {"variables": variable_count, "files": file_count}
total_files += file_count
total_orphaned = sum(app_data["variables"] for app_data in orphaned_by_app.values())
app_count = len(orphaned_by_app)
return {
"total_orphaned_variables": total_orphaned,
"total_orphaned_files": total_files,
"orphaned_app_count": app_count,
"orphaned_by_app": orphaned_by_app,
}
@ -1300,6 +1303,7 @@ def cleanup_orphaned_draft_variables(
stats = _count_orphaned_draft_variables()
logger.info("Found %s orphaned draft variables", stats["total_orphaned_variables"])
logger.info("Found %s associated offload files", stats["total_orphaned_files"])
logger.info("Across %s non-existent apps", stats["orphaned_app_count"])
if stats["total_orphaned_variables"] == 0:
@ -1308,10 +1312,10 @@ def cleanup_orphaned_draft_variables(
if dry_run:
logger.info("DRY RUN: Would delete the following:")
for app_id, count in sorted(stats["orphaned_by_app"].items(), key=operator.itemgetter(1), reverse=True)[
for app_id, data in sorted(stats["orphaned_by_app"].items(), key=lambda x: x[1]["variables"], reverse=True)[
:10
]: # Show top 10
logger.info(" App %s: %s variables", app_id, count)
logger.info(" App %s: %s variables, %s files", app_id, data["variables"], data["files"])
if len(stats["orphaned_by_app"]) > 10:
logger.info(" ... and %s more apps", len(stats["orphaned_by_app"]) - 10)
return
@ -1320,7 +1324,8 @@ def cleanup_orphaned_draft_variables(
if not force:
click.confirm(
f"Are you sure you want to delete {stats['total_orphaned_variables']} "
f"orphaned draft variables from {stats['orphaned_app_count']} apps?",
f"orphaned draft variables and {stats['total_orphaned_files']} associated files "
f"from {stats['orphaned_app_count']} apps?",
abort=True,
)
@ -1353,3 +1358,231 @@ def cleanup_orphaned_draft_variables(
continue
logger.info("Cleanup completed. Total deleted: %s variables across %s apps", total_deleted, processed_apps)
@click.command("setup-datasource-oauth-client", help="Setup datasource oauth client.")
@click.option("--provider", prompt=True, help="Provider name")
@click.option("--client-params", prompt=True, help="Client Params")
def setup_datasource_oauth_client(provider, client_params):
"""
Setup datasource oauth client
"""
provider_id = DatasourceProviderID(provider)
provider_name = provider_id.provider_name
plugin_id = provider_id.plugin_id
try:
# json validate
click.echo(click.style(f"Validating client params: {client_params}", fg="yellow"))
client_params_dict = TypeAdapter(dict[str, Any]).validate_json(client_params)
click.echo(click.style("Client params validated successfully.", fg="green"))
except Exception as e:
click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red"))
return
click.echo(click.style(f"Ready to delete existing oauth client params: {provider_name}", fg="yellow"))
deleted_count = (
db.session.query(DatasourceOauthParamConfig)
.filter_by(
provider=provider_name,
plugin_id=plugin_id,
)
.delete()
)
if deleted_count > 0:
click.echo(click.style(f"Deleted {deleted_count} existing oauth client params.", fg="yellow"))
click.echo(click.style(f"Ready to setup datasource oauth client: {provider_name}", fg="yellow"))
oauth_client = DatasourceOauthParamConfig(
provider=provider_name,
plugin_id=plugin_id,
system_credentials=client_params_dict,
)
db.session.add(oauth_client)
db.session.commit()
click.echo(click.style(f"provider: {provider_name}", fg="green"))
click.echo(click.style(f"plugin_id: {plugin_id}", fg="green"))
click.echo(click.style(f"params: {json.dumps(client_params_dict, indent=2, ensure_ascii=False)}", fg="green"))
click.echo(click.style(f"Datasource oauth client setup successfully. id: {oauth_client.id}", fg="green"))
@click.command("transform-datasource-credentials", help="Transform datasource credentials.")
def transform_datasource_credentials():
"""
Transform datasource credentials
"""
try:
installer_manager = PluginInstaller()
plugin_migration = PluginMigration()
notion_plugin_id = "langgenius/notion_datasource"
firecrawl_plugin_id = "langgenius/firecrawl_datasource"
jina_plugin_id = "langgenius/jina_datasource"
notion_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(notion_plugin_id)
firecrawl_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(firecrawl_plugin_id)
jina_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(jina_plugin_id)
oauth_credential_type = CredentialType.OAUTH2
api_key_credential_type = CredentialType.API_KEY
# deal notion credentials
deal_notion_count = 0
notion_credentials = db.session.query(DataSourceOauthBinding).filter_by(provider="notion").all()
if notion_credentials:
notion_credentials_tenant_mapping: dict[str, list[DataSourceOauthBinding]] = {}
for credential in notion_credentials:
tenant_id = credential.tenant_id
if tenant_id not in notion_credentials_tenant_mapping:
notion_credentials_tenant_mapping[tenant_id] = []
notion_credentials_tenant_mapping[tenant_id].append(credential)
for tenant_id, credentials in notion_credentials_tenant_mapping.items():
# check notion plugin is installed
installed_plugins = installer_manager.list_plugins(tenant_id)
installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
if notion_plugin_id not in installed_plugins_ids:
if notion_plugin_unique_identifier:
# install notion plugin
PluginService.install_from_marketplace_pkg(tenant_id, [notion_plugin_unique_identifier])
auth_count = 0
for credential in credentials:
auth_count += 1
# get credential oauth params
access_token = credential.access_token
# notion info
notion_info = credential.source_info
workspace_id = notion_info.get("workspace_id")
workspace_name = notion_info.get("workspace_name")
workspace_icon = notion_info.get("workspace_icon")
new_credentials = {
"integration_secret": encrypter.encrypt_token(tenant_id, access_token),
"workspace_id": workspace_id,
"workspace_name": workspace_name,
"workspace_icon": workspace_icon,
}
datasource_provider = DatasourceProvider(
provider="notion_datasource",
tenant_id=tenant_id,
plugin_id=notion_plugin_id,
auth_type=oauth_credential_type.value,
encrypted_credentials=new_credentials,
name=f"Auth {auth_count}",
avatar_url=workspace_icon or "default",
is_default=False,
)
db.session.add(datasource_provider)
deal_notion_count += 1
db.session.commit()
# deal firecrawl credentials
deal_firecrawl_count = 0
firecrawl_credentials = db.session.query(DataSourceApiKeyAuthBinding).filter_by(provider="firecrawl").all()
if firecrawl_credentials:
firecrawl_credentials_tenant_mapping: dict[str, list[DataSourceApiKeyAuthBinding]] = {}
for credential in firecrawl_credentials:
tenant_id = credential.tenant_id
if tenant_id not in firecrawl_credentials_tenant_mapping:
firecrawl_credentials_tenant_mapping[tenant_id] = []
firecrawl_credentials_tenant_mapping[tenant_id].append(credential)
for tenant_id, credentials in firecrawl_credentials_tenant_mapping.items():
# check firecrawl plugin is installed
installed_plugins = installer_manager.list_plugins(tenant_id)
installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
if firecrawl_plugin_id not in installed_plugins_ids:
if firecrawl_plugin_unique_identifier:
# install firecrawl plugin
PluginService.install_from_marketplace_pkg(tenant_id, [firecrawl_plugin_unique_identifier])
auth_count = 0
for credential in credentials:
auth_count += 1
# get credential api key
credentials_json = json.loads(credential.credentials)
api_key = credentials_json.get("config", {}).get("api_key")
base_url = credentials_json.get("config", {}).get("base_url")
new_credentials = {
"firecrawl_api_key": api_key,
"base_url": base_url,
}
datasource_provider = DatasourceProvider(
provider="firecrawl",
tenant_id=tenant_id,
plugin_id=firecrawl_plugin_id,
auth_type=api_key_credential_type.value,
encrypted_credentials=new_credentials,
name=f"Auth {auth_count}",
avatar_url="default",
is_default=False,
)
db.session.add(datasource_provider)
deal_firecrawl_count += 1
db.session.commit()
# deal jina credentials
deal_jina_count = 0
jina_credentials = db.session.query(DataSourceApiKeyAuthBinding).filter_by(provider="jinareader").all()
if jina_credentials:
jina_credentials_tenant_mapping: dict[str, list[DataSourceApiKeyAuthBinding]] = {}
for credential in jina_credentials:
tenant_id = credential.tenant_id
if tenant_id not in jina_credentials_tenant_mapping:
jina_credentials_tenant_mapping[tenant_id] = []
jina_credentials_tenant_mapping[tenant_id].append(credential)
for tenant_id, credentials in jina_credentials_tenant_mapping.items():
# check jina plugin is installed
installed_plugins = installer_manager.list_plugins(tenant_id)
installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
if jina_plugin_id not in installed_plugins_ids:
if jina_plugin_unique_identifier:
# install jina plugin
print(jina_plugin_unique_identifier)
PluginService.install_from_marketplace_pkg(tenant_id, [jina_plugin_unique_identifier])
auth_count = 0
for credential in credentials:
auth_count += 1
# get credential api key
credentials_json = json.loads(credential.credentials)
api_key = credentials_json.get("config", {}).get("api_key")
new_credentials = {
"integration_secret": api_key,
}
datasource_provider = DatasourceProvider(
provider="jina",
tenant_id=tenant_id,
plugin_id=jina_plugin_id,
auth_type=api_key_credential_type.value,
encrypted_credentials=new_credentials,
name=f"Auth {auth_count}",
avatar_url="default",
is_default=False,
)
db.session.add(datasource_provider)
deal_jina_count += 1
db.session.commit()
except Exception as e:
click.echo(click.style(f"Error transforming datasource credentials: {str(e)}", fg="red"))
return
click.echo(click.style(f"Transforming notion successfully. deal_notion_count: {deal_notion_count}", fg="green"))
click.echo(
click.style(f"Transforming firecrawl successfully. deal_firecrawl_count: {deal_firecrawl_count}", fg="green")
)
click.echo(click.style(f"Transforming jina successfully. deal_jina_count: {deal_jina_count}", fg="green"))
@click.command("install-rag-pipeline-plugins", help="Install rag pipeline plugins.")
@click.option(
"--input_file", prompt=True, help="The file containing the extracted plugin unique identifiers.", default="plugins.jsonl"
)
@click.option(
"--output_file", prompt=True, help="The file to store the installed plugins.", default="installed_plugins.jsonl"
)
@click.option("--workers", prompt=True, help="The number of workers to install plugins.", default=100)
def install_rag_pipeline_plugins(input_file, output_file, workers):
"""
Install rag pipeline plugins
"""
click.echo(click.style("Installing rag pipeline plugins", fg="yellow"))
plugin_migration = PluginMigration()
plugin_migration.install_rag_pipeline_plugins(
input_file,
output_file,
workers,
)
click.echo(click.style("Installing rag pipeline plugins successfully", fg="green"))
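
The `--client-params` JSON validation used by `setup_datasource_oauth_client` above relies on pydantic's `TypeAdapter`; in isolation, with a made-up payload:

```python
from typing import Any

from pydantic import TypeAdapter

raw = '{"client_id": "abc", "client_secret": "xyz"}'  # hypothetical payload
params: dict[str, Any] = TypeAdapter(dict[str, Any]).validate_json(raw)
assert params["client_id"] == "abc"
```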

View File

@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@ -7,28 +9,28 @@ class NotionConfig(BaseSettings):
Configuration settings for Notion integration
"""
NOTION_CLIENT_ID: str | None = Field(
NOTION_CLIENT_ID: Optional[str] = Field(
description="Client ID for Notion API authentication. Required for OAuth 2.0 flow.",
default=None,
)
NOTION_CLIENT_SECRET: str | None = Field(
NOTION_CLIENT_SECRET: Optional[str] = Field(
description="Client secret for Notion API authentication. Required for OAuth 2.0 flow.",
default=None,
)
NOTION_INTEGRATION_TYPE: str | None = Field(
NOTION_INTEGRATION_TYPE: Optional[str] = Field(
description="Type of Notion integration."
" Set to 'internal' for internal integrations, or None for public integrations.",
default=None,
)
NOTION_INTERNAL_SECRET: str | None = Field(
NOTION_INTERNAL_SECRET: Optional[str] = Field(
description="Secret key for internal Notion integrations. Required when NOTION_INTEGRATION_TYPE is 'internal'.",
default=None,
)
NOTION_INTEGRATION_TOKEN: str | None = Field(
NOTION_INTEGRATION_TOKEN: Optional[str] = Field(
description="Integration token for Notion API access. Used for direct API calls without OAuth flow.",
default=None,
)

View File

@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeFloat
from pydantic_settings import BaseSettings
@ -7,7 +9,7 @@ class SentryConfig(BaseSettings):
Configuration settings for Sentry error tracking and performance monitoring
"""
SENTRY_DSN: str | None = Field(
SENTRY_DSN: Optional[str] = Field(
description="Sentry Data Source Name (DSN)."
" This is the unique identifier of your Sentry project, used to send events to the correct project.",
default=None,
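
The swap between `str | None` and `Optional[str]` that runs through these config files is purely notational: both spellings produce the same pydantic field. A minimal sketch with hypothetical settings:

```python
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings

class ExampleConfig(BaseSettings):
    PEP604_STYLE: str | None = Field(description="X | None spelling", default=None)
    TYPING_STYLE: Optional[str] = Field(description="Optional[X] spelling", default=None)

cfg = ExampleConfig()
assert cfg.PEP604_STYLE is None and cfg.TYPING_STYLE is None
```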

View File

@ -1,4 +1,4 @@
from typing import Literal
from typing import Literal, Optional
from pydantic import (
AliasChoices,
@ -31,12 +31,6 @@ class SecurityConfig(BaseSettings):
description="Duration in minutes for which a password reset token remains valid",
default=5,
)
EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
description="Duration in minutes for which an email registration token remains valid",
default=5,
)
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
description="Duration in minutes for which a change email token remains valid",
default=5,
@ -57,7 +51,7 @@ class SecurityConfig(BaseSettings):
default=False,
)
ADMIN_API_KEY: str | None = Field(
ADMIN_API_KEY: Optional[str] = Field(
description="admin api key for authentication",
default=None,
)
@ -97,17 +91,17 @@ class CodeExecutionSandboxConfig(BaseSettings):
default="dify-sandbox",
)
CODE_EXECUTION_CONNECT_TIMEOUT: float | None = Field(
CODE_EXECUTION_CONNECT_TIMEOUT: Optional[float] = Field(
description="Connection timeout in seconds for code execution requests",
default=10.0,
)
CODE_EXECUTION_READ_TIMEOUT: float | None = Field(
CODE_EXECUTION_READ_TIMEOUT: Optional[float] = Field(
description="Read timeout in seconds for code execution requests",
default=60.0,
)
CODE_EXECUTION_WRITE_TIMEOUT: float | None = Field(
CODE_EXECUTION_WRITE_TIMEOUT: Optional[float] = Field(
description="Write timeout in seconds for code execution request",
default=10.0,
)
@ -368,17 +362,17 @@ class HttpConfig(BaseSettings):
default=3,
)
SSRF_PROXY_ALL_URL: str | None = Field(
SSRF_PROXY_ALL_URL: Optional[str] = Field(
description="Proxy URL for HTTP or HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
default=None,
)
SSRF_PROXY_HTTP_URL: str | None = Field(
SSRF_PROXY_HTTP_URL: Optional[str] = Field(
description="Proxy URL for HTTP requests to prevent Server-Side Request Forgery (SSRF)",
default=None,
)
SSRF_PROXY_HTTPS_URL: str | None = Field(
SSRF_PROXY_HTTPS_URL: Optional[str] = Field(
description="Proxy URL for HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
default=None,
)
@ -420,7 +414,7 @@ class InnerAPIConfig(BaseSettings):
default=False,
)
INNER_API_KEY: str | None = Field(
INNER_API_KEY: Optional[str] = Field(
description="API key for accessing the internal API",
default=None,
)
@ -436,7 +430,7 @@ class LoggingConfig(BaseSettings):
default="INFO",
)
LOG_FILE: str | None = Field(
LOG_FILE: Optional[str] = Field(
description="File path for log output.",
default=None,
)
@ -456,12 +450,12 @@ class LoggingConfig(BaseSettings):
default="%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s",
)
LOG_DATEFORMAT: str | None = Field(
LOG_DATEFORMAT: Optional[str] = Field(
description="Date format string for log timestamps",
default=None,
)
LOG_TZ: str | None = Field(
LOG_TZ: Optional[str] = Field(
description="Timezone for log timestamps (e.g., 'America/New_York')",
default="UTC",
)
@ -505,6 +499,22 @@ class UpdateConfig(BaseSettings):
)
class WorkflowVariableTruncationConfig(BaseSettings):
WORKFLOW_VARIABLE_TRUNCATION_MAX_SIZE: PositiveInt = Field(
# 1,024,000 bytes (~1 MB)
1024_000,
description="Maximum size for a variable to trigger final truncation.",
)
WORKFLOW_VARIABLE_TRUNCATION_STRING_LENGTH: PositiveInt = Field(
50000,
description="Maximum length for a string to trigger truncation, measured in number of characters.",
)
WORKFLOW_VARIABLE_TRUNCATION_ARRAY_LENGTH: PositiveInt = Field(
100,
description="Maximum length for an array to trigger truncation.",
)
class WorkflowConfig(BaseSettings):
"""
Configuration for workflow execution
@ -617,22 +627,22 @@ class AuthConfig(BaseSettings):
default="/console/api/oauth/authorize",
)
GITHUB_CLIENT_ID: str | None = Field(
GITHUB_CLIENT_ID: Optional[str] = Field(
description="GitHub OAuth client ID",
default=None,
)
GITHUB_CLIENT_SECRET: str | None = Field(
GITHUB_CLIENT_SECRET: Optional[str] = Field(
description="GitHub OAuth client secret",
default=None,
)
GOOGLE_CLIENT_ID: str | None = Field(
GOOGLE_CLIENT_ID: Optional[str] = Field(
description="Google OAuth client ID",
default=None,
)
GOOGLE_CLIENT_SECRET: str | None = Field(
GOOGLE_CLIENT_SECRET: Optional[str] = Field(
description="Google OAuth client secret",
default=None,
)
@ -667,11 +677,6 @@ class AuthConfig(BaseSettings):
default=86400,
)
EMAIL_REGISTER_LOCKOUT_DURATION: PositiveInt = Field(
description="Time (in seconds) a user must wait before retrying email registration after exceeding the rate limit.",
default=86400,
)
class ModerationConfig(BaseSettings):
"""
@ -700,42 +705,42 @@ class MailConfig(BaseSettings):
Configuration for email services
"""
MAIL_TYPE: str | None = Field(
MAIL_TYPE: Optional[str] = Field(
description="Email service provider type ('smtp', 'resend', or 'sendGrid'); defaults to None.",
default=None,
)
MAIL_DEFAULT_SEND_FROM: str | None = Field(
MAIL_DEFAULT_SEND_FROM: Optional[str] = Field(
description="Default email address to use as the sender",
default=None,
)
RESEND_API_KEY: str | None = Field(
RESEND_API_KEY: Optional[str] = Field(
description="API key for Resend email service",
default=None,
)
RESEND_API_URL: str | None = Field(
RESEND_API_URL: Optional[str] = Field(
description="API URL for Resend email service",
default=None,
)
SMTP_SERVER: str | None = Field(
SMTP_SERVER: Optional[str] = Field(
description="SMTP server hostname",
default=None,
)
SMTP_PORT: int | None = Field(
SMTP_PORT: Optional[int] = Field(
description="SMTP server port number",
default=465,
)
SMTP_USERNAME: str | None = Field(
SMTP_USERNAME: Optional[str] = Field(
description="Username for SMTP authentication",
default=None,
)
SMTP_PASSWORD: str | None = Field(
SMTP_PASSWORD: Optional[str] = Field(
description="Password for SMTP authentication",
default=None,
)
@ -755,7 +760,7 @@ class MailConfig(BaseSettings):
default=50,
)
SENDGRID_API_KEY: str | None = Field(
SENDGRID_API_KEY: Optional[str] = Field(
description="API key for SendGrid service",
default=None,
)
@ -778,17 +783,17 @@ class RagEtlConfig(BaseSettings):
default="database",
)
UNSTRUCTURED_API_URL: str | None = Field(
UNSTRUCTURED_API_URL: Optional[str] = Field(
description="API URL for Unstructured.io service",
default=None,
)
UNSTRUCTURED_API_KEY: str | None = Field(
UNSTRUCTURED_API_KEY: Optional[str] = Field(
description="API key for Unstructured.io service",
default="",
)
SCARF_NO_ANALYTICS: str | None = Field(
SCARF_NO_ANALYTICS: Optional[str] = Field(
description="Whether to disable Scarf analytics in the Unstructured library.",
default="false",
)
@ -829,11 +834,6 @@ class DataSetConfig(BaseSettings):
default=30,
)
DSL_EXPORT_ENCRYPT_DATASET_ID: bool = Field(
description="Enable or disable dataset ID encryption when exporting DSL files",
default=True,
)
class WorkspaceConfig(BaseSettings):
"""
@ -1063,5 +1063,6 @@ class FeatureConfig(
CeleryBeatConfig,
CeleryScheduleTasksConfig,
WorkflowLogConfig,
WorkflowVariableTruncationConfig,
):
pass
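
A hypothetical reading of how the three `WorkflowVariableTruncationConfig` thresholds above might be applied; the engine's actual truncation logic is not part of this diff:

```python
MAX_SIZE = 1024_000    # WORKFLOW_VARIABLE_TRUNCATION_MAX_SIZE (bytes)
STRING_LENGTH = 50000  # WORKFLOW_VARIABLE_TRUNCATION_STRING_LENGTH (characters)
ARRAY_LENGTH = 100     # WORKFLOW_VARIABLE_TRUNCATION_ARRAY_LENGTH (elements)

def truncate(value: object) -> object:
    # Illustrative only: clip oversized strings and arrays to their limits.
    if isinstance(value, str) and len(value) > STRING_LENGTH:
        return value[:STRING_LENGTH]
    if isinstance(value, list) and len(value) > ARRAY_LENGTH:
        return value[:ARRAY_LENGTH]
    return value

assert len(truncate("x" * 60000)) == STRING_LENGTH
```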

View File

@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt
from pydantic_settings import BaseSettings
@ -38,17 +40,17 @@ class HostedOpenAiConfig(BaseSettings):
Configuration for hosted OpenAI service
"""
HOSTED_OPENAI_API_KEY: str | None = Field(
HOSTED_OPENAI_API_KEY: Optional[str] = Field(
description="API key for hosted OpenAI service",
default=None,
)
HOSTED_OPENAI_API_BASE: str | None = Field(
HOSTED_OPENAI_API_BASE: Optional[str] = Field(
description="Base URL for hosted OpenAI API",
default=None,
)
HOSTED_OPENAI_API_ORGANIZATION: str | None = Field(
HOSTED_OPENAI_API_ORGANIZATION: Optional[str] = Field(
description="Organization ID for hosted OpenAI service",
default=None,
)
@ -108,12 +110,12 @@ class HostedAzureOpenAiConfig(BaseSettings):
default=False,
)
HOSTED_AZURE_OPENAI_API_KEY: str | None = Field(
HOSTED_AZURE_OPENAI_API_KEY: Optional[str] = Field(
description="API key for hosted Azure OpenAI service",
default=None,
)
HOSTED_AZURE_OPENAI_API_BASE: str | None = Field(
HOSTED_AZURE_OPENAI_API_BASE: Optional[str] = Field(
description="Base URL for hosted Azure OpenAI API",
default=None,
)
@ -129,12 +131,12 @@ class HostedAnthropicConfig(BaseSettings):
Configuration for hosted Anthropic service
"""
HOSTED_ANTHROPIC_API_BASE: str | None = Field(
HOSTED_ANTHROPIC_API_BASE: Optional[str] = Field(
description="Base URL for hosted Anthropic API",
default=None,
)
HOSTED_ANTHROPIC_API_KEY: str | None = Field(
HOSTED_ANTHROPIC_API_KEY: Optional[str] = Field(
description="API key for hosted Anthropic service",
default=None,
)
@ -220,11 +222,28 @@ class HostedFetchAppTemplateConfig(BaseSettings):
)
class HostedFetchPipelineTemplateConfig(BaseSettings):
"""
Configuration for fetching pipeline templates
"""
HOSTED_FETCH_PIPELINE_TEMPLATES_MODE: str = Field(
description="Mode for fetching pipeline templates: remote, db, or builtin; defaults to remote.",
default="remote",
)
HOSTED_FETCH_PIPELINE_TEMPLATES_REMOTE_DOMAIN: str = Field(
description="Domain for fetching remote pipeline templates",
default="https://tmpl.dify.ai",
)
class HostedServiceConfig(
# place the configs in alphabet order
HostedAnthropicConfig,
HostedAzureOpenAiConfig,
HostedFetchAppTemplateConfig,
HostedFetchPipelineTemplateConfig,
HostedMinmaxConfig,
HostedOpenAiConfig,
HostedSparkConfig,

View File

@ -1,5 +1,5 @@
import os
from typing import Any, Literal
from typing import Any, Literal, Optional
from urllib.parse import parse_qsl, quote_plus
from pydantic import Field, NonNegativeFloat, NonNegativeInt, PositiveFloat, PositiveInt, computed_field
@ -78,18 +78,18 @@ class StorageConfig(BaseSettings):
class VectorStoreConfig(BaseSettings):
VECTOR_STORE: str | None = Field(
VECTOR_STORE: Optional[str] = Field(
description="Type of vector store to use for efficient similarity search."
" Set to None if not using a vector store.",
default=None,
)
VECTOR_STORE_WHITELIST_ENABLE: bool | None = Field(
VECTOR_STORE_WHITELIST_ENABLE: Optional[bool] = Field(
description="Enable whitelist for vector store.",
default=False,
)
VECTOR_INDEX_NAME_PREFIX: str | None = Field(
VECTOR_INDEX_NAME_PREFIX: Optional[str] = Field(
description="Prefix used to create collection name in vector database",
default="Vector_index",
)
@ -225,26 +225,26 @@ class CeleryConfig(DatabaseConfig):
default="redis",
)
CELERY_BROKER_URL: str | None = Field(
CELERY_BROKER_URL: Optional[str] = Field(
description="URL of the message broker for Celery tasks.",
default=None,
)
CELERY_USE_SENTINEL: bool | None = Field(
CELERY_USE_SENTINEL: Optional[bool] = Field(
description="Whether to use Redis Sentinel for high availability.",
default=False,
)
CELERY_SENTINEL_MASTER_NAME: str | None = Field(
CELERY_SENTINEL_MASTER_NAME: Optional[str] = Field(
description="Name of the Redis Sentinel master.",
default=None,
)
CELERY_SENTINEL_PASSWORD: str | None = Field(
CELERY_SENTINEL_PASSWORD: Optional[str] = Field(
description="Password of the Redis Sentinel master.",
default=None,
)
CELERY_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
CELERY_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
description="Timeout for Redis Sentinel socket operations in seconds.",
default=0.1,
)
@ -268,12 +268,12 @@ class InternalTestConfig(BaseSettings):
Configuration settings for Internal Test
"""
AWS_SECRET_ACCESS_KEY: str | None = Field(
AWS_SECRET_ACCESS_KEY: Optional[str] = Field(
description="Internal test AWS secret access key",
default=None,
)
AWS_ACCESS_KEY_ID: str | None = Field(
AWS_ACCESS_KEY_ID: Optional[str] = Field(
description="Internal test AWS access key ID",
default=None,
)
@ -284,15 +284,15 @@ class DatasetQueueMonitorConfig(BaseSettings):
Configuration settings for Dataset Queue Monitor
"""
QUEUE_MONITOR_THRESHOLD: NonNegativeInt | None = Field(
QUEUE_MONITOR_THRESHOLD: Optional[NonNegativeInt] = Field(
description="Threshold for dataset queue monitor",
default=200,
)
QUEUE_MONITOR_ALERT_EMAILS: str | None = Field(
QUEUE_MONITOR_ALERT_EMAILS: Optional[str] = Field(
description="Emails for dataset queue monitor alert, separated by commas",
default=None,
)
QUEUE_MONITOR_INTERVAL: NonNegativeFloat | None = Field(
QUEUE_MONITOR_INTERVAL: Optional[NonNegativeFloat] = Field(
description="Interval for dataset queue monitor in minutes",
default=30,
)
@ -300,7 +300,8 @@ class DatasetQueueMonitorConfig(BaseSettings):
class MiddlewareConfig(
# place the configs in alphabet order
CeleryConfig, # Note: CeleryConfig already inherits from DatabaseConfig
CeleryConfig,
DatabaseConfig,
KeywordStoreConfig,
RedisConfig,
# configs of storage and storage providers
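
Either spelling of `MiddlewareConfig`'s bases in this hunk is equivalent: `CeleryConfig` already inherits `DatabaseConfig`, so listing it explicitly leaves the method resolution order unchanged. A toy demonstration:

```python
class DatabaseConfig: ...
class CeleryConfig(DatabaseConfig): ...

class WithExplicitBase(CeleryConfig, DatabaseConfig): ...
class WithoutExplicitBase(CeleryConfig): ...

# Identical resolution order after the class itself:
assert WithExplicitBase.__mro__[1:] == WithoutExplicitBase.__mro__[1:]
```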

View File

@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt
from pydantic_settings import BaseSettings
@ -17,12 +19,12 @@ class RedisConfig(BaseSettings):
default=6379,
)
REDIS_USERNAME: str | None = Field(
REDIS_USERNAME: Optional[str] = Field(
description="Username for Redis authentication (if required)",
default=None,
)
REDIS_PASSWORD: str | None = Field(
REDIS_PASSWORD: Optional[str] = Field(
description="Password for Redis authentication (if required)",
default=None,
)
@ -42,47 +44,47 @@ class RedisConfig(BaseSettings):
default="CERT_NONE",
)
REDIS_SSL_CA_CERTS: str | None = Field(
REDIS_SSL_CA_CERTS: Optional[str] = Field(
description="Path to the CA certificate file for SSL verification",
default=None,
)
REDIS_SSL_CERTFILE: str | None = Field(
REDIS_SSL_CERTFILE: Optional[str] = Field(
description="Path to the client certificate file for SSL authentication",
default=None,
)
REDIS_SSL_KEYFILE: str | None = Field(
REDIS_SSL_KEYFILE: Optional[str] = Field(
description="Path to the client private key file for SSL authentication",
default=None,
)
REDIS_USE_SENTINEL: bool | None = Field(
REDIS_USE_SENTINEL: Optional[bool] = Field(
description="Enable Redis Sentinel mode for high availability",
default=False,
)
REDIS_SENTINELS: str | None = Field(
REDIS_SENTINELS: Optional[str] = Field(
description="Comma-separated list of Redis Sentinel nodes (host:port)",
default=None,
)
REDIS_SENTINEL_SERVICE_NAME: str | None = Field(
REDIS_SENTINEL_SERVICE_NAME: Optional[str] = Field(
description="Name of the Redis Sentinel service to monitor",
default=None,
)
REDIS_SENTINEL_USERNAME: str | None = Field(
REDIS_SENTINEL_USERNAME: Optional[str] = Field(
description="Username for Redis Sentinel authentication (if required)",
default=None,
)
REDIS_SENTINEL_PASSWORD: str | None = Field(
REDIS_SENTINEL_PASSWORD: Optional[str] = Field(
description="Password for Redis Sentinel authentication (if required)",
default=None,
)
REDIS_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
REDIS_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
description="Socket timeout in seconds for Redis Sentinel connections",
default=0.1,
)
@@ -92,12 +94,12 @@ class RedisConfig(BaseSettings):
default=False,
)
REDIS_CLUSTERS: str | None = Field(
REDIS_CLUSTERS: Optional[str] = Field(
description="Comma-separated list of Redis Clusters nodes (host:port)",
default=None,
)
REDIS_CLUSTERS_PASSWORD: str | None = Field(
REDIS_CLUSTERS_PASSWORD: Optional[str] = Field(
description="Password for Redis Clusters authentication (if required)",
default=None,
)
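For context, pydantic-settings fills each of these fields from an environment variable with the same name as the field, falling back to the declared default. A self-contained sketch with a trimmed stand-in for RedisConfig:

import os
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings

class MiniRedisConfig(BaseSettings):
    REDIS_USERNAME: Optional[str] = Field(default=None)
    REDIS_USE_SENTINEL: Optional[bool] = Field(default=False)

os.environ["REDIS_USERNAME"] = "dify"
cfg = MiniRedisConfig()
print(cfg.REDIS_USERNAME)      # 'dify' (read from the environment)
print(cfg.REDIS_USE_SENTINEL)  # False  (declared default)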

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,37 +9,37 @@ class AliyunOSSStorageConfig(BaseSettings):
Configuration settings for Aliyun Object Storage Service (OSS)
"""
ALIYUN_OSS_BUCKET_NAME: str | None = Field(
ALIYUN_OSS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Aliyun OSS bucket to store and retrieve objects",
default=None,
)
ALIYUN_OSS_ACCESS_KEY: str | None = Field(
ALIYUN_OSS_ACCESS_KEY: Optional[str] = Field(
description="Access key ID for authenticating with Aliyun OSS",
default=None,
)
ALIYUN_OSS_SECRET_KEY: str | None = Field(
ALIYUN_OSS_SECRET_KEY: Optional[str] = Field(
description="Secret access key for authenticating with Aliyun OSS",
default=None,
)
ALIYUN_OSS_ENDPOINT: str | None = Field(
ALIYUN_OSS_ENDPOINT: Optional[str] = Field(
description="URL of the Aliyun OSS endpoint for your chosen region",
default=None,
)
ALIYUN_OSS_REGION: str | None = Field(
ALIYUN_OSS_REGION: Optional[str] = Field(
description="Aliyun OSS region where your bucket is located (e.g., 'oss-cn-hangzhou')",
default=None,
)
ALIYUN_OSS_AUTH_VERSION: str | None = Field(
ALIYUN_OSS_AUTH_VERSION: Optional[str] = Field(
description="Version of the authentication protocol to use with Aliyun OSS (e.g., 'v4')",
default=None,
)
ALIYUN_OSS_PATH: str | None = Field(
ALIYUN_OSS_PATH: Optional[str] = Field(
description="Base path within the bucket to store objects (e.g., 'my-app-data/')",
default=None,
)

View File

@@ -1,4 +1,4 @@
from typing import Literal
from typing import Literal, Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -9,27 +9,27 @@ class S3StorageConfig(BaseSettings):
Configuration settings for S3-compatible object storage
"""
S3_ENDPOINT: str | None = Field(
S3_ENDPOINT: Optional[str] = Field(
description="URL of the S3-compatible storage endpoint (e.g., 'https://s3.amazonaws.com')",
default=None,
)
S3_REGION: str | None = Field(
S3_REGION: Optional[str] = Field(
description="Region where the S3 bucket is located (e.g., 'us-east-1')",
default=None,
)
S3_BUCKET_NAME: str | None = Field(
S3_BUCKET_NAME: Optional[str] = Field(
description="Name of the S3 bucket to store and retrieve objects",
default=None,
)
S3_ACCESS_KEY: str | None = Field(
S3_ACCESS_KEY: Optional[str] = Field(
description="Access key ID for authenticating with the S3 service",
default=None,
)
S3_SECRET_KEY: str | None = Field(
S3_SECRET_KEY: Optional[str] = Field(
description="Secret access key for authenticating with the S3 service",
default=None,
)
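A sketch (not from this repository) of how S3_* settings of this shape are commonly handed to a client; boto3 and every value below are illustrative assumptions:

import boto3

client = boto3.client(
    "s3",
    endpoint_url="https://s3.amazonaws.com",  # S3_ENDPOINT
    region_name="us-east-1",                  # S3_REGION
    aws_access_key_id="AKIA...",              # S3_ACCESS_KEY
    aws_secret_access_key="***",              # S3_SECRET_KEY
)
client.list_objects_v2(Bucket="my-bucket", MaxKeys=1)  # bucket from S3_BUCKET_NAME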

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class AzureBlobStorageConfig(BaseSettings):
Configuration settings for Azure Blob Storage
"""
AZURE_BLOB_ACCOUNT_NAME: str | None = Field(
AZURE_BLOB_ACCOUNT_NAME: Optional[str] = Field(
description="Name of the Azure Storage account (e.g., 'mystorageaccount')",
default=None,
)
AZURE_BLOB_ACCOUNT_KEY: str | None = Field(
AZURE_BLOB_ACCOUNT_KEY: Optional[str] = Field(
description="Access key for authenticating with the Azure Storage account",
default=None,
)
AZURE_BLOB_CONTAINER_NAME: str | None = Field(
AZURE_BLOB_CONTAINER_NAME: Optional[str] = Field(
description="Name of the Azure Blob container to store and retrieve objects",
default=None,
)
AZURE_BLOB_ACCOUNT_URL: str | None = Field(
AZURE_BLOB_ACCOUNT_URL: Optional[str] = Field(
description="URL of the Azure Blob storage endpoint (e.g., 'https://mystorageaccount.blob.core.windows.net')",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class BaiduOBSStorageConfig(BaseSettings):
Configuration settings for Baidu Object Storage Service (OBS)
"""
BAIDU_OBS_BUCKET_NAME: str | None = Field(
BAIDU_OBS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Baidu OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
default=None,
)
BAIDU_OBS_ACCESS_KEY: str | None = Field(
BAIDU_OBS_ACCESS_KEY: Optional[str] = Field(
description="Access Key ID for authenticating with Baidu OBS",
default=None,
)
BAIDU_OBS_SECRET_KEY: str | None = Field(
BAIDU_OBS_SECRET_KEY: Optional[str] = Field(
description="Secret Access Key for authenticating with Baidu OBS",
default=None,
)
BAIDU_OBS_ENDPOINT: str | None = Field(
BAIDU_OBS_ENDPOINT: Optional[str] = Field(
description="URL of the Baidu OSS endpoint for your chosen region (e.g., 'https://.bj.bcebos.com')",
default=None,
)

View File

@@ -1,5 +1,7 @@
"""ClickZetta Volume Storage Configuration"""
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,17 +9,17 @@ from pydantic_settings import BaseSettings
class ClickZettaVolumeStorageConfig(BaseSettings):
"""Configuration for ClickZetta Volume storage."""
CLICKZETTA_VOLUME_USERNAME: str | None = Field(
CLICKZETTA_VOLUME_USERNAME: Optional[str] = Field(
description="Username for ClickZetta Volume authentication",
default=None,
)
CLICKZETTA_VOLUME_PASSWORD: str | None = Field(
CLICKZETTA_VOLUME_PASSWORD: Optional[str] = Field(
description="Password for ClickZetta Volume authentication",
default=None,
)
CLICKZETTA_VOLUME_INSTANCE: str | None = Field(
CLICKZETTA_VOLUME_INSTANCE: Optional[str] = Field(
description="ClickZetta instance identifier",
default=None,
)
@@ -47,7 +49,7 @@ class ClickZettaVolumeStorageConfig(BaseSettings):
default="user",
)
CLICKZETTA_VOLUME_NAME: str | None = Field(
CLICKZETTA_VOLUME_NAME: Optional[str] = Field(
description="ClickZetta volume name for external volumes",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class GoogleCloudStorageConfig(BaseSettings):
Configuration settings for Google Cloud Storage
"""
GOOGLE_STORAGE_BUCKET_NAME: str | None = Field(
GOOGLE_STORAGE_BUCKET_NAME: Optional[str] = Field(
description="Name of the Google Cloud Storage bucket to store and retrieve objects (e.g., 'my-gcs-bucket')",
default=None,
)
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: str | None = Field(
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: Optional[str] = Field(
description="Base64-encoded JSON key file for Google Cloud service account authentication",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class HuaweiCloudOBSStorageConfig(BaseSettings):
Configuration settings for Huawei Cloud Object Storage Service (OBS)
"""
HUAWEI_OBS_BUCKET_NAME: str | None = Field(
HUAWEI_OBS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Huawei Cloud OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
default=None,
)
HUAWEI_OBS_ACCESS_KEY: str | None = Field(
HUAWEI_OBS_ACCESS_KEY: Optional[str] = Field(
description="Access Key ID for authenticating with Huawei Cloud OBS",
default=None,
)
HUAWEI_OBS_SECRET_KEY: str | None = Field(
HUAWEI_OBS_SECRET_KEY: Optional[str] = Field(
description="Secret Access Key for authenticating with Huawei Cloud OBS",
default=None,
)
HUAWEI_OBS_SERVER: str | None = Field(
HUAWEI_OBS_SERVER: Optional[str] = Field(
description="Endpoint URL for Huawei Cloud OBS (e.g., 'https://obs.cn-north-4.myhuaweicloud.com')",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class OCIStorageConfig(BaseSettings):
Configuration settings for Oracle Cloud Infrastructure (OCI) Object Storage
"""
OCI_ENDPOINT: str | None = Field(
OCI_ENDPOINT: Optional[str] = Field(
description="URL of the OCI Object Storage endpoint (e.g., 'https://objectstorage.us-phoenix-1.oraclecloud.com')",
default=None,
)
OCI_REGION: str | None = Field(
OCI_REGION: Optional[str] = Field(
description="OCI region where the bucket is located (e.g., 'us-phoenix-1')",
default=None,
)
OCI_BUCKET_NAME: str | None = Field(
OCI_BUCKET_NAME: Optional[str] = Field(
description="Name of the OCI Object Storage bucket to store and retrieve objects (e.g., 'my-oci-bucket')",
default=None,
)
OCI_ACCESS_KEY: str | None = Field(
OCI_ACCESS_KEY: Optional[str] = Field(
description="Access key (also known as API key) for authenticating with OCI Object Storage",
default=None,
)
OCI_SECRET_KEY: str | None = Field(
OCI_SECRET_KEY: Optional[str] = Field(
description="Secret key associated with the access key for authenticating with OCI Object Storage",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,17 +9,17 @@ class SupabaseStorageConfig(BaseSettings):
Configuration settings for Supabase Object Storage Service
"""
SUPABASE_BUCKET_NAME: str | None = Field(
SUPABASE_BUCKET_NAME: Optional[str] = Field(
description="Name of the Supabase bucket to store and retrieve objects (e.g., 'dify-bucket')",
default=None,
)
SUPABASE_API_KEY: str | None = Field(
SUPABASE_API_KEY: Optional[str] = Field(
description="API KEY for authenticating with Supabase",
default=None,
)
SUPABASE_URL: str | None = Field(
SUPABASE_URL: Optional[str] = Field(
description="URL of the Supabase",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class TencentCloudCOSStorageConfig(BaseSettings):
Configuration settings for Tencent Cloud Object Storage (COS)
"""
TENCENT_COS_BUCKET_NAME: str | None = Field(
TENCENT_COS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Tencent Cloud COS bucket to store and retrieve objects",
default=None,
)
TENCENT_COS_REGION: str | None = Field(
TENCENT_COS_REGION: Optional[str] = Field(
description="Tencent Cloud region where the COS bucket is located (e.g., 'ap-guangzhou')",
default=None,
)
TENCENT_COS_SECRET_ID: str | None = Field(
TENCENT_COS_SECRET_ID: Optional[str] = Field(
description="SecretId for authenticating with Tencent Cloud COS (part of API credentials)",
default=None,
)
TENCENT_COS_SECRET_KEY: str | None = Field(
TENCENT_COS_SECRET_KEY: Optional[str] = Field(
description="SecretKey for authenticating with Tencent Cloud COS (part of API credentials)",
default=None,
)
TENCENT_COS_SCHEME: str | None = Field(
TENCENT_COS_SCHEME: Optional[str] = Field(
description="Protocol scheme for COS requests: 'https' (recommended) or 'http'",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class VolcengineTOSStorageConfig(BaseSettings):
Configuration settings for Volcengine Tinder Object Storage (TOS)
"""
VOLCENGINE_TOS_BUCKET_NAME: str | None = Field(
VOLCENGINE_TOS_BUCKET_NAME: Optional[str] = Field(
description="Name of the Volcengine TOS bucket to store and retrieve objects (e.g., 'my-tos-bucket')",
default=None,
)
VOLCENGINE_TOS_ACCESS_KEY: str | None = Field(
VOLCENGINE_TOS_ACCESS_KEY: Optional[str] = Field(
description="Access Key ID for authenticating with Volcengine TOS",
default=None,
)
VOLCENGINE_TOS_SECRET_KEY: str | None = Field(
VOLCENGINE_TOS_SECRET_KEY: Optional[str] = Field(
description="Secret Access Key for authenticating with Volcengine TOS",
default=None,
)
VOLCENGINE_TOS_ENDPOINT: str | None = Field(
VOLCENGINE_TOS_ENDPOINT: Optional[str] = Field(
description="URL of the Volcengine TOS endpoint (e.g., 'https://tos-cn-beijing.volces.com')",
default=None,
)
VOLCENGINE_TOS_REGION: str | None = Field(
VOLCENGINE_TOS_REGION: Optional[str] = Field(
description="Volcengine region where the TOS bucket is located (e.g., 'cn-beijing')",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -9,37 +11,37 @@ class AnalyticdbConfig(BaseSettings):
https://www.alibabacloud.com/help/en/analyticdb-for-postgresql/getting-started/create-an-instance-instances-with-vector-engine-optimization-enabled
"""
ANALYTICDB_KEY_ID: str | None = Field(
ANALYTICDB_KEY_ID: Optional[str] = Field(
default=None, description="The Access Key ID provided by Alibaba Cloud for API authentication."
)
ANALYTICDB_KEY_SECRET: str | None = Field(
ANALYTICDB_KEY_SECRET: Optional[str] = Field(
default=None, description="The Secret Access Key corresponding to the Access Key ID for secure API access."
)
ANALYTICDB_REGION_ID: str | None = Field(
ANALYTICDB_REGION_ID: Optional[str] = Field(
default=None,
description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou', 'ap-southeast-1').",
)
ANALYTICDB_INSTANCE_ID: str | None = Field(
ANALYTICDB_INSTANCE_ID: Optional[str] = Field(
default=None,
description="The unique identifier of the AnalyticDB instance you want to connect to.",
)
ANALYTICDB_ACCOUNT: str | None = Field(
ANALYTICDB_ACCOUNT: Optional[str] = Field(
default=None,
description="The account name used to log in to the AnalyticDB instance"
" (usually the initial account created with the instance).",
)
ANALYTICDB_PASSWORD: str | None = Field(
ANALYTICDB_PASSWORD: Optional[str] = Field(
default=None, description="The password associated with the AnalyticDB account for database authentication."
)
ANALYTICDB_NAMESPACE: str | None = Field(
ANALYTICDB_NAMESPACE: Optional[str] = Field(
default=None, description="The namespace within AnalyticDB for schema isolation (if using namespace feature)."
)
ANALYTICDB_NAMESPACE_PASSWORD: str | None = Field(
ANALYTICDB_NAMESPACE_PASSWORD: Optional[str] = Field(
default=None,
description="The password for accessing the specified namespace within the AnalyticDB instance"
" (if namespace feature is enabled).",
)
ANALYTICDB_HOST: str | None = Field(
ANALYTICDB_HOST: Optional[str] = Field(
default=None, description="The host of the AnalyticDB instance you want to connect to."
)
ANALYTICDB_PORT: PositiveInt = Field(

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class BaiduVectorDBConfig(BaseSettings):
Configuration settings for Baidu Vector Database
"""
BAIDU_VECTOR_DB_ENDPOINT: str | None = Field(
BAIDU_VECTOR_DB_ENDPOINT: Optional[str] = Field(
description="URL of the Baidu Vector Database service (e.g., 'http://vdb.bj.baidubce.com')",
default=None,
)
@@ -17,17 +19,17 @@ class BaiduVectorDBConfig(BaseSettings):
default=30000,
)
BAIDU_VECTOR_DB_ACCOUNT: str | None = Field(
BAIDU_VECTOR_DB_ACCOUNT: Optional[str] = Field(
description="Account for authenticating with the Baidu Vector Database",
default=None,
)
BAIDU_VECTOR_DB_API_KEY: str | None = Field(
BAIDU_VECTOR_DB_API_KEY: Optional[str] = Field(
description="API key for authenticating with the Baidu Vector Database service",
default=None,
)
BAIDU_VECTOR_DB_DATABASE: str | None = Field(
BAIDU_VECTOR_DB_DATABASE: Optional[str] = Field(
description="Name of the specific Baidu Vector Database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class ChromaConfig(BaseSettings):
Configuration settings for Chroma vector database
"""
CHROMA_HOST: str | None = Field(
CHROMA_HOST: Optional[str] = Field(
description="Hostname or IP address of the Chroma server (e.g., 'localhost' or '192.168.1.100')",
default=None,
)
@@ -17,22 +19,22 @@ class ChromaConfig(BaseSettings):
default=8000,
)
CHROMA_TENANT: str | None = Field(
CHROMA_TENANT: Optional[str] = Field(
description="Tenant identifier for multi-tenancy support in Chroma",
default=None,
)
CHROMA_DATABASE: str | None = Field(
CHROMA_DATABASE: Optional[str] = Field(
description="Name of the Chroma database to connect to",
default=None,
)
CHROMA_AUTH_PROVIDER: str | None = Field(
CHROMA_AUTH_PROVIDER: Optional[str] = Field(
description="Authentication provider for Chroma (e.g., 'basic', 'token', or a custom provider)",
default=None,
)
CHROMA_AUTH_CREDENTIALS: str | None = Field(
CHROMA_AUTH_CREDENTIALS: Optional[str] = Field(
description="Authentication credentials for Chroma (format depends on the auth provider)",
default=None,
)

View File

@@ -1,68 +1,69 @@
from pydantic import Field
from pydantic_settings import BaseSettings
from typing import Optional
from pydantic import BaseModel, Field
class ClickzettaConfig(BaseSettings):
class ClickzettaConfig(BaseModel):
"""
Clickzetta Lakehouse vector database configuration
"""
CLICKZETTA_USERNAME: str | None = Field(
CLICKZETTA_USERNAME: Optional[str] = Field(
description="Username for authenticating with Clickzetta Lakehouse",
default=None,
)
CLICKZETTA_PASSWORD: str | None = Field(
CLICKZETTA_PASSWORD: Optional[str] = Field(
description="Password for authenticating with Clickzetta Lakehouse",
default=None,
)
CLICKZETTA_INSTANCE: str | None = Field(
CLICKZETTA_INSTANCE: Optional[str] = Field(
description="Clickzetta Lakehouse instance ID",
default=None,
)
CLICKZETTA_SERVICE: str | None = Field(
CLICKZETTA_SERVICE: Optional[str] = Field(
description="Clickzetta API service endpoint (e.g., 'api.clickzetta.com')",
default="api.clickzetta.com",
)
CLICKZETTA_WORKSPACE: str | None = Field(
CLICKZETTA_WORKSPACE: Optional[str] = Field(
description="Clickzetta workspace name",
default="default",
)
CLICKZETTA_VCLUSTER: str | None = Field(
CLICKZETTA_VCLUSTER: Optional[str] = Field(
description="Clickzetta virtual cluster name",
default="default_ap",
)
CLICKZETTA_SCHEMA: str | None = Field(
CLICKZETTA_SCHEMA: Optional[str] = Field(
description="Database schema name in Clickzetta",
default="public",
)
CLICKZETTA_BATCH_SIZE: int | None = Field(
CLICKZETTA_BATCH_SIZE: Optional[int] = Field(
description="Batch size for bulk insert operations",
default=100,
)
CLICKZETTA_ENABLE_INVERTED_INDEX: bool | None = Field(
CLICKZETTA_ENABLE_INVERTED_INDEX: Optional[bool] = Field(
description="Enable inverted index for full-text search capabilities",
default=True,
)
CLICKZETTA_ANALYZER_TYPE: str | None = Field(
CLICKZETTA_ANALYZER_TYPE: Optional[str] = Field(
description="Analyzer type for full-text search: keyword, english, chinese, unicode",
default="chinese",
)
CLICKZETTA_ANALYZER_MODE: str | None = Field(
CLICKZETTA_ANALYZER_MODE: Optional[str] = Field(
description="Analyzer mode for tokenization: max_word (fine-grained) or smart (intelligent)",
default="smart",
)
CLICKZETTA_VECTOR_DISTANCE_FUNCTION: str | None = Field(
CLICKZETTA_VECTOR_DISTANCE_FUNCTION: Optional[str] = Field(
description="Distance function for vector similarity: l2_distance or cosine_distance",
default="cosine_distance",
)
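The switch from BaseSettings to BaseModel above is more than cosmetic: a plain BaseModel does not read environment variables on its own, so values must be passed in by whatever constructs it. A minimal demonstration of the difference:

import os

from pydantic import BaseModel, Field
from pydantic_settings import BaseSettings

class FromEnv(BaseSettings):
    CLICKZETTA_WORKSPACE: str = Field(default="default")

class FromKwargs(BaseModel):
    CLICKZETTA_WORKSPACE: str = Field(default="default")

os.environ["CLICKZETTA_WORKSPACE"] = "prod"
print(FromEnv().CLICKZETTA_WORKSPACE)     # 'prod'    (read from the environment)
print(FromKwargs().CLICKZETTA_WORKSPACE)  # 'default' (env var is ignored)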

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class CouchbaseConfig(BaseSettings):
Couchbase configs
"""
COUCHBASE_CONNECTION_STRING: str | None = Field(
COUCHBASE_CONNECTION_STRING: Optional[str] = Field(
description="COUCHBASE connection string",
default=None,
)
COUCHBASE_USER: str | None = Field(
COUCHBASE_USER: Optional[str] = Field(
description="COUCHBASE user",
default=None,
)
COUCHBASE_PASSWORD: str | None = Field(
COUCHBASE_PASSWORD: Optional[str] = Field(
description="COUCHBASE password",
default=None,
)
COUCHBASE_BUCKET_NAME: str | None = Field(
COUCHBASE_BUCKET_NAME: Optional[str] = Field(
description="COUCHBASE bucket name",
default=None,
)
COUCHBASE_SCOPE_NAME: str | None = Field(
COUCHBASE_SCOPE_NAME: Optional[str] = Field(
description="COUCHBASE scope name",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt, model_validator
from pydantic_settings import BaseSettings
@@ -8,7 +10,7 @@ class ElasticsearchConfig(BaseSettings):
Can load from environment variables or .env files.
"""
ELASTICSEARCH_HOST: str | None = Field(
ELASTICSEARCH_HOST: Optional[str] = Field(
description="Hostname or IP address of the Elasticsearch server (e.g., 'localhost' or '192.168.1.100')",
default="127.0.0.1",
)
@@ -18,28 +20,30 @@ class ElasticsearchConfig(BaseSettings):
default=9200,
)
ELASTICSEARCH_USERNAME: str | None = Field(
ELASTICSEARCH_USERNAME: Optional[str] = Field(
description="Username for authenticating with Elasticsearch (default is 'elastic')",
default="elastic",
)
ELASTICSEARCH_PASSWORD: str | None = Field(
ELASTICSEARCH_PASSWORD: Optional[str] = Field(
description="Password for authenticating with Elasticsearch (default is 'elastic')",
default="elastic",
)
# Elastic Cloud (optional)
ELASTICSEARCH_USE_CLOUD: bool | None = Field(
ELASTICSEARCH_USE_CLOUD: Optional[bool] = Field(
description="Set to True to use Elastic Cloud instead of self-hosted Elasticsearch", default=False
)
ELASTICSEARCH_CLOUD_URL: str | None = Field(
ELASTICSEARCH_CLOUD_URL: Optional[str] = Field(
description="Full URL for Elastic Cloud deployment (e.g., 'https://example.es.region.aws.found.io:443')",
default=None,
)
ELASTICSEARCH_API_KEY: str | None = Field(description="API key for authenticating with Elastic Cloud", default=None)
ELASTICSEARCH_API_KEY: Optional[str] = Field(
description="API key for authenticating with Elastic Cloud", default=None
)
# Common options
ELASTICSEARCH_CA_CERTS: str | None = Field(
ELASTICSEARCH_CA_CERTS: Optional[str] = Field(
description="Path to CA certificate file for SSL verification", default=None
)
ELASTICSEARCH_VERIFY_CERTS: bool = Field(

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,17 +9,17 @@ class HuaweiCloudConfig(BaseSettings):
Configuration settings for Huawei cloud search service
"""
HUAWEI_CLOUD_HOSTS: str | None = Field(
HUAWEI_CLOUD_HOSTS: Optional[str] = Field(
description="Hostname or IP address of the Huawei cloud search service instance",
default=None,
)
HUAWEI_CLOUD_USER: str | None = Field(
HUAWEI_CLOUD_USER: Optional[str] = Field(
description="Username for authenticating with Huawei cloud search service",
default=None,
)
HUAWEI_CLOUD_PASSWORD: str | None = Field(
HUAWEI_CLOUD_PASSWORD: Optional[str] = Field(
description="Password for authenticating with Huawei cloud search service",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class LindormConfig(BaseSettings):
Lindorm configs
"""
LINDORM_URL: str | None = Field(
LINDORM_URL: Optional[str] = Field(
description="Lindorm url",
default=None,
)
LINDORM_USERNAME: str | None = Field(
LINDORM_USERNAME: Optional[str] = Field(
description="Lindorm user",
default=None,
)
LINDORM_PASSWORD: str | None = Field(
LINDORM_PASSWORD: Optional[str] = Field(
description="Lindorm password",
default=None,
)
LINDORM_INDEX_TYPE: str | None = Field(
DEFAULT_INDEX_TYPE: Optional[str] = Field(
description="Lindorm Vector Index Type, hnsw or flat is available in dify",
default="hnsw",
)
LINDORM_DISTANCE_TYPE: str | None = Field(
DEFAULT_DISTANCE_TYPE: Optional[str] = Field(
description="Vector Distance Type, support l2, cosinesimil, innerproduct", default="l2"
)
LINDORM_USING_UGC: bool | None = Field(
description="Using UGC index will store indexes with the same IndexType/Dimension in a single big index.",
default=True,
USING_UGC_INDEX: Optional[bool] = Field(
description="Using UGC index will store the same type of Index in a single index but can retrieve separately.",
default=False,
)
LINDORM_QUERY_TIMEOUT: float | None = Field(description="The lindorm search request timeout (s)", default=2.0)
LINDORM_QUERY_TIMEOUT: Optional[float] = Field(description="The lindorm search request timeout (s)", default=2.0)
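Renaming these fields also renames the environment variables they read, since pydantic-settings derives the variable name from the field name; deployments setting LINDORM_INDEX_TYPE would need to switch to DEFAULT_INDEX_TYPE. A trimmed sketch:

import os
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings

class OldConfig(BaseSettings):
    LINDORM_INDEX_TYPE: Optional[str] = Field(default="hnsw")

class NewConfig(BaseSettings):
    DEFAULT_INDEX_TYPE: Optional[str] = Field(default="hnsw")

os.environ["LINDORM_INDEX_TYPE"] = "flat"
print(OldConfig().LINDORM_INDEX_TYPE)  # 'flat'
print(NewConfig().DEFAULT_INDEX_TYPE)  # 'hnsw' -- the old variable is ignored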

View File

@@ -1,8 +1,7 @@
from pydantic import Field
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field
class MatrixoneConfig(BaseSettings):
class MatrixoneConfig(BaseModel):
"""Matrixone vector database configuration."""
MATRIXONE_HOST: str = Field(default="localhost", description="Host address of the Matrixone server")

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class MilvusConfig(BaseSettings):
Configuration settings for Milvus vector database
"""
MILVUS_URI: str | None = Field(
MILVUS_URI: Optional[str] = Field(
description="URI for connecting to the Milvus server (e.g., 'http://localhost:19530' or 'https://milvus-instance.example.com:19530')",
default="http://127.0.0.1:19530",
)
MILVUS_TOKEN: str | None = Field(
MILVUS_TOKEN: Optional[str] = Field(
description="Authentication token for Milvus, if token-based authentication is enabled",
default=None,
)
MILVUS_USER: str | None = Field(
MILVUS_USER: Optional[str] = Field(
description="Username for authenticating with Milvus, if username/password authentication is enabled",
default=None,
)
MILVUS_PASSWORD: str | None = Field(
MILVUS_PASSWORD: Optional[str] = Field(
description="Password for authenticating with Milvus, if username/password authentication is enabled",
default=None,
)
@@ -38,7 +40,7 @@ class MilvusConfig(BaseSettings):
default=True,
)
MILVUS_ANALYZER_PARAMS: str | None = Field(
MILVUS_ANALYZER_PARAMS: Optional[str] = Field(
description='Milvus text analyzer parameters, e.g., {"type": "chinese"} for Chinese segmentation support.',
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class OceanBaseVectorConfig(BaseSettings):
Configuration settings for OceanBase Vector database
"""
OCEANBASE_VECTOR_HOST: str | None = Field(
OCEANBASE_VECTOR_HOST: Optional[str] = Field(
description="Hostname or IP address of the OceanBase Vector server (e.g. 'localhost')",
default=None,
)
OCEANBASE_VECTOR_PORT: PositiveInt | None = Field(
OCEANBASE_VECTOR_PORT: Optional[PositiveInt] = Field(
description="Port number on which the OceanBase Vector server is listening (default is 2881)",
default=2881,
)
OCEANBASE_VECTOR_USER: str | None = Field(
OCEANBASE_VECTOR_USER: Optional[str] = Field(
description="Username for authenticating with the OceanBase Vector database",
default=None,
)
OCEANBASE_VECTOR_PASSWORD: str | None = Field(
OCEANBASE_VECTOR_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the OceanBase Vector database",
default=None,
)
OCEANBASE_VECTOR_DATABASE: str | None = Field(
OCEANBASE_VECTOR_DATABASE: Optional[str] = Field(
description="Name of the OceanBase Vector database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class OpenGaussConfig(BaseSettings):
Configuration settings for OpenGauss
"""
OPENGAUSS_HOST: str | None = Field(
OPENGAUSS_HOST: Optional[str] = Field(
description="Hostname or IP address of the OpenGauss server(e.g., 'localhost')",
default=None,
)
@@ -17,17 +19,17 @@ class OpenGaussConfig(BaseSettings):
default=6600,
)
OPENGAUSS_USER: str | None = Field(
OPENGAUSS_USER: Optional[str] = Field(
description="Username for authenticating with the OpenGauss database",
default=None,
)
OPENGAUSS_PASSWORD: str | None = Field(
OPENGAUSS_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the OpenGauss database",
default=None,
)
OPENGAUSS_DATABASE: str | None = Field(
OPENGAUSS_DATABASE: Optional[str] = Field(
description="Name of the OpenGauss database to connect to",
default=None,
)

View File

@@ -1,5 +1,5 @@
from enum import Enum
from typing import Literal
import enum
from typing import Literal, Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -10,7 +10,7 @@ class OpenSearchConfig(BaseSettings):
Configuration settings for OpenSearch
"""
class AuthMethod(Enum):
class AuthMethod(enum.StrEnum):
"""
Authentication method for OpenSearch
"""
@@ -18,7 +18,7 @@ class OpenSearchConfig(BaseSettings):
BASIC = "basic"
AWS_MANAGED_IAM = "aws_managed_iam"
OPENSEARCH_HOST: str | None = Field(
OPENSEARCH_HOST: Optional[str] = Field(
description="Hostname or IP address of the OpenSearch server (e.g., 'localhost' or 'opensearch.example.com')",
default=None,
)
@@ -43,21 +43,21 @@ class OpenSearchConfig(BaseSettings):
default=AuthMethod.BASIC,
)
OPENSEARCH_USER: str | None = Field(
OPENSEARCH_USER: Optional[str] = Field(
description="Username for authenticating with OpenSearch",
default=None,
)
OPENSEARCH_PASSWORD: str | None = Field(
OPENSEARCH_PASSWORD: Optional[str] = Field(
description="Password for authenticating with OpenSearch",
default=None,
)
OPENSEARCH_AWS_REGION: str | None = Field(
OPENSEARCH_AWS_REGION: Optional[str] = Field(
description="AWS region for OpenSearch (e.g. 'us-west-2')",
default=None,
)
OPENSEARCH_AWS_SERVICE: Literal["es", "aoss"] | None = Field(
OPENSEARCH_AWS_SERVICE: Optional[Literal["es", "aoss"]] = Field(
description="AWS service for OpenSearch (e.g. 'aoss' for OpenSearch Serverless)", default=None
)
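The move from Enum to enum.StrEnum above (available since Python 3.11) makes the members behave as plain strings, so comparisons and string formatting work without .value. A short demonstration:

import enum

class AuthMethod(enum.StrEnum):
    BASIC = "basic"
    AWS_MANAGED_IAM = "aws_managed_iam"

class PlainAuthMethod(enum.Enum):
    BASIC = "basic"

print(AuthMethod.BASIC == "basic")       # True
print(f"auth={AuthMethod.BASIC}")        # auth=basic
print(PlainAuthMethod.BASIC == "basic")  # False -- a plain Enum needs .value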

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,33 +9,33 @@ class OracleConfig(BaseSettings):
Configuration settings for Oracle database
"""
ORACLE_USER: str | None = Field(
ORACLE_USER: Optional[str] = Field(
description="Username for authenticating with the Oracle database",
default=None,
)
ORACLE_PASSWORD: str | None = Field(
ORACLE_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the Oracle database",
default=None,
)
ORACLE_DSN: str | None = Field(
ORACLE_DSN: Optional[str] = Field(
description="Oracle database connection string. For traditional database, use format 'host:port/service_name'. "
"For autonomous database, use the service name from tnsnames.ora in the wallet",
default=None,
)
ORACLE_CONFIG_DIR: str | None = Field(
ORACLE_CONFIG_DIR: Optional[str] = Field(
description="Directory containing the tnsnames.ora configuration file. Only used in thin mode connection",
default=None,
)
ORACLE_WALLET_LOCATION: str | None = Field(
ORACLE_WALLET_LOCATION: Optional[str] = Field(
description="Oracle wallet directory path containing the wallet files for secure connection",
default=None,
)
ORACLE_WALLET_PASSWORD: str | None = Field(
ORACLE_WALLET_PASSWORD: Optional[str] = Field(
description="Password to decrypt the Oracle wallet, if it is encrypted",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class PGVectorConfig(BaseSettings):
Configuration settings for PGVector (PostgreSQL with vector extension)
"""
PGVECTOR_HOST: str | None = Field(
PGVECTOR_HOST: Optional[str] = Field(
description="Hostname or IP address of the PostgreSQL server with PGVector extension (e.g., 'localhost')",
default=None,
)
@@ -17,17 +19,17 @@ class PGVectorConfig(BaseSettings):
default=5433,
)
PGVECTOR_USER: str | None = Field(
PGVECTOR_USER: Optional[str] = Field(
description="Username for authenticating with the PostgreSQL database",
default=None,
)
PGVECTOR_PASSWORD: str | None = Field(
PGVECTOR_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the PostgreSQL database",
default=None,
)
PGVECTOR_DATABASE: str | None = Field(
PGVECTOR_DATABASE: Optional[str] = Field(
description="Name of the PostgreSQL database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class PGVectoRSConfig(BaseSettings):
Configuration settings for PGVecto.RS (Rust-based vector extension for PostgreSQL)
"""
PGVECTO_RS_HOST: str | None = Field(
PGVECTO_RS_HOST: Optional[str] = Field(
description="Hostname or IP address of the PostgreSQL server with PGVecto.RS extension (e.g., 'localhost')",
default=None,
)
@@ -17,17 +19,17 @@ class PGVectoRSConfig(BaseSettings):
default=5431,
)
PGVECTO_RS_USER: str | None = Field(
PGVECTO_RS_USER: Optional[str] = Field(
description="Username for authenticating with the PostgreSQL database using PGVecto.RS",
default=None,
)
PGVECTO_RS_PASSWORD: str | None = Field(
PGVECTO_RS_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the PostgreSQL database using PGVecto.RS",
default=None,
)
PGVECTO_RS_DATABASE: str | None = Field(
PGVECTO_RS_DATABASE: Optional[str] = Field(
description="Name of the PostgreSQL database with PGVecto.RS extension to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class QdrantConfig(BaseSettings):
Configuration settings for Qdrant vector database
"""
QDRANT_URL: str | None = Field(
QDRANT_URL: Optional[str] = Field(
description="URL of the Qdrant server (e.g., 'http://localhost:6333' or 'https://qdrant.example.com')",
default=None,
)
QDRANT_API_KEY: str | None = Field(
QDRANT_API_KEY: Optional[str] = Field(
description="API key for authenticating with the Qdrant server",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class RelytConfig(BaseSettings):
Configuration settings for Relyt database
"""
RELYT_HOST: str | None = Field(
RELYT_HOST: Optional[str] = Field(
description="Hostname or IP address of the Relyt server (e.g., 'localhost' or 'relyt.example.com')",
default=None,
)
@@ -17,17 +19,17 @@ class RelytConfig(BaseSettings):
default=9200,
)
RELYT_USER: str | None = Field(
RELYT_USER: Optional[str] = Field(
description="Username for authenticating with the Relyt database",
default=None,
)
RELYT_PASSWORD: str | None = Field(
RELYT_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the Relyt database",
default=None,
)
RELYT_DATABASE: str | None = Field(
RELYT_DATABASE: Optional[str] = Field(
description="Name of the Relyt database to connect to (default is 'default')",
default="default",
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,22 +9,22 @@ class TableStoreConfig(BaseSettings):
Configuration settings for TableStore.
"""
TABLESTORE_ENDPOINT: str | None = Field(
TABLESTORE_ENDPOINT: Optional[str] = Field(
description="Endpoint address of the TableStore server (e.g. 'https://instance-name.cn-hangzhou.ots.aliyuncs.com')",
default=None,
)
TABLESTORE_INSTANCE_NAME: str | None = Field(
TABLESTORE_INSTANCE_NAME: Optional[str] = Field(
description="Instance name to access TableStore server (eg. 'instance-name')",
default=None,
)
TABLESTORE_ACCESS_KEY_ID: str | None = Field(
TABLESTORE_ACCESS_KEY_ID: Optional[str] = Field(
description="AccessKey id for the instance name",
default=None,
)
TABLESTORE_ACCESS_KEY_SECRET: str | None = Field(
TABLESTORE_ACCESS_KEY_SECRET: Optional[str] = Field(
description="AccessKey secret for the instance name",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class TencentVectorDBConfig(BaseSettings):
Configuration settings for Tencent Vector Database
"""
TENCENT_VECTOR_DB_URL: str | None = Field(
TENCENT_VECTOR_DB_URL: Optional[str] = Field(
description="URL of the Tencent Vector Database service (e.g., 'https://vectordb.tencentcloudapi.com')",
default=None,
)
TENCENT_VECTOR_DB_API_KEY: str | None = Field(
TENCENT_VECTOR_DB_API_KEY: Optional[str] = Field(
description="API key for authenticating with the Tencent Vector Database service",
default=None,
)
@@ -22,12 +24,12 @@ class TencentVectorDBConfig(BaseSettings):
default=30,
)
TENCENT_VECTOR_DB_USERNAME: str | None = Field(
TENCENT_VECTOR_DB_USERNAME: Optional[str] = Field(
description="Username for authenticating with the Tencent Vector Database (if required)",
default=None,
)
TENCENT_VECTOR_DB_PASSWORD: str | None = Field(
TENCENT_VECTOR_DB_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the Tencent Vector Database (if required)",
default=None,
)
@@ -42,7 +44,7 @@ class TencentVectorDBConfig(BaseSettings):
default=2,
)
TENCENT_VECTOR_DB_DATABASE: str | None = Field(
TENCENT_VECTOR_DB_DATABASE: Optional[str] = Field(
description="Name of the specific Tencent Vector Database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class TidbOnQdrantConfig(BaseSettings):
Tidb on Qdrant configs
"""
TIDB_ON_QDRANT_URL: str | None = Field(
TIDB_ON_QDRANT_URL: Optional[str] = Field(
description="Tidb on Qdrant url",
default=None,
)
TIDB_ON_QDRANT_API_KEY: str | None = Field(
TIDB_ON_QDRANT_API_KEY: Optional[str] = Field(
description="Tidb on Qdrant api key",
default=None,
)
@@ -32,37 +34,37 @@ class TidbOnQdrantConfig(BaseSettings):
default=6334,
)
TIDB_PUBLIC_KEY: str | None = Field(
TIDB_PUBLIC_KEY: Optional[str] = Field(
description="Tidb account public key",
default=None,
)
TIDB_PRIVATE_KEY: str | None = Field(
TIDB_PRIVATE_KEY: Optional[str] = Field(
description="Tidb account private key",
default=None,
)
TIDB_API_URL: str | None = Field(
TIDB_API_URL: Optional[str] = Field(
description="Tidb API url",
default=None,
)
TIDB_IAM_API_URL: str | None = Field(
TIDB_IAM_API_URL: Optional[str] = Field(
description="Tidb IAM API url",
default=None,
)
TIDB_REGION: str | None = Field(
TIDB_REGION: Optional[str] = Field(
description="Tidb serverless region",
default="regions/aws-us-east-1",
)
TIDB_PROJECT_ID: str | None = Field(
TIDB_PROJECT_ID: Optional[str] = Field(
description="Tidb project id",
default=None,
)
TIDB_SPEND_LIMIT: int | None = Field(
TIDB_SPEND_LIMIT: Optional[int] = Field(
description="Tidb spend limit",
default=100,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,27 +9,27 @@ class TiDBVectorConfig(BaseSettings):
Configuration settings for TiDB Vector database
"""
TIDB_VECTOR_HOST: str | None = Field(
TIDB_VECTOR_HOST: Optional[str] = Field(
description="Hostname or IP address of the TiDB Vector server (e.g., 'localhost' or 'tidb.example.com')",
default=None,
)
TIDB_VECTOR_PORT: PositiveInt | None = Field(
TIDB_VECTOR_PORT: Optional[PositiveInt] = Field(
description="Port number on which the TiDB Vector server is listening (default is 4000)",
default=4000,
)
TIDB_VECTOR_USER: str | None = Field(
TIDB_VECTOR_USER: Optional[str] = Field(
description="Username for authenticating with the TiDB Vector database",
default=None,
)
TIDB_VECTOR_PASSWORD: str | None = Field(
TIDB_VECTOR_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the TiDB Vector database",
default=None,
)
TIDB_VECTOR_DATABASE: str | None = Field(
TIDB_VECTOR_DATABASE: Optional[str] = Field(
description="Name of the TiDB Vector database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class UpstashConfig(BaseSettings):
Configuration settings for Upstash vector database
"""
UPSTASH_VECTOR_URL: str | None = Field(
UPSTASH_VECTOR_URL: Optional[str] = Field(
description="URL of the upstash server (e.g., 'https://vector.upstash.io')",
default=None,
)
UPSTASH_VECTOR_TOKEN: str | None = Field(
UPSTASH_VECTOR_TOKEN: Optional[str] = Field(
description="Token for authenticating with the upstash server",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,7 +9,7 @@ class VastbaseVectorConfig(BaseSettings):
Configuration settings for Vector (Vastbase with vector extension)
"""
VASTBASE_HOST: str | None = Field(
VASTBASE_HOST: Optional[str] = Field(
description="Hostname or IP address of the Vastbase server with Vector extension (e.g., 'localhost')",
default=None,
)
@@ -17,17 +19,17 @@ class VastbaseVectorConfig(BaseSettings):
default=5432,
)
VASTBASE_USER: str | None = Field(
VASTBASE_USER: Optional[str] = Field(
description="Username for authenticating with the Vastbase database",
default=None,
)
VASTBASE_PASSWORD: str | None = Field(
VASTBASE_PASSWORD: Optional[str] = Field(
description="Password for authenticating with the Vastbase database",
default=None,
)
VASTBASE_DATABASE: str | None = Field(
VASTBASE_DATABASE: Optional[str] = Field(
description="Name of the Vastbase database to connect to",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
@@ -9,14 +11,14 @@ class VikingDBConfig(BaseSettings):
https://www.volcengine.com/docs/6291/65568
"""
VIKINGDB_ACCESS_KEY: str | None = Field(
VIKINGDB_ACCESS_KEY: Optional[str] = Field(
description="The Access Key provided by Volcengine VikingDB for API authentication."
"Refer to the following documentation for details on obtaining credentials:"
"https://www.volcengine.com/docs/6291/65568",
default=None,
)
VIKINGDB_SECRET_KEY: str | None = Field(
VIKINGDB_SECRET_KEY: Optional[str] = Field(
description="The Secret Key provided by Volcengine VikingDB for API authentication.",
default=None,
)

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
@@ -7,12 +9,12 @@ class WeaviateConfig(BaseSettings):
Configuration settings for Weaviate vector database
"""
WEAVIATE_ENDPOINT: str | None = Field(
WEAVIATE_ENDPOINT: Optional[str] = Field(
description="URL of the Weaviate server (e.g., 'http://localhost:8080' or 'https://weaviate.example.com')",
default=None,
)
WEAVIATE_API_KEY: str | None = Field(
WEAVIATE_API_KEY: Optional[str] = Field(
description="API key for authenticating with the Weaviate server",
default=None,
)

View File

@@ -1,6 +1,6 @@
from pydantic import Field
from configs.packaging.pyproject import PyProjectTomlConfig
from configs.packaging.pyproject import PyProjectConfig, PyProjectTomlConfig
class PackagingInfo(PyProjectTomlConfig):

View File

@@ -1,5 +1,5 @@
from collections.abc import Mapping
from typing import Any
from typing import Any, Optional
from pydantic import Field
from pydantic.fields import FieldInfo
@@ -15,22 +15,22 @@ class ApolloSettingsSourceInfo(BaseSettings):
Packaging build information
"""
APOLLO_APP_ID: str | None = Field(
APOLLO_APP_ID: Optional[str] = Field(
description="apollo app_id",
default=None,
)
APOLLO_CLUSTER: str | None = Field(
APOLLO_CLUSTER: Optional[str] = Field(
description="apollo cluster",
default=None,
)
APOLLO_CONFIG_URL: str | None = Field(
APOLLO_CONFIG_URL: Optional[str] = Field(
description="apollo config url",
default=None,
)
APOLLO_NAMESPACE: str | None = Field(
APOLLO_NAMESPACE: Optional[str] = Field(
description="apollo namespace",
default=None,
)

View File

@@ -4,9 +4,8 @@ import logging
import os
import threading
import time
from collections.abc import Callable, Mapping
from collections.abc import Mapping
from pathlib import Path
from typing import Any
from .python_3x import http_request, makedirs_wrapper
from .utils import (
@@ -26,13 +25,13 @@ logger = logging.getLogger(__name__)
class ApolloClient:
def __init__(
self,
config_url: str,
app_id: str,
cluster: str = "default",
secret: str = "",
start_hot_update: bool = True,
change_listener: Callable[[str, str, str, Any], None] | None = None,
_notification_map: dict[str, int] | None = None,
config_url,
app_id,
cluster="default",
secret="",
start_hot_update=True,
change_listener=None,
_notification_map=None,
):
# Core routing parameters
self.config_url = config_url
@@ -48,17 +47,17 @@ class ApolloClient:
# Private control variables
self._cycle_time = 5
self._stopping = False
self._cache: dict[str, dict[str, Any]] = {}
self._no_key: dict[str, str] = {}
self._hash: dict[str, str] = {}
self._cache = {}
self._no_key = {}
self._hash = {}
self._pull_timeout = 75
self._cache_file_path = os.path.expanduser("~") + "/.dify/config/remote-settings/apollo/cache/"
self._long_poll_thread: threading.Thread | None = None
self._long_poll_thread = None
self._change_listener = change_listener # "add" "delete" "update"
if _notification_map is None:
_notification_map = {"application": -1}
self._notification_map = _notification_map
self.last_release_key: str | None = None
self.last_release_key = None
# Private startup method
self._path_checker()
if start_hot_update:
@@ -69,7 +68,7 @@ class ApolloClient:
heartbeat.daemon = True
heartbeat.start()
def get_json_from_net(self, namespace: str = "application") -> dict[str, Any] | None:
def get_json_from_net(self, namespace="application"):
url = "{}/configs/{}/{}/{}?releaseKey={}&ip={}".format(
self.config_url, self.app_id, self.cluster, namespace, "", self.ip
)
@@ -89,7 +88,7 @@ class ApolloClient:
logger.exception("an error occurred in get_json_from_net")
return None
def get_value(self, key: str, default_val: Any = None, namespace: str = "application") -> Any:
def get_value(self, key, default_val=None, namespace="application"):
try:
# read memory configuration
namespace_cache = self._cache.get(namespace)
@@ -105,8 +104,7 @@ class ApolloClient:
namespace_data = self.get_json_from_net(namespace)
val = get_value_from_dict(namespace_data, key)
if val is not None:
if namespace_data is not None:
self._update_cache_and_file(namespace_data, namespace)
self._update_cache_and_file(namespace_data, namespace)
return val
# read the file configuration
@@ -128,23 +126,23 @@ class ApolloClient:
# to ensure the real-time correctness of the function call.
# If the user does not have the same default val twice
# and the default val is used here, there may be a problem.
def _set_local_cache_none(self, namespace: str, key: str) -> None:
def _set_local_cache_none(self, namespace, key):
no_key = no_key_cache_key(namespace, key)
self._no_key[no_key] = key
def _start_hot_update(self) -> None:
def _start_hot_update(self):
self._long_poll_thread = threading.Thread(target=self._listener)
# When the asynchronous thread is started, the daemon thread will automatically exit
# when the main thread is launched.
self._long_poll_thread.daemon = True
self._long_poll_thread.start()
def stop(self) -> None:
def stop(self):
self._stopping = True
logger.info("Stopping listener...")
# Call the set callback function, and if it is abnormal, try it out
def _call_listener(self, namespace: str, old_kv: dict[str, Any] | None, new_kv: dict[str, Any] | None) -> None:
def _call_listener(self, namespace, old_kv, new_kv):
if self._change_listener is None:
return
if old_kv is None:
@@ -170,12 +168,12 @@ class ApolloClient:
except BaseException as e:
logger.warning(str(e))
def _path_checker(self) -> None:
def _path_checker(self):
if not os.path.isdir(self._cache_file_path):
makedirs_wrapper(self._cache_file_path)
# update the local cache and file cache
def _update_cache_and_file(self, namespace_data: dict[str, Any], namespace: str = "application") -> None:
def _update_cache_and_file(self, namespace_data, namespace="application"):
# update the local cache
self._cache[namespace] = namespace_data
# update the file cache
@@ -189,7 +187,7 @@ class ApolloClient:
self._hash[namespace] = new_hash
# get the configuration from the local file
def _get_local_cache(self, namespace: str = "application") -> dict[str, Any]:
def _get_local_cache(self, namespace="application"):
cache_file_path = os.path.join(self._cache_file_path, f"{self.app_id}_configuration_{namespace}.txt")
if os.path.isfile(cache_file_path):
with open(cache_file_path) as f:
@@ -197,8 +195,8 @@ class ApolloClient:
return result
return {}
def _long_poll(self) -> None:
notifications: list[dict[str, Any]] = []
def _long_poll(self):
notifications = []
for key in self._cache:
namespace_data = self._cache[key]
notification_id = -1
@@ -238,7 +236,7 @@ class ApolloClient:
except Exception as e:
logger.warning(str(e))
def _get_net_and_set_local(self, namespace: str, n_id: int, call_change: bool = False) -> None:
def _get_net_and_set_local(self, namespace, n_id, call_change=False):
namespace_data = self.get_json_from_net(namespace)
if not namespace_data:
return
@@ -250,7 +248,7 @@ class ApolloClient:
new_kv = namespace_data.get(CONFIGURATIONS)
self._call_listener(namespace, old_kv, new_kv)
def _listener(self) -> None:
def _listener(self):
logger.info("start long_poll")
while not self._stopping:
self._long_poll()
@@ -268,13 +266,13 @@ class ApolloClient:
headers["Timestamp"] = time_unix_now
return headers
def _heart_beat(self) -> None:
def _heart_beat(self):
while not self._stopping:
for namespace in self._notification_map:
self._do_heart_beat(namespace)
time.sleep(60 * 10) # 10 minutes
def _do_heart_beat(self, namespace: str) -> None:
def _do_heart_beat(self, namespace):
url = f"{self.config_url}/configs/{self.app_id}/{self.cluster}/{namespace}?ip={self.ip}"
try:
code, body = http_request(url, timeout=3, headers=self._sign_headers(url))
@@ -294,7 +292,7 @@ class ApolloClient:
logger.exception("an error occurred in _do_heart_beat")
return None
def get_all_dicts(self, namespace: str) -> dict[str, Any] | None:
def get_all_dicts(self, namespace):
namespace_data = self._cache.get(namespace)
if namespace_data is None:
net_namespace_data = self.get_json_from_net(namespace)
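Taken together, the get_value() path above walks a fallback chain: in-memory cache, then the network, then the local cache file, then the caller's default. A generic, simplified sketch of that pattern (all names below are illustrative):

from typing import Any, Callable, Optional

def cascade_lookup(
    key: str,
    sources: list[Callable[[str], Optional[Any]]],
    default: Any = None,
) -> Any:
    """Return the first non-None value any source yields for the key."""
    for source in sources:
        value = source(key)
        if value is not None:
            return value
    return default

memory: dict[str, Any] = {"timeout": 75}
print(cascade_lookup("timeout", [memory.get], default=30))  # 75, from the cache
print(cascade_lookup("missing", [memory.get], default=30))  # 30, the fallback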

View File

@@ -2,8 +2,6 @@ import logging
import os
import ssl
import urllib.request
from collections.abc import Mapping
from typing import Any
from urllib import parse
from urllib.error import HTTPError
@@ -21,9 +19,9 @@ urllib.request.install_opener(opener)
logger = logging.getLogger(__name__)
def http_request(url: str, timeout: int | float, headers: Mapping[str, str] = {}) -> tuple[int, str | None]:
def http_request(url, timeout, headers={}):
try:
request = urllib.request.Request(url, headers=dict(headers))
request = urllib.request.Request(url, headers=headers)
res = urllib.request.urlopen(request, timeout=timeout)
body = res.read().decode("utf-8")
return res.code, body
@@ -35,9 +33,9 @@ def http_request(url: str, timeout: int | float, headers: Mapping[str, str] = {}
raise e
def url_encode(params: dict[str, Any]) -> str:
def url_encode(params):
return parse.urlencode(params)
def makedirs_wrapper(path: str) -> None:
def makedirs_wrapper(path):
os.makedirs(path, exist_ok=True)
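One caveat the untyped signature above keeps: headers={} is a mutable default, so a single dict is shared by every call that omits the argument, and any mutation leaks into later calls. A minimal reproduction of the pitfall:

def http_request_sketch(url, headers={}):  # mutable default: one shared dict
    headers.setdefault("User-Agent", "demo")
    return headers

first = http_request_sketch("http://x")
first["Authorization"] = "Bearer t"     # caller mutates the shared dict...
print(http_request_sketch("http://y"))  # ...and the next call observes it:
# {'User-Agent': 'demo', 'Authorization': 'Bearer t'}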

View File

@@ -1,6 +1,5 @@
import hashlib
import socket
from typing import Any
from .python_3x import url_encode
@@ -11,7 +10,7 @@ NAMESPACE_NAME = "namespaceName"
# add timestamps uris and keys
def signature(timestamp: str, uri: str, secret: str) -> str:
def signature(timestamp, uri, secret):
import base64
import hmac
@@ -20,16 +19,16 @@ def signature(timestamp: str, uri: str, secret: str) -> str:
return base64.b64encode(hmac_code).decode()
def url_encode_wrapper(params: dict[str, Any]) -> str:
def url_encode_wrapper(params):
return url_encode(params)
def no_key_cache_key(namespace: str, key: str) -> str:
def no_key_cache_key(namespace, key):
return f"{namespace}{len(namespace)}{key}"
# Returns whether the obtained value is obtained, and None if it does not
def get_value_from_dict(namespace_cache: dict[str, Any] | None, key: str) -> Any:
def get_value_from_dict(namespace_cache, key):
if namespace_cache:
kv_data = namespace_cache.get(CONFIGURATIONS)
if kv_data is None:
@@ -39,7 +38,7 @@ def get_value_from_dict(namespace_cache: dict[str, Any] | None, key: str) -> Any
return None
def init_ip() -> str:
def init_ip():
ip = ""
s = None
try:

View File

@@ -11,5 +11,5 @@ class RemoteSettingsSource:
def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
raise NotImplementedError
def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool):
def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any:
return value

View File

@@ -11,16 +11,16 @@ logger = logging.getLogger(__name__)
from configs.remote_settings_sources.base import RemoteSettingsSource
from .utils import parse_config
from .utils import _parse_config
class NacosSettingsSource(RemoteSettingsSource):
def __init__(self, configs: Mapping[str, Any]):
self.configs = configs
self.remote_configs: dict[str, str] = {}
self.remote_configs: dict[str, Any] = {}
self.async_init()
def async_init(self) -> None:
def async_init(self):
data_id = os.getenv("DIFY_ENV_NACOS_DATA_ID", "dify-api-env.properties")
group = os.getenv("DIFY_ENV_NACOS_GROUP", "nacos-dify")
tenant = os.getenv("DIFY_ENV_NACOS_NAMESPACE", "")
@@ -33,15 +33,18 @@ class NacosSettingsSource(RemoteSettingsSource):
logger.exception("[get-access-token] exception occurred")
raise
def _parse_config(self, content: str) -> dict[str, str]:
def _parse_config(self, content: str) -> dict:
if not content:
return {}
try:
return parse_config(content)
return _parse_config(self, content)
except Exception as e:
raise RuntimeError(f"Failed to parse config: {e}")
def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
if not isinstance(self.remote_configs, dict):
raise ValueError(f"remote configs is not dict, but {type(self.remote_configs)}")
field_value = self.remote_configs.get(field_name)
if field_value is None:
return None, field_name, False

View File

@@ -17,17 +17,11 @@ class NacosHttpClient:
self.ak = os.getenv("DIFY_ENV_NACOS_ACCESS_KEY")
self.sk = os.getenv("DIFY_ENV_NACOS_SECRET_KEY")
self.server = os.getenv("DIFY_ENV_NACOS_SERVER_ADDR", "localhost:8848")
self.token: str | None = None
self.token = None
self.token_ttl = 18000
self.token_expire_time: float = 0
def http_request(
self, url: str, method: str = "GET", headers: dict[str, str] | None = None, params: dict[str, str] | None = None
) -> str:
if headers is None:
headers = {}
if params is None:
params = {}
def http_request(self, url, method="GET", headers=None, params=None):
try:
self._inject_auth_info(headers, params)
response = requests.request(method, url="http://" + self.server + url, headers=headers, params=params)
@@ -36,7 +30,7 @@ class NacosHttpClient:
except requests.RequestException as e:
return f"Request to Nacos failed: {e}"
def _inject_auth_info(self, headers: dict[str, str], params: dict[str, str], module: str = "config") -> None:
def _inject_auth_info(self, headers, params, module="config"):
headers.update({"User-Agent": "Nacos-Http-Client-In-Dify:v0.0.1"})
if module == "login":
@@ -51,17 +45,16 @@ class NacosHttpClient:
headers["timeStamp"] = ts
if self.username and self.password:
self.get_access_token(force_refresh=False)
if self.token is not None:
params["accessToken"] = self.token
params["accessToken"] = self.token
def __do_sign(self, sign_str: str, sk: str) -> str:
def __do_sign(self, sign_str, sk):
return (
base64.encodebytes(hmac.new(sk.encode(), sign_str.encode(), digestmod=hashlib.sha1).digest())
.decode()
.strip()
)
def get_sign_str(self, group: str, tenant: str, ts: str) -> str:
def get_sign_str(self, group, tenant, ts):
sign_str = ""
if tenant:
sign_str = tenant + "+"
@@ -70,7 +63,7 @@ class NacosHttpClient:
sign_str += ts # Directly concatenate ts without conditional checks, because the nacos auth header forced it.
return sign_str
def get_access_token(self, force_refresh: bool = False) -> str | None:
def get_access_token(self, force_refresh=False):
current_time = time.time()
if self.token and not force_refresh and self.token_expire_time > current_time:
return self.token
@ -84,7 +77,6 @@ class NacosHttpClient:
self.token = response_data.get("accessToken")
self.token_ttl = response_data.get("tokenTtl", 18000)
self.token_expire_time = current_time + self.token_ttl - 10
return self.token
except Exception:
logger.exception("[get-access-token] exception occur")
raise
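The signing helpers in this hunk are self-contained enough to exercise directly: get_sign_str concatenates tenant, group, and a timestamp with "+" separators, and __do_sign MACs that string with the secret key. A small sketch of the flow under made-up credentials; the Spas-* header names and millisecond timestamp are assumptions about the Nacos open API, not shown in this diff:

import base64
import hashlib
import hmac
import time

def do_sign(sign_str: str, sk: str) -> str:
    # Same HMAC-SHA1 + base64 scheme as NacosHttpClient.__do_sign above
    return (
        base64.encodebytes(hmac.new(sk.encode(), sign_str.encode(), digestmod=hashlib.sha1).digest())
        .decode()
        .strip()
    )

ts = str(int(time.time() * 1000))                        # assumed millisecond granularity
sign_str = "my-namespace" + "+" + "my-group" + "+" + ts  # tenant+group+ts, per get_sign_str
headers = {
    "timeStamp": ts,                                     # set unconditionally, as in the hunk above
    "Spas-AccessKey": "my-access-key",                   # assumed header name
    "Spas-Signature": do_sign(sign_str, "my-secret-key"),  # assumed header name
}
print(headers["Spas-Signature"])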

View File

@ -1,4 +1,4 @@
def parse_config(content: str) -> dict[str, str]:
def _parse_config(self, content: str) -> dict[str, str]:
config: dict[str, str] = {}
if not content:
return config
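Only the signature and empty-content guard of this parser appear in the hunk. For illustration, a plausible minimal body consistent with its dict[str, str] return type and the .properties files it is fed elsewhere in this diff; this is a sketch, not the file's actual implementation:

def parse_config(content: str) -> dict[str, str]:
    config: dict[str, str] = {}
    if not content:
        return config
    for line in content.splitlines():
        line = line.strip()
        if not line or line.startswith("#"):
            continue  # skip blanks and comments
        if "=" in line:
            key, _, value = line.partition("=")
            config[key.strip()] = value.strip()
    return config

print(parse_config("FILES_URL=http://localhost\n# comment\nLOG_LEVEL=INFO"))
# {'FILES_URL': 'http://localhost', 'LOG_LEVEL': 'INFO'}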

View File

@ -16,14 +16,14 @@ AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "amr", "mpga"]
AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS])
_doc_extensions: list[str]
if dify_config.ETL_TYPE == "Unstructured":
_doc_extensions = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "vtt", "properties"]
_doc_extensions.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "vtt", "properties"]
DOCUMENT_EXTENSIONS.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
if dify_config.UNSTRUCTURED_API_URL:
_doc_extensions.append("ppt")
DOCUMENT_EXTENSIONS.append("ppt")
DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])
else:
_doc_extensions = [
DOCUMENT_EXTENSIONS = [
"txt",
"markdown",
"md",
@ -38,4 +38,4 @@ else:
"vtt",
"properties",
]
DOCUMENT_EXTENSIONS = _doc_extensions + [ext.upper() for ext in _doc_extensions]
DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])
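Both sides of this file build a case-insensitive extension whitelist by appending upper-cased copies of every entry, so later checks stay plain membership tests. A compact sketch of the pattern with an abbreviated list:

DOCUMENT_EXTENSIONS = ["txt", "md", "pdf"]
DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])
# -> ['txt', 'md', 'pdf', 'TXT', 'MD', 'PDF']

def is_document(filename: str) -> bool:
    return filename.rsplit(".", 1)[-1] in DOCUMENT_EXTENSIONS

assert is_document("report.PDF") and not is_document("clip.mp4")

Note the duplication only covers all-lower and all-upper spellings; a mixed-case extension such as "Pdf" would still miss unless callers normalize first.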

View File

@ -7,7 +7,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
# workflow default mode
AppMode.WORKFLOW: {
"app": {
"mode": AppMode.WORKFLOW,
"mode": AppMode.WORKFLOW.value,
"enable_site": True,
"enable_api": True,
}
@ -15,7 +15,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
# completion default mode
AppMode.COMPLETION: {
"app": {
"mode": AppMode.COMPLETION,
"mode": AppMode.COMPLETION.value,
"enable_site": True,
"enable_api": True,
},
@ -44,7 +44,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
# chat default mode
AppMode.CHAT: {
"app": {
"mode": AppMode.CHAT,
"mode": AppMode.CHAT.value,
"enable_site": True,
"enable_api": True,
},
@ -60,7 +60,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
# advanced-chat default mode
AppMode.ADVANCED_CHAT: {
"app": {
"mode": AppMode.ADVANCED_CHAT,
"mode": AppMode.ADVANCED_CHAT.value,
"enable_site": True,
"enable_api": True,
},
@ -68,7 +68,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
# agent-chat default mode
AppMode.AGENT_CHAT: {
"app": {
"mode": AppMode.AGENT_CHAT,
"mode": AppMode.AGENT_CHAT.value,
"enable_site": True,
"enable_api": True,
},
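The repeated change in this file, AppMode.X to AppMode.X.value, stores the plain string rather than the enum member in the template dicts. If AppMode were a plain Enum, the member would not even survive JSON serialization; if it mixes in str, .value mainly makes the stored type explicit. A stand-in illustration:

import json
from enum import Enum

class AppMode(Enum):        # stand-in; the real AppMode lives in models
    WORKFLOW = "workflow"

print(json.dumps({"mode": AppMode.WORKFLOW.value}))   # {"mode": "workflow"}
# json.dumps({"mode": AppMode.WORKFLOW}) raises TypeError: Object of type AppMode is not JSON serializable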

View File

@ -3,11 +3,13 @@ from threading import Lock
from typing import TYPE_CHECKING
from contexts.wrapper import RecyclableContextVar
from core.datasource.__base.datasource_provider import DatasourcePluginProviderController
if TYPE_CHECKING:
from core.model_runtime.entities.model_entities import AIModelEntity
from core.plugin.entities.plugin_daemon import PluginModelProviderEntity
from core.tools.plugin_tool.provider import PluginToolProviderController
from core.workflow.entities.variable_pool import VariablePool
"""
@ -32,3 +34,11 @@ plugin_model_schema_lock: RecyclableContextVar[Lock] = RecyclableContextVar(Cont
plugin_model_schemas: RecyclableContextVar[dict[str, "AIModelEntity"]] = RecyclableContextVar(
ContextVar("plugin_model_schemas")
)
datasource_plugin_providers: RecyclableContextVar[dict[str, "DatasourcePluginProviderController"]] = (
RecyclableContextVar(ContextVar("datasource_plugin_providers"))
)
datasource_plugin_providers_lock: RecyclableContextVar[Lock] = RecyclableContextVar(
ContextVar("datasource_plugin_providers_lock")
)
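The two new context variables follow the same pattern as the plugin_model_schemas pair above: a per-context dict acting as a cache, plus a companion Lock to serialize its first population. RecyclableContextVar's API beyond the constructor is not shown in this hunk, so plain ContextVars stand in for it in this sketch:

from contextvars import ContextVar
from threading import Lock

providers_cache: ContextVar[dict] = ContextVar("providers_cache")
providers_lock: ContextVar[Lock] = ContextVar("providers_lock")

def load_provider(key: str) -> str:
    return f"provider:{key}"   # stub standing in for a plugin-daemon fetch

def get_provider(key: str) -> str:
    with providers_lock.get():          # serialize cache population
        cache = providers_cache.get()
        if key not in cache:
            cache[key] = load_provider(key)
        return cache[key]

providers_cache.set({})
providers_lock.set(Lock())
print(get_provider("notion"))           # provider:notion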

View File

@ -1,5 +1,4 @@
from flask import Blueprint
from flask_restx import Namespace
from libs.external_api import ExternalApi
@ -27,16 +26,7 @@ from .files import FileApi, FilePreviewApi, FileSupportTypeApi
from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi
bp = Blueprint("console", __name__, url_prefix="/console/api")
api = ExternalApi(
bp,
version="1.0",
title="Console API",
description="Console management APIs for app configuration, monitoring, and administration",
)
# Create namespace
console_ns = Namespace("console", description="Console management API operations", path="/")
api = ExternalApi(bp)
# File
api.add_resource(FileApi, "/files/upload")
@ -53,16 +43,7 @@ api.add_resource(AppImportConfirmApi, "/apps/imports/<string:import_id>/confirm"
api.add_resource(AppImportCheckDependenciesApi, "/apps/imports/<string:app_id>/check-dependencies")
# Import other controllers
from . import (
admin,
apikey,
extension,
feature,
init_validate,
ping,
setup,
version,
)
from . import admin, apikey, extension, feature, ping, setup, spec, version
# Import app controllers
from .app import (
@ -89,16 +70,7 @@ from .app import (
)
# Import auth controllers
from .auth import (
activate,
data_source_bearer_auth,
data_source_oauth,
email_register,
forgot_password,
login,
oauth,
oauth_server,
)
from .auth import activate, data_source_bearer_auth, data_source_oauth, forgot_password, login, oauth, oauth_server
# Import billing controllers
from .billing import billing, compliance
@ -114,6 +86,15 @@ from .datasets import (
metadata,
website,
)
from .datasets.rag_pipeline import (
datasource_auth,
datasource_content_preview,
rag_pipeline,
rag_pipeline_datasets,
rag_pipeline_draft_variable,
rag_pipeline_import,
rag_pipeline_workflow,
)
# Import explore controllers
from .explore import (
@ -123,23 +104,6 @@ from .explore import (
saved_message,
)
# Import tag controllers
from .tag import tags
# Import workspace controllers
from .workspace import (
account,
agent_providers,
endpoint,
load_balancing_config,
members,
model_providers,
models,
plugin,
tool_providers,
workspace,
)
# Explore Audio
api.add_resource(ChatAudioApi, "/installed-apps/<uuid:installed_app_id>/audio-to-text", endpoint="installed_app_audio")
api.add_resource(ChatTextApi, "/installed-apps/<uuid:installed_app_id>/text-to-audio", endpoint="installed_app_text")
@ -211,71 +175,19 @@ api.add_resource(
InstalledAppWorkflowTaskStopApi, "/installed-apps/<uuid:installed_app_id>/workflows/tasks/<string:task_id>/stop"
)
api.add_namespace(console_ns)
# Import tag controllers
from .tag import tags
__all__ = [
"account",
"activate",
"admin",
"advanced_prompt_template",
"agent",
"agent_providers",
"annotation",
"api",
"apikey",
"app",
"audio",
"billing",
"bp",
"completion",
"compliance",
"console_ns",
"conversation",
"conversation_variables",
"data_source",
"data_source_bearer_auth",
"data_source_oauth",
"datasets",
"datasets_document",
"datasets_segments",
"email_register",
"endpoint",
"extension",
"external",
"feature",
"forgot_password",
"generator",
"hit_testing",
"init_validate",
"installed_app",
"load_balancing_config",
"login",
"mcp_server",
"members",
"message",
"metadata",
"model_config",
"model_providers",
"models",
"oauth",
"oauth_server",
"ops_trace",
"parameter",
"ping",
"plugin",
"recommended_app",
"saved_message",
"setup",
"site",
"statistic",
"tags",
"tool_providers",
"version",
"website",
"workflow",
"workflow_app_log",
"workflow_draft_variable",
"workflow_run",
"workflow_statistic",
"workspace",
]
# Import workspace controllers
from .workspace import (
account,
agent_providers,
endpoint,
load_balancing_config,
members,
model_providers,
models,
plugin,
tool_providers,
workspace,
)
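This file switches between two flask_restx registration styles: explicit api.add_resource(Resource, path) calls versus @console_ns.route decorators collected by a single api.add_namespace(console_ns). ExternalApi appears to wrap flask_restx's Api, so a minimal sketch with the stock classes shows the wiring; names mirror the diff but the example is illustrative:

from flask import Blueprint, Flask
from flask_restx import Api, Namespace, Resource

bp = Blueprint("console", __name__, url_prefix="/console/api")
api = Api(bp)
console_ns = Namespace("console", description="Console management API operations", path="/")

@console_ns.route("/ping")              # namespace style, as on one side of the diff
class Ping(Resource):
    def get(self):
        return {"result": "pong"}

api.add_namespace(console_ns)

class Pong(Resource):
    def get(self):
        return {"result": "ping"}

api.add_resource(Pong, "/pong")         # add_resource style, as on the other side

app = Flask(__name__)
app.register_blueprint(bp)
# GET /console/api/ping -> {"result": "pong"}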

View File

@ -1,26 +1,22 @@
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar
from flask import request
from flask_restx import Resource, fields, reqparse
from flask_restx import Resource, reqparse
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound, Unauthorized
P = ParamSpec("P")
R = TypeVar("R")
from configs import dify_config
from constants.languages import supported_language
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.wraps import only_edition_cloud
from extensions.ext_database import db
from models.model import App, InstalledApp, RecommendedApp
def admin_required(view: Callable[P, R]):
def admin_required(view):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
def decorated(*args, **kwargs):
if not dify_config.ADMIN_API_KEY:
raise Unauthorized("API key is invalid.")
@ -45,28 +41,7 @@ def admin_required(view: Callable[P, R]):
return decorated
@console_ns.route("/admin/insert-explore-apps")
class InsertExploreAppListApi(Resource):
@api.doc("insert_explore_app")
@api.doc(description="Insert or update an app in the explore list")
@api.expect(
api.model(
"InsertExploreAppRequest",
{
"app_id": fields.String(required=True, description="Application ID"),
"desc": fields.String(description="App description"),
"copyright": fields.String(description="Copyright information"),
"privacy_policy": fields.String(description="Privacy policy"),
"custom_disclaimer": fields.String(description="Custom disclaimer"),
"language": fields.String(required=True, description="Language code"),
"category": fields.String(required=True, description="App category"),
"position": fields.Integer(required=True, description="Display position"),
},
)
)
@api.response(200, "App updated successfully")
@api.response(201, "App inserted successfully")
@api.response(404, "App not found")
@only_edition_cloud
@admin_required
def post(self):
@ -136,12 +111,7 @@ class InsertExploreAppListApi(Resource):
return {"result": "success"}, 200
@console_ns.route("/admin/insert-explore-apps/<uuid:app_id>")
class InsertExploreAppApi(Resource):
@api.doc("delete_explore_app")
@api.doc(description="Remove an app from the explore list")
@api.doc(params={"app_id": "Application ID to remove"})
@api.response(204, "App removed successfully")
@only_edition_cloud
@admin_required
def delete(self, app_id):
@ -178,3 +148,7 @@ class InsertExploreAppApi(Resource):
db.session.commit()
return {"result": "success"}, 204
api.add_resource(InsertExploreAppListApi, "/admin/insert-explore-apps")
api.add_resource(InsertExploreAppApi, "/admin/insert-explore-apps/<uuid:app_id>")
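admin_required gates the explore-app endpoints behind a static admin key; the hunk shows only its signature and the first guard. A sketch of the usual shape of such a decorator, with a hypothetical bearer-token comparison standing in for the elided body:

from functools import wraps
from flask import request
from werkzeug.exceptions import Unauthorized

ADMIN_API_KEY = "change-me"            # stands in for dify_config.ADMIN_API_KEY

def admin_required(view):
    @wraps(view)
    def decorated(*args, **kwargs):
        if not ADMIN_API_KEY:
            raise Unauthorized("API key is invalid.")
        auth = request.headers.get("Authorization", "")
        scheme, _, token = auth.partition(" ")
        if scheme.lower() != "bearer" or token != ADMIN_API_KEY:  # hypothetical check
            raise Unauthorized("API key is invalid.")
        return view(*args, **kwargs)
    return decorated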

View File

@ -1,7 +1,8 @@
from typing import Any, Optional
import flask_restx
from flask_login import current_user
from flask_restx import Resource, fields, marshal_with
from flask_restx._http import HTTPStatus
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
@ -12,7 +13,7 @@ from libs.login import login_required
from models.dataset import Dataset
from models.model import ApiToken, App
from . import api, console_ns
from . import api
from .wraps import account_initialization_required, setup_required
api_key_fields = {
@ -39,7 +40,7 @@ def _get_resource(resource_id, tenant_id, resource_model):
).scalar_one_or_none()
if resource is None:
flask_restx.abort(HTTPStatus.NOT_FOUND, message=f"{resource_model.__name__} not found.")
flask_restx.abort(404, message=f"{resource_model.__name__} not found.")
return resource
@ -48,7 +49,7 @@ class BaseApiKeyListResource(Resource):
method_decorators = [account_initialization_required, login_required, setup_required]
resource_type: str | None = None
resource_model: type | None = None
resource_model: Optional[Any] = None
resource_id_field: str | None = None
token_prefix: str | None = None
max_keys = 10
@ -58,11 +59,11 @@ class BaseApiKeyListResource(Resource):
assert self.resource_id_field is not None, "resource_id_field must be set"
resource_id = str(resource_id)
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
keys = db.session.scalars(
select(ApiToken).where(
ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id
)
).all()
keys = (
db.session.query(ApiToken)
.where(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id)
.all()
)
return {"items": keys}
@marshal_with(api_key_fields)
@ -81,12 +82,12 @@ class BaseApiKeyListResource(Resource):
if current_key_count >= self.max_keys:
flask_restx.abort(
HTTPStatus.BAD_REQUEST,
400,
message=f"Cannot create more than {self.max_keys} API keys for this resource type.",
custom="max_keys_exceeded",
)
key = ApiToken.generate_api_key(self.token_prefix or "", 24)
key = ApiToken.generate_api_key(self.token_prefix, 24)
api_token = ApiToken()
setattr(api_token, self.resource_id_field, resource_id)
api_token.tenant_id = current_user.current_tenant_id
@ -101,7 +102,7 @@ class BaseApiKeyResource(Resource):
method_decorators = [account_initialization_required, login_required, setup_required]
resource_type: str | None = None
resource_model: type | None = None
resource_model: Optional[Any] = None
resource_id_field: str | None = None
def delete(self, resource_id, api_key_id):
@ -125,7 +126,7 @@ class BaseApiKeyResource(Resource):
)
if key is None:
flask_restx.abort(HTTPStatus.NOT_FOUND, message="API key not found")
flask_restx.abort(404, message="API key not found")
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
db.session.commit()
@ -133,25 +134,7 @@ class BaseApiKeyResource(Resource):
return {"result": "success"}, 204
@console_ns.route("/apps/<uuid:resource_id>/api-keys")
class AppApiKeyListResource(BaseApiKeyListResource):
@api.doc("get_app_api_keys")
@api.doc(description="Get all API keys for an app")
@api.doc(params={"resource_id": "App ID"})
@api.response(200, "Success", api_key_list)
def get(self, resource_id):
"""Get all API keys for an app"""
return super().get(resource_id)
@api.doc("create_app_api_key")
@api.doc(description="Create a new API key for an app")
@api.doc(params={"resource_id": "App ID"})
@api.response(201, "API key created successfully", api_key_fields)
@api.response(400, "Maximum keys exceeded")
def post(self, resource_id):
"""Create a new API key for an app"""
return super().post(resource_id)
def after_request(self, resp):
resp.headers["Access-Control-Allow-Origin"] = "*"
resp.headers["Access-Control-Allow-Credentials"] = "true"
@ -163,16 +146,7 @@ class AppApiKeyListResource(BaseApiKeyListResource):
token_prefix = "app-"
@console_ns.route("/apps/<uuid:resource_id>/api-keys/<uuid:api_key_id>")
class AppApiKeyResource(BaseApiKeyResource):
@api.doc("delete_app_api_key")
@api.doc(description="Delete an API key for an app")
@api.doc(params={"resource_id": "App ID", "api_key_id": "API key ID"})
@api.response(204, "API key deleted successfully")
def delete(self, resource_id, api_key_id):
"""Delete an API key for an app"""
return super().delete(resource_id, api_key_id)
def after_request(self, resp):
resp.headers["Access-Control-Allow-Origin"] = "*"
resp.headers["Access-Control-Allow-Credentials"] = "true"
@ -183,25 +157,7 @@ class AppApiKeyResource(BaseApiKeyResource):
resource_id_field = "app_id"
@console_ns.route("/datasets/<uuid:resource_id>/api-keys")
class DatasetApiKeyListResource(BaseApiKeyListResource):
@api.doc("get_dataset_api_keys")
@api.doc(description="Get all API keys for a dataset")
@api.doc(params={"resource_id": "Dataset ID"})
@api.response(200, "Success", api_key_list)
def get(self, resource_id):
"""Get all API keys for a dataset"""
return super().get(resource_id)
@api.doc("create_dataset_api_key")
@api.doc(description="Create a new API key for a dataset")
@api.doc(params={"resource_id": "Dataset ID"})
@api.response(201, "API key created successfully", api_key_fields)
@api.response(400, "Maximum keys exceeded")
def post(self, resource_id):
"""Create a new API key for a dataset"""
return super().post(resource_id)
def after_request(self, resp):
resp.headers["Access-Control-Allow-Origin"] = "*"
resp.headers["Access-Control-Allow-Credentials"] = "true"
@ -213,16 +169,7 @@ class DatasetApiKeyListResource(BaseApiKeyListResource):
token_prefix = "ds-"
@console_ns.route("/datasets/<uuid:resource_id>/api-keys/<uuid:api_key_id>")
class DatasetApiKeyResource(BaseApiKeyResource):
@api.doc("delete_dataset_api_key")
@api.doc(description="Delete an API key for a dataset")
@api.doc(params={"resource_id": "Dataset ID", "api_key_id": "API key ID"})
@api.response(204, "API key deleted successfully")
def delete(self, resource_id, api_key_id):
"""Delete an API key for a dataset"""
return super().delete(resource_id, api_key_id)
def after_request(self, resp):
resp.headers["Access-Control-Allow-Origin"] = "*"
resp.headers["Access-Control-Allow-Credentials"] = "true"
@ -231,3 +178,9 @@ class DatasetApiKeyResource(BaseApiKeyResource):
resource_type = "dataset"
resource_model = Dataset
resource_id_field = "dataset_id"
api.add_resource(AppApiKeyListResource, "/apps/<uuid:resource_id>/api-keys")
api.add_resource(AppApiKeyResource, "/apps/<uuid:resource_id>/api-keys/<uuid:api_key_id>")
api.add_resource(DatasetApiKeyListResource, "/datasets/<uuid:resource_id>/api-keys")
api.add_resource(DatasetApiKeyResource, "/datasets/<uuid:resource_id>/api-keys/<uuid:api_key_id>")
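One hunk in this file trades the legacy Query API for a 2.0-style select(); both return the same ORM rows, with scalars() unwrapping the single-entity result tuples. A self-contained comparison against an in-memory SQLite database (model abbreviated to the fields the hunk touches):

from sqlalchemy import String, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class ApiToken(Base):
    __tablename__ = "api_tokens"
    id: Mapped[int] = mapped_column(primary_key=True)
    type: Mapped[str] = mapped_column(String(16))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([ApiToken(type="app"), ApiToken(type="dataset")])
    session.commit()
    # 2.0 style, as on one side of the hunk
    keys = session.scalars(select(ApiToken).where(ApiToken.type == "app")).all()
    # legacy Query style, as on the other side; same rows back
    legacy = session.query(ApiToken).where(ApiToken.type == "app").all()
    assert [k.id for k in keys] == [k.id for k in legacy]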

View File

@ -1,26 +1,12 @@
from flask_restx import Resource, fields, reqparse
from flask_restx import Resource, reqparse
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required
from services.advanced_prompt_template_service import AdvancedPromptTemplateService
@console_ns.route("/app/prompt-templates")
class AdvancedPromptTemplateList(Resource):
@api.doc("get_advanced_prompt_templates")
@api.doc(description="Get advanced prompt templates based on app mode and model configuration")
@api.expect(
api.parser()
.add_argument("app_mode", type=str, required=True, location="args", help="Application mode")
.add_argument("model_mode", type=str, required=True, location="args", help="Model mode")
.add_argument("has_context", type=str, default="true", location="args", help="Whether has context")
.add_argument("model_name", type=str, required=True, location="args", help="Model name")
)
@api.response(
200, "Prompt templates retrieved successfully", fields.List(fields.Raw(description="Prompt template data"))
)
@api.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@ -33,3 +19,6 @@ class AdvancedPromptTemplateList(Resource):
args = parser.parse_args()
return AdvancedPromptTemplateService.get_prompt(args)
api.add_resource(AdvancedPromptTemplateList, "/app/prompt-templates")

View File

@ -1,6 +1,6 @@
from flask_restx import Resource, fields, reqparse
from flask_restx import Resource, reqparse
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from libs.helper import uuid_value
@ -9,18 +9,7 @@ from models.model import AppMode
from services.agent_service import AgentService
@console_ns.route("/apps/<uuid:app_id>/agent/logs")
class AgentLogApi(Resource):
@api.doc("get_agent_logs")
@api.doc(description="Get agent execution logs for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("message_id", type=str, required=True, location="args", help="Message UUID")
.add_argument("conversation_id", type=str, required=True, location="args", help="Conversation UUID")
)
@api.response(200, "Agent logs retrieved successfully", fields.List(fields.Raw(description="Agent log entries")))
@api.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@ -34,3 +23,6 @@ class AgentLogApi(Resource):
args = parser.parse_args()
return AgentService.get_agent_logs(app_model, args["conversation_id"], args["message_id"])
api.add_resource(AgentLogApi, "/apps/<uuid:app_id>/agent/logs")

View File

@ -2,11 +2,11 @@ from typing import Literal
from flask import request
from flask_login import current_user
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
from flask_restx import Resource, marshal, marshal_with, reqparse
from werkzeug.exceptions import Forbidden
from controllers.common.errors import NoFileUploadedError, TooManyFilesError
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_resource_check,
@ -21,23 +21,7 @@ from libs.login import login_required
from services.annotation_service import AppAnnotationService
@console_ns.route("/apps/<uuid:app_id>/annotation-reply/<string:action>")
class AnnotationReplyActionApi(Resource):
@api.doc("annotation_reply_action")
@api.doc(description="Enable or disable annotation reply for an app")
@api.doc(params={"app_id": "Application ID", "action": "Action to perform (enable/disable)"})
@api.expect(
api.model(
"AnnotationReplyActionRequest",
{
"score_threshold": fields.Float(required=True, description="Score threshold for annotation matching"),
"embedding_provider_name": fields.String(required=True, description="Embedding provider name"),
"embedding_model_name": fields.String(required=True, description="Embedding model name"),
},
)
)
@api.response(200, "Action completed successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -59,13 +43,7 @@ class AnnotationReplyActionApi(Resource):
return result, 200
@console_ns.route("/apps/<uuid:app_id>/annotation-setting")
class AppAnnotationSettingDetailApi(Resource):
@api.doc("get_annotation_setting")
@api.doc(description="Get annotation settings for an app")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Annotation settings retrieved successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -78,23 +56,7 @@ class AppAnnotationSettingDetailApi(Resource):
return result, 200
@console_ns.route("/apps/<uuid:app_id>/annotation-settings/<uuid:annotation_setting_id>")
class AppAnnotationSettingUpdateApi(Resource):
@api.doc("update_annotation_setting")
@api.doc(description="Update annotation settings for an app")
@api.doc(params={"app_id": "Application ID", "annotation_setting_id": "Annotation setting ID"})
@api.expect(
api.model(
"AnnotationSettingUpdateRequest",
{
"score_threshold": fields.Float(required=True, description="Score threshold"),
"embedding_provider_name": fields.String(required=True, description="Embedding provider"),
"embedding_model_name": fields.String(required=True, description="Embedding model"),
},
)
)
@api.response(200, "Settings updated successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -113,13 +75,7 @@ class AppAnnotationSettingUpdateApi(Resource):
return result, 200
@console_ns.route("/apps/<uuid:app_id>/annotation-reply/<string:action>/status/<uuid:job_id>")
class AnnotationReplyActionStatusApi(Resource):
@api.doc("get_annotation_reply_action_status")
@api.doc(description="Get status of annotation reply action job")
@api.doc(params={"app_id": "Application ID", "job_id": "Job ID", "action": "Action type"})
@api.response(200, "Job status retrieved successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -143,19 +99,7 @@ class AnnotationReplyActionStatusApi(Resource):
return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200
@console_ns.route("/apps/<uuid:app_id>/annotations")
class AnnotationApi(Resource):
@api.doc("list_annotations")
@api.doc(description="Get annotations for an app with pagination")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("page", type=int, location="args", default=1, help="Page number")
.add_argument("limit", type=int, location="args", default=20, help="Page size")
.add_argument("keyword", type=str, location="args", default="", help="Search keyword")
)
@api.response(200, "Annotations retrieved successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -178,21 +122,6 @@ class AnnotationApi(Resource):
}
return response, 200
@api.doc("create_annotation")
@api.doc(description="Create a new annotation for an app")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"CreateAnnotationRequest",
{
"question": fields.String(required=True, description="Question text"),
"answer": fields.String(required=True, description="Answer text"),
"annotation_reply": fields.Raw(description="Annotation reply data"),
},
)
)
@api.response(201, "Annotation created successfully", annotation_fields)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -239,13 +168,7 @@ class AnnotationApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/apps/<uuid:app_id>/annotations/export")
class AnnotationExportApi(Resource):
@api.doc("export_annotations")
@api.doc(description="Export all annotations for an app")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Annotations exported successfully", fields.List(fields.Nested(annotation_fields)))
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -259,14 +182,7 @@ class AnnotationExportApi(Resource):
return response, 200
@console_ns.route("/apps/<uuid:app_id>/annotations/<uuid:annotation_id>")
class AnnotationUpdateDeleteApi(Resource):
@api.doc("update_delete_annotation")
@api.doc(description="Update or delete an annotation")
@api.doc(params={"app_id": "Application ID", "annotation_id": "Annotation ID"})
@api.response(200, "Annotation updated successfully", annotation_fields)
@api.response(204, "Annotation deleted successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -298,14 +214,7 @@ class AnnotationUpdateDeleteApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/apps/<uuid:app_id>/annotations/batch-import")
class AnnotationBatchImportApi(Resource):
@api.doc("batch_import_annotations")
@api.doc(description="Batch import annotations from CSV file")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Batch import started successfully")
@api.response(403, "Insufficient permissions")
@api.response(400, "No file uploaded or too many files")
@setup_required
@login_required
@account_initialization_required
@ -330,13 +239,7 @@ class AnnotationBatchImportApi(Resource):
return AppAnnotationService.batch_import_app_annotations(app_id, file)
@console_ns.route("/apps/<uuid:app_id>/annotations/batch-import-status/<uuid:job_id>")
class AnnotationBatchImportStatusApi(Resource):
@api.doc("get_batch_import_status")
@api.doc(description="Get status of batch import job")
@api.doc(params={"app_id": "Application ID", "job_id": "Job ID"})
@api.response(200, "Job status retrieved successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -359,20 +262,7 @@ class AnnotationBatchImportStatusApi(Resource):
return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200
@console_ns.route("/apps/<uuid:app_id>/annotations/<uuid:annotation_id>/hit-histories")
class AnnotationHitHistoryListApi(Resource):
@api.doc("list_annotation_hit_histories")
@api.doc(description="Get hit histories for an annotation")
@api.doc(params={"app_id": "Application ID", "annotation_id": "Annotation ID"})
@api.expect(
api.parser()
.add_argument("page", type=int, location="args", default=1, help="Page number")
.add_argument("limit", type=int, location="args", default=20, help="Page size")
)
@api.response(
200, "Hit histories retrieved successfully", fields.List(fields.Nested(annotation_hit_history_fields))
)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -395,3 +285,17 @@ class AnnotationHitHistoryListApi(Resource):
"page": page,
}
return response
api.add_resource(AnnotationReplyActionApi, "/apps/<uuid:app_id>/annotation-reply/<string:action>")
api.add_resource(
AnnotationReplyActionStatusApi, "/apps/<uuid:app_id>/annotation-reply/<string:action>/status/<uuid:job_id>"
)
api.add_resource(AnnotationApi, "/apps/<uuid:app_id>/annotations")
api.add_resource(AnnotationExportApi, "/apps/<uuid:app_id>/annotations/export")
api.add_resource(AnnotationUpdateDeleteApi, "/apps/<uuid:app_id>/annotations/<uuid:annotation_id>")
api.add_resource(AnnotationBatchImportApi, "/apps/<uuid:app_id>/annotations/batch-import")
api.add_resource(AnnotationBatchImportStatusApi, "/apps/<uuid:app_id>/annotations/batch-import-status/<uuid:job_id>")
api.add_resource(AnnotationHitHistoryListApi, "/apps/<uuid:app_id>/annotations/<uuid:annotation_id>/hit-histories")
api.add_resource(AppAnnotationSettingDetailApi, "/apps/<uuid:app_id>/annotation-setting")
api.add_resource(AppAnnotationSettingUpdateApi, "/apps/<uuid:app_id>/annotation-settings/<uuid:annotation_setting_id>")
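The @api.doc/@api.expect/@api.response decorators added and removed throughout this file only shape the generated OpenAPI document; routing and behavior come from @console_ns.route and the method bodies. A compact sketch of the annotation pattern on one endpoint (Namespace exposes the same doc helpers as Api; the model is abbreviated):

from flask_restx import Namespace, Resource, fields

console_ns = Namespace("console", path="/")

create_annotation = console_ns.model(
    "CreateAnnotationRequest",
    {
        "question": fields.String(required=True, description="Question text"),
        "answer": fields.String(required=True, description="Answer text"),
    },
)

@console_ns.route("/apps/<uuid:app_id>/annotations")
class AnnotationApi(Resource):
    @console_ns.doc("create_annotation", description="Create a new annotation for an app")
    @console_ns.expect(create_annotation)
    @console_ns.response(201, "Annotation created successfully")
    def post(self, app_id):
        return {"result": "success"}, 201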

View File

@ -2,12 +2,12 @@ import uuid
from typing import cast
from flask_login import current_user
from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse
from flask_restx import Resource, inputs, marshal, marshal_with, reqparse
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import BadRequest, Forbidden, abort
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import (
account_initialization_required,
@ -34,27 +34,7 @@ def _validate_description_length(description):
return description
@console_ns.route("/apps")
class AppListApi(Resource):
@api.doc("list_apps")
@api.doc(description="Get list of applications with pagination and filtering")
@api.expect(
api.parser()
.add_argument("page", type=int, location="args", help="Page number (1-99999)", default=1)
.add_argument("limit", type=int, location="args", help="Page size (1-100)", default=20)
.add_argument(
"mode",
type=str,
location="args",
choices=["completion", "chat", "advanced-chat", "workflow", "agent-chat", "channel", "all"],
default="all",
help="App mode filter",
)
.add_argument("name", type=str, location="args", help="Filter by app name")
.add_argument("tag_ids", type=str, location="args", help="Comma-separated tag IDs")
.add_argument("is_created_by_me", type=bool, location="args", help="Filter by creator")
)
@api.response(200, "Success", app_pagination_fields)
@setup_required
@login_required
@account_initialization_required
@ -111,24 +91,6 @@ class AppListApi(Resource):
return marshal(app_pagination, app_pagination_fields), 200
@api.doc("create_app")
@api.doc(description="Create a new application")
@api.expect(
api.model(
"CreateAppRequest",
{
"name": fields.String(required=True, description="App name"),
"description": fields.String(description="App description (max 400 chars)"),
"mode": fields.String(required=True, enum=ALLOW_CREATE_APP_MODES, description="App mode"),
"icon_type": fields.String(description="Icon type"),
"icon": fields.String(description="Icon"),
"icon_background": fields.String(description="Icon background color"),
},
)
)
@api.response(201, "App created successfully", app_detail_fields)
@api.response(403, "Insufficient permissions")
@api.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@ -153,21 +115,12 @@ class AppListApi(Resource):
raise BadRequest("mode is required")
app_service = AppService()
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
if current_user.current_tenant_id is None:
raise ValueError("current_user.current_tenant_id cannot be None")
app = app_service.create_app(current_user.current_tenant_id, args, current_user)
return app, 201
@console_ns.route("/apps/<uuid:app_id>")
class AppApi(Resource):
@api.doc("get_app_detail")
@api.doc(description="Get application details")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Success", app_detail_fields_with_site)
@setup_required
@login_required
@account_initialization_required
@ -186,26 +139,6 @@ class AppApi(Resource):
return app_model
@api.doc("update_app")
@api.doc(description="Update application details")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"UpdateAppRequest",
{
"name": fields.String(required=True, description="App name"),
"description": fields.String(description="App description (max 400 chars)"),
"icon_type": fields.String(description="Icon type"),
"icon": fields.String(description="Icon"),
"icon_background": fields.String(description="Icon background color"),
"use_icon_as_answer_icon": fields.Boolean(description="Use icon as answer icon"),
"max_active_requests": fields.Integer(description="Maximum active requests"),
},
)
)
@api.response(200, "App updated successfully", app_detail_fields_with_site)
@api.response(403, "Insufficient permissions")
@api.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@ -228,31 +161,14 @@ class AppApi(Resource):
args = parser.parse_args()
app_service = AppService()
# Construct ArgsDict from parsed arguments
from services.app_service import AppService as AppServiceType
args_dict: AppServiceType.ArgsDict = {
"name": args["name"],
"description": args.get("description", ""),
"icon_type": args.get("icon_type", ""),
"icon": args.get("icon", ""),
"icon_background": args.get("icon_background", ""),
"use_icon_as_answer_icon": args.get("use_icon_as_answer_icon", False),
"max_active_requests": args.get("max_active_requests", 0),
}
app_model = app_service.update_app(app_model, args_dict)
app_model = app_service.update_app(app_model, args)
return app_model
@api.doc("delete_app")
@api.doc(description="Delete application")
@api.doc(params={"app_id": "Application ID"})
@api.response(204, "App deleted successfully")
@api.response(403, "Insufficient permissions")
@get_app_model
@setup_required
@login_required
@account_initialization_required
@get_app_model
def delete(self, app_model):
"""Delete app"""
# The role of the current user in the ta table must be admin, owner, or editor
@ -265,25 +181,7 @@ class AppApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/apps/<uuid:app_id>/copy")
class AppCopyApi(Resource):
@api.doc("copy_app")
@api.doc(description="Create a copy of an existing application")
@api.doc(params={"app_id": "Application ID to copy"})
@api.expect(
api.model(
"CopyAppRequest",
{
"name": fields.String(description="Name for the copied app"),
"description": fields.String(description="Description for the copied app"),
"icon_type": fields.String(description="Icon type"),
"icon": fields.String(description="Icon"),
"icon_background": fields.String(description="Icon background color"),
},
)
)
@api.response(201, "App copied successfully", app_detail_fields_with_site)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -325,26 +223,11 @@ class AppCopyApi(Resource):
return app, 201
@console_ns.route("/apps/<uuid:app_id>/export")
class AppExportApi(Resource):
@api.doc("export_app")
@api.doc(description="Export application configuration as DSL")
@api.doc(params={"app_id": "Application ID to export"})
@api.expect(
api.parser()
.add_argument("include_secret", type=bool, location="args", default=False, help="Include secrets in export")
.add_argument("workflow_id", type=str, location="args", help="Specific workflow ID to export")
)
@api.response(
200,
"App exported successfully",
api.model("AppExportResponse", {"data": fields.String(description="DSL export data")}),
)
@api.response(403, "Insufficient permissions")
@get_app_model
@setup_required
@login_required
@account_initialization_required
@get_app_model
def get(self, app_model):
"""Export app"""
# The role of the current user in the ta table must be admin, owner, or editor
@ -364,13 +247,7 @@ class AppExportApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/name")
class AppNameApi(Resource):
@api.doc("check_app_name")
@api.doc(description="Check if app name is available")
@api.doc(params={"app_id": "Application ID"})
@api.expect(api.parser().add_argument("name", type=str, required=True, location="args", help="Name to check"))
@api.response(200, "Name availability checked")
@setup_required
@login_required
@account_initialization_required
@ -386,28 +263,12 @@ class AppNameApi(Resource):
args = parser.parse_args()
app_service = AppService()
app_model = app_service.update_app_name(app_model, args["name"])
app_model = app_service.update_app_name(app_model, args.get("name"))
return app_model
@console_ns.route("/apps/<uuid:app_id>/icon")
class AppIconApi(Resource):
@api.doc("update_app_icon")
@api.doc(description="Update application icon")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"AppIconRequest",
{
"icon": fields.String(required=True, description="Icon data"),
"icon_type": fields.String(description="Icon type"),
"icon_background": fields.String(description="Icon background color"),
},
)
)
@api.response(200, "Icon updated successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -424,23 +285,12 @@ class AppIconApi(Resource):
args = parser.parse_args()
app_service = AppService()
app_model = app_service.update_app_icon(app_model, args.get("icon") or "", args.get("icon_background") or "")
app_model = app_service.update_app_icon(app_model, args.get("icon"), args.get("icon_background"))
return app_model
@console_ns.route("/apps/<uuid:app_id>/site-enable")
class AppSiteStatus(Resource):
@api.doc("update_app_site_status")
@api.doc(description="Enable or disable app site")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"AppSiteStatusRequest", {"enable_site": fields.Boolean(required=True, description="Enable or disable site")}
)
)
@api.response(200, "Site status updated successfully", app_detail_fields)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -456,23 +306,12 @@ class AppSiteStatus(Resource):
args = parser.parse_args()
app_service = AppService()
app_model = app_service.update_app_site_status(app_model, args["enable_site"])
app_model = app_service.update_app_site_status(app_model, args.get("enable_site"))
return app_model
@console_ns.route("/apps/<uuid:app_id>/api-enable")
class AppApiStatus(Resource):
@api.doc("update_app_api_status")
@api.doc(description="Enable or disable app API")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"AppApiStatusRequest", {"enable_api": fields.Boolean(required=True, description="Enable or disable API")}
)
)
@api.response(200, "API status updated successfully", app_detail_fields)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -488,17 +327,12 @@ class AppApiStatus(Resource):
args = parser.parse_args()
app_service = AppService()
app_model = app_service.update_app_api_status(app_model, args["enable_api"])
app_model = app_service.update_app_api_status(app_model, args.get("enable_api"))
return app_model
@console_ns.route("/apps/<uuid:app_id>/trace")
class AppTraceApi(Resource):
@api.doc("get_app_trace")
@api.doc(description="Get app tracing configuration")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Trace configuration retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@ -508,20 +342,6 @@ class AppTraceApi(Resource):
return app_trace_config
@api.doc("update_app_trace")
@api.doc(description="Update app tracing configuration")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"AppTraceRequest",
{
"enabled": fields.Boolean(required=True, description="Enable or disable tracing"),
"tracing_provider": fields.String(required=True, description="Tracing provider"),
},
)
)
@api.response(200, "Trace configuration updated successfully")
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -541,3 +361,14 @@ class AppTraceApi(Resource):
)
return {"result": "success"}
api.add_resource(AppListApi, "/apps")
api.add_resource(AppApi, "/apps/<uuid:app_id>")
api.add_resource(AppCopyApi, "/apps/<uuid:app_id>/copy")
api.add_resource(AppExportApi, "/apps/<uuid:app_id>/export")
api.add_resource(AppNameApi, "/apps/<uuid:app_id>/name")
api.add_resource(AppIconApi, "/apps/<uuid:app_id>/icon")
api.add_resource(AppSiteStatus, "/apps/<uuid:app_id>/site-enable")
api.add_resource(AppApiStatus, "/apps/<uuid:app_id>/api-enable")
api.add_resource(AppTraceApi, "/apps/<uuid:app_id>/trace")
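Nearly every handler in this file funnels input through reqparse.RequestParser, whose parse_args() result supports both dict-style and .get access; that is why one side of the hunks can hand args straight to the service while the other first rebuilds a typed ArgsDict. Minimal usage sketch inside a test request context:

from flask import Flask
from flask_restx import reqparse

app = Flask(__name__)
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, location="json")
parser.add_argument("max_active_requests", type=int, default=0, location="json")

with app.test_request_context(json={"name": "demo"}):
    args = parser.parse_args()
    print(args["name"], args.get("max_active_requests"))   # demo 0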

View File

@ -1,11 +1,11 @@
import logging
from flask import request
from flask_restx import Resource, fields, reqparse
from flask_restx import Resource, reqparse
from werkzeug.exceptions import InternalServerError
import services
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.error import (
AppUnavailableError,
AudioTooLargeError,
@ -34,18 +34,7 @@ from services.errors.audio import (
logger = logging.getLogger(__name__)
@console_ns.route("/apps/<uuid:app_id>/audio-to-text")
class ChatMessageAudioApi(Resource):
@api.doc("chat_message_audio_transcript")
@api.doc(description="Transcript audio to text for chat messages")
@api.doc(params={"app_id": "App ID"})
@api.response(
200,
"Audio transcription successful",
api.model("AudioTranscriptResponse", {"text": fields.String(description="Transcribed text from audio")}),
)
@api.response(400, "Bad request - No audio uploaded or unsupported type")
@api.response(413, "Audio file too large")
@setup_required
@login_required
@account_initialization_required
@ -87,28 +76,11 @@ class ChatMessageAudioApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/text-to-audio")
class ChatMessageTextApi(Resource):
@api.doc("chat_message_text_to_speech")
@api.doc(description="Convert text to speech for chat messages")
@api.doc(params={"app_id": "App ID"})
@api.expect(
api.model(
"TextToSpeechRequest",
{
"message_id": fields.String(description="Message ID"),
"text": fields.String(required=True, description="Text to convert to speech"),
"voice": fields.String(description="Voice to use for TTS"),
"streaming": fields.Boolean(description="Whether to stream the audio"),
},
)
)
@api.response(200, "Text to speech conversion successful")
@api.response(400, "Bad request - Invalid parameters")
@get_app_model
@setup_required
@login_required
@account_initialization_required
@get_app_model
def post(self, app_model: App):
try:
parser = reqparse.RequestParser()
@ -152,18 +124,11 @@ class ChatMessageTextApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/text-to-audio/voices")
class TextModesApi(Resource):
@api.doc("get_text_to_speech_voices")
@api.doc(description="Get available TTS voices for a specific language")
@api.doc(params={"app_id": "App ID"})
@api.expect(api.parser().add_argument("language", type=str, required=True, location="args", help="Language code"))
@api.response(200, "TTS voices retrieved successfully", fields.List(fields.Raw(description="Available voices")))
@api.response(400, "Invalid language parameter")
@get_app_model
@setup_required
@login_required
@account_initialization_required
@get_app_model
def get(self, app_model):
try:
parser = reqparse.RequestParser()
@ -199,3 +164,8 @@ class TextModesApi(Resource):
except Exception as e:
logger.exception("Failed to handle get request to TextModesApi")
raise InternalServerError()
api.add_resource(ChatMessageAudioApi, "/apps/<uuid:app_id>/audio-to-text")
api.add_resource(ChatMessageTextApi, "/apps/<uuid:app_id>/text-to-audio")
api.add_resource(TextModesApi, "/apps/<uuid:app_id>/text-to-audio/voices")

View File

@ -1,11 +1,12 @@
import logging
import flask_login
from flask import request
from flask_restx import Resource, fields, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
from flask_restx import Resource, reqparse
from werkzeug.exceptions import InternalServerError, NotFound
import services
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.error import (
AppUnavailableError,
CompletionRequestError,
@ -28,8 +29,7 @@ from core.helper.trace_id_helper import get_external_trace_id
from core.model_runtime.errors.invoke import InvokeError
from libs import helper
from libs.helper import uuid_value
from libs.login import current_user, login_required
from models import Account
from libs.login import login_required
from models.model import AppMode
from services.app_generate_service import AppGenerateService
from services.errors.llm import InvokeRateLimitError
@ -38,27 +38,7 @@ logger = logging.getLogger(__name__)
# define completion message api for user
@console_ns.route("/apps/<uuid:app_id>/completion-messages")
class CompletionMessageApi(Resource):
@api.doc("create_completion_message")
@api.doc(description="Generate completion message for debugging")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"CompletionMessageRequest",
{
"inputs": fields.Raw(required=True, description="Input variables"),
"query": fields.String(description="Query text", default=""),
"files": fields.List(fields.Raw(), description="Uploaded files"),
"model_config": fields.Raw(required=True, description="Model configuration"),
"response_mode": fields.String(enum=["blocking", "streaming"], description="Response mode"),
"retriever_from": fields.String(default="dev", description="Retriever source"),
},
)
)
@api.response(200, "Completion generated successfully")
@api.response(400, "Invalid request parameters")
@api.response(404, "App not found")
@setup_required
@login_required
@account_initialization_required
@ -76,11 +56,11 @@ class CompletionMessageApi(Resource):
streaming = args["response_mode"] != "blocking"
args["auto_generate_name"] = False
account = flask_login.current_user
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account or EndUser instance")
response = AppGenerateService.generate(
app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=streaming
app_model=app_model, user=account, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=streaming
)
return helper.compact_generate_response(response)
@ -106,58 +86,25 @@ class CompletionMessageApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/completion-messages/<string:task_id>/stop")
class CompletionMessageStopApi(Resource):
@api.doc("stop_completion_message")
@api.doc(description="Stop a running completion message generation")
@api.doc(params={"app_id": "Application ID", "task_id": "Task ID to stop"})
@api.response(200, "Task stopped successfully")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=AppMode.COMPLETION)
def post(self, app_model, task_id):
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, current_user.id)
account = flask_login.current_user
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, account.id)
return {"result": "success"}, 200
@console_ns.route("/apps/<uuid:app_id>/chat-messages")
class ChatMessageApi(Resource):
@api.doc("create_chat_message")
@api.doc(description="Generate chat message for debugging")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"ChatMessageRequest",
{
"inputs": fields.Raw(required=True, description="Input variables"),
"query": fields.String(required=True, description="User query"),
"files": fields.List(fields.Raw(), description="Uploaded files"),
"model_config": fields.Raw(required=True, description="Model configuration"),
"conversation_id": fields.String(description="Conversation ID"),
"parent_message_id": fields.String(description="Parent message ID"),
"response_mode": fields.String(enum=["blocking", "streaming"], description="Response mode"),
"retriever_from": fields.String(default="dev", description="Retriever source"),
},
)
)
@api.response(200, "Chat message generated successfully")
@api.response(400, "Invalid request parameters")
@api.response(404, "App or conversation not found")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT])
def post(self, app_model):
if not isinstance(current_user, Account):
raise Forbidden()
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, location="json")
parser.add_argument("query", type=str, required=True, location="json")
@ -176,11 +123,11 @@ class ChatMessageApi(Resource):
if external_trace_id:
args["external_trace_id"] = external_trace_id
account = flask_login.current_user
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account or EndUser instance")
response = AppGenerateService.generate(
app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=streaming
app_model=app_model, user=account, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=streaming
)
return helper.compact_generate_response(response)
@ -208,19 +155,20 @@ class ChatMessageApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/chat-messages/<string:task_id>/stop")
class ChatMessageStopApi(Resource):
@api.doc("stop_chat_message")
@api.doc(description="Stop a running chat message generation")
@api.doc(params={"app_id": "Application ID", "task_id": "Task ID to stop"})
@api.response(200, "Task stopped successfully")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
def post(self, app_model, task_id):
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, current_user.id)
account = flask_login.current_user
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, account.id)
return {"result": "success"}, 200
api.add_resource(CompletionMessageApi, "/apps/<uuid:app_id>/completion-messages")
api.add_resource(CompletionMessageStopApi, "/apps/<uuid:app_id>/completion-messages/<string:task_id>/stop")
api.add_resource(ChatMessageApi, "/apps/<uuid:app_id>/chat-messages")
api.add_resource(ChatMessageStopApi, "/apps/<uuid:app_id>/chat-messages/<string:task_id>/stop")
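Both sides of these hunks resolve the acting user before generation: one reads flask_login.current_user straight into a local account, the other narrows it with an isinstance check so type checkers accept .id afterwards. The narrowing idiom in isolation, with a dataclass stub standing in for models.Account:

from dataclasses import dataclass

@dataclass
class Account:                      # stub for models.Account
    id: str

def require_account(user: object) -> Account:
    # Same guard the handlers above perform before touching user.id
    if not isinstance(user, Account):
        raise ValueError("current_user must be an Account instance")
    return user

print(require_account(Account(id="u1")).id)   # u1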

View File

@ -8,7 +8,7 @@ from sqlalchemy import func, or_
from sqlalchemy.orm import joinedload
from werkzeug.exceptions import Forbidden, NotFound
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.app.entities.app_invoke_entities import InvokeFrom
@ -22,35 +22,13 @@ from fields.conversation_fields import (
from libs.datetime_utils import naive_utc_now
from libs.helper import DatetimeString
from libs.login import login_required
from models import Account, Conversation, EndUser, Message, MessageAnnotation
from models import Conversation, EndUser, Message, MessageAnnotation
from models.model import AppMode
from services.conversation_service import ConversationService
from services.errors.conversation import ConversationNotExistsError
@console_ns.route("/apps/<uuid:app_id>/completion-conversations")
class CompletionConversationApi(Resource):
@api.doc("list_completion_conversations")
@api.doc(description="Get completion conversations with pagination and filtering")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("keyword", type=str, location="args", help="Search keyword")
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
.add_argument(
"annotation_status",
type=str,
location="args",
choices=["annotated", "not_annotated", "all"],
default="all",
help="Annotation status filter",
)
.add_argument("page", type=int, location="args", default=1, help="Page number")
.add_argument("limit", type=int, location="args", default=20, help="Page size (1-100)")
)
@api.response(200, "Success", conversation_pagination_fields)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -123,14 +101,7 @@ class CompletionConversationApi(Resource):
return conversations
@console_ns.route("/apps/<uuid:app_id>/completion-conversations/<uuid:conversation_id>")
class CompletionConversationDetailApi(Resource):
@api.doc("get_completion_conversation")
@api.doc(description="Get completion conversation details with messages")
@api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@api.response(200, "Success", conversation_message_detail_fields)
@api.response(403, "Insufficient permissions")
@api.response(404, "Conversation not found")
@setup_required
@login_required
@account_initialization_required
@ -143,12 +114,6 @@ class CompletionConversationDetailApi(Resource):
return _get_conversation(app_model, conversation_id)
@api.doc("delete_completion_conversation")
@api.doc(description="Delete a completion conversation")
@api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@api.response(204, "Conversation deleted successfully")
@api.response(403, "Insufficient permissions")
@api.response(404, "Conversation not found")
@setup_required
@login_required
@account_initialization_required
@ -159,8 +124,6 @@ class CompletionConversationDetailApi(Resource):
conversation_id = str(conversation_id)
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
ConversationService.delete(app_model, conversation_id, current_user)
except ConversationNotExistsError:
raise NotFound("Conversation Not Exists.")
@ -168,38 +131,7 @@ class CompletionConversationDetailApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/apps/<uuid:app_id>/chat-conversations")
class ChatConversationApi(Resource):
@api.doc("list_chat_conversations")
@api.doc(description="Get chat conversations with pagination, filtering and summary")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("keyword", type=str, location="args", help="Search keyword")
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
.add_argument(
"annotation_status",
type=str,
location="args",
choices=["annotated", "not_annotated", "all"],
default="all",
help="Annotation status filter",
)
.add_argument("message_count_gte", type=int, location="args", help="Minimum message count")
.add_argument("page", type=int, location="args", default=1, help="Page number")
.add_argument("limit", type=int, location="args", default=20, help="Page size (1-100)")
.add_argument(
"sort_by",
type=str,
location="args",
choices=["created_at", "-created_at", "updated_at", "-updated_at"],
default="-updated_at",
help="Sort field and direction",
)
)
@api.response(200, "Success", conversation_with_summary_pagination_fields)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -307,7 +239,7 @@ class ChatConversationApi(Resource):
.having(func.count(Message.id) >= args["message_count_gte"])
)
if app_model.mode == AppMode.ADVANCED_CHAT:
if app_model.mode == AppMode.ADVANCED_CHAT.value:
query = query.where(Conversation.invoke_from != InvokeFrom.DEBUGGER.value)
match args["sort_by"]:
@ -327,14 +259,7 @@ class ChatConversationApi(Resource):
return conversations
@console_ns.route("/apps/<uuid:app_id>/chat-conversations/<uuid:conversation_id>")
class ChatConversationDetailApi(Resource):
@api.doc("get_chat_conversation")
@api.doc(description="Get chat conversation details")
@api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@api.response(200, "Success", conversation_detail_fields)
@api.response(403, "Insufficient permissions")
@api.response(404, "Conversation not found")
@setup_required
@login_required
@account_initialization_required
@ -347,12 +272,6 @@ class ChatConversationDetailApi(Resource):
return _get_conversation(app_model, conversation_id)
@api.doc("delete_chat_conversation")
@api.doc(description="Delete a chat conversation")
@api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@api.response(204, "Conversation deleted successfully")
@api.response(403, "Insufficient permissions")
@api.response(404, "Conversation not found")
@setup_required
@login_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@ -363,8 +282,6 @@ class ChatConversationDetailApi(Resource):
conversation_id = str(conversation_id)
try:
- if not isinstance(current_user, Account):
- raise ValueError("current_user must be an Account instance")
ConversationService.delete(app_model, conversation_id, current_user)
except ConversationNotExistsError:
raise NotFound("Conversation Not Exists.")
@ -372,6 +289,12 @@ class ChatConversationDetailApi(Resource):
return {"result": "success"}, 204
api.add_resource(CompletionConversationApi, "/apps/<uuid:app_id>/completion-conversations")
api.add_resource(CompletionConversationDetailApi, "/apps/<uuid:app_id>/completion-conversations/<uuid:conversation_id>")
api.add_resource(ChatConversationApi, "/apps/<uuid:app_id>/chat-conversations")
api.add_resource(ChatConversationDetailApi, "/apps/<uuid:app_id>/chat-conversations/<uuid:conversation_id>")
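Note: the api.add_resource calls above and the @console_ns.route decorators that appear earlier in this file are two ways of wiring the same flask_restx Resource to a URL. A minimal sketch of both styles, using placeholder names rather than the real Dify modules:

from flask import Flask
from flask_restx import Api, Namespace, Resource

app = Flask(__name__)
api = Api(app)
console_ns = Namespace("console", path="/console")

@console_ns.route("/apps/<uuid:app_id>/ping")  # decorator style, as on the removed side
class PingApi(Resource):
    def get(self, app_id):
        return {"result": "pong", "app_id": str(app_id)}

api.add_namespace(console_ns)

# Explicit style, as on the added side; either registration works on its own:
# api.add_resource(PingApi, "/apps/<uuid:app_id>/ping")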
def _get_conversation(app_model, conversation_id):
conversation = (
db.session.query(Conversation)

View File

@ -2,7 +2,7 @@ from flask_restx import Resource, marshal_with, reqparse
from sqlalchemy import select
from sqlalchemy.orm import Session
- from controllers.console import api, console_ns
+ from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
@ -12,17 +12,7 @@ from models import ConversationVariable
from models.model import AppMode
@console_ns.route("/apps/<uuid:app_id>/conversation-variables")
class ConversationVariablesApi(Resource):
@api.doc("get_conversation_variables")
@api.doc(description="Get conversation variables for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser().add_argument(
"conversation_id", type=str, location="args", help="Conversation ID to filter variables"
)
)
@api.response(200, "Conversation variables retrieved successfully", paginated_conversation_variable_fields)
@setup_required
@login_required
@account_initialization_required
@ -65,3 +55,6 @@ class ConversationVariablesApi(Resource):
for row in rows
],
}
api.add_resource(ConversationVariablesApi, "/apps/<uuid:app_id>/conversation-variables")
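Note: the removed @api.expect(api.parser()...) block documented query-string arguments for Swagger, while the handler keeps parsing them with reqparse at runtime. A sketch of that pattern, with an illustrative endpoint name:

from flask import Flask
from flask_restx import Api, Resource, reqparse

app = Flask(__name__)
api = Api(app)

# RequestParser both documents (via api.expect) and parses query-string arguments.
parser = reqparse.RequestParser()
parser.add_argument("conversation_id", type=str, location="args",
                    help="Conversation ID to filter variables")

@api.route("/demo/conversation-variables")
class DemoVariablesApi(Resource):
    @api.expect(parser)
    def get(self):
        args = parser.parse_args()
        # Filtering is stubbed out; a real handler would query the database here.
        return {"conversation_id": args["conversation_id"], "data": []}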

View File

@ -1,9 +1,9 @@
from collections.abc import Sequence
from flask_login import current_user
- from flask_restx import Resource, fields, reqparse
+ from flask_restx import Resource, reqparse
- from controllers.console import api, console_ns
+ from controllers.console import api
from controllers.console.app.error import (
CompletionRequestError,
ProviderModelCurrentlyNotSupportError,
@ -22,23 +22,7 @@ from models import App
from services.workflow_service import WorkflowService
@console_ns.route("/rule-generate")
class RuleGenerateApi(Resource):
@api.doc("generate_rule_config")
@api.doc(description="Generate rule configuration using LLM")
@api.expect(
api.model(
"RuleGenerateRequest",
{
"instruction": fields.String(required=True, description="Rule generation instruction"),
"model_config": fields.Raw(required=True, description="Model configuration"),
"no_variable": fields.Boolean(required=True, default=False, description="Whether to exclude variables"),
},
)
)
@api.response(200, "Rule configuration generated successfully")
@api.response(400, "Invalid request parameters")
@api.response(402, "Provider quota exceeded")
@setup_required
@login_required
@account_initialization_required
@ -69,26 +53,7 @@ class RuleGenerateApi(Resource):
return rules
@console_ns.route("/rule-code-generate")
class RuleCodeGenerateApi(Resource):
@api.doc("generate_rule_code")
@api.doc(description="Generate code rules using LLM")
@api.expect(
api.model(
"RuleCodeGenerateRequest",
{
"instruction": fields.String(required=True, description="Code generation instruction"),
"model_config": fields.Raw(required=True, description="Model configuration"),
"no_variable": fields.Boolean(required=True, default=False, description="Whether to exclude variables"),
"code_language": fields.String(
default="javascript", description="Programming language for code generation"
),
},
)
)
@api.response(200, "Code rules generated successfully")
@api.response(400, "Invalid request parameters")
@api.response(402, "Provider quota exceeded")
@setup_required
@login_required
@account_initialization_required
@ -120,22 +85,7 @@ class RuleCodeGenerateApi(Resource):
return code_result
@console_ns.route("/rule-structured-output-generate")
class RuleStructuredOutputGenerateApi(Resource):
@api.doc("generate_structured_output")
@api.doc(description="Generate structured output rules using LLM")
@api.expect(
api.model(
"StructuredOutputGenerateRequest",
{
"instruction": fields.String(required=True, description="Structured output generation instruction"),
"model_config": fields.Raw(required=True, description="Model configuration"),
},
)
)
@api.response(200, "Structured output generated successfully")
@api.response(400, "Invalid request parameters")
@api.response(402, "Provider quota exceeded")
@setup_required
@login_required
@account_initialization_required
@ -164,27 +114,7 @@ class RuleStructuredOutputGenerateApi(Resource):
return structured_output
@console_ns.route("/instruction-generate")
class InstructionGenerateApi(Resource):
@api.doc("generate_instruction")
@api.doc(description="Generate instruction for workflow nodes or general use")
@api.expect(
api.model(
"InstructionGenerateRequest",
{
"flow_id": fields.String(required=True, description="Workflow/Flow ID"),
"node_id": fields.String(description="Node ID for workflow context"),
"current": fields.String(description="Current instruction text"),
"language": fields.String(default="javascript", description="Programming language (javascript/python)"),
"instruction": fields.String(required=True, description="Instruction for generation"),
"model_config": fields.Raw(required=True, description="Model configuration"),
"ideal_output": fields.String(description="Expected ideal output"),
},
)
)
@api.response(200, "Instruction generated successfully")
@api.response(400, "Invalid request parameters or flow/workflow not found")
@api.response(402, "Provider quota exceeded")
@setup_required
@login_required
@account_initialization_required
@ -273,25 +203,11 @@ class InstructionGenerateApi(Resource):
raise CompletionRequestError(e.description)
@console_ns.route("/instruction-generate/template")
class InstructionGenerationTemplateApi(Resource):
@api.doc("get_instruction_template")
@api.doc(description="Get instruction generation template")
@api.expect(
api.model(
"InstructionTemplateRequest",
{
"instruction": fields.String(required=True, description="Template instruction"),
"ideal_output": fields.String(description="Expected ideal output"),
},
)
)
@api.response(200, "Template retrieved successfully")
@api.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
- def post(self):
+ def post(self) -> dict:
parser = reqparse.RequestParser()
parser.add_argument("type", type=str, required=True, default=False, location="json")
args = parser.parse_args()
@ -306,3 +222,10 @@ class InstructionGenerationTemplateApi(Resource):
return {"data": INSTRUCTION_GENERATE_TEMPLATE_CODE}
case _:
raise ValueError(f"Invalid type: {args['type']}")
api.add_resource(RuleGenerateApi, "/rule-generate")
api.add_resource(RuleCodeGenerateApi, "/rule-code-generate")
api.add_resource(RuleStructuredOutputGenerateApi, "/rule-structured-output-generate")
api.add_resource(InstructionGenerateApi, "/instruction-generate")
api.add_resource(InstructionGenerationTemplateApi, "/instruction-generate/template")
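Note: the removed api.model(...) definitions played the same role for JSON bodies that api.parser() plays for query strings: they feed the Swagger spec, while reqparse does the runtime parsing. A sketch with a made-up model name:

from flask import Flask
from flask_restx import Api, Resource, fields, reqparse

app = Flask(__name__)
api = Api(app)

# Hypothetical model, mirroring the removed RuleGenerateRequest declaration.
rule_generate_model = api.model(
    "RuleGenerateRequestSketch",
    {
        "instruction": fields.String(required=True, description="Rule generation instruction"),
        "no_variable": fields.Boolean(default=False, description="Whether to exclude variables"),
    },
)

@api.route("/demo/rule-generate")
class DemoRuleGenerateApi(Resource):
    @api.expect(rule_generate_model, validate=False)
    def post(self):
        parser = reqparse.RequestParser()
        parser.add_argument("instruction", type=str, required=True, location="json")
        parser.add_argument("no_variable", type=bool, default=False, location="json")
        args = parser.parse_args()
        return {"instruction": args["instruction"], "no_variable": args["no_variable"]}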

View File

@ -2,10 +2,10 @@ import json
from enum import StrEnum
from flask_login import current_user
- from flask_restx import Resource, fields, marshal_with, reqparse
+ from flask_restx import Resource, marshal_with, reqparse
from werkzeug.exceptions import NotFound
- from controllers.console import api, console_ns
+ from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
@ -19,12 +19,7 @@ class AppMCPServerStatus(StrEnum):
INACTIVE = "inactive"
@console_ns.route("/apps/<uuid:app_id>/server")
class AppMCPServerController(Resource):
@api.doc("get_app_mcp_server")
@api.doc(description="Get MCP server configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "MCP server configuration retrieved successfully", app_server_fields)
@setup_required
@login_required
@account_initialization_required
@ -34,20 +29,6 @@ class AppMCPServerController(Resource):
server = db.session.query(AppMCPServer).where(AppMCPServer.app_id == app_model.id).first()
return server
@api.doc("create_app_mcp_server")
@api.doc(description="Create MCP server configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"MCPServerCreateRequest",
{
"description": fields.String(description="Server description"),
"parameters": fields.Raw(required=True, description="Server parameters configuration"),
},
)
)
@api.response(201, "MCP server configuration created successfully", app_server_fields)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -78,23 +59,6 @@ class AppMCPServerController(Resource):
db.session.commit()
return server
@api.doc("update_app_mcp_server")
@api.doc(description="Update MCP server configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"MCPServerUpdateRequest",
{
"id": fields.String(required=True, description="Server ID"),
"description": fields.String(description="Server description"),
"parameters": fields.Raw(required=True, description="Server parameters configuration"),
"status": fields.String(description="Server status"),
},
)
)
@api.response(200, "MCP server configuration updated successfully", app_server_fields)
@api.response(403, "Insufficient permissions")
@api.response(404, "Server not found")
@setup_required
@login_required
@account_initialization_required
@ -130,14 +94,7 @@ class AppMCPServerController(Resource):
return server
@console_ns.route("/apps/<uuid:server_id>/server/refresh")
class AppMCPServerRefreshController(Resource):
@api.doc("refresh_app_mcp_server")
@api.doc(description="Refresh MCP server configuration and regenerate server code")
@api.doc(params={"server_id": "Server ID"})
@api.response(200, "MCP server refreshed successfully", app_server_fields)
@api.response(403, "Insufficient permissions")
@api.response(404, "Server not found")
@setup_required
@login_required
@account_initialization_required
@ -156,3 +113,7 @@ class AppMCPServerRefreshController(Resource):
server.server_code = AppMCPServer.generate_server_code(16)
db.session.commit()
return server
api.add_resource(AppMCPServerController, "/apps/<uuid:app_id>/server")
api.add_resource(AppMCPServerRefreshController, "/apps/<uuid:server_id>/server/refresh")
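Note: AppMCPServer.generate_server_code(16) above regenerates the server's public code, but the implementation is not shown in this diff. A plausible stand-in using only the standard library:

import secrets
import string

def generate_server_code(length: int = 16) -> str:
    # Hypothetical stand-in for AppMCPServer.generate_server_code; the actual
    # method may use a different alphabet or scheme.
    alphabet = string.ascii_letters + string.digits
    return "".join(secrets.choice(alphabet) for _ in range(length))

print(generate_server_code())  # e.g. 'aZ3kQ9xL0mN7pR2s'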

View File

@ -1,11 +1,12 @@
import logging
from flask_login import current_user
from flask_restx import Resource, fields, marshal_with, reqparse
from flask_restx.inputs import int_range
from sqlalchemy import exists, select
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
- from controllers.console import api, console_ns
+ from controllers.console import api
from controllers.console.app.error import (
CompletionRequestError,
ProviderModelCurrentlyNotSupportError,
@ -26,8 +27,7 @@ from extensions.ext_database import db
from fields.conversation_fields import annotation_fields, message_detail_fields
from libs.helper import uuid_value
from libs.infinite_scroll_pagination import InfiniteScrollPagination
- from libs.login import current_user, login_required
- from models.account import Account
+ from libs.login import login_required
from models.model import AppMode, Conversation, Message, MessageAnnotation, MessageFeedback
from services.annotation_service import AppAnnotationService
from services.errors.conversation import ConversationNotExistsError
@ -37,7 +37,6 @@ from services.message_service import MessageService
logger = logging.getLogger(__name__)
@console_ns.route("/apps/<uuid:app_id>/chat-messages")
class ChatMessageListApi(Resource):
message_infinite_scroll_pagination_fields = {
"limit": fields.Integer,
@ -45,17 +44,6 @@ class ChatMessageListApi(Resource):
"data": fields.List(fields.Nested(message_detail_fields)),
}
@api.doc("list_chat_messages")
@api.doc(description="Get chat messages for a conversation with pagination")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("conversation_id", type=str, required=True, location="args", help="Conversation ID")
.add_argument("first_id", type=str, location="args", help="First message ID for pagination")
.add_argument("limit", type=int, location="args", default=20, help="Number of messages to return (1-100)")
)
@api.response(200, "Success", message_infinite_scroll_pagination_fields)
@api.response(404, "Conversation not found")
@setup_required
@login_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@ -129,31 +117,12 @@ class ChatMessageListApi(Resource):
return InfiniteScrollPagination(data=history_messages, limit=args["limit"], has_more=has_more)
@console_ns.route("/apps/<uuid:app_id>/feedbacks")
class MessageFeedbackApi(Resource):
@api.doc("create_message_feedback")
@api.doc(description="Create or update message feedback (like/dislike)")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"MessageFeedbackRequest",
{
"message_id": fields.String(required=True, description="Message ID"),
"rating": fields.String(enum=["like", "dislike"], description="Feedback rating"),
},
)
)
@api.response(200, "Feedback updated successfully")
@api.response(404, "Message not found")
@api.response(403, "Insufficient permissions")
- @get_app_model
@setup_required
@login_required
@account_initialization_required
+ @get_app_model
def post(self, app_model):
if current_user is None:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("message_id", required=True, type=uuid_value, location="json")
parser.add_argument("rating", type=str, choices=["like", "dislike", None], location="json")
@ -190,24 +159,7 @@ class MessageFeedbackApi(Resource):
return {"result": "success"}
@console_ns.route("/apps/<uuid:app_id>/annotations")
class MessageAnnotationApi(Resource):
@api.doc("create_message_annotation")
@api.doc(description="Create message annotation")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"MessageAnnotationRequest",
{
"message_id": fields.String(description="Message ID"),
"question": fields.String(required=True, description="Question text"),
"answer": fields.String(required=True, description="Answer text"),
"annotation_reply": fields.Raw(description="Annotation reply"),
},
)
)
@api.response(200, "Annotation created successfully", annotation_fields)
@api.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -215,9 +167,7 @@ class MessageAnnotationApi(Resource):
@get_app_model
@marshal_with(annotation_fields)
def post(self, app_model):
- if not isinstance(current_user, Account):
- raise Forbidden()
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
@ -231,37 +181,18 @@ class MessageAnnotationApi(Resource):
return annotation
@console_ns.route("/apps/<uuid:app_id>/annotations/count")
class MessageAnnotationCountApi(Resource):
@api.doc("get_annotation_count")
@api.doc(description="Get count of message annotations for the app")
@api.doc(params={"app_id": "Application ID"})
@api.response(
200,
"Annotation count retrieved successfully",
api.model("AnnotationCountResponse", {"count": fields.Integer(description="Number of annotations")}),
)
- @get_app_model
@setup_required
@login_required
@account_initialization_required
+ @get_app_model
def get(self, app_model):
count = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_model.id).count()
return {"count": count}
@console_ns.route("/apps/<uuid:app_id>/chat-messages/<uuid:message_id>/suggested-questions")
class MessageSuggestedQuestionApi(Resource):
@api.doc("get_message_suggested_questions")
@api.doc(description="Get suggested questions for a message")
@api.doc(params={"app_id": "Application ID", "message_id": "Message ID"})
@api.response(
200,
"Suggested questions retrieved successfully",
api.model("SuggestedQuestionsResponse", {"data": fields.List(fields.String(description="Suggested question"))}),
)
@api.response(404, "Message or conversation not found")
@setup_required
@login_required
@account_initialization_required
@ -294,13 +225,7 @@ class MessageSuggestedQuestionApi(Resource):
return {"data": questions}
@console_ns.route("/apps/<uuid:app_id>/messages/<uuid:message_id>")
class MessageApi(Resource):
@api.doc("get_message")
@api.doc(description="Get message details by ID")
@api.doc(params={"app_id": "Application ID", "message_id": "Message ID"})
@api.response(200, "Message retrieved successfully", message_detail_fields)
@api.response(404, "Message not found")
@setup_required
@login_required
@account_initialization_required
@ -315,3 +240,11 @@ class MessageApi(Resource):
raise NotFound("Message Not Exists.")
return message
api.add_resource(MessageSuggestedQuestionApi, "/apps/<uuid:app_id>/chat-messages/<uuid:message_id>/suggested-questions")
api.add_resource(ChatMessageListApi, "/apps/<uuid:app_id>/chat-messages", endpoint="console_chat_messages")
api.add_resource(MessageFeedbackApi, "/apps/<uuid:app_id>/feedbacks")
api.add_resource(MessageAnnotationApi, "/apps/<uuid:app_id>/annotations")
api.add_resource(MessageAnnotationCountApi, "/apps/<uuid:app_id>/annotations/count")
api.add_resource(MessageApi, "/apps/<uuid:app_id>/messages/<uuid:message_id>", endpoint="console_message")
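Note: ChatMessageListApi above returns an InfiniteScrollPagination keyed on first_id rather than page numbers. A self-contained sketch of that cursor scheme over an in-memory list (newest-first ordering assumed, as in the endpoint):

def paginate_messages(messages: list[dict], first_id: str | None = None, limit: int = 20) -> dict:
    # messages are assumed sorted newest-first with unique "id" values.
    start = 0
    if first_id is not None:
        ids = [m["id"] for m in messages]
        start = ids.index(first_id) + 1  # continue strictly after the cursor
    page = messages[start : start + limit]
    return {"limit": limit, "has_more": start + limit < len(messages), "data": page}

msgs = [{"id": str(i), "text": f"message {i}"} for i in range(50, 0, -1)]
first_page = paginate_messages(msgs, limit=3)
second_page = paginate_messages(msgs, first_id=first_page["data"][-1]["id"], limit=3)
assert [m["id"] for m in second_page["data"]] == ["47", "46", "45"]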

View File

@ -3,10 +3,9 @@ from typing import cast
from flask import request
from flask_login import current_user
- from flask_restx import Resource, fields
- from werkzeug.exceptions import Forbidden
+ from flask_restx import Resource
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.agent.entities import AgentToolEntity
@ -15,51 +14,17 @@ from core.tools.utils.configuration import ToolParameterConfigurationManager
from events.app_event import app_model_config_was_updated
from extensions.ext_database import db
from libs.login import login_required
from models.account import Account
from models.model import AppMode, AppModelConfig
from services.app_model_config_service import AppModelConfigService
@console_ns.route("/apps/<uuid:app_id>/model-config")
class ModelConfigResource(Resource):
@api.doc("update_app_model_config")
@api.doc(description="Update application model configuration")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"ModelConfigRequest",
{
"provider": fields.String(description="Model provider"),
"model": fields.String(description="Model name"),
"configs": fields.Raw(description="Model configuration parameters"),
"opening_statement": fields.String(description="Opening statement"),
"suggested_questions": fields.List(fields.String(), description="Suggested questions"),
"more_like_this": fields.Raw(description="More like this configuration"),
"speech_to_text": fields.Raw(description="Speech to text configuration"),
"text_to_speech": fields.Raw(description="Text to speech configuration"),
"retrieval_model": fields.Raw(description="Retrieval model configuration"),
"tools": fields.List(fields.Raw(), description="Available tools"),
"dataset_configs": fields.Raw(description="Dataset configurations"),
"agent_mode": fields.Raw(description="Agent mode configuration"),
},
)
)
@api.response(200, "Model configuration updated successfully")
@api.response(400, "Invalid configuration")
@api.response(404, "App not found")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.AGENT_CHAT, AppMode.CHAT, AppMode.COMPLETION])
def post(self, app_model):
"""Modify app model config"""
if not isinstance(current_user, Account):
raise Forbidden()
if not current_user.has_edit_permission:
raise Forbidden()
assert current_user.current_tenant_id is not None, "The tenant information should be loaded."
# validate config
model_configuration = AppModelConfigService.validate_configuration(
tenant_id=current_user.current_tenant_id,
@ -74,7 +39,7 @@ class ModelConfigResource(Resource):
)
new_app_model_config = new_app_model_config.from_model_config_dict(model_configuration)
- if app_model.mode == AppMode.AGENT_CHAT or app_model.is_agent:
+ if app_model.mode == AppMode.AGENT_CHAT.value or app_model.is_agent:
# get original app model config
original_app_model_config = (
db.session.query(AppModelConfig).where(AppModelConfig.id == app_model.app_model_config_id).first()
@ -177,3 +142,6 @@ class ModelConfigResource(Resource):
app_model_config_was_updated.send(app_model, app_model_config=new_app_model_config)
return {"result": "success"}
api.add_resource(ModelConfigResource, "/apps/<uuid:app_id>/model-config")
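Note: the change from AppMode.AGENT_CHAT to AppMode.AGENT_CHAT.value is behavior-preserving when AppMode is a StrEnum, since StrEnum members compare equal to their string values; the .value form simply makes the string comparison explicit. Illustrated with a made-up member value:

from enum import StrEnum  # Python 3.11+

class AppMode(StrEnum):
    AGENT_CHAT = "agent-chat"  # illustrative value, not necessarily Dify's

mode = "agent-chat"  # e.g. a raw string column loaded from the database
assert mode == AppMode.AGENT_CHAT        # True: StrEnum members are str instances
assert mode == AppMode.AGENT_CHAT.value  # equivalent, but explicit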

View File

@ -1,31 +1,18 @@
- from flask_restx import Resource, fields, reqparse
+ from flask_restx import Resource, reqparse
from werkzeug.exceptions import BadRequest
- from controllers.console import api, console_ns
+ from controllers.console import api
from controllers.console.app.error import TracingConfigCheckError, TracingConfigIsExist, TracingConfigNotExist
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required
from services.ops_service import OpsService
@console_ns.route("/apps/<uuid:app_id>/trace-config")
class TraceAppConfigApi(Resource):
"""
Manage trace app configurations
"""
@api.doc("get_trace_app_config")
@api.doc(description="Get tracing configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser().add_argument(
"tracing_provider", type=str, required=True, location="args", help="Tracing provider name"
)
)
@api.response(
200, "Tracing configuration retrieved successfully", fields.Raw(description="Tracing configuration data")
)
@api.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@ -42,22 +29,6 @@ class TraceAppConfigApi(Resource):
except Exception as e:
raise BadRequest(str(e))
@api.doc("create_trace_app_config")
@api.doc(description="Create a new tracing configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"TraceConfigCreateRequest",
{
"tracing_provider": fields.String(required=True, description="Tracing provider name"),
"tracing_config": fields.Raw(required=True, description="Tracing configuration data"),
},
)
)
@api.response(
201, "Tracing configuration created successfully", fields.Raw(description="Created configuration data")
)
@api.response(400, "Invalid request parameters or configuration already exists")
@setup_required
@login_required
@account_initialization_required
@ -80,20 +51,6 @@ class TraceAppConfigApi(Resource):
except Exception as e:
raise BadRequest(str(e))
@api.doc("update_trace_app_config")
@api.doc(description="Update an existing tracing configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"TraceConfigUpdateRequest",
{
"tracing_provider": fields.String(required=True, description="Tracing provider name"),
"tracing_config": fields.Raw(required=True, description="Updated tracing configuration data"),
},
)
)
@api.response(200, "Tracing configuration updated successfully", fields.Raw(description="Success response"))
@api.response(400, "Invalid request parameters or configuration not found")
@setup_required
@login_required
@account_initialization_required
@ -114,16 +71,6 @@ class TraceAppConfigApi(Resource):
except Exception as e:
raise BadRequest(str(e))
@api.doc("delete_trace_app_config")
@api.doc(description="Delete an existing tracing configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser().add_argument(
"tracing_provider", type=str, required=True, location="args", help="Tracing provider name"
)
)
@api.response(204, "Tracing configuration deleted successfully")
@api.response(400, "Invalid request parameters or configuration not found")
@setup_required
@login_required
@account_initialization_required
@ -140,3 +87,6 @@ class TraceAppConfigApi(Resource):
return {"result": "success"}, 204
except Exception as e:
raise BadRequest(str(e))
api.add_resource(TraceAppConfigApi, "/apps/<uuid:app_id>/trace-config")

View File

@ -1,16 +1,16 @@
from flask_login import current_user
- from flask_restx import Resource, fields, marshal_with, reqparse
+ from flask_restx import Resource, marshal_with, reqparse
from werkzeug.exceptions import Forbidden, NotFound
from constants.languages import supported_language
- from controllers.console import api, console_ns
+ from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from fields.app_fields import app_site_fields
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
- from models import Account, Site
+ from models import Site
def parse_app_site_args():
@ -36,39 +36,7 @@ def parse_app_site_args():
return parser.parse_args()
@console_ns.route("/apps/<uuid:app_id>/site")
class AppSite(Resource):
@api.doc("update_app_site")
@api.doc(description="Update application site configuration")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"AppSiteRequest",
{
"title": fields.String(description="Site title"),
"icon_type": fields.String(description="Icon type"),
"icon": fields.String(description="Icon"),
"icon_background": fields.String(description="Icon background color"),
"description": fields.String(description="Site description"),
"default_language": fields.String(description="Default language"),
"chat_color_theme": fields.String(description="Chat color theme"),
"chat_color_theme_inverted": fields.Boolean(description="Inverted chat color theme"),
"customize_domain": fields.String(description="Custom domain"),
"copyright": fields.String(description="Copyright text"),
"privacy_policy": fields.String(description="Privacy policy"),
"custom_disclaimer": fields.String(description="Custom disclaimer"),
"customize_token_strategy": fields.String(
enum=["must", "allow", "not_allow"], description="Token strategy"
),
"prompt_public": fields.Boolean(description="Make prompt public"),
"show_workflow_steps": fields.Boolean(description="Show workflow steps"),
"use_icon_as_answer_icon": fields.Boolean(description="Use icon as answer icon"),
},
)
)
@api.response(200, "Site configuration updated successfully", app_site_fields)
@api.response(403, "Insufficient permissions")
@api.response(404, "App not found")
@setup_required
@login_required
@account_initialization_required
@ -107,8 +75,6 @@ class AppSite(Resource):
if value is not None:
setattr(site, attr_name, value)
- if not isinstance(current_user, Account):
- raise ValueError("current_user must be an Account instance")
site.updated_by = current_user.id
site.updated_at = naive_utc_now()
db.session.commit()
@ -116,14 +82,7 @@ class AppSite(Resource):
return site
@console_ns.route("/apps/<uuid:app_id>/site/access-token-reset")
class AppSiteAccessTokenReset(Resource):
@api.doc("reset_app_site_access_token")
@api.doc(description="Reset access token for application site")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Access token reset successfully", app_site_fields)
@api.response(403, "Insufficient permissions (admin/owner required)")
@api.response(404, "App or site not found")
@setup_required
@login_required
@account_initialization_required
@ -140,10 +99,12 @@ class AppSiteAccessTokenReset(Resource):
raise NotFound
site.code = Site.generate_code(16)
- if not isinstance(current_user, Account):
- raise ValueError("current_user must be an Account instance")
site.updated_by = current_user.id
site.updated_at = naive_utc_now()
db.session.commit()
return site
api.add_resource(AppSite, "/apps/<uuid:app_id>/site")
api.add_resource(AppSiteAccessTokenReset, "/apps/<uuid:app_id>/site/access-token-reset")
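Note: the site update handler above walks the parsed arguments and copies only non-None values onto the model, so omitted request fields keep their stored values. The core of that pattern, reduced to a sketch:

class SiteSketch:
    # Minimal stand-in for the Site model, for illustration only.
    title = "old title"
    copyright = "old copyright"

def apply_partial_update(obj: object, updates: dict) -> None:
    # Skip None so that absent request fields leave existing values untouched.
    for attr_name, value in updates.items():
        if value is not None:
            setattr(obj, attr_name, value)

site = SiteSketch()
apply_partial_update(site, {"title": "new title", "copyright": None})
assert site.title == "new title" and site.copyright == "old copyright"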

View File

@ -5,9 +5,9 @@ import pytz
import sqlalchemy as sa
from flask import jsonify
from flask_login import current_user
from flask_restx import Resource, fields, reqparse
from flask_restx import Resource, reqparse
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.app.entities.app_invoke_entities import InvokeFrom
@ -17,25 +17,11 @@ from libs.login import login_required
from models import AppMode, Message
@console_ns.route("/apps/<uuid:app_id>/statistics/daily-messages")
class DailyMessageStatistic(Resource):
@api.doc("get_daily_message_statistics")
@api.doc(description="Get daily message statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
200,
"Daily message statistics retrieved successfully",
fields.List(fields.Raw(description="Daily message count data")),
)
- @get_app_model
@setup_required
@login_required
@account_initialization_required
+ @get_app_model
def get(self, app_model):
account = current_user
@ -88,25 +74,11 @@ WHERE
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/statistics/daily-conversations")
class DailyConversationStatistic(Resource):
@api.doc("get_daily_conversation_statistics")
@api.doc(description="Get daily conversation statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
200,
"Daily conversation statistics retrieved successfully",
fields.List(fields.Raw(description="Daily conversation count data")),
)
- @get_app_model
@setup_required
@login_required
@account_initialization_required
+ @get_app_model
def get(self, app_model):
account = current_user
@ -154,25 +126,11 @@ class DailyConversationStatistic(Resource):
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/statistics/daily-end-users")
class DailyTerminalsStatistic(Resource):
@api.doc("get_daily_terminals_statistics")
@api.doc(description="Get daily terminal/end-user statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
200,
"Daily terminal statistics retrieved successfully",
fields.List(fields.Raw(description="Daily terminal count data")),
)
- @get_app_model
@setup_required
@login_required
@account_initialization_required
+ @get_app_model
def get(self, app_model):
account = current_user
@ -225,25 +183,11 @@ WHERE
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/statistics/token-costs")
class DailyTokenCostStatistic(Resource):
@api.doc("get_daily_token_cost_statistics")
@api.doc(description="Get daily token cost statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
200,
"Daily token cost statistics retrieved successfully",
fields.List(fields.Raw(description="Daily token cost data")),
)
- @get_app_model
@setup_required
@login_required
@account_initialization_required
+ @get_app_model
def get(self, app_model):
account = current_user
@ -299,21 +243,7 @@ WHERE
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/statistics/average-session-interactions")
class AverageSessionInteractionStatistic(Resource):
@api.doc("get_average_session_interaction_statistics")
@api.doc(description="Get average session interaction statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
200,
"Average session interaction statistics retrieved successfully",
fields.List(fields.Raw(description="Average session interaction data")),
)
@setup_required
@login_required
@account_initialization_required
@ -389,25 +319,11 @@ ORDER BY
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/statistics/user-satisfaction-rate")
class UserSatisfactionRateStatistic(Resource):
@api.doc("get_user_satisfaction_rate_statistics")
@api.doc(description="Get user satisfaction rate statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
200,
"User satisfaction rate statistics retrieved successfully",
fields.List(fields.Raw(description="User satisfaction rate data")),
)
- @get_app_model
@setup_required
@login_required
@account_initialization_required
+ @get_app_model
def get(self, app_model):
account = current_user
@ -469,21 +385,7 @@ WHERE
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/statistics/average-response-time")
class AverageResponseTimeStatistic(Resource):
@api.doc("get_average_response_time_statistics")
@api.doc(description="Get average response time statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
200,
"Average response time statistics retrieved successfully",
fields.List(fields.Raw(description="Average response time data")),
)
@setup_required
@login_required
@account_initialization_required
@ -540,25 +442,11 @@ WHERE
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/statistics/tokens-per-second")
class TokensPerSecondStatistic(Resource):
@api.doc("get_tokens_per_second_statistics")
@api.doc(description="Get tokens per second statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
200,
"Tokens per second statistics retrieved successfully",
fields.List(fields.Raw(description="Tokens per second data")),
)
- @get_app_model
@setup_required
@login_required
@account_initialization_required
+ @get_app_model
def get(self, app_model):
account = current_user
@ -612,3 +500,13 @@ WHERE
response_data.append({"date": str(i.date), "tps": round(i.tokens_per_second, 4)})
return jsonify({"data": response_data})
api.add_resource(DailyMessageStatistic, "/apps/<uuid:app_id>/statistics/daily-messages")
api.add_resource(DailyConversationStatistic, "/apps/<uuid:app_id>/statistics/daily-conversations")
api.add_resource(DailyTerminalsStatistic, "/apps/<uuid:app_id>/statistics/daily-end-users")
api.add_resource(DailyTokenCostStatistic, "/apps/<uuid:app_id>/statistics/token-costs")
api.add_resource(AverageSessionInteractionStatistic, "/apps/<uuid:app_id>/statistics/average-session-interactions")
api.add_resource(UserSatisfactionRateStatistic, "/apps/<uuid:app_id>/statistics/user-satisfaction-rate")
api.add_resource(AverageResponseTimeStatistic, "/apps/<uuid:app_id>/statistics/average-response-time")
api.add_resource(TokensPerSecondStatistic, "/apps/<uuid:app_id>/statistics/tokens-per-second")
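Note: each statistics endpoint in this file moves @get_app_model from above the auth decorators to below them. Ordering matters because stacked decorators wrap bottom-up, so the topmost wrapper runs first on each request; with @get_app_model last (innermost), the authentication checks run before the app lookup. A toy demonstration with stand-in decorators:

from functools import wraps

def trace(label):
    # Stand-in for decorators like setup_required / login_required / get_app_model.
    def deco(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            print(f"enter {label}")
            return f(*args, **kwargs)
        return wrapper
    return deco

@trace("setup_required")
@trace("login_required")
@trace("get_app_model")
def get_stats():
    return {"data": []}

get_stats()
# Prints, in request order:
# enter setup_required
# enter login_required
# enter get_app_model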

View File

@ -4,14 +4,18 @@ from collections.abc import Sequence
from typing import cast
from flask import abort, request
- from flask_restx import Resource, fields, inputs, marshal_with, reqparse
+ from flask_restx import Resource, inputs, marshal_with, reqparse
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from configs import dify_config
- from controllers.console import api, console_ns
- from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
+ from controllers.console import api
+ from controllers.console.app.error import (
+     ConversationCompletedError,
+     DraftWorkflowNotExist,
+     DraftWorkflowNotSync,
+ )
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
@ -58,13 +62,7 @@ def _parse_file(workflow: Workflow, files: list[dict] | None = None) -> Sequence
return file_objs
@console_ns.route("/apps/<uuid:app_id>/workflows/draft")
class DraftWorkflowApi(Resource):
@api.doc("get_draft_workflow")
@api.doc(description="Get draft workflow for an application")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Draft workflow retrieved successfully", workflow_fields)
@api.response(404, "Draft workflow not found")
@setup_required
@login_required
@account_initialization_required
@ -76,7 +74,7 @@ class DraftWorkflowApi(Resource):
"""
# The role of the current user in the ta table must be admin, owner, or editor
assert isinstance(current_user, Account)
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
# fetch draft workflow by app_model
@ -93,30 +91,13 @@ class DraftWorkflowApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@api.doc("sync_draft_workflow")
@api.doc(description="Sync draft workflow configuration")
@api.expect(
api.model(
"SyncDraftWorkflowRequest",
{
"graph": fields.Raw(required=True, description="Workflow graph configuration"),
"features": fields.Raw(required=True, description="Workflow features configuration"),
"hash": fields.String(description="Workflow hash for validation"),
"environment_variables": fields.List(fields.Raw, required=True, description="Environment variables"),
"conversation_variables": fields.List(fields.Raw, description="Conversation variables"),
},
)
)
@api.response(200, "Draft workflow synced successfully", workflow_fields)
@api.response(400, "Invalid workflow configuration")
@api.response(403, "Permission denied")
def post(self, app_model: App):
"""
Sync draft workflow
"""
# The role of the current user in the ta table must be admin, owner, or editor
assert isinstance(current_user, Account)
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
content_type = request.headers.get("Content-Type", "")
@ -183,25 +164,7 @@ class DraftWorkflowApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflows/draft/run")
class AdvancedChatDraftWorkflowRunApi(Resource):
@api.doc("run_advanced_chat_draft_workflow")
@api.doc(description="Run draft workflow for advanced chat application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"AdvancedChatWorkflowRunRequest",
{
"query": fields.String(required=True, description="User query"),
"inputs": fields.Raw(description="Input variables"),
"files": fields.List(fields.Raw, description="File uploads"),
"conversation_id": fields.String(description="Conversation ID"),
},
)
)
@api.response(200, "Workflow run started successfully")
@api.response(400, "Invalid request parameters")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@ -212,7 +175,7 @@ class AdvancedChatDraftWorkflowRunApi(Resource):
"""
# The role of the current user in the ta table must be admin, owner, or editor
assert isinstance(current_user, Account)
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
if not isinstance(current_user, Account):
@ -250,23 +213,7 @@ class AdvancedChatDraftWorkflowRunApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflows/draft/iteration/nodes/<string:node_id>/run")
class AdvancedChatDraftRunIterationNodeApi(Resource):
@api.doc("run_advanced_chat_draft_iteration_node")
@api.doc(description="Run draft workflow iteration node for advanced chat")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"IterationNodeRunRequest",
{
"task_id": fields.String(required=True, description="Task ID"),
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Iteration node run started successfully")
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@ -278,7 +225,7 @@ class AdvancedChatDraftRunIterationNodeApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
@ -302,23 +249,7 @@ class AdvancedChatDraftRunIterationNodeApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run")
class WorkflowDraftRunIterationNodeApi(Resource):
@api.doc("run_workflow_draft_iteration_node")
@api.doc(description="Run draft workflow iteration node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"WorkflowIterationNodeRunRequest",
{
"task_id": fields.String(required=True, description="Task ID"),
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Workflow iteration node run started successfully")
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@ -330,7 +261,7 @@ class WorkflowDraftRunIterationNodeApi(Resource):
# The role of the current user in the ta table must be admin, owner, or editor
if not isinstance(current_user, Account):
raise Forbidden()
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
@ -354,23 +285,7 @@ class WorkflowDraftRunIterationNodeApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflows/draft/loop/nodes/<string:node_id>/run")
class AdvancedChatDraftRunLoopNodeApi(Resource):
@api.doc("run_advanced_chat_draft_loop_node")
@api.doc(description="Run draft workflow loop node for advanced chat")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"LoopNodeRunRequest",
{
"task_id": fields.String(required=True, description="Task ID"),
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Loop node run started successfully")
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@ -383,7 +298,7 @@ class AdvancedChatDraftRunLoopNodeApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
@ -407,23 +322,7 @@ class AdvancedChatDraftRunLoopNodeApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/loop/nodes/<string:node_id>/run")
class WorkflowDraftRunLoopNodeApi(Resource):
@api.doc("run_workflow_draft_loop_node")
@api.doc(description="Run draft workflow loop node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"WorkflowLoopNodeRunRequest",
{
"task_id": fields.String(required=True, description="Task ID"),
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Workflow loop node run started successfully")
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@ -436,7 +335,7 @@ class WorkflowDraftRunLoopNodeApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
@ -460,22 +359,7 @@ class WorkflowDraftRunLoopNodeApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/run")
class DraftWorkflowRunApi(Resource):
@api.doc("run_draft_workflow")
@api.doc(description="Run draft workflow")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"DraftWorkflowRunRequest",
{
"inputs": fields.Raw(required=True, description="Input variables"),
"files": fields.List(fields.Raw, description="File uploads"),
},
)
)
@api.response(200, "Draft workflow run started successfully")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@ -488,7 +372,7 @@ class DraftWorkflowRunApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
@ -514,14 +398,7 @@ class DraftWorkflowRunApi(Resource):
raise InvokeRateLimitHttpError(ex.description)
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/tasks/<string:task_id>/stop")
class WorkflowTaskStopApi(Resource):
@api.doc("stop_workflow_task")
@api.doc(description="Stop running workflow task")
@api.doc(params={"app_id": "Application ID", "task_id": "Task ID"})
@api.response(200, "Task stopped successfully")
@api.response(404, "Task not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@ -534,7 +411,7 @@ class WorkflowTaskStopApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
# Stop using both mechanisms for backward compatibility
@ -547,22 +424,7 @@ class WorkflowTaskStopApi(Resource):
return {"result": "success"}
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/run")
class DraftWorkflowNodeRunApi(Resource):
@api.doc("run_draft_workflow_node")
@api.doc(description="Run draft workflow node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"DraftWorkflowNodeRunRequest",
{
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Node run started successfully", workflow_run_node_execution_fields)
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@ -576,7 +438,7 @@ class DraftWorkflowNodeRunApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
@ -610,13 +472,7 @@ class DraftWorkflowNodeRunApi(Resource):
return workflow_node_execution
@console_ns.route("/apps/<uuid:app_id>/workflows/publish")
class PublishedWorkflowApi(Resource):
@api.doc("get_published_workflow")
@api.doc(description="Get published workflow for an application")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Published workflow retrieved successfully", workflow_fields)
@api.response(404, "Published workflow not found")
@setup_required
@login_required
@account_initialization_required
@ -630,7 +486,7 @@ class PublishedWorkflowApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
# fetch published workflow by app_model
@ -651,7 +507,7 @@ class PublishedWorkflowApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
@ -688,12 +544,7 @@ class PublishedWorkflowApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/workflows/default-workflow-block-configs")
class DefaultBlockConfigsApi(Resource):
@api.doc("get_default_block_configs")
@api.doc(description="Get default block configurations for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Default block configurations retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@ -706,7 +557,7 @@ class DefaultBlockConfigsApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
# Get default block configs
@ -714,13 +565,7 @@ class DefaultBlockConfigsApi(Resource):
return workflow_service.get_default_block_configs()
@console_ns.route("/apps/<uuid:app_id>/workflows/default-workflow-block-configs/<string:block_type>")
class DefaultBlockConfigApi(Resource):
@api.doc("get_default_block_config")
@api.doc(description="Get default block configuration by type")
@api.doc(params={"app_id": "Application ID", "block_type": "Block type"})
@api.response(200, "Default block configuration retrieved successfully")
@api.response(404, "Block type not found")
@setup_required
@login_required
@account_initialization_required
@ -732,7 +577,7 @@ class DefaultBlockConfigApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
@ -753,14 +598,7 @@ class DefaultBlockConfigApi(Resource):
return workflow_service.get_default_block_config(node_type=block_type, filters=filters)
@console_ns.route("/apps/<uuid:app_id>/convert-to-workflow")
class ConvertToWorkflowApi(Resource):
@api.doc("convert_to_workflow")
@api.doc(description="Convert application to workflow mode")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Application converted to workflow successfully")
@api.response(400, "Application cannot be converted")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@ -774,7 +612,7 @@ class ConvertToWorkflowApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
# The role of the current user in the ta table must be admin, owner, or editor
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
if request.data:
@ -797,14 +635,9 @@ class ConvertToWorkflowApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/config")
class WorkflowConfigApi(Resource):
"""Resource for workflow configuration."""
@api.doc("get_workflow_config")
@api.doc(description="Get workflow configuration")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Workflow configuration retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@ -815,12 +648,7 @@ class WorkflowConfigApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/workflows")
class PublishedAllWorkflowApi(Resource):
@api.doc("get_all_published_workflows")
@api.doc(description="Get all published workflows for an application")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Published workflows retrieved successfully", workflow_pagination_fields)
@setup_required
@login_required
@account_initialization_required
@ -833,7 +661,7 @@ class PublishedAllWorkflowApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
@ -871,23 +699,7 @@ class PublishedAllWorkflowApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/workflows/<string:workflow_id>")
class WorkflowByIdApi(Resource):
@api.doc("update_workflow_by_id")
@api.doc(description="Update workflow by ID")
@api.doc(params={"app_id": "Application ID", "workflow_id": "Workflow ID"})
@api.expect(
api.model(
"UpdateWorkflowRequest",
{
"environment_variables": fields.List(fields.Raw, description="Environment variables"),
"conversation_variables": fields.List(fields.Raw, description="Conversation variables"),
},
)
)
@api.response(200, "Workflow updated successfully", workflow_fields)
@api.response(404, "Workflow not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@ -900,7 +712,7 @@ class WorkflowByIdApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
# Check permission
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
@ -913,6 +725,7 @@ class WorkflowByIdApi(Resource):
raise ValueError("Marked name cannot exceed 20 characters")
if args.marked_comment and len(args.marked_comment) > 100:
raise ValueError("Marked comment cannot exceed 100 characters")
+ args = parser.parse_args()
# Prepare update data
update_data = {}
@ -955,7 +768,7 @@ class WorkflowByIdApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
# Check permission
- if not current_user.has_edit_permission:
+ if not current_user.is_editor:
raise Forbidden()
workflow_service = WorkflowService()
@ -978,14 +791,7 @@ class WorkflowByIdApi(Resource):
return None, 204
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/last-run")
class DraftWorkflowNodeLastRunApi(Resource):
@api.doc("get_draft_workflow_node_last_run")
@api.doc(description="Get last run result for draft workflow node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.response(200, "Node last run retrieved successfully", workflow_run_node_execution_fields)
@api.response(404, "Node last run not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@ -1004,3 +810,73 @@ class DraftWorkflowNodeLastRunApi(Resource):
if node_exec is None:
raise NotFound("last run not found")
return node_exec
api.add_resource(
DraftWorkflowApi,
"/apps/<uuid:app_id>/workflows/draft",
)
api.add_resource(
WorkflowConfigApi,
"/apps/<uuid:app_id>/workflows/draft/config",
)
api.add_resource(
AdvancedChatDraftWorkflowRunApi,
"/apps/<uuid:app_id>/advanced-chat/workflows/draft/run",
)
api.add_resource(
DraftWorkflowRunApi,
"/apps/<uuid:app_id>/workflows/draft/run",
)
api.add_resource(
WorkflowTaskStopApi,
"/apps/<uuid:app_id>/workflow-runs/tasks/<string:task_id>/stop",
)
api.add_resource(
DraftWorkflowNodeRunApi,
"/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/run",
)
api.add_resource(
AdvancedChatDraftRunIterationNodeApi,
"/apps/<uuid:app_id>/advanced-chat/workflows/draft/iteration/nodes/<string:node_id>/run",
)
api.add_resource(
WorkflowDraftRunIterationNodeApi,
"/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run",
)
api.add_resource(
AdvancedChatDraftRunLoopNodeApi,
"/apps/<uuid:app_id>/advanced-chat/workflows/draft/loop/nodes/<string:node_id>/run",
)
api.add_resource(
WorkflowDraftRunLoopNodeApi,
"/apps/<uuid:app_id>/workflows/draft/loop/nodes/<string:node_id>/run",
)
api.add_resource(
PublishedWorkflowApi,
"/apps/<uuid:app_id>/workflows/publish",
)
api.add_resource(
PublishedAllWorkflowApi,
"/apps/<uuid:app_id>/workflows",
)
api.add_resource(
DefaultBlockConfigsApi,
"/apps/<uuid:app_id>/workflows/default-workflow-block-configs",
)
api.add_resource(
DefaultBlockConfigApi,
"/apps/<uuid:app_id>/workflows/default-workflow-block-configs/<string:block_type>",
)
api.add_resource(
ConvertToWorkflowApi,
"/apps/<uuid:app_id>/convert-to-workflow",
)
api.add_resource(
WorkflowByIdApi,
"/apps/<uuid:app_id>/workflows/<string:workflow_id>",
)
api.add_resource(
DraftWorkflowNodeLastRunApi,
"/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/last-run",
)
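The block above restores explicit api.add_resource(...) registrations while the @console_ns.route(...) decorators remain on the same classes. A minimal flask-restx sketch of the two registration styles, with illustrative names rather than the repo's wiring:
# Minimal sketch, assuming flask-restx; names are illustrative.
from flask import Flask
from flask_restx import Api, Namespace, Resource

app = Flask(__name__)
api = Api(app)
console_ns = Namespace("console", description="Console endpoints")

# Style 1: decorator-based routing on a namespace.
@console_ns.route("/apps/<string:app_id>/workflows/draft")
class DraftWorkflowApi(Resource):
    def get(self, app_id: str):
        return {"app_id": app_id}

api.add_namespace(console_ns)

# Style 2: explicit registration on the Api object. Registering one
# resource class under the same endpoint twice would collide, so real
# code settles on one style per resource.
class WorkflowConfigApi(Resource):
    def get(self, app_id: str):
        return {"app_id": app_id, "config": {}}

api.add_resource(WorkflowConfigApi, "/apps/<string:app_id>/workflows/draft/config")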

View File

@@ -3,7 +3,7 @@ from flask_restx import Resource, marshal_with, reqparse
from flask_restx.inputs import int_range
from sqlalchemy.orm import Session
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.workflow.enums import WorkflowExecutionStatus
@@ -15,24 +15,7 @@ from models.model import AppMode
from services.workflow_app_service import WorkflowAppService
@console_ns.route("/apps/<uuid:app_id>/workflow-app-logs")
class WorkflowAppLogApi(Resource):
@api.doc("get_workflow_app_logs")
@api.doc(description="Get workflow application execution logs")
@api.doc(params={"app_id": "Application ID"})
@api.doc(
params={
"keyword": "Search keyword for filtering logs",
"status": "Filter by execution status (succeeded, failed, stopped, partial-succeeded)",
"created_at__before": "Filter logs created before this timestamp",
"created_at__after": "Filter logs created after this timestamp",
"created_by_end_user_session_id": "Filter by end user session ID",
"created_by_account": "Filter by account",
"page": "Page number (1-99999)",
"limit": "Number of items per page (1-100)",
}
)
@api.response(200, "Workflow app logs retrieved successfully", workflow_app_log_pagination_fields)
@setup_required
@login_required
@account_initialization_required
@@ -95,3 +78,6 @@ class WorkflowAppLogApi(Resource):
)
return workflow_app_log_pagination
api.add_resource(WorkflowAppLogApi, "/apps/<uuid:app_id>/workflow-app-logs")
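The log endpoint above filters by keyword and status and paginates with page (1-99999) and limit (1-100), per the removed @api.doc block. A sketch of that argument parsing with flask-restx, assuming illustrative defaults rather than the repo's exact parser:
from flask_restx import reqparse
from flask_restx.inputs import int_range

parser = reqparse.RequestParser()
parser.add_argument("keyword", type=str, location="args")
parser.add_argument(
    "status",
    type=str,
    choices=("succeeded", "failed", "stopped", "partial-succeeded"),
    location="args",
)
parser.add_argument("page", type=int_range(1, 99999), default=1, location="args")
parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
# Inside a Resource method: args = parser.parse_args()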

View File

@@ -1,18 +1,19 @@
import logging
from typing import NoReturn
from typing import Any, NoReturn
from flask import Response
from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.error import (
DraftWorkflowNotExist,
)
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.web.error import InvalidArgumentError, NotFoundError
from core.file import helpers as file_helpers
from core.variables.segment_group import SegmentGroup
from core.variables.segments import ArrayFileSegment, FileSegment, Segment
from core.variables.types import SegmentType
@@ -30,7 +31,7 @@ from services.workflow_service import WorkflowService
logger = logging.getLogger(__name__)
def _convert_values_to_json_serializable_object(value: Segment):
def _convert_values_to_json_serializable_object(value: Segment) -> Any:
if isinstance(value, FileSegment):
return value.value.model_dump()
elif isinstance(value, ArrayFileSegment):
@@ -41,7 +42,8 @@ def _convert_values_to_json_serializable_object(value: Segment):
return value.value
def _serialize_var_value(variable: WorkflowDraftVariable):
def _serialize_var_value(variable: WorkflowDraftVariable) -> Any:
"""Serialize variable value. If variable is truncated, return the truncated value."""
value = variable.get_value()
# create a copy of the value to avoid affecting the model cache.
value = value.model_copy(deep=True)
@@ -75,6 +77,22 @@ def _serialize_variable_type(workflow_draft_var: WorkflowDraftVariable) -> str:
return value_type.exposed_type().value
def _serialize_full_content(variable: WorkflowDraftVariable) -> dict | None:
"""Serialize full_content information for large variables."""
if not variable.is_truncated():
return None
variable_file = variable.variable_file
assert variable_file is not None
return {
"size_bytes": variable_file.size,
"value_type": variable_file.value_type.exposed_type().value,
"length": variable_file.length,
"download_url": file_helpers.get_signed_file_url(variable_file.upload_file_id, as_attachment=True),
}
_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS = {
"id": fields.String,
"type": fields.String(attribute=lambda model: model.get_variable_type()),
@@ -84,11 +102,13 @@ _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS = {
"value_type": fields.String(attribute=_serialize_variable_type),
"edited": fields.Boolean(attribute=lambda model: model.edited),
"visible": fields.Boolean,
"is_truncated": fields.Boolean(attribute=lambda model: model.file_id is not None),
}
_WORKFLOW_DRAFT_VARIABLE_FIELDS = dict(
_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS,
value=fields.Raw(attribute=_serialize_var_value),
full_content=fields.Raw(attribute=_serialize_full_content),
)
_WORKFLOW_DRAFT_ENV_VARIABLE_FIELDS = {
@@ -138,20 +158,14 @@ def _api_prerequisite(f):
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
def wrapper(*args, **kwargs):
assert isinstance(current_user, Account)
if not current_user.has_edit_permission:
if not current_user.is_editor:
raise Forbidden()
return f(*args, **kwargs)
return wrapper
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables")
class WorkflowVariableCollectionApi(Resource):
@api.doc("get_workflow_variables")
@api.doc(description="Get draft workflow variables")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"page": "Page number (1-100000)", "limit": "Number of items per page (1-100)"})
@api.response(200, "Workflow variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
def get(self, app_model: App):
@@ -180,9 +194,6 @@ class WorkflowVariableCollectionApi(Resource):
return workflow_vars
@api.doc("delete_workflow_variables")
@api.doc(description="Delete all draft workflow variables")
@api.response(204, "Workflow variables deleted successfully")
@_api_prerequisite
def delete(self, app_model: App):
draft_var_srv = WorkflowDraftVariableService(
@@ -211,12 +222,7 @@ def validate_node_id(node_id: str) -> NoReturn | None:
return None
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/variables")
class NodeVariableCollectionApi(Resource):
@api.doc("get_node_variables")
@api.doc(description="Get variables for a specific node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.response(200, "Node variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
def get(self, app_model: App, node_id: str):
@@ -229,9 +235,6 @@ class NodeVariableCollectionApi(Resource):
return node_vars
@api.doc("delete_node_variables")
@api.doc(description="Delete all variables for a specific node")
@api.response(204, "Node variables deleted successfully")
@_api_prerequisite
def delete(self, app_model: App, node_id: str):
validate_node_id(node_id)
@@ -241,16 +244,10 @@ class NodeVariableCollectionApi(Resource):
return Response("", 204)
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>")
class VariableApi(Resource):
_PATCH_NAME_FIELD = "name"
_PATCH_VALUE_FIELD = "value"
@api.doc("get_variable")
@api.doc(description="Get a specific workflow variable")
@api.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"})
@api.response(200, "Variable retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
@api.response(404, "Variable not found")
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
def get(self, app_model: App, variable_id: str):
@@ -264,19 +261,6 @@ class VariableApi(Resource):
raise NotFoundError(description=f"variable not found, id={variable_id}")
return variable
@api.doc("update_variable")
@api.doc(description="Update a workflow variable")
@api.expect(
api.model(
"UpdateVariableRequest",
{
"name": fields.String(description="Variable name"),
"value": fields.Raw(description="Variable value"),
},
)
)
@api.response(200, "Variable updated successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
@api.response(404, "Variable not found")
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
def patch(self, app_model: App, variable_id: str):
@@ -339,10 +323,6 @@ class VariableApi(Resource):
db.session.commit()
return variable
@api.doc("delete_variable")
@api.doc(description="Delete a workflow variable")
@api.response(204, "Variable deleted successfully")
@api.response(404, "Variable not found")
@_api_prerequisite
def delete(self, app_model: App, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
@@ -358,14 +338,7 @@ class VariableApi(Resource):
return Response("", 204)
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>/reset")
class VariableResetApi(Resource):
@api.doc("reset_variable")
@api.doc(description="Reset a workflow variable to its default value")
@api.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"})
@api.response(200, "Variable reset successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
@api.response(204, "Variable reset (no content)")
@api.response(404, "Variable not found")
@_api_prerequisite
def put(self, app_model: App, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
@@ -406,13 +379,7 @@ def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList:
return draft_vars
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/conversation-variables")
class ConversationVariableCollectionApi(Resource):
@api.doc("get_conversation_variables")
@api.doc(description="Get conversation variables for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Conversation variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@api.response(404, "Draft workflow not found")
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
def get(self, app_model: App):
@@ -428,25 +395,14 @@ class ConversationVariableCollectionApi(Resource):
return _get_variable_list(app_model, CONVERSATION_VARIABLE_NODE_ID)
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/system-variables")
class SystemVariableCollectionApi(Resource):
@api.doc("get_system_variables")
@api.doc(description="Get system variables for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "System variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
def get(self, app_model: App):
return _get_variable_list(app_model, SYSTEM_VARIABLE_NODE_ID)
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/environment-variables")
class EnvironmentVariableCollectionApi(Resource):
@api.doc("get_environment_variables")
@api.doc(description="Get environment variables for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Environment variables retrieved successfully")
@api.response(404, "Draft workflow not found")
@_api_prerequisite
def get(self, app_model: App):
"""
@@ -478,3 +434,16 @@ class EnvironmentVariableCollectionApi(Resource):
)
return {"items": env_vars_list}
api.add_resource(
WorkflowVariableCollectionApi,
"/apps/<uuid:app_id>/workflows/draft/variables",
)
api.add_resource(NodeVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/variables")
api.add_resource(VariableApi, "/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>")
api.add_resource(VariableResetApi, "/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>/reset")
api.add_resource(ConversationVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/conversation-variables")
api.add_resource(SystemVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/system-variables")
api.add_resource(EnvironmentVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/environment-variables")
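The hunks above add an is_truncated flag and a full_content payload (size, value type, length, signed download URL) for draft variables whose values were offloaded to file storage. A standalone sketch of the attribute-callable marshalling pattern; DraftVar and the URL are stand-ins, not the repo's classes:
from dataclasses import dataclass
from flask_restx import fields, marshal

@dataclass
class DraftVar:
    id: str
    file_id: str | None
    value: object

    def full_content(self) -> dict | None:
        if self.file_id is None:
            return None  # small values are inlined; nothing to download
        return {"download_url": f"/files/{self.file_id}?sig=..."}  # stand-in URL

VAR_FIELDS = {
    "id": fields.String,
    "is_truncated": fields.Boolean(attribute=lambda m: m.file_id is not None),
    "value": fields.Raw,
    "full_content": fields.Raw(attribute=lambda m: m.full_content()),
}

print(marshal(DraftVar(id="v1", file_id=None, value=42), VAR_FIELDS))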

View File

@@ -4,7 +4,7 @@ from flask_login import current_user
from flask_restx import Resource, marshal_with, reqparse
from flask_restx.inputs import int_range
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from fields.workflow_run_fields import (
@@ -19,13 +19,7 @@ from models import Account, App, AppMode, EndUser
from services.workflow_run_service import WorkflowRunService
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs")
class AdvancedChatAppWorkflowRunListApi(Resource):
@api.doc("get_advanced_chat_workflow_runs")
@api.doc(description="Get advanced chat workflow run list")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
@api.response(200, "Workflow runs retrieved successfully", advanced_chat_workflow_run_pagination_fields)
@setup_required
@login_required
@account_initialization_required
@@ -46,13 +40,7 @@ class AdvancedChatAppWorkflowRunListApi(Resource):
return result
@console_ns.route("/apps/<uuid:app_id>/workflow-runs")
class WorkflowRunListApi(Resource):
@api.doc("get_workflow_runs")
@api.doc(description="Get workflow run list")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
@api.response(200, "Workflow runs retrieved successfully", workflow_run_pagination_fields)
@setup_required
@login_required
@account_initialization_required
@@ -73,13 +61,7 @@ class WorkflowRunListApi(Resource):
return result
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>")
class WorkflowRunDetailApi(Resource):
@api.doc("get_workflow_run_detail")
@api.doc(description="Get workflow run detail")
@api.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
@api.response(200, "Workflow run detail retrieved successfully", workflow_run_detail_fields)
@api.response(404, "Workflow run not found")
@setup_required
@login_required
@account_initialization_required
@@ -97,13 +79,7 @@ class WorkflowRunDetailApi(Resource):
return workflow_run
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/node-executions")
class WorkflowRunNodeExecutionListApi(Resource):
@api.doc("get_workflow_run_node_executions")
@api.doc(description="Get workflow run node execution list")
@api.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
@api.response(200, "Node executions retrieved successfully", workflow_run_node_execution_list_fields)
@api.response(404, "Workflow run not found")
@setup_required
@login_required
@account_initialization_required
@@ -124,3 +100,9 @@ class WorkflowRunNodeExecutionListApi(Resource):
)
return {"data": node_executions}
api.add_resource(AdvancedChatAppWorkflowRunListApi, "/apps/<uuid:app_id>/advanced-chat/workflow-runs")
api.add_resource(WorkflowRunListApi, "/apps/<uuid:app_id>/workflow-runs")
api.add_resource(WorkflowRunDetailApi, "/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>")
api.add_resource(WorkflowRunNodeExecutionListApi, "/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/node-executions")
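The run-list endpoints above page with a last_id cursor and a limit of 1-100 items, per the removed docs. A hedged sketch of cursor pagination over an in-memory list standing in for WorkflowRunService:
from flask_restx import reqparse
from flask_restx.inputs import int_range

parser = reqparse.RequestParser()
parser.add_argument("last_id", type=str, location="args")
parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")

def paginate(items: list[dict], last_id: str | None, limit: int) -> dict:
    # Items are assumed newest-first; resume just after the cursor when given.
    start = 0
    if last_id is not None:
        ids = [item["id"] for item in items]
        start = ids.index(last_id) + 1 if last_id in ids else len(items)
    page = items[start : start + limit]
    return {"data": page, "has_more": start + limit < len(items)}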

View File

@@ -7,7 +7,7 @@ from flask import jsonify
from flask_login import current_user
from flask_restx import Resource, reqparse
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
@@ -17,17 +17,11 @@ from models.enums import WorkflowRunTriggeredFrom
from models.model import AppMode
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/daily-conversations")
class WorkflowDailyRunsStatistic(Resource):
@api.doc("get_workflow_daily_runs_statistic")
@api.doc(description="Get workflow daily runs statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Daily runs statistics retrieved successfully")
@get_app_model
@setup_required
@login_required
@account_initialization_required
@get_app_model
def get(self, app_model):
account = current_user
@@ -85,17 +79,11 @@ WHERE
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/daily-terminals")
class WorkflowDailyTerminalsStatistic(Resource):
@api.doc("get_workflow_daily_terminals_statistic")
@api.doc(description="Get workflow daily terminals statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Daily terminals statistics retrieved successfully")
@get_app_model
@setup_required
@login_required
@account_initialization_required
@get_app_model
def get(self, app_model):
account = current_user
@@ -153,17 +141,11 @@ WHERE
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/token-costs")
class WorkflowDailyTokenCostStatistic(Resource):
@api.doc("get_workflow_daily_token_cost_statistic")
@api.doc(description="Get workflow daily token cost statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Daily token cost statistics retrieved successfully")
@get_app_model
@setup_required
@login_required
@account_initialization_required
@get_app_model
def get(self, app_model):
account = current_user
@@ -226,13 +208,7 @@ WHERE
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/average-app-interactions")
class WorkflowAverageAppInteractionStatistic(Resource):
@api.doc("get_workflow_average_app_interaction_statistic")
@api.doc(description="Get workflow average app interaction statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Average app interaction statistics retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -309,3 +285,11 @@ GROUP BY
)
return jsonify({"data": response_data})
api.add_resource(WorkflowDailyRunsStatistic, "/apps/<uuid:app_id>/workflow/statistics/daily-conversations")
api.add_resource(WorkflowDailyTerminalsStatistic, "/apps/<uuid:app_id>/workflow/statistics/daily-terminals")
api.add_resource(WorkflowDailyTokenCostStatistic, "/apps/<uuid:app_id>/workflow/statistics/token-costs")
api.add_resource(
WorkflowAverageAppInteractionStatistic, "/apps/<uuid:app_id>/workflow/statistics/average-app-interactions"
)
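The hunks above move @get_app_model from above @setup_required to below @account_initialization_required. Python applies decorators bottom-up, so the decorator nearest the function becomes the innermost wrapper and its check runs last; after the move, setup and login checks run before the app model is loaded. A minimal demonstration with illustrative names:
from functools import wraps

def tag(name):
    def decorator(fn):
        @wraps(fn)
        def wrapper(*args, **kwargs):
            print(f"enter {name}")
            return fn(*args, **kwargs)
        return wrapper
    return decorator

@tag("setup_required")
@tag("login_required")
@tag("get_app_model")
def view():
    print("view body")

view()
# enter setup_required / enter login_required / enter get_app_model / view body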

View File

@@ -1,6 +1,6 @@
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar, Union
from typing import Optional, Union
from controllers.console.app.error import AppNotFoundError
from extensions.ext_database import db
@@ -8,11 +8,8 @@ from libs.login import current_user
from models import App, AppMode
from models.account import Account
P = ParamSpec("P")
R = TypeVar("R")
def _load_app_model(app_id: str) -> App | None:
def _load_app_model(app_id: str) -> Optional[App]:
assert isinstance(current_user, Account)
app_model = (
db.session.query(App)
@@ -22,10 +19,10 @@ def _load_app_model(app_id: str) -> App | None:
return app_model
def get_app_model(view: Callable[P, R] | None = None, *, mode: Union[AppMode, list[AppMode], None] = None):
def decorator(view_func: Callable[P, R]):
def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode], None] = None):
def decorator(view_func):
@wraps(view_func)
def decorated_view(*args: P.args, **kwargs: P.kwargs):
def decorated_view(*args, **kwargs):
if not kwargs.get("app_id"):
raise ValueError("missing app_id in path parameters")
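This hunk swaps the ParamSpec/TypeVar typing on get_app_model for an untyped Callable. For reference, a self-contained sketch of the pattern being removed, which lets a decorator preserve the wrapped view's signature for type checkers (requires Python 3.10+; the decorator here is illustrative):
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")

def logged(view_func: Callable[P, R]) -> Callable[P, R]:
    @wraps(view_func)
    def decorated_view(*args: P.args, **kwargs: P.kwargs) -> R:
        print(f"calling {view_func.__name__}")
        return view_func(*args, **kwargs)
    return decorated_view

@logged
def add(a: int, b: int) -> int:
    return a + b

print(add(2, 3))  # type checkers still see (int, int) -> int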

View File

@@ -1,8 +1,8 @@
from flask import request
from flask_restx import Resource, fields, reqparse
from flask_restx import Resource, reqparse
from constants.languages import supported_language
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.error import AlreadyActivateError
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
@@ -10,36 +10,14 @@ from libs.helper import StrLen, email, extract_remote_ip, timezone
from models.account import AccountStatus
from services.account_service import AccountService, RegisterService
active_check_parser = reqparse.RequestParser()
active_check_parser.add_argument(
"workspace_id", type=str, required=False, nullable=True, location="args", help="Workspace ID"
)
active_check_parser.add_argument(
"email", type=email, required=False, nullable=True, location="args", help="Email address"
)
active_check_parser.add_argument(
"token", type=str, required=True, nullable=False, location="args", help="Activation token"
)
@console_ns.route("/activate/check")
class ActivateCheckApi(Resource):
@api.doc("check_activation_token")
@api.doc(description="Check if activation token is valid")
@api.expect(active_check_parser)
@api.response(
200,
"Success",
api.model(
"ActivationCheckResponse",
{
"is_valid": fields.Boolean(description="Whether token is valid"),
"data": fields.Raw(description="Activation data if valid"),
},
),
)
def get(self):
args = active_check_parser.parse_args()
parser = reqparse.RequestParser()
parser.add_argument("workspace_id", type=str, required=False, nullable=True, location="args")
parser.add_argument("email", type=email, required=False, nullable=True, location="args")
parser.add_argument("token", type=str, required=True, nullable=False, location="args")
args = parser.parse_args()
workspaceId = args["workspace_id"]
reg_email = args["email"]
@@ -60,36 +38,18 @@ class ActivateCheckApi(Resource):
return {"is_valid": False}
active_parser = reqparse.RequestParser()
active_parser.add_argument("workspace_id", type=str, required=False, nullable=True, location="json")
active_parser.add_argument("email", type=email, required=False, nullable=True, location="json")
active_parser.add_argument("token", type=str, required=True, nullable=False, location="json")
active_parser.add_argument("name", type=StrLen(30), required=True, nullable=False, location="json")
active_parser.add_argument(
"interface_language", type=supported_language, required=True, nullable=False, location="json"
)
active_parser.add_argument("timezone", type=timezone, required=True, nullable=False, location="json")
@console_ns.route("/activate")
class ActivateApi(Resource):
@api.doc("activate_account")
@api.doc(description="Activate account with invitation token")
@api.expect(active_parser)
@api.response(
200,
"Account activated successfully",
api.model(
"ActivationResponse",
{
"result": fields.String(description="Operation result"),
"data": fields.Raw(description="Login token data"),
},
),
)
@api.response(400, "Already activated or invalid token")
def post(self):
args = active_parser.parse_args()
parser = reqparse.RequestParser()
parser.add_argument("workspace_id", type=str, required=False, nullable=True, location="json")
parser.add_argument("email", type=email, required=False, nullable=True, location="json")
parser.add_argument("token", type=str, required=True, nullable=False, location="json")
parser.add_argument("name", type=StrLen(30), required=True, nullable=False, location="json")
parser.add_argument(
"interface_language", type=supported_language, required=True, nullable=False, location="json"
)
parser.add_argument("timezone", type=timezone, required=True, nullable=False, location="json")
args = parser.parse_args()
invitation = RegisterService.get_invitation_if_token_valid(args["workspace_id"], args["email"], args["token"])
if invitation is None:
@@ -110,3 +70,7 @@ class ActivateApi(Resource):
token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))
return {"result": "success", "data": token_pair.model_dump()}
api.add_resource(ActivateCheckApi, "/activate/check")
api.add_resource(ActivateApi, "/activate")
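The hunks above fold the module-level activation parsers back into the request handlers. reqparse accepts any callable as an argument type, which is how helpers such as StrLen, email, and timezone plug in; a sketch of that mechanism with stand-in validators, not the libs.helper implementations:
import re
from flask_restx import reqparse

def str_len(max_len: int):
    # Returns a validator; reqparse calls it with the raw value.
    def validate(value: str) -> str:
        if len(value) > max_len:
            raise ValueError(f"must be at most {max_len} characters")
        return value
    return validate

def email(value: str) -> str:
    if not re.fullmatch(r"[^@\s]+@[^@\s]+\.[^@\s]+", value):
        raise ValueError("invalid email address")
    return value

parser = reqparse.RequestParser()
parser.add_argument("name", type=str_len(30), required=True, location="json")
parser.add_argument("email", type=email, required=True, location="json")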

View File

@@ -3,11 +3,11 @@ import logging
import requests
from flask import current_app, redirect, request
from flask_login import current_user
from flask_restx import Resource, fields
from flask_restx import Resource
from werkzeug.exceptions import Forbidden
from configs import dify_config
from controllers.console import api, console_ns
from controllers.console import api
from libs.login import login_required
from libs.oauth_data_source import NotionOAuth
@@ -28,21 +28,7 @@ def get_oauth_providers():
return OAUTH_PROVIDERS
@console_ns.route("/oauth/data-source/<string:provider>")
class OAuthDataSource(Resource):
@api.doc("oauth_data_source")
@api.doc(description="Get OAuth authorization URL for data source provider")
@api.doc(params={"provider": "Data source provider name (notion)"})
@api.response(
200,
"Authorization URL or internal setup success",
api.model(
"OAuthDataSourceResponse",
{"data": fields.Raw(description="Authorization URL or 'internal' for internal setup")},
),
)
@api.response(400, "Invalid provider")
@api.response(403, "Admin privileges required")
def get(self, provider: str):
# The role of the current user in the table must be admin or owner
if not current_user.is_admin_or_owner:
@@ -63,19 +49,7 @@ class OAuthDataSourceCallback(Resource):
return {"data": auth_url}, 200
@console_ns.route("/oauth/data-source/callback/<string:provider>")
class OAuthDataSourceCallback(Resource):
@api.doc("oauth_data_source_callback")
@api.doc(description="Handle OAuth callback from data source provider")
@api.doc(
params={
"provider": "Data source provider name (notion)",
"code": "Authorization code from OAuth provider",
"error": "Error message from OAuth provider",
}
)
@api.response(302, "Redirect to console with result")
@api.response(400, "Invalid provider")
def get(self, provider: str):
OAUTH_DATASOURCE_PROVIDERS = get_oauth_providers()
with current_app.app_context():
@@ -94,19 +68,7 @@ class OAuthDataSourceBinding(Resource):
return redirect(f"{dify_config.CONSOLE_WEB_URL}?type=notion&error=Access denied")
@console_ns.route("/oauth/data-source/binding/<string:provider>")
class OAuthDataSourceBinding(Resource):
@api.doc("oauth_data_source_binding")
@api.doc(description="Bind OAuth data source with authorization code")
@api.doc(
params={"provider": "Data source provider name (notion)", "code": "Authorization code from OAuth provider"}
)
@api.response(
200,
"Data source binding success",
api.model("OAuthDataSourceBindingResponse", {"result": fields.String(description="Operation result")}),
)
@api.response(400, "Invalid provider or code")
def get(self, provider: str):
OAUTH_DATASOURCE_PROVIDERS = get_oauth_providers()
with current_app.app_context():
@@ -128,17 +90,7 @@ class OAuthDataSourceSync(Resource):
return {"result": "success"}, 200
@console_ns.route("/oauth/data-source/<string:provider>/<uuid:binding_id>/sync")
class OAuthDataSourceSync(Resource):
@api.doc("oauth_data_source_sync")
@api.doc(description="Sync data from OAuth data source")
@api.doc(params={"provider": "Data source provider name (notion)", "binding_id": "Data source binding ID"})
@api.response(
200,
"Data source sync success",
api.model("OAuthDataSourceSyncResponse", {"result": fields.String(description="Operation result")}),
)
@api.response(400, "Invalid provider or sync failed")
@setup_required
@login_required
@account_initialization_required
@@ -159,3 +111,9 @@ class OAuthDataSourceSync(Resource):
return {"error": "OAuth data source process failed"}, 400
return {"result": "success"}, 200
api.add_resource(OAuthDataSource, "/oauth/data-source/<string:provider>")
api.add_resource(OAuthDataSourceCallback, "/oauth/data-source/callback/<string:provider>")
api.add_resource(OAuthDataSourceBinding, "/oauth/data-source/binding/<string:provider>")
api.add_resource(OAuthDataSourceSync, "/oauth/data-source/<string:provider>/<uuid:binding_id>/sync")
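Each OAuth endpoint above begins by resolving the provider path segment against a provider table and returns 400 for unknown names. A stand-in sketch of that lookup; NotionOAuth is stubbed and all values are illustrative:
class NotionOAuth:
    def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
        self.redirect_uri = redirect_uri

    def get_authorization_url(self) -> str:
        # Stubbed; the real client builds a signed Notion authorize URL.
        return f"https://example.com/oauth/authorize?redirect_uri={self.redirect_uri}"

OAUTH_PROVIDERS = {
    "notion": NotionOAuth(
        client_id="...",  # placeholder
        client_secret="...",  # placeholder
        redirect_uri="https://console.example.com/oauth/data-source/callback/notion",
    ),
}

def get_auth_url(provider: str):
    oauth = OAUTH_PROVIDERS.get(provider)
    if oauth is None:
        return {"error": "invalid provider"}, 400
    return {"data": oauth.get_authorization_url()}, 200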

Some files were not shown because too many files have changed in this diff.