Compare commits


2 Commits

SHA1        Message                        Date
6e6604d28c  delete remove tsne position    2024-07-02 14:55:05 +08:00
e7b792f537  delete remove tsne position    2024-07-02 14:52:41 +08:00
1707 changed files with 15468 additions and 68445 deletions

View File

@ -1,10 +1,9 @@
#!/bin/bash
cd web && npm install
pipx install poetry
echo 'alias start-api="cd /workspaces/dify/api && poetry run python -m flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
echo 'alias start-worker="cd /workspaces/dify/api && poetry run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc
echo 'alias start-api="cd /workspaces/dify/api && flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
echo 'alias start-worker="cd /workspaces/dify/api && celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc
echo 'alias start-web="cd /workspaces/dify/web && npm run dev"' >> ~/.bashrc
echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify up -d"' >> ~/.bashrc
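These aliases only take effect in a fresh shell. A minimal usage sketch, assuming the devcontainer paths above and that the middleware compose file provides the usual backing services:

```bash
# Pick up the aliases the post-create script appended to ~/.bashrc
source ~/.bashrc

# Bring up the middleware containers first, then the API, worker, and web dev server.
# start-api/start-worker/start-web run in the foreground, so use separate terminals.
start-containers   # docker-compose.middleware.yaml, project name "dify"
start-api          # Flask API on http://localhost:5001 (debug mode)
start-worker       # Celery worker for the dataset/generation/mail/... queues
start-web          # Next.js dev server on http://localhost:3000
```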

View File

@ -1,24 +0,0 @@
title: "General Discussion"
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
attributes:
label: Content
placeholder: Please describe the content you would like to discuss.
validations:
required: true
- type: markdown
attributes:
value: Please limit one request per issue.

View File

@ -1,30 +0,0 @@
title: "Help"
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
attributes:
label: 1. Is this request related to a challenge you're experiencing? Tell me about your story.
placeholder: Please describe the specific scenario or problem you're facing as clearly as possible. For instance "I was trying to use [feature] for [specific task], and [what happened]... It was frustrating because...."
validations:
required: true
- type: textarea
attributes:
label: 2. Additional context or comments
placeholder: (Any other information, comments, documentations, links, or screenshots that would provide more clarity. This is the place to add anything else not covered above.)
validations:
required: false
- type: markdown
attributes:
value: Please limit one request per issue.

View File

@ -1,37 +0,0 @@
title: Suggestions for New Features
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
attributes:
label: 1. Is this request related to a challenge you're experiencing? Tell me about your story.
placeholder: Please describe the specific scenario or problem you're facing as clearly as possible. For instance "I was trying to use [feature] for [specific task], and [what happened]... It was frustrating because...."
validations:
required: true
- type: textarea
attributes:
label: 2. Additional context or comments
placeholder: (Any other information, comments, documentations, links, or screenshots that would provide more clarity. This is the place to add anything else not covered above.)
validations:
required: false
- type: checkboxes
attributes:
label: 3. Can you help us with this feature?
description: Let us know! This is not a commitment, but a starting point for collaboration.
options:
- label: I am interested in contributing to this feature.
required: false
- type: markdown
attributes:
value: Please limit one request per issue.

View File

@ -14,7 +14,7 @@ body:
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:"
- label: "请务必使用英文提交 Issue否则会被关闭。谢谢:"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
@ -22,6 +22,7 @@ body:
- type: input
attributes:
label: Dify version
placeholder: 0.6.11
description: See about section in Dify console
validations:
required: true

View File

@ -12,7 +12,7 @@ body:
required: true
- label: I confirm that I am using English to submit report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:"
- label: "请务必使用英文提交 Issue否则会被关闭。谢谢:"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true

View File

@ -12,7 +12,7 @@ body:
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:"
- label: "请务必使用英文提交 Issue否则会被关闭。谢谢:"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true

View File

@ -12,13 +12,14 @@ body:
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:"
- label: "请务必使用英文提交 Issue否则会被关闭。谢谢:"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: input
attributes:
label: Dify version
placeholder: 0.3.21
description: Hover over system tray icon or look at Settings
validations:
required: true

View File

@ -1,21 +1,13 @@
# Checklist:
> [!IMPORTANT]
> Please review the checklist below before submitting your pull request.
- [ ] Please open an issue before creating a PR or link to an existing issue
- [ ] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I ran `dev/reformat`(backend) and `cd web && npx lint-staged`(frontend) to appease the lint gods
# Description
Describe the big picture of your changes here to communicate to the maintainers why we should accept this pull request. If it fixes a bug or resolves a feature request, be sure to link to that issue. Close issue syntax: `Fixes #<issue number>`, see [documentation](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword) for more details.
Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change.
Fixes
Fixes # (issue)
## Type of Change
Please delete options that are not relevant.
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
@ -23,12 +15,18 @@ Fixes
- [ ] Improvement, including but not limited to code refactoring, performance optimization, and UI/UX improvement
- [ ] Dependency upgrade
# Testing Instructions
# How Has This Been Tested?
Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration
- [ ] Test A
- [ ] Test B
- [ ] TODO
# Suggested Checklist:
- [ ] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] My changes generate no new warnings
- [ ] I ran `dev/reformat`(backend) and `cd web && npx lint-staged`(frontend) to appease the lint gods
- [ ] `optional` I have made corresponding changes to the documentation
- [ ] `optional` I have added tests that prove my fix is effective or that my feature works
- [ ] `optional` New and existing unit tests pass locally with my changes

View File

@ -21,7 +21,6 @@ jobs:
python-version:
- "3.10"
- "3.11"
- "3.12"
steps:
- name: Checkout code
@ -76,7 +75,7 @@ jobs:
- name: Run Workflow
run: poetry run -C api bash dev/pytest/pytest_workflow.sh
- name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch)
- name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma)
uses: hoverkraft-tech/compose-action@v2.0.0
with:
compose-file: |
@ -90,6 +89,5 @@ jobs:
pgvecto-rs
pgvector
chroma
elasticsearch
- name: Test Vector Stores
run: poetry run -C api bash dev/pytest/pytest_vdb.sh
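This hunk drops Elasticsearch from the vector-store job. A rough sketch of reproducing that job locally, assuming the repo root as working directory and the same compose file and scripts the workflow references (the script path and service list are assumptions based on the step shown above):

```bash
# Expose host ports for the vector stores (same script the workflow calls)
bash .github/workflows/expose_service_ports.sh

# Start the vector stores named in the compose-action step; Milvus/MyScale
# services are omitted here for brevity
docker compose -f docker/docker-compose.yaml up -d \
  weaviate qdrant pgvecto-rs pgvector chroma

# Run the same suite the "Test Vector Stores" step invokes
poetry run -C api bash dev/pytest/pytest_vdb.sh
```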

View File

@ -48,18 +48,18 @@ jobs:
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_TOKEN }}
- name: Extract metadata for Docker
id: meta
uses: docker/metadata-action@v5
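The reorder above simply moves the Docker Hub login ahead of the QEMU/Buildx setup. For readers less familiar with those actions, a hedged CLI equivalent of the sequence (the image tag and build context are placeholders, not taken from this workflow):

```bash
# 1. Log in to Docker Hub (the workflow reads these from repository secrets)
echo "$DOCKERHUB_TOKEN" | docker login -u "$DOCKERHUB_USER" --password-stdin

# 2. Register QEMU binfmt handlers, as docker/setup-qemu-action does
docker run --privileged --rm tonistiigi/binfmt --install all

# 3. Create and select a Buildx builder, as docker/setup-buildx-action does
docker buildx create --use --name dify-ci-builder

# 4. Build and push a multi-platform image (placeholder tag and context)
docker buildx build --platform linux/amd64,linux/arm64 \
  -t "$DOCKERHUB_USER/dify-api:dev" --push api/
```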

View File

@ -6,6 +6,5 @@ yq eval '.services.chroma.ports += ["8000:8000"]' -i docker/docker-compose.yaml
yq eval '.services["milvus-standalone"].ports += ["19530:19530"]' -i docker/docker-compose.yaml
yq eval '.services.pgvector.ports += ["5433:5432"]' -i docker/docker-compose.yaml
yq eval '.services["pgvecto-rs"].ports += ["5431:5432"]' -i docker/docker-compose.yaml
yq eval '.services["elasticsearch"].ports += ["9200:9200"]' -i docker/docker-compose.yaml
echo "Ports exposed for sandbox, weaviate, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch"
echo "Ports exposed for sandbox, weaviate, qdrant, chroma, milvus, pgvector, pgvecto-rs."

View File

@ -45,10 +45,6 @@ jobs:
if: steps.changed-files.outputs.any_changed == 'true'
run: poetry run -C api dotenv-linter ./api/.env.example ./web/.env.example
- name: Ruff formatter check
if: steps.changed-files.outputs.any_changed == 'true'
run: poetry run -C api ruff format --check ./api
- name: Lint hints
if: failure()
run: echo "Please run 'dev/reformat' to fix the fixable linting errors."

.gitignore (vendored), 4 changed lines
View File

@ -155,7 +155,6 @@ docker-legacy/volumes/milvus/*
docker-legacy/volumes/chroma/*
docker/volumes/app/storage/*
docker/volumes/certbot/*
docker/volumes/db/data/*
docker/volumes/redis/data/*
docker/volumes/weaviate/*
@ -175,6 +174,3 @@ sdks/python-client/dify_client.egg-info
.vscode/*
!.vscode/launch.json
pyrightconfig.json
api/.vscode
.idea/

.vscode/launch.json (vendored), 1 changed line
View File

@ -13,6 +13,7 @@
"jinja": true,
"env": {
"FLASK_APP": "app.py",
"FLASK_DEBUG": "1",
"GEVENT_SUPPORT": "True"
},
"args": [

View File

@ -81,7 +81,7 @@ Dify requires the following dependencies to build, make sure they're installed o
Dify is composed of a backend and a frontend. Navigate to the backend directory by `cd api/`, then follow the [Backend README](api/README.md) to install it. In a separate terminal, navigate to the frontend directory by `cd web/`, then follow the [Frontend README](web/README.md) to install.
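A condensed sketch of those two installs, assuming the defaults described in the backend and frontend READMEs (Poetry for the API, npm for the web app; the .env and migration steps are assumptions based on those READMEs):

```bash
# Backend (api/): install dependencies and prepare a local environment
cd api
poetry install
cp .env.example .env                    # assumption: fill in secrets/DB settings here
poetry run python -m flask db upgrade   # assumption: apply database migrations

# Frontend (web/): install dependencies in a separate terminal and start the dev server
cd ../web
npm install
npm run dev                             # http://localhost:3000 by default
```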
Check the [installation FAQ](https://docs.dify.ai/learn-more/faq/self-host-faq) for a list of common issues and steps to troubleshoot.
Check the [installation FAQ](https://docs.dify.ai/getting-started/faq/install-faq) for a list of common issues and steps to troubleshoot.
### 5. Visit dify in your browser

View File

@ -2,17 +2,17 @@
考虑到我们的现状,我们需要灵活快速地交付,但我们也希望确保像你这样的贡献者在贡献过程中获得尽可能顺畅的体验。我们为此编写了这份贡献指南,旨在让你熟悉代码库和我们与贡献者的合作方式,以便你能快速进入有趣的部分。
这份指南,就像 Dify 本身一样,是一个不断改进的工作。如果有时它落后于实际项目,我们非常感谢你的理解,并欢迎提供任何反馈以供我们改进。
这份指南,就像 Dify 本身一样,是一个不断改进的工作。如果有时它落后于实际项目,我们非常感谢你的理解,并欢迎任何反馈以供我们改进。
在许可方面,请花一分钟阅读我们简短的 [许可证和贡献者协议](./LICENSE)。社区还遵守 [行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。
在许可方面,请花一分钟阅读我们简短的[许可证和贡献者协议](./LICENSE)。社区还遵守[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。
## 在开始之前
[查找](https://github.com/langgenius/dify/issues?q=is:issue+is:closed)现有问题,或 [创建](https://github.com/langgenius/dify/issues/new/choose) 一个新问题。我们将问题分为两类:
[查找](https://github.com/langgenius/dify/issues?q=is:issue+is:closed)现有问题,或[创建](https://github.com/langgenius/dify/issues/new/choose)一个新问题。我们将问题分为两类:
### 功能请求:
* 如果您要提出新的功能请求,请解释所提议的功能的目标,并尽可能提供详细的上下文。[@perzeusss](https://github.com/perzeuss) 制作了一个很好的 [功能请求助手](https://udify.app/chat/MK2kVSnw1gakVwMX),可以帮助您起草需求。随时尝试一下。
* 如果您要提出新的功能请求,请解释所提议的功能的目标,并尽可能提供详细的上下文。[@perzeusss](https://github.com/perzeuss)制作了一个很好的[功能请求助手](https://udify.app/chat/MK2kVSnw1gakVwMX),可以帮助您起草需求。随时尝试一下。
* 如果您想从现有问题中选择一个,请在其下方留下评论表示您的意愿。
@ -20,44 +20,45 @@
根据所提议的功能所属的领域不同,您可能需要与不同的团队成员交流。以下是我们团队成员目前正在从事的各个领域的概述:
| 团队成员 | 工作范围 |
| Member | Scope |
| ------------------------------------------------------------ | ---------------------------------------------------- |
| [@yeuoly](https://github.com/Yeuoly) | 架构 Agents |
| [@jyong](https://github.com/JohnJyong) | RAG 流水线设计 |
| [@GarfieldDai](https://github.com/GarfieldDai) | 构建 workflow 编排 |
| [@iamjoel](https://github.com/iamjoel) & [@zxhlyh](https://github.com/zxhlyh) | 让我们的前端更易用 |
| [@guchenhe](https://github.com/guchenhe) & [@crazywoola](https://github.com/crazywoola) | 开发人员体验, 综合事项联系人 |
| [@takatost](https://github.com/takatost) | 产品整体方向和架构 |
| [@yeuoly](https://github.com/Yeuoly) | Architecting Agents |
| [@jyong](https://github.com/JohnJyong) | RAG pipeline design |
| [@GarfieldDai](https://github.com/GarfieldDai) | Building workflow orchestrations |
| [@iamjoel](https://github.com/iamjoel) & [@zxhlyh](https://github.com/zxhlyh) | Making our frontend a breeze to use |
| [@guchenhe](https://github.com/guchenhe) & [@crazywoola](https://github.com/crazywoola) | Developer experience, points of contact for anything |
| [@takatost](https://github.com/takatost) | Overall product direction and architecture |
事项优先级:
How we prioritize:
| 功能类型 | 优先级 |
| Feature Type | Priority |
| ------------------------------------------------------------ | --------------- |
| 被团队成员标记为高优先级的功能 | 高优先级 |
| [community feedback board](https://github.com/langgenius/dify/discussions/categories/feedbacks) 内反馈的常见功能请求 | 中等优先级 |
| 非核心功能和小幅改进 | 低优先级 |
| 有价值当不紧急 | 未来功能 |
| High-Priority Features as being labeled by a team member | High Priority |
| Popular feature requests from our [community feedback board](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Medium Priority |
| Non-core features and minor enhancements | Low Priority |
| Valuable but not immediate | Future-Feature |
### 其他任何事情(例如 bug 报告、性能优化、拼写错误更正):
### 其他任何事情例如bug报告、性能优化、拼写错误更正
* 立即开始编码。
事项优先级:
How we prioritize:
| Issue 类型 | 优先级 |
| Issue Type | Priority |
| ------------------------------------------------------------ | --------------- |
| 核心功能的 Bugs例如无法登录、应用无法工作、安全漏洞 | 紧急 |
| 非紧急 bugs, 性能提升 | 中等优先级 |
| 小幅修复(错别字, 能正常工作但存在误导的 UI) | 低优先级 |
| Bugs in core functions (cannot login, applications not working, security loopholes) | Critical |
| Non-critical bugs, performance boosts | Medium Priority |
| Minor fixes (typos, confusing but working UI) | Low Priority |
## 安装
以下是设置 Dify 进行开发的步骤:
以下是设置Dify进行开发的步骤
### 1. Fork 该仓库
### 1. Fork该仓库
### 2. 克隆仓库
从终端克隆代码仓库:
从终端克隆fork的仓库:
```
git clone git@github.com:<github_username>/dify.git
@ -75,72 +76,72 @@ Dify 依赖以下工具和库:
### 4. 安装
Dify 由后端和前端组成。通过 `cd api/` 导航到后端目录,然后按照 [后端 README](api/README.md) 进行安装。在另一个终端中,通过 `cd web/` 导航到前端目录,然后按照 [前端 README](web/README.md) 进行安装。
Dify由后端和前端组成。通过`cd api/`导航到后端目录,然后按照[后端README](api/README.md)进行安装。在另一个终端中,通过`cd web/`导航到前端目录,然后按照[前端README](web/README.md)进行安装。
查看 [安装常见问题解答](https://docs.dify.ai/v/zh-hans/learn-more/faq/install-faq) 以获取常见问题列表和故障排除步骤。
查看[安装常见问题解答](https://docs.dify.ai/getting-started/faq/install-faq)以获取常见问题列表和故障排除步骤。
### 5. 在浏览器中访问 Dify
### 5. 在浏览器中访问Dify
为了验证您的设置,打开浏览器并访问 [http://localhost:3000](http://localhost:3000)(默认或您自定义的 URL 和端口)。现在您应该看到 Dify 正在运行。
为了验证您的设置,打开浏览器并访问[http://localhost:3000](http://localhost:3000)默认或您自定义的URL和端口。现在您应该看到Dify正在运行。
## 开发
如果您要添加模型提供程序,请参考 [此指南](https://github.com/langgenius/dify/blob/main/api/core/model_runtime/README.md)。
如果您要添加模型提供程序,请参考[此指南](https://github.com/langgenius/dify/blob/main/api/core/model_runtime/README.md)。
如果您要向 AgentWorkflow 添加工具提供程序,请参考 [此指南](./api/core/tools/README.md)。
如果您要向AgentWorkflow添加工具提供程序请参考[此指南](./api/core/tools/README.md)。
为了帮助您快速了解您的贡献在哪个部分,以下是 Dify 后端和前端的简要注释大纲:
为了帮助您快速了解您的贡献在哪个部分以下是Dify后端和前端的简要注释大纲
### 后端
Dify 的后端使用 Python 编写,使用 [Flask](https://flask.palletsprojects.com/en/3.0.x/) 框架。它使用 [SQLAlchemy](https://www.sqlalchemy.org/) 作为 ORM使用 [Celery](https://docs.celeryq.dev/en/stable/getting-started/introduction.html) 作为任务队列。授权逻辑通过 Flask-login 进行处理。
Dify的后端使用Python编写使用[Flask](https://flask.palletsprojects.com/en/3.0.x/)框架。它使用[SQLAlchemy](https://www.sqlalchemy.org/)作为ORM使用[Celery](https://docs.celeryq.dev/en/stable/getting-started/introduction.html)作为任务队列。授权逻辑通过Flask-login进行处理。
```
[api/]
├── constants // 用于整个代码库的常量设置。
├── controllers // API 路由定义和请求处理逻辑。
├── core // 核心应用编排、模型集成和工具。
├── docker // Docker 和容器化相关配置。
├── events // 事件处理和处理。
├── extensions // 与第三方框架/平台的扩展。
├── fields // 用于序列化/封装的字段定义。
├── libs // 可重用的库和助手。
├── migrations // 数据库迁移脚本。
├── models // 数据库模型和架构定义。
├── services // 指定业务逻辑。
├── storage // 私钥存储。
├── tasks // 异步任务和后台作业的处理。
├── constants // Constant settings used throughout code base.
├── controllers // API route definitions and request handling logic.
├── core // Core application orchestration, model integrations, and tools.
├── docker // Docker & containerization related configurations.
├── events // Event handling and processing
├── extensions // Extensions with 3rd party frameworks/platforms.
├── fields // field definitions for serialization/marshalling.
├── libs // Reusable libraries and helpers.
├── migrations // Scripts for database migration.
├── models // Database models & schema definitions.
├── services // Specifies business logic.
├── storage // Private key storage.
├── tasks // Handling of async tasks and background jobs.
└── tests
```
### 前端
该网站使用基于 Typescript[Next.js](https://nextjs.org/) 模板进行引导,并使用 [Tailwind CSS](https://tailwindcss.com/) 进行样式设计。[React-i18next](https://react.i18next.com/) 用于国际化。
该网站使用基于Typescript[Next.js](https://nextjs.org/)模板进行引导,并使用[Tailwind CSS](https://tailwindcss.com/)进行样式设计。[React-i18next](https://react.i18next.com/)用于国际化。
```
[web/]
├── app // 布局、页面和组件
│ ├── (commonLayout) // 整个应用通用的布局
│ ├── (shareLayout) // 在特定会话中共享的布局
│ ├── activate // 激活页面
│ ├── components // 页面和布局共享的组件
│ ├── install // 安装页面
│ ├── signin // 登录页面
│ └── styles // 全局共享的样式
├── assets // 静态资源
├── bin // 构建步骤运行的脚本
├── config // 可调整的设置和选项
├── context // 应用中不同部分使用的共享上下文
├── dictionaries // 语言特定的翻译文件
├── docker // 容器配置
├── hooks // 可重用的钩子
├── i18n // 国际化配置
├── models // 描述数据模型和 API 响应的形状
├── public // favicon 等元资源
├── service // 定义 API 操作的形状
├── app // layouts, pages, and components
│ ├── (commonLayout) // common layout used throughout the app
│ ├── (shareLayout) // layouts specifically shared across token-specific sessions
│ ├── activate // activate page
│ ├── components // shared by pages and layouts
│ ├── install // install page
│ ├── signin // signin page
│ └── styles // globally shared styles
├── assets // Static assets
├── bin // scripts ran at build step
├── config // adjustable settings and options
├── context // shared contexts used by different portions of the app
├── dictionaries // Language-specific translate files
├── docker // container configurations
├── hooks // Reusable hooks
├── i18n // Internationalization configuration
├── models // describes data models & shapes of API responses
├── public // meta assets like favicon
├── service // specifies shapes of API actions
├── test
├── types // 函数参数和返回值的描述
└── utils // 共享的实用函数
├── types // descriptions of function params and return values
└── utils // Shared utility functions
```
## 提交你的 PR

View File

@ -1,7 +1,7 @@
Dify にコントリビュートしたいとお考えなのですね。それは素晴らしいことです。
私たちは、LLM アプリケーションの構築と管理のための最も直感的なワークフローを設計するという壮大な野望を持っています。人数も資金も限られている新興企業として、コミュニティからの支援は本当に重要です。
私たちは現状を鑑み、機敏かつ迅速に開発をする必要がありますが、同時にあなたのようなコントリビューターの方々に、可能な限りスムーズな貢献体験をしていただきたいと思っています。そのためにこのコントリビュートガイドを作成しました。
私たちは現状を鑑み、機敏かつ迅速に開発をする必要がありますが、同時にあなたのようなコントリビューターの方々に、可能な限りスムーズな貢献体験をしていただきたいと思っています。そのためにこのコントリビュートガイドを作成しました。
コードベースやコントリビュータの方々と私たちがどのように仕事をしているのかに慣れていただき、楽しいパートにすぐに飛び込めるようにすることが目的です。
このガイドは Dify そのものと同様に、継続的に改善されています。実際のプロジェクトに遅れをとることがあるかもしれませんが、ご理解のほどよろしくお願いいたします。
@ -14,13 +14,13 @@ Dify にコントリビュートしたいとお考えなのですね。それは
### 機能リクエスト
* 新しい機能要望を出す場合は、提案する機能が何を実現するものなのかを説明し、可能な限り多くのコンテキストを含めてください。[@perzeusss](https://github.com/perzeuss)は、あなたの要望を書き出すのに役立つ [Feature Request Copilot](https://udify.app/chat/MK2kVSnw1gakVwMX) を作ってくれました。気軽に試してみてください。
* 新しい機能要望を出す場合は、提案する機能が何を実現するものなのかを説明し、可能な限り多くのコンテキストを含めてください。[@perzeusss](https://github.com/perzeuss)は、あなたの要望を書き出すのに役立つ [Feature Request Copilot](https://udify.app/chat/MK2kVSnw1gakVwMX) を作ってくれました。気軽に試してみてください。
* 既存の課題から 1 つ選びたい場合は、その下にコメントを書いてください。
関連する方向で作業しているチームメンバーが参加します。すべてが良好であれば、コーディングを開始する許可が与えられます。私たちが変更を提案した場合にあなたの作業が無駄になることがないよう、それまでこの機能の作業を控えていただくようお願いいたします。
関連する方向で作業しているチームメンバーが参加します。すべてが良好であれば、コーディングを開始する許可が与えられます。私たちが変更を提案した場合にあなたの作業が無駄になることがないよう、それまでこの機能の作業を控えていただくようお願いいたします。
提案された機能がどの分野に属するかによって、あなたは異なるチーム・メンバーと話をするかもしれません。以下は、各チームメンバーが現在取り組んでいる分野の概要です。
提案された機能がどの分野に属するかによって、あなたは異なるチーム・メンバーと話をするかもしれません。以下は、各チームメンバーが現在取り組んでいる分野の概要です。
| Member | Scope |
| --------------------------------------------------------------------------------------- | ------------------------------------ |
@ -82,7 +82,7 @@ Dify はバックエンドとフロントエンドから構成されています
まず`cd api/`でバックエンドのディレクトリに移動し、[Backend README](api/README.md)に従ってインストールします。
次に別のターミナルで、`cd web/`でフロントエンドのディレクトリに移動し、[Frontend README](web/README.md)に従ってインストールしてください。
よくある問題とトラブルシューティングの手順については、[installation FAQ](https://docs.dify.ai/v/japanese/learn-more/faq/install-faq) を確認してください。
よくある問題とトラブルシューティングの手順については、[installation FAQ](https://docs.dify.ai/getting-started/faq/install-faq) を確認してください。
### 5. ブラウザで dify にアクセスする
@ -153,7 +153,7 @@ Dify のバックエンドは[Flask](https://flask.palletsprojects.com/en/3.0.x/
いよいよ、私たちのリポジトリにプルリクエスト (PR) を提出する時が来ました。主要な機能については、まず `deploy/dev` ブランチにマージしてテストしてから `main` ブランチにマージします。
マージ競合などの問題が発生した場合、またはプル リクエストを開く方法がわからない場合は、[GitHub's pull request tutorial](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests) をチェックしてみてください。
これで完了です!あなたの PR がマージされると、[README](https://github.com/langgenius/dify/blob/main/README.md) にコントリビューターとして紹介されます。
これで完了です!あなたの PR がマージされると、[README](https://github.com/langgenius/dify/blob/main/README.md) にコントリビューターとして紹介されます。
## ヘルプを得る

View File

@ -4,7 +4,7 @@
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
<a href="https://docs.dify.ai/getting-started/install-self-hosted">Self-hosting</a> ·
<a href="https://docs.dify.ai">Documentation</a> ·
<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Enterprise inquiry</a>
<a href="https://cal.com/guchenhe/60-min-meeting">Enterprise inquiry</a>
</p>
<p align="center">
@ -37,8 +37,6 @@
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
</p>
@ -66,7 +64,7 @@ Dify is an open-source LLM app development platform. Its intuitive interface com
Extensive RAG capabilities that cover everything from document ingestion to retrieval, with out-of-box support for text extraction from PDFs, PPTs, and other common document formats.
**5. Agent capabilities**:
You can define agents based on LLM Function Calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DALL·E, Stable Diffusion and WolframAlpha.
You can define agents based on LLM Function Calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DELL·E, Stable Diffusion and WolframAlpha.
**6. LLMOps**:
Monitor and analyze application logs and performance over time. You could continuously improve prompts, datasets, and models based on production data and annotations.
@ -152,7 +150,7 @@ Quickly get Dify running in your environment with this [starter guide](#quick-st
Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.
- **Dify for enterprise / organizations</br>**
We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs. </br>
We provide additional enterprise-centric features. [Schedule a meeting with us](https://cal.com/guchenhe/30min) or [send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs. </br>
> For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one-click. It's an affordable AMI offering with the option to create apps with custom logo and branding.
@ -218,9 +216,27 @@ At the same time, please consider supporting Dify by sharing it on social media
* [Github Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
* [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [Email](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). Best for: questions you have about using Dify.AI.
* [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
* [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
Or, schedule a meeting directly with a team member:
<table>
<tr>
<th>Point of Contact</th>
<th>Purpose</th>
</tr>
<tr>
<td><a href='https://cal.com/guchenhe/15min' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/9ebcd111-1205-4d71-83d5-948d70b809f5' alt='Git-Hub-README-Button-3x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
<td>Business enquiries & product feedback</td>
</tr>
<tr>
<td><a href='https://cal.com/pinkbanana' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/d1edd00a-d7e4-4513-be6c-e57038e143fd' alt='Git-Hub-README-Button-2x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
<td>Contributions, issues & feature requests</td>
</tr>
</table>
## Star history
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)

View File

@ -4,7 +4,7 @@
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
<a href="https://docs.dify.ai/getting-started/install-self-hosted">الاستضافة الذاتية</a> ·
<a href="https://docs.dify.ai">التوثيق</a> ·
<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">استفسار الشركات (للإنجليزية فقط)</a>
<a href="https://cal.com/guchenhe/60-min-meeting">استفسارات الشركات</a>
</p>
<p align="center">
@ -37,8 +37,6 @@
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
</p>
<div style="text-align: right;">
@ -58,7 +56,7 @@
**4. خط أنابيب RAG**: قدرات RAG الواسعة التي تغطي كل شيء من استيعاب الوثائق إلى الاسترجاع، مع الدعم الفوري لاستخراج النص من ملفات PDF و PPT وتنسيقات الوثائق الشائعة الأخرى.
**5. قدرات الوكيل**: يمكنك تعريف الوكلاء بناءً على أمر وظيفة LLM أو ReAct، وإضافة أدوات مدمجة أو مخصصة للوكيل. توفر Dify أكثر من 50 أداة مدمجة لوكلاء الذكاء الاصطناعي، مثل البحث في Google و DALL·E وStable Diffusion و WolframAlpha.
**5. قدرات الوكيل**: يمكنك تعريف الوكلاء بناءً على أمر وظيفة LLM أو ReAct، وإضافة أدوات مدمجة أو مخصصة للوكيل. توفر Dify أكثر من 50 أداة مدمجة لوكلاء الذكاء الاصطناعي، مثل البحث في Google و DELL·E وStable Diffusion و WolframAlpha.
**6. الـ LLMOps**: راقب وتحلل سجلات التطبيق والأداء على مر الزمن. يمكنك تحسين الأوامر والبيانات والنماذج باستمرار استنادًا إلى البيانات الإنتاجية والتعليقات.
@ -201,9 +199,27 @@ docker compose up -d
## المجتمع والاتصال
* [مناقشة Github](https://github.com/langgenius/dify/discussions). الأفضل لـ: مشاركة التعليقات وطرح الأسئلة.
* [المشكلات على GitHub](https://github.com/langgenius/dify/issues). الأفضل لـ: الأخطاء التي تواجهها في استخدام Dify.AI، واقتراحات الميزات. انظر [دليل المساهمة](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [البريد الإلكتروني](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). الأفضل لـ: الأسئلة التي تتعلق باستخدام Dify.AI.
* [Discord](https://discord.gg/FngNHpbcY7). الأفضل لـ: مشاركة تطبيقاتك والترفيه مع المجتمع.
* [تويتر](https://twitter.com/dify_ai). الأفضل لـ: مشاركة تطبيقاتك والترفيه مع المجتمع.
أو، قم بجدولة اجتماع مباشرة مع أحد أعضاء الفريق:
<table>
<tr>
<th>نقطة الاتصال</th>
<th>الغرض</th>
</tr>
<tr>
<td><a href='https://cal.com/guchenhe/15min' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/9ebcd111-1205-4d71-83d5-948d70b809f5' alt='Git-Hub-README-Button-3x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
<td>استفسارات الأعمال واقتراحات حول المنتج</td>
</tr>
<tr>
<td><a href='https://cal.com/pinkbanana' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/d1edd00a-d7e4-4513-be6c-e57038e143fd' alt='Git-Hub-README-Button-2x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
<td>المساهمات والمشكلات وطلبات الميزات</td>
</tr>
</table>
## تاريخ النجمة
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)

View File

@ -4,7 +4,7 @@
<a href="https://cloud.dify.ai">Dify 云服务</a> ·
<a href="https://docs.dify.ai/getting-started/install-self-hosted">自托管</a> ·
<a href="https://docs.dify.ai">文档</a> ·
<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">(需用英文)常见问题解答 / 联系团队</a>
<a href="https://cal.com/guchenhe/dify-demo">预约演示</a>
</div>
<p align="center">
@ -29,16 +29,13 @@
</p>
<div align="center">
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README.md"><img alt="上个月的提交次数" src="https://img.shields.io/badge/英文-d9d9d9"></a>
<a href="./README_CN.md"><img alt="上个月的提交次数" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="上个月的提交次数" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="上个月的提交次数" src="https://img.shields.io/badge/西班牙语-d9d9d9"></a>
<a href="./README_KL.md"><img alt="上个月的提交次数" src="https://img.shields.io/badge/法语-d9d9d9"></a>
<a href="./README_FR.md"><img alt="上个月的提交次数" src="https://img.shields.io/badge/克林贡语-d9d9d9"></a>
<a href="./README_KR.md"><img alt="上个月的提交次数" src="https://img.shields.io/badge/韓國語-d9d9d9"></a>
</div>
@ -72,7 +69,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI
广泛的 RAG 功能,涵盖从文档摄入到检索的所有内容,支持从 PDF、PPT 和其他常见文档格式中提取文本的开箱即用的支持。
**5. Agent 智能体**:
您可以基于 LLM 函数调用或 ReAct 定义 Agent并为 Agent 添加预构建或自定义工具。Dify 为 AI Agent 提供了50多种内置工具如谷歌搜索、DALL·E、Stable Diffusion 和 WolframAlpha 等。
您可以基于 LLM 函数调用或 ReAct 定义 Agent并为 Agent 添加预构建或自定义工具。Dify 为 AI Agent 提供了50多种内置工具如谷歌搜索、DELL·E、Stable Diffusion 和 WolframAlpha 等。
**6. LLMOps**:
随时间监视和分析应用程序日志和性能。您可以根据生产数据和标注持续改进提示、数据集和模型。
@ -158,7 +155,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI
使用我们的[文档](https://docs.dify.ai)进行进一步的参考和更深入的说明。
- **面向企业/组织的 Dify</br>**
我们提供额外的面向企业的功能。[给我们发送电子邮件](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)讨论企业需求。 </br>
我们提供额外的面向企业的功能。[与我们安排会议](https://cal.com/guchenhe/30min)或[给我们发送电子邮件](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)讨论企业需求。 </br>
> 对于使用 AWS 的初创公司和中小型企业,请查看 [AWS Marketplace 上的 Dify 高级版](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6),并使用一键部署到您自己的 AWS VPC。它是一个价格实惠的 AMI 产品,提供了使用自定义徽标和品牌创建应用程序的选项。
## 保持领先

View File

@ -4,7 +4,7 @@
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
<a href="https://docs.dify.ai/getting-started/install-self-hosted">Auto-alojamiento</a> ·
<a href="https://docs.dify.ai">Documentación</a> ·
<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Consultas empresariales (en inglés)</a>
<a href="https://cal.com/guchenhe/dify-demo">Programar demostración</a>
</p>
<p align="center">
@ -29,16 +29,13 @@
</p>
<p align="center">
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README.md"><img alt="Actividad de Commits el último mes" src="https://img.shields.io/badge/Inglés-d9d9d9"></a>
<a href="./README_CN.md"><img alt="Actividad de Commits el último mes" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="Actividad de Commits el último mes" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="Actividad de Commits el último mes" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_KL.md"><img alt="Actividad de Commits el último mes" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_FR.md"><img alt="Actividad de Commits el último mes" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="Actividad de Commits el último mes" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
</p>
#
@ -72,7 +69,7 @@ Dify es una plataforma de desarrollo de aplicaciones de LLM de código abierto.
**5. Capacidades de agente**:
Puedes definir agent
es basados en LLM Function Calling o ReAct, y agregar herramientas preconstruidas o personalizadas para el agente. Dify proporciona más de 50 herramientas integradas para agentes de IA, como Búsqueda de Google, DALL·E, Difusión Estable y WolframAlpha.
es basados en LLM Function Calling o ReAct, y agregar herramientas preconstruidas o personalizadas para el agente. Dify proporciona más de 50 herramientas integradas para agentes de IA, como Búsqueda de Google, DELL·E, Difusión Estable y WolframAlpha.
**6. LLMOps**:
Supervisa y analiza registros de aplicaciones y rendimiento a lo largo del tiempo. Podrías mejorar continuamente prompts, conjuntos de datos y modelos basados en datos de producción y anotaciones.
@ -158,7 +155,7 @@ Pon rápidamente Dify en funcionamiento en tu entorno con esta [guía de inicio
Usa nuestra [documentación](https://docs.dify.ai) para más referencias e instrucciones más detalladas.
- **Dify para Empresas / Organizaciones</br>**
Proporcionamos características adicionales centradas en la empresa. [Envíanos un correo electrónico](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) para discutir las necesidades empresariales. </br>
Proporcionamos características adicionales centradas en la empresa. [Programa una reunión con nosotros](https://cal.com/guchenhe/30min) o [envíanos un correo electrónico](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) para discutir las necesidades empresariales. </br>
> Para startups y pequeñas empresas que utilizan AWS, echa un vistazo a [Dify Premium en AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) e impleméntalo en tu propio VPC de AWS con un clic. Es una AMI asequible que ofrece la opción de crear aplicaciones con logotipo y marca personalizados.
@ -227,9 +224,27 @@ Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en
* [Discusión en GitHub](https://github.com/langgenius/dify/discussions). Lo mejor para: compartir comentarios y hacer preguntas.
* [Reporte de problemas en GitHub](https://github.com/langgenius/dify/issues). Lo mejor para: errores que encuentres usando Dify.AI y propuestas de características. Consulta nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [Correo electrónico](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). Lo mejor para: preguntas que tengas sobre el uso de Dify.AI.
* [Discord](https://discord.gg/FngNHpbcY7). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad.
* [Twitter](https://twitter.com/dify_ai). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad.
O, programa una reunión directamente con un miembro del equipo:
<table>
<tr>
<th>Punto de Contacto</th>
<th>Propósito</th>
</tr>
<tr>
<td><a href='https://cal.com/guchenhe/15min' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/9ebcd111-1205-4d71-83d5-948d70b809f5' alt='Git-Hub-README-Button-3x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
<td>Consultas comerciales y retroalimentación del producto</td>
</tr>
<tr>
<td><a href='https://cal.com/pinkbanana' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/d1edd00a-d7e4-4513-be6c-e57038e143fd' alt='Git-Hub-README-Button-2x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
<td>Contribuciones, problemas y solicitudes de características</td>
</tr>
</table>
## Historial de Estrellas
[![Gráfico de Historial de Estrellas](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
@ -241,4 +256,4 @@ Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En
## Licencia
Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales.
Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales.

View File

@ -4,7 +4,7 @@
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
<a href="https://docs.dify.ai/getting-started/install-self-hosted">Auto-hébergement</a> ·
<a href="https://docs.dify.ai">Documentation</a> ·
<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Demande dentreprise (en anglais seulement)</a>
<a href="https://cal.com/guchenhe/dify-demo">Planifier une démo</a>
</p>
<p align="center">
@ -29,16 +29,13 @@
</p>
<p align="center">
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README.md"><img alt="Commits le mois dernier" src="https://img.shields.io/badge/Anglais-d9d9d9"></a>
<a href="./README_CN.md"><img alt="Commits le mois dernier" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="Commits le mois dernier" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="Commits le mois dernier" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_KL.md"><img alt="Commits le mois dernier" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_FR.md"><img alt="Commits le mois dernier" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="Commits le mois dernier" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
</p>
#
@ -72,7 +69,7 @@ Dify est une plateforme de développement d'applications LLM open source. Son in
**5. Capac
ités d'agent**:
Vous pouvez définir des agents basés sur l'appel de fonction LLM ou ReAct, et ajouter des outils pré-construits ou personnalisés pour l'agent. Dify fournit plus de 50 outils intégrés pour les agents d'IA, tels que la recherche Google, DALL·E, Stable Diffusion et WolframAlpha.
Vous pouvez définir des agents basés sur l'appel de fonction LLM ou ReAct, et ajouter des outils pré-construits ou personnalisés pour l'agent. Dify fournit plus de 50 outils intégrés pour les agents d'IA, tels que la recherche Google, DELL·E, Stable Diffusion et WolframAlpha.
**6. LLMOps**:
Surveillez et analysez les journaux d'application et les performances au fil du temps. Vous pouvez continuellement améliorer les prompts, les ensembles de données et les modèles en fonction des données de production et des annotations.
@ -158,7 +155,7 @@ Lancez rapidement Dify dans votre environnement avec ce [guide de démarrage](#q
Utilisez notre [documentation](https://docs.dify.ai) pour plus de références et des instructions plus détaillées.
- **Dify pour les entreprises / organisations</br>**
Nous proposons des fonctionnalités supplémentaires adaptées aux entreprises. [Envoyez-nous un e-mail](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) pour discuter des besoins de l'entreprise. </br>
Nous proposons des fonctionnalités supplémentaires adaptées aux entreprises. [Planifiez une réunion avec nous](https://cal.com/guchenhe/30min) ou [envoyez-nous un e-mail](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) pour discuter des besoins de l'entreprise. </br>
> Pour les startups et les petites entreprises utilisant AWS, consultez [Dify Premium sur AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) et déployez-le dans votre propre VPC AWS en un clic. C'est une offre AMI abordable avec la possibilité de créer des applications avec un logo et une marque personnalisés.
@ -225,9 +222,27 @@ Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur le
* [Discussion GitHub](https://github.com/langgenius/dify/discussions). Meilleur pour: partager des commentaires et poser des questions.
* [Problèmes GitHub](https://github.com/langgenius/dify/issues). Meilleur pour: les bogues que vous rencontrez en utilisant Dify.AI et les propositions de fonctionnalités. Consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [E-mail](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). Meilleur pour: les questions que vous avez sur l'utilisation de Dify.AI.
* [Discord](https://discord.gg/FngNHpbcY7). Meilleur pour: partager vos applications et passer du temps avec la communauté.
* [Twitter](https://twitter.com/dify_ai). Meilleur pour: partager vos applications et passer du temps avec la communauté.
Ou, planifiez directement une réunion avec un membre de l'équipe:
<table>
<tr>
<th>Point de contact</th>
<th>Objectif</th>
</tr>
<tr>
<td><a href='https://cal.com/guchenhe/15min' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/9ebcd111-1205-4d71-83d5-948d70b809f5' alt='Git-Hub-README-Button-3x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
<td>Demandes commerciales & retours produit</td>
</tr>
<tr>
<td><a href='https://cal.com/pinkbanana' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/d1edd00a-d7e4-4513-be6c-e57038e143fd' alt='Git-Hub-README-Button-2x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
<td>Contributions, problèmes & demandes de fonctionnalités</td>
</tr>
</table>
## Historique des étoiles
[![Graphique de l'historique des étoiles](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)

View File

@ -4,7 +4,7 @@
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
<a href="https://docs.dify.ai/getting-started/install-self-hosted">セルフホスティング</a> ·
<a href="https://docs.dify.ai">ドキュメント</a> ·
<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">企業のお問い合わせ(英語のみ)</a>
<a href="https://cal.com/guchenhe/dify-demo">デモの予約</a>
</p>
<p align="center">
@ -29,16 +29,13 @@
</p>
<p align="center">
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README.md"><img alt="先月のコミット" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="先月のコミット" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="先月のコミット" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="先月のコミット" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_KL.md"><img alt="先月のコミット" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_FR.md"><img alt="先月のコミット" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="先月のコミット" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
</p>
#
@ -71,7 +68,7 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ
ドキュメントの取り込みから検索までをカバーする広範なRAG機能ができます。ほかにもPDF、PPT、その他の一般的なドキュメントフォーマットからのテキスト抽出のサーポイントも提供します。
**5. エージェント機能**:
LLM Function CallingやReActに基づくエージェントの定義が可能で、AIエージェント用のプリビルトまたはカスタムツールを追加できます。Difyには、Google検索、DALL·E、Stable Diffusion、WolframAlphaなどのAIエージェント用の50以上の組み込みツールが提供します。
LLM Function CallingやReActに基づくエージェントの定義が可能で、AIエージェント用のプリビルトまたはカスタムツールを追加できます。Difyには、Google検索、DELL·E、Stable Diffusion、WolframAlphaなどのAIエージェント用の50以上の組み込みツールが提供します。
**6. LLMOps**:
アプリケーションのログやパフォーマンスを監視と分析し、生産のデータと注釈に基づいて、プロンプト、データセット、モデルを継続的に改善できます。
@ -157,7 +154,7 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ
詳しくは[ドキュメント](https://docs.dify.ai)をご覧ください。
- **企業/組織向けのDify</br>**
企業中心の機能を提供しています。[メールを送信](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)して企業のニーズについて相談してください。 </br>
企業中心の機能を提供しています。[こちらからミーティングを予約](https://cal.com/guchenhe/30min)したり、[メールを送信](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)して企業のニーズについて相談してください。 </br>
> AWSを使用しているスタートアップ企業や中小企業の場合は、[AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6)のDify Premiumをチェックして、ワンクリックで自分のAWS VPCにデプロイできます。さらに、手頃な価格のAMIオファリングどして、ロゴやブランディングをカスタマイズしてアプリケーションを作成するオプションがあります。
@ -224,9 +221,32 @@ docker compose up -d
* [Github Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。
* [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](CONTRIBUTING_JA.md)を参照してください
* [Email](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). 主に: Dify.AIの使用に関する質問。
* [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。
* [Twitter](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。
または、直接チームメンバーとミーティングをスケジュール:
<table>
<tr>
<th>連絡先</th>
<th>目的</th>
</tr>
<tr>
<td><a href='https://cal.com
/guchenhe/30min'>ミーティング</a></td>
<td>無料の30分間のミーティングをスケジュール</td>
</tr>
<tr>
<td><a href='mailto:support@dify.ai?subject=[GitHub]Technical%20Support'>技術サポート</a></td>
<td>技術的な問題やサポートに関する質問</td>
</tr>
<tr>
<td><a href='mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry'>営業担当</a></td>
<td>法人ライセンスに関するお問い合わせ</td>
</tr>
</table>
## ライセンス

View File

@ -4,7 +4,7 @@
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
<a href="https://docs.dify.ai/getting-started/install-self-hosted">Self-hosting</a> ·
<a href="https://docs.dify.ai">Documentation</a> ·
<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Commercial enquiries</a>
<a href="https://cal.com/guchenhe/dify-demo">Schedule demo</a>
</p>
<p align="center">
@ -29,16 +29,13 @@
</p>
<p align="center">
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README.md"><img alt="Commits last month" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="Commits last month" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="Commits last month" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="Commits last month" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_KL.md"><img alt="Commits last month" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_FR.md"><img alt="Commits last month" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="Commits last month" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
</p>
#
@ -70,7 +67,7 @@ Dify is an open-source LLM app development platform. Its intuitive interface com
Extensive RAG capabilities that cover everything from document ingestion to retrieval, with out-of-box support for text extraction from PDFs, PPTs, and other common document formats.
**5. Agent capabilities**:
You can define agents based on LLM Function Calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DALL·E, Stable Diffusion and WolframAlpha.
You can define agents based on LLM Function Calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DELL·E, Stable Diffusion and WolframAlpha.
**6. LLMOps**:
Monitor and analyze application logs and performance over time. You could continuously improve prompts, datasets, and models based on production data and annotations.
@ -158,7 +155,7 @@ Quickly get Dify running in your environment with this [starter guide](#quick-st
Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.
- **Dify for Enterprise / Organizations</br>**
We provide additional enterprise-centric features. [Send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs. </br>
We provide additional enterprise-centric features. [Schedule a meeting with us](https://cal.com/guchenhe/30min) or [send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs. </br>
> For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one-click. It's an affordable AMI offering with the option to create apps with custom logo and branding.
@ -227,9 +224,27 @@ At the same time, please consider supporting Dify by sharing it on social media
* [GitHub Discussions](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
* [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [Email](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). Best for: questions you have about using Dify.AI.
* [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
* [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
Or, schedule a meeting directly with a team member:
<table>
<tr>
<th>Point of Contact</th>
<th>Purpose</th>
</tr>
<tr>
<td><a href='https://cal.com/guchenhe/15min' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/9ebcd111-1205-4d71-83d5-948d70b809f5' alt='Git-Hub-README-Button-3x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
<td>Business enquiries & product feedback</td>
</tr>
<tr>
<td><a href='https://cal.com/pinkbanana' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/d1edd00a-d7e4-4513-be6c-e57038e143fd' alt='Git-Hub-README-Button-2x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
<td>Contributions, issues & feature requests</td>
</tr>
</table>
## Star History
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
@ -241,4 +256,4 @@ To protect your privacy, please avoid posting security issues on GitHub. Instead
## License
This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.
This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.

View File

@ -4,7 +4,7 @@
<a href="https://cloud.dify.ai">Dify 클라우드</a> ·
<a href="https://docs.dify.ai/getting-started/install-self-hosted">셀프-호스팅</a> ·
<a href="https://docs.dify.ai">문서</a> ·
<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">기업 문의 (영어만 가능)</a>
<a href="https://cal.com/guchenhe/60-min-meeting">기업 문의</a>
</p>
<p align="center">
@ -35,10 +35,7 @@
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./README_KR.md"><img alt="한국어 README" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
</p>
@ -66,7 +63,7 @@
문서 수집부터 검색까지 모든 것을 다루며, PDF, PPT 및 기타 일반적인 문서 형식에서 텍스트 추출을 위한 기본 지원이 포함되어 있는 광범위한 RAG 기능을 제공합니다.
**5. 에이전트 기능**:
LLM 함수 호출 또는 ReAct를 기반으로 에이전트를 정의하고 에이전트에 대해 사전 구축된 도구나 사용자 정의 도구를 추가할 수 있습니다. Dify는 Google Search, DALL·E, Stable Diffusion, WolframAlpha 등 AI 에이전트를 위한 50개 이상의 내장 도구를 제공합니다.
LLM 함수 호출 또는 ReAct를 기반으로 에이전트를 정의하고 에이전트에 대해 사전 구축된 도구나 사용자 정의 도구를 추가할 수 있습니다. Dify는 Google Search, DALL·E, Stable Diffusion, WolframAlpha 등 AI 에이전트를 위한 50개 이상의 내장 도구를 제공합니다.
**6. LLMOps**:
시간 경과에 따른 애플리케이션 로그와 성능을 모니터링하고 분석합니다. 생산 데이터와 주석을 기반으로 프롬프트, 데이터세트, 모델을 지속적으로 개선할 수 있습니다.
@ -151,7 +148,7 @@
추가 참조 및 더 심층적인 지침은 [문서](https://docs.dify.ai)를 사용하세요.
- **기업 / 조직을 위한 Dify</br>**
우리는 추가적인 기업 중심 기능을 제공합니다. [이메일 보내기](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)를 통해 기업 요구 사항을 논의하십시오. </br>
우리는 추가적인 기업 중심 기능을 제공합니다. 당사와 [미팅일정](https://cal.com/guchenhe/30min)을 잡거나 [이메일 보내기](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)를 통해 기업 요구 사항을 논의하십시오. </br>
> AWS를 사용하는 스타트업 및 중소기업의 경우 [AWS Marketplace에서 Dify Premium](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6)을 확인하고 한 번의 클릭으로 자체 AWS VPC에 배포하십시오. 맞춤형 로고와 브랜딩이 포함된 앱을 생성할 수 있는 옵션이 포함된 저렴한 AMI 제품입니다.
@ -217,9 +214,26 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했
* [Github 토론](https://github.com/langgenius/dify/discussions). 피드백 공유 및 질문하기에 적합합니다.
* [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요.
* [이메일](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). Dify.AI 사용에 대한 질문하기에 적합합니다.
* [디스코드](https://discord.gg/FngNHpbcY7). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다.
* [트위터](https://twitter.com/dify_ai). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다.
또는 팀원과 직접 미팅을 예약하세요:
<table>
<tr>
<th>연락처</th>
<th>목적</th>
</tr>
<tr>
<td><a href='https://cal.com/guchenhe/15min' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/9ebcd111-1205-4d71-83d5-948d70b809f5' alt='Git-Hub-README-Button-3x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
<td>비즈니스 문의 및 제품 피드백</td>
</tr>
<tr>
<td><a href='https://cal.com/pinkbanana' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/d1edd00a-d7e4-4513-be6c-e57038e143fd' alt='Git-Hub-README-Button-2x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
<td>기여, 이슈 및 기능 요청</td>
</tr>
</table>
## Star 히스토리

View File

@ -1,237 +0,0 @@
![cover-v5-optimized](https://github.com/langgenius/dify/assets/13230914/f9e19af5-61ba-4119-b926-d10c4c06ebab)
<p align="center">
<a href="https://cloud.dify.ai">Dify Bulut</a> ·
<a href="https://docs.dify.ai/getting-started/install-self-hosted">Kendi Sunucunuzda Barındırma</a> ·
<a href="https://docs.dify.ai">Dokümantasyon</a> ·
<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Yalnızca İngilizce: Kurumsal Sorgulama</a>
</p>
<p align="center">
<a href="https://dify.ai" target="_blank">
<img alt="Statik Rozet" src="https://img.shields.io/badge/Ürün-F04438"></a>
<a href="https://dify.ai/pricing" target="_blank">
<img alt="Statik Rozet" src="https://img.shields.io/badge/ücretsiz-fiyatlandırma?logo=free&color=%20%23155EEF&label=fiyatlandirma&labelColor=%20%23528bff"></a>
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
alt="Discord'da sohbet et"></a>
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
alt="Twitter'da takip et"></a>
<a href="https://hub.docker.com/u/langgenius" target="_blank">
<img alt="Docker Çekmeleri" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
<a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
<img alt="Geçen ay yapılan commitler" src="https://img.shields.io/github/commit-activity/m/langgenius/dify?labelColor=%20%2332b583&color=%20%2312b76a"></a>
<a href="https://github.com/langgenius/dify/" target="_blank">
<img alt="Kapatılan sorunlar" src="https://img.shields.io/github/issues-search?query=repo%3Alanggenius%2Fdify%20is%3Aclosed&label=kapatilan%20sorunlar&labelColor=%20%237d89b0&color=%20%235d6b98"></a>
<a href="https://github.com/langgenius/dify/discussions/" target="_blank">
<img alt="Tartışma gönderileri" src="https://img.shields.io/github/discussions/langgenius/dify?labelColor=%20%239b8afb&color=%20%237a5af8"></a>
</p>
<p align="center">
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
</p>
Dify, açık kaynaklı bir LLM uygulama geliştirme platformudur. Sezgisel arayüzü, AI iş akışı, RAG pipeline'ı, ajan yetenekleri, model yönetimi, gözlemlenebilirlik özellikleri ve daha fazlasını birleştirerek, prototipten üretime hızlıca geçmenizi sağlar. İşte temel özelliklerin bir listesi:
</br> </br>
**1. Workflow**:
Görsel bir arayüz üzerinde güçlü AI iş akışları oluşturun ve test edin, aşağıdaki tüm özellikleri ve daha fazlasını kullanarak.
https://github.com/langgenius/dify/assets/13230914/356df23e-1604-483d-80a6-9517ece318aa
**2. Kapsamlı model desteği**:
Çok sayıda çıkarım sağlayıcısı ve kendi kendine barındırılan çözümlerden yüzlerce özel / açık kaynaklı LLM ile sorunsuz entegrasyon sağlar. GPT, Mistral, Llama3 ve OpenAI API uyumlu tüm modelleri kapsar. Desteklenen model sağlayıcılarının tam listesine [buradan](https://docs.dify.ai/getting-started/readme/model-providers) ulaşabilirsiniz.
![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)
**3. Prompt IDE**:
Komut istemlerini oluşturmak, model performansını karşılaştırmak ve sohbet tabanlı uygulamalara metin-konuşma gibi ek özellikler eklemek için kullanıcı dostu bir arayüz.
**4. RAG Pipeline**:
Belge alımından bilgi çekmeye kadar geniş kapsamlı RAG yetenekleri. PDF'ler, PPT'ler ve diğer yaygın belge formatlarından metin çıkarma için hazır destek sunar.
**5. Ajan yetenekleri**:
LLM Fonksiyon Çağırma veya ReAct'a dayalı ajanlar tanımlayabilir ve bu ajanlara önceden hazırlanmış veya özel araçlar ekleyebilirsiniz. Dify, AI ajanları için Google Arama, DALL·E, Stable Diffusion ve WolframAlpha gibi 50'den fazla yerleşik araç sağlar.
**6. LLMOps**:
Uygulama loglarını ve performans metriklerini zaman içinde izleme ve analiz etme imkanı. Üretim ortamından elde edilen verilere ve kullanıcı geri bildirimlerine dayanarak, prompt'ları, veri setlerini ve modelleri sürekli olarak optimize edebilirsiniz. Bu sayede, AI uygulamanızın performansını ve doğruluğunu sürekli olarak artırabilirsiniz.
**7. Hizmet Olarak Backend**:
Dify'ın tüm özellikleri ilgili API'lerle birlikte gelir, böylece Dify'ı kendi iş mantığınıza kolayca entegre edebilirsiniz.
## Özellik karşılaştırması
<table style="width: 100%;">
<tr>
<th align="center">Özellik</th>
<th align="center">Dify.AI</th>
<th align="center">LangChain</th>
<th align="center">Flowise</th>
<th align="center">OpenAI Assistants API</th>
</tr>
<tr>
<td align="center">Programlama Yaklaşımı</td>
<td align="center">API + Uygulama odaklı</td>
<td align="center">Python Kodu</td>
<td align="center">Uygulama odaklı</td>
<td align="center">API odaklı</td>
</tr>
<tr>
<td align="center">Desteklenen LLM'ler</td>
<td align="center">Zengin Çeşitlilik</td>
<td align="center">Zengin Çeşitlilik</td>
<td align="center">Zengin Çeşitlilik</td>
<td align="center">Yalnızca OpenAI</td>
</tr>
<tr>
<td align="center">RAG Motoru</td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
</tr>
<tr>
<td align="center">Ajan</td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
</tr>
<tr>
<td align="center">İş Akışı</td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
</tr>
<tr>
<td align="center">Gözlemlenebilirlik</td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
</tr>
<tr>
<td align="center">Kurumsal Özellikler (SSO/Erişim kontrolü)</td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
</tr>
<tr>
<td align="center">Yerel Dağıtım</td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
</tr>
</table>
## Dify'ı Kullanma
- **Cloud </br>**
Herkesin sıfır kurulumla denemesi için bir [Dify Cloud](https://dify.ai) hizmeti sunuyoruz. Bu hizmet, kendi kendine dağıtılan versiyonun tüm yeteneklerini sağlar ve sandbox planında 200 ücretsiz GPT-4 çağrısı içerir.
- **Dify Topluluk Sürümünü Kendi Sunucunuzda Barındırma</br>**
Bu [başlangıç kılavuzu](#quick-start) ile Dify'ı kendi ortamınızda hızlıca çalıştırın.
Daha fazla referans ve detaylı talimatlar için [dokümantasyonumuzu](https://docs.dify.ai) kullanın.
- **Kurumlar / organizasyonlar için Dify</br>**
Ek kurumsal odaklı özellikler sunuyoruz. Kurumsal ihtiyaçları görüşmek için [bize bir e-posta gönderin](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry). </br>
> AWS kullanan startuplar ve küçük işletmeler için, [AWS Marketplace'deki Dify Premium'a](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) göz atın ve tek tıklamayla kendi AWS VPC'nize dağıtın. Bu, özel logo ve marka ile uygulamalar oluşturma seçeneğine sahip uygun fiyatlı bir AMI teklifdir.
## Güncel Kalma
GitHub'da Dify'a yıldız verin ve yeni sürümlerden anında haberdar olun.
![bizi-yıldızlayın](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)
## Hızlı başlangıç
> Dify'ı kurmadan önce, makinenizin aşağıdaki minimum sistem gereksinimlerini karşıladığından emin olun:
>
>- CPU >= 2 Çekirdek
>- RAM >= 4GB
</br>
Dify sunucusunu başlatmanın en kolay yolu, [docker-compose.yml](docker/docker-compose.yaml) dosyamızı çalıştırmaktır. Kurulum komutunu çalıştırmadan önce, makinenizde [Docker](https://docs.docker.com/get-docker/) ve [Docker Compose](https://docs.docker.com/compose/install/)'un kurulu olduğundan emin olun:
```bash
cd docker
cp .env.example .env
docker compose up -d
```
Çalıştırdıktan sonra, tarayıcınızda [http://localhost/install](http://localhost/install) adresinden Dify kontrol paneline erişebilir ve başlangıç ayarları sürecini başlatabilirsiniz.
> Eğer Dify'a katkıda bulunmak veya ek geliştirmeler yapmak isterseniz, [kaynak koddan dağıtım kılavuzumuza](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code) başvurun.
## Sonraki adımlar
Yapılandırmayı özelleştirmeniz gerekiyorsa, lütfen [.env.example](docker/.env.example) dosyamızdaki yorumlara bakın ve `.env` dosyanızdaki ilgili değerleri güncelleyin. Ayrıca, spesifik dağıtım ortamınıza ve gereksinimlerinize bağlı olarak `docker-compose.yaml` dosyasının kendisinde de, imaj sürümlerini, port eşlemelerini veya hacim bağlantılarını değiştirmek gibi ayarlamalar yapmanız gerekebilir. Herhangi bir değişiklik yaptıktan sonra, lütfen `docker-compose up -d` komutunu tekrar çalıştırın. Kullanılabilir tüm ortam değişkenlerinin tam listesini [burada](https://docs.dify.ai/getting-started/install-self-hosted/environments) bulabilirsiniz.
Yüksek kullanılabilirliğe sahip bir kurulum yapılandırmak isterseniz, Dify'ın Kubernetes üzerine dağıtılmasına olanak tanıyan topluluk katkılı [Helm Charts](https://helm.sh/) ve YAML dosyaları mevcuttur.
- [@LeoQuote tarafından Helm Chart](https://github.com/douban/charts/tree/master/charts/dify)
- [@BorisPolonsky tarafından Helm Chart](https://github.com/BorisPolonsky/dify-helm)
- [@Winson-030 tarafından YAML dosyası](https://github.com/Winson-030/dify-kubernetes)
#### Dağıtım için Terraform Kullanımı
##### Azure Global
[Terraform](https://www.terraform.io/) kullanarak Dify'ı Azure'a tek tıklamayla dağıtın.
- [@nikawang tarafından Azure Terraform](https://github.com/nikawang/dify-azure-terraform)
## Katkıda Bulunma
Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakabilirsiniz.
Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda paylaşarak desteklemeyi düşünün.
> Dify'ı Mandarin veya İngilizce dışındaki dillere çevirmemize yardımcı olacak katkıda bulunanlara ihtiyacımız var. Yardımcı olmakla ilgileniyorsanız, lütfen daha fazla bilgi için [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) dosyasına bakın ve [Discord Topluluk Sunucumuzdaki](https://discord.gg/8Tpq4AcN9c) `global-users` kanalında bize bir yorum bırakın.
**Katkıda Bulunanlar**
<a href="https://github.com/langgenius/dify/graphs/contributors">
<img src="https://contrib.rocks/image?repo=langgenius/dify" />
</a>
## Topluluk & iletişim
* [Github Tartışmaları](https://github.com/langgenius/dify/discussions). En uygun: geri bildirim paylaşmak ve soru sormak için.
* [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakın.
* [Discord](https://discord.gg/FngNHpbcY7). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.
* [Twitter](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.
## Star history
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Güvenlik açıklaması
Gizliliğinizi korumak için, lütfen güvenlik sorunlarını GitHub'da paylaşmaktan kaçının. Bunun yerine, sorularınızı security@dify.ai adresine gönderin ve size daha detaylı bir cevap vereceğiz.
## Lisans
Bu depo, temel olarak Apache 2.0 lisansı ve birkaç ek kısıtlama içeren [Dify Açık Kaynak Lisansı](LICENSE) altında kullanıma sunulmuştur.

View File

@ -1,234 +0,0 @@
![cover-v5-optimized](https://github.com/langgenius/dify/assets/13230914/f9e19af5-61ba-4119-b926-d10c4c06ebab)
<p align="center">
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
<a href="https://docs.dify.ai/getting-started/install-self-hosted">Tự triển khai</a> ·
<a href="https://docs.dify.ai">Tài liệu</a> ·
<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Yêu cầu doanh nghiệp</a>
</p>
<p align="center">
<a href="https://dify.ai" target="_blank">
<img alt="Static Badge" src="https://img.shields.io/badge/Product-F04438"></a>
<a href="https://dify.ai/pricing" target="_blank">
<img alt="Static Badge" src="https://img.shields.io/badge/free-pricing?logo=free&color=%20%23155EEF&label=pricing&labelColor=%20%23528bff"></a>
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
alt="chat trên Discord"></a>
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
alt="theo dõi trên Twitter"></a>
<a href="https://hub.docker.com/u/langgenius" target="_blank">
<img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
<a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
<img alt="Commits tháng trước" src="https://img.shields.io/github/commit-activity/m/langgenius/dify?labelColor=%20%2332b583&color=%20%2312b76a"></a>
<a href="https://github.com/langgenius/dify/" target="_blank">
<img alt="Vấn đề đã đóng" src="https://img.shields.io/github/issues-search?query=repo%3Alanggenius%2Fdify%20is%3Aclosed&label=issues%20closed&labelColor=%20%237d89b0&color=%20%235d6b98"></a>
<a href="https://github.com/langgenius/dify/discussions/" target="_blank">
<img alt="Bài thảo luận" src="https://img.shields.io/github/discussions/langgenius/dify?labelColor=%20%239b8afb&color=%20%237a5af8"></a>
</p>
<p align="center">
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
</p>
Dify là một nền tảng phát triển ứng dụng LLM mã nguồn mở. Giao diện trực quan kết hợp quy trình làm việc AI, mô hình RAG, khả năng tác nhân, quản lý mô hình, tính năng quan sát và hơn thế nữa, cho phép bạn nhanh chóng chuyển từ nguyên mẫu sang sản phẩm. Đây là danh sách các tính năng cốt lõi:
</br> </br>
**1. Quy trình làm việc**:
Xây dựng và kiểm tra các quy trình làm việc AI mạnh mẽ trên một canvas trực quan, tận dụng tất cả các tính năng sau đây và hơn thế nữa.
https://github.com/langgenius/dify/assets/13230914/356df23e-1604-483d-80a6-9517ece318aa
**2. Hỗ trợ mô hình toàn diện**:
Tích hợp liền mạch với hàng trăm mô hình LLM độc quyền / mã nguồn mở từ hàng chục nhà cung cấp suy luận và giải pháp tự lưu trữ, bao gồm GPT, Mistral, Llama3, và bất kỳ mô hình tương thích API OpenAI nào. Danh sách đầy đủ các nhà cung cấp mô hình được hỗ trợ có thể được tìm thấy [tại đây](https://docs.dify.ai/getting-started/readme/model-providers).
![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)
**3. IDE Prompt**:
Giao diện trực quan để tạo prompt, so sánh hiệu suất mô hình và thêm các tính năng bổ sung như chuyển văn bản thành giọng nói cho một ứng dụng dựa trên trò chuyện.
**4. Mô hình RAG**:
Khả năng RAG mở rộng bao gồm mọi thứ từ nhập tài liệu đến truy xuất, với hỗ trợ sẵn có cho việc trích xuất văn bản từ PDF, PPT và các định dạng tài liệu phổ biến khác.
**5. Khả năng tác nhân**:
Bạn có thể định nghĩa các tác nhân dựa trên LLM Function Calling hoặc ReAct, và thêm các công cụ được xây dựng sẵn hoặc tùy chỉnh cho tác nhân. Dify cung cấp hơn 50 công cụ tích hợp sẵn cho các tác nhân AI, như Google Search, DALL·E, Stable Diffusion và WolframAlpha.
**6. LLMOps**:
Giám sát và phân tích nhật ký và hiệu suất ứng dụng theo thời gian. Bạn có thể liên tục cải thiện prompt, bộ dữ liệu và mô hình dựa trên dữ liệu sản xuất và chú thích.
**7. Backend-as-a-Service**:
Tất cả các dịch vụ của Dify đều đi kèm với các API tương ứng, vì vậy bạn có thể dễ dàng tích hợp Dify vào logic kinh doanh của riêng mình.
## So sánh tính năng
<table style="width: 100%;">
<tr>
<th align="center">Tính năng</th>
<th align="center">Dify.AI</th>
<th align="center">LangChain</th>
<th align="center">Flowise</th>
<th align="center">OpenAI Assistants API</th>
</tr>
<tr>
<td align="center">Phương pháp lập trình</td>
<td align="center">Hướng API + Ứng dụng</td>
<td align="center">Mã Python</td>
<td align="center">Hướng ứng dụng</td>
<td align="center">Hướng API</td>
</tr>
<tr>
<td align="center">LLMs được hỗ trợ</td>
<td align="center">Đa dạng phong phú</td>
<td align="center">Đa dạng phong phú</td>
<td align="center">Đa dạng phong phú</td>
<td align="center">Chỉ OpenAI</td>
</tr>
<tr>
<td align="center">RAG Engine</td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
</tr>
<tr>
<td align="center">Agent</td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
</tr>
<tr>
<td align="center">Quy trình làm việc</td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
</tr>
<tr>
<td align="center">Khả năng quan sát</td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
</tr>
<tr>
<td align="center">Tính năng doanh nghiệp (SSO/Kiểm soát truy cập)</td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
</tr>
<tr>
<td align="center">Triển khai cục bộ</td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
<td align="center"></td>
</tr>
</table>
## Sử dụng Dify
- **Cloud </br>**
Chúng tôi lưu trữ dịch vụ [Dify Cloud](https://dify.ai) cho bất kỳ ai muốn thử mà không cần cài đặt. Nó cung cấp tất cả các khả năng của phiên bản tự triển khai và bao gồm 200 lượt gọi GPT-4 miễn phí trong gói sandbox.
- **Tự triển khai Dify Community Edition</br>**
Nhanh chóng chạy Dify trong môi trường của bạn với [hướng dẫn bắt đầu](#quick-start) này.
Sử dụng [tài liệu](https://docs.dify.ai) của chúng tôi để tham khảo thêm và nhận hướng dẫn chi tiết hơn.
- **Dify cho doanh nghiệp / tổ chức</br>**
Chúng tôi cung cấp các tính năng bổ sung tập trung vào doanh nghiệp. [Ghi lại câu hỏi của bạn cho chúng tôi thông qua chatbot này](https://udify.app/chat/22L1zSxg6yW1cWQg) hoặc [gửi email cho chúng tôi](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) để thảo luận về nhu cầu doanh nghiệp. </br>
> Đối với các công ty khởi nghiệp và doanh nghiệp nhỏ sử dụng AWS, hãy xem [Dify Premium trên AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) và triển khai nó vào AWS VPC của riêng bạn chỉ với một cú nhấp chuột. Đây là một AMI giá cả phải chăng với tùy chọn tạo ứng dụng với logo và thương hiệu tùy chỉnh.
## Luôn cập nhật
Yêu thích Dify trên GitHub và được thông báo ngay lập tức về các bản phát hành mới.
![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)
## Bắt đầu nhanh
> Trước khi cài đặt Dify, hãy đảm bảo máy của bạn đáp ứng các yêu cầu hệ thống tối thiểu sau:
>
>- CPU >= 2 Core
>- RAM >= 4GB
</br>
Cách dễ nhất để khởi động máy chủ Dify là chạy tệp [docker-compose.yml](docker/docker-compose.yaml) của chúng tôi. Trước khi chạy lệnh cài đặt, hãy đảm bảo rằng [Docker](https://docs.docker.com/get-docker/) và [Docker Compose](https://docs.docker.com/compose/install/) đã được cài đặt trên máy của bạn:
```bash
cd docker
cp .env.example .env
docker compose up -d
```
Sau khi chạy, bạn có thể truy cập bảng điều khiển Dify trong trình duyệt của bạn tại [http://localhost/install](http://localhost/install) và bắt đầu quá trình khởi tạo.
> Nếu bạn muốn đóng góp cho Dify hoặc phát triển thêm, hãy tham khảo [hướng dẫn triển khai từ mã nguồn](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code) của chúng tôi
## Các bước tiếp theo
Nếu bạn cần tùy chỉnh cấu hình, vui lòng tham khảo các nhận xét trong tệp [.env.example](docker/.env.example) của chúng tôi và cập nhật các giá trị tương ứng trong tệp `.env` của bạn. Ngoài ra, bạn có thể cần điều chỉnh tệp `docker-compose.yaml`, chẳng hạn như thay đổi phiên bản hình ảnh, ánh xạ cổng hoặc gắn kết khối lượng, dựa trên môi trường triển khai cụ thể và yêu cầu của bạn. Sau khi thực hiện bất kỳ thay đổi nào, vui lòng chạy lại `docker-compose up -d`. Bạn có thể tìm thấy danh sách đầy đủ các biến môi trường có sẵn [tại đây](https://docs.dify.ai/getting-started/install-self-hosted/environments).
Nếu bạn muốn cấu hình một cài đặt có độ sẵn sàng cao, có các [Helm Charts](https://helm.sh/) và tệp YAML do cộng đồng đóng góp cho phép Dify được triển khai trên Kubernetes.
- [Helm Chart bởi @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart bởi @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [Tệp YAML bởi @Winson-030](https://github.com/Winson-030/dify-kubernetes)
#### Sử dụng Terraform để Triển khai
##### Azure Global
Triển khai Dify lên Azure chỉ với một cú nhấp chuột bằng cách sử dụng [terraform](https://www.terraform.io/).
- [Azure Terraform bởi @nikawang](https://github.com/nikawang/dify-azure-terraform)
## Đóng góp
Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) của chúng tôi.
Đồng thời, vui lòng xem xét hỗ trợ Dify bằng cách chia sẻ nó trên mạng xã hội và tại các sự kiện và hội nghị.
> Chúng tôi đang tìm kiếm người đóng góp để giúp dịch Dify sang các ngôn ngữ khác ngoài tiếng Trung hoặc tiếng Anh. Nếu bạn quan tâm đến việc giúp đỡ, vui lòng xem [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) để biết thêm thông tin và để lại bình luận cho chúng tôi trong kênh `global-users` của [Máy chủ Cộng đồng Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi.
**Người đóng góp**
<a href="https://github.com/langgenius/dify/graphs/contributors">
<img src="https://contrib.rocks/image?repo=langgenius/dify" />
</a>
## Cộng đồng & liên hệ
* [Thảo luận GitHub](https://github.com/langgenius/dify/discussions). Tốt nhất cho: chia sẻ phản hồi và đặt câu hỏi.
* [Vấn đề GitHub](https://github.com/langgenius/dify/issues). Tốt nhất cho: lỗi bạn gặp phải khi sử dụng Dify.AI và đề xuất tính năng. Xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) của chúng tôi.
* [Discord](https://discord.gg/FngNHpbcY7). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng.
* [Twitter](https://twitter.com/dify_ai). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng.
## Lịch sử Yêu thích
[![Biểu đồ Lịch sử Yêu thích](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Tiết lộ bảo mật
Để bảo vệ quyền riêng tư của bạn, vui lòng tránh đăng các vấn đề bảo mật trên GitHub. Thay vào đó, hãy gửi câu hỏi của bạn đến security@dify.ai và chúng tôi sẽ cung cấp cho bạn câu trả lời chi tiết hơn.
## Giấy phép
Kho lưu trữ này có sẵn theo [Giấy phép Mã nguồn Mở Dify](LICENSE), về cơ bản là Apache 2.0 với một vài hạn chế bổ sung.

View File

@ -83,7 +83,7 @@ OCI_REGION=your-region
WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, chroma, opensearch, tidb_vector
# Vector database configuration, support: weaviate, qdrant, milvus, relyt, pgvecto_rs, pgvector, chroma, opensearch, tidb_vector
VECTOR_STORE=weaviate
# Weaviate configuration
@ -106,14 +106,6 @@ MILVUS_USER=root
MILVUS_PASSWORD=Milvus
MILVUS_SECURE=false
# MyScale configuration
MYSCALE_HOST=127.0.0.1
MYSCALE_PORT=8123
MYSCALE_USER=default
MYSCALE_PASSWORD=
MYSCALE_DATABASE=default
MYSCALE_FTS_PARAMS=
# Relyt configuration
RELYT_HOST=127.0.0.1
RELYT_PORT=5432
@ -130,12 +122,6 @@ TENCENT_VECTOR_DB_DATABASE=dify
TENCENT_VECTOR_DB_SHARD=1
TENCENT_VECTOR_DB_REPLICAS=2
# ElasticSearch configuration
ELASTICSEARCH_HOST=127.0.0.1
ELASTICSEARCH_PORT=9200
ELASTICSEARCH_USERNAME=elastic
ELASTICSEARCH_PASSWORD=elastic
# PGVECTO_RS configuration
PGVECTO_RS_HOST=localhost
PGVECTO_RS_PORT=5431
@ -165,16 +151,6 @@ CHROMA_DATABASE=default_database
CHROMA_AUTH_PROVIDER=chromadb.auth.token_authn.TokenAuthenticationServerProvider
CHROMA_AUTH_CREDENTIALS=difyai123456
# AnalyticDB configuration
ANALYTICDB_KEY_ID=your-ak
ANALYTICDB_KEY_SECRET=your-sk
ANALYTICDB_REGION_ID=cn-hangzhou
ANALYTICDB_INSTANCE_ID=gp-ab123456
ANALYTICDB_ACCOUNT=testaccount
ANALYTICDB_PASSWORD=testpassword
ANALYTICDB_NAMESPACE=dify
ANALYTICDB_NAMESPACE_PASSWORD=difypassword
# OpenSearch configuration
OPENSEARCH_HOST=127.0.0.1
OPENSEARCH_PORT=9200
@ -189,7 +165,6 @@ UPLOAD_IMAGE_FILE_SIZE_LIMIT=10
# Model Configuration
MULTIMODAL_SEND_IMAGE_FORMAT=base64
PROMPT_GENERATION_MAX_TOKENS=512
# Mail configuration, support: resend, smtp
MAIL_TYPE=
@ -223,7 +198,6 @@ UNSTRUCTURED_API_KEY=
SSRF_PROXY_HTTP_URL=
SSRF_PROXY_HTTPS_URL=
SSRF_DEFAULT_MAX_RETRIES=3
BATCH_UPLOAD_LIMIT=10
KEYWORD_DATA_SOURCE_TYPE=database
@ -263,8 +237,4 @@ WORKFLOW_CALL_MAX_DEPTH=5
# App configuration
APP_MAX_EXECUTION_TIME=1200
APP_MAX_ACTIVE_REQUESTS=0
# Celery beat configuration
CELERY_BEAT_SCHEDULER_TIME=1

View File

@ -5,14 +5,14 @@ WORKDIR /app/api
# Install Poetry
ENV POETRY_VERSION=1.8.3
RUN pip install --no-cache-dir poetry==${POETRY_VERSION}
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir --upgrade poetry==${POETRY_VERSION}
# Configure Poetry
ENV POETRY_CACHE_DIR=/tmp/poetry_cache
ENV POETRY_NO_INTERACTION=1
ENV POETRY_VIRTUALENVS_IN_PROJECT=true
ENV POETRY_VIRTUALENVS_CREATE=true
ENV POETRY_REQUESTS_TIMEOUT=15
FROM base AS packages
@ -42,12 +42,8 @@ ENV TZ=UTC
WORKDIR /app/api
RUN apt-get update \
&& apt-get install -y --no-install-recommends curl nodejs libgmp-dev libmpfr-dev libmpc-dev \
&& echo "deb http://deb.debian.org/debian testing main" > /etc/apt/sources.list \
&& apt-get update \
# For Security
&& apt-get install -y --no-install-recommends zlib1g=1:1.3.dfsg+really1.3.1-1 expat=2.6.2-1 libldap-2.5-0=2.5.18+dfsg-2 perl=5.38.2-5 libsqlite3-0=3.46.0-1 \
&& apt-get autoremove -y \
&& apt-get install -y --no-install-recommends curl wget vim nodejs ffmpeg libgmp-dev libmpfr-dev libmpc-dev \
&& apt-get autoremove \
&& rm -rf /var/lib/apt/lists/*
# Copy Python environment and packages
@ -55,9 +51,6 @@ ENV VIRTUAL_ENV=/app/api/.venv
COPY --from=packages ${VIRTUAL_ENV} ${VIRTUAL_ENV}
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
# Download nltk data
RUN python -c "import nltk; nltk.download('punkt')"
# Copy source code
COPY . /app/api/

View File

@ -11,9 +11,7 @@
```bash
cd ../docker
cp middleware.env.example middleware.env
# change the profile to other vector database if you are not using weaviate
docker compose -f docker-compose.middleware.yaml --profile weaviate -p dify up -d
docker compose -f docker-compose.middleware.yaml -p dify up -d
cd ../api
```

View File

@ -1,6 +1,8 @@
import os
if os.environ.get("DEBUG", "false").lower() != "true":
from configs.app_config import DifyConfig
if not os.environ.get("DEBUG") or os.environ.get("DEBUG", "false").lower() != 'true':
from gevent import monkey
monkey.patch_all()
@ -21,9 +23,7 @@ from flask import Flask, Response, request
from flask_cors import CORS
from werkzeug.exceptions import Unauthorized
import contexts
from commands import register_commands
from configs import dify_config
# DO NOT REMOVE BELOW
from events import event_handlers
@ -43,8 +43,6 @@ from extensions import (
from extensions.ext_database import db
from extensions.ext_login import login_manager
from libs.passport import PassportService
# TODO: Find a way to avoid importing models here
from models import account, dataset, model, source, task, tool, tools, web
from services.account_service import AccountService
@ -57,7 +55,7 @@ warnings.simplefilter("ignore", ResourceWarning)
if os.name == "nt":
os.system('tzutil /s "UTC"')
else:
os.environ["TZ"] = "UTC"
os.environ['TZ'] = 'UTC'
time.tzset()
@ -70,21 +68,20 @@ class DifyApp(Flask):
# -------------
config_type = os.getenv("EDITION", default="SELF_HOSTED") # ce edition first
config_type = os.getenv('EDITION', default='SELF_HOSTED') # ce edition first
# ----------------------------
# Application Factory Function
# ----------------------------
def create_flask_app_with_configs() -> Flask:
"""
create a raw flask app
with configs loaded from .env file
"""
dify_app = DifyApp(__name__)
dify_app.config.from_mapping(dify_config.model_dump())
dify_app.config.from_mapping(DifyConfig().model_dump())
# populate configs into system environment variables
for key, value in dify_app.config.items():
@ -93,7 +90,7 @@ def create_flask_app_with_configs() -> Flask:
elif isinstance(value, int | float | bool):
os.environ[key] = str(value)
elif value is None:
os.environ[key] = ""
os.environ[key] = ''
return dify_app
@ -101,10 +98,10 @@ def create_flask_app_with_configs() -> Flask:
def create_app() -> Flask:
app = create_flask_app_with_configs()
app.secret_key = app.config["SECRET_KEY"]
app.secret_key = app.config['SECRET_KEY']
log_handlers = None
log_file = app.config.get("LOG_FILE")
log_file = app.config.get('LOG_FILE')
if log_file:
log_dir = os.path.dirname(log_file)
os.makedirs(log_dir, exist_ok=True)
@ -112,24 +109,23 @@ def create_app() -> Flask:
RotatingFileHandler(
filename=log_file,
maxBytes=1024 * 1024 * 1024,
backupCount=5,
backupCount=5
),
logging.StreamHandler(sys.stdout),
logging.StreamHandler(sys.stdout)
]
logging.basicConfig(
level=app.config.get("LOG_LEVEL"),
format=app.config.get("LOG_FORMAT"),
datefmt=app.config.get("LOG_DATEFORMAT"),
level=app.config.get('LOG_LEVEL'),
format=app.config.get('LOG_FORMAT'),
datefmt=app.config.get('LOG_DATEFORMAT'),
handlers=log_handlers,
force=True,
force=True
)
log_tz = app.config.get("LOG_TZ")
log_tz = app.config.get('LOG_TZ')
if log_tz:
from datetime import datetime
import pytz
timezone = pytz.timezone(log_tz)
def time_converter(seconds):
@ -164,39 +160,35 @@ def initialize_extensions(app):
@login_manager.request_loader
def load_user_from_request(request_from_flask_login):
"""Load user based on the request."""
if request.blueprint not in ["console", "inner_api"]:
if request.blueprint not in ['console', 'inner_api']:
return None
# Check if the user_id contains a dot, indicating the old format
auth_header = request.headers.get("Authorization", "")
auth_header = request.headers.get('Authorization', '')
if not auth_header:
auth_token = request.args.get("_token")
auth_token = request.args.get('_token')
if not auth_token:
raise Unauthorized("Invalid Authorization token.")
raise Unauthorized('Invalid Authorization token.')
else:
if " " not in auth_header:
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
if ' ' not in auth_header:
raise Unauthorized('Invalid Authorization header format. Expected \'Bearer <api-key>\' format.')
auth_scheme, auth_token = auth_header.split(None, 1)
auth_scheme = auth_scheme.lower()
if auth_scheme != "bearer":
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
if auth_scheme != 'bearer':
raise Unauthorized('Invalid Authorization header format. Expected \'Bearer <api-key>\' format.')
decoded = PassportService().verify(auth_token)
user_id = decoded.get("user_id")
user_id = decoded.get('user_id')
account = AccountService.load_logged_in_account(account_id=user_id, token=auth_token)
if account:
contexts.tenant_id.set(account.current_tenant_id)
return account
return AccountService.load_logged_in_account(account_id=user_id, token=auth_token)
@login_manager.unauthorized_handler
def unauthorized_handler():
"""Handle unauthorized requests."""
return Response(
json.dumps({"code": "unauthorized", "message": "Unauthorized."}),
status=401,
content_type="application/json",
)
return Response(json.dumps({
'code': 'unauthorized',
'message': "Unauthorized."
}), status=401, content_type="application/json")
# register blueprint routers
@ -207,36 +199,38 @@ def register_blueprints(app):
from controllers.service_api import bp as service_api_bp
from controllers.web import bp as web_bp
CORS(
service_api_bp,
allow_headers=["Content-Type", "Authorization", "X-App-Code"],
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
)
CORS(service_api_bp,
allow_headers=['Content-Type', 'Authorization', 'X-App-Code'],
methods=['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS', 'PATCH']
)
app.register_blueprint(service_api_bp)
CORS(
web_bp,
resources={r"/*": {"origins": app.config["WEB_API_CORS_ALLOW_ORIGINS"]}},
supports_credentials=True,
allow_headers=["Content-Type", "Authorization", "X-App-Code"],
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
expose_headers=["X-Version", "X-Env"],
)
CORS(web_bp,
resources={
r"/*": {"origins": app.config['WEB_API_CORS_ALLOW_ORIGINS']}},
supports_credentials=True,
allow_headers=['Content-Type', 'Authorization', 'X-App-Code'],
methods=['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS', 'PATCH'],
expose_headers=['X-Version', 'X-Env']
)
app.register_blueprint(web_bp)
CORS(
console_app_bp,
resources={r"/*": {"origins": app.config["CONSOLE_CORS_ALLOW_ORIGINS"]}},
supports_credentials=True,
allow_headers=["Content-Type", "Authorization"],
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
expose_headers=["X-Version", "X-Env"],
)
CORS(console_app_bp,
resources={
r"/*": {"origins": app.config['CONSOLE_CORS_ALLOW_ORIGINS']}},
supports_credentials=True,
allow_headers=['Content-Type', 'Authorization'],
methods=['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS', 'PATCH'],
expose_headers=['X-Version', 'X-Env']
)
app.register_blueprint(console_app_bp)
CORS(files_bp, allow_headers=["Content-Type"], methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"])
CORS(files_bp,
allow_headers=['Content-Type'],
methods=['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS', 'PATCH']
)
app.register_blueprint(files_bp)
app.register_blueprint(inner_api_bp)
@ -246,29 +240,28 @@ def register_blueprints(app):
app = create_app()
celery = app.extensions["celery"]
if app.config.get("TESTING"):
if app.config.get('TESTING'):
print("App is running in TESTING mode")
@app.after_request
def after_request(response):
"""Add Version headers to the response."""
response.set_cookie("remember_token", "", expires=0)
response.headers.add("X-Version", app.config["CURRENT_VERSION"])
response.headers.add("X-Env", app.config["DEPLOY_ENV"])
response.set_cookie('remember_token', '', expires=0)
response.headers.add('X-Version', app.config['CURRENT_VERSION'])
response.headers.add('X-Env', app.config['DEPLOY_ENV'])
return response
@app.route("/health")
@app.route('/health')
def health():
return Response(
json.dumps({"pid": os.getpid(), "status": "ok", "version": app.config["CURRENT_VERSION"]}),
status=200,
content_type="application/json",
)
return Response(json.dumps({
'status': 'ok',
'version': app.config['CURRENT_VERSION']
}), status=200, content_type="application/json")
@app.route("/threads")
@app.route('/threads')
def threads():
num_threads = threading.active_count()
threads = threading.enumerate()
@ -279,34 +272,30 @@ def threads():
thread_id = thread.ident
is_alive = thread.is_alive()
thread_list.append(
{
"name": thread_name,
"id": thread_id,
"is_alive": is_alive,
}
)
thread_list.append({
'name': thread_name,
'id': thread_id,
'is_alive': is_alive
})
return {
"pid": os.getpid(),
"thread_num": num_threads,
"threads": thread_list,
'thread_num': num_threads,
'threads': thread_list
}
@app.route("/db-pool-stat")
@app.route('/db-pool-stat')
def pool_stat():
engine = db.engine
return {
"pid": os.getpid(),
"pool_size": engine.pool.size(),
"checked_in_connections": engine.pool.checkedin(),
"checked_out_connections": engine.pool.checkedout(),
"overflow_connections": engine.pool.overflow(),
"connection_timeout": engine.pool.timeout(),
"recycle_time": db.engine.pool._recycle,
'pool_size': engine.pool.size(),
'checked_in_connections': engine.pool.checkedin(),
'checked_out_connections': engine.pool.checkedout(),
'overflow_connections': engine.pool.overflow(),
'connection_timeout': engine.pool.timeout(),
'recycle_time': db.engine.pool._recycle
}
if __name__ == "__main__":
app.run(host="0.0.0.0", port=5001)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5001)

View File

@ -8,7 +8,6 @@ import click
from flask import current_app
from werkzeug.exceptions import NotFound
from configs import dify_config
from constants.languages import languages
from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.datasource.vdb.vector_type import VectorType
@ -27,29 +26,32 @@ from models.provider import Provider, ProviderModel
from services.account_service import RegisterService, TenantService
@click.command("reset-password", help="Reset the account password.")
@click.option("--email", prompt=True, help="The email address of the account whose password you need to reset")
@click.option("--new-password", prompt=True, help="the new password.")
@click.option("--password-confirm", prompt=True, help="the new password confirm.")
@click.command('reset-password', help='Reset the account password.')
@click.option('--email', prompt=True, help='The email address of the account whose password you need to reset')
@click.option('--new-password', prompt=True, help='the new password.')
@click.option('--password-confirm', prompt=True, help='the new password confirm.')
def reset_password(email, new_password, password_confirm):
"""
Reset password of owner account
Only available in SELF_HOSTED mode
"""
if str(new_password).strip() != str(password_confirm).strip():
click.echo(click.style("sorry. The two passwords do not match.", fg="red"))
click.echo(click.style('sorry. The two passwords do not match.', fg='red'))
return
account = db.session.query(Account).filter(Account.email == email).one_or_none()
account = db.session.query(Account). \
filter(Account.email == email). \
one_or_none()
if not account:
click.echo(click.style("sorry. the account: [{}] not exist .".format(email), fg="red"))
click.echo(click.style('sorry. the account: [{}] not exist .'.format(email), fg='red'))
return
try:
valid_password(new_password)
except:
click.echo(click.style("sorry. The passwords must match {} ".format(password_pattern), fg="red"))
click.echo(
click.style('sorry. The passwords must match {} '.format(password_pattern), fg='red'))
return
# generate password salt
@ -62,87 +64,80 @@ def reset_password(email, new_password, password_confirm):
account.password = base64_password_hashed
account.password_salt = base64_salt
db.session.commit()
click.echo(click.style("Congratulations! Password has been reset.", fg="green"))
click.echo(click.style('Congratulations! Password has been reset.', fg='green'))
@click.command("reset-email", help="Reset the account email.")
@click.option("--email", prompt=True, help="The old email address of the account whose email you need to reset")
@click.option("--new-email", prompt=True, help="the new email.")
@click.option("--email-confirm", prompt=True, help="the new email confirm.")
@click.command('reset-email', help='Reset the account email.')
@click.option('--email', prompt=True, help='The old email address of the account whose email you need to reset')
@click.option('--new-email', prompt=True, help='the new email.')
@click.option('--email-confirm', prompt=True, help='the new email confirm.')
def reset_email(email, new_email, email_confirm):
"""
Replace account email
:return:
"""
if str(new_email).strip() != str(email_confirm).strip():
click.echo(click.style("Sorry, new email and confirm email do not match.", fg="red"))
click.echo(click.style('Sorry, new email and confirm email do not match.', fg='red'))
return
account = db.session.query(Account).filter(Account.email == email).one_or_none()
account = db.session.query(Account). \
filter(Account.email == email). \
one_or_none()
if not account:
click.echo(click.style("sorry. the account: [{}] not exist .".format(email), fg="red"))
click.echo(click.style('sorry. the account: [{}] not exist .'.format(email), fg='red'))
return
try:
email_validate(new_email)
except:
click.echo(click.style("sorry. {} is not a valid email. ".format(email), fg="red"))
click.echo(
click.style('sorry. {} is not a valid email. '.format(email), fg='red'))
return
account.email = new_email
db.session.commit()
click.echo(click.style("Congratulations!, email has been reset.", fg="green"))
click.echo(click.style('Congratulations!, email has been reset.', fg='green'))
@click.command(
"reset-encrypt-key-pair",
help="Reset the asymmetric key pair of workspace for encrypt LLM credentials. "
"After the reset, all LLM credentials will become invalid, "
"requiring re-entry."
"Only support SELF_HOSTED mode.",
)
@click.confirmation_option(
prompt=click.style(
"Are you sure you want to reset encrypt key pair?" " this operation cannot be rolled back!", fg="red"
)
)
@click.command('reset-encrypt-key-pair', help='Reset the asymmetric key pair of workspace for encrypt LLM credentials. '
'After the reset, all LLM credentials will become invalid, '
'requiring re-entry.'
'Only support SELF_HOSTED mode.')
@click.confirmation_option(prompt=click.style('Are you sure you want to reset encrypt key pair?'
' this operation cannot be rolled back!', fg='red'))
def reset_encrypt_key_pair():
"""
Reset the encrypted key pair of workspace for encrypt LLM credentials.
After the reset, all LLM credentials will become invalid, requiring re-entry.
Only support SELF_HOSTED mode.
"""
if dify_config.EDITION != "SELF_HOSTED":
click.echo(click.style("Sorry, only support SELF_HOSTED mode.", fg="red"))
if current_app.config['EDITION'] != 'SELF_HOSTED':
click.echo(click.style('Sorry, only support SELF_HOSTED mode.', fg='red'))
return
tenants = db.session.query(Tenant).all()
for tenant in tenants:
if not tenant:
click.echo(click.style("Sorry, no workspace found. Please enter /install to initialize.", fg="red"))
click.echo(click.style('Sorry, no workspace found. Please enter /install to initialize.', fg='red'))
return
tenant.encrypt_public_key = generate_key_pair(tenant.id)
db.session.query(Provider).filter(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete()
db.session.query(Provider).filter(Provider.provider_type == 'custom', Provider.tenant_id == tenant.id).delete()
db.session.query(ProviderModel).filter(ProviderModel.tenant_id == tenant.id).delete()
db.session.commit()
click.echo(
click.style(
"Congratulations! " "the asymmetric key pair of workspace {} has been reset.".format(tenant.id),
fg="green",
)
)
click.echo(click.style('Congratulations! '
'the asymmetric key pair of workspace {} has been reset.'.format(tenant.id), fg='green'))
@click.command("vdb-migrate", help="migrate vector db.")
@click.option("--scope", default="all", prompt=False, help="The scope of vector database to migrate, Default is All.")
@click.command('vdb-migrate', help='migrate vector db.')
@click.option('--scope', default='all', prompt=False, help='The scope of vector database to migrate, Default is All.')
def vdb_migrate(scope: str):
if scope in ["knowledge", "all"]:
if scope in ['knowledge', 'all']:
migrate_knowledge_vector_database()
if scope in ["annotation", "all"]:
if scope in ['annotation', 'all']:
migrate_annotation_vector_database()
@ -150,7 +145,7 @@ def migrate_annotation_vector_database():
"""
Migrate annotation datas to target vector database .
"""
click.echo(click.style("Start migrate annotation data.", fg="green"))
click.echo(click.style('Start migrate annotation data.', fg='green'))
create_count = 0
skipped_count = 0
total_count = 0
@ -158,196 +153,189 @@ def migrate_annotation_vector_database():
while True:
try:
# get apps info
apps = (
db.session.query(App)
.filter(App.status == "normal")
.order_by(App.created_at.desc())
.paginate(page=page, per_page=50)
)
apps = db.session.query(App).filter(
App.status == 'normal'
).order_by(App.created_at.desc()).paginate(page=page, per_page=50)
except NotFound:
break
page += 1
for app in apps:
total_count = total_count + 1
click.echo(
f"Processing the {total_count} app {app.id}. " + f"{create_count} created, {skipped_count} skipped."
)
click.echo(f'Processing the {total_count} app {app.id}. '
+ f'{create_count} created, {skipped_count} skipped.')
try:
click.echo("Create app annotation index: {}".format(app.id))
app_annotation_setting = (
db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app.id).first()
)
click.echo('Create app annotation index: {}'.format(app.id))
app_annotation_setting = db.session.query(AppAnnotationSetting).filter(
AppAnnotationSetting.app_id == app.id
).first()
if not app_annotation_setting:
skipped_count = skipped_count + 1
click.echo("App annotation setting is disabled: {}".format(app.id))
click.echo('App annotation setting is disabled: {}'.format(app.id))
continue
# get dataset_collection_binding info
dataset_collection_binding = (
db.session.query(DatasetCollectionBinding)
.filter(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id)
.first()
)
dataset_collection_binding = db.session.query(DatasetCollectionBinding).filter(
DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id
).first()
if not dataset_collection_binding:
click.echo("App annotation collection binding is not exist: {}".format(app.id))
click.echo('App annotation collection binding is not exist: {}'.format(app.id))
continue
annotations = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app.id).all()
dataset = Dataset(
id=app.id,
tenant_id=app.tenant_id,
indexing_technique="high_quality",
indexing_technique='high_quality',
embedding_model_provider=dataset_collection_binding.provider_name,
embedding_model=dataset_collection_binding.model_name,
collection_binding_id=dataset_collection_binding.id,
collection_binding_id=dataset_collection_binding.id
)
documents = []
if annotations:
for annotation in annotations:
document = Document(
page_content=annotation.question,
metadata={"annotation_id": annotation.id, "app_id": app.id, "doc_id": annotation.id},
metadata={
"annotation_id": annotation.id,
"app_id": app.id,
"doc_id": annotation.id
}
)
documents.append(document)
vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
vector = Vector(dataset, attributes=['doc_id', 'annotation_id', 'app_id'])
click.echo(f"Start to migrate annotation, app_id: {app.id}.")
try:
vector.delete()
click.echo(click.style(f"Successfully delete vector index for app: {app.id}.", fg="green"))
click.echo(
click.style(f'Successfully delete vector index for app: {app.id}.',
fg='green'))
except Exception as e:
click.echo(click.style(f"Failed to delete vector index for app {app.id}.", fg="red"))
click.echo(
click.style(f'Failed to delete vector index for app {app.id}.',
fg='red'))
raise e
if documents:
try:
click.echo(
click.style(
f"Start to created vector index with {len(documents)} annotations for app {app.id}.",
fg="green",
)
)
click.echo(click.style(
f'Start to created vector index with {len(documents)} annotations for app {app.id}.',
fg='green'))
vector.create(documents)
click.echo(click.style(f"Successfully created vector index for app {app.id}.", fg="green"))
click.echo(
click.style(f'Successfully created vector index for app {app.id}.', fg='green'))
except Exception as e:
click.echo(click.style(f"Failed to created vector index for app {app.id}.", fg="red"))
click.echo(click.style(f'Failed to created vector index for app {app.id}.', fg='red'))
raise e
click.echo(f"Successfully migrated app annotation {app.id}.")
click.echo(f'Successfully migrated app annotation {app.id}.')
create_count += 1
except Exception as e:
click.echo(
click.style(
"Create app annotation index error: {} {}".format(e.__class__.__name__, str(e)), fg="red"
)
)
click.style('Create app annotation index error: {} {}'.format(e.__class__.__name__, str(e)),
fg='red'))
continue
click.echo(
click.style(
f"Congratulations! Create {create_count} app annotation indexes, and skipped {skipped_count} apps.",
fg="green",
)
)
click.style(f'Congratulations! Create {create_count} app annotation indexes, and skipped {skipped_count} apps.',
fg='green'))
def migrate_knowledge_vector_database():
"""
Migrate vector database datas to target vector database .
"""
click.echo(click.style("Start migrate vector db.", fg="green"))
click.echo(click.style('Start migrate vector db.', fg='green'))
create_count = 0
skipped_count = 0
total_count = 0
vector_type = dify_config.VECTOR_STORE
config = current_app.config
vector_type = config.get('VECTOR_STORE')
page = 1
while True:
try:
datasets = (
db.session.query(Dataset)
.filter(Dataset.indexing_technique == "high_quality")
.order_by(Dataset.created_at.desc())
.paginate(page=page, per_page=50)
)
datasets = db.session.query(Dataset).filter(Dataset.indexing_technique == 'high_quality') \
.order_by(Dataset.created_at.desc()).paginate(page=page, per_page=50)
except NotFound:
break
page += 1
for dataset in datasets:
total_count = total_count + 1
click.echo(
f"Processing the {total_count} dataset {dataset.id}. "
+ f"{create_count} created, {skipped_count} skipped."
)
click.echo(f'Processing the {total_count} dataset {dataset.id}. '
+ f'{create_count} created, {skipped_count} skipped.')
try:
click.echo("Create dataset vdb index: {}".format(dataset.id))
click.echo('Create dataset vdb index: {}'.format(dataset.id))
if dataset.index_struct_dict:
if dataset.index_struct_dict["type"] == vector_type:
if dataset.index_struct_dict['type'] == vector_type:
skipped_count = skipped_count + 1
continue
collection_name = ""
collection_name = ''
if vector_type == VectorType.WEAVIATE:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {"type": VectorType.WEAVIATE, "vector_store": {"class_prefix": collection_name}}
index_struct_dict = {
"type": VectorType.WEAVIATE,
"vector_store": {"class_prefix": collection_name}
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.QDRANT:
if dataset.collection_binding_id:
dataset_collection_binding = (
db.session.query(DatasetCollectionBinding)
.filter(DatasetCollectionBinding.id == dataset.collection_binding_id)
.one_or_none()
)
dataset_collection_binding = db.session.query(DatasetCollectionBinding). \
filter(DatasetCollectionBinding.id == dataset.collection_binding_id). \
one_or_none()
if dataset_collection_binding:
collection_name = dataset_collection_binding.collection_name
else:
raise ValueError("Dataset Collection Bindings is not exist!")
raise ValueError('Dataset Collection Bindings is not exist!')
else:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {"type": VectorType.QDRANT, "vector_store": {"class_prefix": collection_name}}
index_struct_dict = {
"type": VectorType.QDRANT,
"vector_store": {"class_prefix": collection_name}
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.MILVUS:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {"type": VectorType.MILVUS, "vector_store": {"class_prefix": collection_name}}
index_struct_dict = {
"type": VectorType.MILVUS,
"vector_store": {"class_prefix": collection_name}
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.RELYT:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {"type": "relyt", "vector_store": {"class_prefix": collection_name}}
index_struct_dict = {
"type": 'relyt',
"vector_store": {"class_prefix": collection_name}
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.TENCENT:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {"type": VectorType.TENCENT, "vector_store": {"class_prefix": collection_name}}
index_struct_dict = {
"type": VectorType.TENCENT,
"vector_store": {"class_prefix": collection_name}
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.PGVECTOR:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {"type": VectorType.PGVECTOR, "vector_store": {"class_prefix": collection_name}}
index_struct_dict = {
"type": VectorType.PGVECTOR,
"vector_store": {"class_prefix": collection_name}
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.OPENSEARCH:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {
"type": VectorType.OPENSEARCH,
"vector_store": {"class_prefix": collection_name},
"vector_store": {"class_prefix": collection_name}
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.ANALYTICDB:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {
"type": VectorType.ANALYTICDB,
"vector_store": {"class_prefix": collection_name},
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.ELASTICSEARCH:
dataset_id = dataset.id
index_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {"type": "elasticsearch", "vector_store": {"class_prefix": index_name}}
dataset.index_struct = json.dumps(index_struct_dict)
else:
raise ValueError(f"Vector store {vector_type} is not supported.")
@ -357,41 +345,29 @@ def migrate_knowledge_vector_database():
try:
vector.delete()
click.echo(
click.style(
f"Successfully delete vector index {collection_name} for dataset {dataset.id}.", fg="green"
)
)
click.style(f'Successfully delete vector index {collection_name} for dataset {dataset.id}.',
fg='green'))
except Exception as e:
click.echo(
click.style(
f"Failed to delete vector index {collection_name} for dataset {dataset.id}.", fg="red"
)
)
click.style(f'Failed to delete vector index {collection_name} for dataset {dataset.id}.',
fg='red'))
raise e
dataset_documents = (
db.session.query(DatasetDocument)
.filter(
DatasetDocument.dataset_id == dataset.id,
DatasetDocument.indexing_status == "completed",
DatasetDocument.enabled == True,
DatasetDocument.archived == False,
)
.all()
)
dataset_documents = db.session.query(DatasetDocument).filter(
DatasetDocument.dataset_id == dataset.id,
DatasetDocument.indexing_status == 'completed',
DatasetDocument.enabled == True,
DatasetDocument.archived == False,
).all()
documents = []
segments_count = 0
for dataset_document in dataset_documents:
segments = (
db.session.query(DocumentSegment)
.filter(
DocumentSegment.document_id == dataset_document.id,
DocumentSegment.status == "completed",
DocumentSegment.enabled == True,
)
.all()
)
segments = db.session.query(DocumentSegment).filter(
DocumentSegment.document_id == dataset_document.id,
DocumentSegment.status == 'completed',
DocumentSegment.enabled == True
).all()
for segment in segments:
document = Document(
@ -401,7 +377,7 @@ def migrate_knowledge_vector_database():
"doc_hash": segment.index_node_hash,
"document_id": segment.document_id,
"dataset_id": segment.dataset_id,
},
}
)
documents.append(document)
@ -409,43 +385,37 @@ def migrate_knowledge_vector_database():
if documents:
try:
click.echo(
click.style(
f"Start to created vector index with {len(documents)} documents of {segments_count} segments for dataset {dataset.id}.",
fg="green",
)
)
click.echo(click.style(
f'Start to create vector index with {len(documents)} documents of {segments_count} segments for dataset {dataset.id}.',
fg='green'))
vector.create(documents)
click.echo(
click.style(f"Successfully created vector index for dataset {dataset.id}.", fg="green")
)
click.style(f'Successfully created vector index for dataset {dataset.id}.', fg='green'))
except Exception as e:
click.echo(click.style(f"Failed to created vector index for dataset {dataset.id}.", fg="red"))
click.echo(click.style(f'Failed to created vector index for dataset {dataset.id}.', fg='red'))
raise e
db.session.add(dataset)
db.session.commit()
click.echo(f"Successfully migrated dataset {dataset.id}.")
click.echo(f'Successfully migrated dataset {dataset.id}.')
create_count += 1
except Exception as e:
db.session.rollback()
click.echo(
click.style("Create dataset index error: {} {}".format(e.__class__.__name__, str(e)), fg="red")
)
click.style('Create dataset index error: {} {}'.format(e.__class__.__name__, str(e)),
fg='red'))
continue
click.echo(
click.style(
f"Congratulations! Create {create_count} dataset indexes, and skipped {skipped_count} datasets.", fg="green"
)
)
click.style(f'Congratulations! Create {create_count} dataset indexes, and skipped {skipped_count} datasets.',
fg='green'))
@click.command("convert-to-agent-apps", help="Convert Agent Assistant to Agent App.")
@click.command('convert-to-agent-apps', help='Convert Agent Assistant to Agent App.')
def convert_to_agent_apps():
"""
Convert Agent Assistant to Agent App.
"""
click.echo(click.style("Start convert to agent apps.", fg="green"))
click.echo(click.style('Start convert to agent apps.', fg='green'))
proceeded_app_ids = []
@ -480,7 +450,7 @@ def convert_to_agent_apps():
break
for app in apps:
click.echo("Converting app: {}".format(app.id))
click.echo('Converting app: {}'.format(app.id))
try:
app.mode = AppMode.AGENT_CHAT.value
@ -492,139 +462,136 @@ def convert_to_agent_apps():
)
db.session.commit()
click.echo(click.style("Converted app: {}".format(app.id), fg="green"))
click.echo(click.style('Converted app: {}'.format(app.id), fg='green'))
except Exception as e:
click.echo(click.style("Convert app error: {} {}".format(e.__class__.__name__, str(e)), fg="red"))
click.echo(
click.style('Convert app error: {} {}'.format(e.__class__.__name__,
str(e)), fg='red'))
click.echo(click.style("Congratulations! Converted {} agent apps.".format(len(proceeded_app_ids)), fg="green"))
click.echo(click.style('Congratulations! Converted {} agent apps.'.format(len(proceeded_app_ids)), fg='green'))
@click.command("add-qdrant-doc-id-index", help="add qdrant doc_id index.")
@click.option("--field", default="metadata.doc_id", prompt=False, help="index field , default is metadata.doc_id.")
@click.command('add-qdrant-doc-id-index', help='add qdrant doc_id index.')
@click.option('--field', default='metadata.doc_id', prompt=False, help='index field , default is metadata.doc_id.')
def add_qdrant_doc_id_index(field: str):
click.echo(click.style("Start add qdrant doc_id index.", fg="green"))
vector_type = dify_config.VECTOR_STORE
click.echo(click.style('Start add qdrant doc_id index.', fg='green'))
config = current_app.config
vector_type = config.get('VECTOR_STORE')
if vector_type != "qdrant":
click.echo(click.style("Sorry, only support qdrant vector store.", fg="red"))
click.echo(click.style('Sorry, only support qdrant vector store.', fg='red'))
return
create_count = 0
try:
bindings = db.session.query(DatasetCollectionBinding).all()
if not bindings:
click.echo(click.style("Sorry, no dataset collection bindings found.", fg="red"))
click.echo(click.style('Sorry, no dataset collection bindings found.', fg='red'))
return
import qdrant_client
from qdrant_client.http.exceptions import UnexpectedResponse
from qdrant_client.http.models import PayloadSchemaType
from core.rag.datasource.vdb.qdrant.qdrant_vector import QdrantConfig
for binding in bindings:
if dify_config.QDRANT_URL is None:
raise ValueError("Qdrant url is required.")
qdrant_config = QdrantConfig(
endpoint=dify_config.QDRANT_URL,
api_key=dify_config.QDRANT_API_KEY,
endpoint=config.get('QDRANT_URL'),
api_key=config.get('QDRANT_API_KEY'),
root_path=current_app.root_path,
timeout=dify_config.QDRANT_CLIENT_TIMEOUT,
grpc_port=dify_config.QDRANT_GRPC_PORT,
prefer_grpc=dify_config.QDRANT_GRPC_ENABLED,
timeout=config.get('QDRANT_CLIENT_TIMEOUT'),
grpc_port=config.get('QDRANT_GRPC_PORT'),
prefer_grpc=config.get('QDRANT_GRPC_ENABLED')
)
try:
client = qdrant_client.QdrantClient(**qdrant_config.to_qdrant_params())
# create payload index
client.create_payload_index(binding.collection_name, field, field_schema=PayloadSchemaType.KEYWORD)
client.create_payload_index(binding.collection_name, field,
field_schema=PayloadSchemaType.KEYWORD)
create_count += 1
except UnexpectedResponse as e:
# Collection does not exist, so return
if e.status_code == 404:
click.echo(
click.style(f"Collection not found, collection_name:{binding.collection_name}.", fg="red")
)
click.echo(click.style(f'Collection not found, collection_name:{binding.collection_name}.', fg='red'))
continue
# Some other error occurred, so re-raise the exception
else:
click.echo(
click.style(
f"Failed to create qdrant index, collection_name:{binding.collection_name}.", fg="red"
)
)
click.echo(click.style(f'Failed to create qdrant index, collection_name:{binding.collection_name}.', fg='red'))
except Exception as e:
click.echo(click.style("Failed to create qdrant client.", fg="red"))
click.echo(click.style('Failed to create qdrant client.', fg='red'))
click.echo(click.style(f"Congratulations! Create {create_count} collection indexes.", fg="green"))
click.echo(
click.style(f'Congratulations! Create {create_count} collection indexes.',
fg='green'))
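Stripped of the surrounding loop and error handling, the command above amounts to one qdrant-client call per collection binding; a minimal standalone sketch (hypothetical endpoint and collection name, for illustration only):

import qdrant_client
from qdrant_client.http.models import PayloadSchemaType

client = qdrant_client.QdrantClient(url="http://localhost:6333")  # hypothetical endpoint
client.create_payload_index(
    collection_name="Vector_index_example_Node",   # hypothetical collection name
    field_name="metadata.doc_id",                  # the default --field value
    field_schema=PayloadSchemaType.KEYWORD,        # index doc_id as a keyword payload field
)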
@click.command("create-tenant", help="Create account and tenant.")
@click.option("--email", prompt=True, help="The email address of the tenant account.")
@click.option("--language", prompt=True, help="Account language, default: en-US.")
@click.command('create-tenant', help='Create account and tenant.')
@click.option('--email', prompt=True, help='The email address of the tenant account.')
@click.option('--language', prompt=True, help='Account language, default: en-US.')
def create_tenant(email: str, language: Optional[str] = None):
"""
Create tenant account
"""
if not email:
click.echo(click.style("Sorry, email is required.", fg="red"))
click.echo(click.style('Sorry, email is required.', fg='red'))
return
# Create account
email = email.strip()
if "@" not in email:
click.echo(click.style("Sorry, invalid email address.", fg="red"))
if '@' not in email:
click.echo(click.style('Sorry, invalid email address.', fg='red'))
return
account_name = email.split("@")[0]
account_name = email.split('@')[0]
if language not in languages:
language = "en-US"
language = 'en-US'
# generate random password
new_password = secrets.token_urlsafe(16)
# register account
account = RegisterService.register(email=email, name=account_name, password=new_password, language=language)
account = RegisterService.register(
email=email,
name=account_name,
password=new_password,
language=language
)
TenantService.create_owner_tenant_if_not_exist(account)
click.echo(
click.style(
"Congratulations! Account and tenant created.\n" "Account: {}\nPassword: {}".format(email, new_password),
fg="green",
)
)
click.echo(click.style('Congratulations! Account and tenant created.\n'
'Account: {}\nPassword: {}'.format(email, new_password), fg='green'))
@click.command("upgrade-db", help="upgrade the database")
@click.command('upgrade-db', help='upgrade the database')
def upgrade_db():
click.echo("Preparing database migration...")
lock = redis_client.lock(name="db_upgrade_lock", timeout=60)
click.echo('Preparing database migration...')
lock = redis_client.lock(name='db_upgrade_lock', timeout=60)
if lock.acquire(blocking=False):
try:
click.echo(click.style("Start database migration.", fg="green"))
click.echo(click.style('Start database migration.', fg='green'))
# run db migration
import flask_migrate
flask_migrate.upgrade()
click.echo(click.style("Database migration successful!", fg="green"))
click.echo(click.style('Database migration successful!', fg='green'))
except Exception as e:
logging.exception(f"Database migration failed, error: {e}")
logging.exception(f'Database migration failed, error: {e}')
finally:
lock.release()
else:
click.echo("Database migration skipped")
click.echo('Database migration skipped')
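The non-blocking Redis lock that guards the migration above can be sketched on its own (a minimal example assuming a reachable local Redis; the real command calls flask_migrate.upgrade() inside the lock):

import redis

redis_client = redis.Redis()  # assumption: a local Redis instance; adjust host/port as needed
lock = redis_client.lock(name="db_upgrade_lock", timeout=60)
if lock.acquire(blocking=False):
    try:
        pass  # run the migration here; only the process holding the lock gets this far
    finally:
        lock.release()
else:
    print("Database migration skipped")  # another process already holds the lock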
@click.command("fix-app-site-missing", help="Fix app related site missing issue.")
@click.command('fix-app-site-missing', help='Fix app related site missing issue.')
def fix_app_site_missing():
"""
Fix app related site missing issue.
"""
click.echo(click.style("Start fix app related site missing issue.", fg="green"))
click.echo(click.style('Start fix app related site missing issue.', fg='green'))
failed_app_ids = []
while True:
@ -655,14 +622,15 @@ where sites.id is null limit 1000"""
app_was_created.send(app, account=account)
except Exception as e:
failed_app_ids.append(app_id)
click.echo(click.style("Fix app {} related site missing issue failed!".format(app_id), fg="red"))
logging.exception(f"Fix app related site missing issue failed, error: {e}")
click.echo(click.style('Fix app {} related site missing issue failed!'.format(app_id), fg='red'))
logging.exception(f'Fix app related site missing issue failed, error: {e}')
continue
if not processed_count:
break
click.echo(click.style("Congratulations! Fix app related site missing issue successful!", fg="green"))
click.echo(click.style('Congratulations! Fix app related site missing issue successful!', fg='green'))
def register_commands(app):

View File

@ -1,3 +0,0 @@
from .app_config import DifyConfig
dify_config = DifyConfig()

View File

@ -1,5 +1,4 @@
from pydantic import Field, computed_field
from pydantic_settings import SettingsConfigDict
from pydantic_settings import BaseSettings, SettingsConfigDict
from configs.deploy import DeploymentConfig
from configs.enterprise import EnterpriseFeatureConfig
@ -9,57 +8,38 @@ from configs.middleware import MiddlewareConfig
from configs.packaging import PackagingInfo
# TODO: Both `BaseModel` and `BaseSettings` have a `model_config` attribute, but they are of different types.
# This inheritance depends on the order of the classes.
# It is better to use `BaseSettings` as the base class.
class DifyConfig(
# based on pydantic-settings
BaseSettings,
# Packaging info
PackagingInfo,
# Deployment configs
DeploymentConfig,
# Feature configs
FeatureConfig,
# Middleware configs
MiddlewareConfig,
# Extra service configs
ExtraServiceConfig,
# Enterprise feature configs
# **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
EnterpriseFeatureConfig,
):
DEBUG: bool = Field(default=False, description='whether to enable debug mode.')
model_config = SettingsConfigDict(
# read from dotenv format config file
env_file='.env',
env_file_encoding='utf-8',
frozen=True,
# ignore extra attributes
extra='ignore',
)
CODE_MAX_NUMBER: int = 9223372036854775807
CODE_MIN_NUMBER: int = -9223372036854775808
CODE_MAX_STRING_LENGTH: int = 80000
CODE_MAX_STRING_ARRAY_LENGTH: int = 30
CODE_MAX_OBJECT_ARRAY_LENGTH: int = 30
CODE_MAX_NUMBER_ARRAY_LENGTH: int = 1000
HTTP_REQUEST_MAX_CONNECT_TIMEOUT: int = 300
HTTP_REQUEST_MAX_READ_TIMEOUT: int = 600
HTTP_REQUEST_MAX_WRITE_TIMEOUT: int = 600
HTTP_REQUEST_NODE_MAX_BINARY_SIZE: int = 1024 * 1024 * 10
@computed_field
def HTTP_REQUEST_NODE_READABLE_MAX_BINARY_SIZE(self) -> str:
return f'{self.HTTP_REQUEST_NODE_MAX_BINARY_SIZE / 1024 / 1024:.2f}MB'
HTTP_REQUEST_NODE_MAX_TEXT_SIZE: int = 1024 * 1024
@computed_field
def HTTP_REQUEST_NODE_READABLE_MAX_TEXT_SIZE(self) -> str:
return f'{self.HTTP_REQUEST_NODE_MAX_TEXT_SIZE / 1024 / 1024:.2f}MB'
SSRF_PROXY_HTTP_URL: str | None = None
SSRF_PROXY_HTTPS_URL: str | None = None
MODERATION_BUFFER_SIZE: int = Field(default=300, description='The buffer size for moderation.')
MAX_VARIABLE_SIZE: int = Field(default=5 * 1024, description='The maximum size of a variable. default is 5KB.')
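As the TODO in this file notes, model_config resolution follows the inheritance order when BaseSettings is mixed with BaseModel-based config classes; a minimal sketch of that ordering (hypothetical class and field names, not from this repo):

from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict

class VersionMixin(BaseModel):            # plain BaseModel mixin, like the sub-config classes here
    CURRENT_VERSION: str = "0.0.0"

class Config(BaseSettings, VersionMixin):  # BaseSettings listed first so its settings behaviour wins
    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", extra="ignore")

cfg = Config()  # CURRENT_VERSION may now also be supplied via the environment or a .env file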

View File

@ -1,8 +1,7 @@
from pydantic import Field
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field
class DeploymentConfig(BaseSettings):
class DeploymentConfig(BaseModel):
"""
Deployment configs
"""

View File

@ -1,8 +1,7 @@
from pydantic import Field
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field
class EnterpriseFeatureConfig(BaseSettings):
class EnterpriseFeatureConfig(BaseModel):
"""
Enterprise feature configs.
**Before using, please contact business@dify.ai by email to inquire about licensing matters.**

View File

@ -1,3 +1,5 @@
from pydantic import BaseModel
from configs.extra.notion_config import NotionConfig
from configs.extra.sentry_config import SentryConfig

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field
class NotionConfig(BaseSettings):
class NotionConfig(BaseModel):
"""
Notion integration configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field, NonNegativeFloat
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, NonNegativeFloat
class SentryConfig(BaseSettings):
class SentryConfig(BaseModel):
"""
Sentry configs
"""

View File

@ -1,12 +1,11 @@
from typing import Optional
from pydantic import AliasChoices, Field, NonNegativeInt, PositiveInt, computed_field
from pydantic_settings import BaseSettings
from pydantic import AliasChoices, BaseModel, Field, NonNegativeInt, PositiveInt, computed_field
from configs.feature.hosted_service import HostedServiceConfig
class SecurityConfig(BaseSettings):
class SecurityConfig(BaseModel):
"""
Secret Key configs
"""
@ -18,13 +17,8 @@ class SecurityConfig(BaseSettings):
default=None,
)
RESET_PASSWORD_TOKEN_EXPIRY_HOURS: PositiveInt = Field(
description='Expiry time in hours for reset token',
default=24,
)
class AppExecutionConfig(BaseSettings):
class AppExecutionConfig(BaseModel):
"""
App Execution configs
"""
@ -32,13 +26,9 @@ class AppExecutionConfig(BaseSettings):
description='execution timeout in seconds for app execution',
default=1200,
)
APP_MAX_ACTIVE_REQUESTS: NonNegativeInt = Field(
description='max active request per app, 0 means unlimited',
default=0,
)
class CodeExecutionSandboxConfig(BaseSettings):
class CodeExecutionSandboxConfig(BaseModel):
"""
Code Execution Sandbox configs
"""
@ -53,36 +43,36 @@ class CodeExecutionSandboxConfig(BaseSettings):
)
class EndpointConfig(BaseSettings):
class EndpointConfig(BaseModel):
"""
Module URL configs
"""
CONSOLE_API_URL: str = Field(
description='The backend URL prefix of the console API.'
'used to concatenate the login authorization callback or notion integration callback.',
default='',
default='https://cloud.dify.ai',
)
CONSOLE_WEB_URL: str = Field(
description='The front-end URL prefix of the console web.'
'used to concatenate some front-end addresses and for CORS configuration use.',
default='',
default='https://cloud.dify.ai',
)
SERVICE_API_URL: str = Field(
description='Service API Url prefix.'
'used to display Service API Base Url to the front-end.',
default='',
default='https://api.dify.ai',
)
APP_WEB_URL: str = Field(
description='WebApp Url prefix.'
'used to display WebAPP API Base Url to the front-end.',
default='',
default='https://udify.app',
)
class FileAccessConfig(BaseSettings):
class FileAccessConfig(BaseModel):
"""
File Access configs
"""
@ -92,7 +82,7 @@ class FileAccessConfig(BaseSettings):
'Url is signed and has expiration time.',
validation_alias=AliasChoices('FILES_URL', 'CONSOLE_API_URL'),
alias_priority=1,
default='',
default='https://cloud.dify.ai',
)
FILES_ACCESS_TIMEOUT: int = Field(
@ -101,7 +91,7 @@ class FileAccessConfig(BaseSettings):
)
class FileUploadConfig(BaseSettings):
class FileUploadConfig(BaseModel):
"""
File Uploading configs
"""
@ -126,7 +116,7 @@ class FileUploadConfig(BaseSettings):
)
class HttpConfig(BaseSettings):
class HttpConfig(BaseModel):
"""
HTTP configs
"""
@ -158,7 +148,7 @@ class HttpConfig(BaseSettings):
return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(',')
class InnerAPIConfig(BaseSettings):
class InnerAPIConfig(BaseModel):
"""
Inner API configs
"""
@ -173,7 +163,7 @@ class InnerAPIConfig(BaseSettings):
)
class LoggingConfig(BaseSettings):
class LoggingConfig(BaseModel):
"""
Logging configs
"""
@ -205,7 +195,7 @@ class LoggingConfig(BaseSettings):
)
class ModelLoadBalanceConfig(BaseSettings):
class ModelLoadBalanceConfig(BaseModel):
"""
Model load balance configs
"""
@ -215,7 +205,7 @@ class ModelLoadBalanceConfig(BaseSettings):
)
class BillingConfig(BaseSettings):
class BillingConfig(BaseModel):
"""
Platform Billing Configurations
"""
@ -225,7 +215,7 @@ class BillingConfig(BaseSettings):
)
class UpdateConfig(BaseSettings):
class UpdateConfig(BaseModel):
"""
Update configs
"""
@ -235,7 +225,7 @@ class UpdateConfig(BaseSettings):
)
class WorkflowConfig(BaseSettings):
class WorkflowConfig(BaseModel):
"""
Workflow feature configs
"""
@ -256,7 +246,7 @@ class WorkflowConfig(BaseSettings):
)
class OAuthConfig(BaseSettings):
class OAuthConfig(BaseModel):
"""
oauth configs
"""
@ -286,7 +276,7 @@ class OAuthConfig(BaseSettings):
)
class ModerationConfig(BaseSettings):
class ModerationConfig(BaseModel):
"""
Moderation in app configs.
"""
@ -298,7 +288,7 @@ class ModerationConfig(BaseSettings):
)
class ToolConfig(BaseSettings):
class ToolConfig(BaseModel):
"""
Tool configs
"""
@ -309,7 +299,7 @@ class ToolConfig(BaseSettings):
)
class MailConfig(BaseSettings):
class MailConfig(BaseModel):
"""
Mail Configurations
"""
@ -365,7 +355,7 @@ class MailConfig(BaseSettings):
)
class RagEtlConfig(BaseSettings):
class RagEtlConfig(BaseModel):
"""
RAG ETL Configurations.
"""
@ -391,7 +381,7 @@ class RagEtlConfig(BaseSettings):
)
class DataSetConfig(BaseSettings):
class DataSetConfig(BaseModel):
"""
Dataset configs
"""
@ -401,12 +391,8 @@ class DataSetConfig(BaseSettings):
default=30,
)
DATASET_OPERATOR_ENABLED: bool = Field(
description='whether to enable dataset operator',
default=False,
)
class WorkspaceConfig(BaseSettings):
class WorkspaceConfig(BaseModel):
"""
Workspace configs
"""
@ -417,7 +403,7 @@ class WorkspaceConfig(BaseSettings):
)
class IndexingConfig(BaseSettings):
class IndexingConfig(BaseModel):
"""
Indexing configs.
"""
@ -428,20 +414,13 @@ class IndexingConfig(BaseSettings):
)
class ImageFormatConfig(BaseSettings):
class ImageFormatConfig(BaseModel):
MULTIMODAL_SEND_IMAGE_FORMAT: str = Field(
description='multimodal send image format, supports base64 or url, default is base64',
default='base64',
)
class CeleryBeatConfig(BaseSettings):
CELERY_BEAT_SCHEDULER_TIME: int = Field(
description='the time of the celery scheduler, default to 1 day',
default=1,
)
class FeatureConfig(
# place the configs in alphabet order
AppExecutionConfig,
@ -469,6 +448,5 @@ class FeatureConfig(
# hosted services config
HostedServiceConfig,
CeleryBeatConfig,
):
pass

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field, NonNegativeInt
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, NonNegativeInt
class HostedOpenAiConfig(BaseSettings):
class HostedOpenAiConfig(BaseModel):
"""
Hosted OpenAI service config
"""
@ -69,7 +68,7 @@ class HostedOpenAiConfig(BaseSettings):
)
class HostedAzureOpenAiConfig(BaseSettings):
class HostedAzureOpenAiConfig(BaseModel):
"""
Hosted Azure OpenAI service config
"""
@ -79,7 +78,7 @@ class HostedAzureOpenAiConfig(BaseSettings):
default=False,
)
HOSTED_AZURE_OPENAI_API_KEY: Optional[str] = Field(
HOSTED_OPENAI_API_KEY: Optional[str] = Field(
description='',
default=None,
)
@ -95,7 +94,7 @@ class HostedAzureOpenAiConfig(BaseSettings):
)
class HostedAnthropicConfig(BaseSettings):
class HostedAnthropicConfig(BaseModel):
"""
Hosted Anthropic service config
"""
@ -126,7 +125,7 @@ class HostedAnthropicConfig(BaseSettings):
)
class HostedMinmaxConfig(BaseSettings):
class HostedMinmaxConfig(BaseModel):
"""
Hosted Minmax service config
"""
@ -137,7 +136,7 @@ class HostedMinmaxConfig(BaseSettings):
)
class HostedSparkConfig(BaseSettings):
class HostedSparkConfig(BaseModel):
"""
Hosted Spark service config
"""
@ -148,7 +147,7 @@ class HostedSparkConfig(BaseSettings):
)
class HostedZhipuAIConfig(BaseSettings):
class HostedZhipuAIConfig(BaseModel):
"""
Hosted ZhipuAI service config
"""
@ -159,7 +158,7 @@ class HostedZhipuAIConfig(BaseSettings):
)
class HostedModerationConfig(BaseSettings):
class HostedModerationConfig(BaseModel):
"""
Hosted Moderation service config
"""
@ -175,7 +174,7 @@ class HostedModerationConfig(BaseSettings):
)
class HostedFetchAppTemplateConfig(BaseSettings):
class HostedFetchAppTemplateConfig(BaseModel):
"""
Hosted fetch app template config
"""

View File

@ -1,8 +1,6 @@
from typing import Any, Optional
from urllib.parse import quote_plus
from pydantic import Field, NonNegativeInt, PositiveInt, computed_field
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, NonNegativeInt, PositiveInt, computed_field
from configs.middleware.cache.redis_config import RedisConfig
from configs.middleware.storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
@ -11,10 +9,8 @@ from configs.middleware.storage.azure_blob_storage_config import AzureBlobStorag
from configs.middleware.storage.google_cloud_storage_config import GoogleCloudStorageConfig
from configs.middleware.storage.oci_storage_config import OCIStorageConfig
from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig
from configs.middleware.vdb.chroma_config import ChromaConfig
from configs.middleware.vdb.milvus_config import MilvusConfig
from configs.middleware.vdb.myscale_config import MyScaleConfig
from configs.middleware.vdb.opensearch_config import OpenSearchConfig
from configs.middleware.vdb.oracle_config import OracleConfig
from configs.middleware.vdb.pgvector_config import PGVectorConfig
@ -26,7 +22,7 @@ from configs.middleware.vdb.tidb_vector_config import TiDBVectorConfig
from configs.middleware.vdb.weaviate_config import WeaviateConfig
class StorageConfig(BaseSettings):
class StorageConfig(BaseModel):
STORAGE_TYPE: str = Field(
description='storage type,'
' default to `local`,'
@ -40,14 +36,14 @@ class StorageConfig(BaseSettings):
)
class VectorStoreConfig(BaseSettings):
class VectorStoreConfig(BaseModel):
VECTOR_STORE: Optional[str] = Field(
description='vector store type',
default=None,
)
class KeywordStoreConfig(BaseSettings):
class KeywordStoreConfig(BaseModel):
KEYWORD_STORE: str = Field(
description='keyword store type',
default='jieba',
@ -85,11 +81,6 @@ class DatabaseConfig:
default='',
)
DB_EXTRAS: str = Field(
description='db extras options. Example: keepalives_idle=60&keepalives=1',
default='',
)
SQLALCHEMY_DATABASE_URI_SCHEME: str = Field(
description='db uri scheme',
default='postgresql',
@ -98,14 +89,9 @@ class DatabaseConfig:
@computed_field
@property
def SQLALCHEMY_DATABASE_URI(self) -> str:
db_extras = (
f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}"
if self.DB_CHARSET
else self.DB_EXTRAS
).strip("&")
db_extras = f"?{db_extras}" if db_extras else ""
db_extras = f"?client_encoding={self.DB_CHARSET}" if self.DB_CHARSET else ""
return (f"{self.SQLALCHEMY_DATABASE_URI_SCHEME}://"
f"{quote_plus(self.DB_USERNAME)}:{quote_plus(self.DB_PASSWORD)}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_DATABASE}"
f"{self.DB_USERNAME}:{self.DB_PASSWORD}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_DATABASE}"
f"{db_extras}")
SQLALCHEMY_POOL_SIZE: NonNegativeInt = Field(
@ -128,7 +114,7 @@ class DatabaseConfig:
default=False,
)
SQLALCHEMY_ECHO: bool | str = Field(
SQLALCHEMY_ECHO: bool = Field(
description='whether to enable SqlAlchemy echo',
default=False,
)
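The SQLALCHEMY_DATABASE_URI property shown above is plain string assembly; a worked example with throwaway values (not real credentials) shows what the quote_plus/DB_EXTRAS variant produces:

from urllib.parse import quote_plus

DB_USERNAME, DB_PASSWORD = "postgres", "p@ss:word"        # illustrative values only
DB_HOST, DB_PORT, DB_DATABASE = "localhost", 5432, "dify"
DB_CHARSET, DB_EXTRAS = "utf8", "keepalives_idle=60"

db_extras = (f"{DB_EXTRAS}&client_encoding={DB_CHARSET}" if DB_CHARSET else DB_EXTRAS).strip("&")
db_extras = f"?{db_extras}" if db_extras else ""
uri = (f"postgresql://{quote_plus(DB_USERNAME)}:{quote_plus(DB_PASSWORD)}"
       f"@{DB_HOST}:{DB_PORT}/{DB_DATABASE}{db_extras}")
# postgresql://postgres:p%40ss%3Aword@localhost:5432/dify?keepalives_idle=60&client_encoding=utf8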
@ -186,10 +172,8 @@ class MiddlewareConfig(
# configs of vdb and vdb providers
VectorStoreConfig,
AnalyticdbConfig,
ChromaConfig,
MilvusConfig,
MyScaleConfig,
OpenSearchConfig,
OracleConfig,
PGVectorConfig,

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, NonNegativeInt, PositiveInt
class RedisConfig(BaseSettings):
class RedisConfig(BaseModel):
"""
Redis configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field
class AliyunOSSStorageConfig(BaseSettings):
class AliyunOSSStorageConfig(BaseModel):
"""
Aliyun storage configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field
class S3StorageConfig(BaseSettings):
class S3StorageConfig(BaseModel):
"""
S3 storage configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field
class AzureBlobStorageConfig(BaseSettings):
class AzureBlobStorageConfig(BaseModel):
"""
Azure Blob storage configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field
class GoogleCloudStorageConfig(BaseSettings):
class GoogleCloudStorageConfig(BaseModel):
"""
Google Cloud storage configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field
class OCIStorageConfig(BaseSettings):
class OCIStorageConfig(BaseModel):
"""
OCI storage configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field
class TencentCloudCOSStorageConfig(BaseSettings):
class TencentCloudCOSStorageConfig(BaseModel):
"""
Tencent Cloud COS storage configs
"""

View File

@ -1,44 +0,0 @@
from typing import Optional
from pydantic import BaseModel, Field
class AnalyticdbConfig(BaseModel):
"""
Configuration for connecting to AnalyticDB.
Refer to the following documentation for details on obtaining credentials:
https://www.alibabacloud.com/help/en/analyticdb-for-postgresql/getting-started/create-an-instance-instances-with-vector-engine-optimization-enabled
"""
ANALYTICDB_KEY_ID : Optional[str] = Field(
default=None,
description="The Access Key ID provided by Alibaba Cloud for authentication."
)
ANALYTICDB_KEY_SECRET : Optional[str] = Field(
default=None,
description="The Secret Access Key corresponding to the Access Key ID for secure access."
)
ANALYTICDB_REGION_ID : Optional[str] = Field(
default=None,
description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou')."
)
ANALYTICDB_INSTANCE_ID : Optional[str] = Field(
default=None,
description="The unique identifier of the AnalyticDB instance you want to connect to (e.g., 'gp-ab123456').."
)
ANALYTICDB_ACCOUNT : Optional[str] = Field(
default=None,
description="The account name used to log in to the AnalyticDB instance."
)
ANALYTICDB_PASSWORD : Optional[str] = Field(
default=None,
description="The password associated with the AnalyticDB account for authentication."
)
ANALYTICDB_NAMESPACE : Optional[str] = Field(
default=None,
description="The namespace within AnalyticDB for schema isolation."
)
ANALYTICDB_NAMESPACE_PASSWORD : Optional[str] = Field(
default=None,
description="The password for accessing the specified namespace within the AnalyticDB instance."
)

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, PositiveInt
class ChromaConfig(BaseSettings):
class ChromaConfig(BaseModel):
"""
Chroma configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, PositiveInt
class MilvusConfig(BaseSettings):
class MilvusConfig(BaseModel):
"""
Milvus configs
"""

View File

@ -1,38 +0,0 @@
from pydantic import BaseModel, Field, PositiveInt
class MyScaleConfig(BaseModel):
"""
MyScale configs
"""
MYSCALE_HOST: str = Field(
description='MyScale host',
default='localhost',
)
MYSCALE_PORT: PositiveInt = Field(
description='MyScale port',
default=8123,
)
MYSCALE_USER: str = Field(
description='MyScale user',
default='default',
)
MYSCALE_PASSWORD: str = Field(
description='MyScale password',
default='',
)
MYSCALE_DATABASE: str = Field(
description='MyScale database name',
default='default',
)
MYSCALE_FTS_PARAMS: str = Field(
description='MyScale fts index parameters',
default='',
)

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, PositiveInt
class OpenSearchConfig(BaseSettings):
class OpenSearchConfig(BaseModel):
"""
OpenSearch configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, PositiveInt
class OracleConfig(BaseSettings):
class OracleConfig(BaseModel):
"""
ORACLE configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, PositiveInt
class PGVectorConfig(BaseSettings):
class PGVectorConfig(BaseModel):
"""
PGVector configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, PositiveInt
class PGVectoRSConfig(BaseSettings):
class PGVectoRSConfig(BaseModel):
"""
PGVectoRS configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, NonNegativeInt, PositiveInt
class QdrantConfig(BaseSettings):
class QdrantConfig(BaseModel):
"""
Qdrant configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, PositiveInt
class RelytConfig(BaseSettings):
class RelytConfig(BaseModel):
"""
Relyt configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, PositiveInt
class TencentVectorDBConfig(BaseSettings):
class TencentVectorDBConfig(BaseModel):
"""
Tencent Vector configs
"""
@ -25,7 +24,7 @@ class TencentVectorDBConfig(BaseSettings):
)
TENCENT_VECTOR_DB_USERNAME: Optional[str] = Field(
description='Tencent Vector username',
description='Tencent Vector password',
default=None,
)
@ -39,12 +38,7 @@ class TencentVectorDBConfig(BaseSettings):
default=1,
)
TENCENT_VECTOR_DB_REPLICAS: NonNegativeInt = Field(
TENCENT_VECTOR_DB_REPLICAS: PositiveInt = Field(
description='Tencent Vector replicas',
default=2,
)
TENCENT_VECTOR_DB_DATABASE: Optional[str] = Field(
description='Tencent Vector Database',
default=None,
)

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, PositiveInt
class TiDBVectorConfig(BaseSettings):
class TiDBVectorConfig(BaseModel):
"""
TiDB Vector configs
"""

View File

@ -1,10 +1,9 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field, PositiveInt
class WeaviateConfig(BaseSettings):
class WeaviateConfig(BaseModel):
"""
Weaviate configs
"""

View File

@ -1,15 +1,14 @@
from pydantic import Field
from pydantic_settings import BaseSettings
from pydantic import BaseModel, Field
class PackagingInfo(BaseSettings):
class PackagingInfo(BaseModel):
"""
Packaging build information
"""
CURRENT_VERSION: str = Field(
description='Dify version',
default='0.7.0',
default='0.6.12-fix1',
)
COMMIT_SHA: str = Field(

View File

@ -1 +0,0 @@
HIDDEN_VALUE = "[__HIDDEN__]"

View File

@ -1,22 +1,20 @@
language_timezone_mapping = {
"en-US": "America/New_York",
"zh-Hans": "Asia/Shanghai",
"zh-Hant": "Asia/Taipei",
"pt-BR": "America/Sao_Paulo",
"es-ES": "Europe/Madrid",
"fr-FR": "Europe/Paris",
"de-DE": "Europe/Berlin",
"ja-JP": "Asia/Tokyo",
"ko-KR": "Asia/Seoul",
"ru-RU": "Europe/Moscow",
"it-IT": "Europe/Rome",
"uk-UA": "Europe/Kyiv",
"vi-VN": "Asia/Ho_Chi_Minh",
"ro-RO": "Europe/Bucharest",
"pl-PL": "Europe/Warsaw",
"hi-IN": "Asia/Kolkata",
"tr-TR": "Europe/Istanbul",
"fa-IR": "Asia/Tehran",
'en-US': 'America/New_York',
'zh-Hans': 'Asia/Shanghai',
'zh-Hant': 'Asia/Taipei',
'pt-BR': 'America/Sao_Paulo',
'es-ES': 'Europe/Madrid',
'fr-FR': 'Europe/Paris',
'de-DE': 'Europe/Berlin',
'ja-JP': 'Asia/Tokyo',
'ko-KR': 'Asia/Seoul',
'ru-RU': 'Europe/Moscow',
'it-IT': 'Europe/Rome',
'uk-UA': 'Europe/Kyiv',
'vi-VN': 'Asia/Ho_Chi_Minh',
'ro-RO': 'Europe/Bucharest',
'pl-PL': 'Europe/Warsaw',
'hi-IN': 'Asia/Kolkata'
}
languages = list(language_timezone_mapping.keys())
@ -26,5 +24,6 @@ def supported_language(lang):
if lang in languages:
return lang
error = "{lang} is not a valid language.".format(lang=lang)
error = ('{lang} is not a valid language.'
.format(lang=lang))
raise ValueError(error)
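A small usage sketch of the helpers in this file (assuming the module is imported; the values come from the mapping above):

lang = supported_language("ja-JP")          # returns "ja-JP" unchanged
tz = language_timezone_mapping.get(lang)    # "Asia/Tokyo", presumably the default timezone for that locale
# supported_language("xx-XX") raises ValueError("xx-XX is not a valid language.")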

View File

@ -5,79 +5,82 @@ from models.model import AppMode
default_app_templates = {
# workflow default mode
AppMode.WORKFLOW: {
"app": {
"mode": AppMode.WORKFLOW.value,
"enable_site": True,
"enable_api": True,
'app': {
'mode': AppMode.WORKFLOW.value,
'enable_site': True,
'enable_api': True
}
},
# completion default mode
AppMode.COMPLETION: {
"app": {
"mode": AppMode.COMPLETION.value,
"enable_site": True,
"enable_api": True,
'app': {
'mode': AppMode.COMPLETION.value,
'enable_site': True,
'enable_api': True
},
"model_config": {
"model": {
'model_config': {
'model': {
"provider": "openai",
"name": "gpt-4o",
"mode": "chat",
"completion_params": {},
"completion_params": {}
},
"user_input_form": json.dumps(
[
{
"paragraph": {
"label": "Query",
"variable": "query",
"required": True,
"default": "",
},
},
]
),
"pre_prompt": "{{query}}",
'user_input_form': json.dumps([
{
"paragraph": {
"label": "Query",
"variable": "query",
"required": True,
"default": ""
}
}
]),
'pre_prompt': '{{query}}'
},
},
# chat default mode
AppMode.CHAT: {
"app": {
"mode": AppMode.CHAT.value,
"enable_site": True,
"enable_api": True,
'app': {
'mode': AppMode.CHAT.value,
'enable_site': True,
'enable_api': True
},
"model_config": {
"model": {
'model_config': {
'model': {
"provider": "openai",
"name": "gpt-4o",
"mode": "chat",
"completion_params": {},
},
},
"completion_params": {}
}
}
},
# advanced-chat default mode
AppMode.ADVANCED_CHAT: {
"app": {
"mode": AppMode.ADVANCED_CHAT.value,
"enable_site": True,
"enable_api": True,
},
'app': {
'mode': AppMode.ADVANCED_CHAT.value,
'enable_site': True,
'enable_api': True
}
},
# agent-chat default mode
AppMode.AGENT_CHAT: {
"app": {
"mode": AppMode.AGENT_CHAT.value,
"enable_site": True,
"enable_api": True,
'app': {
'mode': AppMode.AGENT_CHAT.value,
'enable_site': True,
'enable_api': True
},
"model_config": {
"model": {
'model_config': {
'model': {
"provider": "openai",
"name": "gpt-4o",
"mode": "chat",
"completion_params": {},
},
},
},
"completion_params": {}
}
}
}
}
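A short sketch of how one of these template entries might be consumed (an editor's illustration; note that user_input_form is stored as a JSON string and has to be decoded):

import json
from models.model import AppMode

template = default_app_templates[AppMode.COMPLETION]
app_args = template["app"]                                   # {'mode': 'completion', 'enable_site': True, ...}
model_config = template.get("model_config", {})
form = json.loads(model_config.get("user_input_form", "[]"))
print(app_args["mode"], form[0]["paragraph"]["label"])       # completion Query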

File diff suppressed because one or more lines are too long

View File

@ -1,4 +0,0 @@
TTS_AUTO_PLAY_TIMEOUT = 5
# sleep 20 ms ( 40ms => 1280 byte audio file,20ms => 640 byte audio file)
TTS_AUTO_PLAY_YIELD_CPU_TIME = 0.02

View File

@ -1,7 +0,0 @@
from contextvars import ContextVar
from core.workflow.entities.variable_pool import VariablePool
tenant_id: ContextVar[str] = ContextVar("tenant_id")
workflow_variable_pool: ContextVar[VariablePool] = ContextVar("workflow_variable_pool")

View File

@ -17,7 +17,6 @@ from .app import (
audio,
completion,
conversation,
conversation_variables,
generator,
message,
model_config,
@ -31,7 +30,7 @@ from .app import (
)
# Import auth controllers
from .auth import activate, data_source_bearer_auth, data_source_oauth, forgot_password, login, oauth
from .auth import activate, data_source_bearer_auth, data_source_oauth, login, oauth
# Import billing controllers
from .billing import billing

View File

@ -23,7 +23,8 @@ class AnnotationReplyActionApi(Resource):
@account_initialization_required
@cloud_edition_billing_resource_check('annotation')
def post(self, app_id, action):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@ -46,7 +47,8 @@ class AppAnnotationSettingDetailApi(Resource):
@login_required
@account_initialization_required
def get(self, app_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@ -59,7 +61,8 @@ class AppAnnotationSettingUpdateApi(Resource):
@login_required
@account_initialization_required
def post(self, app_id, annotation_setting_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@ -79,7 +82,8 @@ class AnnotationReplyActionStatusApi(Resource):
@account_initialization_required
@cloud_edition_billing_resource_check('annotation')
def get(self, app_id, job_id, action):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
job_id = str(job_id)
@ -106,7 +110,8 @@ class AnnotationListApi(Resource):
@login_required
@account_initialization_required
def get(self, app_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
page = request.args.get('page', default=1, type=int)
@ -130,7 +135,8 @@ class AnnotationExportApi(Resource):
@login_required
@account_initialization_required
def get(self, app_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@ -148,7 +154,8 @@ class AnnotationCreateApi(Resource):
@cloud_edition_billing_resource_check('annotation')
@marshal_with(annotation_fields)
def post(self, app_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@ -167,7 +174,8 @@ class AnnotationUpdateDeleteApi(Resource):
@cloud_edition_billing_resource_check('annotation')
@marshal_with(annotation_fields)
def post(self, app_id, annotation_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@ -183,7 +191,8 @@ class AnnotationUpdateDeleteApi(Resource):
@login_required
@account_initialization_required
def delete(self, app_id, annotation_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@ -198,7 +207,8 @@ class AnnotationBatchImportApi(Resource):
@account_initialization_required
@cloud_edition_billing_resource_check('annotation')
def post(self, app_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
app_id = str(app_id)
@ -222,7 +232,8 @@ class AnnotationBatchImportStatusApi(Resource):
@account_initialization_required
@cloud_edition_billing_resource_check('annotation')
def get(self, app_id, job_id):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
job_id = str(job_id)
@ -248,7 +259,8 @@ class AnnotationHitHistoryListApi(Resource):
@login_required
@account_initialization_required
def get(self, app_id, annotation_id):
if not current_user.is_editor:
# The role of the current user in the table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
page = request.args.get('page', default=1, type=int)
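Every handler in this file repeats the same admin-or-owner check; a hedged sketch of how it could be factored into a decorator (an editor's illustration, not code from this repo):

from functools import wraps
from flask_login import current_user
from werkzeug.exceptions import Forbidden

def admin_or_owner_required(view):
    # Reject the request unless the current account is an admin or owner of the workspace.
    @wraps(view)
    def wrapper(*args, **kwargs):
        if not current_user.is_admin_or_owner:
            raise Forbidden()
        return view(*args, **kwargs)
    return wrapper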

View File

@ -15,7 +15,6 @@ from fields.app_fields import (
app_pagination_fields,
)
from libs.login import login_required
from services.app_dsl_service import AppDslService
from services.app_service import AppService
ALLOW_CREATE_APP_MODES = ['chat', 'agent-chat', 'advanced-chat', 'workflow', 'completion']
@ -61,7 +60,6 @@ class AppListApi(Resource):
parser.add_argument('name', type=str, required=True, location='json')
parser.add_argument('description', type=str, location='json')
parser.add_argument('mode', type=str, choices=ALLOW_CREATE_APP_MODES, location='json')
parser.add_argument('icon_type', type=str, location='json')
parser.add_argument('icon', type=str, location='json')
parser.add_argument('icon_background', type=str, location='json')
args = parser.parse_args()
@ -95,47 +93,12 @@ class AppImportApi(Resource):
parser.add_argument('data', type=str, required=True, nullable=False, location='json')
parser.add_argument('name', type=str, location='json')
parser.add_argument('description', type=str, location='json')
parser.add_argument('icon_type', type=str, location='json')
parser.add_argument('icon', type=str, location='json')
parser.add_argument('icon_background', type=str, location='json')
args = parser.parse_args()
app = AppDslService.import_and_create_new_app(
tenant_id=current_user.current_tenant_id,
data=args['data'],
args=args,
account=current_user
)
return app, 201
class AppImportFromUrlApi(Resource):
@setup_required
@login_required
@account_initialization_required
@marshal_with(app_detail_fields_with_site)
@cloud_edition_billing_resource_check('apps')
def post(self):
"""Import app from url"""
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('url', type=str, required=True, nullable=False, location='json')
parser.add_argument('name', type=str, location='json')
parser.add_argument('description', type=str, location='json')
parser.add_argument('icon', type=str, location='json')
parser.add_argument('icon_background', type=str, location='json')
args = parser.parse_args()
app = AppDslService.import_and_create_new_app_from_url(
tenant_id=current_user.current_tenant_id,
url=args['url'],
args=args,
account=current_user
)
app_service = AppService()
app = app_service.import_app(current_user.current_tenant_id, args['data'], args, current_user)
return app, 201
@ -169,10 +132,8 @@ class AppApi(Resource):
parser = reqparse.RequestParser()
parser.add_argument('name', type=str, required=True, nullable=False, location='json')
parser.add_argument('description', type=str, location='json')
parser.add_argument('icon_type', type=str, location='json')
parser.add_argument('icon', type=str, location='json')
parser.add_argument('icon_background', type=str, location='json')
parser.add_argument('max_active_requests', type=int, location='json')
args = parser.parse_args()
app_service = AppService()
@ -211,18 +172,13 @@ class AppCopyApi(Resource):
parser = reqparse.RequestParser()
parser.add_argument('name', type=str, location='json')
parser.add_argument('description', type=str, location='json')
parser.add_argument('icon_type', type=str, location='json')
parser.add_argument('icon', type=str, location='json')
parser.add_argument('icon_background', type=str, location='json')
args = parser.parse_args()
data = AppDslService.export_dsl(app_model=app_model, include_secret=True)
app = AppDslService.import_and_create_new_app(
tenant_id=current_user.current_tenant_id,
data=data,
args=args,
account=current_user
)
app_service = AppService()
data = app_service.export_app(app_model)
app = app_service.import_app(current_user.current_tenant_id, data, args, current_user)
return app, 201
@ -238,13 +194,10 @@ class AppExportApi(Resource):
if not current_user.is_editor:
raise Forbidden()
# Add include_secret params
parser = reqparse.RequestParser()
parser.add_argument('include_secret', type=inputs.boolean, default=False, location='args')
args = parser.parse_args()
app_service = AppService()
return {
"data": AppDslService.export_dsl(app_model=app_model, include_secret=args['include_secret'])
"data": app_service.export_app(app_model)
}
@ -368,7 +321,6 @@ class AppTraceApi(Resource):
api.add_resource(AppListApi, '/apps')
api.add_resource(AppImportApi, '/apps/import')
api.add_resource(AppImportFromUrlApi, '/apps/import/url')
api.add_resource(AppApi, '/apps/<uuid:app_id>')
api.add_resource(AppCopyApi, '/apps/<uuid:app_id>/copy')
api.add_resource(AppExportApi, '/apps/<uuid:app_id>/export')

View File

@ -81,36 +81,15 @@ class ChatMessageTextApi(Resource):
@account_initialization_required
@get_app_model
def post(self, app_model):
from werkzeug.exceptions import InternalServerError
try:
parser = reqparse.RequestParser()
parser.add_argument('message_id', type=str, location='json')
parser.add_argument('text', type=str, location='json')
parser.add_argument('voice', type=str, location='json')
parser.add_argument('streaming', type=bool, location='json')
args = parser.parse_args()
message_id = args.get('message_id', None)
text = args.get('text', None)
if (app_model.mode in [AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value]
and app_model.workflow
and app_model.workflow.features_dict):
text_to_speech = app_model.workflow.features_dict.get('text_to_speech')
voice = args.get('voice') if args.get('voice') else text_to_speech.get('voice')
else:
try:
voice = args.get('voice') if args.get('voice') else app_model.app_model_config.text_to_speech_dict.get(
'voice')
except Exception:
voice = None
response = AudioService.transcript_tts(
app_model=app_model,
text=text,
message_id=message_id,
voice=voice
text=request.form['text'],
voice=request.form['voice'],
streaming=False
)
return response
return {'data': response.data.decode('latin1')}
except services.errors.app_model_config.AppModelConfigBrokenError:
logging.exception("App model config broken.")
raise AppUnavailableError()

View File

@ -19,12 +19,7 @@ from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom
from core.errors.error import (
AppInvokeQuotaExceededError,
ModelCurrentlyNotSupportError,
ProviderTokenNotInitError,
QuotaExceededError,
)
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError
from libs import helper
from libs.helper import uuid_value
@ -80,7 +75,7 @@ class CompletionMessageApi(Resource):
raise ProviderModelCurrentlyNotSupportError()
except InvokeError as e:
raise CompletionRequestError(e.description)
except (ValueError, AppInvokeQuotaExceededError) as e:
except ValueError as e:
raise e
except Exception as e:
logging.exception("internal server error.")
@ -146,7 +141,7 @@ class ChatMessageApi(Resource):
raise ProviderModelCurrentlyNotSupportError()
except InvokeError as e:
raise CompletionRequestError(e.description)
except (ValueError, AppInvokeQuotaExceededError) as e:
except ValueError as e:
raise e
except Exception as e:
logging.exception("internal server error.")

View File

@ -22,7 +22,7 @@ from fields.conversation_fields import (
)
from libs.helper import datetime_string
from libs.login import login_required
from models.model import AppMode, Conversation, EndUser, Message, MessageAnnotation
from models.model import AppMode, Conversation, Message, MessageAnnotation
class CompletionConversationApi(Resource):
@ -33,7 +33,7 @@ class CompletionConversationApi(Resource):
@get_app_model(mode=AppMode.COMPLETION)
@marshal_with(conversation_pagination_fields)
def get(self, app_model):
if not current_user.is_editor:
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('keyword', type=str, location='args')
@ -108,7 +108,7 @@ class CompletionConversationDetailApi(Resource):
@get_app_model(mode=AppMode.COMPLETION)
@marshal_with(conversation_message_detail_fields)
def get(self, app_model, conversation_id):
if not current_user.is_editor:
if not current_user.is_admin_or_owner:
raise Forbidden()
conversation_id = str(conversation_id)
@ -119,7 +119,7 @@ class CompletionConversationDetailApi(Resource):
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
def delete(self, app_model, conversation_id):
if not current_user.is_editor:
if not current_user.is_admin_or_owner:
raise Forbidden()
conversation_id = str(conversation_id)
@ -143,7 +143,7 @@ class ChatConversationApi(Resource):
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@marshal_with(conversation_with_summary_pagination_fields)
def get(self, app_model):
if not current_user.is_editor:
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument('keyword', type=str, location='args')
@ -156,31 +156,19 @@ class ChatConversationApi(Resource):
parser.add_argument('limit', type=int_range(1, 100), required=False, default=20, location='args')
args = parser.parse_args()
subquery = (
db.session.query(
Conversation.id.label('conversation_id'),
EndUser.session_id.label('from_end_user_session_id')
)
.outerjoin(EndUser, Conversation.from_end_user_id == EndUser.id)
.subquery()
)
query = db.select(Conversation).where(Conversation.app_id == app_model.id)
if args['keyword']:
keyword_filter = '%{}%'.format(args['keyword'])
query = query.join(
Message, Message.conversation_id == Conversation.id,
).join(
subquery, subquery.c.conversation_id == Conversation.id
Message, Message.conversation_id == Conversation.id
).filter(
or_(
Message.query.ilike(keyword_filter),
Message.answer.ilike(keyword_filter),
Conversation.name.ilike(keyword_filter),
Conversation.introduction.ilike(keyword_filter),
subquery.c.from_end_user_session_id.ilike(keyword_filter)
Message.query.ilike('%{}%'.format(args['keyword'])),
Message.answer.ilike('%{}%'.format(args['keyword'])),
Conversation.name.ilike('%{}%'.format(args['keyword'])),
Conversation.introduction.ilike('%{}%'.format(args['keyword'])),
),
)
account = current_user
@ -245,7 +233,7 @@ class ChatConversationDetailApi(Resource):
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@marshal_with(conversation_detail_fields)
def get(self, app_model, conversation_id):
if not current_user.is_editor:
if not current_user.is_admin_or_owner:
raise Forbidden()
conversation_id = str(conversation_id)
@ -256,7 +244,7 @@ class ChatConversationDetailApi(Resource):
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@account_initialization_required
def delete(self, app_model, conversation_id):
if not current_user.is_editor:
if not current_user.is_admin_or_owner:
raise Forbidden()
conversation_id = str(conversation_id)
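For reference, a minimal sketch of the keyword filter on the left-hand side of ChatConversationApi.get above: the extra subquery maps each conversation to its end user's session_id, so keyword search also matches anonymous session identifiers. Model and column names follow the diff; the `db` handle is assumed to be a Flask-SQLAlchemy instance and the helper below is illustrative, not Dify's actual code.

```python
# Sketch of the keyword filter with the end-user session subquery.
from sqlalchemy import or_

def filter_conversations(db, Conversation, Message, EndUser, app_id, keyword):
    subquery = (
        db.session.query(
            Conversation.id.label('conversation_id'),
            EndUser.session_id.label('from_end_user_session_id'),
        )
        .outerjoin(EndUser, Conversation.from_end_user_id == EndUser.id)
        .subquery()
    )

    query = db.select(Conversation).where(Conversation.app_id == app_id)
    if keyword:
        keyword_filter = '%{}%'.format(keyword)
        query = query.join(
            Message, Message.conversation_id == Conversation.id,
        ).join(
            subquery, subquery.c.conversation_id == Conversation.id,
        ).filter(
            or_(
                Message.query.ilike(keyword_filter),  # 'query' is a column on Message
                Message.answer.ilike(keyword_filter),
                Conversation.name.ilike(keyword_filter),
                Conversation.introduction.ilike(keyword_filter),
                subquery.c.from_end_user_session_id.ilike(keyword_filter),
            ),
        )
    return query
```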

View File

@ -1,61 +0,0 @@
from flask_restful import Resource, marshal_with, reqparse
from sqlalchemy import select
from sqlalchemy.orm import Session
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from extensions.ext_database import db
from fields.conversation_variable_fields import paginated_conversation_variable_fields
from libs.login import login_required
from models import ConversationVariable
from models.model import AppMode
class ConversationVariablesApi(Resource):
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=AppMode.ADVANCED_CHAT)
@marshal_with(paginated_conversation_variable_fields)
def get(self, app_model):
parser = reqparse.RequestParser()
parser.add_argument('conversation_id', type=str, location='args')
args = parser.parse_args()
stmt = (
select(ConversationVariable)
.where(ConversationVariable.app_id == app_model.id)
.order_by(ConversationVariable.created_at)
)
if args['conversation_id']:
stmt = stmt.where(ConversationVariable.conversation_id == args['conversation_id'])
else:
raise ValueError('conversation_id is required')
# NOTE: This is a temporary solution to avoid performance issues.
page = 1
page_size = 100
stmt = stmt.limit(page_size).offset((page - 1) * page_size)
with Session(db.engine) as session:
rows = session.scalars(stmt).all()
return {
'page': page,
'limit': page_size,
'total': len(rows),
'has_more': False,
'data': [
{
'created_at': row.created_at,
'updated_at': row.updated_at,
**row.to_variable().model_dump(),
}
for row in rows
],
}
api.add_resource(ConversationVariablesApi, '/apps/<uuid:app_id>/conversation-variables')

View File

@ -1,5 +1,3 @@
import os
from flask_login import current_user
from flask_restful import Resource, reqparse
@ -24,21 +22,17 @@ class RuleGenerateApi(Resource):
@account_initialization_required
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('instruction', type=str, required=True, nullable=False, location='json')
parser.add_argument('model_config', type=dict, required=True, nullable=False, location='json')
parser.add_argument('no_variable', type=bool, required=True, default=False, location='json')
parser.add_argument('audiences', type=str, required=True, nullable=False, location='json')
parser.add_argument('hoping_to_solve', type=str, required=True, nullable=False, location='json')
args = parser.parse_args()
account = current_user
PROMPT_GENERATION_MAX_TOKENS = int(os.getenv('PROMPT_GENERATION_MAX_TOKENS', '512'))
try:
rules = LLMGenerator.generate_rule_config(
tenant_id=account.current_tenant_id,
instruction=args['instruction'],
model_config=args['model_config'],
no_variable=args['no_variable'],
rule_config_max_tokens=PROMPT_GENERATION_MAX_TOKENS
account.current_tenant_id,
args['audiences'],
args['hoping_to_solve']
)
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)

View File

@ -149,7 +149,8 @@ class MessageAnnotationApi(Resource):
@get_app_model
@marshal_with(annotation_fields)
def post(self, app_model):
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser()

View File

@ -16,7 +16,6 @@ from models.model import Site
def parse_app_site_args():
parser = reqparse.RequestParser()
parser.add_argument('title', type=str, required=False, location='json')
parser.add_argument('icon_type', type=str, required=False, location='json')
parser.add_argument('icon', type=str, required=False, location='json')
parser.add_argument('icon_background', type=str, required=False, location='json')
parser.add_argument('description', type=str, required=False, location='json')
@ -54,7 +53,6 @@ class AppSite(Resource):
for attr_name in [
'title',
'icon_type',
'icon',
'icon_background',
'description',

View File

@ -281,7 +281,7 @@ class UserSatisfactionRateStatistic(Resource):
SELECT date(DATE_TRUNC('day', m.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
COUNT(m.id) as message_count, COUNT(mf.id) as feedback_count
FROM messages m
LEFT JOIN message_feedbacks mf on mf.message_id=m.id and mf.rating='like'
LEFT JOIN message_feedbacks mf on mf.message_id=m.id
WHERE m.app_id = :app_id
'''
arg_dict = {'tz': account.timezone, 'app_id': app_model.id}

View File

@ -13,15 +13,12 @@ from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom
from core.app.segments import factory
from core.errors.error import AppInvokeQuotaExceededError
from fields.workflow_fields import workflow_fields
from fields.workflow_run_fields import workflow_run_node_execution_fields
from libs import helper
from libs.helper import TimestampField, uuid_value
from libs.login import current_user, login_required
from models.model import App, AppMode
from services.app_dsl_service import AppDslService
from services.app_generate_service import AppGenerateService
from services.errors.app import WorkflowHashNotEqualError
from services.workflow_service import WorkflowService
@ -42,7 +39,7 @@ class DraftWorkflowApi(Resource):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
# fetch draft workflow by app_model
workflow_service = WorkflowService()
workflow = workflow_service.get_draft_workflow(app_model=app_model)
@ -65,16 +62,13 @@ class DraftWorkflowApi(Resource):
if not current_user.is_editor:
raise Forbidden()
content_type = request.headers.get('Content-Type', '')
content_type = request.headers.get('Content-Type')
if 'application/json' in content_type:
parser = reqparse.RequestParser()
parser.add_argument('graph', type=dict, required=True, nullable=False, location='json')
parser.add_argument('features', type=dict, required=True, nullable=False, location='json')
parser.add_argument('hash', type=str, required=False, location='json')
# TODO: set this to required=True after frontend is updated
parser.add_argument('environment_variables', type=list, required=False, location='json')
parser.add_argument('conversation_variables', type=list, required=False, location='json')
args = parser.parse_args()
elif 'text/plain' in content_type:
try:
@ -88,9 +82,7 @@ class DraftWorkflowApi(Resource):
args = {
'graph': data.get('graph'),
'features': data.get('features'),
'hash': data.get('hash'),
'environment_variables': data.get('environment_variables'),
'conversation_variables': data.get('conversation_variables'),
'hash': data.get('hash')
}
except json.JSONDecodeError:
return {'message': 'Invalid JSON data'}, 400
@ -100,18 +92,12 @@ class DraftWorkflowApi(Resource):
workflow_service = WorkflowService()
try:
environment_variables_list = args.get('environment_variables') or []
environment_variables = [factory.build_variable_from_mapping(obj) for obj in environment_variables_list]
conversation_variables_list = args.get('conversation_variables') or []
conversation_variables = [factory.build_variable_from_mapping(obj) for obj in conversation_variables_list]
workflow = workflow_service.sync_draft_workflow(
app_model=app_model,
graph=args['graph'],
features=args['features'],
graph=args.get('graph'),
features=args.get('features'),
unique_hash=args.get('hash'),
account=current_user,
environment_variables=environment_variables,
conversation_variables=conversation_variables,
account=current_user
)
except WorkflowHashNotEqualError:
raise DraftWorkflowNotSync()
@ -141,7 +127,8 @@ class DraftWorkflowImportApi(Resource):
parser.add_argument('data', type=str, required=True, nullable=False, location='json')
args = parser.parse_args()
workflow = AppDslService.import_and_overwrite_workflow(
workflow_service = WorkflowService()
workflow = workflow_service.import_draft_workflow(
app_model=app_model,
data=args['data'],
account=current_user
@ -292,7 +279,7 @@ class DraftWorkflowRunApi(Resource):
)
return helper.compact_generate_response(response)
except (ValueError, AppInvokeQuotaExceededError) as e:
except ValueError as e:
raise e
except Exception as e:
logging.exception("internal server error.")
@ -459,7 +446,6 @@ class ConvertToWorkflowApi(Resource):
if request.data:
parser = reqparse.RequestParser()
parser.add_argument('name', type=str, required=False, nullable=True, location='json')
parser.add_argument('icon_type', type=str, required=False, nullable=True, location='json')
parser.add_argument('icon', type=str, required=False, nullable=True, location='json')
parser.add_argument('icon_background', type=str, required=False, nullable=True, location='json')
args = parser.parse_args()
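A minimal sketch of the left-hand DraftWorkflowApi.post flow above: the Content-Type header is defaulted to an empty string (a missing header would otherwise make the `in` check raise TypeError), a text/plain body is parsed as JSON by hand, and the variable mappings are rebuilt through a factory before the draft is synced. `build_variable_from_mapping` below is a stand-in stub for the real core.app.segments.factory call, and JSON parsing replaces reqparse for brevity.

```python
# Sketch of payload parsing and variable rebuilding for the draft workflow sync.
import json

def build_variable_from_mapping(mapping: dict) -> dict:
    # Stand-in for core.app.segments.factory.build_variable_from_mapping.
    return mapping

def parse_draft_payload(request):
    content_type = request.headers.get('Content-Type', '')  # default avoids TypeError
    if 'application/json' in content_type:
        args = request.get_json()
    elif 'text/plain' in content_type:
        data = json.loads(request.get_data(as_text=True))
        args = {
            'graph': data.get('graph'),
            'features': data.get('features'),
            'hash': data.get('hash'),
            'environment_variables': data.get('environment_variables'),
            'conversation_variables': data.get('conversation_variables'),
        }
    else:
        raise ValueError('unsupported Content-Type')

    # Variables arrive as plain dicts and are rebuilt into typed objects.
    env_vars = [build_variable_from_mapping(m) for m in (args.get('environment_variables') or [])]
    conv_vars = [build_variable_from_mapping(m) for m in (args.get('conversation_variables') or [])]
    return args, env_vars, conv_vars
```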

View File

@ -6,7 +6,6 @@ from flask_login import current_user
from flask_restful import Resource
from werkzeug.exceptions import Forbidden
from configs import dify_config
from controllers.console import api
from libs.login import login_required
from libs.oauth_data_source import NotionOAuth
@ -17,9 +16,11 @@ from ..wraps import account_initialization_required
def get_oauth_providers():
with current_app.app_context():
notion_oauth = NotionOAuth(client_id=dify_config.NOTION_CLIENT_ID,
client_secret=dify_config.NOTION_CLIENT_SECRET,
redirect_uri=dify_config.CONSOLE_API_URL + '/console/api/oauth/data-source/callback/notion')
notion_oauth = NotionOAuth(client_id=current_app.config.get('NOTION_CLIENT_ID'),
client_secret=current_app.config.get(
'NOTION_CLIENT_SECRET'),
redirect_uri=current_app.config.get(
'CONSOLE_API_URL') + '/console/api/oauth/data-source/callback/notion')
OAUTH_PROVIDERS = {
'notion': notion_oauth
@ -38,10 +39,8 @@ class OAuthDataSource(Resource):
print(vars(oauth_provider))
if not oauth_provider:
return {'error': 'Invalid provider'}, 400
if dify_config.NOTION_INTEGRATION_TYPE == 'internal':
internal_secret = dify_config.NOTION_INTERNAL_SECRET
if not internal_secret:
return {'error': 'Internal secret is not set'},
if current_app.config.get('NOTION_INTEGRATION_TYPE') == 'internal':
internal_secret = current_app.config.get('NOTION_INTERNAL_SECRET')
oauth_provider.save_internal_access_token(internal_secret)
return { 'data': '' }
else:
@ -61,13 +60,13 @@ class OAuthDataSourceCallback(Resource):
if 'code' in request.args:
code = request.args.get('code')
return redirect(f'{dify_config.CONSOLE_WEB_URL}?type=notion&code={code}')
return redirect(f'{current_app.config.get("CONSOLE_WEB_URL")}?type=notion&code={code}')
elif 'error' in request.args:
error = request.args.get('error')
return redirect(f'{dify_config.CONSOLE_WEB_URL}?type=notion&error={error}')
return redirect(f'{current_app.config.get("CONSOLE_WEB_URL")}?type=notion&error={error}')
else:
return redirect(f'{dify_config.CONSOLE_WEB_URL}?type=notion&error=Access denied')
return redirect(f'{current_app.config.get("CONSOLE_WEB_URL")}?type=notion&error=Access denied')
class OAuthDataSourceBinding(Resource):
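Several hunks in this diff replace current_app.config lookups with the typed dify_config object. A minimal sketch of the two access styles, using a hypothetical settings class (a stand-in, not Dify's actual configs module) and an illustrative CONSOLE_API_URL value:

```python
# Two ways to read the same setting. AppConfig is a hypothetical stand-in for
# Dify's configs module; the Flask style reads the running app's config instead.
from flask import Flask, current_app
from pydantic_settings import BaseSettings

class AppConfig(BaseSettings):
    NOTION_CLIENT_ID: str = ''
    CONSOLE_API_URL: str = 'http://localhost:5001'

dify_config = AppConfig()

# Typed-settings style (left-hand side of the diff): attribute access,
# validated and defaulted when the settings object is constructed.
redirect_uri = dify_config.CONSOLE_API_URL + '/console/api/oauth/data-source/callback/notion'

# Flask style (right-hand side): dict access on app.config, only available
# inside an application context.
app = Flask(__name__)
app.config['CONSOLE_API_URL'] = 'http://localhost:5001'
with app.app_context():
    redirect_uri_flask = (
        current_app.config.get('CONSOLE_API_URL')
        + '/console/api/oauth/data-source/callback/notion'
    )
```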

View File

@ -5,28 +5,3 @@ class ApiKeyAuthFailedError(BaseHTTPException):
error_code = 'auth_failed'
description = "{message}"
code = 500
class InvalidEmailError(BaseHTTPException):
error_code = 'invalid_email'
description = "The email address is not valid."
code = 400
class PasswordMismatchError(BaseHTTPException):
error_code = 'password_mismatch'
description = "The passwords do not match."
code = 400
class InvalidTokenError(BaseHTTPException):
error_code = 'invalid_or_expired_token'
description = "The token is invalid or has expired."
code = 400
class PasswordResetRateLimitExceededError(BaseHTTPException):
error_code = 'password_reset_rate_limit_exceeded'
description = "Password reset rate limit exceeded. Try again later."
code = 429

View File

@ -1,107 +0,0 @@
import base64
import logging
import secrets
from flask_restful import Resource, reqparse
from controllers.console import api
from controllers.console.auth.error import (
InvalidEmailError,
InvalidTokenError,
PasswordMismatchError,
PasswordResetRateLimitExceededError,
)
from controllers.console.setup import setup_required
from extensions.ext_database import db
from libs.helper import email as email_validate
from libs.password import hash_password, valid_password
from models.account import Account
from services.account_service import AccountService
from services.errors.account import RateLimitExceededError
class ForgotPasswordSendEmailApi(Resource):
@setup_required
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('email', type=str, required=True, location='json')
args = parser.parse_args()
email = args['email']
if not email_validate(email):
raise InvalidEmailError()
account = Account.query.filter_by(email=email).first()
if account:
try:
AccountService.send_reset_password_email(account=account)
except RateLimitExceededError:
logging.warning(f"Rate limit exceeded for email: {account.email}")
raise PasswordResetRateLimitExceededError()
else:
# Return success to avoid revealing email registration status
logging.warning(f"Attempt to reset password for unregistered email: {email}")
return {"result": "success"}
class ForgotPasswordCheckApi(Resource):
@setup_required
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('token', type=str, required=True, nullable=False, location='json')
args = parser.parse_args()
token = args['token']
reset_data = AccountService.get_reset_password_data(token)
if reset_data is None:
return {'is_valid': False, 'email': None}
return {'is_valid': True, 'email': reset_data.get('email')}
class ForgotPasswordResetApi(Resource):
@setup_required
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('token', type=str, required=True, nullable=False, location='json')
parser.add_argument('new_password', type=valid_password, required=True, nullable=False, location='json')
parser.add_argument('password_confirm', type=valid_password, required=True, nullable=False, location='json')
args = parser.parse_args()
new_password = args['new_password']
password_confirm = args['password_confirm']
if str(new_password).strip() != str(password_confirm).strip():
raise PasswordMismatchError()
token = args['token']
reset_data = AccountService.get_reset_password_data(token)
if reset_data is None:
raise InvalidTokenError()
AccountService.revoke_reset_password_token(token)
salt = secrets.token_bytes(16)
base64_salt = base64.b64encode(salt).decode()
password_hashed = hash_password(new_password, salt)
base64_password_hashed = base64.b64encode(password_hashed).decode()
account = Account.query.filter_by(email=reset_data.get('email')).first()
account.password = base64_password_hashed
account.password_salt = base64_salt
db.session.commit()
return {'result': 'success'}
api.add_resource(ForgotPasswordSendEmailApi, '/forgot-password')
api.add_resource(ForgotPasswordCheckApi, '/forgot-password/validity')
api.add_resource(ForgotPasswordResetApi, '/forgot-password/resets')
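The removed ForgotPasswordResetApi above stores a fresh random salt and the hashed password, both base64-encoded. hash_password is a Dify helper whose algorithm is not shown in this diff, so the sketch below substitutes PBKDF2-HMAC-SHA256 from the standard library as an assumed stand-in:

```python
# Sketch of the salt + hash + base64 storage step from ForgotPasswordResetApi.
# PBKDF2-HMAC-SHA256 is an assumed stand-in for libs.password.hash_password.
import base64
import hashlib
import secrets

def hash_for_storage(new_password: str) -> tuple[str, str]:
    salt = secrets.token_bytes(16)
    password_hashed = hashlib.pbkdf2_hmac('sha256', new_password.encode('utf-8'), salt, 10_000)
    return (
        base64.b64encode(password_hashed).decode(),  # stored as account.password
        base64.b64encode(salt).decode(),             # stored as account.password_salt
    )

if __name__ == '__main__':
    hashed, salt = hash_for_storage('new-secret-password')
    print(hashed, salt)
```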

View File

@ -1,7 +1,7 @@
from typing import cast
import flask_login
from flask import request
from flask import current_app, request
from flask_restful import Resource, reqparse
import services
@ -56,14 +56,14 @@ class LogoutApi(Resource):
class ResetPasswordApi(Resource):
@setup_required
def get(self):
# parser = reqparse.RequestParser()
# parser.add_argument('email', type=email, required=True, location='json')
# args = parser.parse_args()
parser = reqparse.RequestParser()
parser.add_argument('email', type=email, required=True, location='json')
args = parser.parse_args()
# import mailchimp_transactional as MailchimpTransactional
# from mailchimp_transactional.api_client import ApiClientError
# account = {'email': args['email']}
account = {'email': args['email']}
# account = AccountService.get_by_email(args['email'])
# if account is None:
# raise ValueError('Email not found')
@ -71,28 +71,28 @@ class ResetPasswordApi(Resource):
# AccountService.update_password(account, new_password)
# todo: Send email
# MAILCHIMP_API_KEY = dify_config.MAILCHIMP_TRANSACTIONAL_API_KEY
MAILCHIMP_API_KEY = current_app.config['MAILCHIMP_TRANSACTIONAL_API_KEY']
# mailchimp = MailchimpTransactional(MAILCHIMP_API_KEY)
# message = {
# 'from_email': 'noreply@example.com',
# 'to': [{'email': account['email']}],
# 'subject': 'Reset your Dify password',
# 'html': """
# <p>Dear User,</p>
# <p>The Dify team has generated a new password for you, details as follows:</p>
# <p><strong>{new_password}</strong></p>
# <p>Please change your password to log in as soon as possible.</p>
# <p>Regards,</p>
# <p>The Dify Team</p>
# """
# }
message = {
'from_email': 'noreply@example.com',
'to': [{'email': account.email}],
'subject': 'Reset your Dify password',
'html': """
<p>Dear User,</p>
<p>The Dify team has generated a new password for you, details as follows:</p>
<p><strong>{new_password}</strong></p>
<p>Please change your password to log in as soon as possible.</p>
<p>Regards,</p>
<p>The Dify Team</p>
"""
}
# response = mailchimp.messages.send({
# 'message': message,
# # required for transactional email
# ' settings': {
# 'sandbox_mode': dify_config.MAILCHIMP_SANDBOX_MODE,
# 'sandbox_mode': current_app.config['MAILCHIMP_SANDBOX_MODE'],
# },
# })

View File

@ -6,7 +6,6 @@ import requests
from flask import current_app, redirect, request
from flask_restful import Resource
from configs import dify_config
from constants.languages import languages
from extensions.ext_database import db
from libs.helper import get_remote_ip
@ -19,24 +18,22 @@ from .. import api
def get_oauth_providers():
with current_app.app_context():
if not dify_config.GITHUB_CLIENT_ID or not dify_config.GITHUB_CLIENT_SECRET:
github_oauth = None
else:
github_oauth = GitHubOAuth(
client_id=dify_config.GITHUB_CLIENT_ID,
client_secret=dify_config.GITHUB_CLIENT_SECRET,
redirect_uri=dify_config.CONSOLE_API_URL + '/console/api/oauth/authorize/github',
)
if not dify_config.GOOGLE_CLIENT_ID or not dify_config.GOOGLE_CLIENT_SECRET:
google_oauth = None
else:
google_oauth = GoogleOAuth(
client_id=dify_config.GOOGLE_CLIENT_ID,
client_secret=dify_config.GOOGLE_CLIENT_SECRET,
redirect_uri=dify_config.CONSOLE_API_URL + '/console/api/oauth/authorize/google',
)
github_oauth = GitHubOAuth(client_id=current_app.config.get('GITHUB_CLIENT_ID'),
client_secret=current_app.config.get(
'GITHUB_CLIENT_SECRET'),
redirect_uri=current_app.config.get(
'CONSOLE_API_URL') + '/console/api/oauth/authorize/github')
OAUTH_PROVIDERS = {'github': github_oauth, 'google': google_oauth}
google_oauth = GoogleOAuth(client_id=current_app.config.get('GOOGLE_CLIENT_ID'),
client_secret=current_app.config.get(
'GOOGLE_CLIENT_SECRET'),
redirect_uri=current_app.config.get(
'CONSOLE_API_URL') + '/console/api/oauth/authorize/google')
OAUTH_PROVIDERS = {
'github': github_oauth,
'google': google_oauth
}
return OAUTH_PROVIDERS
@ -66,7 +63,8 @@ class OAuthCallback(Resource):
token = oauth_provider.get_access_token(code)
user_info = oauth_provider.get_user_info(token)
except requests.exceptions.HTTPError as e:
logging.exception(f'An error occurred during the OAuth process with {provider}: {e.response.text}')
logging.exception(
f"An error occurred during the OAuth process with {provider}: {e.response.text}")
return {'error': 'OAuth process failed'}, 400
account = _generate_account(provider, user_info)
@ -83,7 +81,7 @@ class OAuthCallback(Resource):
token = AccountService.login(account, ip_address=get_remote_ip(request))
return redirect(f'{dify_config.CONSOLE_WEB_URL}?console_token={token}')
return redirect(f'{current_app.config.get("CONSOLE_WEB_URL")}?console_token={token}')
def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Optional[Account]:
@ -103,7 +101,11 @@ def _generate_account(provider: str, user_info: OAuthUserInfo):
# Create account
account_name = user_info.name if user_info.name else 'Dify'
account = RegisterService.register(
email=user_info.email, name=account_name, password=None, open_id=user_info.id, provider=provider
email=user_info.email,
name=account_name,
password=None,
open_id=user_info.id,
provider=provider
)
# Set interface language

View File

@ -1,11 +1,10 @@
import flask_restful
from flask import request
from flask import current_app, request
from flask_login import current_user
from flask_restful import Resource, marshal, marshal_with, reqparse
from werkzeug.exceptions import Forbidden, NotFound
import services
from configs import dify_config
from controllers.console import api
from controllers.console.apikey import api_key_fields, api_key_list
from controllers.console.app.error import ProviderNotInitializeError
@ -26,7 +25,7 @@ from fields.document_fields import document_status_fields
from libs.login import login_required
from models.dataset import Dataset, Document, DocumentSegment
from models.model import ApiToken, UploadFile
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
from services.dataset_service import DatasetService, DocumentService
def _validate_name(name):
@ -86,12 +85,6 @@ class DatasetListApi(Resource):
else:
item['embedding_available'] = True
if item.get('permission') == 'partial_members':
part_users_list = DatasetPermissionService.get_dataset_partial_member_list(item['id'])
item.update({'partial_member_list': part_users_list})
else:
item.update({'partial_member_list': []})
response = {
'data': data,
'has_more': len(datasets) == limit,
@ -115,8 +108,8 @@ class DatasetListApi(Resource):
help='Invalid indexing technique.')
args = parser.parse_args()
# The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
if not current_user.is_dataset_editor:
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
try:
@ -147,10 +140,6 @@ class DatasetApi(Resource):
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
data = marshal(dataset, dataset_detail_fields)
if data.get('permission') == 'partial_members':
part_users_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
data.update({'partial_member_list': part_users_list})
# check embedding setting
provider_manager = ProviderManager()
configurations = provider_manager.get_configurations(
@ -174,11 +163,6 @@ class DatasetApi(Resource):
data['embedding_available'] = False
else:
data['embedding_available'] = True
if data.get('permission') == 'partial_members':
part_users_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
data.update({'partial_member_list': part_users_list})
return data, 200
@setup_required
@ -189,6 +173,8 @@ class DatasetApi(Resource):
dataset = DatasetService.get_dataset(dataset_id_str)
if dataset is None:
raise NotFound("Dataset not found.")
# check user's model setting
DatasetService.check_dataset_model_setting(dataset)
parser = reqparse.RequestParser()
parser.add_argument('name', nullable=False,
@ -202,28 +188,17 @@ class DatasetApi(Resource):
nullable=True,
help='Invalid indexing technique.')
parser.add_argument('permission', type=str, location='json', choices=(
'only_me', 'all_team_members', 'partial_members'), help='Invalid permission.'
)
'only_me', 'all_team_members'), help='Invalid permission.')
parser.add_argument('embedding_model', type=str,
location='json', help='Invalid embedding model.')
parser.add_argument('embedding_model_provider', type=str,
location='json', help='Invalid embedding model provider.')
parser.add_argument('retrieval_model', type=dict, location='json', help='Invalid retrieval model.')
parser.add_argument('partial_member_list', type=list, location='json', help='Invalid parent user list.')
args = parser.parse_args()
data = request.get_json()
# check embedding model setting
if data.get('indexing_technique') == 'high_quality':
DatasetService.check_embedding_model_setting(dataset.tenant_id,
data.get('embedding_model_provider'),
data.get('embedding_model')
)
# The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
DatasetPermissionService.check_permission(
current_user, dataset, data.get('permission'), data.get('partial_member_list')
)
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
dataset = DatasetService.update_dataset(
dataset_id_str, args, current_user)
@ -231,21 +206,7 @@ class DatasetApi(Resource):
if dataset is None:
raise NotFound("Dataset not found.")
result_data = marshal(dataset, dataset_detail_fields)
tenant_id = current_user.current_tenant_id
if data.get('partial_member_list') and data.get('permission') == 'partial_members':
DatasetPermissionService.update_partial_member_list(
tenant_id, dataset_id_str, data.get('partial_member_list')
)
# clear partial member list when permission is only_me or all_team_members
elif data.get('permission') == 'only_me' or data.get('permission') == 'all_team_members':
DatasetPermissionService.clear_partial_member_list(dataset_id_str)
partial_member_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
result_data.update({'partial_member_list': partial_member_list})
return result_data, 200
return marshal(dataset, dataset_detail_fields), 200
@setup_required
@login_required
@ -254,12 +215,11 @@ class DatasetApi(Resource):
dataset_id_str = str(dataset_id)
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor or current_user.is_dataset_operator:
if not current_user.is_editor:
raise Forbidden()
try:
if DatasetService.delete_dataset(dataset_id_str, current_user):
DatasetPermissionService.clear_partial_member_list(dataset_id_str)
return {'result': 'success'}, 204
else:
raise NotFound("Dataset not found.")
@ -537,7 +497,7 @@ class DatasetApiBaseUrlApi(Resource):
@account_initialization_required
def get(self):
return {
'api_base_url': (dify_config.SERVICE_API_URL if dify_config.SERVICE_API_URL
'api_base_url': (current_app.config['SERVICE_API_URL'] if current_app.config['SERVICE_API_URL']
else request.host_url.rstrip('/')) + '/v1'
}
@ -547,20 +507,20 @@ class DatasetRetrievalSettingApi(Resource):
@login_required
@account_initialization_required
def get(self):
vector_type = dify_config.VECTOR_STORE
vector_type = current_app.config['VECTOR_STORE']
match vector_type:
case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA | VectorType.TENCENT:
case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA | VectorType.TENCENT | VectorType.ORACLE:
return {
'retrieval_method': [
RetrievalMethod.SEMANTIC_SEARCH.value
RetrievalMethod.SEMANTIC_SEARCH
]
}
case VectorType.QDRANT | VectorType.WEAVIATE | VectorType.OPENSEARCH | VectorType.ANALYTICDB | VectorType.MYSCALE | VectorType.ORACLE | VectorType.ELASTICSEARCH:
case VectorType.QDRANT | VectorType.WEAVIATE | VectorType.OPENSEARCH:
return {
'retrieval_method': [
RetrievalMethod.SEMANTIC_SEARCH.value,
RetrievalMethod.FULL_TEXT_SEARCH.value,
RetrievalMethod.HYBRID_SEARCH.value,
RetrievalMethod.SEMANTIC_SEARCH,
RetrievalMethod.FULL_TEXT_SEARCH,
RetrievalMethod.HYBRID_SEARCH,
]
}
case _:
@ -573,18 +533,18 @@ class DatasetRetrievalSettingMockApi(Resource):
@account_initialization_required
def get(self, vector_type):
match vector_type:
case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA | VectorType.TENCENT:
case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA | VectorType.TENCENT | VectorType.ORACLE:
return {
'retrieval_method': [
RetrievalMethod.SEMANTIC_SEARCH.value
RetrievalMethod.SEMANTIC_SEARCH
]
}
case VectorType.QDRANT | VectorType.WEAVIATE | VectorType.OPENSEARCH| VectorType.ANALYTICDB | VectorType.MYSCALE | VectorType.ORACLE | VectorType.ELASTICSEARCH:
case VectorType.QDRANT | VectorType.WEAVIATE | VectorType.OPENSEARCH:
return {
'retrieval_method': [
RetrievalMethod.SEMANTIC_SEARCH.value,
RetrievalMethod.FULL_TEXT_SEARCH.value,
RetrievalMethod.HYBRID_SEARCH.value,
RetrievalMethod.SEMANTIC_SEARCH,
RetrievalMethod.FULL_TEXT_SEARCH,
RetrievalMethod.HYBRID_SEARCH,
]
}
case _:
@ -609,27 +569,6 @@ class DatasetErrorDocs(Resource):
}, 200
class DatasetPermissionUserListApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self, dataset_id):
dataset_id_str = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id_str)
if dataset is None:
raise NotFound("Dataset not found.")
try:
DatasetService.check_dataset_permission(dataset, current_user)
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
partial_members_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
return {
'data': partial_members_list,
}, 200
api.add_resource(DatasetListApi, '/datasets')
api.add_resource(DatasetApi, '/datasets/<uuid:dataset_id>')
api.add_resource(DatasetUseCheckApi, '/datasets/<uuid:dataset_id>/use-check')
@ -643,4 +582,3 @@ api.add_resource(DatasetApiDeleteApi, '/datasets/api-keys/<uuid:api_key_id>')
api.add_resource(DatasetApiBaseUrlApi, '/datasets/api-base-info')
api.add_resource(DatasetRetrievalSettingApi, '/datasets/retrieval-setting')
api.add_resource(DatasetRetrievalSettingMockApi, '/datasets/retrieval-setting/<string:vector_type>')
api.add_resource(DatasetPermissionUserListApi, '/datasets/<uuid:dataset_id>/permission-part-users')
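In the retrieval-setting hunks above, the left-hand side returns the RetrievalMethod members' .value strings, so the JSON response carries plain strings rather than enum objects. A minimal sketch of the match/case dispatch with small stand-in enums (not Dify's actual VectorType and RetrievalMethod):

```python
# Sketch of dispatching supported retrieval methods by vector store type.
from enum import Enum

class VectorType(str, Enum):
    MILVUS = 'milvus'
    QDRANT = 'qdrant'
    WEAVIATE = 'weaviate'

class RetrievalMethod(Enum):
    SEMANTIC_SEARCH = 'semantic_search'
    FULL_TEXT_SEARCH = 'full_text_search'
    HYBRID_SEARCH = 'hybrid_search'

def retrieval_methods(vector_type: VectorType) -> dict:
    match vector_type:
        case VectorType.MILVUS:
            # Stores without full-text support expose semantic search only.
            return {'retrieval_method': [RetrievalMethod.SEMANTIC_SEARCH.value]}
        case VectorType.QDRANT | VectorType.WEAVIATE:
            return {'retrieval_method': [
                RetrievalMethod.SEMANTIC_SEARCH.value,
                RetrievalMethod.FULL_TEXT_SEARCH.value,
                RetrievalMethod.HYBRID_SEARCH.value,
            ]}
        case _:
            raise ValueError(f'Unsupported vector db type {vector_type}.')

print(retrieval_methods(VectorType.QDRANT))
```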

View File

@ -178,20 +178,11 @@ class DatasetDocumentListApi(Resource):
.subquery()
query = query.outerjoin(sub_query, sub_query.c.document_id == Document.id) \
.order_by(
sort_logic(db.func.coalesce(sub_query.c.total_hit_count, 0)),
sort_logic(Document.position),
)
.order_by(sort_logic(db.func.coalesce(sub_query.c.total_hit_count, 0)))
elif sort == 'created_at':
query = query.order_by(
sort_logic(Document.created_at),
sort_logic(Document.position),
)
query = query.order_by(sort_logic(Document.created_at))
else:
query = query.order_by(
desc(Document.created_at),
desc(Document.position),
)
query = query.order_by(desc(Document.created_at))
paginated_documents = query.paginate(
page=page, per_page=limit, max_per_page=100, error_out=False)
@ -237,7 +228,7 @@ class DatasetDocumentListApi(Resource):
raise NotFound('Dataset not found.')
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_dataset_editor:
if not current_user.is_editor:
raise Forbidden()
try:
@ -303,11 +294,6 @@ class DatasetInitApi(Resource):
parser.add_argument('retrieval_model', type=dict, required=False, nullable=False,
location='json')
args = parser.parse_args()
# The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
if not current_user.is_dataset_editor:
raise Forbidden()
if args['indexing_technique'] == 'high_quality':
try:
model_manager = ModelManager()
@ -771,19 +757,15 @@ class DocumentStatusApi(DocumentResource):
dataset = DatasetService.get_dataset(dataset_id)
if dataset is None:
raise NotFound("Dataset not found.")
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_dataset_editor:
raise Forbidden()
# check user's model setting
DatasetService.check_dataset_model_setting(dataset)
# check user's permission
DatasetService.check_dataset_permission(dataset, current_user)
document = self.get_document(dataset_id, document_id)
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
indexing_cache_key = 'document_{}_indexing'.format(document.id)
cache_result = redis_client.get(indexing_cache_key)
if cache_result is not None:
@ -973,11 +955,10 @@ class DocumentRenameApi(DocumentResource):
@account_initialization_required
@marshal_with(document_fields)
def post(self, dataset_id, document_id):
# The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
if not current_user.is_dataset_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
dataset = DatasetService.get_dataset(dataset_id)
DatasetService.check_dataset_operator_permission(current_user, dataset)
parser = reqparse.RequestParser()
parser.add_argument('name', type=str, required=True, nullable=False, location='json')
args = parser.parse_args()
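The document-list hunk above adds Document.position as a secondary sort key, keeping the result order stable when hit counts or timestamps tie. A minimal sketch of that ordering, with `db`, `Document`, and `sub_query` standing in for the real objects in the diff:

```python
# Sketch of the stable ordering from DatasetDocumentListApi: primary key is the
# coalesced hit count from an aggregate subquery, secondary key is position.
from sqlalchemy import asc, desc

def order_documents(query, db, Document, sub_query, sort='hit_count', order='desc'):
    sort_logic = desc if order == 'desc' else asc
    if sort == 'hit_count':
        query = query.outerjoin(sub_query, sub_query.c.document_id == Document.id) \
            .order_by(
                sort_logic(db.func.coalesce(sub_query.c.total_hit_count, 0)),
                sort_logic(Document.position),  # tie-break keeps order deterministic
            )
    elif sort == 'created_at':
        query = query.order_by(
            sort_logic(Document.created_at),
            sort_logic(Document.position),
        )
    else:
        query = query.order_by(
            desc(Document.created_at),
            desc(Document.position),
        )
    return query
```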

View File

@ -75,7 +75,7 @@ class DatasetDocumentSegmentListApi(Resource):
)
if last_id is not None:
last_segment = db.session.get(DocumentSegment, str(last_id))
last_segment = DocumentSegment.query.get(str(last_id))
if last_segment:
query = query.filter(
DocumentSegment.position > last_segment.position)
@ -223,7 +223,8 @@ class DatasetDocumentSegmentAddApi(Resource):
document = DocumentService.get_document(dataset_id, document_id)
if not document:
raise NotFound('Document not found.')
if not current_user.is_editor:
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
# check embedding model setting
if dataset.indexing_technique == 'high_quality':
@ -346,7 +347,7 @@ class DatasetDocumentSegmentUpdateApi(Resource):
if not segment:
raise NotFound('Segment not found.')
# The role of the current user in the ta table must be admin or owner
if not current_user.is_editor:
if not current_user.is_admin_or_owner:
raise Forbidden()
try:
DatasetService.check_dataset_permission(dataset, current_user)
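The segment hunk above swaps the legacy DocumentSegment.query.get lookup for db.session.get, the SQLAlchemy 2.0-style primary-key fetch. A minimal, self-contained sketch contrasting the two, with an illustrative model and an in-memory SQLite engine:

```python
# Sketch contrasting the two primary-key lookups seen in the diff.
from sqlalchemy import Integer, String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class DocumentSegment(Base):
    __tablename__ = 'document_segments'
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    content: Mapped[str] = mapped_column(String(200))

engine = create_engine('sqlite:///:memory:')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(DocumentSegment(id=1, content='hello'))
    session.commit()

    # SQLAlchemy 2.0 style (left-hand side of the diff):
    last_segment = session.get(DocumentSegment, 1)

    # Legacy Flask-SQLAlchemy style (right-hand side) would be:
    #   last_segment = DocumentSegment.query.get(str(last_id))
    # which relies on the deprecated Query.get and the model's query property.
    print(last_segment.content)
```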

View File

@ -1,9 +1,8 @@
from flask import request
from flask import current_app, request
from flask_login import current_user
from flask_restful import Resource, marshal_with
import services
from configs import dify_config
from controllers.console import api
from controllers.console.datasets.error import (
FileTooLargeError,
@ -27,9 +26,9 @@ class FileApi(Resource):
@account_initialization_required
@marshal_with(upload_config_fields)
def get(self):
file_size_limit = dify_config.UPLOAD_FILE_SIZE_LIMIT
batch_count_limit = dify_config.UPLOAD_FILE_BATCH_LIMIT
image_file_size_limit = dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT
file_size_limit = current_app.config.get("UPLOAD_FILE_SIZE_LIMIT")
batch_count_limit = current_app.config.get("UPLOAD_FILE_BATCH_LIMIT")
image_file_size_limit = current_app.config.get("UPLOAD_IMAGE_FILE_SIZE_LIMIT")
return {
'file_size_limit': file_size_limit,
'batch_count_limit': batch_count_limit,
@ -77,7 +76,7 @@ class FileSupportTypeApi(Resource):
@login_required
@account_initialization_required
def get(self):
etl_type = dify_config.ETL_TYPE
etl_type = current_app.config['ETL_TYPE']
allowed_extensions = UNSTRUCTURED_ALLOWED_EXTENSIONS if etl_type == 'Unstructured' else ALLOWED_EXTENSIONS
return {'allowed_extensions': allowed_extensions}

View File

@ -19,7 +19,6 @@ from controllers.console.app.error import (
from controllers.console.explore.wraps import InstalledAppResource
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError
from models.model import AppMode
from services.audio_service import AudioService
from services.errors.audio import (
AudioTooLargeServiceError,
@ -71,36 +70,16 @@ class ChatAudioApi(InstalledAppResource):
class ChatTextApi(InstalledAppResource):
def post(self, installed_app):
from flask_restful import reqparse
app_model = installed_app.app
try:
parser = reqparse.RequestParser()
parser.add_argument('message_id', type=str, required=False, location='json')
parser.add_argument('voice', type=str, location='json')
parser.add_argument('text', type=str, location='json')
parser.add_argument('streaming', type=bool, location='json')
args = parser.parse_args()
message_id = args.get('message_id', None)
text = args.get('text', None)
if (app_model.mode in [AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value]
and app_model.workflow
and app_model.workflow.features_dict):
text_to_speech = app_model.workflow.features_dict.get('text_to_speech')
voice = args.get('voice') if args.get('voice') else text_to_speech.get('voice')
else:
try:
voice = args.get('voice') if args.get('voice') else app_model.app_model_config.text_to_speech_dict.get('voice')
except Exception:
voice = None
try:
response = AudioService.transcript_tts(
app_model=app_model,
message_id=message_id,
voice=voice,
text=text
text=request.form['text'],
voice=request.form['voice'] if request.form.get('voice') else app_model.app_model_config.text_to_speech_dict.get('voice'),
streaming=False
)
return response
return {'data': response.data.decode('latin1')}
except services.errors.app_model_config.AppModelConfigBrokenError:
logging.exception("App model config broken.")
raise AppUnavailableError()
@ -129,5 +108,3 @@ class ChatTextApi(InstalledAppResource):
api.add_resource(ChatAudioApi, '/installed-apps/<uuid:installed_app_id>/audio-to-text', endpoint='installed_app_audio')
api.add_resource(ChatTextApi, '/installed-apps/<uuid:installed_app_id>/text-to-audio', endpoint='installed_app_text')
# api.add_resource(ChatTextApiWithMessageId, '/installed-apps/<uuid:installed_app_id>/text-to-audio/message-id',
# endpoint='installed_app_text_with_message_id')

View File

@ -1,7 +1,7 @@
from flask import current_app
from flask_restful import fields, marshal_with
from configs import dify_config
from controllers.console import api
from controllers.console.app.error import AppUnavailableError
from controllers.console.explore.wraps import InstalledAppResource
@ -78,7 +78,7 @@ class AppParameterApi(InstalledAppResource):
"transfer_methods": ["remote_url", "local_file"]
}}),
'system_parameters': {
'image_file_size_limit': dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT
'image_file_size_limit': current_app.config.get('UPLOAD_IMAGE_FILE_SIZE_LIMIT')
}
}

View File

@ -1,7 +1,6 @@
from flask_login import current_user
from flask_restful import Resource, marshal_with, reqparse
from constants import HIDDEN_VALUE
from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
@ -90,7 +89,7 @@ class APIBasedExtensionDetailAPI(Resource):
extension_data_from_db.name = args['name']
extension_data_from_db.api_endpoint = args['api_endpoint']
if args['api_key'] != HIDDEN_VALUE:
if args['api_key'] != '[__HIDDEN__]':
extension_data_from_db.api_key = args['api_key']
return APIBasedExtensionService.save(extension_data_from_db)

View File

@ -1,9 +1,8 @@
import os
from flask import session
from flask import current_app, session
from flask_restful import Resource, reqparse
from configs import dify_config
from libs.helper import str_len
from models.model import DifySetup
from services.account_service import TenantService
@ -41,7 +40,7 @@ class InitValidateAPI(Resource):
return {'result': 'success'}, 201
def get_init_validate_status():
if dify_config.EDITION == 'SELF_HOSTED':
if current_app.config['EDITION'] == 'SELF_HOSTED':
if os.environ.get('INIT_PASSWORD'):
return session.get('is_init_validated') or DifySetup.query.first()

View File

@ -1,9 +1,8 @@
from functools import wraps
from flask import request
from flask import current_app, request
from flask_restful import Resource, reqparse
from configs import dify_config
from libs.helper import email, get_remote_ip, str_len
from libs.password import valid_password
from models.model import DifySetup
@ -18,7 +17,7 @@ from .wraps import only_edition_self_hosted
class SetupApi(Resource):
def get(self):
if dify_config.EDITION == 'SELF_HOSTED':
if current_app.config['EDITION'] == 'SELF_HOSTED':
setup_status = get_setup_status()
if setup_status:
return {
@ -78,7 +77,7 @@ def setup_required(view):
def get_setup_status():
if dify_config.EDITION == 'SELF_HOSTED':
if current_app.config['EDITION'] == 'SELF_HOSTED':
return DifySetup.query.first()
else:
return True

View File

@ -36,7 +36,7 @@ class TagListApi(Resource):
@account_initialization_required
def post(self):
# The role of the current user in the ta table must be admin, owner, or editor
if not (current_user.is_editor or current_user.is_dataset_editor):
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
@ -68,7 +68,7 @@ class TagUpdateDeleteApi(Resource):
def patch(self, tag_id):
tag_id = str(tag_id)
# The role of the current user in the ta table must be admin, owner, or editor
if not (current_user.is_editor or current_user.is_dataset_editor):
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
@ -109,8 +109,8 @@ class TagBindingCreateApi(Resource):
@login_required
@account_initialization_required
def post(self):
# The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
if not (current_user.is_editor or current_user.is_dataset_editor):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
@ -134,8 +134,8 @@ class TagBindingDeleteApi(Resource):
@login_required
@account_initialization_required
def post(self):
# The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
if not (current_user.is_editor or current_user.is_dataset_editor):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()

View File

@ -3,10 +3,9 @@ import json
import logging
import requests
from flask import current_app
from flask_restful import Resource, reqparse
from configs import dify_config
from . import api
@ -16,16 +15,16 @@ class VersionApi(Resource):
parser = reqparse.RequestParser()
parser.add_argument('current_version', type=str, required=True, location='args')
args = parser.parse_args()
check_update_url = dify_config.CHECK_UPDATE_URL
check_update_url = current_app.config['CHECK_UPDATE_URL']
result = {
'version': dify_config.CURRENT_VERSION,
'version': current_app.config['CURRENT_VERSION'],
'release_date': '',
'release_notes': '',
'can_auto_update': False,
'features': {
'can_replace_logo': dify_config.CAN_REPLACE_LOGO,
'model_load_balancing_enabled': dify_config.MODEL_LB_ENABLED
'can_replace_logo': current_app.config['CAN_REPLACE_LOGO'],
'model_load_balancing_enabled': current_app.config['MODEL_LB_ENABLED']
}
}

View File

@ -1,11 +1,10 @@
import datetime
import pytz
from flask import request
from flask import current_app, request
from flask_login import current_user
from flask_restful import Resource, fields, marshal_with, reqparse
from configs import dify_config
from constants.languages import supported_language
from controllers.console import api
from controllers.console.setup import setup_required
@ -37,7 +36,7 @@ class AccountInitApi(Resource):
parser = reqparse.RequestParser()
if dify_config.EDITION == 'CLOUD':
if current_app.config['EDITION'] == 'CLOUD':
parser.add_argument('invitation_code', type=str, location='json')
parser.add_argument(
@ -46,7 +45,7 @@ class AccountInitApi(Resource):
required=True, location='json')
args = parser.parse_args()
if dify_config.EDITION == 'CLOUD':
if current_app.config['EDITION'] == 'CLOUD':
if not args['invitation_code']:
raise ValueError('invitation_code is required')
@ -246,8 +245,6 @@ class AccountIntegrateApi(Resource):
return {'data': integrate_data}
# Register API resources
api.add_resource(AccountInitApi, '/account/init')
api.add_resource(AccountProfileApi, '/account/profile')

View File

@ -1,8 +1,8 @@
from flask import current_app
from flask_login import current_user
from flask_restful import Resource, abort, marshal_with, reqparse
import services
from configs import dify_config
from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
@ -48,7 +48,7 @@ class MemberInviteEmailApi(Resource):
inviter = current_user
invitation_results = []
console_web_url = dify_config.CONSOLE_WEB_URL
console_web_url = current_app.config.get("CONSOLE_WEB_URL")
for invitee_email in invitee_emails:
try:
token = RegisterService.invite_new_member(inviter.current_tenant, invitee_email, interface_language, role=invitee_role, inviter=inviter)
@ -117,7 +117,7 @@ class MemberUpdateRoleApi(Resource):
if not TenantAccountRole.is_valid_role(new_role):
return {'code': 'invalid-role', 'message': 'Invalid role'}, 400
member = db.session.get(Account, str(member_id))
member = Account.query.get(str(member_id))
if not member:
abort(404)
@ -131,20 +131,7 @@ class MemberUpdateRoleApi(Resource):
return {'result': 'success'}
class DatasetOperatorMemberListApi(Resource):
"""List all members of current tenant."""
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_with_role_list_fields)
def get(self):
members = TenantService.get_dataset_operator_members(current_user.current_tenant)
return {'result': 'success', 'accounts': members}, 200
api.add_resource(MemberListApi, '/workspaces/current/members')
api.add_resource(MemberInviteEmailApi, '/workspaces/current/members/invite-email')
api.add_resource(MemberCancelInviteApi, '/workspaces/current/members/<uuid:member_id>')
api.add_resource(MemberUpdateRoleApi, '/workspaces/current/members/<uuid:member_id>/update-role')
api.add_resource(DatasetOperatorMemberListApi, '/workspaces/current/dataset-operators')

Some files were not shown because too many files have changed in this diff.