Mirror of https://github.com/langgenius/dify.git (synced 2026-01-31 08:57:01 +08:00)

Compare commits

344 Commits
| SHA1 |
|---|
| e60350d95d | |||
| f40743183e | |||
| e852a21634 | |||
| 45deaee762 | |||
| 444fdb79dc | |||
| 140dd873f1 | |||
| 27dae156db | |||
| 2deb23e00e | |||
| 8152bc6fbf | |||
| af026c5953 | |||
| 11275cbaaf | |||
| fe9bf5fc4a | |||
| cd4924d472 | |||
| f56b984d97 | |||
| f804adbff3 | |||
| 109aabc6f2 | |||
| ad620f02c7 | |||
| 5bd432a85f | |||
| 026175c8f7 | |||
| f156014daa | |||
| 2cd55e456a | |||
| 8c2ca60c8b | |||
| 3fda2245a4 | |||
| 296887754f | |||
| 9ae72cdcf4 | |||
| 16fec084f5 | |||
| 10c61da686 | |||
| 24624491cd | |||
| 233c4150d1 | |||
| 5893ebec55 | |||
| 11642192d1 | |||
| e57bdd4e58 | |||
| 461488e9bf | |||
| 2988b67c24 | |||
| 4f62541bfb | |||
| 24576a39e5 | |||
| 3ab19be9ea | |||
| 5b009a5afb | |||
| 3efb5fe7e2 | |||
| ee53f98d8c | |||
| d5a33a0323 | |||
| f25927855e | |||
| c873035084 | |||
| f32bba6531 | |||
| 40bc936739 | |||
| 6b5685ef0c | |||
| e8e213ad1e | |||
| 5f4df34829 | |||
| c255a20d7c | |||
| b5204111da | |||
| 3a51f2a778 | |||
| 4086f5051c | |||
| 5440108431 | |||
| 46bd53a929 | |||
| a10c2ccd41 | |||
| 1cca100a48 | |||
| 04ad46dd31 | |||
| aa13d14019 | |||
| b1f003646b | |||
| e90eccdf92 | |||
| ba06447cd5 | |||
| 9808520992 | |||
| 8b931b085c | |||
| 528faceb35 | |||
| c2a8fa91b1 | |||
| 091fba74cb | |||
| 083ef2e6fc | |||
| de3a7603ac | |||
| 0ac5d621b6 | |||
| bdd409970f | |||
| d8f38f79f2 | |||
| 0f1172f55b | |||
| 3df47b7b59 | |||
| 6e9066ebf4 | |||
| dd94931116 | |||
| da81233d61 | |||
| a76ae2d756 | |||
| 97b65f9b4b | |||
| c0fe414e0a | |||
| 182dadd433 | |||
| 1d0f88264f | |||
| 332baca538 | |||
| e2a78888b9 | |||
| 5d6d0e63c5 | |||
| 2eb468f885 | |||
| 98140ae5d9 | |||
| d1ccb22d8a | |||
| 66c8070da8 | |||
| 3271e3e803 | |||
| 16d47923c3 | |||
| 6f1633fa75 | |||
| 08e4103fa1 | |||
| eee95190cc | |||
| e8311357ff | |||
| 0f14fdd4c9 | |||
| ece0f08a2b | |||
| 5edb3d55e5 | |||
| 63382f758e | |||
| bbef964eb5 | |||
| e6db7ad1d5 | |||
| 8cc492721b | |||
| a80fe20456 | |||
| f7986805c6 | |||
| aa5ca90f00 | |||
| 4af00e4a45 | |||
| c01c95d77f | |||
| 20a9037d5b | |||
| 34d3998566 | |||
| 198d6c00d6 | |||
| 1663df8a05 | |||
| d8926a2571 | |||
| 4796f9d914 | |||
| a588df4371 | |||
| 2c1c660c6e | |||
| 13f4ed6e0e | |||
| 1e451991db | |||
| 749b236d3d | |||
| 00ce372b71 | |||
| 370e1c1a17 | |||
| 28495273b4 | |||
| 36a9c5cc6b | |||
| 228de1f12a | |||
| 01555463d2 | |||
| 6b99075dc8 | |||
| 8578ee0864 | |||
| 897e07f639 | |||
| 875249eb00 | |||
| 4d5a4e4cef | |||
| 86a6e6bd04 | |||
| 8f3042e5b3 | |||
| a1ab87107b | |||
| f49c99937c | |||
| 9b24f12bf5 | |||
| 487ce7c82a | |||
| cc835d523c | |||
| 64c3bc070a | |||
| 7405b2e819 | |||
| ca5081e327 | |||
| a79941df22 | |||
| 8137d63000 | |||
| 4aa21242b6 | |||
| 8ce93faf08 | |||
| 903ece6160 | |||
| 9f440c11e0 | |||
| 58bd5627bf | |||
| 97dcb8977a | |||
| 2fdd64c1b5 | |||
| 591b993685 | |||
| 543a00e597 | |||
| f361c7004d | |||
| bb7c62777d | |||
| d51f52a649 | |||
| e353809680 | |||
| c2f0f958ef | |||
| 087b7a6607 | |||
| 6271463240 | |||
| 6f1911533c | |||
| d5d8b98d82 | |||
| e7fe7ec0f6 | |||
| 049abd698f | |||
| 45d21677a0 | |||
| 76bec6ce7f | |||
| 6563cb6ec6 | |||
| 13cd409575 | |||
| 13292ff73e | |||
| 3f8e2456f7 | |||
| 822ee7db88 | |||
| 94a650475d | |||
| 03cf00422a | |||
| 51a9e678f0 | |||
| ad76ee76a8 | |||
| 630136b5b7 | |||
| b5f101bdac | |||
| 5940564d84 | |||
| 67902b5da7 | |||
| c0476c7881 | |||
| f68b6b0e5e | |||
| 44857702ae | |||
| b1399cd5f9 | |||
| 6f1e4a19a2 | |||
| 93393e005e | |||
| 4ea2755fce | |||
| ecb51a83d4 | |||
| 093b5c0e63 | |||
| bf42b0ae44 | |||
| 342b4fd19d | |||
| cbdb861ee4 | |||
| da5a8b9a59 | |||
| 1e6e8b446d | |||
| c1fdaa6ae0 | |||
| 142814d451 | |||
| 704755d005 | |||
| d1263700c0 | |||
| 0704fe9695 | |||
| 1d3f1d88ef | |||
| 8b3edac091 | |||
| 05cab85579 | |||
| b72fbe200d | |||
| b1194da6a5 | |||
| 338e4669e5 | |||
| c5e2659771 | |||
| 1d432728ac | |||
| 2fd702a319 | |||
| f26ad16af7 | |||
| 8f2ae51fe5 | |||
| 2f84d00300 | |||
| b82a2d97ef | |||
| 3e9dbe3e0a | |||
| 975b2fb79e | |||
| fa509ce64e | |||
| 99292edd46 | |||
| 3e992cb23c | |||
| e7b4d024ee | |||
| ff67a6d338 | |||
| 8e4989ed03 | |||
| 0940f01634 | |||
| 9d1cb1bc92 | |||
| 0ca4e30b19 | |||
| ba88f8a6f0 | |||
| aefe0cbf51 | |||
| 9ad489d133 | |||
| 661b30784e | |||
| 43a5ba9415 | |||
| 08a65d74d5 | |||
| cefe156811 | |||
| 3b5b4d628b | |||
| 8746e48df0 | |||
| 0ec8b57825 | |||
| 045827043d | |||
| 4d66a86579 | |||
| 2a8881d0e8 | |||
| ffc60bb917 | |||
| 2e454c770b | |||
| 7d711135bc | |||
| f62b2b5b45 | |||
| 7919596a21 | |||
| 9b4898efeb | |||
| 45dd1683fd | |||
| 8bca908f15 | |||
| 9cbb8ddd7f | |||
| 1be222af2e | |||
| bf9fc8fef4 | |||
| 86e7330fa2 | |||
| 34bfb715e1 | |||
| 019d7069f8 | |||
| c54fcfb45d | |||
| cde87cb225 | |||
| 12435774ca | |||
| 80b9507e7a | |||
| 0ac0f0ffd0 | |||
| 3d14aba4b4 | |||
| 64f694865c | |||
| d36b728088 | |||
| 1a7b4c42ab | |||
| 2a64ce740e | |||
| 78988ed60e | |||
| 2832adda88 | |||
| a4e4fb4094 | |||
| 777ec64635 | |||
| 9cec8c1750 | |||
| 8ca5aa1190 | |||
| 4d8f1b9ca4 | |||
| 3da179f77b | |||
| a34e8cb0bd | |||
| b249767c5c | |||
| 89a7434565 | |||
| 3b537cbdeb | |||
| 731464f5b8 | |||
| 1ad70f8721 | |||
| 2ea8c73cd8 | |||
| f257f2c396 | |||
| 3cd8e6f5c6 | |||
| 0715db7681 | |||
| a39de8a686 | |||
| ccaf335466 | |||
| 40e36e9b52 | |||
| 9eebe9d54e | |||
| a23a191615 | |||
| 7d9c5586f9 | |||
| f07c89bba4 | |||
| 59cba930e5 | |||
| 39ae56e136 | |||
| f92130338b | |||
| 2867d29021 | |||
| f76ac8bdee | |||
| 83caffe000 | |||
| 96160837d2 | |||
| 3480f1c59e | |||
| 65ac4f69af | |||
| 2c50fab3dd | |||
| 9525ccac4f | |||
| ff76c4bd5d | |||
| 5dacf77627 | |||
| 2a213c6af7 | |||
| b2535e7db6 | |||
| 28236147ee | |||
| 4969783383 | |||
| b64080be1b | |||
| aadebd6d23 | |||
| 71cc0074ef | |||
| d77f52bf85 | |||
| b71163706b | |||
| 1fb7df12d7 | |||
| b3996b3221 | |||
| 7251748d59 | |||
| 73e9f35ab1 | |||
| d7f0056e2d | |||
| 9b7b133cbc | |||
| 7545e5de6c | |||
| a0c30702c1 | |||
| 03c988388e | |||
| 0a56c522eb | |||
| 646858ea08 | |||
| d9b821cecc | |||
| d5448e07ab | |||
| 3aa182e26a | |||
| de3b490f8e | |||
| 4481906be2 | |||
| d9f1a8ce9f | |||
| aa6d2e3035 | |||
| 4365843c20 | |||
| b4d2d635f7 | |||
| b9b28900b1 | |||
| d463b82aba | |||
| ed861ff782 | |||
| 8cc1944160 | |||
| 80e390b906 | |||
| c2acb2be60 | |||
| 8ba95c08a1 | |||
| c7de51ca9a | |||
| e02ee3bb2e | |||
| 394ceee141 | |||
| 40b48510f4 | |||
| be3b37114c | |||
| e212a87b86 | |||
| b890c11c14 | |||
| 2e27425e93 | |||
| 6269e011db | |||
| e70482dfc0 | |||
| 9b8861e3e1 | |||
| 38ca3b29b5 | |||
| 066076b157 | |||
| be27ac0e69 | |||
| 9e6d4eeb92 |
.devcontainer/README.md

@@ -1,4 +1,4 @@
-# Devlopment with devcontainer
+# Development with devcontainer

This project includes a devcontainer configuration that allows you to open the project in a container with a fully configured development environment.

Both frontend and backend environments are initialized when the container is started.

## GitHub Codespaces

@@ -33,5 +33,5 @@ Performance Impact: While usually minimal, programs running inside a devcontainer
if you see such error message when you open this project in codespaces:



-a simple workaround is change `/signin` endpoint into another one, then login with github account and close the tab, then change it back to `/signin` endpoint. Then all things will be fine.
+a simple workaround is change `/signin` endpoint into another one, then login with GitHub account and close the tab, then change it back to `/signin` endpoint. Then all things will be fine.

The reason is `signin` endpoint is not allowed in codespaces, details can be found [here](https://github.com/orgs/community/discussions/5204)
.devcontainer/devcontainer.json

@@ -32,8 +32,8 @@
      ]
    }
  },
-  "postStartCommand": "cd api && pip install -r requirements.txt",
-  "postCreateCommand": "cd web && npm install"
+  "postStartCommand": "./.devcontainer/post_start_command.sh",
+  "postCreateCommand": "./.devcontainer/post_create_command.sh"

  // Features to add to the dev container. More info: https://containers.dev/features.
  // "features": {},
10 .devcontainer/post_create_command.sh (Executable file)

@@ -0,0 +1,10 @@
+#!/bin/bash
+
+cd web && npm install
+
+echo 'alias start-api="cd /workspaces/dify/api && flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
+echo 'alias start-worker="cd /workspaces/dify/api && celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail"' >> ~/.bashrc
+echo 'alias start-web="cd /workspaces/dify/web && npm run dev"' >> ~/.bashrc
+echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify up -d"' >> ~/.bashrc
+
+source /home/vscode/.bashrc
3 .devcontainer/post_start_command.sh (Executable file)

@@ -0,0 +1,3 @@
+#!/bin/bash
+
+cd api && pip install -r requirements.txt
4 .github/ISSUE_TEMPLATE/bug_report.yml (vendored)

@@ -8,13 +8,13 @@ body:
      label: Self Checks
      description: "To make sure we get to you in time, please check the following :)"
      options:
-       - label: This is only for bug report, if you would like to ask a quesion, please head to [Discussions](https://github.com/langgenius/dify/discussions/categories/general).
+       - label: This is only for bug report, if you would like to ask a question, please head to [Discussions](https://github.com/langgenius/dify/discussions/categories/general).
          required: true
        - label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
          required: true
        - label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
          required: true
-       - label: "Pleas do not modify this template :) and fill in all the required fields."
+       - label: "Please do not modify this template :) and fill in all the required fields."
          required: true

  - type: input
4 .github/ISSUE_TEMPLATE/document_issue.yml (vendored)

@@ -1,7 +1,7 @@
 name: "📚 Documentation Issue"
 description: Report issues in our documentation
 labels:
-  - ducumentation
+  - documentation
 body:
   - type: checkboxes
     attributes:

@@ -12,7 +12,7 @@ body:
          required: true
        - label: I confirm that I am using English to submit report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
          required: true
-       - label: "Pleas do not modify this template :) and fill in all the required fields."
+       - label: "Please do not modify this template :) and fill in all the required fields."
          required: true
  - type: textarea
    attributes:
2 .github/ISSUE_TEMPLATE/feature_request.yml (vendored)

@@ -12,7 +12,7 @@ body:
          required: true
        - label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
          required: true
-       - label: "Pleas do not modify this template :) and fill in all the required fields."
+       - label: "Please do not modify this template :) and fill in all the required fields."
          required: true
  - type: textarea
    attributes:
2 .github/ISSUE_TEMPLATE/translation_issue.yml (vendored)

@@ -12,7 +12,7 @@ body:
          required: true
        - label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
          required: true
-       - label: "Pleas do not modify this template :) and fill in all the required fields."
+       - label: "Please do not modify this template :) and fill in all the required fields."
          required: true
  - type: input
    attributes:
72 .github/workflows/api-tests.yml (vendored)

@@ -8,39 +8,20 @@ on:
 jobs:
   test:
     runs-on: ubuntu-latest

-    env:
-      OPENAI_API_KEY: sk-IamNotARealKeyJustForMockTestKawaiiiiiiiiii
-      AZURE_OPENAI_API_BASE: https://difyai-openai.openai.azure.com
-      AZURE_OPENAI_API_KEY: xxxxb1707exxxxxxxxxxaaxxxxxf94
-      ANTHROPIC_API_KEY: sk-ant-api11-IamNotARealKeyJustForMockTestKawaiiiiiiiiii-NotBaka-ASkksz
-      CHATGLM_API_BASE: http://a.abc.com:11451
-      XINFERENCE_SERVER_URL: http://a.abc.com:11451
-      XINFERENCE_GENERATION_MODEL_UID: generate
-      XINFERENCE_CHAT_MODEL_UID: chat
-      XINFERENCE_EMBEDDINGS_MODEL_UID: embedding
-      XINFERENCE_RERANK_MODEL_UID: rerank
-      GOOGLE_API_KEY: abcdefghijklmnopqrstuvwxyz
-      HUGGINGFACE_API_KEY: hf-awuwuwuwuwuwuwuwuwuwuwuwuwuwuwuwuwu
-      HUGGINGFACE_TEXT_GEN_ENDPOINT_URL: a
-      HUGGINGFACE_TEXT2TEXT_GEN_ENDPOINT_URL: b
-      HUGGINGFACE_EMBEDDINGS_ENDPOINT_URL: c
-      MOCK_SWITCH: true
-      CODE_MAX_STRING_LENGTH: 80000
+    strategy:
+      matrix:
+        python-version:
+          - "3.10"
+          - "3.11"

     steps:
       - name: Checkout code
         uses: actions/checkout@v4

       - name: Install APT packages
         uses: awalsh128/cache-apt-pkgs-action@v1
         with:
           packages: ffmpeg

-      - name: Set up Python
+      - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v5
         with:
-          python-version: '3.10'
+          python-version: ${{ matrix.python-version }}
           cache: 'pip'
           cache-dependency-path: |
             ./api/requirements.txt

@@ -49,11 +30,44 @@ jobs:
       - name: Install dependencies
         run: pip install -r ./api/requirements.txt -r ./api/requirements-dev.txt

+      - name: Run Unit tests
+        run: dev/pytest/pytest_unit_tests.sh

       - name: Run ModelRuntime
-        run: pytest api/tests/integration_tests/model_runtime/anthropic api/tests/integration_tests/model_runtime/azure_openai api/tests/integration_tests/model_runtime/openai api/tests/integration_tests/model_runtime/chatglm api/tests/integration_tests/model_runtime/google api/tests/integration_tests/model_runtime/xinference api/tests/integration_tests/model_runtime/huggingface_hub/test_llm.py
+        run: dev/pytest/pytest_model_runtime.sh

       - name: Run Tool
-        run: pytest api/tests/integration_tests/tools/test_all_provider.py
+        run: dev/pytest/pytest_tools.sh

+      - name: Set up Sandbox
+        uses: hoverkraft-tech/compose-action@v2.0.0
+        with:
+          compose-file: |
+            docker/docker-compose.middleware.yaml
+          services: |
+            sandbox
+            ssrf_proxy

       - name: Run Workflow
-        run: pytest api/tests/integration_tests/workflow
+        run: dev/pytest/pytest_workflow.sh

+      - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS)
+        uses: hoverkraft-tech/compose-action@v2.0.0
+        with:
+          compose-file: |
+            docker/docker-compose.middleware.yaml
+            docker/docker-compose.qdrant.yaml
+            docker/docker-compose.milvus.yaml
+            docker/docker-compose.pgvecto-rs.yaml
+            docker/docker-compose.pgvector.yaml
+          services: |
+            weaviate
+            qdrant
+            etcd
+            minio
+            milvus-standalone
+            pgvecto-rs
+            pgvector

+      - name: Test Vector Stores
+        run: dev/pytest/pytest_vdb.sh
5 .github/workflows/style.yml (vendored)

@@ -24,11 +24,14 @@ jobs:
           python-version: '3.10'

       - name: Python dependencies
-        run: pip install ruff
+        run: pip install ruff dotenv-linter

       - name: Ruff check
         run: ruff check ./api

+      - name: Dotenv check
+        run: dotenv-linter ./api/.env.example ./web/.env.example

       - name: Lint hints
         if: failure()
         run: echo "Please run 'dev/reformat' to fix the fixable linting errors."
CONTRIBUTING.md

@@ -4,7 +4,7 @@ We need to be nimble and ship fast given where we are, but we also want to make

This guide, like Dify itself, is a constant work in progress. We highly appreciate your understanding if at times it lags behind the actual project, and welcome any feedback for us to improve.

-In terms of licensing, please take a minute to read our short [License and Contributor Agreement](./license). The community also adheres to the [code of conduct](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
+In terms of licensing, please take a minute to read our short [License and Contributor Agreement](./LICENSE). The community also adheres to the [code of conduct](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).

## Before you jump in
CONTRIBUTING_CN.md

@@ -4,7 +4,7 @@

这份指南,就像 Dify 本身一样,是一个不断改进的工作。如果有时它落后于实际项目,我们非常感谢你的理解,并欢迎任何反馈以供我们改进。

-在许可方面,请花一分钟阅读我们简短的[许可证和贡献者协议](./license)。社区还遵守[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。
+在许可方面,请花一分钟阅读我们简短的[许可证和贡献者协议](./LICENSE)。社区还遵守[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。

## 在开始之前
160 CONTRIBUTING_JA.md (Normal file)

@@ -0,0 +1,160 @@
Dify にコントリビュートしたいとお考えなのですね。それは素晴らしいことです。
私たちは、LLM アプリケーションの構築と管理のための最も直感的なワークフローを設計するという壮大な野望を持っています。人数も資金も限られている新興企業として、コミュニティからの支援は本当に重要です。

私たちは現状を鑑み、機敏かつ迅速に開発をする必要がありますが、同時にあなたのようなコントリビューターの方々に、可能な限りスムーズな貢献体験をしていただきたいと思っています。そのためにこのコントリビュートガイドを作成しました。
コードベースやコントリビュータの方々と私たちがどのように仕事をしているのかに慣れていただき、楽しいパートにすぐに飛び込めるようにすることが目的です。

このガイドは Dify そのものと同様に、継続的に改善されています。実際のプロジェクトに遅れをとることがあるかもしれませんが、ご理解をお願いします。

ライセンスに関しては、私たちの短い[ライセンスおよびコントリビューター規約](./LICENSE)をお読みください。また、コミュニティは[行動規範](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)を遵守しています。

## 飛び込む前に

[既存の Issue](https://github.com/langgenius/dify/issues?q=is:issue+is:closed) を探すか、[新しい Issue](https://github.com/langgenius/dify/issues/new/choose) を作成してください。私たちは Issue を 2 つのタイプに分類しています。

### 機能リクエスト

* 新しい機能要望を出す場合は、提案する機能が何を実現するものなのかを説明し、可能な限り多くの文脈を含めてください。[@perzeusss](https://github.com/perzeuss)は、あなたの要望を書き出すのに役立つ [Feature Request Copilot](https://udify.app/chat/MK2kVSnw1gakVwMX) を作ってくれました。気軽に試してみてください。

* 既存の課題から 1 つ選びたい場合は、その下にコメントを書いてください。

関連する方向で作業しているチームメンバーが参加します。すべてが良好であれば、コーディングを開始する許可が与えられます。私たちが変更を提案した場合にあなたの作業が無駄になることがないよう、それまでこの機能の作業を控えていただくようお願いいたします。

提案された機能がどの分野に属するかによって、あなたは異なるチーム・メンバーと話をするかもしれません。以下は、各チームメンバーが現在取り組んでいる分野の概要です。

| Member | Scope |
| --- | --- |
| [@yeuoly](https://github.com/Yeuoly) | エージェントアーキテクチャ |
| [@jyong](https://github.com/JohnJyong) | RAG パイプライン設計 |
| [@GarfieldDai](https://github.com/GarfieldDai) | workflow orchestrations の構築 |
| [@iamjoel](https://github.com/iamjoel) & [@zxhlyh](https://github.com/zxhlyh) | フロントエンドを使いやすくする |
| [@guchenhe](https://github.com/guchenhe) & [@crazywoola](https://github.com/crazywoola) | 開発者体験、何でも相談できる窓口 |
| [@takatost](https://github.com/takatost) | 全体的な製品の方向性とアーキテクチャ |

優先順位の付け方:

| Feature Type | Priority |
| --- | --- |
| チームメンバーによってラベル付けされた優先度の高い機能 | High Priority |
| [community feedback board](https://github.com/langgenius/dify/discussions/categories/feedbacks)の人気の機能リクエスト | Medium Priority |
| 非コア機能とマイナーな機能強化 | Low Priority |
| 価値はあるが即効性はない | Future-Feature |

### その他 (バグレポート、パフォーマンスの最適化、誤字の修正など)

* すぐにコーディングを始めてください

優先順位の付け方:

| Issue Type | Priority |
| --- | --- |
| コア機能のバグ(ログインできない、アプリケーションが動作しない、セキュリティの抜け穴) | Critical |
| 致命的でないバグ、パフォーマンス向上 | Medium Priority |
| 細かな修正(誤字脱字、機能はするが分かりにくい UI) | Low Priority |

## インストール

Dify を開発用にセットアップする手順は以下の通りです。

### 1. このリポジトリをフォークする

### 2. リポジトリをクローンする

フォークしたリポジトリをターミナルからクローンします。

```
git clone git@github.com:<github_username>/dify.git
```

### 3. 依存関係の確認

Dify を構築するには次の依存関係が必要です。それらがシステムにインストールされていることを確認してください。

- [Docker](https://www.docker.com/)
- [Docker Compose](https://docs.docker.com/compose/install/)
- [Node.js v18.x (LTS)](http://nodejs.org)
- [npm](https://www.npmjs.com/) version 8.x.x or [Yarn](https://yarnpkg.com/)
- [Python](https://www.python.org/) version 3.10.x

### 4. インストール

Dify はバックエンドとフロントエンドから構成されています。
まず`cd api/`でバックエンドのディレクトリに移動し、[Backend README](api/README.md)に従ってインストールします。
次に別のターミナルで、`cd web/`でフロントエンドのディレクトリに移動し、[Frontend README](web/README.md)に従ってインストールしてください。

よくある問題とトラブルシューティングの手順については、[installation FAQ](https://docs.dify.ai/getting-started/faq/install-faq) を確認してください。

### 5. ブラウザで dify にアクセスする

設定を確認するために、ブラウザで[http://localhost:3000](http://localhost:3000)(デフォルト、または自分で設定した URL とポート)にアクセスしてください。Dify が起動して実行中であることが確認できるはずです。

## 開発中

モデルプロバイダーを追加する場合は、[このガイド](https://github.com/langgenius/dify/blob/main/api/core/model_runtime/README.md)が役立ちます。

Agent や Workflow にツールプロバイダーを追加する場合は、[このガイド](./api/core/tools/README.md)が役立ちます。

Dify のバックエンドとフロントエンドの概要を簡単に説明します。

### バックエンド

Dify のバックエンドは[Flask](https://flask.palletsprojects.com/en/3.0.x/)を使って Python で書かれています。ORM には[SQLAlchemy](https://www.sqlalchemy.org/)を、タスクキューには[Celery](https://docs.celeryq.dev/en/stable/getting-started/introduction.html)を使っています。認証ロジックは Flask-login 経由で行われます。

```
[api/]
├── constants             // コードベース全体で使用される定数設定
├── controllers           // APIルート定義とリクエスト処理ロジック
├── core                  // アプリケーションの中核的な管理、モデル統合、およびツール
├── docker                // Dockerおよびコンテナ関連の設定
├── events                // イベントのハンドリングと処理
├── extensions            // 第三者のフレームワーク/プラットフォームとの拡張
├── fields                // シリアライゼーション/マーシャリング用のフィールド定義
├── libs                  // 再利用可能なライブラリとヘルパー
├── migrations            // データベースマイグレーションスクリプト
├── models                // データベースモデルとスキーマ定義
├── services              // ビジネスロジックの定義
├── storage               // 秘密鍵の保存
├── tasks                 // 非同期タスクとバックグラウンドジョブの処理
└── tests                 // テスト関連のファイル
```

### フロントエンド

このウェブサイトは、Typescript の[Next.js](https://nextjs.org/)ボイラープレートでブートストラップされており、スタイリングには[Tailwind CSS](https://tailwindcss.com/)を使用しています。国際化には[React-i18next](https://react.i18next.com/)を使用しています。

```
[web/]
├── app                   // レイアウト、ページ、コンポーネント
│   ├── (commonLayout)    // アプリ全体で共通のレイアウト
│   ├── (shareLayout)     // トークン特有のセッションで共有されるレイアウト
│   ├── activate          // アクティベートページ
│   ├── components        // ページやレイアウトで共有されるコンポーネント
│   ├── install           // インストールページ
│   ├── signin            // サインインページ
│   └── styles            // グローバルに共有されるスタイル
├── assets                // 静的アセット
├── bin                   // ビルドステップで実行されるスクリプト
├── config                // 調整可能な設定とオプション
├── context               // アプリの異なる部分で使用される共有コンテキスト
├── dictionaries          // 言語別の翻訳ファイル
├── docker                // コンテナ設定
├── hooks                 // 再利用可能なフック
├── i18n                  // 国際化設定
├── models                // データモデルとAPIレスポンスの形状を記述
├── public                // ファビコンなどのメタアセット
├── service               // APIアクションの形状を指定
├── test
├── types                 // 関数のパラメータと戻り値の記述
└── utils                 // 共有ユーティリティ関数
```

## PR を投稿する

いよいよ、私たちのリポジトリにプルリクエスト (PR) を提出する時が来ました。主要な機能については、まず `deploy/dev` ブランチにマージしてテストしてから `main` ブランチにマージします。
マージ競合などの問題が発生した場合、またはプル リクエストを開く方法がわからない場合は、[GitHub's pull request tutorial](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests) をチェックしてみてください。

これで完了です!あなたの PR がマージされると、[README](https://github.com/langgenius/dify/blob/main/README.md) にコントリビューターとして紹介されます。

## ヘルプを得る

コントリビュート中に行き詰まったり、疑問が生じたりした場合は、GitHub の関連する issue から質問していただくか、[Discord](https://discord.gg/8Tpq4AcN9c)でチャットしてください。
23 README.md

@@ -29,19 +29,16 @@
</p>

<p align="center">
-  <a href="./README.md"><img alt="Commits last month" src="https://img.shields.io/badge/English-d9d9d9"></a>
-  <a href="./README_CN.md"><img alt="Commits last month" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
-  <a href="./README_JA.md"><img alt="Commits last month" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
-  <a href="./README_ES.md"><img alt="Commits last month" src="https://img.shields.io/badge/Español-d9d9d9"></a>
-  <a href="./README_KL.md"><img alt="Commits last month" src="https://img.shields.io/badge/Français-d9d9d9"></a>
-  <a href="./README_FR.md"><img alt="Commits last month" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
+  <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
+  <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
+  <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
+  <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
+  <a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
+  <a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
+  <a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
</p>

#

<p align="center">
  <a href="https://trendshift.io/repositories/2152" target="_blank"><img src="https://trendshift.io/api/badge/repositories/2152" alt="langgenius%2Fdify | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
</p>
Dify is an open-source LLM app development platform. Its intuitive interface combines AI workflow, RAG pipeline, agent capabilities, model management, observability features and more, letting you quickly go from prototype to production. Here's a list of the core features:
</br> </br>

@@ -54,7 +51,7 @@ Dify is an open-source LLM app development platform. Its intuitive interface com

**2. Comprehensive model support**:
-  Seamless integration with hundreds of proprietary / open-source LLMs from dozens of inference providers and self-hosted solutions, covering GPT, Mistral, Llama2, and any OpenAI API-compatible models. A full list of supported model providers can be found [here](https://docs.dify.ai/getting-started/readme/model-providers).
+  Seamless integration with hundreds of proprietary / open-source LLMs from dozens of inference providers and self-hosted solutions, covering GPT, Mistral, Llama3, and any OpenAI API-compatible models. A full list of supported model providers can be found [here](https://docs.dify.ai/getting-started/readme/model-providers).

![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)

@@ -109,7 +106,7 @@ Dify is an open-source LLM app development platform. Its intuitive interface com
    <td align="center">Agent</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
-    <td align="center">✅</td>
+    <td align="center">❌</td>
    <td align="center">✅</td>
  </tr>
  <tr>

@@ -127,7 +124,7 @@ Dify is an open-source LLM app development platform. Its intuitive interface com
    <td align="center">❌</td>
  </tr>
  <tr>
-    <td align="center">Enterprise Feature (SSO/Access control)</td>
+    <td align="center">Enterprise Features (SSO/Access control)</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">❌</td>
35 README_CN.md

@@ -35,6 +35,7 @@
  <a href="./README_ES.md"><img alt="上个月的提交次数" src="https://img.shields.io/badge/西班牙语-d9d9d9"></a>
  <a href="./README_KL.md"><img alt="上个月的提交次数" src="https://img.shields.io/badge/法语-d9d9d9"></a>
  <a href="./README_FR.md"><img alt="上个月的提交次数" src="https://img.shields.io/badge/克林贡语-d9d9d9"></a>
+  <a href="./README_KR.md"><img alt="上个月的提交次数" src="https://img.shields.io/badge/韓國語-d9d9d9"></a>
</div>

@@ -44,11 +45,11 @@
  <a href="https://trendshift.io/repositories/2152" target="_blank"><img src="https://trendshift.io/api/badge/repositories/2152" alt="langgenius%2Fdify | 趋势转变" style="width: 250px; height: 55px;" width="250" height="55"/></a>
</div>

-Dify 是一个开源的LLM应用开发平台。其直观的界面结合了AI工作流程、RAG管道、代理功能、模型管理、可观察性功能等,让您可以快速从原型到生产。以下是其核心功能列表:
+Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI 工作流、RAG 管道、Agent、模型管理、可观测性功能等,让您可以快速从原型到生产。以下是其核心功能列表:
</br> </br>

**1. 工作流**:
-  在视觉画布上构建和测试功能强大的AI工作流程,利用以下所有功能以及更多功能。
+  在画布上构建和测试功能强大的 AI 工作流程,利用以下所有功能以及更多功能。

https://github.com/langgenius/dify/assets/13230914/356df23e-1604-483d-80a6-9517ece318aa

@@ -56,7 +57,7 @@ Dify 是一个开源的LLM应用开发平台。其直观的界面结合了AI工

**2. 全面的模型支持**:
-  与数百种专有/开源LLMs以及数十种推理提供商和自托管解决方案无缝集成,涵盖GPT、Mistral、Llama2以及任何与OpenAI API兼容的模型。完整的支持模型提供商列表可在[此处](https://docs.dify.ai/getting-started/readme/model-providers)找到。
+  与数百种专有/开源 LLMs 以及数十种推理提供商和自托管解决方案无缝集成,涵盖 GPT、Mistral、Llama3 以及任何与 OpenAI API 兼容的模型。完整的支持模型提供商列表可在[此处](https://docs.dify.ai/getting-started/readme/model-providers)找到。

![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)

@@ -65,16 +66,16 @@ Dify 是一个开源的LLM应用开发平台。其直观的界面结合了AI工
用于制作提示、比较模型性能以及向基于聊天的应用程序添加其他功能(如文本转语音)的直观界面。

**4. RAG Pipeline**:
-  广泛的RAG功能,涵盖从文档摄入到检索的所有内容,支持从PDF、PPT和其他常见文档格式中提取文本的开箱即用的支持。
+  广泛的 RAG 功能,涵盖从文档摄入到检索的所有内容,支持从 PDF、PPT 和其他常见文档格式中提取文本的开箱即用的支持。

**5. Agent 智能体**:
-  您可以基于LLM函数调用或ReAct定义代理,并为代理添加预构建或自定义工具。Dify为AI代理提供了50多种内置工具,如谷歌搜索、DELL·E、稳定扩散和WolframAlpha等。
+  您可以基于 LLM 函数调用或 ReAct 定义 Agent,并为 Agent 添加预构建或自定义工具。Dify 为 AI Agent 提供了50多种内置工具,如谷歌搜索、DELL·E、Stable Diffusion 和 WolframAlpha 等。

**6. LLMOps**:
-  随时间监视和分析应用程序日志和性能。您可以根据生产数据和注释持续改进提示、数据集和模型。
+  随时间监视和分析应用程序日志和性能。您可以根据生产数据和标注持续改进提示、数据集和模型。

**7. 后端即服务**:
-  所有Dify的功能都带有相应的API,因此您可以轻松地将Dify集成到自己的业务逻辑中。
+  所有 Dify 的功能都带有相应的 API,因此您可以轻松地将 Dify 集成到自己的业务逻辑中。

## 功能比较

@@ -84,21 +85,21 @@ Dify 是一个开源的LLM应用开发平台。其直观的界面结合了AI工
    <th align="center">Dify.AI</th>
    <th align="center">LangChain</th>
    <th align="center">Flowise</th>
-    <th align="center">OpenAI助理API</th>
+    <th align="center">OpenAI Assistant API</th>
  </tr>
  <tr>
    <td align="center">编程方法</td>
    <td align="center">API + 应用程序导向</td>
-    <td align="center">Python代码</td>
+    <td align="center">Python 代码</td>
    <td align="center">应用程序导向</td>
-    <td align="center">API导向</td>
+    <td align="center">API 导向</td>
  </tr>
  <tr>
-    <td align="center">支持的LLMs</td>
+    <td align="center">支持的 LLMs</td>
    <td align="center">丰富多样</td>
    <td align="center">丰富多样</td>
    <td align="center">丰富多样</td>
-    <td align="center">仅限OpenAI</td>
+    <td align="center">仅限 OpenAI</td>
  </tr>
  <tr>
    <td align="center">RAG引擎</td>

@@ -108,21 +109,21 @@ Dify 是一个开源的LLM应用开发平台。其直观的界面结合了AI工
    <td align="center">✅</td>
  </tr>
  <tr>
-    <td align="center">代理</td>
+    <td align="center">Agent</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">✅</td>
  </tr>
  <tr>
-    <td align="center">工作流程</td>
+    <td align="center">工作流</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
  </tr>
  <tr>
-    <td align="center">可观察性</td>
+    <td align="center">可观测性</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">❌</td>

@@ -202,7 +203,7 @@ docker compose up -d
## Contributing

对于那些想要贡献代码的人,请参阅我们的[贡献指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。
-同时,请考虑通过社交媒体、活动和会议来支持Dify的分享。
+同时,请考虑通过社交媒体、活动和会议来支持 Dify 的分享。

> 我们正在寻找贡献者来帮助将Dify翻译成除了中文和英文之外的其他语言。如果您有兴趣帮助,请参阅我们的[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md)获取更多信息,并在我们的[Discord社区服务器](https://discord.gg/8Tpq4AcN9c)的`global-users`频道中留言。
README_ES.md

@@ -35,6 +35,7 @@
  <a href="./README_ES.md"><img alt="Actividad de Commits el último mes" src="https://img.shields.io/badge/Español-d9d9d9"></a>
  <a href="./README_KL.md"><img alt="Actividad de Commits el último mes" src="https://img.shields.io/badge/Français-d9d9d9"></a>
  <a href="./README_FR.md"><img alt="Actividad de Commits el último mes" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
+  <a href="./README_KR.md"><img alt="Actividad de Commits el último mes" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
</p>

#

@@ -54,7 +55,7 @@ Dify es una plataforma de desarrollo de aplicaciones de LLM de código abierto.

**2. Soporte de modelos completo**:
-  Integración perfecta con cientos de LLMs propietarios / de código abierto de docenas de proveedores de inferencia y soluciones auto-alojadas, que cubren GPT, Mistral, Llama2 y cualquier modelo compatible con la API de OpenAI. Se puede encontrar una lista completa de proveedores de modelos admitidos [aquí](https://docs.dify.ai/getting-started/readme/model-providers).
+  Integración perfecta con cientos de LLMs propietarios / de código abierto de docenas de proveedores de inferencia y soluciones auto-alojadas, que cubren GPT, Mistral, Llama3 y cualquier modelo compatible con la API de OpenAI. Se puede encontrar una lista completa de proveedores de modelos admitidos [aquí](https://docs.dify.ai/getting-started/readme/model-providers).

![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)

@@ -111,7 +112,7 @@ es basados en LLM Function Calling o ReAct, y agregar herramientas preconstruida
    <td align="center">Agente</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
-    <td align="center">✅</td>
+    <td align="center">❌</td>
    <td align="center">✅</td>
  </tr>
  <tr>
README_FR.md

@@ -35,6 +35,7 @@
  <a href="./README_ES.md"><img alt="Commits le mois dernier" src="https://img.shields.io/badge/Español-d9d9d9"></a>
  <a href="./README_KL.md"><img alt="Commits le mois dernier" src="https://img.shields.io/badge/Français-d9d9d9"></a>
  <a href="./README_FR.md"><img alt="Commits le mois dernier" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
+  <a href="./README_KR.md"><img alt="Commits le mois dernier" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
</p>

#

@@ -54,7 +55,7 @@ Dify est une plateforme de développement d'applications LLM open source. Son in

**2. Prise en charge complète des modèles**:
-  Intégration transparente avec des centaines de LLM propriétaires / open source provenant de dizaines de fournisseurs d'inférence et de solutions auto-hébergées, couvrant GPT, Mistral, Llama2, et tous les modèles compatibles avec l'API OpenAI. Une liste complète des fournisseurs de modèles pris en charge se trouve [ici](https://docs.dify.ai/getting-started/readme/model-providers).
+  Intégration transparente avec des centaines de LLM propriétaires / open source provenant de dizaines de fournisseurs d'inférence et de solutions auto-hébergées, couvrant GPT, Mistral, Llama3, et tous les modèles compatibles avec l'API OpenAI. Une liste complète des fournisseurs de modèles pris en charge se trouve [ici](https://docs.dify.ai/getting-started/readme/model-providers).

![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)

@@ -111,7 +112,7 @@ ités d'agent**:
    <td align="center">Agent</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
-    <td align="center">✅</td>
+    <td align="center">❌</td>
    <td align="center">✅</td>
  </tr>
  <tr>
41 README_JA.md

@@ -2,7 +2,7 @@
<p align="center">
  <a href="https://cloud.dify.ai">Dify Cloud</a> ·
-  <a href="https://docs.dify.ai/getting-started/install-self-hosted">自己ホスティング</a> ·
+  <a href="https://docs.dify.ai/getting-started/install-self-hosted">セルフホスト</a> ·
  <a href="https://docs.dify.ai">ドキュメント</a> ·
  <a href="https://cal.com/guchenhe/dify-demo">デモのスケジュール</a>
</p>

@@ -35,6 +35,7 @@
  <a href="./README_ES.md"><img alt="先月のコミット" src="https://img.shields.io/badge/Español-d9d9d9"></a>
  <a href="./README_KL.md"><img alt="先月のコミット" src="https://img.shields.io/badge/Français-d9d9d9"></a>
  <a href="./README_FR.md"><img alt="先月のコミット" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
+  <a href="./README_KR.md"><img alt="先月のコミット" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
</p>

#

@@ -54,10 +55,8 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ

-**2. 網羅的なモデルサポート**:
-数百のプロプライエタリ/オープンソースのLLMと、数十の推論プロバイダーおよびセルフホスティングソリューションとのシームレスな統合を提供します。GPT、Mistral、Llama2、およびOpenAI API互換のモデルをカバーします。サポートされているモデルプロバイダーの完全なリストは[こちら](https://docs
-
-.dify.ai/getting-started/readme/model-providers)をご覧ください。
+**2. 包括的なモデルサポート**:
+数百のプロプライエタリ/オープンソースのLLMと、数十の推論プロバイダーおよびセルフホスティングソリューションとのシームレスな統合を提供します。GPT、Mistral、Llama3、およびOpenAI API互換のモデルをカバーします。サポートされているモデルプロバイダーの完全なリストは[こちら](https://docs.dify.ai/getting-started/readme/model-providers)をご覧ください。

![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)

@@ -96,9 +95,9 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ
  </tr>
  <tr>
    <td align="center">サポートされているLLM</td>
-    <td align="center">豊富なバリエーション</td>
-    <td align="center">豊富なバリエーション</td>
-    <td align="center">豊富なバリエーション</td>
+    <td align="center">バリエーション豊富</td>
+    <td align="center">バリエーション豊富</td>
+    <td align="center">バリエーション豊富</td>
    <td align="center">OpenAIのみ</td>
  </tr>
  <tr>

@@ -112,7 +111,7 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ
    <td align="center">エージェント</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
-    <td align="center">✅</td>
+    <td align="center">❌</td>
    <td align="center">✅</td>
  </tr>
  <tr>

@@ -148,36 +147,34 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ
## Difyの使用方法

- **クラウド </br>**
-  [こちら](https://dify.ai)のDify Cloudサービスを利用して、セットアップが不要で誰でも試すことができます。サンドボックスプランでは、200回の無料のGPT-4呼び出しが含まれています。
+  [こちら](https://dify.ai)のDify Cloudサービスを利用して、セットアップ不要で試すことができます。サンドボックスプランには、200回の無料のGPT-4呼び出しが含まれています。

- **Dify Community Editionのセルフホスティング</br>**
-  この[スターターガイド](#quick-start)を使用して、環境でDifyをすばやく実行できます。
-  さらなる参照や詳細な手順については、[ドキュメント](https://docs.dify.ai)をご覧ください。
+  この[スターターガイド](#quick-start)を使用して、ローカル環境でDifyを簡単に実行できます。
+  さらなる参考資料や詳細な手順については、[ドキュメント](https://docs.dify.ai)をご覧ください。

- **エンタープライズ/組織向けのDify</br>**
-  追加のエンタープライズ向け機能を提供しています。[こちらからミーティ
-
-ングを予約](https://cal.com/guchenhe/30min)したり、[メールを送信](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)してエンタープライズのニーズについて相談してください。 </br>
+  追加のエンタープライズ向け機能を提供しています。[こちらからミーティングを予約](https://cal.com/guchenhe/30min)したり、[メールを送信](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)してエンタープライズのニーズについて相談してください。 </br>
  > AWSを使用しているスタートアップや中小企業の場合は、[AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6)のDify Premiumをチェックして、ワンクリックで独自のAWS VPCにデプロイできます。カスタムロゴとブランディングでアプリを作成するオプションを備えた手頃な価格のAMIオファリングです。

-## 先を見る
+## 最新の情報を入手

-GitHubでDifyにスターを付け、新しいリリースをすぐに通知されます。
+GitHub上でDifyにスターを付けることで、Difyに関する新しいニュースを受け取れます。

![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)

## クイックスタート
-> Difyをインストールする前に、マシンが以下の最小システム要件を満たしていることを確認してください:
+> Difyをインストールする前に、お使いのマシンが以下の最小システム要件を満たしていることを確認してください:
>
>- CPU >= 2コア
>- RAM >= 4GB

</br>

-Difyサーバーを起動する最も簡単な方法は、当社の[docker-compose.yml](docker/docker-compose.yaml)ファイルを実行することです。インストールコマンドを実行する前に、マシンに[Docker](https://docs.docker.com/get-docker/)と[Docker Compose](https://docs.docker.com/compose/install/)がインストールされていることを確認してください。
+Difyサーバーを起動する最も簡単な方法は、[docker-compose.yml](docker/docker-compose.yaml)ファイルを実行することです。インストールコマンドを実行する前に、マシンに[Docker](https://docs.docker.com/get-docker/)と[Docker Compose](https://docs.docker.com/compose/install/)がインストールされていることを確認してください。

```bash
cd docker

@@ -220,7 +217,7 @@ docker compose up -d
* [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。
* [Twitter](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。

-または、直接チームメンバーとミーティングをスケジュールします:
+または、直接チームメンバーとミーティングをスケジュール:

<table>
  <tr>

@@ -231,7 +228,7 @@ docker compose up -d
    <td><a href='https://cal.com/guchenhe/30min'>ミーティング</a></td>
-    <td>無料の30分間のミーティングをスケジュールしてください。</td>
+    <td>無料の30分間のミーティングをスケジュール</td>
  </tr>
  <tr>
    <td><a href='mailto:support@dify.ai?subject=[GitHub]Technical%20Support'>技術サポート</a></td>

@@ -246,4 +243,4 @@ docker compose up -d

## ライセンス

-プロジェクトはMITライセンスの下で利用可能です。[LICENSE](LICENSE)をご参照ください。
+このリポジトリは、Dify Open Source License にいくつかの追加制限を加えた[Difyオープンソースライセンス](LICENSE)の下で利用可能です。
README_KL.md

@@ -35,6 +35,7 @@
  <a href="./README_ES.md"><img alt="Commits last month" src="https://img.shields.io/badge/Español-d9d9d9"></a>
  <a href="./README_KL.md"><img alt="Commits last month" src="https://img.shields.io/badge/Français-d9d9d9"></a>
  <a href="./README_FR.md"><img alt="Commits last month" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
+  <a href="./README_KR.md"><img alt="Commits last month" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
</p>

#

@@ -54,7 +55,7 @@ Dify is an open-source LLM app development platform. Its intuitive interface com

**2. Comprehensive model support**:
-  Seamless integration with hundreds of proprietary / open-source LLMs from dozens of inference providers and self-hosted solutions, covering GPT, Mistral, Llama2, and any OpenAI API-compatible models. A full list of supported model providers can be found [here](https://docs.dify.ai/getting-started/readme/model-providers).
+  Seamless integration with hundreds of proprietary / open-source LLMs from dozens of inference providers and self-hosted solutions, covering GPT, Mistral, Llama3, and any OpenAI API-compatible models. A full list of supported model providers can be found [here](https://docs.dify.ai/getting-started/readme/model-providers).

![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)

@@ -111,7 +112,7 @@ Dify is an open-source LLM app development platform. Its intuitive interface com
    <td align="center">Agent</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
-    <td align="center">✅</td>
+    <td align="center">❌</td>
    <td align="center">✅</td>
  </tr>
  <tr>
243 README_KR.md (Normal file)

@@ -0,0 +1,243 @@
![cover-v5-optimized](https://github.com/langgenius/dify/assets/13230914/f9e19af5-61ba-4119-b926-d10c4c06ebab)

<p align="center">
  <a href="https://cloud.dify.ai">Dify 클라우드</a> ·
  <a href="https://docs.dify.ai/getting-started/install-self-hosted">셀프-호스팅</a> ·
  <a href="https://docs.dify.ai">문서</a> ·
  <a href="https://cal.com/guchenhe/60-min-meeting">기업 문의</a>
</p>

<p align="center">
  <a href="https://dify.ai" target="_blank">
    <img alt="Static Badge" src="https://img.shields.io/badge/Product-F04438"></a>
  <a href="https://dify.ai/pricing" target="_blank">
    <img alt="Static Badge" src="https://img.shields.io/badge/free-pricing?logo=free&color=%20%23155EEF&label=pricing&labelColor=%20%23528bff"></a>
  <a href="https://discord.gg/FngNHpbcY7" target="_blank">
    <img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb" alt="chat on Discord"></a>
  <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
    <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5" alt="follow on Twitter"></a>
  <a href="https://hub.docker.com/u/langgenius" target="_blank">
    <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
  <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
    <img alt="Commits last month" src="https://img.shields.io/github/commit-activity/m/langgenius/dify?labelColor=%20%2332b583&color=%20%2312b76a"></a>
  <a href="https://github.com/langgenius/dify/" target="_blank">
    <img alt="Issues closed" src="https://img.shields.io/github/issues-search?query=repo%3Alanggenius%2Fdify%20is%3Aclosed&label=issues%20closed&labelColor=%20%237d89b0&color=%20%235d6b98"></a>
  <a href="https://github.com/langgenius/dify/discussions/" target="_blank">
    <img alt="Discussion posts" src="https://img.shields.io/github/discussions/langgenius/dify?labelColor=%20%239b8afb&color=%20%237a5af8"></a>
</p>

<p align="center">
  <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
  <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
  <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
  <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
  <a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
  <a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
  <a href="./README_KR.md"><img alt="한국어 README" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
</p>

Dify는 오픈 소스 LLM 앱 개발 플랫폼입니다. 직관적인 인터페이스를 통해 AI 워크플로우, RAG 파이프라인, 에이전트 기능, 모델 관리, 관찰 기능 등을 결합하여 프로토타입에서 프로덕션까지 빠르게 전환할 수 있습니다. 주요 기능 목록은 다음과 같습니다:</br> </br>

**1. 워크플로우**:
  다음 기능들을 비롯한 다양한 기능을 활용하여 시각적 캔버스에서 강력한 AI 워크플로우를 구축하고 테스트하세요.

  https://github.com/langgenius/dify/assets/13230914/356df23e-1604-483d-80a6-9517ece318aa

**2. 포괄적인 모델 지원:**:
  수십 개의 추론 제공업체와 자체 호스팅 솔루션에서 제공하는 수백 개의 독점 및 오픈 소스 LLM과 원활하게 통합되며, GPT, Mistral, Llama3 및 모든 OpenAI API 호환 모델을 포함합니다. 지원되는 모델 제공업체의 전체 목록은 [여기](https://docs.dify.ai/getting-started/readme/model-providers)에서 확인할 수 있습니다.

![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)

**3. 통합 개발환경**:
  프롬프트를 작성하고, 모델 성능을 비교하며, 텍스트-음성 변환과 같은 추가 기능을 채팅 기반 앱에 추가할 수 있는 직관적인 인터페이스를 제공합니다.

**4. RAG 파이프라인**:
  문서 수집부터 검색까지 모든 것을 다루며, PDF, PPT 및 기타 일반적인 문서 형식에서 텍스트 추출을 위한 기본 지원이 포함되어 있는 광범위한 RAG 기능을 제공합니다.

**5. 에이전트 기능**:
  LLM 함수 호출 또는 ReAct를 기반으로 에이전트를 정의하고 에이전트에 대해 사전 구축된 도구나 사용자 정의 도구를 추가할 수 있습니다. Dify는 Google Search, DELL·E, Stable Diffusion, WolframAlpha 등 AI 에이전트를 위한 50개 이상의 내장 도구를 제공합니다.

**6. LLMOps**:
  시간 경과에 따른 애플리케이션 로그와 성능을 모니터링하고 분석합니다. 생산 데이터와 주석을 기반으로 프롬프트, 데이터세트, 모델을 지속적으로 개선할 수 있습니다.

**7. Backend-as-a-Service**:
  Dify의 모든 제품에는 해당 API가 함께 제공되므로 Dify를 자신의 비즈니스 로직에 쉽게 통합할 수 있습니다.

## 기능 비교

<table style="width: 100%;">
  <tr>
    <th align="center">기능</th>
    <th align="center">Dify.AI</th>
    <th align="center">LangChain</th>
    <th align="center">Flowise</th>
    <th align="center">OpenAI Assistants API</th>
  </tr>
  <tr>
    <td align="center">프로그래밍 접근 방식</td>
    <td align="center">API + 앱 중심</td>
    <td align="center">Python 코드</td>
    <td align="center">앱 중심</td>
    <td align="center">API 중심</td>
  </tr>
  <tr>
    <td align="center">지원되는 LLMs</td>
    <td align="center">다양한 종류</td>
    <td align="center">다양한 종류</td>
    <td align="center">다양한 종류</td>
    <td align="center">OpenAI 전용</td>
  </tr>
  <tr>
    <td align="center">RAG 엔진</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
  </tr>
  <tr>
    <td align="center">에이전트</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">✅</td>
  </tr>
  <tr>
    <td align="center">워크플로우</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
  </tr>
  <tr>
    <td align="center">가시성</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">❌</td>
  </tr>
  <tr>
    <td align="center">기업용 기능 (SSO/접근 제어)</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">❌</td>
    <td align="center">❌</td>
  </tr>
  <tr>
    <td align="center">로컬 배포</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
  </tr>
</table>

## Dify 사용하기

- **클라우드 </br>**
  우리는 누구나 설정이 필요 없이 사용해 볼 수 있도록 [Dify 클라우드](https://dify.ai) 서비스를 호스팅합니다. 이는 자체 배포 버전의 모든 기능을 제공하며, 샌드박스 플랜에서 무료로 200회의 GPT-4 호출을 포함합니다.

- **셀프-호스팅 Dify 커뮤니티 에디션</br>**
  환경에서 Dify를 빠르게 실행하려면 이 [스타터 가이드를](#quick-start) 참조하세요.
  추가 참조 및 더 심층적인 지침은 [문서](https://docs.dify.ai)를 사용하세요.

- **기업 / 조직을 위한 Dify</br>**
  우리는 추가적인 기업 중심 기능을 제공합니다. 당사와 [미팅일정](https://cal.com/guchenhe/30min)을 잡거나 [이메일 보내기](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)를 통해 기업 요구 사항을 논의하십시오. </br>
  > AWS를 사용하는 스타트업 및 중소기업의 경우 [AWS Marketplace에서 Dify Premium](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6)을 확인하고 한 번의 클릭으로 자체 AWS VPC에 배포하십시오. 맞춤형 로고와 브랜딩이 포함된 앱을 생성할 수 있는 옵션이 포함된 저렴한 AMI 제품입니다.

## 앞서가기

GitHub에서 Dify에 별표를 찍어 새로운 릴리스를 즉시 알림 받으세요.

![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)

## 빠른 시작
> Dify를 설치하기 전에 컴퓨터가 다음과 같은 최소 시스템 요구 사항을 충족하는지 확인하세요 :
>- CPU >= 2 Core
>- RAM >= 4GB

</br>

Dify 서버를 시작하는 가장 쉬운 방법은 [docker-compose.yml](docker/docker-compose.yaml) 파일을 실행하는 것입니다. 설치 명령을 실행하기 전에 [Docker](https://docs.docker.com/get-docker/) 및 [Docker Compose](https://docs.docker.com/compose/install/)가 머신에 설치되어 있는지 확인하세요.

```bash
cd docker
docker compose up -d
```

실행 후 브라우저의 [http://localhost/install](http://localhost/install) 에서 Dify 대시보드에 액세스하고 초기화 프로세스를 시작할 수 있습니다.

> Dify에 기여하거나 추가 개발을 하고 싶다면 소스 코드에서 [배포에 대한 가이드](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code)를 참조하세요.

## 다음 단계

구성 커스터마이징이 필요한 경우, [docker-compose.yml](docker/docker-compose.yaml) 파일의 코멘트를 참조하여 환경 구성을 수동으로 설정하십시오. 변경 후 `docker-compose up -d` 를 다시 실행하십시오. 환경 변수의 전체 목록은 [여기](https://docs.dify.ai/getting-started/install-self-hosted/environments)에서 확인할 수 있습니다.

고가용성 설정을 구성하려면 Dify를 Kubernetes에 배포할 수 있는 커뮤니티 제공 [Helm Charts](https://helm.sh/)가 있습니다.

- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)

## 기여

코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요.
동시에 Dify를 소셜 미디어와 행사 및 컨퍼런스에 공유하여 지원하는 것을 고려해 주시기 바랍니다.

> 우리는 Dify를 중국어나 영어 이외의 언어로 번역하는 데 도움을 줄 수 있는 기여자를 찾고 있습니다. 도움을 주고 싶으시다면 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md)에서 더 많은 정보를 확인하시고 [Discord 커뮤니티 서버](https://discord.gg/8Tpq4AcN9c)의 `global-users` 채널에 댓글을 남겨주세요.

**기여자**

<a href="https://github.com/langgenius/dify/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=langgenius/dify" />
</a>

## 커뮤니티 & 연락처

* [Github 토론](https://github.com/langgenius/dify/discussions). 피드백 공유 및 질문하기에 적합합니다.
* [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요.
* [이메일](mailto:support@dify.ai?subject=[GitHub]Questions%20About%20Dify). Dify.AI 사용에 대한 질문하기에 적합합니다.
* [디스코드](https://discord.gg/FngNHpbcY7). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다.
* [트위터](https://twitter.com/dify_ai). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다.

또는 팀원과 직접 미팅을 예약하세요:

<table>
  <tr>
    <th>연락처</th>
    <th>목적</th>
  </tr>
  <tr>
    <td><a href='https://cal.com/guchenhe/15min' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/9ebcd111-1205-4d71-83d5-948d70b809f5' alt='Git-Hub-README-Button-3x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
    <td>비즈니스 문의 및 제품 피드백</td>
  </tr>
  <tr>
    <td><a href='https://cal.com/pinkbanana' target='_blank'><img class="schedule-button" src='https://github.com/langgenius/dify/assets/13230914/d1edd00a-d7e4-4513-be6c-e57038e143fd' alt='Git-Hub-README-Button-2x' style="width: 180px; height: auto; object-fit: contain;"/></a></td>
    <td>기여, 이슈 및 기능 요청</td>
  </tr>
</table>

## Star 히스토리

[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)

## 보안 공개

개인정보 보호를 위해 보안 문제를 GitHub에 게시하지 마십시오. 대신 security@dify.ai로 질문을 보내주시면 더 자세한 답변을 드리겠습니다.

## 라이선스

이 저장소는 기본적으로 몇 가지 추가 제한 사항이 있는 Apache 2.0인 [Dify 오픈 소스 라이선스](LICENSE)에 따라 사용할 수 있습니다.
@ -1,6 +1,3 @@
|
||||
# Server Edition
|
||||
EDITION=SELF_HOSTED
|
||||
|
||||
# Your App secret key will be used for securely signing the session cookie
|
||||
# Make sure you are changing this key for your deployment with a strong key.
|
||||
# You can generate a strong key using `openssl rand -base64 42`.
|
||||
@ -20,6 +17,9 @@ APP_WEB_URL=http://127.0.0.1:3000
|
||||
# Files URL
|
||||
FILES_URL=http://127.0.0.1:5001
|
||||
|
||||
# The time in seconds after the signature is rejected
|
||||
FILES_ACCESS_TIMEOUT=300
|
||||
|
||||
# celery configuration
|
||||
CELERY_BROKER_URL=redis://:difyai123456@localhost:6379/1
|
||||
|
||||
@ -52,12 +52,23 @@ AZURE_BLOB_ACCOUNT_NAME=your-account-name
|
||||
AZURE_BLOB_ACCOUNT_KEY=your-account-key
|
||||
AZURE_BLOB_CONTAINER_NAME=yout-container-name
|
||||
AZURE_BLOB_ACCOUNT_URL=https://<your_account_name>.blob.core.windows.net
|
||||
# Aliyun oss Storage configuration
|
||||
ALIYUN_OSS_BUCKET_NAME=your-bucket-name
|
||||
ALIYUN_OSS_ACCESS_KEY=your-access-key
|
||||
ALIYUN_OSS_SECRET_KEY=your-secret-key
|
||||
ALIYUN_OSS_ENDPOINT=your-endpoint
|
||||
ALIYUN_OSS_AUTH_VERSION=v1
|
||||
ALIYUN_OSS_REGION=your-region
|
||||
|
||||
# Google Storage configuration
|
||||
GOOGLE_STORAGE_BUCKET_NAME=yout-bucket-name
|
||||
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON=your-google-service-account-json-base64-string
|
||||
|
||||
# CORS configuration
|
||||
WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
|
||||
CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
|
||||
|
||||
# Vector database configuration, support: weaviate, qdrant, milvus, relyt
|
||||
# Vector database configuration, support: weaviate, qdrant, milvus, relyt, pgvecto_rs, pgvector
|
||||
VECTOR_STORE=weaviate
|
||||
|
||||
# Weaviate configuration
|
||||
@ -70,6 +81,8 @@ WEAVIATE_BATCH_SIZE=100
|
||||
QDRANT_URL=http://localhost:6333
|
||||
QDRANT_API_KEY=difyai123456
|
||||
QDRANT_CLIENT_TIMEOUT=20
|
||||
QDRANT_GRPC_ENABLED=false
|
||||
QDRANT_GRPC_PORT=6334
|
||||
|
||||
# Milvus configuration
|
||||
MILVUS_HOST=127.0.0.1
|
||||
@ -85,6 +98,20 @@ RELYT_USER=postgres
|
||||
RELYT_PASSWORD=postgres
|
||||
RELYT_DATABASE=postgres
|
||||
|
||||
# PGVECTO_RS configuration
|
||||
PGVECTO_RS_HOST=localhost
|
||||
PGVECTO_RS_PORT=5431
|
||||
PGVECTO_RS_USER=postgres
|
||||
PGVECTO_RS_PASSWORD=difyai123456
|
||||
PGVECTO_RS_DATABASE=postgres
|
||||
|
||||
# PGVector configuration
|
||||
PGVECTOR_HOST=127.0.0.1
|
||||
PGVECTOR_PORT=5433
|
||||
PGVECTOR_USER=postgres
|
||||
PGVECTOR_PASSWORD=postgres
|
||||
PGVECTOR_DATABASE=postgres
|
||||
|
||||
# Upload configuration
|
||||
UPLOAD_FILE_SIZE_LIMIT=15
|
||||
UPLOAD_FILE_BATCH_LIMIT=5
|
||||
@ -118,27 +145,9 @@ NOTION_CLIENT_SECRET=you-client-secret
|
||||
NOTION_CLIENT_ID=you-client-id
|
||||
NOTION_INTERNAL_SECRET=you-internal-secret
|
||||
|
||||
# Hosted Model Credentials
|
||||
HOSTED_OPENAI_API_KEY=
|
||||
HOSTED_OPENAI_API_BASE=
|
||||
HOSTED_OPENAI_API_ORGANIZATION=
|
||||
HOSTED_OPENAI_TRIAL_ENABLED=false
|
||||
HOSTED_OPENAI_QUOTA_LIMIT=200
|
||||
HOSTED_OPENAI_PAID_ENABLED=false
|
||||
|
||||
HOSTED_AZURE_OPENAI_ENABLED=false
|
||||
HOSTED_AZURE_OPENAI_API_KEY=
|
||||
HOSTED_AZURE_OPENAI_API_BASE=
|
||||
HOSTED_AZURE_OPENAI_QUOTA_LIMIT=200
|
||||
|
||||
HOSTED_ANTHROPIC_API_BASE=
|
||||
HOSTED_ANTHROPIC_API_KEY=
|
||||
HOSTED_ANTHROPIC_TRIAL_ENABLED=false
|
||||
HOSTED_ANTHROPIC_QUOTA_LIMIT=600000
|
||||
HOSTED_ANTHROPIC_PAID_ENABLED=false
|
||||
|
||||
ETL_TYPE=dify
|
||||
UNSTRUCTURED_API_URL=
|
||||
UNSTRUCTURED_API_KEY=
|
||||
|
||||
SSRF_PROXY_HTTP_URL=
|
||||
SSRF_PROXY_HTTPS_URL=
|
||||
@ -160,3 +169,16 @@ CODE_MAX_NUMBER_ARRAY_LENGTH=1000
|
||||
# API Tool configuration
|
||||
API_TOOL_DEFAULT_CONNECT_TIMEOUT=10
|
||||
API_TOOL_DEFAULT_READ_TIMEOUT=60
|
||||
|
||||
# HTTP Node configuration
|
||||
HTTP_REQUEST_MAX_CONNECT_TIMEOUT=300
|
||||
HTTP_REQUEST_MAX_READ_TIMEOUT=600
|
||||
HTTP_REQUEST_MAX_WRITE_TIMEOUT=600
|
||||
HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760 # 10MB
|
||||
HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576 # 1MB
|
||||
|
||||
# Log file path
|
||||
LOG_FILE=
|
||||
|
||||
# Indexing configuration
|
||||
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=1000
|
||||
|
||||
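For self-hosted deployments, the comments in the file above double as a recipe: generate a strong key once and persist it in your env file. A minimal sketch using the `openssl` command the file itself recommends — the env file path is an assumption about your local layout:

```bash
# Generate and append a strong secret key (file location is an assumption)
echo "SECRET_KEY=$(openssl rand -base64 42)" >> api/.env
```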
@@ -55,3 +55,16 @@
9. If you need to debug local async processing, please start the worker service by running
   `celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail`.
   The started Celery app handles the async tasks, e.g. dataset importing and document indexing.


## Testing

1. Install dependencies for both the backend and the test environment:

   ```bash
   pip install -r requirements.txt -r requirements-dev.txt
   ```

2. Run the tests locally with mocked system environment variables from the `tool.pytest_env` section in `pyproject.toml`:

   ```bash
   dev/pytest/pytest_all_tests.sh
   ```
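If you only need part of the suite, plain pytest also works once the dev dependencies are installed. A minimal sketch — the `api/tests` path and the `-k` filter are assumptions about the local layout, not something this guide prescribes:

```bash
# Run a subset of the backend tests (path and filter are assumptions)
pytest api/tests -k "dataset" -v
```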

api/app.py
@@ -4,23 +4,28 @@ if not os.environ.get("DEBUG") or os.environ.get("DEBUG").lower() != 'true':
    from gevent import monkey

    monkey.patch_all()
    # if os.environ.get("VECTOR_STORE") == 'milvus':

    import grpc.experimental.gevent

    grpc.experimental.gevent.init_gevent()

import json
import logging
import sys
import threading
import time
import warnings
from logging.handlers import RotatingFileHandler

from flask import Flask, Response, request
from flask_cors import CORS

from werkzeug.exceptions import Unauthorized

from commands import register_commands
from config import CloudEditionConfig, Config
from config import Config

# DO NOT REMOVE BELOW
from events import event_handlers
from extensions import (
    ext_celery,
    ext_code_based_extension,
@@ -37,11 +42,8 @@ from extensions import (
from extensions.ext_database import db
from extensions.ext_login import login_manager
from libs.passport import PassportService
from services.account_service import AccountService

# DO NOT REMOVE BELOW
from events import event_handlers
from models import account, dataset, model, source, task, tool, tools, web
from services.account_service import AccountService

# DO NOT REMOVE ABOVE

@@ -73,20 +75,32 @@ config_type = os.getenv('EDITION', default='SELF_HOSTED') # ce edition first
# ----------------------------


def create_app(test_config=None) -> Flask:
def create_app() -> Flask:
    app = DifyApp(__name__)

    if test_config:
        app.config.from_object(test_config)
    else:
        if config_type == "CLOUD":
            app.config.from_object(CloudEditionConfig())
        else:
            app.config.from_object(Config())
    app.config.from_object(Config())

    app.secret_key = app.config['SECRET_KEY']

    logging.basicConfig(level=app.config.get('LOG_LEVEL', 'INFO'))
    log_handlers = None
    log_file = app.config.get('LOG_FILE')
    if log_file:
        log_dir = os.path.dirname(log_file)
        os.makedirs(log_dir, exist_ok=True)
        log_handlers = [
            RotatingFileHandler(
                filename=log_file,
                maxBytes=1024 * 1024 * 1024,
                backupCount=5
            ),
            logging.StreamHandler(sys.stdout)
        ]

    logging.basicConfig(
        level=app.config.get('LOG_LEVEL'),
        format=app.config.get('LOG_FORMAT'),
        datefmt=app.config.get('LOG_DATEFORMAT'),
        handlers=log_handlers
    )
    initialize_extensions(app)
    register_blueprints(app)
@@ -115,7 +129,7 @@ def initialize_extensions(app):
@login_manager.request_loader
def load_user_from_request(request_from_flask_login):
    """Load user based on the request."""
    if request.blueprint == 'console':
    if request.blueprint in ['console', 'inner_api']:
        # Check if the user_id contains a dot, indicating the old format
        auth_header = request.headers.get('Authorization', '')
        if not auth_header:
@@ -151,6 +165,7 @@ def unauthorized_handler():
def register_blueprints(app):
    from controllers.console import bp as console_app_bp
    from controllers.files import bp as files_bp
    from controllers.inner_api import bp as inner_api_bp
    from controllers.service_api import bp as service_api_bp
    from controllers.web import bp as web_bp

@@ -188,6 +203,8 @@ def register_blueprints(app):
    )
    app.register_blueprint(files_bp)

    app.register_blueprint(inner_api_bp)


# create app
app = create_app()
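With this change, file logging becomes opt-in: when `LOG_FILE` is set, a rotating handler (roughly 1 GB per file, five backups kept) is attached alongside stdout. A minimal sketch of the relevant env settings — the path is an assumption:

```bash
# Enable rotating file logs in addition to stdout (path is an assumption)
LOG_FILE=/var/log/dify/api.log
LOG_LEVEL=INFO
```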
@@ -305,6 +305,14 @@ def migrate_knowledge_vector_database():
                "vector_store": {"class_prefix": collection_name}
            }
            dataset.index_struct = json.dumps(index_struct_dict)
        elif vector_type == "pgvector":
            dataset_id = dataset.id
            collection_name = Dataset.gen_collection_name_by_id(dataset_id)
            index_struct_dict = {
                "type": 'pgvector',
                "vector_store": {"class_prefix": collection_name}
            }
            dataset.index_struct = json.dumps(index_struct_dict)
        else:
            raise ValueError(f"Vector store {config.get('VECTOR_STORE')} is not supported.")

@@ -440,9 +448,64 @@ def convert_to_agent_apps():
    click.echo(click.style('Congratulations! Converted {} agent apps.'.format(len(proceeded_app_ids)), fg='green'))


@click.command('add-qdrant-doc-id-index', help='add qdrant doc_id index.')
@click.option('--field', default='metadata.doc_id', prompt=False, help='index field, default is metadata.doc_id.')
def add_qdrant_doc_id_index(field: str):
    click.echo(click.style('Start add qdrant doc_id index.', fg='green'))
    config = current_app.config
    vector_type = config.get('VECTOR_STORE')
    if vector_type != "qdrant":
        click.echo(click.style('Sorry, only support qdrant vector store.', fg='red'))
        return
    create_count = 0

    try:
        bindings = db.session.query(DatasetCollectionBinding).all()
        if not bindings:
            click.echo(click.style('Sorry, no dataset collection bindings found.', fg='red'))
            return
        import qdrant_client
        from qdrant_client.http.exceptions import UnexpectedResponse
        from qdrant_client.http.models import PayloadSchemaType

        from core.rag.datasource.vdb.qdrant.qdrant_vector import QdrantConfig
        for binding in bindings:
            qdrant_config = QdrantConfig(
                endpoint=config.get('QDRANT_URL'),
                api_key=config.get('QDRANT_API_KEY'),
                root_path=current_app.root_path,
                timeout=config.get('QDRANT_CLIENT_TIMEOUT'),
                grpc_port=config.get('QDRANT_GRPC_PORT'),
                prefer_grpc=config.get('QDRANT_GRPC_ENABLED')
            )
            try:
                client = qdrant_client.QdrantClient(**qdrant_config.to_qdrant_params())
                # create payload index
                client.create_payload_index(binding.collection_name, field,
                                            field_schema=PayloadSchemaType.KEYWORD)
                create_count += 1
            except UnexpectedResponse as e:
                # Collection does not exist, so return
                if e.status_code == 404:
                    click.echo(click.style(f'Collection not found, collection_name:{binding.collection_name}.', fg='red'))
                    continue
                # Some other error occurred, so re-raise the exception
                else:
                    click.echo(click.style(f'Failed to create qdrant index, collection_name:{binding.collection_name}.', fg='red'))

    except Exception as e:
        click.echo(click.style('Failed to create qdrant client.', fg='red'))

    click.echo(
        click.style(f'Congratulations! Create {create_count} collection indexes.',
                    fg='green'))


def register_commands(app):
    app.cli.add_command(reset_password)
    app.cli.add_command(reset_email)
    app.cli.add_command(reset_encrypt_key_pair)
    app.cli.add_command(vdb_migrate)
    app.cli.add_command(convert_to_agent_apps)
    app.cli.add_command(add_qdrant_doc_id_index)
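Once registered on the Flask CLI, the new command can be run from the api directory like the other maintenance commands; a sketch, assuming the standard `flask` entry point (the `--field` value shown is simply the option's declared default):

```bash
# Backfill a keyword payload index on every bound Qdrant collection
flask add-qdrant-doc-id-index --field metadata.doc_id
```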
@@ -5,6 +5,7 @@ import dotenv
dotenv.load_dotenv()

DEFAULTS = {
    'EDITION': 'SELF_HOSTED',
    'DB_USERNAME': 'postgres',
    'DB_PASSWORD': '',
    'DB_HOST': 'localhost',
@@ -22,22 +23,30 @@ DEFAULTS = {
    'SERVICE_API_URL': 'https://api.dify.ai',
    'APP_WEB_URL': 'https://udify.app',
    'FILES_URL': '',
    'FILES_ACCESS_TIMEOUT': 300,
    'S3_ADDRESS_STYLE': 'auto',
    'STORAGE_TYPE': 'local',
    'STORAGE_LOCAL_PATH': 'storage',
    'CHECK_UPDATE_URL': 'https://updates.dify.ai',
    'DEPLOY_ENV': 'PRODUCTION',
    'SQLALCHEMY_DATABASE_URI_SCHEME': 'postgresql',
    'SQLALCHEMY_POOL_SIZE': 30,
    'SQLALCHEMY_MAX_OVERFLOW': 10,
    'SQLALCHEMY_POOL_RECYCLE': 3600,
    'SQLALCHEMY_POOL_PRE_PING': 'False',
    'SQLALCHEMY_ECHO': 'False',
    'SENTRY_TRACES_SAMPLE_RATE': 1.0,
    'SENTRY_PROFILES_SAMPLE_RATE': 1.0,
    'WEAVIATE_GRPC_ENABLED': 'True',
    'WEAVIATE_BATCH_SIZE': 100,
    'QDRANT_CLIENT_TIMEOUT': 20,
    'QDRANT_GRPC_ENABLED': 'False',
    'QDRANT_GRPC_PORT': '6334',
    'CELERY_BACKEND': 'database',
    'LOG_LEVEL': 'INFO',
    'LOG_FILE': '',
    'LOG_FORMAT': '%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s',
    'LOG_DATEFORMAT': '%Y-%m-%d %H:%M:%S',
    'HOSTED_OPENAI_QUOTA_LIMIT': 200,
    'HOSTED_OPENAI_TRIAL_ENABLED': 'False',
    'HOSTED_OPENAI_TRIAL_MODELS': 'gpt-3.5-turbo,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-16k,gpt-3.5-turbo-16k-0613,gpt-3.5-turbo-0613,gpt-3.5-turbo-0125,text-davinci-003',
@@ -69,6 +78,11 @@ DEFAULTS = {
    'TOOL_ICON_CACHE_MAX_AGE': 3600,
    'MILVUS_DATABASE': 'default',
    'KEYWORD_DATA_SOURCE_TYPE': 'database',
    'INNER_API': 'False',
    'ENTERPRISE_ENABLED': 'False',
    'INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH': 1000,
    'WORKFLOW_MAX_EXECUTION_STEPS': 50,
    'WORKFLOW_MAX_EXECUTION_TIME': 600,
}


@@ -99,12 +113,15 @@ class Config:
        # ------------------------
        # General Configurations.
        # ------------------------
        self.CURRENT_VERSION = "0.6.3"
        self.CURRENT_VERSION = "0.6.9"
        self.COMMIT_SHA = get_env('COMMIT_SHA')
        self.EDITION = "SELF_HOSTED"
        self.EDITION = get_env('EDITION')
        self.DEPLOY_ENV = get_env('DEPLOY_ENV')
        self.TESTING = False
        self.LOG_LEVEL = get_env('LOG_LEVEL')
        self.LOG_FILE = get_env('LOG_FILE')
        self.LOG_FORMAT = get_env('LOG_FORMAT')
        self.LOG_DATEFORMAT = get_env('LOG_DATEFORMAT')

        # The backend URL prefix of the console API.
        # used to concatenate the login authorization callback or notion integration callback.
@@ -127,12 +144,21 @@ class Config:
        # Url is signed and has expiration time.
        self.FILES_URL = get_env('FILES_URL') if get_env('FILES_URL') else self.CONSOLE_API_URL

        # File Access Time specifies a time interval in seconds for the file to be accessed.
        # The default value is 300 seconds.
        self.FILES_ACCESS_TIMEOUT = int(get_env('FILES_ACCESS_TIMEOUT'))

        # Your App secret key will be used for securely signing the session cookie
        # Make sure you are changing this key for your deployment with a strong key.
        # You can generate a strong key using `openssl rand -base64 42`.
        # Alternatively you can set it with `SECRET_KEY` environment variable.
        self.SECRET_KEY = get_env('SECRET_KEY')

        # Enable or disable the inner API.
        self.INNER_API = get_bool_env('INNER_API')
        # The inner API key is used to authenticate the inner API.
        self.INNER_API_KEY = get_env('INNER_API_KEY')

        # cors settings
        self.CONSOLE_CORS_ALLOW_ORIGINS = get_cors_allow_origins(
            'CONSOLE_CORS_ALLOW_ORIGINS', self.CONSOLE_WEB_URL)
@@ -149,14 +175,17 @@ class Config:
            key: get_env(key) for key in
            ['DB_USERNAME', 'DB_PASSWORD', 'DB_HOST', 'DB_PORT', 'DB_DATABASE', 'DB_CHARSET']
        }
        self.SQLALCHEMY_DATABASE_URI_SCHEME = get_env('SQLALCHEMY_DATABASE_URI_SCHEME')

        db_extras = f"?client_encoding={db_credentials['DB_CHARSET']}" if db_credentials['DB_CHARSET'] else ""

        self.SQLALCHEMY_DATABASE_URI = f"postgresql://{db_credentials['DB_USERNAME']}:{db_credentials['DB_PASSWORD']}@{db_credentials['DB_HOST']}:{db_credentials['DB_PORT']}/{db_credentials['DB_DATABASE']}{db_extras}"
        self.SQLALCHEMY_DATABASE_URI = f"{self.SQLALCHEMY_DATABASE_URI_SCHEME}://{db_credentials['DB_USERNAME']}:{db_credentials['DB_PASSWORD']}@{db_credentials['DB_HOST']}:{db_credentials['DB_PORT']}/{db_credentials['DB_DATABASE']}{db_extras}"
        self.SQLALCHEMY_ENGINE_OPTIONS = {
            'pool_size': int(get_env('SQLALCHEMY_POOL_SIZE')),
            'max_overflow': int(get_env('SQLALCHEMY_MAX_OVERFLOW')),
            'pool_recycle': int(get_env('SQLALCHEMY_POOL_RECYCLE'))
            'pool_recycle': int(get_env('SQLALCHEMY_POOL_RECYCLE')),
            'pool_pre_ping': get_bool_env('SQLALCHEMY_POOL_PRE_PING'),
            'connect_args': {'options': '-c timezone=UTC'},
        }

        self.SQLALCHEMY_ECHO = get_bool_env('SQLALCHEMY_ECHO')
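Because the URI is now assembled from a configurable scheme instead of a hard-coded `postgresql://` prefix, a deployment can pin a specific SQLAlchemy dialect or driver via env vars; a sketch — the driver choice here is an assumption, not something this diff mandates:

```bash
# Pin the SQLAlchemy scheme and enable connection health checks (values are assumptions)
SQLALCHEMY_DATABASE_URI_SCHEME=postgresql+psycopg2
SQLALCHEMY_POOL_PRE_PING=True
```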
@@ -195,10 +224,18 @@ class Config:
        self.AZURE_BLOB_ACCOUNT_KEY = get_env('AZURE_BLOB_ACCOUNT_KEY')
        self.AZURE_BLOB_CONTAINER_NAME = get_env('AZURE_BLOB_CONTAINER_NAME')
        self.AZURE_BLOB_ACCOUNT_URL = get_env('AZURE_BLOB_ACCOUNT_URL')
        self.ALIYUN_OSS_BUCKET_NAME = get_env('ALIYUN_OSS_BUCKET_NAME')
        self.ALIYUN_OSS_ACCESS_KEY = get_env('ALIYUN_OSS_ACCESS_KEY')
        self.ALIYUN_OSS_SECRET_KEY = get_env('ALIYUN_OSS_SECRET_KEY')
        self.ALIYUN_OSS_ENDPOINT = get_env('ALIYUN_OSS_ENDPOINT')
        self.ALIYUN_OSS_REGION = get_env('ALIYUN_OSS_REGION')
        self.ALIYUN_OSS_AUTH_VERSION = get_env('ALIYUN_OSS_AUTH_VERSION')
        self.GOOGLE_STORAGE_BUCKET_NAME = get_env('GOOGLE_STORAGE_BUCKET_NAME')
        self.GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64 = get_env('GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64')

        # ------------------------
        # Vector Store Configurations.
        # Currently, only support: qdrant, milvus, zilliz, weaviate, relyt
        # Currently, only support: qdrant, milvus, zilliz, weaviate, relyt, pgvector
        # ------------------------
        self.VECTOR_STORE = get_env('VECTOR_STORE')
        self.KEYWORD_STORE = get_env('KEYWORD_STORE')
@@ -206,6 +243,8 @@ class Config:
        self.QDRANT_URL = get_env('QDRANT_URL')
        self.QDRANT_API_KEY = get_env('QDRANT_API_KEY')
        self.QDRANT_CLIENT_TIMEOUT = get_env('QDRANT_CLIENT_TIMEOUT')
        self.QDRANT_GRPC_ENABLED = get_env('QDRANT_GRPC_ENABLED')
        self.QDRANT_GRPC_PORT = get_env('QDRANT_GRPC_PORT')

        # milvus / zilliz setting
        self.MILVUS_HOST = get_env('MILVUS_HOST')
@@ -228,6 +267,20 @@ class Config:
        self.RELYT_PASSWORD = get_env('RELYT_PASSWORD')
        self.RELYT_DATABASE = get_env('RELYT_DATABASE')

        # pgvecto rs settings
        self.PGVECTO_RS_HOST = get_env('PGVECTO_RS_HOST')
        self.PGVECTO_RS_PORT = get_env('PGVECTO_RS_PORT')
        self.PGVECTO_RS_USER = get_env('PGVECTO_RS_USER')
        self.PGVECTO_RS_PASSWORD = get_env('PGVECTO_RS_PASSWORD')
        self.PGVECTO_RS_DATABASE = get_env('PGVECTO_RS_DATABASE')

        # pgvector settings
        self.PGVECTOR_HOST = get_env('PGVECTOR_HOST')
        self.PGVECTOR_PORT = get_env('PGVECTOR_PORT')
        self.PGVECTOR_USER = get_env('PGVECTOR_USER')
        self.PGVECTOR_PASSWORD = get_env('PGVECTOR_PASSWORD')
        self.PGVECTOR_DATABASE = get_env('PGVECTOR_DATABASE')

        # ------------------------
        # Mail Configurations.
        # ------------------------
@@ -241,9 +294,9 @@ class Config:
        self.SMTP_USERNAME = get_env('SMTP_USERNAME')
        self.SMTP_PASSWORD = get_env('SMTP_PASSWORD')
        self.SMTP_USE_TLS = get_bool_env('SMTP_USE_TLS')

        # ------------------------
        # Workpace Configurations.
        # Workspace Configurations.
        # ------------------------
        self.INVITE_EXPIRY_HOURS = int(get_env('INVITE_EXPIRY_HOURS'))

@@ -269,6 +322,9 @@ class Config:
        self.UPLOAD_FILE_BATCH_LIMIT = int(get_env('UPLOAD_FILE_BATCH_LIMIT'))
        self.UPLOAD_IMAGE_FILE_SIZE_LIMIT = int(get_env('UPLOAD_IMAGE_FILE_SIZE_LIMIT'))

        self.WORKFLOW_MAX_EXECUTION_STEPS = int(get_env('WORKFLOW_MAX_EXECUTION_STEPS'))
        self.WORKFLOW_MAX_EXECUTION_TIME = int(get_env('WORKFLOW_MAX_EXECUTION_TIME'))

        # Moderation in app Configurations.
        self.OUTPUT_MODERATION_BUFFER_SIZE = int(get_env('OUTPUT_MODERATION_BUFFER_SIZE'))

@@ -282,6 +338,12 @@ class Config:
        # ------------------------
        # Platform Configurations.
        # ------------------------
        self.GITHUB_CLIENT_ID = get_env('GITHUB_CLIENT_ID')
        self.GITHUB_CLIENT_SECRET = get_env('GITHUB_CLIENT_SECRET')
        self.GOOGLE_CLIENT_ID = get_env('GOOGLE_CLIENT_ID')
        self.GOOGLE_CLIENT_SECRET = get_env('GOOGLE_CLIENT_SECRET')
        self.OAUTH_REDIRECT_PATH = get_env('OAUTH_REDIRECT_PATH')

        self.HOSTED_OPENAI_API_KEY = get_env('HOSTED_OPENAI_API_KEY')
        self.HOSTED_OPENAI_API_BASE = get_env('HOSTED_OPENAI_API_BASE')
        self.HOSTED_OPENAI_API_ORGANIZATION = get_env('HOSTED_OPENAI_API_ORGANIZATION')
@@ -315,6 +377,7 @@ class Config:

        self.ETL_TYPE = get_env('ETL_TYPE')
        self.UNSTRUCTURED_API_URL = get_env('UNSTRUCTURED_API_URL')
        self.UNSTRUCTURED_API_KEY = get_env('UNSTRUCTURED_API_KEY')
        self.BILLING_ENABLED = get_bool_env('BILLING_ENABLED')
        self.CAN_REPLACE_LOGO = get_bool_env('CAN_REPLACE_LOGO')

@@ -327,16 +390,9 @@ class Config:
        self.TOOL_ICON_CACHE_MAX_AGE = get_env('TOOL_ICON_CACHE_MAX_AGE')

        self.KEYWORD_DATA_SOURCE_TYPE = get_env('KEYWORD_DATA_SOURCE_TYPE')
        self.ENTERPRISE_ENABLED = get_bool_env('ENTERPRISE_ENABLED')

class CloudEditionConfig(Config):

    def __init__(self):
        super().__init__()

        self.EDITION = "CLOUD"

        self.GITHUB_CLIENT_ID = get_env('GITHUB_CLIENT_ID')
        self.GITHUB_CLIENT_SECRET = get_env('GITHUB_CLIENT_SECRET')
        self.GOOGLE_CLIENT_ID = get_env('GOOGLE_CLIENT_ID')
        self.GOOGLE_CLIENT_SECRET = get_env('GOOGLE_CLIENT_SECRET')
        self.OAUTH_REDIRECT_PATH = get_env('OAUTH_REDIRECT_PATH')
        # ------------------------
        # Indexing Configurations.
        # ------------------------
        self.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH = get_env('INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH')
@@ -1,10 +1,11 @@

languages = ['en-US', 'zh-Hans', 'pt-BR', 'es-ES', 'fr-FR', 'de-DE', 'ja-JP', 'ko-KR', 'ru-RU', 'it-IT', 'uk-UA', 'vi-VN']
languages = ['en-US', 'zh-Hans', 'zh-Hant', 'pt-BR', 'es-ES', 'fr-FR', 'de-DE', 'ja-JP', 'ko-KR', 'ru-RU', 'it-IT', 'uk-UA', 'vi-VN', 'pl-PL']

language_timezone_mapping = {
    'en-US': 'America/New_York',
    'zh-Hans': 'Asia/Shanghai',
    'zh-Hant': 'Asia/Taipei',
    'pt-BR': 'America/Sao_Paulo',
    'es-ES': 'Europe/Madrid',
    'fr-FR': 'Europe/Paris',
@@ -15,6 +16,8 @@ language_timezone_mapping = {
    'it-IT': 'Europe/Rome',
    'uk-UA': 'Europe/Kyiv',
    'vi-VN': 'Asia/Ho_Chi_Minh',
    'ro-RO': 'Europe/Bucharest',
    'pl-PL': 'Europe/Warsaw',
}
@@ -24,7 +24,8 @@
      "description": "Welcome to your personalized Investment Analysis Copilot service, where we delve into the depths of stock analysis to provide you with comprehensive insights. \n",
      "is_listed": true,
      "position": 0,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -40,7 +41,8 @@
      "description": "Code interpreter, clarifying the syntax and semantics of the code.",
      "is_listed": true,
      "position": 13,
      "privacy_policy": "https://dify.ai"
      "privacy_policy": "https://dify.ai",
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -56,7 +58,8 @@
      "description": "Hello, I am your creative partner in bringing ideas to vivid life! I can assist you in creating stunning designs by leveraging abilities of DALL\u00b7E 3. ",
      "is_listed": true,
      "position": 4,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -72,7 +75,8 @@
      "description": "Fully SEO Optimized Article including FAQs",
      "is_listed": true,
      "position": 1,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -88,7 +92,8 @@
      "description": "Generate Flat Style Image",
      "is_listed": true,
      "position": 10,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -104,7 +109,8 @@
      "description": "A multilingual translator that provides translation capabilities in multiple languages. Input the text you need to translate and select the target language.",
      "is_listed": true,
      "position": 10,
      "privacy_policy": "https://dify.ai"
      "privacy_policy": "https://dify.ai",
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -120,7 +126,8 @@
      "description": "I am a YouTube Channel Data Analysis Copilot, I am here to provide expert data analysis tailored to your needs. ",
      "is_listed": true,
      "position": 2,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -136,7 +143,8 @@
      "description": "Meeting minutes generator",
      "is_listed": true,
      "position": 0,
      "privacy_policy": "https://dify.ai"
      "privacy_policy": "https://dify.ai",
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -152,7 +160,8 @@
      "description": "Tell me the main elements, I will generate a cyberpunk style image for you. ",
      "is_listed": true,
      "position": 10,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -168,7 +177,8 @@
      "description": "Write SQL from natural language by pasting in your schema with the request.Please describe your query requirements in natural language and select the target database type.",
      "is_listed": true,
      "position": 13,
      "privacy_policy": "https://dify.ai"
      "privacy_policy": "https://dify.ai",
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -184,7 +194,8 @@
      "description": "Welcome to your personalized travel service with Consultant! \ud83c\udf0d\u2708\ufe0f Ready to embark on a journey filled with adventure and relaxation? Let's dive into creating your unforgettable travel experience. ",
      "is_listed": true,
      "position": 3,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -200,7 +211,8 @@
      "description": "I can answer your questions related to strategic marketing.",
      "is_listed": true,
      "position": 10,
      "privacy_policy": "https://dify.ai"
      "privacy_policy": "https://dify.ai",
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -216,7 +228,8 @@
      "description": "A simulated front-end interviewer that tests the skill level of front-end development through questioning.",
      "is_listed": true,
      "position": 19,
      "privacy_policy": "https://dify.ai"
      "privacy_policy": "https://dify.ai",
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -232,7 +245,8 @@
      "description": "I'm here to hear about your feature request about Dify and help you flesh it out further. What's on your mind?",
      "is_listed": true,
      "position": 6,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    }
  ]
},
@@ -261,7 +275,8 @@
      "description": "\u4e00\u4e2a\u6a21\u62df\u7684\u524d\u7aef\u9762\u8bd5\u5b98\uff0c\u901a\u8fc7\u63d0\u95ee\u7684\u65b9\u5f0f\u5bf9\u524d\u7aef\u5f00\u53d1\u7684\u6280\u80fd\u6c34\u5e73\u8fdb\u884c\u68c0\u9a8c\u3002",
      "is_listed": true,
      "position": 20,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -277,7 +292,8 @@
      "description": "\u8f93\u5165\u76f8\u5173\u5143\u7d20\uff0c\u4e3a\u4f60\u751f\u6210\u6241\u5e73\u63d2\u753b\u98ce\u683c\u7684\u5c01\u9762\u56fe\u7247",
      "is_listed": true,
      "position": 10,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -293,7 +309,8 @@
      "description": "\u4e00\u4e2a\u591a\u8bed\u8a00\u7ffb\u8bd1\u5668\uff0c\u63d0\u4f9b\u591a\u79cd\u8bed\u8a00\u7ffb\u8bd1\u80fd\u529b\uff0c\u8f93\u5165\u4f60\u9700\u8981\u7ffb\u8bd1\u7684\u6587\u672c\uff0c\u9009\u62e9\u76ee\u6807\u8bed\u8a00\u5373\u53ef\u3002\u63d0\u793a\u8bcd\u6765\u81ea\u5b9d\u7389\u3002",
      "is_listed": true,
      "position": 10,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -309,7 +326,8 @@
      "description": "\u6211\u5c06\u5e2e\u52a9\u4f60\u628a\u81ea\u7136\u8bed\u8a00\u8f6c\u5316\u6210\u6307\u5b9a\u7684\u6570\u636e\u5e93\u67e5\u8be2 SQL \u8bed\u53e5\uff0c\u8bf7\u5728\u4e0b\u65b9\u8f93\u5165\u4f60\u9700\u8981\u67e5\u8be2\u7684\u6761\u4ef6\uff0c\u5e76\u9009\u62e9\u76ee\u6807\u6570\u636e\u5e93\u7c7b\u578b\u3002",
      "is_listed": true,
      "position": 12,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -325,7 +343,8 @@
      "description": "\u9610\u660e\u4ee3\u7801\u7684\u8bed\u6cd5\u548c\u8bed\u4e49\u3002",
      "is_listed": true,
      "position": 2,
      "privacy_policy": "https://dify.ai"
      "privacy_policy": "https://dify.ai",
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -341,7 +360,8 @@
      "description": "\u8f93\u5165\u76f8\u5173\u5143\u7d20\uff0c\u4e3a\u4f60\u751f\u6210\u8d5b\u535a\u670b\u514b\u98ce\u683c\u7684\u63d2\u753b",
      "is_listed": true,
      "position": 10,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -357,7 +377,8 @@
      "description": "\u6211\u662f\u4e00\u540dSEO\u4e13\u5bb6\uff0c\u53ef\u4ee5\u6839\u636e\u60a8\u63d0\u4f9b\u7684\u6807\u9898\u3001\u5173\u952e\u8bcd\u3001\u76f8\u5173\u4fe1\u606f\u6765\u6279\u91cf\u751f\u6210SEO\u6587\u7ae0\u3002",
      "is_listed": true,
      "position": 10,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -373,7 +394,8 @@
      "description": "\u5e2e\u4f60\u91cd\u65b0\u7ec4\u7ec7\u548c\u8f93\u51fa\u6df7\u4e71\u590d\u6742\u7684\u4f1a\u8bae\u7eaa\u8981\u3002",
      "is_listed": true,
      "position": 6,
      "privacy_policy": "https://dify.ai"
      "privacy_policy": "https://dify.ai",
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -389,7 +411,8 @@
      "description": "\u6b22\u8fce\u4f7f\u7528\u60a8\u7684\u4e2a\u6027\u5316\u7f8e\u80a1\u6295\u8d44\u5206\u6790\u52a9\u624b\uff0c\u5728\u8fd9\u91cc\u6211\u4eec\u6df1\u5165\u7684\u8fdb\u884c\u80a1\u7968\u5206\u6790\uff0c\u4e3a\u60a8\u63d0\u4f9b\u5168\u9762\u7684\u6d1e\u5bdf\u3002",
      "is_listed": true,
      "position": 0,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -405,7 +428,8 @@
      "description": "\u60a8\u597d\uff0c\u6211\u662f\u60a8\u7684\u521b\u610f\u4f19\u4f34\uff0c\u5c06\u5e2e\u52a9\u60a8\u5c06\u60f3\u6cd5\u751f\u52a8\u5730\u5b9e\u73b0\uff01\u6211\u53ef\u4ee5\u534f\u52a9\u60a8\u5229\u7528DALL\u00b7E 3\u7684\u80fd\u529b\u521b\u9020\u51fa\u4ee4\u4eba\u60ca\u53f9\u7684\u8bbe\u8ba1\u3002",
      "is_listed": true,
      "position": 4,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -421,7 +445,8 @@
      "description": "\u7ffb\u8bd1\u4e13\u5bb6\uff1a\u63d0\u4f9b\u4e2d\u82f1\u6587\u4e92\u8bd1",
      "is_listed": true,
      "position": 4,
      "privacy_policy": "https://dify.ai"
      "privacy_policy": "https://dify.ai",
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -437,7 +462,8 @@
      "description": "\u60a8\u7684\u79c1\u4eba\u5b66\u4e60\u5bfc\u5e08\uff0c\u5e2e\u60a8\u5236\u5b9a\u5b66\u4e60\u8ba1\u5212\u5e76\u8f85\u5bfc",
      "is_listed": true,
      "position": 26,
      "privacy_policy": "https://dify.ai"
      "privacy_policy": "https://dify.ai",
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -453,7 +479,8 @@
      "description": "\u5e2e\u4f60\u64b0\u5199\u8bba\u6587\u6587\u732e\u7efc\u8ff0",
      "is_listed": true,
      "position": 7,
      "privacy_policy": "https://dify.ai"
      "privacy_policy": "https://dify.ai",
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -469,7 +496,8 @@
      "description": "\u4f60\u597d\uff0c\u544a\u8bc9\u6211\u60a8\u60f3\u5206\u6790\u7684 YouTube \u9891\u9053\uff0c\u6211\u5c06\u4e3a\u60a8\u6574\u7406\u4e00\u4efd\u5b8c\u6574\u7684\u6570\u636e\u5206\u6790\u62a5\u544a\u3002",
      "is_listed": true,
      "position": 0,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    },
    {
      "app": {
@@ -485,7 +513,8 @@
      "description": "\u6b22\u8fce\u4f7f\u7528\u60a8\u7684\u4e2a\u6027\u5316\u65c5\u884c\u670d\u52a1\u987e\u95ee\uff01\ud83c\udf0d\u2708\ufe0f \u51c6\u5907\u597d\u8e0f\u4e0a\u4e00\u6bb5\u5145\u6ee1\u5192\u9669\u4e0e\u653e\u677e\u7684\u65c5\u7a0b\u4e86\u5417\uff1f\u8ba9\u6211\u4eec\u4e00\u8d77\u6df1\u5165\u6253\u9020\u60a8\u96be\u5fd8\u7684\u65c5\u884c\u4f53\u9a8c\u5427\u3002",
      "is_listed": true,
      "position": 0,
      "privacy_policy": null
      "privacy_policy": null,
      "custom_disclaimer": null
    }
  ]
},
@@ -1,4 +1,3 @@
# -*- coding:utf-8 -*-
@@ -1,22 +1,57 @@
from flask import Blueprint

from libs.external_api import ExternalApi

bp = Blueprint('console', __name__, url_prefix='/console/api')
api = ExternalApi(bp)

# Import other controllers
from . import admin, apikey, extension, feature, setup, version, ping
from . import admin, apikey, extension, feature, ping, setup, version

# Import app controllers
from .app import (advanced_prompt_template, annotation, app, audio, completion, conversation, generator, message,
                  model_config, site, statistic, workflow, workflow_run, workflow_app_log, workflow_statistic, agent)
from .app import (
    advanced_prompt_template,
    agent,
    annotation,
    app,
    audio,
    completion,
    conversation,
    generator,
    message,
    model_config,
    site,
    statistic,
    workflow,
    workflow_app_log,
    workflow_run,
    workflow_statistic,
)

# Import auth controllers
from .auth import activate, data_source_oauth, login, oauth

# Import billing controllers
from .billing import billing

# Import datasets controllers
from .datasets import data_source, datasets, datasets_document, datasets_segments, file, hit_testing

# Import explore controllers
from .explore import (audio, completion, conversation, installed_app, message, parameter, recommended_app,
                      saved_message, workflow)
from .explore import (
    audio,
    completion,
    conversation,
    installed_app,
    message,
    parameter,
    recommended_app,
    saved_message,
    workflow,
)

# Import tag controllers
from .tag import tags

# Import workspace controllers
from .workspace import account, members, model_providers, models, tool_providers, workspace
@@ -48,6 +48,7 @@ class InsertExploreAppListApi(Resource):
        parser.add_argument('desc', type=str, location='json')
        parser.add_argument('copyright', type=str, location='json')
        parser.add_argument('privacy_policy', type=str, location='json')
        parser.add_argument('custom_disclaimer', type=str, location='json')
        parser.add_argument('language', type=supported_language, required=True, nullable=False, location='json')
        parser.add_argument('category', type=str, required=True, nullable=False, location='json')
        parser.add_argument('position', type=int, required=True, nullable=False, location='json')
@@ -62,6 +63,7 @@ class InsertExploreAppListApi(Resource):
            desc = args['desc'] if args['desc'] else ''
            copy_right = args['copyright'] if args['copyright'] else ''
            privacy_policy = args['privacy_policy'] if args['privacy_policy'] else ''
            custom_disclaimer = args['custom_disclaimer'] if args['custom_disclaimer'] else ''
        else:
            desc = site.description if site.description else \
                args['desc'] if args['desc'] else ''
@@ -69,6 +71,8 @@ class InsertExploreAppListApi(Resource):
                args['copyright'] if args['copyright'] else ''
            privacy_policy = site.privacy_policy if site.privacy_policy else \
                args['privacy_policy'] if args['privacy_policy'] else ''
            custom_disclaimer = site.custom_disclaimer if site.custom_disclaimer else \
                args['custom_disclaimer'] if args['custom_disclaimer'] else ''

        recommended_app = RecommendedApp.query.filter(RecommendedApp.app_id == args['app_id']).first()

@@ -78,6 +82,7 @@ class InsertExploreAppListApi(Resource):
                description=desc,
                copyright=copy_right,
                privacy_policy=privacy_policy,
                custom_disclaimer=custom_disclaimer,
                language=args['language'],
                category=args['category'],
                position=args['position']
@@ -93,6 +98,7 @@ class InsertExploreAppListApi(Resource):
            recommended_app.description = desc
            recommended_app.copyright = copy_right
            recommended_app.privacy_policy = privacy_policy
            recommended_app.custom_disclaimer = custom_disclaimer
            recommended_app.language = args['language']
            recommended_app.category = args['category']
            recommended_app.position = args['position']
@@ -1,26 +1,25 @@
import json
import uuid

from flask_login import current_user
from flask_restful import Resource, inputs, marshal_with, reqparse
from werkzeug.exceptions import Forbidden, BadRequest
from flask_restful import Resource, inputs, marshal, marshal_with, reqparse
from werkzeug.exceptions import BadRequest, Forbidden, abort

from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
from core.agent.entities import AgentToolEntity
from extensions.ext_database import db
from core.tools.tool_manager import ToolManager
from core.tools.utils.configuration import ToolParameterConfigurationManager
from fields.app_fields import (
    app_detail_fields,
    app_detail_fields_with_site,
    app_pagination_fields,
)
from libs.login import login_required
from models.model import App, AppMode, AppModelConfig
from services.app_service import AppService
from models.model import App, AppModelConfig, AppMode
from core.tools.utils.configuration import ToolParameterConfigurationManager
from core.tools.tool_manager import ToolManager

from services.tag_service import TagService

ALLOW_CREATE_APP_MODES = ['chat', 'agent-chat', 'advanced-chat', 'workflow', 'completion']

@@ -30,21 +29,29 @@ class AppListApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @marshal_with(app_pagination_fields)
    def get(self):
        """Get app list"""
        def uuid_list(value):
            try:
                return [str(uuid.UUID(v)) for v in value.split(',')]
            except ValueError:
                abort(400, message="Invalid UUID format in tag_ids.")
        parser = reqparse.RequestParser()
        parser.add_argument('page', type=inputs.int_range(1, 99999), required=False, default=1, location='args')
        parser.add_argument('limit', type=inputs.int_range(1, 100), required=False, default=20, location='args')
        parser.add_argument('mode', type=str, choices=['chat', 'workflow', 'agent-chat', 'channel', 'all'], default='all', location='args', required=False)
        parser.add_argument('name', type=str, location='args', required=False)
        parser.add_argument('tag_ids', type=uuid_list, location='args', required=False)

        args = parser.parse_args()

        # get app list
        app_service = AppService()
        app_pagination = app_service.get_paginate_apps(current_user.current_tenant_id, args)
        if not app_pagination:
            return {'data': [], 'total': 0, 'page': 1, 'limit': 20, 'has_more': False}

        return app_pagination
        return marshal(app_pagination, app_pagination_fields)

    @setup_required
    @login_required
@@ -109,43 +116,9 @@ class AppApi(Resource):
    @marshal_with(app_detail_fields_with_site)
    def get(self, app_model):
        """Get app detail"""
        # get original app model config
        if app_model.mode == AppMode.AGENT_CHAT.value or app_model.is_agent:
            model_config: AppModelConfig = app_model.app_model_config
            agent_mode = model_config.agent_mode_dict
            # decrypt agent tool parameters if it's secret-input
            for tool in agent_mode.get('tools') or []:
                if not isinstance(tool, dict) or len(tool.keys()) <= 3:
                    continue
                agent_tool_entity = AgentToolEntity(**tool)
                # get tool
                try:
                    tool_runtime = ToolManager.get_agent_tool_runtime(
                        tenant_id=current_user.current_tenant_id,
                        agent_tool=agent_tool_entity,
                    )
                    manager = ToolParameterConfigurationManager(
                        tenant_id=current_user.current_tenant_id,
                        tool_runtime=tool_runtime,
                        provider_name=agent_tool_entity.provider_id,
                        provider_type=agent_tool_entity.provider_type,
                    )
        app_service = AppService()

                    # get decrypted parameters
                    if agent_tool_entity.tool_parameters:
                        parameters = manager.decrypt_tool_parameters(agent_tool_entity.tool_parameters or {})
                        masked_parameter = manager.mask_tool_parameters(parameters or {})
                    else:
                        masked_parameter = {}

                    # override tool parameters
                    tool['tool_parameters'] = masked_parameter
                except Exception as e:
                    pass

            # override agent mode
            model_config.agent_mode = json.dumps(agent_mode)
            db.session.commit()
        app_model = app_service.get_app(app_model)

        return app_model
@@ -85,7 +85,7 @@ class ChatMessageTextApi(Resource):
        response = AudioService.transcript_tts(
            app_model=app_model,
            text=request.form['text'],
            voice=request.form.get('voice'),
            voice=request.form['voice'] if request.form.get('voice') else app_model.app_model_config.text_to_speech_dict.get('voice'),
            streaming=False
        )
@@ -91,3 +91,9 @@ class DraftWorkflowNotExist(BaseHTTPException):
    error_code = 'draft_workflow_not_exist'
    description = "Draft workflow need to be initialized."
    code = 400


class DraftWorkflowNotSync(BaseHTTPException):
    error_code = 'draft_workflow_not_sync'
    description = "Workflow graph might have been modified, please refresh and resubmit."
    code = 400
@@ -57,6 +57,7 @@ class ModelConfigResource(Resource):
            try:
                tool_runtime = ToolManager.get_agent_tool_runtime(
                    tenant_id=current_user.current_tenant_id,
                    app_id=app_model.id,
                    agent_tool=agent_tool_entity,
                )
                manager = ToolParameterConfigurationManager(
@@ -64,6 +65,7 @@ class ModelConfigResource(Resource):
                    tool_runtime=tool_runtime,
                    provider_name=agent_tool_entity.provider_id,
                    provider_type=agent_tool_entity.provider_type,
                    identity_id=f'AGENT.{app_model.id}'
                )
            except Exception as e:
                continue
@@ -94,6 +96,7 @@ class ModelConfigResource(Resource):
            try:
                tool_runtime = ToolManager.get_agent_tool_runtime(
                    tenant_id=current_user.current_tenant_id,
                    app_id=app_model.id,
                    agent_tool=agent_tool_entity,
                )
            except Exception as e:
@@ -104,6 +107,7 @@ class ModelConfigResource(Resource):
                tool_runtime=tool_runtime,
                provider_name=agent_tool_entity.provider_id,
                provider_type=agent_tool_entity.provider_type,
                identity_id=f'AGENT.{app_model.id}'
            )
            manager.delete_tool_parameters_cache()

@@ -111,9 +115,11 @@ class ModelConfigResource(Resource):
            if agent_tool_entity.tool_parameters:
                if key not in masked_parameter_map:
                    continue

                if agent_tool_entity.tool_parameters == masked_parameter_map[key]:
                    agent_tool_entity.tool_parameters = parameter_map[key]

                for masked_key, masked_value in masked_parameter_map[key].items():
                    if masked_key in agent_tool_entity.tool_parameters and \
                            agent_tool_entity.tool_parameters[masked_key] == masked_value:
                        agent_tool_entity.tool_parameters[masked_key] = parameter_map[key].get(masked_key)

            # encrypt parameters
            if agent_tool_entity.tool_parameters:
@@ -23,6 +23,7 @@ def parse_app_site_args():
    parser.add_argument('customize_domain', type=str, required=False, location='json')
    parser.add_argument('copyright', type=str, required=False, location='json')
    parser.add_argument('privacy_policy', type=str, required=False, location='json')
    parser.add_argument('custom_disclaimer', type=str, required=False, location='json')
    parser.add_argument('customize_token_strategy', type=str, choices=['must', 'allow', 'not_allow'],
                        required=False,
                        location='json')
@@ -56,6 +57,7 @@ class AppSite(Resource):
            'customize_domain',
            'copyright',
            'privacy_policy',
            'custom_disclaimer',
            'customize_token_strategy',
            'prompt_public'
        ]:
@@ -7,7 +7,7 @@ from werkzeug.exceptions import InternalServerError, NotFound

import services
from controllers.console import api
from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist
from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
@@ -20,6 +20,7 @@ from libs.helper import TimestampField, uuid_value
from libs.login import current_user, login_required
from models.model import App, AppMode
from services.app_generate_service import AppGenerateService
from services.errors.app import WorkflowHashNotEqualError
from services.workflow_service import WorkflowService

logger = logging.getLogger(__name__)
@@ -59,6 +60,7 @@ class DraftWorkflowApi(Resource):
            parser = reqparse.RequestParser()
            parser.add_argument('graph', type=dict, required=True, nullable=False, location='json')
            parser.add_argument('features', type=dict, required=True, nullable=False, location='json')
            parser.add_argument('hash', type=str, required=False, location='json')
            args = parser.parse_args()
        elif 'text/plain' in content_type:
            try:
@@ -71,7 +73,8 @@ class DraftWorkflowApi(Resource):

                args = {
                    'graph': data.get('graph'),
                    'features': data.get('features')
                    'features': data.get('features'),
                    'hash': data.get('hash')
                }
            except json.JSONDecodeError:
                return {'message': 'Invalid JSON data'}, 400
@@ -79,15 +82,21 @@ class DraftWorkflowApi(Resource):
            abort(415)

        workflow_service = WorkflowService()
        workflow = workflow_service.sync_draft_workflow(
            app_model=app_model,
            graph=args.get('graph'),
            features=args.get('features'),
            account=current_user
        )

        try:
            workflow = workflow_service.sync_draft_workflow(
                app_model=app_model,
                graph=args.get('graph'),
                features=args.get('features'),
                unique_hash=args.get('hash'),
                account=current_user
            )
        except WorkflowHashNotEqualError:
            raise DraftWorkflowNotSync()

        return {
            "result": "success",
            "hash": workflow.unique_hash,
            "updated_at": TimestampField().format(workflow.updated_at or workflow.created_at)
        }

@@ -128,6 +137,71 @@ class AdvancedChatDraftWorkflowRunApi(Resource):
            logging.exception("internal server error.")
            raise InternalServerError()

class AdvancedChatDraftRunIterationNodeApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model(mode=[AppMode.ADVANCED_CHAT])
    def post(self, app_model: App, node_id: str):
        """
        Run draft workflow iteration node
        """
        parser = reqparse.RequestParser()
        parser.add_argument('inputs', type=dict, location='json')
        args = parser.parse_args()

        try:
            response = AppGenerateService.generate_single_iteration(
                app_model=app_model,
                user=current_user,
                node_id=node_id,
                args=args,
                streaming=True
            )

            return helper.compact_generate_response(response)
        except services.errors.conversation.ConversationNotExistsError:
            raise NotFound("Conversation Not Exists.")
        except services.errors.conversation.ConversationCompletedError:
            raise ConversationCompletedError()
        except ValueError as e:
            raise e
        except Exception as e:
            logging.exception("internal server error.")
            raise InternalServerError()

class WorkflowDraftRunIterationNodeApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model(mode=[AppMode.WORKFLOW])
    def post(self, app_model: App, node_id: str):
        """
        Run draft workflow iteration node
        """
        parser = reqparse.RequestParser()
        parser.add_argument('inputs', type=dict, location='json')
        args = parser.parse_args()

        try:
            response = AppGenerateService.generate_single_iteration(
                app_model=app_model,
                user=current_user,
                node_id=node_id,
                args=args,
                streaming=True
            )

            return helper.compact_generate_response(response)
        except services.errors.conversation.ConversationNotExistsError:
            raise NotFound("Conversation Not Exists.")
        except services.errors.conversation.ConversationCompletedError:
            raise ConversationCompletedError()
        except ValueError as e:
            raise e
        except Exception as e:
            logging.exception("internal server error.")
            raise InternalServerError()

class DraftWorkflowRunApi(Resource):
    @setup_required
@@ -317,6 +391,8 @@ api.add_resource(AdvancedChatDraftWorkflowRunApi, '/apps/<uuid:app_id>/advanced-
api.add_resource(DraftWorkflowRunApi, '/apps/<uuid:app_id>/workflows/draft/run')
api.add_resource(WorkflowTaskStopApi, '/apps/<uuid:app_id>/workflow-runs/tasks/<string:task_id>/stop')
api.add_resource(DraftWorkflowNodeRunApi, '/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/run')
api.add_resource(AdvancedChatDraftRunIterationNodeApi, '/apps/<uuid:app_id>/advanced-chat/workflows/draft/iteration/nodes/<string:node_id>/run')
api.add_resource(WorkflowDraftRunIterationNodeApi, '/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run')
api.add_resource(PublishedWorkflowApi, '/apps/<uuid:app_id>/workflows/publish')
api.add_resource(DefaultBlockConfigsApi, '/apps/<uuid:app_id>/workflows/default-workflow-block-configs')
api.add_resource(DefaultBlockConfigApi, '/apps/<uuid:app_id>/workflows/default-workflow-block-configs'
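For orientation, the two new single-iteration routes can be exercised directly once a draft workflow exists; a sketch, assuming a local API on port 5001 — the IDs and the token are placeholders:

```bash
# Run a single iteration node of a draft workflow (IDs and token are placeholders)
curl -X POST 'http://127.0.0.1:5001/console/api/apps/<app_id>/workflows/draft/iteration/nodes/<node_id>/run' \
  -H 'Authorization: Bearer <console_token>' \
  -H 'Content-Type: application/json' \
  -d '{"inputs": {}}'
```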
@@ -227,7 +227,7 @@ class WorkflowAverageAppInteractionStatistic(Resource):
                {{start}}
                {{end}}
            GROUP BY date, c.created_by) sub
            GROUP BY sub.created_by, sub.date
            GROUP BY sub.date
        """
        arg_dict = {'tz': account.timezone, 'app_id': app_model.id, 'triggered_from': WorkflowRunTriggeredFrom.APP_RUN.value}
@@ -26,10 +26,13 @@ class LoginApi(Resource):

        try:
            account = AccountService.authenticate(args['email'], args['password'])
        except services.errors.account.AccountLoginError:
            return {'code': 'unauthorized', 'message': 'Invalid email or password'}, 401
        except services.errors.account.AccountLoginError as e:
            return {'code': 'unauthorized', 'message': str(e)}, 401

        TenantService.create_owner_tenant_if_not_exist(account)
        # SELF_HOSTED only have one workspace
        tenants = TenantService.get_join_tenants(account)
        if len(tenants) == 0:
            return {'result': 'fail', 'data': 'workspace not found, please contact system admin to invite you to join in a workspace'}

        AccountService.update_last_login(account, request)
@ -48,11 +48,14 @@ class DatasetListApi(Resource):
|
||||
limit = request.args.get('limit', default=20, type=int)
|
||||
ids = request.args.getlist('ids')
|
||||
provider = request.args.get('provider', default="vendor")
|
||||
search = request.args.get('keyword', default=None, type=str)
|
||||
tag_ids = request.args.getlist('tag_ids')
|
||||
|
||||
if ids:
|
||||
datasets, total = DatasetService.get_datasets_by_ids(ids, current_user.current_tenant_id)
|
||||
else:
|
||||
datasets, total = DatasetService.get_datasets(page, limit, provider,
|
||||
current_user.current_tenant_id, current_user)
|
||||
current_user.current_tenant_id, current_user, search, tag_ids)
|
||||
|
||||
# check embedding setting
|
||||
provider_manager = ProviderManager()
|
||||
@ -184,6 +187,10 @@ class DatasetApi(Resource):
|
||||
help='Invalid indexing technique.')
|
||||
parser.add_argument('permission', type=str, location='json', choices=(
|
||||
'only_me', 'all_team_members'), help='Invalid permission.')
|
||||
parser.add_argument('embedding_model', type=str,
|
||||
location='json', help='Invalid embedding model.')
|
||||
parser.add_argument('embedding_model_provider', type=str,
|
||||
location='json', help='Invalid embedding model provider.')
|
||||
parser.add_argument('retrieval_model', type=dict, location='json', help='Invalid retrieval model.')
|
||||
args = parser.parse_args()
|
||||
|
||||
@ -469,13 +476,13 @@ class DatasetRetrievalSettingApi(Resource):
|
||||
@account_initialization_required
|
||||
def get(self):
|
||||
vector_type = current_app.config['VECTOR_STORE']
|
||||
if vector_type == 'milvus':
|
||||
if vector_type in {"milvus", "relyt", "pgvector", "pgvecto_rs"}:
|
||||
return {
|
||||
'retrieval_method': [
|
||||
'semantic_search'
|
||||
]
|
||||
}
|
||||
elif vector_type == 'qdrant' or vector_type == 'weaviate':
|
||||
elif vector_type in {"qdrant", "weaviate"}:
|
||||
return {
|
||||
'retrieval_method': [
|
||||
'semantic_search', 'full_text_search', 'hybrid_search'
|
||||
@ -490,14 +497,13 @@ class DatasetRetrievalSettingMockApi(Resource):
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def get(self, vector_type):
|
||||
|
||||
if vector_type == 'milvus':
|
||||
if vector_type in {'milvus', 'relyt', 'pgvector'}:
|
||||
return {
|
||||
'retrieval_method': [
|
||||
'semantic_search'
|
||||
]
|
||||
}
|
||||
elif vector_type == 'qdrant' or vector_type == 'weaviate':
|
||||
elif vector_type in {'qdrant', 'weaviate'}:
|
||||
return {
|
||||
'retrieval_method': [
|
||||
'semantic_search', 'full_text_search', 'hybrid_search'
|
||||
@ -506,10 +512,27 @@ class DatasetRetrievalSettingMockApi(Resource):
|
||||
else:
|
||||
raise ValueError("Unsupported vector db type.")
|
||||
|
||||
class DatasetErrorDocs(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def get(self, dataset_id):
|
||||
dataset_id_str = str(dataset_id)
|
||||
dataset = DatasetService.get_dataset(dataset_id_str)
|
||||
if dataset is None:
|
||||
raise NotFound("Dataset not found.")
|
||||
results = DocumentService.get_error_documents_by_dataset_id(dataset_id_str)
|
||||
|
||||
return {
|
||||
'data': [marshal(item, document_status_fields) for item in results],
|
||||
'total': len(results)
|
||||
}, 200
|
||||
|
||||
|
||||
api.add_resource(DatasetListApi, '/datasets')
|
||||
api.add_resource(DatasetApi, '/datasets/<uuid:dataset_id>')
|
||||
api.add_resource(DatasetQueryApi, '/datasets/<uuid:dataset_id>/queries')
|
||||
api.add_resource(DatasetErrorDocs, '/datasets/<uuid:dataset_id>/error-docs')
|
||||
api.add_resource(DatasetIndexingEstimateApi, '/datasets/indexing-estimate')
|
||||
api.add_resource(DatasetRelatedAppListApi, '/datasets/<uuid:dataset_id>/related-apps')
|
||||
api.add_resource(DatasetIndexingStatusApi, '/datasets/<uuid:dataset_id>/indexing-status')
|
||||
|
||||
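The two retrieval-setting hunks above encode the same rule twice: vector stores that only support embedding search report `semantic_search`, while qdrant and weaviate additionally report full-text and hybrid search. A minimal sketch of that mapping as a single lookup (a hypothetical consolidation, not part of the diff; names are illustrative):

SEMANTIC_ONLY = {'milvus', 'relyt', 'pgvector', 'pgvecto_rs'}
FULL_SUPPORT = {'qdrant', 'weaviate'}

def retrieval_methods_for(vector_type: str) -> list[str]:
    # Mirrors the branching in DatasetRetrievalSettingApi above.
    if vector_type in SEMANTIC_ONLY:
        return ['semantic_search']
    if vector_type in FULL_SUPPORT:
        return ['semantic_search', 'full_text_search', 'hybrid_search']
    raise ValueError("Unsupported vector db type.")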
@@ -1,3 +1,4 @@
+import logging
from datetime import datetime, timezone

from flask import request

@@ -233,7 +234,7 @@ class DatasetDocumentListApi(Resource):
                            location='json')
        parser.add_argument('data_source', type=dict, required=False, location='json')
        parser.add_argument('process_rule', type=dict, required=False, location='json')
-       parser.add_argument('duplicate', type=bool, nullable=False, location='json')
+       parser.add_argument('duplicate', type=bool, default=True, nullable=False, location='json')
        parser.add_argument('original_document_id', type=str, required=False, location='json')
        parser.add_argument('doc_form', type=str, default='text_model', required=False, nullable=False, location='json')
        parser.add_argument('doc_language', type=str, default='English', required=False, nullable=False,

@@ -393,9 +394,6 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
    def get(self, dataset_id, batch):
        dataset_id = str(dataset_id)
        batch = str(batch)
-       dataset = DatasetService.get_dataset(dataset_id)
-       if dataset is None:
-           raise NotFound("Dataset not found.")
        documents = self.get_batch_documents(dataset_id, batch)
        response = {
            "tokens": 0,

@@ -883,6 +881,49 @@ class DocumentRecoverApi(DocumentResource):
        return {'result': 'success'}, 204


+class DocumentRetryApi(DocumentResource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def post(self, dataset_id):
+        """retry document."""
+
+        parser = reqparse.RequestParser()
+        parser.add_argument('document_ids', type=list, required=True, nullable=False,
+                            location='json')
+        args = parser.parse_args()
+        dataset_id = str(dataset_id)
+        dataset = DatasetService.get_dataset(dataset_id)
+        retry_documents = []
+        if not dataset:
+            raise NotFound('Dataset not found.')
+        for document_id in args['document_ids']:
+            try:
+                document_id = str(document_id)
+
+                document = DocumentService.get_document(dataset.id, document_id)
+
+                # 404 if document not found
+                if document is None:
+                    raise NotFound("Document Not Exists.")
+
+                # 403 if document is archived
+                if DocumentService.check_archived(document):
+                    raise ArchivedDocumentImmutableError()
+
+                # 400 if document is completed
+                if document.indexing_status == 'completed':
+                    raise DocumentAlreadyFinishedError()
+                retry_documents.append(document)
+            except Exception as e:
+                logging.error(f"Document {document_id} retry failed: {str(e)}")
+                continue
+        # retry document
+        DocumentService.retry_document(dataset_id, retry_documents)
+
+        return {'result': 'success'}, 204


api.add_resource(GetProcessRuleApi, '/datasets/process-rule')
api.add_resource(DatasetDocumentListApi,
                 '/datasets/<uuid:dataset_id>/documents')

@@ -908,3 +949,4 @@ api.add_resource(DocumentStatusApi,
                 '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/status/<string:action>')
api.add_resource(DocumentPauseApi, '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/pause')
api.add_resource(DocumentRecoverApi, '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/resume')
+api.add_resource(DocumentRetryApi, '/datasets/<uuid:dataset_id>/retry')
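The new retry endpoint takes a JSON list of document IDs and silently skips any document that is missing, archived, or already indexed. A minimal client sketch (the base URL and the use of a pre-authenticated session are assumptions; the console API normally rides on the logged-in browser session):

import requests

CONSOLE_URL = 'http://localhost:5001/console/api'  # placeholder

def retry_documents(session: requests.Session, dataset_id: str, document_ids: list[str]) -> None:
    # Mirrors DocumentRetryApi.post: a 204 means the batch was accepted.
    resp = session.post(
        f'{CONSOLE_URL}/datasets/{dataset_id}/retry',
        json={'document_ids': document_ids},
    )
    resp.raise_for_status()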
@@ -12,7 +12,7 @@ from controllers.console.app.error import (
    ProviderNotInitializeError,
    ProviderQuotaExceededError,
)
-from controllers.console.datasets.error import DatasetNotInitializedError, HighQualityDatasetOnlyError
+from controllers.console.datasets.error import DatasetNotInitializedError
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.errors.error import (

@@ -45,10 +45,6 @@ class HitTestingApi(Resource):
        except services.errors.account.NoPermissionError as e:
            raise Forbidden(str(e))

-       # only high quality dataset can be used for hit testing
-       if dataset.indexing_technique != 'high_quality':
-           raise HighQualityDatasetOnlyError()
-
        parser = reqparse.RequestParser()
        parser.add_argument('query', type=str, location='json')
        parser.add_argument('retrieval_model', type=dict, required=False, location='json')

@@ -76,7 +76,7 @@ class ChatTextApi(InstalledAppResource):
        response = AudioService.transcript_tts(
            app_model=app_model,
            text=request.form['text'],
-           voice=request.form.get('voice'),
+           voice=request.form['voice'] if request.form.get('voice') else app_model.app_model_config.text_to_speech_dict.get('voice'),
            streaming=False
        )
        return {'data': response.data.decode('latin1')}
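The voice change above (and the matching one in the web TextApi further down) falls back to the app's configured text-to-speech voice when the request omits one. Since the form value is a string, the same fallback can be written more compactly with `or`; a one-line sketch, assuming an empty string and a missing 'voice' field should be treated alike:

# Equivalent fallback, assuming '' and a missing 'voice' field both mean "use the app default".
voice = request.form.get('voice') or app_model.app_model_config.text_to_speech_dict.get('voice')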
@@ -21,6 +21,7 @@ recommended_app_fields = {
    'description': fields.String(attribute='description'),
    'copyright': fields.String,
    'privacy_policy': fields.String,
+   'custom_disclaimer': fields.String,
    'category': fields.String,
    'position': fields.Integer,
    'is_listed': fields.Boolean

@@ -14,4 +14,10 @@ class FeatureApi(Resource):
        return FeatureService.get_features(current_user.current_tenant_id).dict()


+class SystemFeatureApi(Resource):
+    def get(self):
+        return FeatureService.get_system_features().dict()


api.add_resource(FeatureApi, '/features')
+api.add_resource(SystemFeatureApi, '/system-features')

@@ -58,6 +58,8 @@ class SetupApi(Resource):
            password=args['password']
        )

+       TenantService.create_owner_tenant_if_not_exist(account)
+
        setup()
        AccountService.update_last_login(account, request)


api/controllers/console/tag/tags.py  (new file, 159 lines)
@@ -0,0 +1,159 @@
from flask import request
from flask_login import current_user
from flask_restful import Resource, marshal_with, reqparse
from werkzeug.exceptions import Forbidden

from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from fields.tag_fields import tag_fields
from libs.login import login_required
from models.model import Tag
from services.tag_service import TagService


def _validate_name(name):
    if not name or len(name) < 1 or len(name) > 40:
        raise ValueError('Name must be between 1 to 50 characters.')
    return name


class TagListApi(Resource):

    @setup_required
    @login_required
    @account_initialization_required
    @marshal_with(tag_fields)
    def get(self):
        tag_type = request.args.get('type', type=str)
        keyword = request.args.get('keyword', default=None, type=str)
        tags = TagService.get_tags(tag_type, current_user.current_tenant_id, keyword)

        return tags, 200

    @setup_required
    @login_required
    @account_initialization_required
    def post(self):
        # The role of the current user in the ta table must be admin or owner
        if not current_user.is_admin_or_owner:
            raise Forbidden()

        parser = reqparse.RequestParser()
        parser.add_argument('name', nullable=False, required=True,
                            help='Name must be between 1 to 50 characters.',
                            type=_validate_name)
        parser.add_argument('type', type=str, location='json',
                            choices=Tag.TAG_TYPE_LIST,
                            nullable=True,
                            help='Invalid tag type.')
        args = parser.parse_args()
        tag = TagService.save_tags(args)

        response = {
            'id': tag.id,
            'name': tag.name,
            'type': tag.type,
            'binding_count': 0
        }

        return response, 200


class TagUpdateDeleteApi(Resource):

    @setup_required
    @login_required
    @account_initialization_required
    def patch(self, tag_id):
        tag_id = str(tag_id)
        # The role of the current user in the ta table must be admin or owner
        if not current_user.is_admin_or_owner:
            raise Forbidden()

        parser = reqparse.RequestParser()
        parser.add_argument('name', nullable=False, required=True,
                            help='Name must be between 1 to 50 characters.',
                            type=_validate_name)
        args = parser.parse_args()
        tag = TagService.update_tags(args, tag_id)

        binding_count = TagService.get_tag_binding_count(tag_id)

        response = {
            'id': tag.id,
            'name': tag.name,
            'type': tag.type,
            'binding_count': binding_count
        }

        return response, 200

    @setup_required
    @login_required
    @account_initialization_required
    def delete(self, tag_id):
        tag_id = str(tag_id)
        # The role of the current user in the ta table must be admin or owner
        if not current_user.is_admin_or_owner:
            raise Forbidden()

        TagService.delete_tag(tag_id)

        return 200


class TagBindingCreateApi(Resource):

    @setup_required
    @login_required
    @account_initialization_required
    def post(self):
        # The role of the current user in the ta table must be admin or owner
        if not current_user.is_admin_or_owner:
            raise Forbidden()

        parser = reqparse.RequestParser()
        parser.add_argument('tag_ids', type=list, nullable=False, required=True, location='json',
                            help='Tag IDs is required.')
        parser.add_argument('target_id', type=str, nullable=False, required=True, location='json',
                            help='Target ID is required.')
        parser.add_argument('type', type=str, location='json',
                            choices=Tag.TAG_TYPE_LIST,
                            nullable=True,
                            help='Invalid tag type.')
        args = parser.parse_args()
        TagService.save_tag_binding(args)

        return 200


class TagBindingDeleteApi(Resource):

    @setup_required
    @login_required
    @account_initialization_required
    def post(self):
        # The role of the current user in the ta table must be admin or owner
        if not current_user.is_admin_or_owner:
            raise Forbidden()

        parser = reqparse.RequestParser()
        parser.add_argument('tag_id', type=str, nullable=False, required=True,
                            help='Tag ID is required.')
        parser.add_argument('target_id', type=str, nullable=False, required=True,
                            help='Target ID is required.')
        parser.add_argument('type', type=str, location='json',
                            choices=Tag.TAG_TYPE_LIST,
                            nullable=True,
                            help='Invalid tag type.')
        args = parser.parse_args()
        TagService.delete_tag_binding(args)

        return 200


api.add_resource(TagListApi, '/tags')
api.add_resource(TagUpdateDeleteApi, '/tags/<uuid:tag_id>')
api.add_resource(TagBindingCreateApi, '/tag-bindings/create')
api.add_resource(TagBindingDeleteApi, '/tag-bindings/remove')
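tags.py wires four console routes: tag CRUD plus bind/unbind against a target (the dataset list hunk earlier filters by `tag_ids`). Note that `_validate_name` rejects names longer than 40 characters while its message says 50; that mismatch is in the committed code, not an extraction error. A minimal usage sketch against a logged-in console session (the URL, session handling, and the 'knowledge' tag type are assumptions inferred from the dataset-tagging context):

import requests

CONSOLE_URL = 'http://localhost:5001/console/api'  # placeholder

def tag_dataset(session: requests.Session, dataset_id: str, name: str) -> None:
    # Create a tag, then bind it to a dataset via /tag-bindings/create.
    tag = session.post(f'{CONSOLE_URL}/tags',
                       json={'name': name, 'type': 'knowledge'}).json()
    session.post(f'{CONSOLE_URL}/tag-bindings/create',
                 json={'tag_ids': [tag['id']], 'target_id': dataset_id, 'type': 'knowledge'})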
@@ -9,7 +9,7 @@ from controllers.console.wraps import account_initialization_required, cloud_edi
from extensions.ext_database import db
from fields.member_fields import account_with_role_list_fields
from libs.login import login_required
-from models.account import Account
+from models.account import Account, TenantAccountRole
from services.account_service import RegisterService, TenantService
from services.errors.account import AccountAlreadyInTenantError

@@ -43,7 +43,7 @@ class MemberInviteEmailApi(Resource):
        invitee_emails = args['emails']
        invitee_role = args['role']
        interface_language = args['language']
-       if invitee_role not in ['admin', 'normal']:
+       if invitee_role not in [TenantAccountRole.ADMIN, TenantAccountRole.NORMAL]:
            return {'code': 'invalid-role', 'message': 'Invalid role'}, 400

        inviter = current_user
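Replacing the string literals with `TenantAccountRole` members keeps the membership check working for raw request strings as long as the enum derives from `str`; a sketch of the pattern (the enum body here is illustrative, reduced to the two roles the hunk mentions):

from enum import Enum

class TenantAccountRole(str, Enum):  # str mixin: members compare equal to their values
    ADMIN = 'admin'
    NORMAL = 'normal'

assert 'admin' in [TenantAccountRole.ADMIN, TenantAccountRole.NORMAL]
assert 'owner' not in [TenantAccountRole.ADMIN, TenantAccountRole.NORMAL]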
@@ -11,6 +11,7 @@ from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.utils.encoders import jsonable_encoder
from libs.login import login_required
+from models.account import TenantAccountRole
from services.model_provider_service import ModelProviderService


@@ -94,7 +95,7 @@ class ModelProviderModelApi(Resource):
    @login_required
    @account_initialization_required
    def post(self, provider: str):
-       if current_user.current_tenant.current_role not in ['admin', 'owner']:
+       if not TenantAccountRole.is_privileged_role(current_user.current_tenant.current_role):
            raise Forbidden()

        tenant_id = current_user.current_tenant_id

@@ -125,7 +126,7 @@ class ModelProviderModelApi(Resource):
    @login_required
    @account_initialization_required
    def delete(self, provider: str):
-       if current_user.current_tenant.current_role not in ['admin', 'owner']:
+       if not TenantAccountRole.is_privileged_role(current_user.current_tenant.current_role):
            raise Forbidden()

        tenant_id = current_user.current_tenant_id

@@ -9,8 +9,13 @@ from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.model_runtime.utils.encoders import jsonable_encoder
from libs.helper import alphanumeric, uuid_value
from libs.login import login_required
-from services.tools_manage_service import ToolManageService
+from services.tools.api_tools_manage_service import ApiToolManageService
+from services.tools.builtin_tools_manage_service import BuiltinToolManageService
+from services.tools.tool_labels_service import ToolLabelsService
+from services.tools.tools_manage_service import ToolCommonService
+from services.tools.workflow_tools_manage_service import WorkflowToolManageService


class ToolProviderListApi(Resource):

@@ -21,7 +26,11 @@ class ToolProviderListApi(Resource):
        user_id = current_user.id
        tenant_id = current_user.current_tenant_id

-       return ToolManageService.list_tool_providers(user_id, tenant_id)
+       req = reqparse.RequestParser()
+       req.add_argument('type', type=str, choices=['builtin', 'model', 'api', 'workflow'], required=False, nullable=True, location='args')
+       args = req.parse_args()
+
+       return ToolCommonService.list_tool_providers(user_id, tenant_id, args.get('type', None))

class ToolBuiltinProviderListToolsApi(Resource):
    @setup_required

@@ -31,7 +40,7 @@ class ToolBuiltinProviderListToolsApi(Resource):
        user_id = current_user.id
        tenant_id = current_user.current_tenant_id

-       return jsonable_encoder(ToolManageService.list_builtin_tool_provider_tools(
+       return jsonable_encoder(BuiltinToolManageService.list_builtin_tool_provider_tools(
            user_id,
            tenant_id,
            provider,

@@ -48,7 +57,7 @@ class ToolBuiltinProviderDeleteApi(Resource):
        user_id = current_user.id
        tenant_id = current_user.current_tenant_id

-       return ToolManageService.delete_builtin_tool_provider(
+       return BuiltinToolManageService.delete_builtin_tool_provider(
            user_id,
            tenant_id,
            provider,

@@ -70,7 +79,7 @@ class ToolBuiltinProviderUpdateApi(Resource):

        args = parser.parse_args()

-       return ToolManageService.update_builtin_tool_provider(
+       return BuiltinToolManageService.update_builtin_tool_provider(
            user_id,
            tenant_id,
            provider,

@@ -85,7 +94,7 @@ class ToolBuiltinProviderGetCredentialsApi(Resource):
        user_id = current_user.id
        tenant_id = current_user.current_tenant_id

-       return ToolManageService.get_builtin_tool_provider_credentials(
+       return BuiltinToolManageService.get_builtin_tool_provider_credentials(
            user_id,
            tenant_id,
            provider,

@@ -94,35 +103,10 @@ class ToolBuiltinProviderGetCredentialsApi(Resource):
class ToolBuiltinProviderIconApi(Resource):
    @setup_required
    def get(self, provider):
-       icon_bytes, mimetype = ToolManageService.get_builtin_tool_provider_icon(provider)
+       icon_bytes, mimetype = BuiltinToolManageService.get_builtin_tool_provider_icon(provider)
        icon_cache_max_age = int(current_app.config.get('TOOL_ICON_CACHE_MAX_AGE'))
        return send_file(io.BytesIO(icon_bytes), mimetype=mimetype, max_age=icon_cache_max_age)

-class ToolModelProviderIconApi(Resource):
-    @setup_required
-    def get(self, provider):
-        icon_bytes, mimetype = ToolManageService.get_model_tool_provider_icon(provider)
-        return send_file(io.BytesIO(icon_bytes), mimetype=mimetype)
-
-class ToolModelProviderListToolsApi(Resource):
-    @setup_required
-    @login_required
-    @account_initialization_required
-    def get(self):
-        user_id = current_user.id
-        tenant_id = current_user.current_tenant_id
-
-        parser = reqparse.RequestParser()
-        parser.add_argument('provider', type=str, required=True, nullable=False, location='args')
-
-        args = parser.parse_args()
-
-        return jsonable_encoder(ToolManageService.list_model_tool_provider_tools(
-            user_id,
-            tenant_id,
-            args['provider'],
-        ))

class ToolApiProviderAddApi(Resource):
    @setup_required
    @login_required

@@ -141,10 +125,12 @@ class ToolApiProviderAddApi(Resource):
        parser.add_argument('provider', type=str, required=True, nullable=False, location='json')
        parser.add_argument('icon', type=dict, required=True, nullable=False, location='json')
        parser.add_argument('privacy_policy', type=str, required=False, nullable=True, location='json')
+       parser.add_argument('labels', type=list[str], required=False, nullable=True, location='json', default=[])
+       parser.add_argument('custom_disclaimer', type=str, required=False, nullable=True, location='json')

        args = parser.parse_args()

-       return ToolManageService.create_api_tool_provider(
+       return ApiToolManageService.create_api_tool_provider(
            user_id,
            tenant_id,
            args['provider'],

@@ -153,6 +139,8 @@ class ToolApiProviderAddApi(Resource):
            args['schema_type'],
            args['schema'],
            args.get('privacy_policy', ''),
+           args.get('custom_disclaimer', ''),
+           args.get('labels', []),
        )

class ToolApiProviderGetRemoteSchemaApi(Resource):

@@ -166,7 +154,7 @@ class ToolApiProviderGetRemoteSchemaApi(Resource):

        args = parser.parse_args()

-       return ToolManageService.get_api_tool_provider_remote_schema(
+       return ApiToolManageService.get_api_tool_provider_remote_schema(
            current_user.id,
            current_user.current_tenant_id,
            args['url'],

@@ -186,7 +174,7 @@ class ToolApiProviderListToolsApi(Resource):

        args = parser.parse_args()

-       return jsonable_encoder(ToolManageService.list_api_tool_provider_tools(
+       return jsonable_encoder(ApiToolManageService.list_api_tool_provider_tools(
            user_id,
            tenant_id,
            args['provider'],

@@ -211,10 +199,12 @@ class ToolApiProviderUpdateApi(Resource):
        parser.add_argument('original_provider', type=str, required=True, nullable=False, location='json')
        parser.add_argument('icon', type=dict, required=True, nullable=False, location='json')
        parser.add_argument('privacy_policy', type=str, required=True, nullable=True, location='json')
+       parser.add_argument('labels', type=list[str], required=False, nullable=True, location='json')
+       parser.add_argument('custom_disclaimer', type=str, required=True, nullable=True, location='json')

        args = parser.parse_args()

-       return ToolManageService.update_api_tool_provider(
+       return ApiToolManageService.update_api_tool_provider(
            user_id,
            tenant_id,
            args['provider'],

@@ -224,6 +214,8 @@ class ToolApiProviderUpdateApi(Resource):
            args['schema_type'],
            args['schema'],
            args['privacy_policy'],
+           args['custom_disclaimer'],
+           args.get('labels', []),
        )

class ToolApiProviderDeleteApi(Resource):

@@ -243,7 +235,7 @@ class ToolApiProviderDeleteApi(Resource):

        args = parser.parse_args()

-       return ToolManageService.delete_api_tool_provider(
+       return ApiToolManageService.delete_api_tool_provider(
            user_id,
            tenant_id,
            args['provider'],

@@ -263,7 +255,7 @@ class ToolApiProviderGetApi(Resource):

        args = parser.parse_args()

-       return ToolManageService.get_api_tool_provider(
+       return ApiToolManageService.get_api_tool_provider(
            user_id,
            tenant_id,
            args['provider'],

@@ -274,7 +266,7 @@ class ToolBuiltinProviderCredentialsSchemaApi(Resource):
    @login_required
    @account_initialization_required
    def get(self, provider):
-       return ToolManageService.list_builtin_provider_credentials_schema(provider)
+       return BuiltinToolManageService.list_builtin_provider_credentials_schema(provider)

class ToolApiProviderSchemaApi(Resource):
    @setup_required

@@ -287,7 +279,7 @@ class ToolApiProviderSchemaApi(Resource):

        args = parser.parse_args()

-       return ToolManageService.parser_api_schema(
+       return ApiToolManageService.parser_api_schema(
            schema=args['schema'],
        )

@@ -307,7 +299,7 @@ class ToolApiProviderPreviousTestApi(Resource):

        args = parser.parse_args()

-       return ToolManageService.test_api_tool_preview(
+       return ApiToolManageService.test_api_tool_preview(
            current_user.current_tenant_id,
            args['provider_name'] if args['provider_name'] else '',
            args['tool_name'],

@@ -317,6 +309,153 @@ class ToolApiProviderPreviousTestApi(Resource):
            args['schema'],
        )

+class ToolWorkflowProviderCreateApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def post(self):
+        if not current_user.is_admin_or_owner:
+            raise Forbidden()
+
+        user_id = current_user.id
+        tenant_id = current_user.current_tenant_id
+
+        reqparser = reqparse.RequestParser()
+        reqparser.add_argument('workflow_app_id', type=uuid_value, required=True, nullable=False, location='json')
+        reqparser.add_argument('name', type=alphanumeric, required=True, nullable=False, location='json')
+        reqparser.add_argument('label', type=str, required=True, nullable=False, location='json')
+        reqparser.add_argument('description', type=str, required=True, nullable=False, location='json')
+        reqparser.add_argument('icon', type=dict, required=True, nullable=False, location='json')
+        reqparser.add_argument('parameters', type=list[dict], required=True, nullable=False, location='json')
+        reqparser.add_argument('privacy_policy', type=str, required=False, nullable=True, location='json', default='')
+        reqparser.add_argument('labels', type=list[str], required=False, nullable=True, location='json')
+
+        args = reqparser.parse_args()
+
+        return WorkflowToolManageService.create_workflow_tool(
+            user_id,
+            tenant_id,
+            args['workflow_app_id'],
+            args['name'],
+            args['label'],
+            args['icon'],
+            args['description'],
+            args['parameters'],
+            args['privacy_policy'],
+            args.get('labels', []),
+        )
+
+class ToolWorkflowProviderUpdateApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def post(self):
+        if not current_user.is_admin_or_owner:
+            raise Forbidden()
+
+        user_id = current_user.id
+        tenant_id = current_user.current_tenant_id
+
+        reqparser = reqparse.RequestParser()
+        reqparser.add_argument('workflow_tool_id', type=uuid_value, required=True, nullable=False, location='json')
+        reqparser.add_argument('name', type=alphanumeric, required=True, nullable=False, location='json')
+        reqparser.add_argument('label', type=str, required=True, nullable=False, location='json')
+        reqparser.add_argument('description', type=str, required=True, nullable=False, location='json')
+        reqparser.add_argument('icon', type=dict, required=True, nullable=False, location='json')
+        reqparser.add_argument('parameters', type=list[dict], required=True, nullable=False, location='json')
+        reqparser.add_argument('privacy_policy', type=str, required=False, nullable=True, location='json', default='')
+        reqparser.add_argument('labels', type=list[str], required=False, nullable=True, location='json')
+
+        args = reqparser.parse_args()
+
+        if not args['workflow_tool_id']:
+            raise ValueError('incorrect workflow_tool_id')
+
+        return WorkflowToolManageService.update_workflow_tool(
+            user_id,
+            tenant_id,
+            args['workflow_tool_id'],
+            args['name'],
+            args['label'],
+            args['icon'],
+            args['description'],
+            args['parameters'],
+            args['privacy_policy'],
+            args.get('labels', []),
+        )
+
+class ToolWorkflowProviderDeleteApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def post(self):
+        if not current_user.is_admin_or_owner:
+            raise Forbidden()
+
+        user_id = current_user.id
+        tenant_id = current_user.current_tenant_id
+
+        reqparser = reqparse.RequestParser()
+        reqparser.add_argument('workflow_tool_id', type=uuid_value, required=True, nullable=False, location='json')
+
+        args = reqparser.parse_args()
+
+        return WorkflowToolManageService.delete_workflow_tool(
+            user_id,
+            tenant_id,
+            args['workflow_tool_id'],
+        )
+
+class ToolWorkflowProviderGetApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def get(self):
+        user_id = current_user.id
+        tenant_id = current_user.current_tenant_id
+
+        parser = reqparse.RequestParser()
+        parser.add_argument('workflow_tool_id', type=uuid_value, required=False, nullable=True, location='args')
+        parser.add_argument('workflow_app_id', type=uuid_value, required=False, nullable=True, location='args')
+
+        args = parser.parse_args()
+
+        if args.get('workflow_tool_id'):
+            tool = WorkflowToolManageService.get_workflow_tool_by_tool_id(
+                user_id,
+                tenant_id,
+                args['workflow_tool_id'],
+            )
+        elif args.get('workflow_app_id'):
+            tool = WorkflowToolManageService.get_workflow_tool_by_app_id(
+                user_id,
+                tenant_id,
+                args['workflow_app_id'],
+            )
+        else:
+            raise ValueError('incorrect workflow_tool_id or workflow_app_id')
+
+        return jsonable_encoder(tool)
+
+class ToolWorkflowProviderListToolApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def get(self):
+        user_id = current_user.id
+        tenant_id = current_user.current_tenant_id
+
+        parser = reqparse.RequestParser()
+        parser.add_argument('workflow_tool_id', type=uuid_value, required=True, nullable=False, location='args')
+
+        args = parser.parse_args()
+
+        return jsonable_encoder(WorkflowToolManageService.list_single_workflow_tools(
+            user_id,
+            tenant_id,
+            args['workflow_tool_id'],
+        ))

class ToolBuiltinListApi(Resource):
    @setup_required
    @login_required

@@ -325,7 +464,7 @@ class ToolBuiltinListApi(Resource):
        user_id = current_user.id
        tenant_id = current_user.current_tenant_id

-       return jsonable_encoder([provider.to_dict() for provider in ToolManageService.list_builtin_tools(
+       return jsonable_encoder([provider.to_dict() for provider in BuiltinToolManageService.list_builtin_tools(
            user_id,
            tenant_id,
        )])

@@ -338,20 +477,43 @@ class ToolApiListApi(Resource):
        user_id = current_user.id
        tenant_id = current_user.current_tenant_id

-       return jsonable_encoder([provider.to_dict() for provider in ToolManageService.list_api_tools(
+       return jsonable_encoder([provider.to_dict() for provider in ApiToolManageService.list_api_tools(
            user_id,
            tenant_id,
        )])

+class ToolWorkflowListApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def get(self):
+        user_id = current_user.id
+        tenant_id = current_user.current_tenant_id
+
+        return jsonable_encoder([provider.to_dict() for provider in WorkflowToolManageService.list_tenant_workflow_tools(
+            user_id,
+            tenant_id,
+        )])

+class ToolLabelsApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def get(self):
+        return jsonable_encoder(ToolLabelsService.list_tool_labels())

+# tool provider
api.add_resource(ToolProviderListApi, '/workspaces/current/tool-providers')

+# builtin tool provider
api.add_resource(ToolBuiltinProviderListToolsApi, '/workspaces/current/tool-provider/builtin/<provider>/tools')
api.add_resource(ToolBuiltinProviderDeleteApi, '/workspaces/current/tool-provider/builtin/<provider>/delete')
api.add_resource(ToolBuiltinProviderUpdateApi, '/workspaces/current/tool-provider/builtin/<provider>/update')
api.add_resource(ToolBuiltinProviderGetCredentialsApi, '/workspaces/current/tool-provider/builtin/<provider>/credentials')
api.add_resource(ToolBuiltinProviderCredentialsSchemaApi, '/workspaces/current/tool-provider/builtin/<provider>/credentials_schema')
api.add_resource(ToolBuiltinProviderIconApi, '/workspaces/current/tool-provider/builtin/<provider>/icon')
api.add_resource(ToolModelProviderIconApi, '/workspaces/current/tool-provider/model/<provider>/icon')
api.add_resource(ToolModelProviderListToolsApi, '/workspaces/current/tool-provider/model/tools')

+# api tool provider
api.add_resource(ToolApiProviderAddApi, '/workspaces/current/tool-provider/api/add')
api.add_resource(ToolApiProviderGetRemoteSchemaApi, '/workspaces/current/tool-provider/api/remote')
api.add_resource(ToolApiProviderListToolsApi, '/workspaces/current/tool-provider/api/tools')

@@ -361,5 +523,15 @@ api.add_resource(ToolApiProviderGetApi, '/workspaces/current/tool-provider/api/g
api.add_resource(ToolApiProviderSchemaApi, '/workspaces/current/tool-provider/api/schema')
api.add_resource(ToolApiProviderPreviousTestApi, '/workspaces/current/tool-provider/api/test/pre')

+# workflow tool provider
+api.add_resource(ToolWorkflowProviderCreateApi, '/workspaces/current/tool-provider/workflow/create')
+api.add_resource(ToolWorkflowProviderUpdateApi, '/workspaces/current/tool-provider/workflow/update')
+api.add_resource(ToolWorkflowProviderDeleteApi, '/workspaces/current/tool-provider/workflow/delete')
+api.add_resource(ToolWorkflowProviderGetApi, '/workspaces/current/tool-provider/workflow/get')
+api.add_resource(ToolWorkflowProviderListToolApi, '/workspaces/current/tool-provider/workflow/tools')

api.add_resource(ToolBuiltinListApi, '/workspaces/current/tools/builtin')
api.add_resource(ToolApiListApi, '/workspaces/current/tools/api')
api.add_resource(ToolApiListApi, '/workspaces/current/tools/api')
+api.add_resource(ToolWorkflowListApi, '/workspaces/current/tools/workflow')

+api.add_resource(ToolLabelsApi, '/workspaces/current/tool-labels')
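ToolProviderListApi now accepts an optional `type` query argument (`builtin`, `model`, `api`, or `workflow`) instead of always returning every provider. A minimal client sketch (the console URL and session authentication are placeholders):

import requests

CONSOLE_URL = 'http://localhost:5001/console/api'  # placeholder

def list_workflow_tool_providers(session: requests.Session) -> list[dict]:
    # GET /workspaces/current/tool-providers?type=workflow
    resp = session.get(f'{CONSOLE_URL}/workspaces/current/tool-providers',
                       params={'type': 'workflow'})
    resp.raise_for_status()
    return resp.json()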
@@ -3,6 +3,7 @@ import logging
from flask import request
from flask_login import current_user
from flask_restful import Resource, fields, inputs, marshal, marshal_with, reqparse
+from werkzeug.exceptions import Unauthorized

import services
from controllers.console import api

@@ -19,7 +20,7 @@ from controllers.console.wraps import account_initialization_required, cloud_edi
from extensions.ext_database import db
from libs.helper import TimestampField
from libs.login import login_required
-from models.account import Tenant
+from models.account import Tenant, TenantStatus
from services.account_service import TenantService
from services.file_service import FileService
from services.workspace_service import WorkspaceService

@@ -116,6 +117,16 @@ class TenantApi(Resource):

        tenant = current_user.current_tenant

+       if tenant.status == TenantStatus.ARCHIVE:
+           tenants = TenantService.get_join_tenants(current_user)
+           # if there is any tenant, switch to the first one
+           if len(tenants) > 0:
+               TenantService.switch_tenant(current_user, tenants[0].id)
+               tenant = tenants[0]
+           # else, raise Unauthorized
+           else:
+               raise Unauthorized('workspace is archived')

        return WorkspaceService.get_tenant_info(tenant), 200


@@ -150,13 +161,13 @@ class CustomConfigWorkspaceApi(Resource):
        parser.add_argument('replace_webapp_logo', type=str, location='json')
        args = parser.parse_args()

+       tenant = db.session.query(Tenant).filter(Tenant.id == current_user.current_tenant_id).one_or_404()

        custom_config_dict = {
            'remove_webapp_brand': args['remove_webapp_brand'],
-           'replace_webapp_logo': args['replace_webapp_logo'],
+           'replace_webapp_logo': args['replace_webapp_logo'] if args['replace_webapp_logo'] is not None else tenant.custom_config_dict.get('replace_webapp_logo'),
        }

-       tenant = db.session.query(Tenant).filter(Tenant.id == current_user.current_tenant_id).one_or_404()

        tenant.custom_config_dict = custom_config_dict
        db.session.commit()


@@ -1,5 +1,5 @@
-# -*- coding:utf-8 -*-
from flask import Blueprint

from libs.external_api import ExternalApi

bp = Blueprint('files', __name__)
api/controllers/inner_api/__init__.py  (new file, 9 lines)
@@ -0,0 +1,9 @@
from flask import Blueprint

from libs.external_api import ExternalApi

bp = Blueprint('inner_api', __name__, url_prefix='/inner/api')
api = ExternalApi(bp)

from .workspace import workspace

api/controllers/inner_api/workspace/workspace.py  (new file, 37 lines)
@@ -0,0 +1,37 @@
from flask_restful import Resource, reqparse

from controllers.console.setup import setup_required
from controllers.inner_api import api
from controllers.inner_api.wraps import inner_api_only
from events.tenant_event import tenant_was_created
from models.account import Account
from services.account_service import TenantService


class EnterpriseWorkspace(Resource):

    @setup_required
    @inner_api_only
    def post(self):
        parser = reqparse.RequestParser()
        parser.add_argument('name', type=str, required=True, location='json')
        parser.add_argument('owner_email', type=str, required=True, location='json')
        args = parser.parse_args()

        account = Account.query.filter_by(email=args['owner_email']).first()
        if account is None:
            return {
                'message': 'owner account not found.'
            }, 404

        tenant = TenantService.create_tenant(args['name'])
        TenantService.create_tenant_member(tenant, account, role='owner')

        tenant_was_created.send(tenant)

        return {
            'message': 'enterprise workspace created.'
        }


api.add_resource(EnterpriseWorkspace, '/enterprise/workspace')

api/controllers/inner_api/wraps.py  (new file, 61 lines)
@@ -0,0 +1,61 @@
from base64 import b64encode
from functools import wraps
from hashlib import sha1
from hmac import new as hmac_new

from flask import abort, current_app, request

from extensions.ext_database import db
from models.model import EndUser


def inner_api_only(view):
    @wraps(view)
    def decorated(*args, **kwargs):
        if not current_app.config['INNER_API']:
            abort(404)

        # get header 'X-Inner-Api-Key'
        inner_api_key = request.headers.get('X-Inner-Api-Key')
        if not inner_api_key or inner_api_key != current_app.config['INNER_API_KEY']:
            abort(404)

        return view(*args, **kwargs)

    return decorated


def inner_api_user_auth(view):
    @wraps(view)
    def decorated(*args, **kwargs):
        if not current_app.config['INNER_API']:
            return view(*args, **kwargs)

        # get header 'X-Inner-Api-Key'
        authorization = request.headers.get('Authorization')
        if not authorization:
            return view(*args, **kwargs)

        parts = authorization.split(':')
        if len(parts) != 2:
            return view(*args, **kwargs)

        user_id, token = parts
        if ' ' in user_id:
            user_id = user_id.split(' ')[1]

        inner_api_key = request.headers.get('X-Inner-Api-Key')

        data_to_sign = f'DIFY {user_id}'

        signature = hmac_new(inner_api_key.encode('utf-8'), data_to_sign.encode('utf-8'), sha1)
        signature = b64encode(signature.digest()).decode('utf-8')

        if signature != token:
            return view(*args, **kwargs)

        kwargs['user'] = db.session.query(EndUser).filter(EndUser.id == user_id).first()

        return view(*args, **kwargs)

    return decorated
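inner_api_user_auth expects an Authorization header of the form `<scheme> <user_id>:<token>`, where the token is the base64-encoded HMAC-SHA1 of `DIFY <user_id>` keyed with the shared inner API key; the scheme word itself is ignored by the server. A sketch of the matching client-side signing (only the signed payload format is fixed by the code above; the scheme word here is illustrative):

from base64 import b64encode
from hashlib import sha1
from hmac import new as hmac_new

def build_inner_api_headers(inner_api_key: str, user_id: str) -> dict[str, str]:
    # Sign the same payload the server reconstructs: 'DIFY <user_id>'.
    digest = hmac_new(inner_api_key.encode('utf-8'),
                      f'DIFY {user_id}'.encode('utf-8'), sha1).digest()
    token = b64encode(digest).decode('utf-8')
    return {
        'X-Inner-Api-Key': inner_api_key,
        'Authorization': f'DIFY {user_id}:{token}',  # scheme word is illustrative
    }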
@@ -1,5 +1,5 @@
-# -*- coding:utf-8 -*-
from flask import Blueprint

from libs.external_api import ExternalApi

bp = Blueprint('service_api', __name__, url_prefix='/v1')

@@ -97,7 +97,7 @@ class MessageListApi(Resource):


class MessageFeedbackApi(Resource):
-   @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON))
+   @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON, required=True))
    def post(self, app_model: App, end_user: EndUser, message_id):
        message_id = str(message_id)

@@ -114,7 +114,7 @@ class MessageFeedbackApi(Resource):


class MessageSuggestedApi(Resource):
-   @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY))
+   @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY, required=True))
    def get(self, app_model: App, end_user: EndUser, message_id):
        message_id = str(message_id)
        app_mode = AppMode.value_of(app_model.mode)

@@ -26,8 +26,11 @@ class DatasetApi(DatasetApiResource):
        page = request.args.get('page', default=1, type=int)
        limit = request.args.get('limit', default=20, type=int)
        provider = request.args.get('provider', default="vendor")
+       search = request.args.get('keyword', default=None, type=str)
+       tag_ids = request.args.getlist('tag_ids')

        datasets, total = DatasetService.get_datasets(page, limit, provider,
-                                                     tenant_id, current_user)
+                                                     tenant_id, current_user, search, tag_ids)
        # check embedding setting
        provider_manager = ProviderManager()
        configurations = provider_manager.get_configurations(

@@ -174,7 +174,7 @@ class DocumentAddByFileApi(DatasetApiResource):

        if not dataset:
            raise ValueError('Dataset is not exist.')
-       if not dataset.indexing_technique and not args['indexing_technique']:
+       if not dataset.indexing_technique and not args.get('indexing_technique'):
            raise ValueError('indexing_technique is required.')

        # save file info

@@ -12,7 +12,7 @@ from werkzeug.exceptions import Forbidden, NotFound, Unauthorized

from extensions.ext_database import db
from libs.login import _get_user
-from models.account import Account, Tenant, TenantAccountJoin
+from models.account import Account, Tenant, TenantAccountJoin, TenantStatus
from models.model import ApiToken, App, EndUser
from services.feature_service import FeatureService

@@ -47,6 +47,10 @@ def validate_app_token(view: Optional[Callable] = None, *, fetch_user_arg: Optio
        if not app_model.enable_api:
            raise NotFound()

+       tenant = db.session.query(Tenant).filter(Tenant.id == app_model.tenant_id).first()
+       if tenant.status == TenantStatus.ARCHIVE:
+           raise NotFound()

        kwargs['app_model'] = app_model

        if fetch_user_arg:

@@ -137,6 +141,7 @@ def validate_dataset_token(view=None):
        .filter(Tenant.id == api_token.tenant_id) \
        .filter(TenantAccountJoin.tenant_id == Tenant.id) \
        .filter(TenantAccountJoin.role.in_(['owner'])) \
+       .filter(Tenant.status == TenantStatus.NORMAL) \
        .one_or_none()  # TODO: only owner information is required, so only one is returned.
    if tenant_account_join:
        tenant, ta = tenant_account_join

@@ -1,9 +1,9 @@
-# -*- coding:utf-8 -*-
from flask import Blueprint

from libs.external_api import ExternalApi

bp = Blueprint('web', __name__, url_prefix='/api')
api = ExternalApi(bp)


-from . import app, audio, completion, conversation, file, message, passport, saved_message, site, workflow
+from . import app, audio, completion, conversation, feature, file, message, passport, saved_message, site, workflow

@@ -1,14 +1,10 @@
-import json
-
from flask import current_app
from flask_restful import fields, marshal_with

from controllers.web import api
from controllers.web.error import AppUnavailableError
from controllers.web.wraps import WebApiResource
from extensions.ext_database import db
-from models.model import App, AppModelConfig, AppMode
-from models.tools import ApiToolProvider
+from models.model import App, AppMode
from services.app_service import AppService


@@ -74,7 +74,7 @@ class TextApi(WebApiResource):
            app_model=app_model,
            text=request.form['text'],
            end_user=end_user.external_user_id,
-           voice=request.form.get('voice'),
+           voice=request.form['voice'] if request.form.get('voice') else app_model.app_model_config.text_to_speech_dict.get('voice'),
            streaming=False
        )


@@ -115,3 +115,9 @@ class UnsupportedFileTypeError(BaseHTTPException):
    error_code = 'unsupported_file_type'
    description = "File type not allowed."
    code = 415


+class WebSSOAuthRequiredError(BaseHTTPException):
+    error_code = 'web_sso_auth_required'
+    description = "Web SSO authentication required."
+    code = 401
api/controllers/web/feature.py  (new file, 12 lines)
@@ -0,0 +1,12 @@
from flask_restful import Resource

from controllers.web import api
from services.feature_service import FeatureService


class SystemFeatureApi(Resource):
    def get(self):
        return FeatureService.get_system_features().dict()


api.add_resource(SystemFeatureApi, '/system-features')

@@ -5,14 +5,21 @@ from flask_restful import Resource
from werkzeug.exceptions import NotFound, Unauthorized

from controllers.web import api
+from controllers.web.error import WebSSOAuthRequiredError
from extensions.ext_database import db
from libs.passport import PassportService
from models.model import App, EndUser, Site
+from services.feature_service import FeatureService


class PassportResource(Resource):
    """Base resource for passport."""
    def get(self):
+       system_features = FeatureService.get_system_features()
+       if system_features.sso_enforced_for_web:
+           raise WebSSOAuthRequiredError()
+
        app_code = request.headers.get('X-App-Code')
        if app_code is None:
            raise Unauthorized('X-App-Code header is missing.')

@@ -28,7 +35,7 @@ class PassportResource(Resource):
        app_model = db.session.query(App).filter(App.id == site.app_id).first()
        if not app_model or app_model.status != 'normal' or not app_model.enable_site:
            raise NotFound()

        end_user = EndUser(
            tenant_id=app_model.tenant_id,
            app_id=app_model.id,

@@ -36,6 +43,7 @@ class PassportResource(Resource):
            is_anonymous=True,
            session_id=generate_session_id(),
        )

        db.session.add(end_user)
        db.session.commit()

@@ -53,8 +61,10 @@ class PassportResource(Resource):
            'access_token': tk,
        }


api.add_resource(PassportResource, '/passport')


def generate_session_id():
    """
    Generate a unique session ID.

@@ -6,6 +6,7 @@ from werkzeug.exceptions import Forbidden
from controllers.web import api
from controllers.web.wraps import WebApiResource
from extensions.ext_database import db
+from models.account import TenantStatus
from models.model import Site
from services.feature_service import FeatureService

@@ -30,6 +31,7 @@ class AppSiteApi(WebApiResource):
        'description': fields.String,
        'copyright': fields.String,
        'privacy_policy': fields.String,
+       'custom_disclaimer': fields.String,
        'default_language': fields.String,
        'prompt_public': fields.Boolean
    }

@@ -54,6 +56,9 @@ class AppSiteApi(WebApiResource):
        if not site:
            raise Forbidden()

+       if app_model.tenant.status == TenantStatus.ARCHIVE:
+           raise Forbidden()
+
        can_replace_logo = FeatureService.get_features(app_model.tenant_id).can_replace_logo

        return AppSiteInfo(app_model.tenant, app_model, site, end_user.id, can_replace_logo)

@@ -2,11 +2,13 @@ from functools import wraps

from flask import request
from flask_restful import Resource
-from werkzeug.exceptions import NotFound, Unauthorized
+from werkzeug.exceptions import BadRequest, NotFound, Unauthorized

+from controllers.web.error import WebSSOAuthRequiredError
from extensions.ext_database import db
from libs.passport import PassportService
from models.model import App, EndUser, Site
+from services.feature_service import FeatureService


def validate_jwt_token(view=None):

@@ -21,34 +23,60 @@ def validate_jwt_token(view=None):
        return decorator(view)
    return decorator


def decode_jwt_token():
-   auth_header = request.headers.get('Authorization')
-   if auth_header is None:
-       raise Unauthorized('Authorization header is missing.')
-
-   if ' ' not in auth_header:
-       raise Unauthorized('Invalid Authorization header format. Expected \'Bearer <api-key>\' format.')
-
-   auth_scheme, tk = auth_header.split(None, 1)
-   auth_scheme = auth_scheme.lower()
-
-   if auth_scheme != 'bearer':
-       raise Unauthorized('Invalid Authorization header format. Expected \'Bearer <api-key>\' format.')
-   decoded = PassportService().verify(tk)
-   app_code = decoded.get('app_code')
-   app_model = db.session.query(App).filter(App.id == decoded['app_id']).first()
-   site = db.session.query(Site).filter(Site.code == app_code).first()
-   if not app_model:
-       raise NotFound()
-   if not app_code or not site:
-       raise Unauthorized('Site URL is no longer valid.')
-   if app_model.enable_site is False:
-       raise Unauthorized('Site is disabled.')
-   end_user = db.session.query(EndUser).filter(EndUser.id == decoded['end_user_id']).first()
-   if not end_user:
-       raise NotFound()
+   system_features = FeatureService.get_system_features()
+
+   try:
+       auth_header = request.headers.get('Authorization')
+       if auth_header is None:
+           raise Unauthorized('Authorization header is missing.')
+
+       if ' ' not in auth_header:
+           raise Unauthorized('Invalid Authorization header format. Expected \'Bearer <api-key>\' format.')
+
+       auth_scheme, tk = auth_header.split(None, 1)
+       auth_scheme = auth_scheme.lower()
+
+       if auth_scheme != 'bearer':
+           raise Unauthorized('Invalid Authorization header format. Expected \'Bearer <api-key>\' format.')
+       decoded = PassportService().verify(tk)
+       app_code = decoded.get('app_code')
+       app_model = db.session.query(App).filter(App.id == decoded['app_id']).first()
+       site = db.session.query(Site).filter(Site.code == app_code).first()
+       if not app_model:
+           raise NotFound()
+       if not app_code or not site:
+           raise BadRequest('Site URL is no longer valid.')
+       if app_model.enable_site is False:
+           raise BadRequest('Site is disabled.')
+       end_user = db.session.query(EndUser).filter(EndUser.id == decoded['end_user_id']).first()
+       if not end_user:
+           raise NotFound()
+
+       _validate_web_sso_token(decoded, system_features)
+
+       return app_model, end_user
+   except Unauthorized as e:
+       if system_features.sso_enforced_for_web:
+           raise WebSSOAuthRequiredError()
+
+       raise Unauthorized(e.description)


+def _validate_web_sso_token(decoded, system_features):
+   # Check if SSO is enforced for web, and if the token source is not SSO, raise an error and redirect to SSO login
+   if system_features.sso_enforced_for_web:
+       source = decoded.get('token_source')
+       if not source or source != 'sso':
+           raise WebSSOAuthRequiredError()
+
+   # Check if SSO is not enforced for web, and if the token source is SSO, raise an error and redirect to normal passport login
+   if not system_features.sso_enforced_for_web:
+       source = decoded.get('token_source')
+       if source and source == 'sso':
+           raise Unauthorized('sso token expired.')

-   return app_model, end_user

class WebApiResource(Resource):
    method_decorators = [validate_jwt_token]
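_validate_web_sso_token enforces consistency in both directions: a non-SSO token is rejected once SSO is enforced, and a leftover SSO token is rejected once enforcement is switched off. A stripped-down sketch of the decision as a pure function (illustrative names, not part of the diff):

def should_accept(token_source, sso_enforced_for_web: bool) -> bool:
    # Mirrors _validate_web_sso_token: accept only when the token's origin
    # matches the current enforcement mode.
    if sso_enforced_for_web:
        return token_source == 'sso'
    return token_source != 'sso'

assert should_accept('sso', True) and not should_accept(None, True)
assert should_accept(None, False) and not should_accept('sso', False)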
@@ -163,7 +163,9 @@ class BaseAgentRunner(AppRunner):
        """
        tool_entity = ToolManager.get_agent_tool_runtime(
            tenant_id=self.tenant_id,
            app_id=self.app_config.app_id,
            agent_tool=tool,
            invoke_from=self.application_generate_entity.invoke_from
        )
        tool_entity.load_variables(self.variables_pool)


@@ -121,7 +121,7 @@ class CotAgentRunner(BaseAgentRunner, ABC):
            raise ValueError("failed to invoke llm")

        usage_dict = {}
-       react_chunks = CotAgentOutputParser.handle_react_stream_output(chunks)
+       react_chunks = CotAgentOutputParser.handle_react_stream_output(chunks, usage_dict)
        scratchpad = AgentScratchpadUnit(
            agent_response='',
            thought='',

@@ -189,7 +189,7 @@ class CotAgentRunner(BaseAgentRunner, ABC):

        if not scratchpad.action:
            # failed to extract action, return final answer directly
-           final_answer = scratchpad.agent_response or ''
+           final_answer = ''
        else:
            if scratchpad.action.action_name.lower() == "final answer":
                # action is final answer, return final answer directly

@@ -219,7 +219,7 @@ class CotAgentRunner(BaseAgentRunner, ABC):
            tool_input={scratchpad.action.action_name: scratchpad.action.action_input},
            thought=scratchpad.thought,
            observation={scratchpad.action.action_name: tool_invoke_response},
-           tool_invoke_meta=tool_invoke_meta.to_dict(),
+           tool_invoke_meta={scratchpad.action.action_name: tool_invoke_meta.to_dict()},
            answer=scratchpad.agent_response,
            messages_ids=message_file_ids,
            llm_usage=usage_dict['usage']

@@ -8,7 +8,7 @@ class AgentToolEntity(BaseModel):
    """
    Agent Tool Entity.
    """
-   provider_type: Literal["builtin", "api"]
+   provider_type: Literal["builtin", "api", "workflow"]
    provider_id: str
    tool_name: str
    tool_parameters: dict[str, Any] = {}

@@ -9,7 +9,7 @@ from core.model_runtime.entities.llm_entities import LLMResultChunk

class CotAgentOutputParser:
    @classmethod
-   def handle_react_stream_output(cls, llm_response: Generator[LLMResultChunk, None, None]) -> \
+   def handle_react_stream_output(cls, llm_response: Generator[LLMResultChunk, None, None], usage_dict: dict) -> \
        Generator[Union[str, AgentScratchpadUnit.Action], None, None]:
        def parse_action(json_str):
            try:

@@ -58,6 +58,8 @@ class CotAgentOutputParser:
        thought_idx = 0

        for response in llm_response:
+           if response.delta.usage:
+               usage_dict['usage'] = response.delta.usage
            response = response.delta.message.content
            if not isinstance(response, str):
                continue
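handle_react_stream_output is a generator, so it cannot both stream chunks and return the final usage; the diff threads a mutable `usage_dict` through as an out-parameter that the caller reads after the stream is consumed. A stripped-down sketch of the pattern:

from collections.abc import Generator

def stream_with_usage(chunks: list[str], usage_dict: dict) -> Generator[str, None, None]:
    # Yield pieces as they arrive; record summary data in the caller's dict.
    total = 0
    for chunk in chunks:
        total += len(chunk)
        yield chunk
    usage_dict['usage'] = {'characters': total}

usage: dict = {}
text = ''.join(stream_with_usage(['Thought: ', 'done'], usage))
assert usage['usage']['characters'] == len(text)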
@@ -11,7 +11,7 @@ class SensitiveWordAvoidanceConfigManager:
        if not sensitive_word_avoidance_dict:
            return None

-       if 'enabled' in sensitive_word_avoidance_dict and sensitive_word_avoidance_dict['enabled']:
+       if sensitive_word_avoidance_dict.get('enabled'):
            return SensitiveWordAvoidanceEntity(
                type=sensitive_word_avoidance_dict.get('type'),
                config=sensitive_word_avoidance_dict.get('config'),
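This hunk and the config-manager hunks that follow all collapse the same idiom: `'key' in d and d['key']` behaves like `d.get('key')` in boolean context, because `get` returns None for a missing key. A two-line illustration:

d = {'enabled': False}
# Both forms are falsy for a missing key and for an explicit False value:
assert not ('enabled' in d and d['enabled'])
assert not d.get('enabled')
assert not d.get('missing')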
@ -1,7 +1,7 @@
|
||||
from typing import Optional
|
||||
|
||||
from core.agent.entities import AgentEntity, AgentPromptEntity, AgentToolEntity
|
||||
from core.tools.prompt.template import REACT_PROMPT_TEMPLATES
|
||||
from core.agent.prompt.template import REACT_PROMPT_TEMPLATES
|
||||
|
||||
|
||||
class AgentConfigManager:
|
||||
|
||||
@ -239,4 +239,4 @@ class WorkflowUIBasedAppConfig(AppConfig):
|
||||
"""
|
||||
Workflow UI Based App Config Entity.
|
||||
"""
|
||||
workflow_id: str
|
||||
workflow_id: str
|
||||
@ -14,7 +14,7 @@ class FileUploadConfigManager:
|
||||
"""
|
||||
file_upload_dict = config.get('file_upload')
|
||||
if file_upload_dict:
|
||||
if 'image' in file_upload_dict and file_upload_dict['image']:
|
||||
if file_upload_dict.get('image'):
|
||||
if 'enabled' in file_upload_dict['image'] and file_upload_dict['image']['enabled']:
|
||||
image_config = {
|
||||
'number_limits': file_upload_dict['image']['number_limits'],
|
||||
|
||||
@ -9,7 +9,7 @@ class MoreLikeThisConfigManager:
|
||||
more_like_this = False
|
||||
more_like_this_dict = config.get('more_like_this')
|
||||
if more_like_this_dict:
|
||||
if 'enabled' in more_like_this_dict and more_like_this_dict['enabled']:
|
||||
if more_like_this_dict.get('enabled'):
|
||||
more_like_this = True
|
||||
|
||||
return more_like_this
|
||||
|
```diff
@@ -4,7 +4,7 @@ class RetrievalResourceConfigManager:
         show_retrieve_source = False
         retriever_resource_dict = config.get('retriever_resource')
         if retriever_resource_dict:
-            if 'enabled' in retriever_resource_dict and retriever_resource_dict['enabled']:
+            if retriever_resource_dict.get('enabled'):
                 show_retrieve_source = True
 
         return show_retrieve_source
```
```diff
@@ -9,7 +9,7 @@ class SpeechToTextConfigManager:
         speech_to_text = False
         speech_to_text_dict = config.get('speech_to_text')
         if speech_to_text_dict:
-            if 'enabled' in speech_to_text_dict and speech_to_text_dict['enabled']:
+            if speech_to_text_dict.get('enabled'):
                 speech_to_text = True
 
         return speech_to_text
```
```diff
@@ -9,7 +9,7 @@ class SuggestedQuestionsAfterAnswerConfigManager:
         suggested_questions_after_answer = False
         suggested_questions_after_answer_dict = config.get('suggested_questions_after_answer')
         if suggested_questions_after_answer_dict:
-            if 'enabled' in suggested_questions_after_answer_dict and suggested_questions_after_answer_dict['enabled']:
+            if suggested_questions_after_answer_dict.get('enabled'):
                 suggested_questions_after_answer = True
 
         return suggested_questions_after_answer
```
```diff
@@ -12,7 +12,7 @@ class TextToSpeechConfigManager:
         text_to_speech = False
         text_to_speech_dict = config.get('text_to_speech')
         if text_to_speech_dict:
-            if 'enabled' in text_to_speech_dict and text_to_speech_dict['enabled']:
+            if text_to_speech_dict.get('enabled'):
                 text_to_speech = TextToSpeechEntity(
                     enabled=text_to_speech_dict.get('enabled'),
                     voice=text_to_speech_dict.get('voice'),
```
```diff
@@ -66,7 +66,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             conversation = self._get_conversation_by_user(app_model, args.get('conversation_id'), user)
 
         # parse files
-        files = args['files'] if 'files' in args and args['files'] else []
+        files = args['files'] if args.get('files') else []
         message_file_parser = MessageFileParser(tenant_id=app_model.tenant_id, app_id=app_model.id)
         file_extra_config = FileUploadConfigManager.convert(workflow.features_dict, is_vision=False)
         if file_extra_config:
@@ -98,6 +98,90 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             extras=extras
         )
 
+        return self._generate(
+            app_model=app_model,
+            workflow=workflow,
+            user=user,
+            invoke_from=invoke_from,
+            application_generate_entity=application_generate_entity,
+            conversation=conversation,
+            stream=stream
+        )
+
+    def single_iteration_generate(self, app_model: App,
+                                  workflow: Workflow,
+                                  node_id: str,
+                                  user: Account,
+                                  args: dict,
+                                  stream: bool = True) \
+            -> Union[dict, Generator[dict, None, None]]:
+        """
+        Generate App response.
+
+        :param app_model: App
+        :param workflow: Workflow
+        :param user: account or end user
+        :param args: request args
+        :param invoke_from: invoke from source
+        :param stream: is stream
+        """
+        if not node_id:
+            raise ValueError('node_id is required')
+
+        if args.get('inputs') is None:
+            raise ValueError('inputs is required')
+
+        extras = {
+            "auto_generate_conversation_name": False
+        }
+
+        # get conversation
+        conversation = None
+        if args.get('conversation_id'):
+            conversation = self._get_conversation_by_user(app_model, args.get('conversation_id'), user)
+
+        # convert to app config
+        app_config = AdvancedChatAppConfigManager.get_app_config(
+            app_model=app_model,
+            workflow=workflow
+        )
+
+        # init application generate entity
+        application_generate_entity = AdvancedChatAppGenerateEntity(
+            task_id=str(uuid.uuid4()),
+            app_config=app_config,
+            conversation_id=conversation.id if conversation else None,
+            inputs={},
+            query='',
+            files=[],
+            user_id=user.id,
+            stream=stream,
+            invoke_from=InvokeFrom.DEBUGGER,
+            extras=extras,
+            single_iteration_run=AdvancedChatAppGenerateEntity.SingleIterationRunEntity(
+                node_id=node_id,
+                inputs=args['inputs']
+            )
+        )
+
+        return self._generate(
+            app_model=app_model,
+            workflow=workflow,
+            user=user,
+            invoke_from=InvokeFrom.DEBUGGER,
+            application_generate_entity=application_generate_entity,
+            conversation=conversation,
+            stream=stream
+        )
+
+    def _generate(self, app_model: App,
+                  workflow: Workflow,
+                  user: Union[Account, EndUser],
+                  invoke_from: InvokeFrom,
+                  application_generate_entity: AdvancedChatAppGenerateEntity,
+                  conversation: Conversation = None,
+                  stream: bool = True) \
+            -> Union[dict, Generator[dict, None, None]]:
         is_first_conversation = False
         if not conversation:
             is_first_conversation = True
@@ -167,18 +251,30 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         """
         with flask_app.app_context():
             try:
-                # get conversation and message
-                conversation = self._get_conversation(conversation_id)
-                message = self._get_message(message_id)
-
-                # chatbot app
-                runner = AdvancedChatAppRunner()
-                runner.run(
-                    application_generate_entity=application_generate_entity,
-                    queue_manager=queue_manager,
-                    conversation=conversation,
-                    message=message
-                )
+                if application_generate_entity.single_iteration_run:
+                    single_iteration_run = application_generate_entity.single_iteration_run
+                    runner = AdvancedChatAppRunner()
+                    runner.single_iteration_run(
+                        app_id=application_generate_entity.app_config.app_id,
+                        workflow_id=application_generate_entity.app_config.workflow_id,
+                        queue_manager=queue_manager,
+                        inputs=single_iteration_run.inputs,
+                        node_id=single_iteration_run.node_id,
+                        user_id=application_generate_entity.user_id
+                    )
+                else:
+                    # get conversation and message
+                    conversation = self._get_conversation(conversation_id)
+                    message = self._get_message(message_id)
+
+                    # chatbot app
+                    runner = AdvancedChatAppRunner()
+                    runner.run(
+                        application_generate_entity=application_generate_entity,
+                        queue_manager=queue_manager,
+                        conversation=conversation,
+                        message=message
+                    )
             except GenerateTaskStoppedException:
                 pass
             except InvokeAuthorizationError:
```
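The `_generate_worker` change above routes a request either into a full chatflow run or into an isolated single-iteration debug run, depending on whether the generate entity carries a `single_iteration_run` payload. A self-contained sketch of that dispatch, with simplified stand-ins for the project's entities:

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class SingleIterationRun:
    node_id: str
    inputs: dict


@dataclass
class GenerateEntity:
    user_id: str
    single_iteration_run: Optional[SingleIterationRun] = None


def generate_worker(entity: GenerateEntity) -> str:
    # Mirrors the branch above: a debug payload short-circuits into a
    # single-node run; otherwise the full app run executes.
    if entity.single_iteration_run:
        run = entity.single_iteration_run
        return f'single-step run of node {run.node_id} with inputs {run.inputs}'
    return 'full chatflow run'


print(generate_worker(GenerateEntity(user_id='u1')))
print(generate_worker(GenerateEntity(
    user_id='u1',
    single_iteration_run=SingleIterationRun(node_id='iter-1', inputs={'x': 1}),
)))
```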
```diff
@@ -18,7 +18,7 @@ from core.workflow.entities.node_entities import SystemVariable
 from core.workflow.nodes.base_node import UserFrom
 from core.workflow.workflow_engine_manager import WorkflowEngineManager
 from extensions.ext_database import db
-from models.model import App, Conversation, Message
+from models.model import App, Conversation, EndUser, Message
 from models.workflow import Workflow
 
 logger = logging.getLogger(__name__)
@@ -56,6 +56,14 @@ class AdvancedChatAppRunner(AppRunner):
         query = application_generate_entity.query
         files = application_generate_entity.files
 
+        user_id = None
+        if application_generate_entity.invoke_from in [InvokeFrom.WEB_APP, InvokeFrom.SERVICE_API]:
+            end_user = db.session.query(EndUser).filter(EndUser.id == application_generate_entity.user_id).first()
+            if end_user:
+                user_id = end_user.session_id
+        else:
+            user_id = application_generate_entity.user_id
+
         # moderation
         if self.handle_input_moderation(
             queue_manager=queue_manager,
@@ -94,12 +102,43 @@ class AdvancedChatAppRunner(AppRunner):
             user_from=UserFrom.ACCOUNT
             if application_generate_entity.invoke_from in [InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER]
             else UserFrom.END_USER,
             invoke_from=application_generate_entity.invoke_from,
             user_inputs=inputs,
             system_inputs={
                 SystemVariable.QUERY: query,
                 SystemVariable.FILES: files,
-                SystemVariable.CONVERSATION: conversation.id,
+                SystemVariable.CONVERSATION_ID: conversation.id,
+                SystemVariable.USER_ID: user_id
             },
             callbacks=workflow_callbacks,
+            call_depth=application_generate_entity.call_depth
         )
 
+    def single_iteration_run(self, app_id: str, workflow_id: str,
+                             queue_manager: AppQueueManager,
+                             inputs: dict, node_id: str, user_id: str) -> None:
+        """
+        Single iteration run
+        """
+        app_record: App = db.session.query(App).filter(App.id == app_id).first()
+        if not app_record:
+            raise ValueError("App not found")
+
+        workflow = self.get_workflow(app_model=app_record, workflow_id=workflow_id)
+        if not workflow:
+            raise ValueError("Workflow not initialized")
+
+        workflow_callbacks = [WorkflowEventTriggerCallback(
+            queue_manager=queue_manager,
+            workflow=workflow
+        )]
+
+        workflow_engine_manager = WorkflowEngineManager()
+        workflow_engine_manager.single_step_run_iteration_workflow_node(
+            workflow=workflow,
+            node_id=node_id,
+            user_id=user_id,
+            user_inputs=inputs,
+            callbacks=workflow_callbacks
+        )
```
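The user-id resolution added in the runner above distinguishes external callers (web app, service API), whose stable identifier is the `EndUser.session_id`, from console accounts, which use their id directly. A condensed, runnable sketch of that rule with an in-memory stand-in for the `EndUser` table:

```python
from enum import Enum


class InvokeFrom(Enum):
    WEB_APP = 'web-app'
    SERVICE_API = 'service-api'
    EXPLORE = 'explore'
    DEBUGGER = 'debugger'


# stand-in for the EndUser table keyed by primary key
END_USERS = {'row-1': {'session_id': 'sess-abc'}}


def resolve_user_id(invoke_from: InvokeFrom, raw_user_id: str) -> str | None:
    # Web-app and service-api callers are EndUser rows; expose their stable
    # session_id instead of the internal primary key.
    if invoke_from in [InvokeFrom.WEB_APP, InvokeFrom.SERVICE_API]:
        end_user = END_USERS.get(raw_user_id)
        return end_user['session_id'] if end_user else None
    return raw_user_id


print(resolve_user_id(InvokeFrom.WEB_APP, 'row-1'))    # sess-abc
print(resolve_user_id(InvokeFrom.DEBUGGER, 'acct-9'))  # acct-9
```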
```diff
@@ -8,6 +8,8 @@ from core.app.entities.task_entities import (
     ChatbotAppStreamResponse,
     ErrorStreamResponse,
     MessageEndStreamResponse,
+    NodeFinishStreamResponse,
+    NodeStartStreamResponse,
     PingStreamResponse,
 )
 
@@ -111,6 +113,8 @@ class AdvancedChatAppGenerateResponseConverter(AppGenerateResponseConverter):
             if isinstance(sub_stream_response, ErrorStreamResponse):
                 data = cls._error_to_stream_response(sub_stream_response.err)
                 response_chunk.update(data)
+            elif isinstance(sub_stream_response, NodeStartStreamResponse | NodeFinishStreamResponse):
+                response_chunk.update(sub_stream_response.to_ignore_detail_dict())
             else:
                 response_chunk.update(sub_stream_response.to_dict())
```
```diff
@@ -12,6 +12,9 @@ from core.app.entities.queue_entities import (
     QueueAdvancedChatMessageEndEvent,
     QueueAnnotationReplyEvent,
     QueueErrorEvent,
+    QueueIterationCompletedEvent,
+    QueueIterationNextEvent,
+    QueueIterationStartEvent,
     QueueMessageReplaceEvent,
     QueueNodeFailedEvent,
     QueueNodeStartedEvent,
@@ -28,9 +31,9 @@ from core.app.entities.task_entities import (
     AdvancedChatTaskState,
     ChatbotAppBlockingResponse,
     ChatbotAppStreamResponse,
+    ChatflowStreamGenerateRoute,
     ErrorStreamResponse,
     MessageEndStreamResponse,
-    StreamGenerateRoute,
     StreamResponse,
 )
 from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
@@ -64,6 +67,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
     _workflow: Workflow
     _user: Union[Account, EndUser]
     _workflow_system_variables: dict[SystemVariable, Any]
+    _iteration_nested_relations: dict[str, list[str]]
 
     def __init__(self, application_generate_entity: AdvancedChatAppGenerateEntity,
                  workflow: Workflow,
@@ -84,13 +88,19 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
         """
         super().__init__(application_generate_entity, queue_manager, user, stream)
 
+        if isinstance(self._user, EndUser):
+            user_id = self._user.session_id
+        else:
+            user_id = self._user.id
+
         self._workflow = workflow
         self._conversation = conversation
         self._message = message
         self._workflow_system_variables = {
             SystemVariable.QUERY: message.query,
             SystemVariable.FILES: application_generate_entity.files,
-            SystemVariable.CONVERSATION: conversation.id,
+            SystemVariable.CONVERSATION_ID: conversation.id,
+            SystemVariable.USER_ID: user_id
         }
 
         self._task_state = AdvancedChatTaskState(
@@ -98,6 +108,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
         )
 
         self._stream_generate_routes = self._get_stream_generate_routes()
+        self._iteration_nested_relations = self._get_iteration_nested_relations(self._workflow.graph_dict)
         self._conversation_name_generate_thread = None
 
     def process(self) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]:
@@ -198,6 +209,8 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
             # search stream_generate_routes if node id is answer start at node
             if not self._task_state.current_stream_generate_state and event.node_id in self._stream_generate_routes:
                 self._task_state.current_stream_generate_state = self._stream_generate_routes[event.node_id]
+                # reset current route position to 0
+                self._task_state.current_stream_generate_state.current_route_position = 0
 
             # generate stream outputs when node started
             yield from self._generate_stream_outputs_when_node_started()
@@ -219,6 +232,22 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
                 task_id=self._application_generate_entity.task_id,
                 workflow_node_execution=workflow_node_execution
             )
+
+            if isinstance(event, QueueNodeFailedEvent):
+                yield from self._handle_iteration_exception(
+                    task_id=self._application_generate_entity.task_id,
+                    error=f'Child node failed: {event.error}'
+                )
+        elif isinstance(event, QueueIterationStartEvent | QueueIterationNextEvent | QueueIterationCompletedEvent):
+            if isinstance(event, QueueIterationNextEvent):
+                # clear ran node execution infos of current iteration
+                iteration_relations = self._iteration_nested_relations.get(event.node_id)
+                if iteration_relations:
+                    for node_id in iteration_relations:
+                        self._task_state.ran_node_execution_infos.pop(node_id, None)
+
+            yield self._handle_iteration_to_stream_response(self._application_generate_entity.task_id, event)
+            self._handle_iteration_operation(event)
         elif isinstance(event, QueueStopEvent | QueueWorkflowSucceededEvent | QueueWorkflowFailedEvent):
             workflow_run = self._handle_workflow_finished(event)
             if workflow_run:
@@ -257,10 +286,6 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
             self._handle_retriever_resources(event)
         elif isinstance(event, QueueAnnotationReplyEvent):
             self._handle_annotation_reply(event)
-        # elif isinstance(event, QueueMessageFileEvent):
-        #     response = self._message_file_to_stream_response(event)
-        #     if response:
-        #         yield response
         elif isinstance(event, QueueTextChunkEvent):
             delta_text = event.text
             if delta_text is None:
@@ -336,8 +361,8 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
             id=self._message.id,
             **extras
         )
 
-    def _get_stream_generate_routes(self) -> dict[str, StreamGenerateRoute]:
+    def _get_stream_generate_routes(self) -> dict[str, ChatflowStreamGenerateRoute]:
         """
         Get stream generate routes.
         :return:
@@ -360,13 +385,13 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
                 continue
 
             for start_node_id in start_node_ids:
-                stream_generate_routes[start_node_id] = StreamGenerateRoute(
+                stream_generate_routes[start_node_id] = ChatflowStreamGenerateRoute(
                     answer_node_id=answer_node_id,
                     generate_route=generate_route
                 )
 
         return stream_generate_routes
 
     def _get_answer_start_at_node_ids(self, graph: dict, target_node_id: str) \
             -> list[str]:
         """
@@ -395,14 +420,23 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
                 continue
 
             node_type = source_node.get('data', {}).get('type')
+            node_iteration_id = source_node.get('data', {}).get('iteration_id')
+            iteration_start_node_id = None
+            if node_iteration_id:
+                iteration_node = next((node for node in nodes if node.get('id') == node_iteration_id), None)
+                iteration_start_node_id = iteration_node.get('data', {}).get('start_node_id')
+
             if node_type in [
                 NodeType.ANSWER.value,
                 NodeType.IF_ELSE.value,
-                NodeType.QUESTION_CLASSIFIER.value
+                NodeType.QUESTION_CLASSIFIER.value,
+                NodeType.ITERATION.value,
+                NodeType.LOOP.value
             ]:
                 start_node_id = target_node_id
                 start_node_ids.append(start_node_id)
-            elif node_type == NodeType.START.value:
+            elif node_type == NodeType.START.value or \
+                    node_iteration_id is not None and iteration_start_node_id == source_node.get('id'):
                 start_node_id = source_node_id
                 start_node_ids.append(start_node_id)
             else:
```
```diff
@@ -411,7 +445,27 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
                 start_node_ids.extend(sub_start_node_ids)
 
         return start_node_ids
 
+    def _get_iteration_nested_relations(self, graph: dict) -> dict[str, list[str]]:
+        """
+        Get iteration nested relations.
+        :param graph: graph
+        :return:
+        """
+        nodes = graph.get('nodes')
+
+        iteration_ids = [node.get('id') for node in nodes
+                         if node.get('data', {}).get('type') in [
+                             NodeType.ITERATION.value,
+                             NodeType.LOOP.value,
+                         ]]
+
+        return {
+            iteration_id: [
+                node.get('id') for node in nodes if node.get('data', {}).get('iteration_id') == iteration_id
+            ] for iteration_id in iteration_ids
+        }
+
     def _generate_stream_outputs_when_node_started(self) -> Generator:
         """
         Generate stream outputs.
```
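A toy run of the nested-relations helper added above, using a minimal graph dict in the same shape the hunk consumes (node ids and types here are made up): it maps each iteration or loop node id to the ids of the nodes nested inside it.

```python
graph = {
    'nodes': [
        {'id': 'iter-1', 'data': {'type': 'iteration'}},
        {'id': 'llm-1', 'data': {'type': 'llm', 'iteration_id': 'iter-1'}},
        {'id': 'code-1', 'data': {'type': 'code', 'iteration_id': 'iter-1'}},
        {'id': 'answer-1', 'data': {'type': 'answer'}},
    ]
}

iteration_ids = [node['id'] for node in graph['nodes']
                 if node.get('data', {}).get('type') in ('iteration', 'loop')]
relations = {
    iteration_id: [
        node['id'] for node in graph['nodes']
        if node.get('data', {}).get('iteration_id') == iteration_id
    ]
    for iteration_id in iteration_ids
}
print(relations)  # {'iter-1': ['llm-1', 'code-1']}
```

This mapping is what lets the pipeline clear per-child execution state each time a `QueueIterationNextEvent` arrives.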
```diff
@@ -419,20 +473,20 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
         """
         if self._task_state.current_stream_generate_state:
             route_chunks = self._task_state.current_stream_generate_state.generate_route[
-                self._task_state.current_stream_generate_state.current_route_position:]
+                self._task_state.current_stream_generate_state.current_route_position:
+            ]
 
             for route_chunk in route_chunks:
                 if route_chunk.type == 'text':
                     route_chunk = cast(TextGenerateRouteChunk, route_chunk)
-                    for token in route_chunk.text:
-                        # handle output moderation chunk
-                        should_direct_answer = self._handle_output_moderation_chunk(token)
-                        if should_direct_answer:
-                            continue
-
-                        self._task_state.answer += token
-                        yield self._message_to_stream_response(token, self._message.id)
-                        time.sleep(0.01)
+
+                    # handle output moderation chunk
+                    should_direct_answer = self._handle_output_moderation_chunk(route_chunk.text)
+                    if should_direct_answer:
+                        continue
+
+                    self._task_state.answer += route_chunk.text
+                    yield self._message_to_stream_response(route_chunk.text, self._message.id)
                 else:
                     break
@@ -453,14 +507,12 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
 
             route_chunks = self._task_state.current_stream_generate_state.generate_route[
                 self._task_state.current_stream_generate_state.current_route_position:]
 
             for route_chunk in route_chunks:
                 if route_chunk.type == 'text':
                     route_chunk = cast(TextGenerateRouteChunk, route_chunk)
-                    for token in route_chunk.text:
-                        self._task_state.answer += token
-                        yield self._message_to_stream_response(token, self._message.id)
-                        time.sleep(0.01)
+                    self._task_state.answer += route_chunk.text
+                    yield self._message_to_stream_response(route_chunk.text, self._message.id)
                 else:
                     route_chunk = cast(VarGenerateRouteChunk, route_chunk)
                     value_selector = route_chunk.value_selector
```
```diff
@@ -1,8 +1,11 @@
-from typing import Optional
+from typing import Any, Optional
 
 from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
 from core.app.entities.queue_entities import (
     AppQueueEvent,
+    QueueIterationCompletedEvent,
+    QueueIterationNextEvent,
+    QueueIterationStartEvent,
     QueueNodeFailedEvent,
     QueueNodeStartedEvent,
     QueueNodeSucceededEvent,
@@ -130,6 +133,66 @@ class WorkflowEventTriggerCallback(BaseWorkflowCallback):
             ), PublishFrom.APPLICATION_MANAGER
         )
 
+    def on_workflow_iteration_started(self,
+                                      node_id: str,
+                                      node_type: NodeType,
+                                      node_run_index: int = 1,
+                                      node_data: Optional[BaseNodeData] = None,
+                                      inputs: dict = None,
+                                      predecessor_node_id: Optional[str] = None,
+                                      metadata: Optional[dict] = None) -> None:
+        """
+        Publish iteration started
+        """
+        self._queue_manager.publish(
+            QueueIterationStartEvent(
+                node_id=node_id,
+                node_type=node_type,
+                node_run_index=node_run_index,
+                node_data=node_data,
+                inputs=inputs,
+                predecessor_node_id=predecessor_node_id,
+                metadata=metadata
+            ),
+            PublishFrom.APPLICATION_MANAGER
+        )
+
+    def on_workflow_iteration_next(self, node_id: str,
+                                   node_type: NodeType,
+                                   index: int,
+                                   node_run_index: int,
+                                   output: Optional[Any]) -> None:
+        """
+        Publish iteration next
+        """
+        self._queue_manager._publish(
+            QueueIterationNextEvent(
+                node_id=node_id,
+                node_type=node_type,
+                index=index,
+                node_run_index=node_run_index,
+                output=output
+            ),
+            PublishFrom.APPLICATION_MANAGER
+        )
+
+    def on_workflow_iteration_completed(self, node_id: str,
+                                        node_type: NodeType,
+                                        node_run_index: int,
+                                        outputs: dict) -> None:
+        """
+        Publish iteration completed
+        """
+        self._queue_manager._publish(
+            QueueIterationCompletedEvent(
+                node_id=node_id,
+                node_type=node_type,
+                node_run_index=node_run_index,
+                outputs=outputs
+            ),
+            PublishFrom.APPLICATION_MANAGER
+        )
+
     def on_event(self, event: AppQueueEvent) -> None:
         """
         Publish event
```
```diff
@@ -83,7 +83,7 @@ class AgentChatAppGenerator(MessageBasedAppGenerator):
         )
 
         # parse files
-        files = args['files'] if 'files' in args and args['files'] else []
+        files = args['files'] if args.get('files') else []
         message_file_parser = MessageFileParser(tenant_id=app_model.tenant_id, app_id=app_model.id)
         file_extra_config = FileUploadConfigManager.convert(override_model_config_dict or app_model_config.to_dict())
         if file_extra_config:
@@ -115,7 +115,8 @@ class AgentChatAppGenerator(MessageBasedAppGenerator):
             user_id=user.id,
             stream=stream,
             invoke_from=invoke_from,
-            extras=extras
+            extras=extras,
+            call_depth=0
         )
 
         # init generate records
```
```diff
@@ -26,7 +26,10 @@ class AppGenerateResponseConverter(ABC):
         else:
             def _generate():
                 for chunk in cls.convert_stream_full_response(response):
-                    yield f'data: {chunk}\n\n'
+                    if chunk == 'ping':
+                        yield f'event: {chunk}\n\n'
+                    else:
+                        yield f'data: {chunk}\n\n'
 
             return _generate()
         else:
@@ -35,7 +38,10 @@ class AppGenerateResponseConverter(ABC):
         else:
             def _generate():
                 for chunk in cls.convert_stream_simple_response(response):
-                    yield f'data: {chunk}\n\n'
+                    if chunk == 'ping':
+                        yield f'event: {chunk}\n\n'
+                    else:
+                        yield f'data: {chunk}\n\n'
 
             return _generate()
```
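The framing change above makes a bare `ping` chunk go out as a server-sent-events `event:` line rather than a `data:` payload, so clients can treat it as a keep-alive instead of a message. A minimal sketch of the resulting SSE framing:

```python
def sse_frames(chunks):
    # Keep-alives become event lines; everything else stays a data line.
    for chunk in chunks:
        if chunk == 'ping':
            yield f'event: {chunk}\n\n'
        else:
            yield f'data: {chunk}\n\n'


print(''.join(sse_frames(['{"event": "message"}', 'ping'])))
```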
```diff
@@ -13,7 +13,9 @@ class BaseAppGenerator:
         for variable_config in variables:
             variable = variable_config.variable
 
-            if variable not in user_inputs or not user_inputs[variable]:
+            if (variable not in user_inputs
+                    or user_inputs[variable] is None
+                    or (isinstance(user_inputs[variable], str) and user_inputs[variable] == '')):
                 if variable_config.required:
                     raise ValueError(f"{variable} is required in input form")
                 else:
@@ -22,21 +24,29 @@ class BaseAppGenerator:
 
             value = user_inputs[variable]
 
-            if value:
-                if not isinstance(value, str):
+            if value is not None:
+                if variable_config.type != VariableEntity.Type.NUMBER and not isinstance(value, str):
                     raise ValueError(f"{variable} in input form must be a string")
+                elif variable_config.type == VariableEntity.Type.NUMBER and isinstance(value, str):
+                    if '.' in value:
+                        value = float(value)
+                    else:
+                        value = int(value)
 
             if variable_config.type == VariableEntity.Type.SELECT:
                 options = variable_config.options if variable_config.options is not None else []
                 if value not in options:
                     raise ValueError(f"{variable} in input form must be one of the following: {options}")
-            else:
+            elif variable_config.type in [VariableEntity.Type.TEXT_INPUT, VariableEntity.Type.PARAGRAPH]:
                 if variable_config.max_length is not None:
                     max_length = variable_config.max_length
                     if len(value) > max_length:
                         raise ValueError(f'{variable} in input form must be less than {max_length} characters')
 
-            filtered_inputs[variable] = value.replace('\x00', '') if value else None
+            if value and isinstance(value, str):
+                filtered_inputs[variable] = value.replace('\x00', '')
+            else:
+                filtered_inputs[variable] = value if value is not None else None
 
         return filtered_inputs
```
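The key behavioral change above is that NUMBER variables submitted as strings are now coerced to numeric types instead of being rejected. A condensed re-implementation of just that coercion rule (a sketch, not the validator itself):

```python
def coerce_number(value: str) -> int | float:
    # A decimal point selects float; otherwise parse as int, matching the
    # branch added in the hunk above.
    return float(value) if '.' in value else int(value)


assert coerce_number('3') == 3 and isinstance(coerce_number('3'), int)
assert coerce_number('3.5') == 3.5 and isinstance(coerce_number('3.5'), float)
```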
```diff
@@ -80,7 +80,7 @@ class ChatAppGenerator(MessageBasedAppGenerator):
         )
 
         # parse files
-        files = args['files'] if 'files' in args and args['files'] else []
+        files = args['files'] if args.get('files') else []
         message_file_parser = MessageFileParser(tenant_id=app_model.tenant_id, app_id=app_model.id)
         file_extra_config = FileUploadConfigManager.convert(override_model_config_dict or app_model_config.to_dict())
         if file_extra_config:
```
```diff
@@ -75,7 +75,7 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
         )
 
         # parse files
-        files = args['files'] if 'files' in args and args['files'] else []
+        files = args['files'] if args.get('files') else []
         message_file_parser = MessageFileParser(tenant_id=app_model.tenant_id, app_id=app_model.id)
         file_extra_config = FileUploadConfigManager.convert(override_model_config_dict or app_model_config.to_dict())
         if file_extra_config:
```
```diff
@@ -34,7 +34,8 @@ class WorkflowAppGenerator(BaseAppGenerator):
                  user: Union[Account, EndUser],
                  args: dict,
                  invoke_from: InvokeFrom,
-                 stream: bool = True) \
+                 stream: bool = True,
+                 call_depth: int = 0) \
             -> Union[dict, Generator[dict, None, None]]:
         """
         Generate App response.
@@ -49,7 +50,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
         inputs = args['inputs']
 
         # parse files
-        files = args['files'] if 'files' in args and args['files'] else []
+        files = args['files'] if args.get('files') else []
         message_file_parser = MessageFileParser(tenant_id=app_model.tenant_id, app_id=app_model.id)
         file_extra_config = FileUploadConfigManager.convert(workflow.features_dict, is_vision=False)
         if file_extra_config:
@@ -75,9 +76,38 @@ class WorkflowAppGenerator(BaseAppGenerator):
             files=file_objs,
             user_id=user.id,
             stream=stream,
-            invoke_from=invoke_from
+            invoke_from=invoke_from,
+            call_depth=call_depth
         )
 
+        return self._generate(
+            app_model=app_model,
+            workflow=workflow,
+            user=user,
+            application_generate_entity=application_generate_entity,
+            invoke_from=invoke_from,
+            stream=stream,
+            call_depth=call_depth
+        )
+
+    def _generate(self, app_model: App,
+                  workflow: Workflow,
+                  user: Union[Account, EndUser],
+                  application_generate_entity: WorkflowAppGenerateEntity,
+                  invoke_from: InvokeFrom,
+                  stream: bool = True,
+                  call_depth: int = 0) \
+            -> Union[dict, Generator[dict, None, None]]:
+        """
+        Generate App response.
+
+        :param app_model: App
+        :param workflow: Workflow
+        :param user: account or end user
+        :param application_generate_entity: application generate entity
+        :param invoke_from: invoke from source
+        :param stream: is stream
+        """
         # init queue manager
         queue_manager = WorkflowAppQueueManager(
             task_id=application_generate_entity.task_id,
@@ -109,6 +139,64 @@ class WorkflowAppGenerator(BaseAppGenerator):
             invoke_from=invoke_from
         )
 
+    def single_iteration_generate(self, app_model: App,
+                                  workflow: Workflow,
+                                  node_id: str,
+                                  user: Account,
+                                  args: dict,
+                                  stream: bool = True) \
+            -> Union[dict, Generator[dict, None, None]]:
+        """
+        Generate App response.
+
+        :param app_model: App
+        :param workflow: Workflow
+        :param user: account or end user
+        :param args: request args
+        :param invoke_from: invoke from source
+        :param stream: is stream
+        """
+        if not node_id:
+            raise ValueError('node_id is required')
+
+        if args.get('inputs') is None:
+            raise ValueError('inputs is required')
+
+        extras = {
+            "auto_generate_conversation_name": False
+        }
+
+        # convert to app config
+        app_config = WorkflowAppConfigManager.get_app_config(
+            app_model=app_model,
+            workflow=workflow
+        )
+
+        # init application generate entity
+        application_generate_entity = WorkflowAppGenerateEntity(
+            task_id=str(uuid.uuid4()),
+            app_config=app_config,
+            inputs={},
+            files=[],
+            user_id=user.id,
+            stream=stream,
+            invoke_from=InvokeFrom.DEBUGGER,
+            extras=extras,
+            single_iteration_run=WorkflowAppGenerateEntity.SingleIterationRunEntity(
+                node_id=node_id,
+                inputs=args['inputs']
+            )
+        )
+
+        return self._generate(
+            app_model=app_model,
+            workflow=workflow,
+            user=user,
+            invoke_from=InvokeFrom.DEBUGGER,
+            application_generate_entity=application_generate_entity,
+            stream=stream
+        )
+
     def _generate_worker(self, flask_app: Flask,
                          application_generate_entity: WorkflowAppGenerateEntity,
                          queue_manager: AppQueueManager) -> None:
@@ -123,10 +211,21 @@ class WorkflowAppGenerator(BaseAppGenerator):
             try:
                 # workflow app
                 runner = WorkflowAppRunner()
-                runner.run(
-                    application_generate_entity=application_generate_entity,
-                    queue_manager=queue_manager
-                )
+                if application_generate_entity.single_iteration_run:
+                    single_iteration_run = application_generate_entity.single_iteration_run
+                    runner.single_iteration_run(
+                        app_id=application_generate_entity.app_config.app_id,
+                        workflow_id=application_generate_entity.app_config.workflow_id,
+                        queue_manager=queue_manager,
+                        inputs=single_iteration_run.inputs,
+                        node_id=single_iteration_run.node_id,
+                        user_id=application_generate_entity.user_id
+                    )
+                else:
+                    runner.run(
+                        application_generate_entity=application_generate_entity,
+                        queue_manager=queue_manager
+                    )
             except GenerateTaskStoppedException:
                 pass
             except InvokeAuthorizationError:
```
```diff
@@ -14,7 +14,7 @@ from core.workflow.entities.node_entities import SystemVariable
 from core.workflow.nodes.base_node import UserFrom
 from core.workflow.workflow_engine_manager import WorkflowEngineManager
 from extensions.ext_database import db
-from models.model import App
+from models.model import App, EndUser
 from models.workflow import Workflow
 
 logger = logging.getLogger(__name__)
@@ -36,6 +36,14 @@ class WorkflowAppRunner:
         app_config = application_generate_entity.app_config
         app_config = cast(WorkflowAppConfig, app_config)
 
+        user_id = None
+        if application_generate_entity.invoke_from in [InvokeFrom.WEB_APP, InvokeFrom.SERVICE_API]:
+            end_user = db.session.query(EndUser).filter(EndUser.id == application_generate_entity.user_id).first()
+            if end_user:
+                user_id = end_user.session_id
+        else:
+            user_id = application_generate_entity.user_id
+
         app_record = db.session.query(App).filter(App.id == app_config.app_id).first()
         if not app_record:
             raise ValueError("App not found")
@@ -65,10 +73,44 @@ class WorkflowAppRunner:
             user_from=UserFrom.ACCOUNT
             if application_generate_entity.invoke_from in [InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER]
             else UserFrom.END_USER,
             invoke_from=application_generate_entity.invoke_from,
             user_inputs=inputs,
             system_inputs={
-                SystemVariable.FILES: files
+                SystemVariable.FILES: files,
+                SystemVariable.USER_ID: user_id
             },
             callbacks=workflow_callbacks,
+            call_depth=application_generate_entity.call_depth
         )
 
+    def single_iteration_run(self, app_id: str, workflow_id: str,
+                             queue_manager: AppQueueManager,
+                             inputs: dict, node_id: str, user_id: str) -> None:
+        """
+        Single iteration run
+        """
+        app_record: App = db.session.query(App).filter(App.id == app_id).first()
+        if not app_record:
+            raise ValueError("App not found")
+
+        if not app_record.workflow_id:
+            raise ValueError("Workflow not initialized")
+
+        workflow = self.get_workflow(app_model=app_record, workflow_id=workflow_id)
+        if not workflow:
+            raise ValueError("Workflow not initialized")
+
+        workflow_callbacks = [WorkflowEventTriggerCallback(
+            queue_manager=queue_manager,
+            workflow=workflow
+        )]
+
+        workflow_engine_manager = WorkflowEngineManager()
+        workflow_engine_manager.single_step_run_iteration_workflow_node(
+            workflow=workflow,
+            node_id=node_id,
+            user_id=user_id,
+            user_inputs=inputs,
+            callbacks=workflow_callbacks
+        )
```
```diff
@@ -5,6 +5,8 @@ from typing import cast
 from core.app.apps.base_app_generate_response_converter import AppGenerateResponseConverter
 from core.app.entities.task_entities import (
     ErrorStreamResponse,
+    NodeFinishStreamResponse,
+    NodeStartStreamResponse,
     PingStreamResponse,
     WorkflowAppBlockingResponse,
     WorkflowAppStreamResponse,
@@ -68,4 +70,24 @@ class WorkflowAppGenerateResponseConverter(AppGenerateResponseConverter):
         :param stream_response: stream response
         :return:
         """
-        return cls.convert_stream_full_response(stream_response)
+        for chunk in stream_response:
+            chunk = cast(WorkflowAppStreamResponse, chunk)
+            sub_stream_response = chunk.stream_response
+
+            if isinstance(sub_stream_response, PingStreamResponse):
+                yield 'ping'
+                continue
+
+            response_chunk = {
+                'event': sub_stream_response.event.value,
+                'workflow_run_id': chunk.workflow_run_id,
+            }
+
+            if isinstance(sub_stream_response, ErrorStreamResponse):
+                data = cls._error_to_stream_response(sub_stream_response.err)
+                response_chunk.update(data)
+            elif isinstance(sub_stream_response, NodeStartStreamResponse | NodeFinishStreamResponse):
+                response_chunk.update(sub_stream_response.to_ignore_detail_dict())
+            else:
+                response_chunk.update(sub_stream_response.to_dict())
+            yield json.dumps(response_chunk)
```
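The rewrite above stops delegating the simple-response path to the full-response converter and instead builds a trimmed JSON object per chunk. A sketch of that loop with plain dicts standing in for the response types:

```python
import json


def convert_stream_simple(chunks):
    # Pings pass through as the literal string 'ping'; everything else is
    # flattened into a JSON object tagged with its event and run id.
    for chunk in chunks:
        if chunk['event'] == 'ping':
            yield 'ping'
            continue
        yield json.dumps({
            'event': chunk['event'],
            'workflow_run_id': chunk['run_id'],
            **chunk.get('data', {}),
        })


for frame in convert_stream_simple([
    {'event': 'ping', 'run_id': 'r1'},
    {'event': 'node_finished', 'run_id': 'r1', 'data': {'node_id': 'n1'}},
]):
    print(frame)
```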
```diff
@@ -9,6 +9,9 @@ from core.app.entities.app_invoke_entities import (
 )
 from core.app.entities.queue_entities import (
     QueueErrorEvent,
+    QueueIterationCompletedEvent,
+    QueueIterationNextEvent,
+    QueueIterationStartEvent,
     QueueMessageReplaceEvent,
     QueueNodeFailedEvent,
     QueueNodeStartedEvent,
@@ -28,11 +31,13 @@ from core.app.entities.task_entities import (
     WorkflowAppBlockingResponse,
     WorkflowAppStreamResponse,
     WorkflowFinishStreamResponse,
+    WorkflowStreamGenerateNodes,
     WorkflowTaskState,
 )
 from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
 from core.app.task_pipeline.workflow_cycle_manage import WorkflowCycleManage
-from core.workflow.entities.node_entities import SystemVariable
+from core.workflow.entities.node_entities import NodeType, SystemVariable
+from core.workflow.nodes.end.end_node import EndNode
 from extensions.ext_database import db
 from models.account import Account
 from models.model import EndUser
@@ -40,6 +45,7 @@ from models.workflow import (
     Workflow,
     WorkflowAppLog,
     WorkflowAppLogCreatedFrom,
+    WorkflowNodeExecution,
     WorkflowRun,
 )
 
@@ -55,6 +61,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
     _task_state: WorkflowTaskState
     _application_generate_entity: WorkflowAppGenerateEntity
     _workflow_system_variables: dict[SystemVariable, Any]
+    _iteration_nested_relations: dict[str, list[str]]
 
     def __init__(self, application_generate_entity: WorkflowAppGenerateEntity,
                  workflow: Workflow,
@@ -71,12 +78,22 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
         """
         super().__init__(application_generate_entity, queue_manager, user, stream)
 
+        if isinstance(self._user, EndUser):
+            user_id = self._user.session_id
+        else:
+            user_id = self._user.id
+
         self._workflow = workflow
         self._workflow_system_variables = {
             SystemVariable.FILES: application_generate_entity.files,
+            SystemVariable.USER_ID: user_id
         }
 
-        self._task_state = WorkflowTaskState()
+        self._task_state = WorkflowTaskState(
+            iteration_nested_node_ids=[]
+        )
         self._stream_generate_nodes = self._get_stream_generate_nodes()
+        self._iteration_nested_relations = self._get_iteration_nested_relations(self._workflow.graph_dict)
 
     def process(self) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:
         """
@@ -161,6 +178,14 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
             )
         elif isinstance(event, QueueNodeStartedEvent):
             workflow_node_execution = self._handle_node_start(event)
 
+            # search stream_generate_routes if node id is answer start at node
+            if not self._task_state.current_stream_generate_state and event.node_id in self._stream_generate_nodes:
+                self._task_state.current_stream_generate_state = self._stream_generate_nodes[event.node_id]
+
+            # generate stream outputs when node started
+            yield from self._generate_stream_outputs_when_node_started()
+
             yield self._workflow_node_start_to_stream_response(
                 event=event,
                 task_id=self._application_generate_entity.task_id,
@@ -168,10 +193,27 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
             )
         elif isinstance(event, QueueNodeSucceededEvent | QueueNodeFailedEvent):
             workflow_node_execution = self._handle_node_finished(event)
 
             yield self._workflow_node_finish_to_stream_response(
                 task_id=self._application_generate_entity.task_id,
                 workflow_node_execution=workflow_node_execution
             )
+
+            if isinstance(event, QueueNodeFailedEvent):
+                yield from self._handle_iteration_exception(
+                    task_id=self._application_generate_entity.task_id,
+                    error=f'Child node failed: {event.error}'
+                )
+        elif isinstance(event, QueueIterationStartEvent | QueueIterationNextEvent | QueueIterationCompletedEvent):
+            if isinstance(event, QueueIterationNextEvent):
+                # clear ran node execution infos of current iteration
+                iteration_relations = self._iteration_nested_relations.get(event.node_id)
+                if iteration_relations:
+                    for node_id in iteration_relations:
+                        self._task_state.ran_node_execution_infos.pop(node_id, None)
+
+            yield self._handle_iteration_to_stream_response(self._application_generate_entity.task_id, event)
+            self._handle_iteration_operation(event)
         elif isinstance(event, QueueStopEvent | QueueWorkflowSucceededEvent | QueueWorkflowFailedEvent):
             workflow_run = self._handle_workflow_finished(event)
 
@@ -187,6 +229,11 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
             if delta_text is None:
                 continue
 
+            if not self._is_stream_out_support(
+                event=event
+            ):
+                continue
+
             self._task_state.answer += delta_text
             yield self._text_chunk_to_stream_response(delta_text)
         elif isinstance(event, QueueMessageReplaceEvent):
```
```diff
@@ -248,3 +295,170 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
             task_id=self._application_generate_entity.task_id,
             text=TextReplaceStreamResponse.Data(text=text)
         )
+
+    def _get_stream_generate_nodes(self) -> dict[str, WorkflowStreamGenerateNodes]:
+        """
+        Get stream generate nodes.
+        :return:
+        """
+        # find all answer nodes
+        graph = self._workflow.graph_dict
+        end_node_configs = [
+            node for node in graph['nodes']
+            if node.get('data', {}).get('type') == NodeType.END.value
+        ]
+
+        # parse stream output node value selectors of end nodes
+        stream_generate_routes = {}
+        for node_config in end_node_configs:
+            # get generate route for stream output
+            end_node_id = node_config['id']
+            generate_nodes = EndNode.extract_generate_nodes(graph, node_config)
+            start_node_ids = self._get_end_start_at_node_ids(graph, end_node_id)
+            if not start_node_ids:
+                continue
+
+            for start_node_id in start_node_ids:
+                stream_generate_routes[start_node_id] = WorkflowStreamGenerateNodes(
+                    end_node_id=end_node_id,
+                    stream_node_ids=generate_nodes
+                )
+
+        return stream_generate_routes
+
+    def _get_end_start_at_node_ids(self, graph: dict, target_node_id: str) \
+            -> list[str]:
+        """
+        Get end start at node id.
+        :param graph: graph
+        :param target_node_id: target node ID
+        :return:
+        """
+        nodes = graph.get('nodes')
+        edges = graph.get('edges')
+
+        # fetch all ingoing edges from source node
+        ingoing_edges = []
+        for edge in edges:
+            if edge.get('target') == target_node_id:
+                ingoing_edges.append(edge)
+
+        if not ingoing_edges:
+            return []
+
+        start_node_ids = []
+        for ingoing_edge in ingoing_edges:
+            source_node_id = ingoing_edge.get('source')
+            source_node = next((node for node in nodes if node.get('id') == source_node_id), None)
+            if not source_node:
+                continue
+
+            node_type = source_node.get('data', {}).get('type')
+            node_iteration_id = source_node.get('data', {}).get('iteration_id')
+            iteration_start_node_id = None
+            if node_iteration_id:
+                iteration_node = next((node for node in nodes if node.get('id') == node_iteration_id), None)
+                iteration_start_node_id = iteration_node.get('data', {}).get('start_node_id')
+
+            if node_type in [
+                NodeType.IF_ELSE.value,
+                NodeType.QUESTION_CLASSIFIER.value
+            ]:
+                start_node_id = target_node_id
+                start_node_ids.append(start_node_id)
+            elif node_type == NodeType.START.value or \
+                    node_iteration_id is not None and iteration_start_node_id == source_node.get('id'):
+                start_node_id = source_node_id
+                start_node_ids.append(start_node_id)
+            else:
+                sub_start_node_ids = self._get_end_start_at_node_ids(graph, source_node_id)
+                if sub_start_node_ids:
+                    start_node_ids.extend(sub_start_node_ids)
+
+        return start_node_ids
+
+    def _generate_stream_outputs_when_node_started(self) -> Generator:
+        """
+        Generate stream outputs.
+        :return:
+        """
+        if self._task_state.current_stream_generate_state:
+            stream_node_ids = self._task_state.current_stream_generate_state.stream_node_ids
+
+            for node_id, node_execution_info in self._task_state.ran_node_execution_infos.items():
+                if node_id not in stream_node_ids:
+                    continue
+
+                node_execution_info = self._task_state.ran_node_execution_infos[node_id]
+
+                # get chunk node execution
+                route_chunk_node_execution = db.session.query(WorkflowNodeExecution).filter(
+                    WorkflowNodeExecution.id == node_execution_info.workflow_node_execution_id).first()
+
+                if not route_chunk_node_execution:
+                    continue
+
+                outputs = route_chunk_node_execution.outputs_dict
+
+                if not outputs:
+                    continue
+
+                # get value from outputs
+                text = outputs.get('text')
+
+                if text:
+                    self._task_state.answer += text
+                    yield self._text_chunk_to_stream_response(text)
+
+            db.session.close()
+
+    def _is_stream_out_support(self, event: QueueTextChunkEvent) -> bool:
+        """
+        Is stream out support
+        :param event: queue text chunk event
+        :return:
+        """
+        if not event.metadata:
+            return False
+
+        if 'node_id' not in event.metadata:
+            return False
+
+        node_id = event.metadata.get('node_id')
+        node_type = event.metadata.get('node_type')
+        stream_output_value_selector = event.metadata.get('value_selector')
+        if not stream_output_value_selector:
+            return False
+
+        if not self._task_state.current_stream_generate_state:
+            return False
+
+        if node_id not in self._task_state.current_stream_generate_state.stream_node_ids:
+            return False
+
+        if node_type != NodeType.LLM:
+            # only LLM support chunk stream output
+            return False
+
+        return True
+
+    def _get_iteration_nested_relations(self, graph: dict) -> dict[str, list[str]]:
+        """
+        Get iteration nested relations.
+        :param graph: graph
+        :return:
+        """
+        nodes = graph.get('nodes')
+
+        iteration_ids = [node.get('id') for node in nodes
+                         if node.get('data', {}).get('type') in [
+                             NodeType.ITERATION.value,
+                             NodeType.LOOP.value,
+                         ]]
+
+        return {
+            iteration_id: [
+                node.get('id') for node in nodes if node.get('data', {}).get('iteration_id') == iteration_id
+            ] for iteration_id in iteration_ids
+        }
```
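The `_is_stream_out_support` gate added above boils down to a handful of metadata checks. A condensed, runnable version of that predicate (plain strings stand in for the node-type enum): a text chunk is streamed only when its metadata names a node on the active generate route, carries a value selector, and the node is an LLM node, the only type that emits token-level chunks.

```python
def is_stream_out_support(metadata: dict | None, active_stream_node_ids: set[str]) -> bool:
    if not metadata or 'node_id' not in metadata:
        return False
    if not metadata.get('value_selector'):
        return False
    if metadata['node_id'] not in active_stream_node_ids:
        return False
    return metadata.get('node_type') == 'llm'  # only LLM nodes stream chunks


print(is_stream_out_support(
    {'node_id': 'llm-1', 'node_type': 'llm', 'value_selector': ['llm-1', 'text']},
    {'llm-1'},
))  # True
print(is_stream_out_support(
    {'node_id': 'code-1', 'node_type': 'code', 'value_selector': ['code-1', 'text']},
    {'llm-1'},
))  # False
```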
```diff
@@ -1,11 +1,15 @@
-from typing import Optional
+from typing import Any, Optional
 
 from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
 from core.app.entities.queue_entities import (
     AppQueueEvent,
+    QueueIterationCompletedEvent,
+    QueueIterationNextEvent,
+    QueueIterationStartEvent,
     QueueNodeFailedEvent,
     QueueNodeStartedEvent,
     QueueNodeSucceededEvent,
     QueueTextChunkEvent,
     QueueWorkflowFailedEvent,
     QueueWorkflowStartedEvent,
     QueueWorkflowSucceededEvent,
@@ -119,8 +123,76 @@ class WorkflowEventTriggerCallback(BaseWorkflowCallback):
         """
         Publish text chunk
         """
-        pass
+        self._queue_manager.publish(
+            QueueTextChunkEvent(
+                text=text,
+                metadata={
+                    "node_id": node_id,
+                    **metadata
+                }
+            ), PublishFrom.APPLICATION_MANAGER
+        )
 
+    def on_workflow_iteration_started(self,
+                                      node_id: str,
+                                      node_type: NodeType,
+                                      node_run_index: int = 1,
+                                      node_data: Optional[BaseNodeData] = None,
+                                      inputs: dict = None,
+                                      predecessor_node_id: Optional[str] = None,
+                                      metadata: Optional[dict] = None) -> None:
+        """
+        Publish iteration started
+        """
+        self._queue_manager.publish(
+            QueueIterationStartEvent(
+                node_id=node_id,
+                node_type=node_type,
+                node_run_index=node_run_index,
+                node_data=node_data,
+                inputs=inputs,
+                predecessor_node_id=predecessor_node_id,
+                metadata=metadata
+            ),
+            PublishFrom.APPLICATION_MANAGER
+        )
+
+    def on_workflow_iteration_next(self, node_id: str,
+                                   node_type: NodeType,
+                                   index: int,
+                                   node_run_index: int,
+                                   output: Optional[Any]) -> None:
+        """
+        Publish iteration next
+        """
+        self._queue_manager.publish(
+            QueueIterationNextEvent(
+                node_id=node_id,
+                node_type=node_type,
+                index=index,
+                node_run_index=node_run_index,
+                output=output
+            ),
+            PublishFrom.APPLICATION_MANAGER
+        )
+
+    def on_workflow_iteration_completed(self, node_id: str,
+                                        node_type: NodeType,
+                                        node_run_index: int,
+                                        outputs: dict) -> None:
+        """
+        Publish iteration completed
+        """
+        self._queue_manager.publish(
+            QueueIterationCompletedEvent(
+                node_id=node_id,
+                node_type=node_type,
+                node_run_index=node_run_index,
+                outputs=outputs
+            ),
+            PublishFrom.APPLICATION_MANAGER
+        )
+
     def on_event(self, event: AppQueueEvent) -> None:
         """
         Publish event
```
```diff
@@ -102,6 +102,39 @@ class WorkflowLoggingCallback(BaseWorkflowCallback):
 
         self.print_text(text, color="pink", end="")
 
+    def on_workflow_iteration_started(self,
+                                      node_id: str,
+                                      node_type: NodeType,
+                                      node_run_index: int = 1,
+                                      node_data: Optional[BaseNodeData] = None,
+                                      inputs: dict = None,
+                                      predecessor_node_id: Optional[str] = None,
+                                      metadata: Optional[dict] = None) -> None:
+        """
+        Publish iteration started
+        """
+        self.print_text("\n[on_workflow_iteration_started]", color='blue')
+        self.print_text(f"Node ID: {node_id}", color='blue')
+
+    def on_workflow_iteration_next(self, node_id: str,
+                                   node_type: NodeType,
+                                   index: int,
+                                   node_run_index: int,
+                                   output: Optional[dict]) -> None:
+        """
+        Publish iteration next
+        """
+        self.print_text("\n[on_workflow_iteration_next]", color='blue')
+
+    def on_workflow_iteration_completed(self, node_id: str,
+                                        node_type: NodeType,
+                                        node_run_index: int,
+                                        outputs: dict) -> None:
+        """
+        Publish iteration completed
+        """
+        self.print_text("\n[on_workflow_iteration_completed]", color='blue')
+
     def on_event(self, event: AppQueueEvent) -> None:
         """
         Publish event
```
```diff
@@ -72,7 +72,7 @@ class AppGenerateEntity(BaseModel):
     # app config
     app_config: AppConfig
 
-    inputs: dict[str, str]
+    inputs: dict[str, Any]
     files: list[FileVar] = []
     user_id: str
 
@@ -80,6 +80,9 @@ class AppGenerateEntity(BaseModel):
     stream: bool
     invoke_from: InvokeFrom
 
+    # invoke call depth
+    call_depth: int = 0
+
     # extra parameters, like: auto_generate_conversation_name
     extras: dict[str, Any] = {}
 
@@ -126,6 +129,14 @@ class AdvancedChatAppGenerateEntity(AppGenerateEntity):
     conversation_id: Optional[str] = None
     query: Optional[str] = None
 
+    class SingleIterationRunEntity(BaseModel):
+        """
+        Single Iteration Run Entity.
+        """
+        node_id: str
+        inputs: dict
+
+    single_iteration_run: Optional[SingleIterationRunEntity] = None
 
 class WorkflowAppGenerateEntity(AppGenerateEntity):
     """
@@ -133,3 +144,12 @@ class WorkflowAppGenerateEntity(AppGenerateEntity):
     """
     # app config
     app_config: WorkflowUIBasedAppConfig
+
+    class SingleIterationRunEntity(BaseModel):
+        """
+        Single Iteration Run Entity.
+        """
+        node_id: str
+        inputs: dict
+
+    single_iteration_run: Optional[SingleIterationRunEntity] = None
```
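The nested `SingleIterationRunEntity` added above carries the debug payload inside the generate entity itself. A minimal pydantic stand-in showing how the nested model is constructed and coerced from a plain dict:

```python
from typing import Optional

from pydantic import BaseModel


class GenerateEntity(BaseModel):
    class SingleIterationRunEntity(BaseModel):
        node_id: str
        inputs: dict

    single_iteration_run: Optional[SingleIterationRunEntity] = None


# pydantic coerces the nested dict into the inner model automatically
entity = GenerateEntity(single_iteration_run={'node_id': 'iter-1', 'inputs': {'x': 1}})
print(entity.single_iteration_run.node_id)  # iter-1
```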
```diff
@@ -1,7 +1,7 @@
 from enum import Enum
 from typing import Any, Optional
 
-from pydantic import BaseModel
+from pydantic import BaseModel, validator
 
 from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk
 from core.workflow.entities.base_node_data_entities import BaseNodeData
@@ -21,6 +21,9 @@ class QueueEvent(Enum):
     WORKFLOW_STARTED = "workflow_started"
     WORKFLOW_SUCCEEDED = "workflow_succeeded"
     WORKFLOW_FAILED = "workflow_failed"
+    ITERATION_START = "iteration_start"
+    ITERATION_NEXT = "iteration_next"
+    ITERATION_COMPLETED = "iteration_completed"
     NODE_STARTED = "node_started"
     NODE_SUCCEEDED = "node_succeeded"
     NODE_FAILED = "node_failed"
@@ -47,6 +50,55 @@ class QueueLLMChunkEvent(AppQueueEvent):
     event = QueueEvent.LLM_CHUNK
     chunk: LLMResultChunk
 
+
+class QueueIterationStartEvent(AppQueueEvent):
+    """
+    QueueIterationStartEvent entity
+    """
+    event = QueueEvent.ITERATION_START
+    node_id: str
+    node_type: NodeType
+    node_data: BaseNodeData
+
+    node_run_index: int
+    inputs: dict = None
+    predecessor_node_id: Optional[str] = None
+    metadata: Optional[dict] = None
+
+
+class QueueIterationNextEvent(AppQueueEvent):
+    """
+    QueueIterationNextEvent entity
+    """
+    event = QueueEvent.ITERATION_NEXT
+
+    index: int
+    node_id: str
+    node_type: NodeType
+
+    node_run_index: int
+    output: Optional[Any]  # output for the current iteration
+
+    @validator('output', pre=True, always=True)
+    def set_output(cls, v):
+        """
+        Set output
+        """
+        if v is None:
+            return None
+        if isinstance(v, int | float | str | bool | dict | list):
+            return v
+        raise ValueError('output must be a valid type')
+
+
+class QueueIterationCompletedEvent(AppQueueEvent):
+    """
+    QueueIterationCompletedEvent entity
+    """
+    event = QueueEvent.ITERATION_COMPLETED
+
+    node_id: str
+    node_type: NodeType
+
+    node_run_index: int
+    outputs: dict
+
 
 class QueueTextChunkEvent(AppQueueEvent):
     """
```
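The `output` validator above constrains per-iteration outputs to JSON-serializable values. A standalone sketch of the same pattern in pydantic v1 style, matching the `validator` import in this diff (the class name here is a stand-in; the `int | ...` isinstance form needs Python 3.10+):

```python
from typing import Any, Optional

from pydantic import BaseModel, validator


class IterationNextEvent(BaseModel):
    output: Optional[Any]

    @validator('output', pre=True, always=True)
    def set_output(cls, v):
        # Accept None and JSON-serializable values; reject everything else.
        if v is None:
            return None
        if isinstance(v, int | float | str | bool | dict | list):
            return v
        raise ValueError('output must be a valid type')


print(IterationNextEvent(output={'answer': 42}).output)  # {'answer': 42}
```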
Some files were not shown because too many files have changed in this diff.