Mirror of https://github.com/langgenius/dify.git (synced 2026-01-19 11:45:05 +08:00)

Compare commits: test/perfo ... 1.7.2 (359 commits)
The commit list contains 359 entries, beginning with 0baccb9e82 and ending with 30aa052a57; only the SHA1 column was captured, and the author, date, and message cells are empty.
@@ -1,6 +1,6 @@
#!/bin/bash

npm add -g pnpm@10.11.1
npm add -g pnpm@10.13.1
cd web && pnpm install

pipx install uv

@@ -12,3 +12,4 @@ echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f do
echo 'alias stop-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down"' >> ~/.bashrc

source /home/vscode/.bashrc
.env.example (new file, 1197 lines): file diff suppressed because it is too large.
2 .github/ISSUE_TEMPLATE/bug_report.yml (vendored)

@@ -16,6 +16,8 @@ body:
required: true
- label: I confirm that I am using English to submit this report, otherwise it will be closed.
required: true
- label: 【中文用户 & Non English User】请使用英语提交,否则会被关闭 :)
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
44 .github/ISSUE_TEMPLATE/chore.yaml (vendored, new file)

@@ -0,0 +1,44 @@
name: "✨ Refactor"
description: Refactor existing code for improved readability and maintainability.
title: "[Chore/Refactor] "
labels:
- refactor
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have read the [Contributing Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) and [Language Policy](https://github.com/langgenius/dify/issues/1542).
required: true
- label: This is only for refactoring, if you would like to ask a question, please head to [Discussions](https://github.com/langgenius/dify/discussions/categories/general).
required: true
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report, otherwise it will be closed.
required: true
- label: 【中文用户 & Non English User】请使用英语提交,否则会被关闭 :)
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
id: description
attributes:
label: Description
placeholder: "Describe the refactor you are proposing."
validations:
required: true
- type: textarea
id: motivation
attributes:
label: Motivation
placeholder: "Explain why this refactor is necessary."
validations:
required: false
- type: textarea
id: additional-context
attributes:
label: Additional Context
placeholder: "Add any other context or screenshots about the request here."
validations:
required: false
3 .github/workflows/api-tests.yml (vendored)

@@ -99,3 +99,6 @@ jobs:
- name: Run Tool
run: uv run --project api bash dev/pytest/pytest_tools.sh

- name: Run TestContainers
run: uv run --project api bash dev/pytest/pytest_testcontainers.sh
28 .github/workflows/autofix.yml (vendored, new file)

@@ -0,0 +1,28 @@
name: autofix.ci
on:
workflow_call:
pull_request:
push:
branches: [ "main" ]
permissions:
contents: read

jobs:
autofix:
if: github.repository == 'langgenius/dify'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4

# Use uv to ensure we have the same ruff version in CI and locally.
- uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f
- run: |
cd api
uv sync --dev
# Fix lint errors
uv run ruff check --fix-only .
# Format code
uv run ruff format .

- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
2 .github/workflows/build-push.yml (vendored)

@@ -6,6 +6,8 @@ on:
- "main"
- "deploy/dev"
- "deploy/enterprise"
- "build/**"
- "release/e-*"
tags:
- "*"
30 .github/workflows/style.yml (vendored)

@@ -28,7 +28,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: |
api/**

@@ -49,8 +49,8 @@ jobs:
if: steps.changed-files.outputs.any_changed == 'true'
run: |
uv run --directory api ruff --version
uv run --directory api ruff check --diff ./
uv run --directory api ruff format --check --diff ./
uv run --directory api ruff check ./
uv run --directory api ruff format --check ./

- name: Dotenv check
if: steps.changed-files.outputs.any_changed == 'true'

@@ -75,7 +75,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: web/**

@@ -113,7 +113,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: |
docker/generate_docker_compose

@@ -144,7 +144,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: |
**.sh

@@ -152,13 +152,15 @@
**.yml
**Dockerfile
dev/**
.editorconfig

- name: Super-linter
uses: super-linter/super-linter/slim@v7
uses: super-linter/super-linter/slim@v8
if: steps.changed-files.outputs.any_changed == 'true'
env:
BASH_SEVERITY: warning
DEFAULT_BRANCH: main
DEFAULT_BRANCH: origin/main
EDITORCONFIG_FILE_NAME: editorconfig-checker.json
FILTER_REGEX_INCLUDE: pnpm-lock.yaml
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
IGNORE_GENERATED_FILES: true

@@ -168,16 +170,6 @@ jobs:
# FIXME: temporarily disabled until api-docker.yaml's run script is fixed for shellcheck
# VALIDATE_GITHUB_ACTIONS: true
VALIDATE_DOCKERFILE_HADOLINT: true
VALIDATE_EDITORCONFIG: true
VALIDATE_XML: true
VALIDATE_YAML: true

- name: EditorConfig checks
uses: super-linter/super-linter/slim@v7
env:
DEFAULT_BRANCH: main
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
IGNORE_GENERATED_FILES: true
IGNORE_GITIGNORED_FILES: true
# EditorConfig validation
VALIDATE_EDITORCONFIG: true
EDITORCONFIG_FILE_NAME: editorconfig-checker.json
@@ -1,13 +1,18 @@
name: Check i18n Files and Create PR

on:
pull_request:
types: [closed]
push:
branches: [main]
paths:
- 'web/i18n/en-US/*.ts'

permissions:
contents: write
pull-requests: write

jobs:
check-and-update:
if: github.event.pull_request.merged == true
if: github.repository == 'langgenius/dify'
runs-on: ubuntu-latest
defaults:
run:

@@ -15,8 +20,8 @@ jobs:
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2 # last 2 commits
persist-credentials: false
fetch-depth: 2
token: ${{ secrets.GITHUB_TOKEN }}

- name: Check for file changes in i18n/en-US
id: check_files

@@ -27,6 +32,13 @@ jobs:
echo "Changed files: $changed_files"
if [ -n "$changed_files" ]; then
echo "FILES_CHANGED=true" >> $GITHUB_ENV
file_args=""
for file in $changed_files; do
filename=$(basename "$file" .ts)
file_args="$file_args --file=$filename"
done
echo "FILE_ARGS=$file_args" >> $GITHUB_ENV
echo "File arguments: $file_args"
else
echo "FILES_CHANGED=false" >> $GITHUB_ENV
fi

@@ -49,14 +61,15 @@ jobs:
if: env.FILES_CHANGED == 'true'
run: pnpm install --frozen-lockfile

- name: Run npm script
- name: Generate i18n translations
if: env.FILES_CHANGED == 'true'
run: pnpm run auto-gen-i18n
run: pnpm run auto-gen-i18n ${{ env.FILE_ARGS }}

- name: Create Pull Request
if: env.FILES_CHANGED == 'true'
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: Update i18n files based on en-US changes
title: 'chore: translate i18n files'
body: This PR was automatically created to update i18n files based on changes in en-US locale.
2 .github/workflows/web-tests.yml (vendored)

@@ -27,7 +27,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: web/**
1 .gitignore (vendored)

@@ -215,3 +215,4 @@ mise.toml
# AI Assistant
.roo/
api/.env.backup
/clickzetta
@@ -235,13 +235,17 @@ Quickly deploy Dify to Alibaba cloud with [Alibaba Cloud Computing Nest](https:/
One-Click deploy Dify to Alibaba Cloud with [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)

#### Deploy to AKS with Azure Devops Pipeline

One-Click deploy Dify to AKS with [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)

## Contributing

For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
At the same time, please consider supporting Dify by sharing it on social media and at events and conferences.

> We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).
> We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).

## Community & contact
The localized READMEs (Arabic, Bengali, Simplified Chinese, German, Spanish, French, Japanese, Klingon, Korean, Portuguese, Slovenian, Turkish, Traditional Chinese, Vietnamese) receive the same two edits as the English README above, each in its own language:

- A new "#### Deploy to AKS with Azure Devops Pipeline" subsection is added after the Alibaba Cloud Data Management entry, pointing to the one-click [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS).
- In the Contributing section, the i18n README link is updated from web/i18n/README.md to web/i18n-config/README.md (the Slovenian hunk contains only the AKS addition).
@@ -4,18 +4,23 @@
# Alternatively you can set it with `SECRET_KEY` environment variable.
SECRET_KEY=

# Ensure UTF-8 encoding
LANG=en_US.UTF-8
LC_ALL=en_US.UTF-8
PYTHONIOENCODING=utf-8

# Console API base URL
CONSOLE_API_URL=http://127.0.0.1:5001
CONSOLE_WEB_URL=http://127.0.0.1:3000
CONSOLE_API_URL=http://localhost:5001
CONSOLE_WEB_URL=http://localhost:3000

# Service API base URL
SERVICE_API_URL=http://127.0.0.1:5001
SERVICE_API_URL=http://localhost:5001

# Web APP base URL
APP_WEB_URL=http://127.0.0.1:3000
APP_WEB_URL=http://localhost:3000

# Files URL
FILES_URL=http://127.0.0.1:5001
FILES_URL=http://localhost:5001

# INTERNAL_FILES_URL is used for plugin daemon communication within Docker network.
# Set this to the internal Docker service URL for proper plugin file access.

@@ -54,7 +59,7 @@ REDIS_CLUSTERS_PASSWORD=
# celery configuration
CELERY_BROKER_URL=redis://:difyai123456@localhost:${REDIS_PORT}/1

CELERY_BACKEND=redis
# PostgreSQL database configuration
DB_USERNAME=postgres
DB_PASSWORD=difyai123456

@@ -138,12 +143,14 @@ SUPABASE_API_KEY=your-access-key
SUPABASE_URL=your-server-url

# CORS configuration
WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
WEB_API_CORS_ALLOW_ORIGINS=http://localhost:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,*

# Vector database configuration
# support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase, opengauss, tablestore, matrixone
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`.
VECTOR_STORE=weaviate
# Prefix used to create collection name in vector database
VECTOR_INDEX_NAME_PREFIX=Vector_index

# Weaviate configuration
WEAVIATE_ENDPOINT=http://localhost:8080

@@ -225,6 +232,7 @@ TABLESTORE_ENDPOINT=https://instance-name.cn-hangzhou.ots.aliyuncs.com
TABLESTORE_INSTANCE_NAME=instance-name
TABLESTORE_ACCESS_KEY_ID=xxx
TABLESTORE_ACCESS_KEY_SECRET=xxx
TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE=false

# Tidb Vector configuration
TIDB_VECTOR_HOST=xxx.eu-central-1.xxx.aws.tidbcloud.com

@@ -469,6 +477,16 @@ APP_MAX_ACTIVE_REQUESTS=0
# Celery beat configuration
CELERY_BEAT_SCHEDULER_TIME=1

# Celery schedule tasks configuration
ENABLE_CLEAN_EMBEDDING_CACHE_TASK=false
ENABLE_CLEAN_UNUSED_DATASETS_TASK=false
ENABLE_CREATE_TIDB_SERVERLESS_TASK=false
ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false
ENABLE_CLEAN_MESSAGES=false
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false
ENABLE_DATASETS_QUEUE_MONITOR=false
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true

# Position configuration
POSITION_TOOL_PINS=
POSITION_TOOL_INCLUDES=

@@ -495,6 +513,8 @@ ENDPOINT_URL_TEMPLATE=http://localhost:5002/e/{hook_id}
# Reset password token expiry minutes
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5

CREATE_TIDB_SERVICE_JOB_ENABLED=false

@@ -505,6 +525,8 @@ LOGIN_LOCKOUT_DURATION=86400
# Enable OpenTelemetry
ENABLE_OTEL=false
OTLP_TRACE_ENDPOINT=
OTLP_METRIC_ENDPOINT=
OTLP_BASE_ENDPOINT=http://localhost:4318
OTLP_API_KEY=
OTEL_EXPORTER_OTLP_PROTOCOL=
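These base URLs are plain environment variables; below is a minimal sketch of reading them with the localhost defaults introduced above. It is illustrative only and is not Dify's actual configuration loader, which is more involved.

```python
import os

# Illustrative only: fall back to the localhost defaults this diff switches to.
CONSOLE_API_URL = os.getenv("CONSOLE_API_URL", "http://localhost:5001")
CONSOLE_WEB_URL = os.getenv("CONSOLE_WEB_URL", "http://localhost:3000")
SERVICE_API_URL = os.getenv("SERVICE_API_URL", "http://localhost:5001")
FILES_URL = os.getenv("FILES_URL", "http://localhost:5001")

print(CONSOLE_API_URL, CONSOLE_WEB_URL, SERVICE_API_URL, FILES_URL)
```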
@@ -42,6 +42,8 @@ select = [
"S301", # suspicious-pickle-usage, disallow use of `pickle` and its wrappers.
"S302", # suspicious-marshal-usage, disallow use of `marshal` module
"S311", # suspicious-non-cryptographic-random-usage
"G001", # don't use str format to logging messages
"G004", # don't use f-strings to format logging messages
]

ignore = [
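For reference, a minimal sketch of what the two new rules flag and the lazy `%`-style form they push toward (the same rewrite is applied to `app_factory.py` and `commands.py` later in this diff); the logger name and variable are illustrative:

```python
import logging

logger = logging.getLogger("example")  # illustrative logger name
short_name = "ext_redis"

logger.info("Loaded {}".format(short_name))  # G001: str.format() in a logging call
logger.info(f"Loaded {short_name}")          # G004: f-string in a logging call

# Preferred: let the logging module format lazily, only if the record is emitted.
logger.info("Loaded %s", short_name)
```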
@@ -19,7 +19,7 @@ RUN apt-get update \
# Install Python dependencies
COPY pyproject.toml uv.lock ./
RUN uv sync --locked
RUN uv sync --locked --no-dev

# production stage
FROM base AS production

@@ -37,6 +37,11 @@ EXPOSE 5001
# set timezone
ENV TZ=UTC

# Set UTF-8 locale
ENV LANG=en_US.UTF-8
ENV LC_ALL=en_US.UTF-8
ENV PYTHONIOENCODING=utf-8

WORKDIR /app/api

RUN \

@@ -47,6 +52,8 @@ RUN \
curl nodejs libgmp-dev libmpfr-dev libmpc-dev \
# For Security
expat libldap-2.5-0 perl libsqlite3-0 zlib1g \
# install fonts to support the use of tools like pypdfium2
fonts-noto-cjk \
# install a package to improve the accuracy of guessing mime type and file extension
media-types \
# install libmagic to support the use of python-magic guess MIMETYPE
@@ -74,7 +74,12 @@
10. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service.

```bash
uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion
uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin
```

Addition, if you want to debug the celery scheduled tasks, you can use the following command in another terminal:
```bash
uv run celery -A app.celery beat
```

## Testing
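The `-Q` list gains a `plugin` queue here. As a self-contained illustration of how a Celery task ends up on one of these named queues so the worker command above can pick it up; this is a sketch, not Dify's code, and the broker URL and task name are assumptions:

```python
from celery import Celery

# Illustrative broker URL and task; Dify wires Celery up through its Flask app instead.
app = Celery("demo", broker="redis://localhost:6379/1")


@app.task
def index_document(document_id: str) -> str:
    # Placeholder for real indexing work (dataset importing, document indexing, ...).
    return f"indexed {document_id}"


if __name__ == "__main__":
    # Send the task to the "dataset" queue; a worker started with `-Q dataset,...` consumes it.
    index_document.apply_async(args=["doc-123"], queue="dataset")
```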
@@ -32,7 +32,7 @@ def create_app() -> DifyApp:
initialize_extensions(app)
end_time = time.perf_counter()
if dify_config.DEBUG:
logging.info(f"Finished create_app ({round((end_time - start_time) * 1000, 2)} ms)")
logging.info("Finished create_app (%s ms)", round((end_time - start_time) * 1000, 2))
return app
@@ -91,14 +91,14 @@ def initialize_extensions(app: DifyApp):
is_enabled = ext.is_enabled() if hasattr(ext, "is_enabled") else True
if not is_enabled:
if dify_config.DEBUG:
logging.info(f"Skipped {short_name}")
logging.info("Skipped %s", short_name)
continue

start_time = time.perf_counter()
ext.init_app(app)
end_time = time.perf_counter()
if dify_config.DEBUG:
logging.info(f"Loaded {short_name} ({round((end_time - start_time) * 1000, 2)} ms)")
logging.info("Loaded %s (%s ms)", short_name, round((end_time - start_time) * 1000, 2))

def create_migrations_app():
167 api/commands.py

@@ -2,19 +2,23 @@ import base64
import json
import logging
import secrets
from typing import Optional
from typing import Any, Optional

import click
import sqlalchemy as sa
from flask import current_app
from pydantic import TypeAdapter
from sqlalchemy import select
from werkzeug.exceptions import NotFound
from sqlalchemy.exc import SQLAlchemyError

from configs import dify_config
from constants.languages import languages
from core.plugin.entities.plugin import ToolProviderID
from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.index_processor.constant.built_in_field import BuiltInField
from core.rag.models.document import Document
from core.tools.utils.system_oauth_encryption import encrypt_system_oauth_params
from events.app_event import app_was_created
from extensions.ext_database import db
from extensions.ext_redis import redis_client

@@ -27,6 +31,7 @@ from models.dataset import Dataset, DatasetCollectionBinding, DatasetMetadata, D
from models.dataset import Document as DatasetDocument
from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation
from models.provider import Provider, ProviderModel
from models.tools import ToolOAuthSystemClient
from services.account_service import AccountService, RegisterService, TenantService
from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpiredLogs
from services.plugin.data_migration import PluginDataMigration

@@ -46,16 +51,16 @@ def reset_password(email, new_password, password_confirm):
click.echo(click.style("Passwords do not match.", fg="red"))
return

account = db.session.query(Account).filter(Account.email == email).one_or_none()
account = db.session.query(Account).where(Account.email == email).one_or_none()

if not account:
click.echo(click.style("Account not found for email: {}".format(email), fg="red"))
click.echo(click.style(f"Account not found for email: {email}", fg="red"))
return

try:
valid_password(new_password)
except:
click.echo(click.style("Invalid password. Must match {}".format(password_pattern), fg="red"))
click.echo(click.style(f"Invalid password. Must match {password_pattern}", fg="red"))
return

# generate password salt

@@ -85,16 +90,16 @@ def reset_email(email, new_email, email_confirm):
click.echo(click.style("New emails do not match.", fg="red"))
return

account = db.session.query(Account).filter(Account.email == email).one_or_none()
account = db.session.query(Account).where(Account.email == email).one_or_none()

if not account:
click.echo(click.style("Account not found for email: {}".format(email), fg="red"))
click.echo(click.style(f"Account not found for email: {email}", fg="red"))
return

try:
email_validate(new_email)
except:
click.echo(click.style("Invalid email: {}".format(new_email), fg="red"))
click.echo(click.style(f"Invalid email: {new_email}", fg="red"))
return

account.email = new_email

@@ -132,13 +137,13 @@ def reset_encrypt_key_pair():
tenant.encrypt_public_key = generate_key_pair(tenant.id)

db.session.query(Provider).filter(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete()
db.session.query(ProviderModel).filter(ProviderModel.tenant_id == tenant.id).delete()
db.session.query(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete()
db.session.query(ProviderModel).where(ProviderModel.tenant_id == tenant.id).delete()
db.session.commit()

click.echo(
click.style(
"Congratulations! The asymmetric key pair of workspace {} has been reset.".format(tenant.id),
f"Congratulations! The asymmetric key pair of workspace {tenant.id} has been reset.",
fg="green",
)
)
@@ -168,7 +173,7 @@ def migrate_annotation_vector_database():
per_page = 50
apps = (
db.session.query(App)
.filter(App.status == "normal")
.where(App.status == "normal")
.order_by(App.created_at.desc())
.limit(per_page)
.offset((page - 1) * per_page)

@@ -176,8 +181,8 @@
)
if not apps:
break
except NotFound:
break
except SQLAlchemyError:
raise

page += 1
for app in apps:

@@ -186,25 +191,25 @@
f"Processing the {total_count} app {app.id}. " + f"{create_count} created, {skipped_count} skipped."
)
try:
click.echo("Creating app annotation index: {}".format(app.id))
click.echo(f"Creating app annotation index: {app.id}")
app_annotation_setting = (
db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app.id).first()
db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app.id).first()
)

if not app_annotation_setting:
skipped_count = skipped_count + 1
click.echo("App annotation setting disabled: {}".format(app.id))
click.echo(f"App annotation setting disabled: {app.id}")
continue
# get dataset_collection_binding info
dataset_collection_binding = (
db.session.query(DatasetCollectionBinding)
.filter(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id)
.where(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id)
.first()
)
if not dataset_collection_binding:
click.echo("App annotation collection binding not found: {}".format(app.id))
click.echo(f"App annotation collection binding not found: {app.id}")
continue
annotations = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app.id).all()
annotations = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app.id).all()
dataset = Dataset(
id=app.id,
tenant_id=app.tenant_id,

@@ -248,9 +253,7 @@
create_count += 1
except Exception as e:
click.echo(
click.style(
"Error creating app annotation index: {} {}".format(e.__class__.__name__, str(e)), fg="red"
)
click.style(f"Error creating app annotation index: {e.__class__.__name__} {str(e)}", fg="red")
)
continue

@@ -301,12 +304,12 @@ def migrate_knowledge_vector_database():
while True:
try:
stmt = (
select(Dataset).filter(Dataset.indexing_technique == "high_quality").order_by(Dataset.created_at.desc())
select(Dataset).where(Dataset.indexing_technique == "high_quality").order_by(Dataset.created_at.desc())
)

datasets = db.paginate(select=stmt, page=page, per_page=50, max_per_page=50, error_out=False)
except NotFound:
break
except SQLAlchemyError:
raise

page += 1
for dataset in datasets:

@@ -315,7 +318,7 @@ def migrate_knowledge_vector_database():
f"Processing the {total_count} dataset {dataset.id}. {create_count} created, {skipped_count} skipped."
)
try:
click.echo("Creating dataset vector database index: {}".format(dataset.id))
click.echo(f"Creating dataset vector database index: {dataset.id}")
if dataset.index_struct_dict:
if dataset.index_struct_dict["type"] == vector_type:
skipped_count = skipped_count + 1

@@ -328,7 +331,7 @@ def migrate_knowledge_vector_database():
if dataset.collection_binding_id:
dataset_collection_binding = (
db.session.query(DatasetCollectionBinding)
.filter(DatasetCollectionBinding.id == dataset.collection_binding_id)
.where(DatasetCollectionBinding.id == dataset.collection_binding_id)
.one_or_none()
)
if dataset_collection_binding:

@@ -363,7 +366,7 @@ def migrate_knowledge_vector_database():
dataset_documents = (
db.session.query(DatasetDocument)
.filter(
.where(
DatasetDocument.dataset_id == dataset.id,
DatasetDocument.indexing_status == "completed",
DatasetDocument.enabled == True,

@@ -377,7 +380,7 @@ def migrate_knowledge_vector_database():
for dataset_document in dataset_documents:
segments = (
db.session.query(DocumentSegment)
.filter(
.where(
DocumentSegment.document_id == dataset_document.id,
DocumentSegment.status == "completed",
DocumentSegment.enabled == True,

@@ -419,9 +422,7 @@ def migrate_knowledge_vector_database():
create_count += 1
except Exception as e:
db.session.rollback()
click.echo(
click.style("Error creating dataset index: {} {}".format(e.__class__.__name__, str(e)), fg="red")
)
click.echo(click.style(f"Error creating dataset index: {e.__class__.__name__} {str(e)}", fg="red"))
continue

click.echo(

@@ -457,14 +458,14 @@ def convert_to_agent_apps():
"""

with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query))
rs = conn.execute(sa.text(sql_query))

apps = []
for i in rs:
app_id = str(i.id)
if app_id not in proceeded_app_ids:
proceeded_app_ids.append(app_id)
app = db.session.query(App).filter(App.id == app_id).first()
app = db.session.query(App).where(App.id == app_id).first()
if app is not None:
apps.append(app)

@@ -472,23 +473,23 @@ def convert_to_agent_apps():
break

for app in apps:
click.echo("Converting app: {}".format(app.id))
click.echo(f"Converting app: {app.id}")

try:
app.mode = AppMode.AGENT_CHAT.value
db.session.commit()

# update conversation mode to agent
db.session.query(Conversation).filter(Conversation.app_id == app.id).update(
db.session.query(Conversation).where(Conversation.app_id == app.id).update(
{Conversation.mode: AppMode.AGENT_CHAT.value}
)

db.session.commit()
click.echo(click.style("Converted app: {}".format(app.id), fg="green"))
click.echo(click.style(f"Converted app: {app.id}", fg="green"))
except Exception as e:
click.echo(click.style("Convert app error: {} {}".format(e.__class__.__name__, str(e)), fg="red"))
click.echo(click.style(f"Convert app error: {e.__class__.__name__} {str(e)}", fg="red"))

click.echo(click.style("Conversion complete. Converted {} agent apps.".format(len(proceeded_app_ids)), fg="green"))
click.echo(click.style(f"Conversion complete. Converted {len(proceeded_app_ids)} agent apps.", fg="green"))

@click.command("add-qdrant-index", help="Add Qdrant index.")
@@ -556,12 +557,12 @@ def old_metadata_migration():
try:
stmt = (
select(DatasetDocument)
.filter(DatasetDocument.doc_metadata.is_not(None))
.where(DatasetDocument.doc_metadata.is_not(None))
.order_by(DatasetDocument.created_at.desc())
)
documents = db.paginate(select=stmt, page=page, per_page=50, max_per_page=50, error_out=False)
except NotFound:
break
except SQLAlchemyError:
raise
if not documents:
break
for document in documents:

@@ -574,7 +575,7 @@ def old_metadata_migration():
else:
dataset_metadata = (
db.session.query(DatasetMetadata)
.filter(DatasetMetadata.dataset_id == document.dataset_id, DatasetMetadata.name == key)
.where(DatasetMetadata.dataset_id == document.dataset_id, DatasetMetadata.name == key)
.first()
)
if not dataset_metadata:

@@ -598,7 +599,7 @@ def old_metadata_migration():
else:
dataset_metadata_binding = (
db.session.query(DatasetMetadataBinding)  # type: ignore
.filter(
.where(
DatasetMetadataBinding.dataset_id == document.dataset_id,
DatasetMetadataBinding.document_id == document.id,
DatasetMetadataBinding.metadata_id == dataset_metadata.id,

@@ -661,7 +662,7 @@ def create_tenant(email: str, language: Optional[str] = None, name: Optional[str
click.echo(
click.style(
"Account and tenant created.\nAccount: {}\nPassword: {}".format(email, new_password),
f"Account and tenant created.\nAccount: {email}\nPassword: {new_password}",
fg="green",
)
)

@@ -702,7 +703,7 @@ def fix_app_site_missing():
sql = """select apps.id as id from apps left join sites on sites.app_id=apps.id
where sites.id is null limit 1000"""
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql))
rs = conn.execute(sa.text(sql))

processed_count = 0
for i in rs:

@@ -713,7 +714,7 @@ where sites.id is null limit 1000"""
continue

try:
app = db.session.query(App).filter(App.id == app_id).first()
app = db.session.query(App).where(App.id == app_id).first()
if not app:
print(f"App {app_id} not found")
continue

@@ -722,16 +723,16 @@ where sites.id is null limit 1000"""
if tenant:
accounts = tenant.get_accounts()
if not accounts:
print("Fix failed for app {}".format(app.id))
print(f"Fix failed for app {app.id}")
continue

account = accounts[0]
print("Fixing missing site for app {}".format(app.id))
print(f"Fixing missing site for app {app.id}")
app_was_created.send(app, account=account)
except Exception:
failed_app_ids.append(app_id)
click.echo(click.style("Failed to fix missing site for app {}".format(app_id), fg="red"))
logging.exception(f"Failed to fix app related site missing issue, app_id: {app_id}")
click.echo(click.style(f"Failed to fix missing site for app {app_id}", fg="red"))
logging.exception("Failed to fix app related site missing issue, app_id: %s", app_id)
continue

if not processed_count:

@@ -916,7 +917,7 @@ def clear_orphaned_file_records(force: bool):
)
orphaned_message_files = []
with db.engine.begin() as conn:
rs = conn.execute(db.text(query))
rs = conn.execute(sa.text(query))
for i in rs:
orphaned_message_files.append({"id": str(i[0]), "message_id": str(i[1])})

@@ -937,7 +938,7 @@ def clear_orphaned_file_records(force: bool):
click.echo(click.style("- Deleting orphaned message_files records", fg="white"))
query = "DELETE FROM message_files WHERE id IN :ids"
with db.engine.begin() as conn:
conn.execute(db.text(query), {"ids": tuple([record["id"] for record in orphaned_message_files])})
|
||||
conn.execute(sa.text(query), {"ids": tuple([record["id"] for record in orphaned_message_files])})
|
||||
click.echo(
|
||||
click.style(f"Removed {len(orphaned_message_files)} orphaned message_files records.", fg="green")
|
||||
)
|
||||
@ -954,7 +955,7 @@ def clear_orphaned_file_records(force: bool):
|
||||
click.echo(click.style(f"- Listing file records in table {files_table['table']}", fg="white"))
|
||||
query = f"SELECT {files_table['id_column']}, {files_table['key_column']} FROM {files_table['table']}"
|
||||
with db.engine.begin() as conn:
|
||||
rs = conn.execute(db.text(query))
|
||||
rs = conn.execute(sa.text(query))
|
||||
for i in rs:
|
||||
all_files_in_tables.append({"table": files_table["table"], "id": str(i[0]), "key": i[1]})
|
||||
click.echo(click.style(f"Found {len(all_files_in_tables)} files in tables.", fg="white"))
|
||||
@ -974,7 +975,7 @@ def clear_orphaned_file_records(force: bool):
|
||||
f"SELECT {ids_table['column']} FROM {ids_table['table']} WHERE {ids_table['column']} IS NOT NULL"
|
||||
)
|
||||
with db.engine.begin() as conn:
|
||||
rs = conn.execute(db.text(query))
|
||||
rs = conn.execute(sa.text(query))
|
||||
for i in rs:
|
||||
all_ids_in_tables.append({"table": ids_table["table"], "id": str(i[0])})
|
||||
elif ids_table["type"] == "text":
|
||||
@ -989,7 +990,7 @@ def clear_orphaned_file_records(force: bool):
|
||||
f"FROM {ids_table['table']}"
|
||||
)
|
||||
with db.engine.begin() as conn:
|
||||
rs = conn.execute(db.text(query))
|
||||
rs = conn.execute(sa.text(query))
|
||||
for i in rs:
|
||||
for j in i[0]:
|
||||
all_ids_in_tables.append({"table": ids_table["table"], "id": j})
|
||||
@ -1008,7 +1009,7 @@ def clear_orphaned_file_records(force: bool):
|
||||
f"FROM {ids_table['table']}"
|
||||
)
|
||||
with db.engine.begin() as conn:
|
||||
rs = conn.execute(db.text(query))
|
||||
rs = conn.execute(sa.text(query))
|
||||
for i in rs:
|
||||
for j in i[0]:
|
||||
all_ids_in_tables.append({"table": ids_table["table"], "id": j})
|
||||
@ -1037,7 +1038,7 @@ def clear_orphaned_file_records(force: bool):
|
||||
click.echo(click.style(f"- Deleting orphaned file records in table {files_table['table']}", fg="white"))
|
||||
query = f"DELETE FROM {files_table['table']} WHERE {files_table['id_column']} IN :ids"
|
||||
with db.engine.begin() as conn:
|
||||
conn.execute(db.text(query), {"ids": tuple(orphaned_files)})
|
||||
conn.execute(sa.text(query), {"ids": tuple(orphaned_files)})
|
||||
except Exception as e:
|
||||
click.echo(click.style(f"Error deleting orphaned file records: {str(e)}", fg="red"))
|
||||
return
|
||||
@ -1107,7 +1108,7 @@ def remove_orphaned_files_on_storage(force: bool):
|
||||
click.echo(click.style(f"- Listing files from table {files_table['table']}", fg="white"))
|
||||
query = f"SELECT {files_table['key_column']} FROM {files_table['table']}"
|
||||
with db.engine.begin() as conn:
|
||||
rs = conn.execute(db.text(query))
|
||||
rs = conn.execute(sa.text(query))
|
||||
for i in rs:
|
||||
all_files_in_tables.append(str(i[0]))
|
||||
click.echo(click.style(f"Found {len(all_files_in_tables)} files in tables.", fg="white"))
|
||||
@ -1155,3 +1156,49 @@ def remove_orphaned_files_on_storage(force: bool):
|
||||
click.echo(click.style(f"Removed {removed_files} orphaned files without errors.", fg="green"))
|
||||
else:
|
||||
click.echo(click.style(f"Removed {removed_files} orphaned files, with {error_files} errors.", fg="yellow"))
|
||||
|
||||
|
||||
@click.command("setup-system-tool-oauth-client", help="Setup system tool oauth client.")
|
||||
@click.option("--provider", prompt=True, help="Provider name")
|
||||
@click.option("--client-params", prompt=True, help="Client Params")
|
||||
def setup_system_tool_oauth_client(provider, client_params):
|
||||
"""
|
||||
Setup system tool oauth client
|
||||
"""
|
||||
provider_id = ToolProviderID(provider)
|
||||
provider_name = provider_id.provider_name
|
||||
plugin_id = provider_id.plugin_id
|
||||
|
||||
try:
|
||||
# json validate
|
||||
click.echo(click.style(f"Validating client params: {client_params}", fg="yellow"))
|
||||
client_params_dict = TypeAdapter(dict[str, Any]).validate_json(client_params)
|
||||
click.echo(click.style("Client params validated successfully.", fg="green"))
|
||||
|
||||
click.echo(click.style(f"Encrypting client params: {client_params}", fg="yellow"))
|
||||
click.echo(click.style(f"Using SECRET_KEY: `{dify_config.SECRET_KEY}`", fg="yellow"))
|
||||
oauth_client_params = encrypt_system_oauth_params(client_params_dict)
|
||||
click.echo(click.style("Client params encrypted successfully.", fg="green"))
|
||||
except Exception as e:
|
||||
click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red"))
|
||||
return
|
||||
|
||||
deleted_count = (
|
||||
db.session.query(ToolOAuthSystemClient)
|
||||
.filter_by(
|
||||
provider=provider_name,
|
||||
plugin_id=plugin_id,
|
||||
)
|
||||
.delete()
|
||||
)
|
||||
if deleted_count > 0:
|
||||
click.echo(click.style(f"Deleted {deleted_count} existing oauth client params.", fg="yellow"))
|
||||
|
||||
oauth_client = ToolOAuthSystemClient(
|
||||
provider=provider_name,
|
||||
plugin_id=plugin_id,
|
||||
encrypted_oauth_params=oauth_client_params,
|
||||
)
|
||||
db.session.add(oauth_client)
|
||||
db.session.commit()
|
||||
click.echo(click.style(f"OAuth client params setup successfully. id: {oauth_client.id}", fg="green"))
|
||||
|
||||
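The new `setup-system-tool-oauth-client` command validates the `--client-params` JSON with pydantic before encrypting it. A hedged sketch of just that validation step (the key names in the sample JSON are illustrative, not part of the repository):

```python
from typing import Any

from pydantic import TypeAdapter, ValidationError

raw_params = '{"client_id": "abc", "client_secret": "xyz"}'  # illustrative values

try:
    # Parses the JSON string and checks it is a flat JSON object (dict[str, Any]).
    client_params: dict[str, Any] = TypeAdapter(dict[str, Any]).validate_json(raw_params)
except ValidationError as e:
    print(f"Error parsing client params: {e}")
else:
    print(sorted(client_params))  # ['client_id', 'client_secret']
```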
@ -41,7 +41,7 @@ class RemoteSettingsSourceFactory(PydanticBaseSettingsSource):
|
||||
case RemoteSettingsSourceName.NACOS:
|
||||
remote_source = NacosSettingsSource(current_state)
|
||||
case _:
|
||||
logger.warning(f"Unsupported remote source: {remote_source_name}")
|
||||
logger.warning("Unsupported remote source: %s", remote_source_name)
|
||||
return {}
|
||||
|
||||
d: dict[str, Any] = {}
|
||||
|
||||
@ -31,6 +31,15 @@ class SecurityConfig(BaseSettings):
|
||||
description="Duration in minutes for which a password reset token remains valid",
|
||||
default=5,
|
||||
)
|
||||
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
|
||||
description="Duration in minutes for which a change email token remains valid",
|
||||
default=5,
|
||||
)
|
||||
|
||||
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
|
||||
description="Duration in minutes for which a owner transfer token remains valid",
|
||||
default=5,
|
||||
)
|
||||
|
||||
LOGIN_DISABLED: bool = Field(
|
||||
description="Whether to disable login checks",
|
||||
@ -321,17 +330,17 @@ class HttpConfig(BaseSettings):
|
||||
def WEB_API_CORS_ALLOW_ORIGINS(self) -> list[str]:
|
||||
return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(",")
|
||||
|
||||
HTTP_REQUEST_MAX_CONNECT_TIMEOUT: Annotated[
|
||||
PositiveInt, Field(ge=10, description="Maximum connection timeout in seconds for HTTP requests")
|
||||
] = 10
|
||||
HTTP_REQUEST_MAX_CONNECT_TIMEOUT: int = Field(
|
||||
ge=1, description="Maximum connection timeout in seconds for HTTP requests", default=10
|
||||
)
|
||||
|
||||
HTTP_REQUEST_MAX_READ_TIMEOUT: Annotated[
|
||||
PositiveInt, Field(ge=60, description="Maximum read timeout in seconds for HTTP requests")
|
||||
] = 60
|
||||
HTTP_REQUEST_MAX_READ_TIMEOUT: int = Field(
|
||||
ge=1, description="Maximum read timeout in seconds for HTTP requests", default=60
|
||||
)
|
||||
|
||||
HTTP_REQUEST_MAX_WRITE_TIMEOUT: Annotated[
|
||||
PositiveInt, Field(ge=10, description="Maximum write timeout in seconds for HTTP requests")
|
||||
] = 20
|
||||
HTTP_REQUEST_MAX_WRITE_TIMEOUT: int = Field(
|
||||
ge=1, description="Maximum write timeout in seconds for HTTP requests", default=20
|
||||
)
|
||||
|
||||
HTTP_REQUEST_NODE_MAX_BINARY_SIZE: PositiveInt = Field(
|
||||
description="Maximum allowed size in bytes for binary data in HTTP requests",
|
||||
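The three HTTP request timeout fields drop the `Annotated[PositiveInt, Field(ge=10/60)]` form for plain `int` fields with `ge=1`, so values below the old floors can now be configured. A small sketch of the effect, assuming pydantic-settings reads the value from the environment (the class name is illustrative):

```python
import os

from pydantic import Field
from pydantic_settings import BaseSettings


class HttpTimeoutSketch(BaseSettings):
    # Mirrors the new field definition: ge=1 instead of the previous ge=10 floor.
    HTTP_REQUEST_MAX_CONNECT_TIMEOUT: int = Field(
        ge=1, description="Maximum connection timeout in seconds for HTTP requests", default=10
    )


os.environ["HTTP_REQUEST_MAX_CONNECT_TIMEOUT"] = "5"
print(HttpTimeoutSketch().HTTP_REQUEST_MAX_CONNECT_TIMEOUT)  # 5; rejected under the old ge=10 constraint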
@ -614,6 +623,16 @@ class AuthConfig(BaseSettings):
|
||||
default=86400,
|
||||
)
|
||||
|
||||
CHANGE_EMAIL_LOCKOUT_DURATION: PositiveInt = Field(
|
||||
description="Time (in seconds) a user must wait before retrying change email after exceeding the rate limit.",
|
||||
default=86400,
|
||||
)
|
||||
|
||||
OWNER_TRANSFER_LOCKOUT_DURATION: PositiveInt = Field(
|
||||
description="Time (in seconds) a user must wait before retrying owner transfer after exceeding the rate limit.",
|
||||
default=86400,
|
||||
)
|
||||
|
||||
|
||||
class ModerationConfig(BaseSettings):
|
||||
"""
|
||||
@ -813,6 +832,41 @@ class CeleryBeatConfig(BaseSettings):
|
||||
)
|
||||
|
||||
|
||||
class CeleryScheduleTasksConfig(BaseSettings):
|
||||
ENABLE_CLEAN_EMBEDDING_CACHE_TASK: bool = Field(
|
||||
description="Enable clean embedding cache task",
|
||||
default=False,
|
||||
)
|
||||
ENABLE_CLEAN_UNUSED_DATASETS_TASK: bool = Field(
|
||||
description="Enable clean unused datasets task",
|
||||
default=False,
|
||||
)
|
||||
ENABLE_CREATE_TIDB_SERVERLESS_TASK: bool = Field(
|
||||
description="Enable create tidb service job task",
|
||||
default=False,
|
||||
)
|
||||
ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK: bool = Field(
|
||||
description="Enable update tidb service job status task",
|
||||
default=False,
|
||||
)
|
||||
ENABLE_CLEAN_MESSAGES: bool = Field(
|
||||
description="Enable clean messages task",
|
||||
default=False,
|
||||
)
|
||||
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: bool = Field(
|
||||
description="Enable mail clean document notify task",
|
||||
default=False,
|
||||
)
|
||||
ENABLE_DATASETS_QUEUE_MONITOR: bool = Field(
|
||||
description="Enable queue monitor task",
|
||||
default=False,
|
||||
)
|
||||
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK: bool = Field(
|
||||
description="Enable check upgradable plugin task",
|
||||
default=True,
|
||||
)
|
||||
|
||||
|
||||
class PositionConfig(BaseSettings):
|
||||
POSITION_PROVIDER_PINS: str = Field(
|
||||
description="Comma-separated list of pinned model providers",
|
||||
@ -942,5 +996,6 @@ class FeatureConfig(
|
||||
# hosted services config
|
||||
HostedServiceConfig,
|
||||
CeleryBeatConfig,
|
||||
CeleryScheduleTasksConfig,
|
||||
):
|
||||
pass
|
||||
|
||||
@ -10,6 +10,7 @@ from .storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
|
||||
from .storage.amazon_s3_storage_config import S3StorageConfig
|
||||
from .storage.azure_blob_storage_config import AzureBlobStorageConfig
|
||||
from .storage.baidu_obs_storage_config import BaiduOBSStorageConfig
|
||||
from .storage.clickzetta_volume_storage_config import ClickZettaVolumeStorageConfig
|
||||
from .storage.google_cloud_storage_config import GoogleCloudStorageConfig
|
||||
from .storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig
|
||||
from .storage.oci_storage_config import OCIStorageConfig
|
||||
@ -20,6 +21,7 @@ from .storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
|
||||
from .vdb.analyticdb_config import AnalyticdbConfig
|
||||
from .vdb.baidu_vector_config import BaiduVectorDBConfig
|
||||
from .vdb.chroma_config import ChromaConfig
|
||||
from .vdb.clickzetta_config import ClickzettaConfig
|
||||
from .vdb.couchbase_config import CouchbaseConfig
|
||||
from .vdb.elasticsearch_config import ElasticsearchConfig
|
||||
from .vdb.huawei_cloud_config import HuaweiCloudConfig
|
||||
@ -52,6 +54,7 @@ class StorageConfig(BaseSettings):
|
||||
"aliyun-oss",
|
||||
"azure-blob",
|
||||
"baidu-obs",
|
||||
"clickzetta-volume",
|
||||
"google-storage",
|
||||
"huawei-obs",
|
||||
"oci-storage",
|
||||
@ -61,8 +64,9 @@ class StorageConfig(BaseSettings):
|
||||
"local",
|
||||
] = Field(
|
||||
description="Type of storage to use."
|
||||
" Options: 'opendal', '(deprecated) local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', 'google-storage', "
|
||||
"'huawei-obs', 'oci-storage', 'tencent-cos', 'volcengine-tos', 'supabase'. Default is 'opendal'.",
|
||||
" Options: 'opendal', '(deprecated) local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', "
|
||||
"'clickzetta-volume', 'google-storage', 'huawei-obs', 'oci-storage', 'tencent-cos', "
|
||||
"'volcengine-tos', 'supabase'. Default is 'opendal'.",
|
||||
default="opendal",
|
||||
)
|
||||
|
||||
@ -85,6 +89,11 @@ class VectorStoreConfig(BaseSettings):
|
||||
default=False,
|
||||
)
|
||||
|
||||
VECTOR_INDEX_NAME_PREFIX: Optional[str] = Field(
|
||||
description="Prefix used to create collection name in vector database",
|
||||
default="Vector_index",
|
||||
)
|
||||
|
||||
|
||||
class KeywordStoreConfig(BaseSettings):
|
||||
KEYWORD_STORE: str = Field(
|
||||
@ -135,7 +144,8 @@ class DatabaseConfig(BaseSettings):
|
||||
default="postgresql",
|
||||
)
|
||||
|
||||
@computed_field
|
||||
@computed_field # type: ignore[misc]
|
||||
@property
|
||||
def SQLALCHEMY_DATABASE_URI(self) -> str:
|
||||
db_extras = (
|
||||
f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}" if self.DB_CHARSET else self.DB_EXTRAS
|
||||
@ -210,8 +220,8 @@ class DatabaseConfig(BaseSettings):
|
||||
|
||||
class CeleryConfig(DatabaseConfig):
|
||||
CELERY_BACKEND: str = Field(
|
||||
description="Backend for Celery task results. Options: 'database', 'redis'.",
|
||||
default="database",
|
||||
description="Backend for Celery task results. Options: 'database', 'redis', 'rabbitmq'.",
|
||||
default="redis",
|
||||
)
|
||||
|
||||
CELERY_BROKER_URL: Optional[str] = Field(
|
||||
@ -240,11 +250,12 @@ class CeleryConfig(DatabaseConfig):
|
||||
|
||||
@computed_field
|
||||
def CELERY_RESULT_BACKEND(self) -> str | None:
|
||||
return (
|
||||
"db+{}".format(self.SQLALCHEMY_DATABASE_URI)
|
||||
if self.CELERY_BACKEND == "database"
|
||||
else self.CELERY_BROKER_URL
|
||||
)
|
||||
if self.CELERY_BACKEND in ("database", "rabbitmq"):
|
||||
return f"db+{self.SQLALCHEMY_DATABASE_URI}"
|
||||
elif self.CELERY_BACKEND == "redis":
|
||||
return self.CELERY_BROKER_URL
|
||||
else:
|
||||
return None
|
||||
|
||||
@property
|
||||
def BROKER_USE_SSL(self) -> bool:
|
||||
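The default result backend switches from `database` to `redis`, and `CELERY_RESULT_BACKEND` now branches over three cases instead of two. A simplified sketch of the new resolution logic (not the actual Dify class; the defaults shown are illustrative):

```python
from typing import Optional

from pydantic import Field, computed_field
from pydantic_settings import BaseSettings


class CelerySketch(BaseSettings):
    CELERY_BACKEND: str = Field(default="redis")
    CELERY_BROKER_URL: Optional[str] = Field(default="redis://localhost:6379/1")
    SQLALCHEMY_DATABASE_URI: str = Field(default="postgresql://user:pass@localhost:5432/dify")

    @computed_field  # type: ignore[misc]
    @property
    def CELERY_RESULT_BACKEND(self) -> str | None:
        # database and rabbitmq store results in the relational database;
        # redis reuses the broker URL; anything else resolves to None.
        if self.CELERY_BACKEND in ("database", "rabbitmq"):
            return f"db+{self.SQLALCHEMY_DATABASE_URI}"
        elif self.CELERY_BACKEND == "redis":
            return self.CELERY_BROKER_URL
        else:
            return None


print(CelerySketch().CELERY_RESULT_BACKEND)  # redis://localhost:6379/1
```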
@ -297,6 +308,7 @@ class MiddlewareConfig(
|
||||
AliyunOSSStorageConfig,
|
||||
AzureBlobStorageConfig,
|
||||
BaiduOBSStorageConfig,
|
||||
ClickZettaVolumeStorageConfig,
|
||||
GoogleCloudStorageConfig,
|
||||
HuaweiCloudOBSStorageConfig,
|
||||
OCIStorageConfig,
|
||||
@ -309,6 +321,7 @@ class MiddlewareConfig(
|
||||
VectorStoreConfig,
|
||||
AnalyticdbConfig,
|
||||
ChromaConfig,
|
||||
ClickzettaConfig,
|
||||
HuaweiCloudConfig,
|
||||
MilvusConfig,
|
||||
MyScaleConfig,
|
||||
|
||||
@ -0,0 +1,65 @@
|
||||
"""ClickZetta Volume Storage Configuration"""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import Field
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
|
||||
class ClickZettaVolumeStorageConfig(BaseSettings):
|
||||
"""Configuration for ClickZetta Volume storage."""
|
||||
|
||||
CLICKZETTA_VOLUME_USERNAME: Optional[str] = Field(
|
||||
description="Username for ClickZetta Volume authentication",
|
||||
default=None,
|
||||
)
|
||||
|
||||
CLICKZETTA_VOLUME_PASSWORD: Optional[str] = Field(
|
||||
description="Password for ClickZetta Volume authentication",
|
||||
default=None,
|
||||
)
|
||||
|
||||
CLICKZETTA_VOLUME_INSTANCE: Optional[str] = Field(
|
||||
description="ClickZetta instance identifier",
|
||||
default=None,
|
||||
)
|
||||
|
||||
CLICKZETTA_VOLUME_SERVICE: str = Field(
|
||||
description="ClickZetta service endpoint",
|
||||
default="api.clickzetta.com",
|
||||
)
|
||||
|
||||
CLICKZETTA_VOLUME_WORKSPACE: str = Field(
|
||||
description="ClickZetta workspace name",
|
||||
default="quick_start",
|
||||
)
|
||||
|
||||
CLICKZETTA_VOLUME_VCLUSTER: str = Field(
|
||||
description="ClickZetta virtual cluster name",
|
||||
default="default_ap",
|
||||
)
|
||||
|
||||
CLICKZETTA_VOLUME_SCHEMA: str = Field(
|
||||
description="ClickZetta schema name",
|
||||
default="dify",
|
||||
)
|
||||
|
||||
CLICKZETTA_VOLUME_TYPE: str = Field(
|
||||
description="ClickZetta volume type (table|user|external)",
|
||||
default="user",
|
||||
)
|
||||
|
||||
CLICKZETTA_VOLUME_NAME: Optional[str] = Field(
|
||||
description="ClickZetta volume name for external volumes",
|
||||
default=None,
|
||||
)
|
||||
|
||||
CLICKZETTA_VOLUME_TABLE_PREFIX: str = Field(
|
||||
description="Prefix for ClickZetta volume table names",
|
||||
default="dataset_",
|
||||
)
|
||||
|
||||
CLICKZETTA_VOLUME_DIFY_PREFIX: str = Field(
|
||||
description="Directory prefix for User Volume to organize Dify files",
|
||||
default="dify_km",
|
||||
)
|
||||
api/configs/middleware/vdb/clickzetta_config.py (new file, 69 lines)
@ -0,0 +1,69 @@
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class ClickzettaConfig(BaseModel):
|
||||
"""
|
||||
Clickzetta Lakehouse vector database configuration
|
||||
"""
|
||||
|
||||
CLICKZETTA_USERNAME: Optional[str] = Field(
|
||||
description="Username for authenticating with Clickzetta Lakehouse",
|
||||
default=None,
|
||||
)
|
||||
|
||||
CLICKZETTA_PASSWORD: Optional[str] = Field(
|
||||
description="Password for authenticating with Clickzetta Lakehouse",
|
||||
default=None,
|
||||
)
|
||||
|
||||
CLICKZETTA_INSTANCE: Optional[str] = Field(
|
||||
description="Clickzetta Lakehouse instance ID",
|
||||
default=None,
|
||||
)
|
||||
|
||||
CLICKZETTA_SERVICE: Optional[str] = Field(
|
||||
description="Clickzetta API service endpoint (e.g., 'api.clickzetta.com')",
|
||||
default="api.clickzetta.com",
|
||||
)
|
||||
|
||||
CLICKZETTA_WORKSPACE: Optional[str] = Field(
|
||||
description="Clickzetta workspace name",
|
||||
default="default",
|
||||
)
|
||||
|
||||
CLICKZETTA_VCLUSTER: Optional[str] = Field(
|
||||
description="Clickzetta virtual cluster name",
|
||||
default="default_ap",
|
||||
)
|
||||
|
||||
CLICKZETTA_SCHEMA: Optional[str] = Field(
|
||||
description="Database schema name in Clickzetta",
|
||||
default="public",
|
||||
)
|
||||
|
||||
CLICKZETTA_BATCH_SIZE: Optional[int] = Field(
|
||||
description="Batch size for bulk insert operations",
|
||||
default=100,
|
||||
)
|
||||
|
||||
CLICKZETTA_ENABLE_INVERTED_INDEX: Optional[bool] = Field(
|
||||
description="Enable inverted index for full-text search capabilities",
|
||||
default=True,
|
||||
)
|
||||
|
||||
CLICKZETTA_ANALYZER_TYPE: Optional[str] = Field(
|
||||
description="Analyzer type for full-text search: keyword, english, chinese, unicode",
|
||||
default="chinese",
|
||||
)
|
||||
|
||||
CLICKZETTA_ANALYZER_MODE: Optional[str] = Field(
|
||||
description="Analyzer mode for tokenization: max_word (fine-grained) or smart (intelligent)",
|
||||
default="smart",
|
||||
)
|
||||
|
||||
CLICKZETTA_VECTOR_DISTANCE_FUNCTION: Optional[str] = Field(
|
||||
description="Distance function for vector similarity: l2_distance or cosine_distance",
|
||||
default="cosine_distance",
|
||||
)
|
||||
@ -1,12 +1,13 @@
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import Field, PositiveInt
|
||||
from pydantic import Field, PositiveInt, model_validator
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
|
||||
class ElasticsearchConfig(BaseSettings):
|
||||
"""
|
||||
Configuration settings for Elasticsearch
|
||||
Configuration settings for both self-managed and Elastic Cloud deployments.
|
||||
Can load from environment variables or .env files.
|
||||
"""
|
||||
|
||||
ELASTICSEARCH_HOST: Optional[str] = Field(
|
||||
@ -28,3 +29,50 @@ class ElasticsearchConfig(BaseSettings):
|
||||
description="Password for authenticating with Elasticsearch (default is 'elastic')",
|
||||
default="elastic",
|
||||
)
|
||||
|
||||
# Elastic Cloud (optional)
|
||||
ELASTICSEARCH_USE_CLOUD: Optional[bool] = Field(
|
||||
description="Set to True to use Elastic Cloud instead of self-hosted Elasticsearch", default=False
|
||||
)
|
||||
ELASTICSEARCH_CLOUD_URL: Optional[str] = Field(
|
||||
description="Full URL for Elastic Cloud deployment (e.g., 'https://example.es.region.aws.found.io:443')",
|
||||
default=None,
|
||||
)
|
||||
ELASTICSEARCH_API_KEY: Optional[str] = Field(
|
||||
description="API key for authenticating with Elastic Cloud", default=None
|
||||
)
|
||||
|
||||
# Common options
|
||||
ELASTICSEARCH_CA_CERTS: Optional[str] = Field(
|
||||
description="Path to CA certificate file for SSL verification", default=None
|
||||
)
|
||||
ELASTICSEARCH_VERIFY_CERTS: bool = Field(
|
||||
description="Whether to verify SSL certificates (default is False)", default=False
|
||||
)
|
||||
ELASTICSEARCH_REQUEST_TIMEOUT: int = Field(
|
||||
description="Request timeout in milliseconds (default is 100000)", default=100000
|
||||
)
|
||||
ELASTICSEARCH_RETRY_ON_TIMEOUT: bool = Field(
|
||||
description="Whether to retry requests on timeout (default is True)", default=True
|
||||
)
|
||||
ELASTICSEARCH_MAX_RETRIES: int = Field(
|
||||
description="Maximum number of retry attempts (default is 10000)", default=10000
|
||||
)
|
||||
|
||||
@model_validator(mode="after")
|
||||
def validate_elasticsearch_config(self):
|
||||
"""Validate Elasticsearch configuration based on deployment type."""
|
||||
if self.ELASTICSEARCH_USE_CLOUD:
|
||||
if not self.ELASTICSEARCH_CLOUD_URL:
|
||||
raise ValueError("ELASTICSEARCH_CLOUD_URL is required when using Elastic Cloud")
|
||||
if not self.ELASTICSEARCH_API_KEY:
|
||||
raise ValueError("ELASTICSEARCH_API_KEY is required when using Elastic Cloud")
|
||||
else:
|
||||
if not self.ELASTICSEARCH_HOST:
|
||||
raise ValueError("ELASTICSEARCH_HOST is required for self-hosted Elasticsearch")
|
||||
if not self.ELASTICSEARCH_USERNAME:
|
||||
raise ValueError("ELASTICSEARCH_USERNAME is required for self-hosted Elasticsearch")
|
||||
if not self.ELASTICSEARCH_PASSWORD:
|
||||
raise ValueError("ELASTICSEARCH_PASSWORD is required for self-hosted Elasticsearch")
|
||||
|
||||
return self
|
||||
|
||||
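With the new `model_validator`, a cloud deployment is only accepted when both `ELASTICSEARCH_CLOUD_URL` and `ELASTICSEARCH_API_KEY` are set. A hedged usage sketch (the import path and the values are assumptions based on this diff, not verified against the repository):

```python
import os

from pydantic import ValidationError

# assumed module path for the class shown above
from configs.middleware.vdb.elasticsearch_config import ElasticsearchConfig

os.environ["ELASTICSEARCH_USE_CLOUD"] = "true"
os.environ["ELASTICSEARCH_CLOUD_URL"] = "https://example.es.us-east-1.aws.found.io:443"
os.environ["ELASTICSEARCH_API_KEY"] = "example-api-key"

config = ElasticsearchConfig()  # passes: both cloud settings are present
print(config.ELASTICSEARCH_CLOUD_URL)

os.environ.pop("ELASTICSEARCH_API_KEY")
try:
    ElasticsearchConfig()  # fails: ELASTICSEARCH_API_KEY is required when using Elastic Cloud
except ValidationError as e:
    print(e)
```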
@ -28,3 +28,8 @@ class TableStoreConfig(BaseSettings):
|
||||
description="AccessKey secret for the instance name",
|
||||
default=None,
|
||||
)
|
||||
|
||||
TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE: bool = Field(
|
||||
description="Whether to normalize full-text search scores to [0, 1]",
|
||||
default=False,
|
||||
)
|
||||
|
||||
@ -12,6 +12,16 @@ class OTelConfig(BaseSettings):
|
||||
default=False,
|
||||
)
|
||||
|
||||
OTLP_TRACE_ENDPOINT: str = Field(
|
||||
description="OTLP trace endpoint",
|
||||
default="",
|
||||
)
|
||||
|
||||
OTLP_METRIC_ENDPOINT: str = Field(
|
||||
description="OTLP metric endpoint",
|
||||
default="",
|
||||
)
|
||||
|
||||
OTLP_BASE_ENDPOINT: str = Field(
|
||||
description="OTLP base endpoint",
|
||||
default="http://localhost:4318",
|
||||
|
||||
@ -76,7 +76,7 @@ class ApolloClient:
|
||||
code, body = http_request(url, timeout=3, headers=self._sign_headers(url))
|
||||
if code == 200:
|
||||
if not body:
|
||||
logger.error(f"get_json_from_net load configs failed, body is {body}")
|
||||
logger.error("get_json_from_net load configs failed, body is %s", body)
|
||||
return None
|
||||
data = json.loads(body)
|
||||
data = data["configurations"]
|
||||
@ -207,7 +207,7 @@ class ApolloClient:
|
||||
# if the length is 0 it is returned directly
|
||||
if len(notifications) == 0:
|
||||
return
|
||||
url = "{}/notifications/v2".format(self.config_url)
|
||||
url = f"{self.config_url}/notifications/v2"
|
||||
params = {
|
||||
"appId": self.app_id,
|
||||
"cluster": self.cluster,
|
||||
@ -222,7 +222,7 @@ class ApolloClient:
|
||||
return
|
||||
if http_code == 200:
|
||||
if not body:
|
||||
logger.error(f"_long_poll load configs failed,body is {body}")
|
||||
logger.error("_long_poll load configs failed,body is %s", body)
|
||||
return
|
||||
data = json.loads(body)
|
||||
for entry in data:
|
||||
@ -273,12 +273,12 @@ class ApolloClient:
|
||||
time.sleep(60 * 10) # 10 minutes
|
||||
|
||||
def _do_heart_beat(self, namespace):
|
||||
url = "{}/configs/{}/{}/{}?ip={}".format(self.config_url, self.app_id, self.cluster, namespace, self.ip)
|
||||
url = f"{self.config_url}/configs/{self.app_id}/{self.cluster}/{namespace}?ip={self.ip}"
|
||||
try:
|
||||
code, body = http_request(url, timeout=3, headers=self._sign_headers(url))
|
||||
if code == 200:
|
||||
if not body:
|
||||
logger.error(f"_do_heart_beat load configs failed,body is {body}")
|
||||
logger.error("_do_heart_beat load configs failed,body is %s", body)
|
||||
return None
|
||||
data = json.loads(body)
|
||||
if self.last_release_key == data["releaseKey"]:
|
||||
|
||||
@ -24,7 +24,7 @@ def url_encode_wrapper(params):
|
||||
|
||||
|
||||
def no_key_cache_key(namespace, key):
|
||||
return "{}{}{}".format(namespace, len(namespace), key)
|
||||
return f"{namespace}{len(namespace)}{key}"
|
||||
|
||||
|
||||
# Returns whether the obtained value is obtained, and None if it does not
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
from configs import dify_config
|
||||
|
||||
HIDDEN_VALUE = "[__HIDDEN__]"
|
||||
UNKNOWN_VALUE = "[__UNKNOWN__]"
|
||||
UUID_NIL = "00000000-0000-0000-0000-000000000000"
|
||||
|
||||
DEFAULT_FILE_NUMBER_LIMITS = 3
|
||||
@ -8,10 +9,10 @@ DEFAULT_FILE_NUMBER_LIMITS = 3
|
||||
IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"]
|
||||
IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS])
|
||||
|
||||
VIDEO_EXTENSIONS = ["mp4", "mov", "mpeg", "mpga"]
|
||||
VIDEO_EXTENSIONS = ["mp4", "mov", "mpeg", "webm"]
|
||||
VIDEO_EXTENSIONS.extend([ext.upper() for ext in VIDEO_EXTENSIONS])
|
||||
|
||||
AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "webm", "amr"]
|
||||
AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "amr", "mpga"]
|
||||
AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS])
|
||||
|
||||
|
||||
|
||||
@ -28,5 +28,5 @@ def supported_language(lang):
|
||||
if lang in languages:
|
||||
return lang
|
||||
|
||||
error = "{lang} is not a valid language.".format(lang=lang)
|
||||
error = f"{lang} is not a valid language."
|
||||
raise ValueError(error)
|
||||
|
||||
@ -84,6 +84,7 @@ from .datasets import (
|
||||
external,
|
||||
hit_testing,
|
||||
metadata,
|
||||
upload_file,
|
||||
website,
|
||||
)
|
||||
|
||||
|
||||
@ -56,7 +56,7 @@ class InsertExploreAppListApi(Resource):
|
||||
parser.add_argument("position", type=int, required=True, nullable=False, location="json")
|
||||
args = parser.parse_args()
|
||||
|
||||
app = db.session.execute(select(App).filter(App.id == args["app_id"])).scalar_one_or_none()
|
||||
app = db.session.execute(select(App).where(App.id == args["app_id"])).scalar_one_or_none()
|
||||
if not app:
|
||||
raise NotFound(f"App '{args['app_id']}' is not found")
|
||||
|
||||
@ -74,7 +74,7 @@ class InsertExploreAppListApi(Resource):
|
||||
|
||||
with Session(db.engine) as session:
|
||||
recommended_app = session.execute(
|
||||
select(RecommendedApp).filter(RecommendedApp.app_id == args["app_id"])
|
||||
select(RecommendedApp).where(RecommendedApp.app_id == args["app_id"])
|
||||
).scalar_one_or_none()
|
||||
|
||||
if not recommended_app:
|
||||
@ -117,21 +117,21 @@ class InsertExploreAppApi(Resource):
|
||||
def delete(self, app_id):
|
||||
with Session(db.engine) as session:
|
||||
recommended_app = session.execute(
|
||||
select(RecommendedApp).filter(RecommendedApp.app_id == str(app_id))
|
||||
select(RecommendedApp).where(RecommendedApp.app_id == str(app_id))
|
||||
).scalar_one_or_none()
|
||||
|
||||
if not recommended_app:
|
||||
return {"result": "success"}, 204
|
||||
|
||||
with Session(db.engine) as session:
|
||||
app = session.execute(select(App).filter(App.id == recommended_app.app_id)).scalar_one_or_none()
|
||||
app = session.execute(select(App).where(App.id == recommended_app.app_id)).scalar_one_or_none()
|
||||
|
||||
if app:
|
||||
app.is_public = False
|
||||
|
||||
with Session(db.engine) as session:
|
||||
installed_apps = session.execute(
|
||||
select(InstalledApp).filter(
|
||||
select(InstalledApp).where(
|
||||
InstalledApp.app_id == recommended_app.app_id,
|
||||
InstalledApp.tenant_id != InstalledApp.app_owner_tenant_id,
|
||||
)
|
||||
|
||||
@ -61,7 +61,7 @@ class BaseApiKeyListResource(Resource):
|
||||
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
|
||||
keys = (
|
||||
db.session.query(ApiToken)
|
||||
.filter(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id)
|
||||
.where(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id)
|
||||
.all()
|
||||
)
|
||||
return {"items": keys}
|
||||
@ -76,7 +76,7 @@ class BaseApiKeyListResource(Resource):
|
||||
|
||||
current_key_count = (
|
||||
db.session.query(ApiToken)
|
||||
.filter(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id)
|
||||
.where(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id)
|
||||
.count()
|
||||
)
|
||||
|
||||
@ -117,7 +117,7 @@ class BaseApiKeyResource(Resource):
|
||||
|
||||
key = (
|
||||
db.session.query(ApiToken)
|
||||
.filter(
|
||||
.where(
|
||||
getattr(ApiToken, self.resource_id_field) == resource_id,
|
||||
ApiToken.type == self.resource_type,
|
||||
ApiToken.id == api_key_id,
|
||||
@ -128,7 +128,7 @@ class BaseApiKeyResource(Resource):
|
||||
if key is None:
|
||||
flask_restful.abort(404, message="API key not found")
|
||||
|
||||
db.session.query(ApiToken).filter(ApiToken.id == api_key_id).delete()
|
||||
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
|
||||
db.session.commit()
|
||||
|
||||
return {"result": "success"}, 204
|
||||
|
||||
@ -86,7 +86,7 @@ class AnnotationReplyActionStatusApi(Resource):
|
||||
raise Forbidden()
|
||||
|
||||
job_id = str(job_id)
|
||||
app_annotation_job_key = "{}_app_annotation_job_{}".format(action, str(job_id))
|
||||
app_annotation_job_key = f"{action}_app_annotation_job_{str(job_id)}"
|
||||
cache_result = redis_client.get(app_annotation_job_key)
|
||||
if cache_result is None:
|
||||
raise ValueError("The job does not exist.")
|
||||
@ -94,13 +94,13 @@ class AnnotationReplyActionStatusApi(Resource):
|
||||
job_status = cache_result.decode()
|
||||
error_msg = ""
|
||||
if job_status == "error":
|
||||
app_annotation_error_key = "{}_app_annotation_error_{}".format(action, str(job_id))
|
||||
app_annotation_error_key = f"{action}_app_annotation_error_{str(job_id)}"
|
||||
error_msg = redis_client.get(app_annotation_error_key).decode()
|
||||
|
||||
return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200
|
||||
|
||||
|
||||
class AnnotationListApi(Resource):
|
||||
class AnnotationApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@ -123,22 +123,6 @@ class AnnotationListApi(Resource):
|
||||
}
|
||||
return response, 200
|
||||
|
||||
|
||||
class AnnotationExportApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def get(self, app_id):
|
||||
if not current_user.is_editor:
|
||||
raise Forbidden()
|
||||
|
||||
app_id = str(app_id)
|
||||
annotation_list = AppAnnotationService.export_annotation_list_by_app_id(app_id)
|
||||
response = {"data": marshal(annotation_list, annotation_fields)}
|
||||
return response, 200
|
||||
|
||||
|
||||
class AnnotationCreateApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@ -156,6 +140,48 @@ class AnnotationCreateApi(Resource):
|
||||
annotation = AppAnnotationService.insert_app_annotation_directly(args, app_id)
|
||||
return annotation
|
||||
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def delete(self, app_id):
|
||||
if not current_user.is_editor:
|
||||
raise Forbidden()
|
||||
|
||||
app_id = str(app_id)
|
||||
|
||||
# Use request.args.getlist to get annotation_ids array directly
|
||||
annotation_ids = request.args.getlist("annotation_id")
|
||||
|
||||
# If annotation_ids are provided, handle batch deletion
|
||||
if annotation_ids:
|
||||
# Check if any annotation_ids contain empty strings or invalid values
|
||||
if not all(annotation_id.strip() for annotation_id in annotation_ids if annotation_id):
|
||||
return {
|
||||
"code": "bad_request",
|
||||
"message": "annotation_ids are required if the parameter is provided.",
|
||||
}, 400
|
||||
|
||||
result = AppAnnotationService.delete_app_annotations_in_batch(app_id, annotation_ids)
|
||||
return result, 204
|
||||
# If no annotation_ids are provided, handle clearing all annotations
|
||||
else:
|
||||
AppAnnotationService.clear_all_annotations(app_id)
|
||||
return {"result": "success"}, 204
|
||||
|
||||
|
||||
class AnnotationExportApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def get(self, app_id):
|
||||
if not current_user.is_editor:
|
||||
raise Forbidden()
|
||||
|
||||
app_id = str(app_id)
|
||||
annotation_list = AppAnnotationService.export_annotation_list_by_app_id(app_id)
|
||||
response = {"data": marshal(annotation_list, annotation_fields)}
|
||||
return response, 200
|
||||
|
||||
|
||||
class AnnotationUpdateDeleteApi(Resource):
|
||||
@setup_required
|
||||
@ -199,14 +225,15 @@ class AnnotationBatchImportApi(Resource):
|
||||
raise Forbidden()
|
||||
|
||||
app_id = str(app_id)
|
||||
# get file from request
|
||||
file = request.files["file"]
|
||||
# check file
|
||||
if "file" not in request.files:
|
||||
raise NoFileUploadedError()
|
||||
|
||||
if len(request.files) > 1:
|
||||
raise TooManyFilesError()
|
||||
|
||||
# get file from request
|
||||
file = request.files["file"]
|
||||
# check file type
|
||||
if not file.filename or not file.filename.lower().endswith(".csv"):
|
||||
raise ValueError("Invalid file type. Only CSV files are allowed")
|
||||
@ -223,14 +250,14 @@ class AnnotationBatchImportStatusApi(Resource):
|
||||
raise Forbidden()
|
||||
|
||||
job_id = str(job_id)
|
||||
indexing_cache_key = "app_annotation_batch_import_{}".format(str(job_id))
|
||||
indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}"
|
||||
cache_result = redis_client.get(indexing_cache_key)
|
||||
if cache_result is None:
|
||||
raise ValueError("The job does not exist.")
|
||||
job_status = cache_result.decode()
|
||||
error_msg = ""
|
||||
if job_status == "error":
|
||||
indexing_error_msg_key = "app_annotation_batch_import_error_msg_{}".format(str(job_id))
|
||||
indexing_error_msg_key = f"app_annotation_batch_import_error_msg_{str(job_id)}"
|
||||
error_msg = redis_client.get(indexing_error_msg_key).decode()
|
||||
|
||||
return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200
|
||||
@ -265,7 +292,7 @@ api.add_resource(AnnotationReplyActionApi, "/apps/<uuid:app_id>/annotation-reply
|
||||
api.add_resource(
|
||||
AnnotationReplyActionStatusApi, "/apps/<uuid:app_id>/annotation-reply/<string:action>/status/<uuid:job_id>"
|
||||
)
|
||||
api.add_resource(AnnotationListApi, "/apps/<uuid:app_id>/annotations")
|
||||
api.add_resource(AnnotationApi, "/apps/<uuid:app_id>/annotations")
|
||||
api.add_resource(AnnotationExportApi, "/apps/<uuid:app_id>/annotations/export")
|
||||
api.add_resource(AnnotationUpdateDeleteApi, "/apps/<uuid:app_id>/annotations/<uuid:annotation_id>")
|
||||
api.add_resource(AnnotationBatchImportApi, "/apps/<uuid:app_id>/annotations/batch-import")
|
||||
|
||||
@ -28,6 +28,12 @@ from services.feature_service import FeatureService
|
||||
ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "completion"]
|
||||
|
||||
|
||||
def _validate_description_length(description):
|
||||
if description and len(description) > 400:
|
||||
raise ValueError("Description cannot exceed 400 characters.")
|
||||
return description
|
||||
|
||||
|
||||
class AppListApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
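reqparse calls each argument's `type` callable on the raw value, so routing `description` through `_validate_description_length` turns an over-long description into a 400 response. A self-contained sketch of that behaviour (the app setup is illustrative):

```python
from flask import Flask
from flask_restful import reqparse
from werkzeug.exceptions import HTTPException


def _validate_description_length(description):
    if description and len(description) > 400:
        raise ValueError("Description cannot exceed 400 characters.")
    return description


app = Flask(__name__)

with app.test_request_context(json={"name": "demo", "description": "x" * 401}):
    parser = reqparse.RequestParser()
    parser.add_argument("name", type=str, required=True, location="json")
    parser.add_argument("description", type=_validate_description_length, location="json")
    try:
        parser.parse_args()
    except HTTPException as e:
        print(e.code)  # 400: the ValueError above is converted into a Bad Request
```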
@ -94,7 +100,7 @@ class AppListApi(Resource):
|
||||
"""Create app"""
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("name", type=str, required=True, location="json")
|
||||
parser.add_argument("description", type=str, location="json")
|
||||
parser.add_argument("description", type=_validate_description_length, location="json")
|
||||
parser.add_argument("mode", type=str, choices=ALLOW_CREATE_APP_MODES, location="json")
|
||||
parser.add_argument("icon_type", type=str, location="json")
|
||||
parser.add_argument("icon", type=str, location="json")
|
||||
@ -146,7 +152,7 @@ class AppApi(Resource):
|
||||
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("name", type=str, required=True, nullable=False, location="json")
|
||||
parser.add_argument("description", type=str, location="json")
|
||||
parser.add_argument("description", type=_validate_description_length, location="json")
|
||||
parser.add_argument("icon_type", type=str, location="json")
|
||||
parser.add_argument("icon", type=str, location="json")
|
||||
parser.add_argument("icon_background", type=str, location="json")
|
||||
@ -189,7 +195,7 @@ class AppCopyApi(Resource):
|
||||
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("name", type=str, location="json")
|
||||
parser.add_argument("description", type=str, location="json")
|
||||
parser.add_argument("description", type=_validate_description_length, location="json")
|
||||
parser.add_argument("icon_type", type=str, location="json")
|
||||
parser.add_argument("icon", type=str, location="json")
|
||||
parser.add_argument("icon_background", type=str, location="json")
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
from datetime import UTC, datetime
|
||||
from datetime import datetime
|
||||
|
||||
import pytz # pip install pytz
|
||||
from flask_login import current_user
|
||||
@ -19,6 +19,7 @@ from fields.conversation_fields import (
|
||||
conversation_pagination_fields,
|
||||
conversation_with_summary_pagination_fields,
|
||||
)
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.helper import DatetimeString
|
||||
from libs.login import login_required
|
||||
from models import Conversation, EndUser, Message, MessageAnnotation
|
||||
@ -48,10 +49,10 @@ class CompletionConversationApi(Resource):
|
||||
query = db.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.mode == "completion")
|
||||
|
||||
if args["keyword"]:
|
||||
query = query.join(Message, Message.conversation_id == Conversation.id).filter(
|
||||
query = query.join(Message, Message.conversation_id == Conversation.id).where(
|
||||
or_(
|
||||
Message.query.ilike("%{}%".format(args["keyword"])),
|
||||
Message.answer.ilike("%{}%".format(args["keyword"])),
|
||||
Message.query.ilike(f"%{args['keyword']}%"),
|
||||
Message.answer.ilike(f"%{args['keyword']}%"),
|
||||
)
|
||||
)
|
||||
|
||||
@ -120,7 +121,7 @@ class CompletionConversationDetailApi(Resource):
|
||||
|
||||
conversation = (
|
||||
db.session.query(Conversation)
|
||||
.filter(Conversation.id == conversation_id, Conversation.app_id == app_model.id)
|
||||
.where(Conversation.id == conversation_id, Conversation.app_id == app_model.id)
|
||||
.first()
|
||||
)
|
||||
|
||||
@ -173,14 +174,14 @@ class ChatConversationApi(Resource):
|
||||
query = db.select(Conversation).where(Conversation.app_id == app_model.id)
|
||||
|
||||
if args["keyword"]:
|
||||
keyword_filter = "%{}%".format(args["keyword"])
|
||||
keyword_filter = f"%{args['keyword']}%"
|
||||
query = (
|
||||
query.join(
|
||||
Message,
|
||||
Message.conversation_id == Conversation.id,
|
||||
)
|
||||
.join(subquery, subquery.c.conversation_id == Conversation.id)
|
||||
.filter(
|
||||
.where(
|
||||
or_(
|
||||
Message.query.ilike(keyword_filter),
|
||||
Message.answer.ilike(keyword_filter),
|
||||
@ -285,7 +286,7 @@ class ChatConversationDetailApi(Resource):
|
||||
|
||||
conversation = (
|
||||
db.session.query(Conversation)
|
||||
.filter(Conversation.id == conversation_id, Conversation.app_id == app_model.id)
|
||||
.where(Conversation.id == conversation_id, Conversation.app_id == app_model.id)
|
||||
.first()
|
||||
)
|
||||
|
||||
@ -307,7 +308,7 @@ api.add_resource(ChatConversationDetailApi, "/apps/<uuid:app_id>/chat-conversati
|
||||
def _get_conversation(app_model, conversation_id):
|
||||
conversation = (
|
||||
db.session.query(Conversation)
|
||||
.filter(Conversation.id == conversation_id, Conversation.app_id == app_model.id)
|
||||
.where(Conversation.id == conversation_id, Conversation.app_id == app_model.id)
|
||||
.first()
|
||||
)
|
||||
|
||||
@ -315,7 +316,7 @@ def _get_conversation(app_model, conversation_id):
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
|
||||
if not conversation.read_at:
|
||||
conversation.read_at = datetime.now(UTC).replace(tzinfo=None)
|
||||
conversation.read_at = naive_utc_now()
|
||||
conversation.read_account_id = current_user.id
|
||||
db.session.commit()
|
||||
|
||||
|
||||
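Several controllers swap the inline `datetime.now(UTC).replace(tzinfo=None)` expression for the shared `naive_utc_now()` helper. A minimal sketch of what the helper is assumed to do, based solely on the expressions it replaces in this diff:

```python
from datetime import UTC, datetime


def naive_utc_now() -> datetime:
    # assumed to match libs.datetime_utils.naive_utc_now: current UTC time
    # with the timezone information stripped (naive datetime)
    return datetime.now(UTC).replace(tzinfo=None)


stamp = naive_utc_now()
assert stamp.tzinfo is None
```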
@ -1,5 +1,3 @@
|
||||
import os
|
||||
|
||||
from flask_login import current_user
|
||||
from flask_restful import Resource, reqparse
|
||||
|
||||
@ -29,15 +27,12 @@ class RuleGenerateApi(Resource):
|
||||
args = parser.parse_args()
|
||||
|
||||
account = current_user
|
||||
PROMPT_GENERATION_MAX_TOKENS = int(os.getenv("PROMPT_GENERATION_MAX_TOKENS", "512"))
|
||||
|
||||
try:
|
||||
rules = LLMGenerator.generate_rule_config(
|
||||
tenant_id=account.current_tenant_id,
|
||||
instruction=args["instruction"],
|
||||
model_config=args["model_config"],
|
||||
no_variable=args["no_variable"],
|
||||
rule_config_max_tokens=PROMPT_GENERATION_MAX_TOKENS,
|
||||
)
|
||||
except ProviderTokenNotInitError as ex:
|
||||
raise ProviderNotInitializeError(ex.description)
|
||||
@ -64,14 +59,12 @@ class RuleCodeGenerateApi(Resource):
|
||||
args = parser.parse_args()
|
||||
|
||||
account = current_user
|
||||
CODE_GENERATION_MAX_TOKENS = int(os.getenv("CODE_GENERATION_MAX_TOKENS", "1024"))
|
||||
try:
|
||||
code_result = LLMGenerator.generate_code(
|
||||
tenant_id=account.current_tenant_id,
|
||||
instruction=args["instruction"],
|
||||
model_config=args["model_config"],
|
||||
code_language=args["code_language"],
|
||||
max_tokens=CODE_GENERATION_MAX_TOKENS,
|
||||
)
|
||||
except ProviderTokenNotInitError as ex:
|
||||
raise ProviderNotInitializeError(ex.description)
|
||||
|
||||
@ -26,7 +26,7 @@ class AppMCPServerController(Resource):
|
||||
@get_app_model
|
||||
@marshal_with(app_server_fields)
|
||||
def get(self, app_model):
|
||||
server = db.session.query(AppMCPServer).filter(AppMCPServer.app_id == app_model.id).first()
|
||||
server = db.session.query(AppMCPServer).where(AppMCPServer.app_id == app_model.id).first()
|
||||
return server
|
||||
|
||||
@setup_required
|
||||
@ -73,7 +73,7 @@ class AppMCPServerController(Resource):
|
||||
parser.add_argument("parameters", type=dict, required=True, location="json")
|
||||
parser.add_argument("status", type=str, required=False, location="json")
|
||||
args = parser.parse_args()
|
||||
server = db.session.query(AppMCPServer).filter(AppMCPServer.id == args["id"]).first()
|
||||
server = db.session.query(AppMCPServer).where(AppMCPServer.id == args["id"]).first()
|
||||
if not server:
|
||||
raise NotFound()
|
||||
|
||||
@ -104,8 +104,8 @@ class AppMCPServerRefreshController(Resource):
|
||||
raise NotFound()
|
||||
server = (
|
||||
db.session.query(AppMCPServer)
|
||||
.filter(AppMCPServer.id == server_id)
|
||||
.filter(AppMCPServer.tenant_id == current_user.current_tenant_id)
|
||||
.where(AppMCPServer.id == server_id)
|
||||
.where(AppMCPServer.tenant_id == current_user.current_tenant_id)
|
||||
.first()
|
||||
)
|
||||
if not server:
|
||||
|
||||
@ -27,7 +27,7 @@ from fields.conversation_fields import annotation_fields, message_detail_fields
|
||||
from libs.helper import uuid_value
|
||||
from libs.infinite_scroll_pagination import InfiniteScrollPagination
|
||||
from libs.login import login_required
|
||||
from models.model import AppMode, Conversation, Message, MessageAnnotation, MessageFeedback
|
||||
from models.model import AppMode, Conversation, Message, MessageAnnotation
|
||||
from services.annotation_service import AppAnnotationService
|
||||
from services.errors.conversation import ConversationNotExistsError
|
||||
from services.errors.message import MessageNotExistsError, SuggestedQuestionsAfterAnswerDisabledError
|
||||
@ -55,7 +55,7 @@ class ChatMessageListApi(Resource):
|
||||
|
||||
conversation = (
|
||||
db.session.query(Conversation)
|
||||
.filter(Conversation.id == args["conversation_id"], Conversation.app_id == app_model.id)
|
||||
.where(Conversation.id == args["conversation_id"], Conversation.app_id == app_model.id)
|
||||
.first()
|
||||
)
|
||||
|
||||
@ -65,7 +65,7 @@ class ChatMessageListApi(Resource):
|
||||
if args["first_id"]:
|
||||
first_message = (
|
||||
db.session.query(Message)
|
||||
.filter(Message.conversation_id == conversation.id, Message.id == args["first_id"])
|
||||
.where(Message.conversation_id == conversation.id, Message.id == args["first_id"])
|
||||
.first()
|
||||
)
|
||||
|
||||
@ -74,7 +74,7 @@ class ChatMessageListApi(Resource):
|
||||
|
||||
history_messages = (
|
||||
db.session.query(Message)
|
||||
.filter(
|
||||
.where(
|
||||
Message.conversation_id == conversation.id,
|
||||
Message.created_at < first_message.created_at,
|
||||
Message.id != first_message.id,
|
||||
@ -86,7 +86,7 @@ class ChatMessageListApi(Resource):
|
||||
else:
|
||||
history_messages = (
|
||||
db.session.query(Message)
|
||||
.filter(Message.conversation_id == conversation.id)
|
||||
.where(Message.conversation_id == conversation.id)
|
||||
.order_by(Message.created_at.desc())
|
||||
.limit(args["limit"])
|
||||
.all()
|
||||
@ -97,7 +97,7 @@ class ChatMessageListApi(Resource):
|
||||
current_page_first_message = history_messages[-1]
|
||||
rest_count = (
|
||||
db.session.query(Message)
|
||||
.filter(
|
||||
.where(
|
||||
Message.conversation_id == conversation.id,
|
||||
Message.created_at < current_page_first_message.created_at,
|
||||
Message.id != current_page_first_message.id,
|
||||
@ -124,33 +124,16 @@ class MessageFeedbackApi(Resource):
|
||||
parser.add_argument("rating", type=str, choices=["like", "dislike", None], location="json")
|
||||
args = parser.parse_args()
|
||||
|
||||
message_id = str(args["message_id"])
|
||||
|
||||
message = db.session.query(Message).filter(Message.id == message_id, Message.app_id == app_model.id).first()
|
||||
|
||||
if not message:
|
||||
raise NotFound("Message Not Exists.")
|
||||
|
||||
feedback = message.admin_feedback
|
||||
|
||||
if not args["rating"] and feedback:
|
||||
db.session.delete(feedback)
|
||||
elif args["rating"] and feedback:
|
||||
feedback.rating = args["rating"]
|
||||
elif not args["rating"] and not feedback:
|
||||
raise ValueError("rating cannot be None when feedback not exists")
|
||||
else:
|
||||
feedback = MessageFeedback(
|
||||
app_id=app_model.id,
|
||||
conversation_id=message.conversation_id,
|
||||
message_id=message.id,
|
||||
rating=args["rating"],
|
||||
from_source="admin",
|
||||
from_account_id=current_user.id,
|
||||
try:
|
||||
MessageService.create_feedback(
|
||||
app_model=app_model,
|
||||
message_id=str(args["message_id"]),
|
||||
user=current_user,
|
||||
rating=args.get("rating"),
|
||||
content=None,
|
||||
)
|
||||
db.session.add(feedback)
|
||||
|
||||
db.session.commit()
|
||||
except MessageNotExistsError:
|
||||
raise NotFound("Message Not Exists.")
|
||||
|
||||
return {"result": "success"}
|
||||
|
||||
@ -183,7 +166,7 @@ class MessageAnnotationCountApi(Resource):
|
||||
@account_initialization_required
|
||||
@get_app_model
|
||||
def get(self, app_model):
|
||||
count = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app_model.id).count()
|
||||
count = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_model.id).count()
|
||||
|
||||
return {"count": count}
|
||||
|
||||
@ -230,7 +213,7 @@ class MessageApi(Resource):
|
||||
def get(self, app_model, message_id):
|
||||
message_id = str(message_id)
|
||||
|
||||
message = db.session.query(Message).filter(Message.id == message_id, Message.app_id == app_model.id).first()
|
||||
message = db.session.query(Message).where(Message.id == message_id, Message.app_id == app_model.id).first()
|
||||
|
||||
if not message:
|
||||
raise NotFound("Message Not Exists.")
|
||||
|
||||
@ -42,7 +42,7 @@ class ModelConfigResource(Resource):
|
||||
if app_model.mode == AppMode.AGENT_CHAT.value or app_model.is_agent:
|
||||
# get original app model config
|
||||
original_app_model_config = (
|
||||
db.session.query(AppModelConfig).filter(AppModelConfig.id == app_model.app_model_config_id).first()
|
||||
db.session.query(AppModelConfig).where(AppModelConfig.id == app_model.app_model_config_id).first()
|
||||
)
|
||||
if original_app_model_config is None:
|
||||
raise ValueError("Original app model config not found")
|
||||
|
||||
@ -1,5 +1,3 @@
|
||||
from datetime import UTC, datetime
|
||||
|
||||
from flask_login import current_user
|
||||
from flask_restful import Resource, marshal_with, reqparse
|
||||
from werkzeug.exceptions import Forbidden, NotFound
|
||||
@ -10,6 +8,7 @@ from controllers.console.app.wraps import get_app_model
|
||||
from controllers.console.wraps import account_initialization_required, setup_required
|
||||
from extensions.ext_database import db
|
||||
from fields.app_fields import app_site_fields
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.login import login_required
|
||||
from models import Site
|
||||
|
||||
@ -50,7 +49,7 @@ class AppSite(Resource):
|
||||
if not current_user.is_editor:
|
||||
raise Forbidden()
|
||||
|
||||
site = db.session.query(Site).filter(Site.app_id == app_model.id).first()
|
||||
site = db.session.query(Site).where(Site.app_id == app_model.id).first()
|
||||
if not site:
|
||||
raise NotFound
|
||||
|
||||
@ -77,7 +76,7 @@ class AppSite(Resource):
|
||||
setattr(site, attr_name, value)
|
||||
|
||||
site.updated_by = current_user.id
|
||||
site.updated_at = datetime.now(UTC).replace(tzinfo=None)
|
||||
site.updated_at = naive_utc_now()
|
||||
db.session.commit()
|
||||
|
||||
return site
|
||||
@ -94,14 +93,14 @@ class AppSiteAccessTokenReset(Resource):
|
||||
if not current_user.is_admin_or_owner:
|
||||
raise Forbidden()
|
||||
|
||||
site = db.session.query(Site).filter(Site.app_id == app_model.id).first()
|
||||
site = db.session.query(Site).where(Site.app_id == app_model.id).first()
|
||||
|
||||
if not site:
|
||||
raise NotFound
|
||||
|
||||
site.code = Site.generate_code(16)
|
||||
site.updated_by = current_user.id
|
||||
site.updated_at = datetime.now(UTC).replace(tzinfo=None)
|
||||
site.updated_at = naive_utc_now()
|
||||
db.session.commit()
|
||||
|
||||
return site
|
||||
|
||||
@ -67,7 +67,7 @@ WHERE
response_data = []

with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "message_count": i.message_count})

@ -176,7 +176,7 @@ WHERE
response_data = []

with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "terminal_count": i.terminal_count})

@ -234,7 +234,7 @@ WHERE
response_data = []

with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append(
{"date": str(i.date), "token_count": i.token_count, "total_price": i.total_price, "currency": "USD"}

@ -310,7 +310,7 @@ ORDER BY
response_data = []

with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append(
{"date": str(i.date), "interactions": float(i.interactions.quantize(Decimal("0.01")))}

@ -373,7 +373,7 @@ WHERE
response_data = []

with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append(
{

@ -435,7 +435,7 @@ WHERE
response_data = []

with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "latency": round(i.latency * 1000, 4)})

@ -495,7 +495,7 @@ WHERE
response_data = []

with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "tps": round(i.tokens_per_second, 4)})

@ -2,6 +2,7 @@ from datetime import datetime
from decimal import Decimal

import pytz
import sqlalchemy as sa
from flask import jsonify
from flask_login import current_user
from flask_restful import Resource, reqparse

@ -71,7 +72,7 @@ WHERE
response_data = []

with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "runs": i.runs})

@ -133,7 +134,7 @@ WHERE
response_data = []

with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "terminal_count": i.terminal_count})

@ -195,7 +196,7 @@ WHERE
response_data = []

with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append(
{

@ -277,7 +278,7 @@ GROUP BY
response_data = []

with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append(
{"date": str(i.date), "interactions": float(i.interactions.quantize(Decimal("0.01")))}
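These statistics hunks swap `db.text(...)` (the Flask-SQLAlchemy re-export) for `sa.text(...)` from the `sqlalchemy` package imported as `sa`; both build the same textual SQL clause with bound parameters. A small, self-contained illustration of the new form (the engine URL and query below are placeholders, not taken from this diff):

```python
# Illustrative only; the engine, query, and column aliases are placeholders.
import sqlalchemy as sa

engine = sa.create_engine("sqlite:///:memory:")  # stands in for db.engine from extensions.ext_database

sql_query = "SELECT 1 AS message_count, date('now') AS date"
arg_dict = {}  # bind parameters, e.g. {"tenant_id": "..."} in the real controllers

with engine.begin() as conn:
    rs = conn.execute(sa.text(sql_query), arg_dict)
    for i in rs:
        print(i.date, i.message_count)
```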
@ -11,7 +11,7 @@ from models import App, AppMode
def _load_app_model(app_id: str) -> Optional[App]:
app_model = (
db.session.query(App)
.filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
.where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
.first()
)
return app_model
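This hunk, and many of the later ones, replace legacy `Query.filter(...)` calls with `Query.where(...)`, which SQLAlchemy 1.4+ provides as a synonym aligned with the 2.0-style API; the result sets are identical. A minimal, self-contained sketch with a toy model (not Dify's real `App` model):

```python
# Toy demonstration that Query.where() and Query.filter() are interchangeable.
import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class App(Base):  # toy stand-in, not the real models.App
    __tablename__ = "apps"
    id: Mapped[int] = mapped_column(primary_key=True)
    status: Mapped[str] = mapped_column(default="normal")


engine = sa.create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(App(id=1, status="normal"))
    session.commit()
    # Both spellings return the same row.
    assert (
        session.query(App).where(App.id == 1, App.status == "normal").first()
        == session.query(App).filter(App.id == 1, App.status == "normal").first()
    )
```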
@ -1,5 +1,3 @@
import datetime

from flask import request
from flask_restful import Resource, reqparse

@ -7,6 +5,7 @@ from constants.languages import supported_language
from controllers.console import api
from controllers.console.error import AlreadyActivateError
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.helper import StrLen, email, extract_remote_ip, timezone
from models.account import AccountStatus
from services.account_service import AccountService, RegisterService

@ -65,7 +64,7 @@ class ActivateApi(Resource):
account.timezone = args["timezone"]
account.interface_theme = "light"
account.status = AccountStatus.ACTIVE.value
account.initialized_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
db.session.commit()

token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))
@ -81,7 +81,7 @@ class OAuthDataSourceBinding(Resource):
oauth_provider.get_access_token(code)
except requests.exceptions.HTTPError as e:
logging.exception(
f"An error occurred during the OAuthCallback process with {provider}: {e.response.text}"
"An error occurred during the OAuthCallback process with %s: %s", provider, e.response.text
)
return {"error": "OAuth data source process failed"}, 400

@ -103,7 +103,9 @@ class OAuthDataSourceSync(Resource):
try:
oauth_provider.sync_data_source(binding_id)
except requests.exceptions.HTTPError as e:
logging.exception(f"An error occurred during the OAuthCallback process with {provider}: {e.response.text}")
logging.exception(
"An error occurred during the OAuthCallback process with %s: %s", provider, e.response.text
)
return {"error": "OAuth data source process failed"}, 400

return {"result": "success"}, 200
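The logging hunks here (and several later ones) move from eager f-string interpolation to lazy %-style arguments, so the message is only formatted when the record is actually emitted and the raw arguments remain available to handlers and filters. A short before/after sketch with placeholder values:

```python
import logging

provider = "notion"          # placeholder value
detail = "401 Unauthorized"  # placeholder value

# before: the string is built even if the record would be filtered out
logging.exception(f"An error occurred during the OAuthCallback process with {provider}: {detail}")

# after: formatting is deferred to the logging framework
logging.exception("An error occurred during the OAuthCallback process with %s: %s", provider, detail)
```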
@ -27,7 +27,19 @@ class InvalidTokenError(BaseHTTPException):

class PasswordResetRateLimitExceededError(BaseHTTPException):
error_code = "password_reset_rate_limit_exceeded"
description = "Too many password reset emails have been sent. Please try again in 1 minutes."
description = "Too many password reset emails have been sent. Please try again in 1 minute."
code = 429

class EmailChangeRateLimitExceededError(BaseHTTPException):
error_code = "email_change_rate_limit_exceeded"
description = "Too many email change emails have been sent. Please try again in 1 minute."
code = 429

class OwnerTransferRateLimitExceededError(BaseHTTPException):
error_code = "owner_transfer_rate_limit_exceeded"
description = "Too many owner transfer emails have been sent. Please try again in 1 minute."
code = 429

@ -65,3 +77,39 @@ class EmailPasswordResetLimitError(BaseHTTPException):
error_code = "email_password_reset_limit"
description = "Too many failed password reset attempts. Please try again in 24 hours."
code = 429

class EmailChangeLimitError(BaseHTTPException):
error_code = "email_change_limit"
description = "Too many failed email change attempts. Please try again in 24 hours."
code = 429

class EmailAlreadyInUseError(BaseHTTPException):
error_code = "email_already_in_use"
description = "A user with this email already exists."
code = 400

class OwnerTransferLimitError(BaseHTTPException):
error_code = "owner_transfer_limit"
description = "Too many failed owner transfer attempts. Please try again in 24 hours."
code = 429

class NotOwnerError(BaseHTTPException):
error_code = "not_owner"
description = "You are not the owner of the workspace."
code = 400

class CannotTransferOwnerToSelfError(BaseHTTPException):
error_code = "cannot_transfer_owner_to_self"
description = "You cannot transfer ownership to yourself."
code = 400

class MemberNotInTenantError(BaseHTTPException):
error_code = "member_not_in_tenant"
description = "The member is not in the workspace."
code = 400
@ -1,5 +1,4 @@
|
||||
import logging
|
||||
from datetime import UTC, datetime
|
||||
from typing import Optional
|
||||
|
||||
import requests
|
||||
@ -13,6 +12,7 @@ from configs import dify_config
|
||||
from constants.languages import languages
|
||||
from events.tenant_event import tenant_was_created
|
||||
from extensions.ext_database import db
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.helper import extract_remote_ip
|
||||
from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo
|
||||
from models import Account
|
||||
@ -80,7 +80,7 @@ class OAuthCallback(Resource):
|
||||
user_info = oauth_provider.get_user_info(token)
|
||||
except requests.exceptions.RequestException as e:
|
||||
error_text = e.response.text if e.response else str(e)
|
||||
logging.exception(f"An error occurred during the OAuth process with {provider}: {error_text}")
|
||||
logging.exception("An error occurred during the OAuth process with %s: %s", provider, error_text)
|
||||
return {"error": "OAuth process failed"}, 400
|
||||
|
||||
if invite_token and RegisterService.is_valid_invite_token(invite_token):
|
||||
@ -110,7 +110,7 @@ class OAuthCallback(Resource):
|
||||
|
||||
if account.status == AccountStatus.PENDING.value:
|
||||
account.status = AccountStatus.ACTIVE.value
|
||||
account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
|
||||
account.initialized_at = naive_utc_now()
|
||||
db.session.commit()
|
||||
|
||||
try:
|
||||
|
||||
@ -1,4 +1,3 @@
|
||||
import datetime
|
||||
import json
|
||||
|
||||
from flask import request
|
||||
@ -15,6 +14,7 @@ from core.rag.extractor.entity.extract_setting import ExtractSetting
|
||||
from core.rag.extractor.notion_extractor import NotionExtractor
|
||||
from extensions.ext_database import db
|
||||
from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.login import login_required
|
||||
from models import DataSourceOauthBinding, Document
|
||||
from services.dataset_service import DatasetService, DocumentService
|
||||
@ -30,7 +30,7 @@ class DataSourceApi(Resource):
|
||||
# get workspace data source integrates
|
||||
data_source_integrates = (
|
||||
db.session.query(DataSourceOauthBinding)
|
||||
.filter(
|
||||
.where(
|
||||
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
|
||||
DataSourceOauthBinding.disabled == False,
|
||||
)
|
||||
@ -88,7 +88,7 @@ class DataSourceApi(Resource):
|
||||
if action == "enable":
|
||||
if data_source_binding.disabled:
|
||||
data_source_binding.disabled = False
|
||||
data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
|
||||
data_source_binding.updated_at = naive_utc_now()
|
||||
db.session.add(data_source_binding)
|
||||
db.session.commit()
|
||||
else:
|
||||
@ -97,7 +97,7 @@ class DataSourceApi(Resource):
|
||||
if action == "disable":
|
||||
if not data_source_binding.disabled:
|
||||
data_source_binding.disabled = True
|
||||
data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
|
||||
data_source_binding.updated_at = naive_utc_now()
|
||||
db.session.add(data_source_binding)
|
||||
db.session.commit()
|
||||
else:
|
||||
@ -171,7 +171,7 @@ class DataSourceNotionApi(Resource):
|
||||
page_id = str(page_id)
|
||||
with Session(db.engine) as session:
|
||||
data_source_binding = session.execute(
|
||||
select(DataSourceOauthBinding).filter(
|
||||
select(DataSourceOauthBinding).where(
|
||||
db.and_(
|
||||
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
|
||||
DataSourceOauthBinding.provider == "notion",
|
||||
|
||||
@ -41,7 +41,7 @@ def _validate_name(name):
|
||||
|
||||
|
||||
def _validate_description_length(description):
|
||||
if len(description) > 400:
|
||||
if description and len(description) > 400:
|
||||
raise ValueError("Description cannot exceed 400 characters.")
|
||||
return description
|
||||
|
||||
@ -113,7 +113,7 @@ class DatasetListApi(Resource):
|
||||
)
|
||||
parser.add_argument(
|
||||
"description",
|
||||
type=str,
|
||||
type=_validate_description_length,
|
||||
nullable=True,
|
||||
required=False,
|
||||
default="",
|
||||
@ -211,10 +211,6 @@ class DatasetApi(Resource):
|
||||
else:
|
||||
data["embedding_available"] = True
|
||||
|
||||
if data.get("permission") == "partial_members":
|
||||
part_users_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
|
||||
data.update({"partial_member_list": part_users_list})
|
||||
|
||||
return data, 200
|
||||
|
||||
@setup_required
|
||||
@ -416,7 +412,7 @@ class DatasetIndexingEstimateApi(Resource):
|
||||
file_ids = args["info_list"]["file_info_list"]["file_ids"]
|
||||
file_details = (
|
||||
db.session.query(UploadFile)
|
||||
.filter(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id.in_(file_ids))
|
||||
.where(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id.in_(file_ids))
|
||||
.all()
|
||||
)
|
||||
|
||||
@ -521,14 +517,14 @@ class DatasetIndexingStatusApi(Resource):
|
||||
dataset_id = str(dataset_id)
|
||||
documents = (
|
||||
db.session.query(Document)
|
||||
.filter(Document.dataset_id == dataset_id, Document.tenant_id == current_user.current_tenant_id)
|
||||
.where(Document.dataset_id == dataset_id, Document.tenant_id == current_user.current_tenant_id)
|
||||
.all()
|
||||
)
|
||||
documents_status = []
|
||||
for document in documents:
|
||||
completed_segments = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(
|
||||
.where(
|
||||
DocumentSegment.completed_at.isnot(None),
|
||||
DocumentSegment.document_id == str(document.id),
|
||||
DocumentSegment.status != "re_segment",
|
||||
@ -537,7 +533,7 @@ class DatasetIndexingStatusApi(Resource):
|
||||
)
|
||||
total_segments = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
|
||||
.where(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
|
||||
.count()
|
||||
)
|
||||
# Create a dictionary with document attributes and additional fields
|
||||
@ -572,7 +568,7 @@ class DatasetApiKeyApi(Resource):
|
||||
def get(self):
|
||||
keys = (
|
||||
db.session.query(ApiToken)
|
||||
.filter(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id)
|
||||
.where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id)
|
||||
.all()
|
||||
)
|
||||
return {"items": keys}
|
||||
@ -588,7 +584,7 @@ class DatasetApiKeyApi(Resource):
|
||||
|
||||
current_key_count = (
|
||||
db.session.query(ApiToken)
|
||||
.filter(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id)
|
||||
.where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id)
|
||||
.count()
|
||||
)
|
||||
|
||||
@ -624,7 +620,7 @@ class DatasetApiDeleteApi(Resource):
|
||||
|
||||
key = (
|
||||
db.session.query(ApiToken)
|
||||
.filter(
|
||||
.where(
|
||||
ApiToken.tenant_id == current_user.current_tenant_id,
|
||||
ApiToken.type == self.resource_type,
|
||||
ApiToken.id == api_key_id,
|
||||
@ -635,7 +631,7 @@ class DatasetApiDeleteApi(Resource):
|
||||
if key is None:
|
||||
flask_restful.abort(404, message="API key not found")
|
||||
|
||||
db.session.query(ApiToken).filter(ApiToken.id == api_key_id).delete()
|
||||
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
|
||||
db.session.commit()
|
||||
|
||||
return {"result": "success"}, 204
|
||||
@ -687,6 +683,7 @@ class DatasetRetrievalSettingApi(Resource):
|
||||
| VectorType.HUAWEI_CLOUD
|
||||
| VectorType.TENCENT
|
||||
| VectorType.MATRIXONE
|
||||
| VectorType.CLICKZETTA
|
||||
):
|
||||
return {
|
||||
"retrieval_method": [
|
||||
@ -735,6 +732,7 @@ class DatasetRetrievalSettingMockApi(Resource):
|
||||
| VectorType.TENCENT
|
||||
| VectorType.HUAWEI_CLOUD
|
||||
| VectorType.MATRIXONE
|
||||
| VectorType.CLICKZETTA
|
||||
):
|
||||
return {
|
||||
"retrieval_method": [
|
||||
|
||||
@ -1,6 +1,5 @@
|
||||
import logging
|
||||
from argparse import ArgumentTypeError
|
||||
from datetime import UTC, datetime
|
||||
from typing import cast
|
||||
|
||||
from flask import request
|
||||
@ -49,6 +48,7 @@ from fields.document_fields import (
|
||||
document_status_fields,
|
||||
document_with_segments_fields,
|
||||
)
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.login import login_required
|
||||
from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile
|
||||
from services.dataset_service import DatasetService, DocumentService
|
||||
@ -124,7 +124,7 @@ class GetProcessRuleApi(Resource):
|
||||
# get the latest process rule
|
||||
dataset_process_rule = (
|
||||
db.session.query(DatasetProcessRule)
|
||||
.filter(DatasetProcessRule.dataset_id == document.dataset_id)
|
||||
.where(DatasetProcessRule.dataset_id == document.dataset_id)
|
||||
.order_by(DatasetProcessRule.created_at.desc())
|
||||
.limit(1)
|
||||
.one_or_none()
|
||||
@ -176,7 +176,7 @@ class DatasetDocumentListApi(Resource):
|
||||
|
||||
if search:
|
||||
search = f"%{search}%"
|
||||
query = query.filter(Document.name.like(search))
|
||||
query = query.where(Document.name.like(search))
|
||||
|
||||
if sort.startswith("-"):
|
||||
sort_logic = desc
|
||||
@ -212,7 +212,7 @@ class DatasetDocumentListApi(Resource):
|
||||
for document in documents:
|
||||
completed_segments = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(
|
||||
.where(
|
||||
DocumentSegment.completed_at.isnot(None),
|
||||
DocumentSegment.document_id == str(document.id),
|
||||
DocumentSegment.status != "re_segment",
|
||||
@ -221,7 +221,7 @@ class DatasetDocumentListApi(Resource):
|
||||
)
|
||||
total_segments = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
|
||||
.where(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
|
||||
.count()
|
||||
)
|
||||
document.completed_segments = completed_segments
|
||||
@ -417,7 +417,7 @@ class DocumentIndexingEstimateApi(DocumentResource):
|
||||
|
||||
file = (
|
||||
db.session.query(UploadFile)
|
||||
.filter(UploadFile.tenant_id == document.tenant_id, UploadFile.id == file_id)
|
||||
.where(UploadFile.tenant_id == document.tenant_id, UploadFile.id == file_id)
|
||||
.first()
|
||||
)
|
||||
|
||||
@ -492,7 +492,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
|
||||
file_id = data_source_info["upload_file_id"]
|
||||
file_detail = (
|
||||
db.session.query(UploadFile)
|
||||
.filter(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id == file_id)
|
||||
.where(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id == file_id)
|
||||
.first()
|
||||
)
|
||||
|
||||
@ -568,7 +568,7 @@ class DocumentBatchIndexingStatusApi(DocumentResource):
|
||||
for document in documents:
|
||||
completed_segments = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(
|
||||
.where(
|
||||
DocumentSegment.completed_at.isnot(None),
|
||||
DocumentSegment.document_id == str(document.id),
|
||||
DocumentSegment.status != "re_segment",
|
||||
@ -577,7 +577,7 @@ class DocumentBatchIndexingStatusApi(DocumentResource):
|
||||
)
|
||||
total_segments = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
|
||||
.where(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
|
||||
.count()
|
||||
)
|
||||
# Create a dictionary with document attributes and additional fields
|
||||
@ -611,7 +611,7 @@ class DocumentIndexingStatusApi(DocumentResource):
|
||||
|
||||
completed_segments = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(
|
||||
.where(
|
||||
DocumentSegment.completed_at.isnot(None),
|
||||
DocumentSegment.document_id == str(document_id),
|
||||
DocumentSegment.status != "re_segment",
|
||||
@ -620,7 +620,7 @@ class DocumentIndexingStatusApi(DocumentResource):
|
||||
)
|
||||
total_segments = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(DocumentSegment.document_id == str(document_id), DocumentSegment.status != "re_segment")
|
||||
.where(DocumentSegment.document_id == str(document_id), DocumentSegment.status != "re_segment")
|
||||
.count()
|
||||
)
|
||||
|
||||
@ -642,7 +642,7 @@ class DocumentIndexingStatusApi(DocumentResource):
|
||||
return marshal(document_dict, document_status_fields)
|
||||
|
||||
|
||||
class DocumentDetailApi(DocumentResource):
|
||||
class DocumentApi(DocumentResource):
|
||||
METADATA_CHOICES = {"all", "only", "without"}
|
||||
|
||||
@setup_required
|
||||
@ -730,45 +730,6 @@ class DocumentDetailApi(DocumentResource):
|
||||
|
||||
return response, 200
|
||||
|
||||
|
||||
class DocumentProcessingApi(DocumentResource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@cloud_edition_billing_rate_limit_check("knowledge")
|
||||
def patch(self, dataset_id, document_id, action):
|
||||
dataset_id = str(dataset_id)
|
||||
document_id = str(document_id)
|
||||
document = self.get_document(dataset_id, document_id)
|
||||
|
||||
# The role of the current user in the ta table must be admin, owner, dataset_operator, or editor
|
||||
if not current_user.is_dataset_editor:
|
||||
raise Forbidden()
|
||||
|
||||
if action == "pause":
|
||||
if document.indexing_status != "indexing":
|
||||
raise InvalidActionError("Document not in indexing state.")
|
||||
|
||||
document.paused_by = current_user.id
|
||||
document.paused_at = datetime.now(UTC).replace(tzinfo=None)
|
||||
document.is_paused = True
|
||||
db.session.commit()
|
||||
|
||||
elif action == "resume":
|
||||
if document.indexing_status not in {"paused", "error"}:
|
||||
raise InvalidActionError("Document not in paused or error state.")
|
||||
|
||||
document.paused_by = None
|
||||
document.paused_at = None
|
||||
document.is_paused = False
|
||||
db.session.commit()
|
||||
else:
|
||||
raise InvalidActionError()
|
||||
|
||||
return {"result": "success"}, 200
|
||||
|
||||
|
||||
class DocumentDeleteApi(DocumentResource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@ -792,6 +753,43 @@ class DocumentDeleteApi(DocumentResource):
|
||||
return {"result": "success"}, 204
|
||||
|
||||
|
||||
class DocumentProcessingApi(DocumentResource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@cloud_edition_billing_rate_limit_check("knowledge")
|
||||
def patch(self, dataset_id, document_id, action):
|
||||
dataset_id = str(dataset_id)
|
||||
document_id = str(document_id)
|
||||
document = self.get_document(dataset_id, document_id)
|
||||
|
||||
# The role of the current user in the ta table must be admin, owner, dataset_operator, or editor
|
||||
if not current_user.is_dataset_editor:
|
||||
raise Forbidden()
|
||||
|
||||
if action == "pause":
|
||||
if document.indexing_status != "indexing":
|
||||
raise InvalidActionError("Document not in indexing state.")
|
||||
|
||||
document.paused_by = current_user.id
|
||||
document.paused_at = naive_utc_now()
|
||||
document.is_paused = True
|
||||
db.session.commit()
|
||||
|
||||
elif action == "resume":
|
||||
if document.indexing_status not in {"paused", "error"}:
|
||||
raise InvalidActionError("Document not in paused or error state.")
|
||||
|
||||
document.paused_by = None
|
||||
document.paused_at = None
|
||||
document.is_paused = False
|
||||
db.session.commit()
|
||||
else:
|
||||
raise InvalidActionError()
|
||||
|
||||
return {"result": "success"}, 200
|
||||
|
||||
|
||||
class DocumentMetadataApi(DocumentResource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@ -830,7 +828,7 @@ class DocumentMetadataApi(DocumentResource):
|
||||
document.doc_metadata[key] = value
|
||||
|
||||
document.doc_type = doc_type
|
||||
document.updated_at = datetime.now(UTC).replace(tzinfo=None)
|
||||
document.updated_at = naive_utc_now()
|
||||
db.session.commit()
|
||||
|
||||
return {"result": "success", "message": "Document metadata updated."}, 200
|
||||
@ -970,7 +968,7 @@ class DocumentRetryApi(DocumentResource):
|
||||
raise DocumentAlreadyFinishedError()
|
||||
retry_documents.append(document)
|
||||
except Exception:
|
||||
logging.exception(f"Failed to retry document, document id: {document_id}")
|
||||
logging.exception("Failed to retry document, document id: %s", document_id)
|
||||
continue
|
||||
# retry document
|
||||
DocumentService.retry_document(dataset_id, retry_documents)
|
||||
@ -1037,11 +1035,10 @@ api.add_resource(
|
||||
api.add_resource(DocumentBatchIndexingEstimateApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-estimate")
|
||||
api.add_resource(DocumentBatchIndexingStatusApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-status")
|
||||
api.add_resource(DocumentIndexingStatusApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-status")
|
||||
api.add_resource(DocumentDetailApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
|
||||
api.add_resource(DocumentApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
|
||||
api.add_resource(
|
||||
DocumentProcessingApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/<string:action>"
|
||||
)
|
||||
api.add_resource(DocumentDeleteApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
|
||||
api.add_resource(DocumentMetadataApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/metadata")
|
||||
api.add_resource(DocumentStatusApi, "/datasets/<uuid:dataset_id>/documents/status/<string:action>/batch")
|
||||
api.add_resource(DocumentPauseApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/pause")
|
||||
|
||||
@ -1,6 +1,5 @@
|
||||
import uuid
|
||||
|
||||
import pandas as pd
|
||||
from flask import request
|
||||
from flask_login import current_user
|
||||
from flask_restful import Resource, marshal, reqparse
|
||||
@ -14,8 +13,6 @@ from controllers.console.datasets.error import (
|
||||
ChildChunkDeleteIndexError,
|
||||
ChildChunkIndexingError,
|
||||
InvalidActionError,
|
||||
NoFileUploadedError,
|
||||
TooManyFilesError,
|
||||
)
|
||||
from controllers.console.wraps import (
|
||||
account_initialization_required,
|
||||
@ -32,6 +29,7 @@ from extensions.ext_redis import redis_client
|
||||
from fields.segment_fields import child_chunk_fields, segment_fields
|
||||
from libs.login import login_required
|
||||
from models.dataset import ChildChunk, DocumentSegment
|
||||
from models.model import UploadFile
|
||||
from services.dataset_service import DatasetService, DocumentService, SegmentService
|
||||
from services.entities.knowledge_entities.knowledge_entities import ChildChunkUpdateArgs, SegmentUpdateArgs
|
||||
from services.errors.chunk import ChildChunkDeleteIndexError as ChildChunkDeleteIndexServiceError
|
||||
@ -78,7 +76,7 @@ class DatasetDocumentSegmentListApi(Resource):
|
||||
|
||||
query = (
|
||||
select(DocumentSegment)
|
||||
.filter(
|
||||
.where(
|
||||
DocumentSegment.document_id == str(document_id),
|
||||
DocumentSegment.tenant_id == current_user.current_tenant_id,
|
||||
)
|
||||
@ -86,19 +84,19 @@ class DatasetDocumentSegmentListApi(Resource):
|
||||
)
|
||||
|
||||
if status_list:
|
||||
query = query.filter(DocumentSegment.status.in_(status_list))
|
||||
query = query.where(DocumentSegment.status.in_(status_list))
|
||||
|
||||
if hit_count_gte is not None:
|
||||
query = query.filter(DocumentSegment.hit_count >= hit_count_gte)
|
||||
query = query.where(DocumentSegment.hit_count >= hit_count_gte)
|
||||
|
||||
if keyword:
|
||||
query = query.where(DocumentSegment.content.ilike(f"%{keyword}%"))
|
||||
|
||||
if args["enabled"].lower() != "all":
|
||||
if args["enabled"].lower() == "true":
|
||||
query = query.filter(DocumentSegment.enabled == True)
|
||||
query = query.where(DocumentSegment.enabled == True)
|
||||
elif args["enabled"].lower() == "false":
|
||||
query = query.filter(DocumentSegment.enabled == False)
|
||||
query = query.where(DocumentSegment.enabled == False)
|
||||
|
||||
segments = db.paginate(select=query, page=page, per_page=limit, max_per_page=100, error_out=False)
|
||||
|
||||
@ -184,7 +182,7 @@ class DatasetDocumentSegmentApi(Resource):
|
||||
raise ProviderNotInitializeError(ex.description)
|
||||
segment_ids = request.args.getlist("segment_id")
|
||||
|
||||
document_indexing_cache_key = "document_{}_indexing".format(document.id)
|
||||
document_indexing_cache_key = f"document_{document.id}_indexing"
|
||||
cache_result = redis_client.get(document_indexing_cache_key)
|
||||
if cache_result is not None:
|
||||
raise InvalidActionError("Document is being indexed, please try again later")
|
||||
@ -285,7 +283,7 @@ class DatasetDocumentSegmentUpdateApi(Resource):
|
||||
segment_id = str(segment_id)
|
||||
segment = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
|
||||
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
|
||||
.first()
|
||||
)
|
||||
if not segment:
|
||||
@ -331,7 +329,7 @@ class DatasetDocumentSegmentUpdateApi(Resource):
|
||||
segment_id = str(segment_id)
|
||||
segment = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
|
||||
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
|
||||
.first()
|
||||
)
|
||||
if not segment:
|
||||
@ -365,37 +363,28 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
|
||||
document = DocumentService.get_document(dataset_id, document_id)
|
||||
if not document:
|
||||
raise NotFound("Document not found.")
|
||||
# get file from request
|
||||
file = request.files["file"]
|
||||
# check file
|
||||
if "file" not in request.files:
|
||||
raise NoFileUploadedError()
|
||||
|
||||
if len(request.files) > 1:
|
||||
raise TooManyFilesError()
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("upload_file_id", type=str, required=True, nullable=False, location="json")
|
||||
args = parser.parse_args()
|
||||
upload_file_id = args["upload_file_id"]
|
||||
|
||||
upload_file = db.session.query(UploadFile).where(UploadFile.id == upload_file_id).first()
|
||||
if not upload_file:
|
||||
raise NotFound("UploadFile not found.")
|
||||
|
||||
# check file type
|
||||
if not file.filename or not file.filename.lower().endswith(".csv"):
|
||||
if not upload_file.name or not upload_file.name.lower().endswith(".csv"):
|
||||
raise ValueError("Invalid file type. Only CSV files are allowed")
|
||||
|
||||
try:
|
||||
# Skip the first row
|
||||
df = pd.read_csv(file)
|
||||
result = []
|
||||
for index, row in df.iterrows():
|
||||
if document.doc_form == "qa_model":
|
||||
data = {"content": row.iloc[0], "answer": row.iloc[1]}
|
||||
else:
|
||||
data = {"content": row.iloc[0]}
|
||||
result.append(data)
|
||||
if len(result) == 0:
|
||||
raise ValueError("The CSV file is empty.")
|
||||
# async job
|
||||
job_id = str(uuid.uuid4())
|
||||
indexing_cache_key = "segment_batch_import_{}".format(str(job_id))
|
||||
indexing_cache_key = f"segment_batch_import_{str(job_id)}"
|
||||
# send batch add segments task
|
||||
redis_client.setnx(indexing_cache_key, "waiting")
|
||||
batch_create_segment_to_index_task.delay(
|
||||
str(job_id), result, dataset_id, document_id, current_user.current_tenant_id, current_user.id
|
||||
str(job_id), upload_file_id, dataset_id, document_id, current_user.current_tenant_id, current_user.id
|
||||
)
|
||||
except Exception as e:
|
||||
return {"error": str(e)}, 500
|
||||
@ -406,7 +395,7 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
|
||||
@account_initialization_required
|
||||
def get(self, job_id):
|
||||
job_id = str(job_id)
|
||||
indexing_cache_key = "segment_batch_import_{}".format(job_id)
|
||||
indexing_cache_key = f"segment_batch_import_{job_id}"
|
||||
cache_result = redis_client.get(indexing_cache_key)
|
||||
if cache_result is None:
|
||||
raise ValueError("The job does not exist.")
|
||||
@ -436,7 +425,7 @@ class ChildChunkAddApi(Resource):
|
||||
segment_id = str(segment_id)
|
||||
segment = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
|
||||
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
|
||||
.first()
|
||||
)
|
||||
if not segment:
|
||||
@ -493,7 +482,7 @@ class ChildChunkAddApi(Resource):
|
||||
segment_id = str(segment_id)
|
||||
segment = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
|
||||
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
|
||||
.first()
|
||||
)
|
||||
if not segment:
|
||||
@ -540,7 +529,7 @@ class ChildChunkAddApi(Resource):
|
||||
segment_id = str(segment_id)
|
||||
segment = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
|
||||
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
|
||||
.first()
|
||||
)
|
||||
if not segment:
|
||||
@ -586,7 +575,7 @@ class ChildChunkUpdateApi(Resource):
|
||||
segment_id = str(segment_id)
|
||||
segment = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
|
||||
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
|
||||
.first()
|
||||
)
|
||||
if not segment:
|
||||
@ -595,7 +584,7 @@ class ChildChunkUpdateApi(Resource):
|
||||
child_chunk_id = str(child_chunk_id)
|
||||
child_chunk = (
|
||||
db.session.query(ChildChunk)
|
||||
.filter(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id)
|
||||
.where(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id)
|
||||
.first()
|
||||
)
|
||||
if not child_chunk:
|
||||
@ -635,7 +624,7 @@ class ChildChunkUpdateApi(Resource):
|
||||
segment_id = str(segment_id)
|
||||
segment = (
|
||||
db.session.query(DocumentSegment)
|
||||
.filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
|
||||
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
|
||||
.first()
|
||||
)
|
||||
if not segment:
|
||||
@ -644,7 +633,7 @@ class ChildChunkUpdateApi(Resource):
|
||||
child_chunk_id = str(child_chunk_id)
|
||||
child_chunk = (
|
||||
db.session.query(ChildChunk)
|
||||
.filter(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id)
|
||||
.where(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id)
|
||||
.first()
|
||||
)
|
||||
if not child_chunk:
|
||||
|
||||
@ -25,12 +25,6 @@ class UnsupportedFileTypeError(BaseHTTPException):
|
||||
code = 415
|
||||
|
||||
|
||||
class HighQualityDatasetOnlyError(BaseHTTPException):
|
||||
error_code = "high_quality_dataset_only"
|
||||
description = "Current operation only supports 'high-quality' datasets."
|
||||
code = 400
|
||||
|
||||
|
||||
class DatasetNotInitializedError(BaseHTTPException):
|
||||
error_code = "dataset_not_initialized"
|
||||
description = "The dataset is still being initialized or indexing. Please wait a moment."
|
||||
|
||||
@ -22,8 +22,8 @@ class DatasetMetadataCreateApi(Resource):
|
||||
@marshal_with(dataset_metadata_fields)
|
||||
def post(self, dataset_id):
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("type", type=str, required=True, nullable=True, location="json")
|
||||
parser.add_argument("name", type=str, required=True, nullable=True, location="json")
|
||||
parser.add_argument("type", type=str, required=True, nullable=False, location="json")
|
||||
parser.add_argument("name", type=str, required=True, nullable=False, location="json")
|
||||
args = parser.parse_args()
|
||||
metadata_args = MetadataArgs(**args)
|
||||
|
||||
@ -56,7 +56,7 @@ class DatasetMetadataApi(Resource):
|
||||
@marshal_with(dataset_metadata_fields)
|
||||
def patch(self, dataset_id, metadata_id):
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("name", type=str, required=True, nullable=True, location="json")
|
||||
parser.add_argument("name", type=str, required=True, nullable=False, location="json")
|
||||
args = parser.parse_args()
|
||||
|
||||
dataset_id_str = str(dataset_id)
|
||||
@ -127,7 +127,7 @@ class DocumentMetadataEditApi(Resource):
|
||||
DatasetService.check_dataset_permission(dataset, current_user)
|
||||
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("operation_data", type=list, required=True, nullable=True, location="json")
|
||||
parser.add_argument("operation_data", type=list, required=True, nullable=False, location="json")
|
||||
args = parser.parse_args()
|
||||
metadata_args = MetadataOperationData(**args)
|
||||
|
||||
|
||||
api/controllers/console/datasets/upload_file.py (new file, 62 lines)
@ -0,0 +1,62 @@
from flask_login import current_user
from flask_restful import Resource
from werkzeug.exceptions import NotFound

from controllers.console import api
from controllers.console.wraps import (
account_initialization_required,
setup_required,
)
from core.file import helpers as file_helpers
from extensions.ext_database import db
from models.dataset import Dataset
from models.model import UploadFile
from services.dataset_service import DocumentService

class UploadFileApi(Resource):
@setup_required
@account_initialization_required
def get(self, dataset_id, document_id):
"""Get upload file."""
# check dataset
dataset_id = str(dataset_id)
dataset = (
db.session.query(Dataset)
.filter(Dataset.tenant_id == current_user.current_tenant_id, Dataset.id == dataset_id)
.first()
)
if not dataset:
raise NotFound("Dataset not found.")
# check document
document_id = str(document_id)
document = DocumentService.get_document(dataset.id, document_id)
if not document:
raise NotFound("Document not found.")
# check upload file
if document.data_source_type != "upload_file":
raise ValueError(f"Document data source type ({document.data_source_type}) is not upload_file.")
data_source_info = document.data_source_info_dict
if data_source_info and "upload_file_id" in data_source_info:
file_id = data_source_info["upload_file_id"]
upload_file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first()
if not upload_file:
raise NotFound("UploadFile not found.")
else:
raise ValueError("Upload file id not found in document data source info.")

url = file_helpers.get_signed_file_url(upload_file_id=upload_file.id)
return {
"id": upload_file.id,
"name": upload_file.name,
"size": upload_file.size,
"extension": upload_file.extension,
"url": url,
"download_url": f"{url}&as_attachment=true",
"mime_type": upload_file.mime_type,
"created_by": upload_file.created_by,
"created_at": upload_file.created_at.timestamp(),
}, 200

api.add_resource(UploadFileApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/upload-file")
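For orientation, a rough sketch of how the new upload-file endpoint could be called from a console client; the host, IDs, and auth header below are placeholders and not values taken from this diff:

```python
# Hypothetical usage sketch; assumes a valid console API session/token.
import requests

BASE = "https://dify.example.com/console/api"              # placeholder host
dataset_id = "00000000-0000-0000-0000-000000000000"        # placeholder
document_id = "11111111-1111-1111-1111-111111111111"       # placeholder

resp = requests.get(
    f"{BASE}/datasets/{dataset_id}/documents/{document_id}/upload-file",
    headers={"Authorization": "Bearer <console-token>"},   # placeholder auth
)
resp.raise_for_status()
info = resp.json()
print(info["name"], info["size"], info["download_url"])
```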
@ -4,7 +4,7 @@ from controllers.console import api
from controllers.console.datasets.error import WebsiteCrawlError
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required
from services.website_service import WebsiteService
from services.website_service import WebsiteCrawlApiRequest, WebsiteCrawlStatusApiRequest, WebsiteService

class WebsiteCrawlApi(Resource):

@ -24,10 +24,16 @@ class WebsiteCrawlApi(Resource):
parser.add_argument("url", type=str, required=True, nullable=True, location="json")
parser.add_argument("options", type=dict, required=True, nullable=True, location="json")
args = parser.parse_args()
WebsiteService.document_create_args_validate(args)
# crawl url

# Create typed request and validate
try:
result = WebsiteService.crawl_url(args)
api_request = WebsiteCrawlApiRequest.from_args(args)
except ValueError as e:
raise WebsiteCrawlError(str(e))

# Crawl URL using typed request
try:
result = WebsiteService.crawl_url(api_request)
except Exception as e:
raise WebsiteCrawlError(str(e))
return result, 200

@ -43,9 +49,16 @@ class WebsiteCrawlStatusApi(Resource):
"provider", type=str, choices=["firecrawl", "watercrawl", "jinareader"], required=True, location="args"
)
args = parser.parse_args()
# get crawl status

# Create typed request and validate
try:
result = WebsiteService.get_crawl_status(job_id, args["provider"])
api_request = WebsiteCrawlStatusApiRequest.from_args(args, job_id)
except ValueError as e:
raise WebsiteCrawlError(str(e))

# Get crawl status using typed request
try:
result = WebsiteService.get_crawl_status_typed(api_request)
except Exception as e:
raise WebsiteCrawlError(str(e))
return result, 200
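The crawl endpoints now build typed request objects via `WebsiteCrawlApiRequest.from_args(...)` and `WebsiteCrawlStatusApiRequest.from_args(...)` before calling the service layer. Their definitions live in `services/website_service.py` and are not shown in this compare; a rough sketch of the shape this controller code implies, with field names as assumptions:

```python
# Assumed sketch only -- the real classes are defined in services/website_service.py.
from dataclasses import dataclass


@dataclass
class WebsiteCrawlApiRequest:
    provider: str
    url: str
    options: dict

    @classmethod
    def from_args(cls, args: dict) -> "WebsiteCrawlApiRequest":
        # Expected to raise ValueError on missing or invalid fields, which the
        # controller converts into WebsiteCrawlError above.
        return cls(provider=args["provider"], url=args["url"], options=args["options"])
```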
@ -127,7 +127,7 @@ class EducationActivateLimitError(BaseHTTPException):
code = 429

class CompilanceRateLimitError(BaseHTTPException):
error_code = "compilance_rate_limit"
class ComplianceRateLimitError(BaseHTTPException):
error_code = "compliance_rate_limit"
description = "Rate limit exceeded for downloading compliance report."
code = 429
@ -1,5 +1,4 @@
|
||||
import logging
|
||||
from datetime import UTC, datetime
|
||||
|
||||
from flask_login import current_user
|
||||
from flask_restful import reqparse
|
||||
@ -27,6 +26,7 @@ from core.errors.error import (
|
||||
from core.model_runtime.errors.invoke import InvokeError
|
||||
from extensions.ext_database import db
|
||||
from libs import helper
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.helper import uuid_value
|
||||
from models.model import AppMode
|
||||
from services.app_generate_service import AppGenerateService
|
||||
@ -51,7 +51,7 @@ class CompletionApi(InstalledAppResource):
|
||||
streaming = args["response_mode"] == "streaming"
|
||||
args["auto_generate_name"] = False
|
||||
|
||||
installed_app.last_used_at = datetime.now(UTC).replace(tzinfo=None)
|
||||
installed_app.last_used_at = naive_utc_now()
|
||||
db.session.commit()
|
||||
|
||||
try:
|
||||
@ -111,7 +111,7 @@ class ChatApi(InstalledAppResource):
|
||||
|
||||
args["auto_generate_name"] = False
|
||||
|
||||
installed_app.last_used_at = datetime.now(UTC).replace(tzinfo=None)
|
||||
installed_app.last_used_at = naive_utc_now()
|
||||
db.session.commit()
|
||||
|
||||
try:
|
||||
|
||||
@ -1,5 +1,4 @@
|
||||
import logging
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
from flask import request
|
||||
@ -13,6 +12,7 @@ from controllers.console.explore.wraps import InstalledAppResource
|
||||
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
|
||||
from extensions.ext_database import db
|
||||
from fields.installed_app_fields import installed_app_list_fields
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.login import login_required
|
||||
from models import App, InstalledApp, RecommendedApp
|
||||
from services.account_service import TenantService
|
||||
@ -34,11 +34,11 @@ class InstalledAppsListApi(Resource):
|
||||
if app_id:
|
||||
installed_apps = (
|
||||
db.session.query(InstalledApp)
|
||||
.filter(and_(InstalledApp.tenant_id == current_tenant_id, InstalledApp.app_id == app_id))
|
||||
.where(and_(InstalledApp.tenant_id == current_tenant_id, InstalledApp.app_id == app_id))
|
||||
.all()
|
||||
)
|
||||
else:
|
||||
installed_apps = db.session.query(InstalledApp).filter(InstalledApp.tenant_id == current_tenant_id).all()
|
||||
installed_apps = db.session.query(InstalledApp).where(InstalledApp.tenant_id == current_tenant_id).all()
|
||||
|
||||
current_user.role = TenantService.get_user_role(current_user, current_user.current_tenant)
|
||||
installed_app_list: list[dict[str, Any]] = [
|
||||
@ -58,23 +58,40 @@ class InstalledAppsListApi(Resource):
|
||||
# filter out apps that user doesn't have access to
|
||||
if FeatureService.get_system_features().webapp_auth.enabled:
|
||||
user_id = current_user.id
|
||||
res = []
|
||||
app_ids = [installed_app["app"].id for installed_app in installed_app_list]
|
||||
webapp_settings = EnterpriseService.WebAppAuth.batch_get_app_access_mode_by_id(app_ids)
|
||||
|
||||
# Pre-filter out apps without setting or with sso_verified
|
||||
filtered_installed_apps = []
|
||||
app_id_to_app_code = {}
|
||||
|
||||
for installed_app in installed_app_list:
|
||||
webapp_setting = webapp_settings.get(installed_app["app"].id)
|
||||
if not webapp_setting:
|
||||
app_id = installed_app["app"].id
|
||||
webapp_setting = webapp_settings.get(app_id)
|
||||
if not webapp_setting or webapp_setting.access_mode == "sso_verified":
|
||||
continue
|
||||
if webapp_setting.access_mode == "sso_verified":
|
||||
continue
|
||||
app_code = AppService.get_app_code_by_id(str(installed_app["app"].id))
|
||||
if EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(
|
||||
user_id=user_id,
|
||||
app_code=app_code,
|
||||
):
|
||||
app_code = AppService.get_app_code_by_id(str(app_id))
|
||||
app_id_to_app_code[app_id] = app_code
|
||||
filtered_installed_apps.append(installed_app)
|
||||
|
||||
app_codes = list(app_id_to_app_code.values())
|
||||
|
||||
# Batch permission check
|
||||
permissions = EnterpriseService.WebAppAuth.batch_is_user_allowed_to_access_webapps(
|
||||
user_id=user_id,
|
||||
app_codes=app_codes,
|
||||
)
|
||||
|
||||
# Keep only allowed apps
|
||||
res = []
|
||||
for installed_app in filtered_installed_apps:
|
||||
app_id = installed_app["app"].id
|
||||
app_code = app_id_to_app_code[app_id]
|
||||
if permissions.get(app_code):
|
||||
res.append(installed_app)
|
||||
|
||||
installed_app_list = res
|
||||
logger.debug(f"installed_app_list: {installed_app_list}, user_id: {user_id}")
|
||||
logger.debug("installed_app_list: %s, user_id: %s", installed_app_list, user_id)
|
||||
|
||||
installed_app_list.sort(
|
||||
key=lambda app: (
|
||||
@ -94,12 +111,12 @@ class InstalledAppsListApi(Resource):
|
||||
parser.add_argument("app_id", type=str, required=True, help="Invalid app_id")
|
||||
args = parser.parse_args()
|
||||
|
||||
recommended_app = db.session.query(RecommendedApp).filter(RecommendedApp.app_id == args["app_id"]).first()
|
||||
recommended_app = db.session.query(RecommendedApp).where(RecommendedApp.app_id == args["app_id"]).first()
|
||||
if recommended_app is None:
|
||||
raise NotFound("App not found")
|
||||
|
||||
current_tenant_id = current_user.current_tenant_id
|
||||
app = db.session.query(App).filter(App.id == args["app_id"]).first()
|
||||
app = db.session.query(App).where(App.id == args["app_id"]).first()
|
||||
|
||||
if app is None:
|
||||
raise NotFound("App not found")
|
||||
@ -109,7 +126,7 @@ class InstalledAppsListApi(Resource):
|
||||
|
||||
installed_app = (
|
||||
db.session.query(InstalledApp)
|
||||
.filter(and_(InstalledApp.app_id == args["app_id"], InstalledApp.tenant_id == current_tenant_id))
|
||||
.where(and_(InstalledApp.app_id == args["app_id"], InstalledApp.tenant_id == current_tenant_id))
|
||||
.first()
|
||||
)
|
||||
|
||||
@ -122,7 +139,7 @@ class InstalledAppsListApi(Resource):
|
||||
tenant_id=current_tenant_id,
|
||||
app_owner_tenant_id=app.tenant_id,
|
||||
is_pinned=False,
|
||||
last_used_at=datetime.now(UTC).replace(tzinfo=None),
|
||||
last_used_at=naive_utc_now(),
|
||||
)
|
||||
db.session.add(new_installed_app)
|
||||
db.session.commit()
|
||||
|
||||
@ -5,7 +5,6 @@ from flask_restful import marshal_with, reqparse
|
||||
from flask_restful.inputs import int_range
|
||||
from werkzeug.exceptions import InternalServerError, NotFound
|
||||
|
||||
import services
|
||||
from controllers.console.app.error import (
|
||||
AppMoreLikeThisDisabledError,
|
||||
CompletionRequestError,
|
||||
@ -29,7 +28,11 @@ from models.model import AppMode
|
||||
from services.app_generate_service import AppGenerateService
|
||||
from services.errors.app import MoreLikeThisDisabledError
|
||||
from services.errors.conversation import ConversationNotExistsError
|
||||
from services.errors.message import MessageNotExistsError, SuggestedQuestionsAfterAnswerDisabledError
|
||||
from services.errors.message import (
|
||||
FirstMessageNotExistsError,
|
||||
MessageNotExistsError,
|
||||
SuggestedQuestionsAfterAnswerDisabledError,
|
||||
)
|
||||
from services.message_service import MessageService
|
||||
|
||||
|
||||
@ -52,9 +55,9 @@ class MessageListApi(InstalledAppResource):
|
||||
return MessageService.pagination_by_first_id(
|
||||
app_model, current_user, args["conversation_id"], args["first_id"], args["limit"]
|
||||
)
|
||||
except services.errors.conversation.ConversationNotExistsError:
|
||||
except ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
except services.errors.message.FirstMessageNotExistsError:
|
||||
except FirstMessageNotExistsError:
|
||||
raise NotFound("First Message Not Exists.")
|
||||
|
||||
|
||||
@ -77,7 +80,7 @@ class MessageFeedbackApi(InstalledAppResource):
|
||||
rating=args.get("rating"),
|
||||
content=args.get("content"),
|
||||
)
|
||||
except services.errors.message.MessageNotExistsError:
|
||||
except MessageNotExistsError:
|
||||
raise NotFound("Message Not Exists.")
|
||||
|
||||
return {"result": "success"}
|
||||
|
||||
@ -28,7 +28,7 @@ def installed_app_required(view=None):
|
||||
|
||||
installed_app = (
|
||||
db.session.query(InstalledApp)
|
||||
.filter(
|
||||
.where(
|
||||
InstalledApp.id == str(installed_app_id), InstalledApp.tenant_id == current_user.current_tenant_id
|
||||
)
|
||||
.first()
|
||||
|
||||
@ -49,7 +49,6 @@ class FileApi(Resource):
|
||||
@marshal_with(file_fields)
|
||||
@cloud_edition_billing_resource_check("documents")
|
||||
def post(self):
|
||||
file = request.files["file"]
|
||||
source_str = request.form.get("source")
|
||||
source: Literal["datasets"] | None = "datasets" if source_str == "datasets" else None
|
||||
|
||||
@ -58,6 +57,7 @@ class FileApi(Resource):
|
||||
|
||||
if len(request.files) > 1:
|
||||
raise TooManyFilesError()
|
||||
file = request.files["file"]
|
||||
|
||||
if not file.filename:
|
||||
raise FilenameNotExistsError
|
||||
|
||||
@ -34,7 +34,7 @@ class VersionApi(Resource):
|
||||
try:
|
||||
response = requests.get(check_update_url, {"current_version": args.get("current_version")})
|
||||
except Exception as error:
|
||||
logging.warning("Check update version error: {}.".format(str(error)))
|
||||
logging.warning("Check update version error: %s.", str(error))
|
||||
result["version"] = args.get("current_version")
|
||||
return result
|
||||
|
||||
@ -55,7 +55,7 @@ def _has_new_version(*, latest_version: str, current_version: str) -> bool:
|
||||
# Compare versions
|
||||
return latest > current
|
||||
except version.InvalidVersion:
|
||||
logging.warning(f"Invalid version format: latest={latest_version}, current={current_version}")
|
||||
logging.warning("Invalid version format: latest=%s, current=%s", latest_version, current_version)
|
||||
return False
|
||||
|
||||
|
||||
|
||||
@ -21,7 +21,7 @@ def plugin_permission_required(
|
||||
with Session(db.engine) as session:
|
||||
permission = (
|
||||
session.query(TenantPluginPermission)
|
||||
.filter(
|
||||
.where(
|
||||
TenantPluginPermission.tenant_id == tenant_id,
|
||||
)
|
||||
.first()
|
||||
|
||||
@ -1,13 +1,21 @@
|
||||
import datetime
|
||||
|
||||
import pytz
|
||||
from flask import request
|
||||
from flask_login import current_user
|
||||
from flask_restful import Resource, fields, marshal_with, reqparse
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from configs import dify_config
|
||||
from constants.languages import supported_language
|
||||
from controllers.console import api
|
||||
from controllers.console.auth.error import (
|
||||
EmailAlreadyInUseError,
|
||||
EmailChangeLimitError,
|
||||
EmailCodeError,
|
||||
InvalidEmailError,
|
||||
InvalidTokenError,
|
||||
)
|
||||
from controllers.console.error import AccountInFreezeError, AccountNotFound, EmailSendIpLimitError
|
||||
from controllers.console.workspace.error import (
|
||||
AccountAlreadyInitedError,
|
||||
CurrentPasswordIncorrectError,
|
||||
@ -18,15 +26,18 @@ from controllers.console.workspace.error import (
|
||||
from controllers.console.wraps import (
|
||||
account_initialization_required,
|
||||
cloud_edition_billing_enabled,
|
||||
enable_change_email,
|
||||
enterprise_license_required,
|
||||
only_edition_cloud,
|
||||
setup_required,
|
||||
)
|
||||
from extensions.ext_database import db
|
||||
from fields.member_fields import account_fields
|
||||
from libs.helper import TimestampField, timezone
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.helper import TimestampField, email, extract_remote_ip, timezone
|
||||
from libs.login import login_required
|
||||
from models import AccountIntegrate, InvitationCode
|
||||
from models.account import Account
|
||||
from services.account_service import AccountService
|
||||
from services.billing_service import BillingService
|
||||
from services.errors.account import CurrentPasswordIncorrectError as ServiceCurrentPasswordIncorrectError
|
||||
@ -57,7 +68,7 @@ class AccountInitApi(Resource):
|
||||
# check invitation code
|
||||
invitation_code = (
|
||||
db.session.query(InvitationCode)
|
||||
.filter(
|
||||
.where(
|
||||
InvitationCode.code == args["invitation_code"],
|
||||
InvitationCode.status == "unused",
|
||||
)
|
||||
@ -68,7 +79,7 @@ class AccountInitApi(Resource):
|
||||
raise InvalidInvitationCodeError()
|
||||
|
||||
invitation_code.status = "used"
|
||||
invitation_code.used_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
|
||||
invitation_code.used_at = naive_utc_now()
|
||||
invitation_code.used_by_tenant_id = account.current_tenant_id
|
||||
invitation_code.used_by_account_id = account.id
|
||||
|
||||
@ -76,7 +87,7 @@ class AccountInitApi(Resource):
|
||||
account.timezone = args["timezone"]
|
||||
account.interface_theme = "light"
|
||||
account.status = "active"
|
||||
account.initialized_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
|
||||
account.initialized_at = naive_utc_now()
|
||||
db.session.commit()
|
||||
|
||||
return {"result": "success"}
|
||||
@ -217,7 +228,7 @@ class AccountIntegrateApi(Resource):
|
||||
def get(self):
|
||||
account = current_user
|
||||
|
||||
account_integrates = db.session.query(AccountIntegrate).filter(AccountIntegrate.account_id == account.id).all()
|
||||
account_integrates = db.session.query(AccountIntegrate).where(AccountIntegrate.account_id == account.id).all()
|
||||
|
||||
base_url = request.url_root.rstrip("/")
|
||||
oauth_base_path = "/console/api/oauth/login"
|
||||
@ -369,6 +380,143 @@ class EducationAutoCompleteApi(Resource):
|
||||
return BillingService.EducationIdentity.autocomplete(args["keywords"], args["page"], args["limit"])
|
||||
|
||||
|
||||
class ChangeEmailSendEmailApi(Resource):
|
||||
@enable_change_email
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def post(self):
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("email", type=email, required=True, location="json")
|
||||
parser.add_argument("language", type=str, required=False, location="json")
|
||||
parser.add_argument("phase", type=str, required=False, location="json")
|
||||
parser.add_argument("token", type=str, required=False, location="json")
|
||||
args = parser.parse_args()
|
||||
|
||||
ip_address = extract_remote_ip(request)
|
||||
if AccountService.is_email_send_ip_limit(ip_address):
|
||||
raise EmailSendIpLimitError()
|
||||
|
||||
if args["language"] is not None and args["language"] == "zh-Hans":
|
||||
language = "zh-Hans"
|
||||
else:
|
||||
language = "en-US"
|
||||
account = None
|
||||
user_email = args["email"]
|
||||
if args["phase"] is not None and args["phase"] == "new_email":
|
||||
if args["token"] is None:
|
||||
raise InvalidTokenError()
|
||||
|
||||
reset_data = AccountService.get_change_email_data(args["token"])
|
||||
if reset_data is None:
|
||||
raise InvalidTokenError()
|
||||
user_email = reset_data.get("email", "")
|
||||
|
||||
if user_email != current_user.email:
|
||||
raise InvalidEmailError()
|
||||
else:
|
||||
with Session(db.engine) as session:
|
||||
account = session.execute(select(Account).filter_by(email=args["email"])).scalar_one_or_none()
|
||||
if account is None:
|
||||
raise AccountNotFound()
|
||||
|
||||
token = AccountService.send_change_email_email(
|
||||
account=account, email=args["email"], old_email=user_email, language=language, phase=args["phase"]
|
||||
)
|
||||
return {"result": "success", "data": token}
|
||||
|
||||
|
||||
class ChangeEmailCheckApi(Resource):
|
||||
@enable_change_email
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def post(self):
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("email", type=email, required=True, location="json")
|
||||
parser.add_argument("code", type=str, required=True, location="json")
|
||||
parser.add_argument("token", type=str, required=True, nullable=False, location="json")
|
||||
args = parser.parse_args()
|
||||
|
||||
user_email = args["email"]
|
||||
|
||||
is_change_email_error_rate_limit = AccountService.is_change_email_error_rate_limit(args["email"])
|
||||
if is_change_email_error_rate_limit:
|
||||
raise EmailChangeLimitError()
|
||||
|
||||
token_data = AccountService.get_change_email_data(args["token"])
|
||||
if token_data is None:
|
||||
raise InvalidTokenError()
|
||||
|
||||
if user_email != token_data.get("email"):
|
||||
raise InvalidEmailError()
|
||||
|
||||
if args["code"] != token_data.get("code"):
|
||||
AccountService.add_change_email_error_rate_limit(args["email"])
|
||||
raise EmailCodeError()
|
||||
|
||||
# Verified, revoke the first token
|
||||
AccountService.revoke_change_email_token(args["token"])
|
||||
|
||||
# Refresh token data by generating a new token
|
||||
_, new_token = AccountService.generate_change_email_token(
|
||||
user_email, code=args["code"], old_email=token_data.get("old_email"), additional_data={}
|
||||
)
|
||||
|
||||
AccountService.reset_change_email_error_rate_limit(args["email"])
|
||||
return {"is_valid": True, "email": token_data.get("email"), "token": new_token}
|
||||
|
||||
|
||||
class ChangeEmailResetApi(Resource):
|
||||
@enable_change_email
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@marshal_with(account_fields)
|
||||
def post(self):
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("new_email", type=email, required=True, location="json")
|
||||
parser.add_argument("token", type=str, required=True, nullable=False, location="json")
|
||||
args = parser.parse_args()
|
||||
|
||||
if AccountService.is_account_in_freeze(args["new_email"]):
|
||||
raise AccountInFreezeError()
|
||||
|
||||
if not AccountService.check_email_unique(args["new_email"]):
|
||||
raise EmailAlreadyInUseError()
|
||||
|
||||
reset_data = AccountService.get_change_email_data(args["token"])
|
||||
if not reset_data:
|
||||
raise InvalidTokenError()
|
||||
|
||||
AccountService.revoke_change_email_token(args["token"])
|
||||
|
||||
old_email = reset_data.get("old_email", "")
|
||||
if current_user.email != old_email:
|
||||
raise AccountNotFound()
|
||||
|
||||
updated_account = AccountService.update_account_email(current_user, email=args["new_email"])
|
||||
|
||||
AccountService.send_change_email_completed_notify_email(
|
||||
email=args["new_email"],
|
||||
)
|
||||
|
||||
return updated_account
|
||||
|
||||
|
||||
class CheckEmailUnique(Resource):
|
||||
@setup_required
|
||||
def post(self):
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("email", type=email, required=True, location="json")
|
||||
args = parser.parse_args()
|
||||
if AccountService.is_account_in_freeze(args["email"]):
|
||||
raise AccountInFreezeError()
|
||||
if not AccountService.check_email_unique(args["email"]):
|
||||
raise EmailAlreadyInUseError()
|
||||
return {"result": "success"}
|
||||
|
||||
|
||||
# Register API resources
api.add_resource(AccountInitApi, "/account/init")
api.add_resource(AccountProfileApi, "/account/profile")
@@ -385,5 +533,10 @@ api.add_resource(AccountDeleteUpdateFeedbackApi, "/account/delete/feedback")
api.add_resource(EducationVerifyApi, "/account/education/verify")
api.add_resource(EducationApi, "/account/education")
api.add_resource(EducationAutoCompleteApi, "/account/education/autocomplete")
# Change email
api.add_resource(ChangeEmailSendEmailApi, "/account/change-email")
api.add_resource(ChangeEmailCheckApi, "/account/change-email/validity")
api.add_resource(ChangeEmailResetApi, "/account/change-email/reset")
api.add_resource(CheckEmailUnique, "/account/change-email/check-email-unique")
# api.add_resource(AccountEmailApi, '/account/email')
# api.add_resource(AccountEmailVerifyApi, '/account/email-verify')
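The three change-email routes above are used as a sequence: a verification code is mailed and checked for the current address, the same send/check pair is repeated with phase "new_email" for the new address, and the resulting token is posted to the reset route. A rough client-side sketch; the console base URL, the authenticated session, and the example addresses and codes are placeholders, not part of this diff:

import requests

BASE = "https://cloud.example.com/console/api"  # assumed console API base URL
session = requests.Session()  # assumes an already-authenticated console session

# Round 1: mail a code to the current address and validate it
token = session.post(f"{BASE}/account/change-email",
                     json={"email": "old@example.com", "language": "en-US"}).json()["data"]
token = session.post(f"{BASE}/account/change-email/validity",
                     json={"email": "old@example.com", "code": "123456", "token": token}).json()["token"]

# Round 2: repeat send/check for the new address (phase="new_email" carries the round-1 token),
# then finish by switching the account to the new address
token = session.post(f"{BASE}/account/change-email",
                     json={"email": "new@example.com", "phase": "new_email", "token": token}).json()["data"]
token = session.post(f"{BASE}/account/change-email/validity",
                     json={"email": "new@example.com", "code": "654321", "token": token}).json()["token"]
session.post(f"{BASE}/account/change-email/reset",
             json={"new_email": "new@example.com", "token": token})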
|
||||
|
||||
@ -13,12 +13,6 @@ class CurrentPasswordIncorrectError(BaseHTTPException):
|
||||
code = 400
|
||||
|
||||
|
||||
class ProviderRequestFailedError(BaseHTTPException):
|
||||
error_code = "provider_request_failed"
|
||||
description = None
|
||||
code = 400
|
||||
|
||||
|
||||
class InvalidInvitationCodeError(BaseHTTPException):
|
||||
error_code = "invalid_invitation_code"
|
||||
description = "Invalid invitation code."
|
||||
|
||||
@ -1,22 +1,34 @@
|
||||
from urllib import parse
|
||||
|
||||
from flask import request
|
||||
from flask_login import current_user
|
||||
from flask_restful import Resource, abort, marshal_with, reqparse
|
||||
|
||||
import services
|
||||
from configs import dify_config
|
||||
from controllers.console import api
|
||||
from controllers.console.error import WorkspaceMembersLimitExceeded
|
||||
from controllers.console.auth.error import (
|
||||
CannotTransferOwnerToSelfError,
|
||||
EmailCodeError,
|
||||
InvalidEmailError,
|
||||
InvalidTokenError,
|
||||
MemberNotInTenantError,
|
||||
NotOwnerError,
|
||||
OwnerTransferLimitError,
|
||||
)
|
||||
from controllers.console.error import EmailSendIpLimitError, WorkspaceMembersLimitExceeded
|
||||
from controllers.console.wraps import (
|
||||
account_initialization_required,
|
||||
cloud_edition_billing_resource_check,
|
||||
is_allow_transfer_owner,
|
||||
setup_required,
|
||||
)
|
||||
from extensions.ext_database import db
|
||||
from fields.member_fields import account_with_role_list_fields
|
||||
from libs.helper import extract_remote_ip
|
||||
from libs.login import login_required
|
||||
from models.account import Account, TenantAccountRole
|
||||
from services.account_service import RegisterService, TenantService
|
||||
from services.account_service import AccountService, RegisterService, TenantService
|
||||
from services.errors.account import AccountAlreadyInTenantError
|
||||
from services.feature_service import FeatureService
|
||||
|
||||
@ -96,7 +108,7 @@ class MemberCancelInviteApi(Resource):
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def delete(self, member_id):
|
||||
member = db.session.query(Account).filter(Account.id == str(member_id)).first()
|
||||
member = db.session.query(Account).where(Account.id == str(member_id)).first()
|
||||
if member is None:
|
||||
abort(404)
|
||||
else:
|
||||
@ -156,8 +168,146 @@ class DatasetOperatorMemberListApi(Resource):
|
||||
return {"result": "success", "accounts": members}, 200
|
||||
|
||||
|
||||
class SendOwnerTransferEmailApi(Resource):
|
||||
"""Send owner transfer email."""
|
||||
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@is_allow_transfer_owner
|
||||
def post(self):
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("language", type=str, required=False, location="json")
|
||||
args = parser.parse_args()
|
||||
ip_address = extract_remote_ip(request)
|
||||
if AccountService.is_email_send_ip_limit(ip_address):
|
||||
raise EmailSendIpLimitError()
|
||||
|
||||
# check if the current user is the owner of the workspace
|
||||
if not TenantService.is_owner(current_user, current_user.current_tenant):
|
||||
raise NotOwnerError()
|
||||
|
||||
if args["language"] is not None and args["language"] == "zh-Hans":
|
||||
language = "zh-Hans"
|
||||
else:
|
||||
language = "en-US"
|
||||
|
||||
email = current_user.email
|
||||
|
||||
token = AccountService.send_owner_transfer_email(
|
||||
account=current_user,
|
||||
email=email,
|
||||
language=language,
|
||||
workspace_name=current_user.current_tenant.name,
|
||||
)
|
||||
|
||||
return {"result": "success", "data": token}
|
||||
|
||||
|
||||
class OwnerTransferCheckApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@is_allow_transfer_owner
|
||||
def post(self):
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("code", type=str, required=True, location="json")
|
||||
parser.add_argument("token", type=str, required=True, nullable=False, location="json")
|
||||
args = parser.parse_args()
|
||||
# check if the current user is the owner of the workspace
|
||||
if not TenantService.is_owner(current_user, current_user.current_tenant):
|
||||
raise NotOwnerError()
|
||||
|
||||
user_email = current_user.email
|
||||
|
||||
is_owner_transfer_error_rate_limit = AccountService.is_owner_transfer_error_rate_limit(user_email)
|
||||
if is_owner_transfer_error_rate_limit:
|
||||
raise OwnerTransferLimitError()
|
||||
|
||||
token_data = AccountService.get_owner_transfer_data(args["token"])
|
||||
if token_data is None:
|
||||
raise InvalidTokenError()
|
||||
|
||||
if user_email != token_data.get("email"):
|
||||
raise InvalidEmailError()
|
||||
|
||||
if args["code"] != token_data.get("code"):
|
||||
AccountService.add_owner_transfer_error_rate_limit(user_email)
|
||||
raise EmailCodeError()
|
||||
|
||||
# Verified, revoke the first token
|
||||
AccountService.revoke_owner_transfer_token(args["token"])
|
||||
|
||||
# Refresh token data by generating a new token
|
||||
_, new_token = AccountService.generate_owner_transfer_token(user_email, code=args["code"], additional_data={})
|
||||
|
||||
AccountService.reset_owner_transfer_error_rate_limit(user_email)
|
||||
return {"is_valid": True, "email": token_data.get("email"), "token": new_token}
|
||||
|
||||
|
||||
class OwnerTransfer(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@is_allow_transfer_owner
|
||||
def post(self, member_id):
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("token", type=str, required=True, nullable=False, location="json")
|
||||
args = parser.parse_args()
|
||||
|
||||
# check if the current user is the owner of the workspace
|
||||
if not TenantService.is_owner(current_user, current_user.current_tenant):
|
||||
raise NotOwnerError()
|
||||
|
||||
if current_user.id == str(member_id):
|
||||
raise CannotTransferOwnerToSelfError()
|
||||
|
||||
transfer_token_data = AccountService.get_owner_transfer_data(args["token"])
|
||||
if not transfer_token_data:
|
||||
raise InvalidTokenError()
|
||||
|
||||
if transfer_token_data.get("email") != current_user.email:
|
||||
raise InvalidEmailError()
|
||||
|
||||
AccountService.revoke_owner_transfer_token(args["token"])
|
||||
|
||||
member = db.session.get(Account, str(member_id))
|
||||
if not member:
|
||||
abort(404)
|
||||
else:
|
||||
member_account = member
|
||||
if not TenantService.is_member(member_account, current_user.current_tenant):
|
||||
raise MemberNotInTenantError()
|
||||
|
||||
try:
|
||||
assert member is not None, "Member not found"
|
||||
TenantService.update_member_role(current_user.current_tenant, member, "owner", current_user)
|
||||
|
||||
AccountService.send_new_owner_transfer_notify_email(
|
||||
account=member,
|
||||
email=member.email,
|
||||
workspace_name=current_user.current_tenant.name,
|
||||
)
|
||||
|
||||
AccountService.send_old_owner_transfer_notify_email(
|
||||
account=current_user,
|
||||
email=current_user.email,
|
||||
workspace_name=current_user.current_tenant.name,
|
||||
new_owner_email=member.email,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise ValueError(str(e))
|
||||
|
||||
return {"result": "success"}
|
||||
|
||||
|
||||
api.add_resource(MemberListApi, "/workspaces/current/members")
api.add_resource(MemberInviteEmailApi, "/workspaces/current/members/invite-email")
api.add_resource(MemberCancelInviteApi, "/workspaces/current/members/<uuid:member_id>")
api.add_resource(MemberUpdateRoleApi, "/workspaces/current/members/<uuid:member_id>/update-role")
api.add_resource(DatasetOperatorMemberListApi, "/workspaces/current/dataset-operators")
# owner transfer
api.add_resource(SendOwnerTransferEmailApi, "/workspaces/current/members/send-owner-transfer-confirm-email")
api.add_resource(OwnerTransferCheckApi, "/workspaces/current/members/owner-transfer-check")
api.add_resource(OwnerTransfer, "/workspaces/current/members/<uuid:member_id>/owner-transfer")
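The owner-transfer routes above follow the same confirm-by-email pattern: the current owner requests a confirmation code, validates it to obtain a transfer token, then posts that token to the member-specific transfer route. A hedged sketch; the base URL, session, member id, and code are illustrative placeholders:

import requests

BASE = "https://cloud.example.com/console/api"  # assumed console API base URL
session = requests.Session()  # assumes an authenticated workspace-owner session
member_id = "11111111-2222-3333-4444-555555555555"  # placeholder member UUID

# Step 1: mail a confirmation code to the current owner
token = session.post(
    f"{BASE}/workspaces/current/members/send-owner-transfer-confirm-email",
    json={"language": "en-US"},
).json()["data"]

# Step 2: validate the code; the response carries a fresh token for the transfer itself
new_token = session.post(
    f"{BASE}/workspaces/current/members/owner-transfer-check",
    json={"code": "123456", "token": token},
).json()["token"]

# Step 3: transfer ownership to the target member
session.post(f"{BASE}/workspaces/current/members/{member_id}/owner-transfer", json={"token": new_token})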
|
||||
|
||||
@ -73,8 +73,9 @@ class DefaultModelApi(Resource):
|
||||
)
|
||||
except Exception as ex:
|
||||
logging.exception(
|
||||
f"Failed to update default model, model type: {model_setting['model_type']},"
|
||||
f" model:{model_setting.get('model')}"
|
||||
"Failed to update default model, model type: %s, model: %s",
|
||||
model_setting["model_type"],
|
||||
model_setting.get("model"),
|
||||
)
|
||||
raise ex
|
||||
|
||||
@ -160,8 +161,10 @@ class ModelProviderModelApi(Resource):
|
||||
)
|
||||
except CredentialsValidateFailedError as ex:
|
||||
logging.exception(
|
||||
f"Failed to save model credentials, tenant_id: {tenant_id},"
|
||||
f" model: {args.get('model')}, model_type: {args.get('model_type')}"
|
||||
"Failed to save model credentials, tenant_id: %s, model: %s, model_type: %s",
|
||||
tenant_id,
|
||||
args.get("model"),
|
||||
args.get("model_type"),
|
||||
)
|
||||
raise ValueError(str(ex))
|
||||
|
||||
|
||||
@ -12,7 +12,8 @@ from controllers.console.wraps import account_initialization_required, setup_req
|
||||
from core.model_runtime.utils.encoders import jsonable_encoder
|
||||
from core.plugin.impl.exc import PluginDaemonClientSideError
|
||||
from libs.login import login_required
|
||||
from models.account import TenantPluginPermission
|
||||
from models.account import TenantPluginAutoUpgradeStrategy, TenantPluginPermission
|
||||
from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService
|
||||
from services.plugin.plugin_parameter_service import PluginParameterService
|
||||
from services.plugin.plugin_permission_service import PluginPermissionService
|
||||
from services.plugin.plugin_service import PluginService
|
||||
@ -534,6 +535,114 @@ class PluginFetchDynamicSelectOptionsApi(Resource):
|
||||
return jsonable_encoder({"options": options})
|
||||
|
||||
|
||||
class PluginChangePreferencesApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def post(self):
|
||||
user = current_user
|
||||
if not user.is_admin_or_owner:
|
||||
raise Forbidden()
|
||||
|
||||
req = reqparse.RequestParser()
|
||||
req.add_argument("permission", type=dict, required=True, location="json")
|
||||
req.add_argument("auto_upgrade", type=dict, required=True, location="json")
|
||||
args = req.parse_args()
|
||||
|
||||
tenant_id = user.current_tenant_id
|
||||
|
||||
permission = args["permission"]
|
||||
|
||||
install_permission = TenantPluginPermission.InstallPermission(permission.get("install_permission", "everyone"))
|
||||
debug_permission = TenantPluginPermission.DebugPermission(permission.get("debug_permission", "everyone"))
|
||||
|
||||
auto_upgrade = args["auto_upgrade"]
|
||||
|
||||
strategy_setting = TenantPluginAutoUpgradeStrategy.StrategySetting(
|
||||
auto_upgrade.get("strategy_setting", "fix_only")
|
||||
)
|
||||
upgrade_time_of_day = auto_upgrade.get("upgrade_time_of_day", 0)
|
||||
upgrade_mode = TenantPluginAutoUpgradeStrategy.UpgradeMode(auto_upgrade.get("upgrade_mode", "exclude"))
|
||||
exclude_plugins = auto_upgrade.get("exclude_plugins", [])
|
||||
include_plugins = auto_upgrade.get("include_plugins", [])
|
||||
|
||||
# set permission
|
||||
set_permission_result = PluginPermissionService.change_permission(
|
||||
tenant_id,
|
||||
install_permission,
|
||||
debug_permission,
|
||||
)
|
||||
if not set_permission_result:
|
||||
return jsonable_encoder({"success": False, "message": "Failed to set permission"})
|
||||
|
||||
# set auto upgrade strategy
|
||||
set_auto_upgrade_strategy_result = PluginAutoUpgradeService.change_strategy(
|
||||
tenant_id,
|
||||
strategy_setting,
|
||||
upgrade_time_of_day,
|
||||
upgrade_mode,
|
||||
exclude_plugins,
|
||||
include_plugins,
|
||||
)
|
||||
if not set_auto_upgrade_strategy_result:
|
||||
return jsonable_encoder({"success": False, "message": "Failed to set auto upgrade strategy"})
|
||||
|
||||
return jsonable_encoder({"success": True})
|
||||
|
||||
|
||||
class PluginFetchPreferencesApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def get(self):
|
||||
tenant_id = current_user.current_tenant_id
|
||||
|
||||
permission = PluginPermissionService.get_permission(tenant_id)
|
||||
permission_dict = {
|
||||
"install_permission": TenantPluginPermission.InstallPermission.EVERYONE,
|
||||
"debug_permission": TenantPluginPermission.DebugPermission.EVERYONE,
|
||||
}
|
||||
|
||||
if permission:
|
||||
permission_dict["install_permission"] = permission.install_permission
|
||||
permission_dict["debug_permission"] = permission.debug_permission
|
||||
|
||||
auto_upgrade = PluginAutoUpgradeService.get_strategy(tenant_id)
|
||||
auto_upgrade_dict = {
|
||||
"strategy_setting": TenantPluginAutoUpgradeStrategy.StrategySetting.DISABLED,
|
||||
"upgrade_time_of_day": 0,
|
||||
"upgrade_mode": TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE,
|
||||
"exclude_plugins": [],
|
||||
"include_plugins": [],
|
||||
}
|
||||
|
||||
if auto_upgrade:
|
||||
auto_upgrade_dict = {
|
||||
"strategy_setting": auto_upgrade.strategy_setting,
|
||||
"upgrade_time_of_day": auto_upgrade.upgrade_time_of_day,
|
||||
"upgrade_mode": auto_upgrade.upgrade_mode,
|
||||
"exclude_plugins": auto_upgrade.exclude_plugins,
|
||||
"include_plugins": auto_upgrade.include_plugins,
|
||||
}
|
||||
|
||||
return jsonable_encoder({"permission": permission_dict, "auto_upgrade": auto_upgrade_dict})
|
||||
|
||||
|
||||
class PluginAutoUpgradeExcludePluginApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def post(self):
|
||||
# exclude one single plugin
|
||||
tenant_id = current_user.current_tenant_id
|
||||
|
||||
req = reqparse.RequestParser()
|
||||
req.add_argument("plugin_id", type=str, required=True, location="json")
|
||||
args = req.parse_args()
|
||||
|
||||
return jsonable_encoder({"success": PluginAutoUpgradeService.exclude_plugin(tenant_id, args["plugin_id"])})
|
||||
|
||||
|
||||
api.add_resource(PluginDebuggingKeyApi, "/workspaces/current/plugin/debugging-key")
api.add_resource(PluginListApi, "/workspaces/current/plugin/list")
api.add_resource(PluginListLatestVersionsApi, "/workspaces/current/plugin/list/latest-versions")
@@ -560,3 +669,7 @@ api.add_resource(PluginChangePermissionApi, "/workspaces/current/plugin/permissi
api.add_resource(PluginFetchPermissionApi, "/workspaces/current/plugin/permission/fetch")

api.add_resource(PluginFetchDynamicSelectOptionsApi, "/workspaces/current/plugin/parameters/dynamic-options")

api.add_resource(PluginFetchPreferencesApi, "/workspaces/current/plugin/preferences/fetch")
api.add_resource(PluginChangePreferencesApi, "/workspaces/current/plugin/preferences/change")
api.add_resource(PluginAutoUpgradeExcludePluginApi, "/workspaces/current/plugin/preferences/autoupgrade/exclude")
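PluginChangePreferencesApi reads two JSON objects, permission and auto_upgrade, and maps their fields onto the TenantPluginPermission and TenantPluginAutoUpgradeStrategy enums. A sketch of a request body for POST /workspaces/current/plugin/preferences/change, using only the default values the handler itself falls back to; any other enum literals would need to be checked against the model definitions:

# Hypothetical payload for POST /workspaces/current/plugin/preferences/change
# (field names mirror what the handler reads; the values shown are its documented fallbacks)
payload = {
    "permission": {
        "install_permission": "everyone",
        "debug_permission": "everyone",
    },
    "auto_upgrade": {
        "strategy_setting": "fix_only",
        "upgrade_time_of_day": 0,
        "upgrade_mode": "exclude",
        "exclude_plugins": [],
        "include_plugins": [],
    },
}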
|
||||
|
||||
@ -1,26 +1,35 @@
|
||||
import io
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from flask import redirect, send_file
|
||||
from flask import make_response, redirect, request, send_file
|
||||
from flask_login import current_user
|
||||
from flask_restful import Resource, reqparse
|
||||
from sqlalchemy.orm import Session
|
||||
from flask_restful import (
|
||||
Resource,
|
||||
reqparse,
|
||||
)
|
||||
from werkzeug.exceptions import Forbidden
|
||||
|
||||
from configs import dify_config
|
||||
from controllers.console import api
|
||||
from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required
|
||||
from controllers.console.wraps import (
|
||||
account_initialization_required,
|
||||
enterprise_license_required,
|
||||
setup_required,
|
||||
)
|
||||
from core.mcp.auth.auth_flow import auth, handle_callback
|
||||
from core.mcp.auth.auth_provider import OAuthClientProvider
|
||||
from core.mcp.error import MCPAuthError, MCPError
|
||||
from core.mcp.mcp_client import MCPClient
|
||||
from core.model_runtime.utils.encoders import jsonable_encoder
|
||||
from extensions.ext_database import db
|
||||
from libs.helper import alphanumeric, uuid_value
|
||||
from core.plugin.entities.plugin import ToolProviderID
|
||||
from core.plugin.impl.oauth import OAuthHandler
|
||||
from core.tools.entities.tool_entities import CredentialType
|
||||
from libs.helper import StrLen, alphanumeric, uuid_value
|
||||
from libs.login import login_required
|
||||
from services.plugin.oauth_service import OAuthProxyService
|
||||
from services.tools.api_tools_manage_service import ApiToolManageService
|
||||
from services.tools.builtin_tools_manage_service import BuiltinToolManageService
|
||||
from services.tools.mcp_tools_mange_service import MCPToolManageService
|
||||
from services.tools.mcp_tools_manage_service import MCPToolManageService
|
||||
from services.tools.tool_labels_service import ToolLabelsService
|
||||
from services.tools.tools_manage_service import ToolCommonService
|
||||
from services.tools.tools_transform_service import ToolTransformService
|
||||
@ -89,7 +98,7 @@ class ToolBuiltinProviderInfoApi(Resource):
|
||||
user_id = user.id
|
||||
tenant_id = user.current_tenant_id
|
||||
|
||||
return jsonable_encoder(BuiltinToolManageService.get_builtin_tool_provider_info(user_id, tenant_id, provider))
|
||||
return jsonable_encoder(BuiltinToolManageService.get_builtin_tool_provider_info(tenant_id, provider))
|
||||
|
||||
|
||||
class ToolBuiltinProviderDeleteApi(Resource):
|
||||
@ -98,17 +107,47 @@ class ToolBuiltinProviderDeleteApi(Resource):
|
||||
@account_initialization_required
|
||||
def post(self, provider):
|
||||
user = current_user
|
||||
|
||||
if not user.is_admin_or_owner:
|
||||
raise Forbidden()
|
||||
|
||||
tenant_id = user.current_tenant_id
|
||||
req = reqparse.RequestParser()
|
||||
req.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
|
||||
args = req.parse_args()
|
||||
|
||||
return BuiltinToolManageService.delete_builtin_tool_provider(
|
||||
tenant_id,
|
||||
provider,
|
||||
args["credential_id"],
|
||||
)
|
||||
|
||||
|
||||
class ToolBuiltinProviderAddApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def post(self, provider):
|
||||
user = current_user
|
||||
|
||||
user_id = user.id
|
||||
tenant_id = user.current_tenant_id
|
||||
|
||||
return BuiltinToolManageService.delete_builtin_tool_provider(
|
||||
user_id,
|
||||
tenant_id,
|
||||
provider,
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
|
||||
parser.add_argument("name", type=StrLen(30), required=False, nullable=False, location="json")
|
||||
parser.add_argument("type", type=str, required=True, nullable=False, location="json")
|
||||
args = parser.parse_args()
|
||||
|
||||
if args["type"] not in CredentialType.values():
|
||||
raise ValueError(f"Invalid credential type: {args['type']}")
|
||||
|
||||
return BuiltinToolManageService.add_builtin_tool_provider(
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id,
|
||||
provider=provider,
|
||||
credentials=args["credentials"],
|
||||
name=args["name"],
|
||||
api_type=CredentialType.of(args["type"]),
|
||||
)
|
||||
|
||||
|
||||
@ -126,19 +165,20 @@ class ToolBuiltinProviderUpdateApi(Resource):
|
||||
tenant_id = user.current_tenant_id
|
||||
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
|
||||
parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
|
||||
parser.add_argument("credentials", type=dict, required=False, nullable=True, location="json")
|
||||
parser.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
with Session(db.engine) as session:
|
||||
result = BuiltinToolManageService.update_builtin_tool_provider(
|
||||
session=session,
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id,
|
||||
provider_name=provider,
|
||||
credentials=args["credentials"],
|
||||
)
|
||||
session.commit()
|
||||
result = BuiltinToolManageService.update_builtin_tool_provider(
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id,
|
||||
provider=provider,
|
||||
credential_id=args["credential_id"],
|
||||
credentials=args.get("credentials", None),
|
||||
name=args.get("name", ""),
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
@ -149,9 +189,11 @@ class ToolBuiltinProviderGetCredentialsApi(Resource):
|
||||
def get(self, provider):
|
||||
tenant_id = current_user.current_tenant_id
|
||||
|
||||
return BuiltinToolManageService.get_builtin_tool_provider_credentials(
|
||||
tenant_id=tenant_id,
|
||||
provider_name=provider,
|
||||
return jsonable_encoder(
|
||||
BuiltinToolManageService.get_builtin_tool_provider_credentials(
|
||||
tenant_id=tenant_id,
|
||||
provider_name=provider,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@ -344,12 +386,15 @@ class ToolBuiltinProviderCredentialsSchemaApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def get(self, provider):
|
||||
def get(self, provider, credential_type):
|
||||
user = current_user
|
||||
|
||||
tenant_id = user.current_tenant_id
|
||||
|
||||
return BuiltinToolManageService.list_builtin_provider_credentials_schema(provider, tenant_id)
|
||||
return jsonable_encoder(
|
||||
BuiltinToolManageService.list_builtin_provider_credentials_schema(
|
||||
provider, CredentialType.of(credential_type), tenant_id
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class ToolApiProviderSchemaApi(Resource):
|
||||
@ -586,15 +631,12 @@ class ToolApiListApi(Resource):
|
||||
@account_initialization_required
|
||||
def get(self):
|
||||
user = current_user
|
||||
|
||||
user_id = user.id
|
||||
tenant_id = user.current_tenant_id
|
||||
|
||||
return jsonable_encoder(
|
||||
[
|
||||
provider.to_dict()
|
||||
for provider in ApiToolManageService.list_api_tools(
|
||||
user_id,
|
||||
tenant_id,
|
||||
)
|
||||
]
|
||||
@ -631,6 +673,183 @@ class ToolLabelsApi(Resource):
|
||||
return jsonable_encoder(ToolLabelsService.list_tool_labels())
|
||||
|
||||
|
||||
class ToolPluginOAuthApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def get(self, provider):
|
||||
tool_provider = ToolProviderID(provider)
|
||||
plugin_id = tool_provider.plugin_id
|
||||
provider_name = tool_provider.provider_name
|
||||
|
||||
# todo check permission
|
||||
user = current_user
|
||||
|
||||
if not user.is_admin_or_owner:
|
||||
raise Forbidden()
|
||||
|
||||
tenant_id = user.current_tenant_id
|
||||
oauth_client_params = BuiltinToolManageService.get_oauth_client(tenant_id=tenant_id, provider=provider)
|
||||
if oauth_client_params is None:
|
||||
raise Forbidden("no oauth available client config found for this tool provider")
|
||||
|
||||
oauth_handler = OAuthHandler()
|
||||
context_id = OAuthProxyService.create_proxy_context(
|
||||
user_id=current_user.id, tenant_id=tenant_id, plugin_id=plugin_id, provider=provider_name
|
||||
)
|
||||
redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider}/tool/callback"
|
||||
authorization_url_response = oauth_handler.get_authorization_url(
|
||||
tenant_id=tenant_id,
|
||||
user_id=user.id,
|
||||
plugin_id=plugin_id,
|
||||
provider=provider_name,
|
||||
redirect_uri=redirect_uri,
|
||||
system_credentials=oauth_client_params,
|
||||
)
|
||||
response = make_response(jsonable_encoder(authorization_url_response))
|
||||
response.set_cookie(
|
||||
"context_id",
|
||||
context_id,
|
||||
httponly=True,
|
||||
samesite="Lax",
|
||||
max_age=OAuthProxyService.__MAX_AGE__,
|
||||
)
|
||||
return response
|
||||
|
||||
|
||||
class ToolOAuthCallback(Resource):
|
||||
@setup_required
|
||||
def get(self, provider):
|
||||
context_id = request.cookies.get("context_id")
|
||||
if not context_id:
|
||||
raise Forbidden("context_id not found")
|
||||
|
||||
context = OAuthProxyService.use_proxy_context(context_id)
|
||||
if context is None:
|
||||
raise Forbidden("Invalid context_id")
|
||||
|
||||
tool_provider = ToolProviderID(provider)
|
||||
plugin_id = tool_provider.plugin_id
|
||||
provider_name = tool_provider.provider_name
|
||||
user_id, tenant_id = context.get("user_id"), context.get("tenant_id")
|
||||
|
||||
oauth_handler = OAuthHandler()
|
||||
oauth_client_params = BuiltinToolManageService.get_oauth_client(tenant_id, provider)
|
||||
if oauth_client_params is None:
|
||||
raise Forbidden("no oauth available client config found for this tool provider")
|
||||
|
||||
redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider}/tool/callback"
|
||||
credentials_response = oauth_handler.get_credentials(
|
||||
tenant_id=tenant_id,
|
||||
user_id=user_id,
|
||||
plugin_id=plugin_id,
|
||||
provider=provider_name,
|
||||
redirect_uri=redirect_uri,
|
||||
system_credentials=oauth_client_params,
|
||||
request=request,
|
||||
)
|
||||
|
||||
credentials = credentials_response.credentials
|
||||
expires_at = credentials_response.expires_at
|
||||
|
||||
if not credentials:
|
||||
raise Exception("the plugin credentials failed")
|
||||
|
||||
# add credentials to database
|
||||
BuiltinToolManageService.add_builtin_tool_provider(
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id,
|
||||
provider=provider,
|
||||
credentials=dict(credentials),
|
||||
expires_at=expires_at,
|
||||
api_type=CredentialType.OAUTH2,
|
||||
)
|
||||
return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback")
|
||||
|
||||
|
||||
class ToolBuiltinProviderSetDefaultApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def post(self, provider):
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("id", type=str, required=True, nullable=False, location="json")
|
||||
args = parser.parse_args()
|
||||
return BuiltinToolManageService.set_default_provider(
|
||||
tenant_id=current_user.current_tenant_id, user_id=current_user.id, provider=provider, id=args["id"]
|
||||
)
|
||||
|
||||
|
||||
class ToolOAuthCustomClient(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def post(self, provider):
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("client_params", type=dict, required=False, nullable=True, location="json")
|
||||
parser.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json")
|
||||
args = parser.parse_args()
|
||||
|
||||
user = current_user
|
||||
|
||||
if not user.is_admin_or_owner:
|
||||
raise Forbidden()
|
||||
|
||||
return BuiltinToolManageService.save_custom_oauth_client_params(
|
||||
tenant_id=user.current_tenant_id,
|
||||
provider=provider,
|
||||
client_params=args.get("client_params", {}),
|
||||
enable_oauth_custom_client=args.get("enable_oauth_custom_client", True),
|
||||
)
|
||||
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def get(self, provider):
|
||||
return jsonable_encoder(
|
||||
BuiltinToolManageService.get_custom_oauth_client_params(
|
||||
tenant_id=current_user.current_tenant_id, provider=provider
|
||||
)
|
||||
)
|
||||
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def delete(self, provider):
|
||||
return jsonable_encoder(
|
||||
BuiltinToolManageService.delete_custom_oauth_client_params(
|
||||
tenant_id=current_user.current_tenant_id, provider=provider
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class ToolBuiltinProviderGetOauthClientSchemaApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def get(self, provider):
|
||||
return jsonable_encoder(
|
||||
BuiltinToolManageService.get_builtin_tool_provider_oauth_client_schema(
|
||||
tenant_id=current_user.current_tenant_id, provider_name=provider
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class ToolBuiltinProviderGetCredentialInfoApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def get(self, provider):
|
||||
tenant_id = current_user.current_tenant_id
|
||||
|
||||
return jsonable_encoder(
|
||||
BuiltinToolManageService.get_builtin_tool_provider_credential_info(
|
||||
tenant_id=tenant_id,
|
||||
provider=provider,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class ToolProviderMCPApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@ -794,17 +1013,33 @@ class ToolMCPCallbackApi(Resource):
|
||||
# tool provider
api.add_resource(ToolProviderListApi, "/workspaces/current/tool-providers")

# tool oauth
api.add_resource(ToolPluginOAuthApi, "/oauth/plugin/<path:provider>/tool/authorization-url")
api.add_resource(ToolOAuthCallback, "/oauth/plugin/<path:provider>/tool/callback")
api.add_resource(ToolOAuthCustomClient, "/workspaces/current/tool-provider/builtin/<path:provider>/oauth/custom-client")
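Taken together, the two OAuth routes above implement a standard authorization-code handshake: the console asks ToolPluginOAuthApi for an authorization URL (a short-lived context_id is set as an HttpOnly cookie), the user authorizes at the provider, and the provider redirects back to ToolOAuthCallback, which exchanges the code for credentials and stores them before redirecting to the web console. A rough client-side sketch; the base URL, provider identifier, and the response field name are assumptions:

import requests

BASE = "https://cloud.example.com/console/api"  # assumed console API base URL
session = requests.Session()  # assumes an authenticated admin/owner session
provider = "langgenius/example/example"  # placeholder provider identifier

# Step 1: fetch the authorization URL; the context_id cookie is kept on the session
resp = session.get(f"{BASE}/oauth/plugin/{provider}/tool/authorization-url")
authorization_url = resp.json().get("authorization_url")  # field name assumed, set by the plugin's OAuth handler

# Step 2: the user opens authorization_url in a browser and approves access; the provider
# then redirects to /oauth/plugin/<provider>/tool/callback, which persists the credentials
# and bounces to CONSOLE_WEB_URL/oauth-callback.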
|
||||
|
||||
# builtin tool provider
|
||||
api.add_resource(ToolBuiltinProviderListToolsApi, "/workspaces/current/tool-provider/builtin/<path:provider>/tools")
|
||||
api.add_resource(ToolBuiltinProviderInfoApi, "/workspaces/current/tool-provider/builtin/<path:provider>/info")
|
||||
api.add_resource(ToolBuiltinProviderAddApi, "/workspaces/current/tool-provider/builtin/<path:provider>/add")
|
||||
api.add_resource(ToolBuiltinProviderDeleteApi, "/workspaces/current/tool-provider/builtin/<path:provider>/delete")
|
||||
api.add_resource(ToolBuiltinProviderUpdateApi, "/workspaces/current/tool-provider/builtin/<path:provider>/update")
|
||||
api.add_resource(
|
||||
ToolBuiltinProviderSetDefaultApi, "/workspaces/current/tool-provider/builtin/<path:provider>/default-credential"
|
||||
)
|
||||
api.add_resource(
|
||||
ToolBuiltinProviderGetCredentialInfoApi, "/workspaces/current/tool-provider/builtin/<path:provider>/credential/info"
|
||||
)
|
||||
api.add_resource(
|
||||
ToolBuiltinProviderGetCredentialsApi, "/workspaces/current/tool-provider/builtin/<path:provider>/credentials"
|
||||
)
|
||||
api.add_resource(
|
||||
ToolBuiltinProviderCredentialsSchemaApi,
|
||||
"/workspaces/current/tool-provider/builtin/<path:provider>/credentials_schema",
|
||||
"/workspaces/current/tool-provider/builtin/<path:provider>/credential/schema/<path:credential_type>",
|
||||
)
|
||||
api.add_resource(
|
||||
ToolBuiltinProviderGetOauthClientSchemaApi,
|
||||
"/workspaces/current/tool-provider/builtin/<path:provider>/oauth/client-schema",
|
||||
)
|
||||
api.add_resource(ToolBuiltinProviderIconApi, "/workspaces/current/tool-provider/builtin/<path:provider>/icon")
|
||||
|
||||
|
||||
@ -191,9 +191,6 @@ class WebappLogoWorkspaceApi(Resource):
|
||||
@account_initialization_required
|
||||
@cloud_edition_billing_resource_check("workspace_custom")
|
||||
def post(self):
|
||||
# get file from request
|
||||
file = request.files["file"]
|
||||
|
||||
# check file
|
||||
if "file" not in request.files:
|
||||
raise NoFileUploadedError()
|
||||
@ -201,6 +198,8 @@ class WebappLogoWorkspaceApi(Resource):
|
||||
if len(request.files) > 1:
|
||||
raise TooManyFilesError()
|
||||
|
||||
# get file from request
|
||||
file = request.files["file"]
|
||||
if not file.filename:
|
||||
raise FilenameNotExistsError
|
||||
|
||||
|
||||
@ -235,3 +235,29 @@ def email_password_login_enabled(view):
|
||||
abort(403)
|
||||
|
||||
return decorated
|
||||
|
||||
|
||||
def enable_change_email(view):
|
||||
@wraps(view)
|
||||
def decorated(*args, **kwargs):
|
||||
features = FeatureService.get_system_features()
|
||||
if features.enable_change_email:
|
||||
return view(*args, **kwargs)
|
||||
|
||||
# otherwise, return 403
|
||||
abort(403)
|
||||
|
||||
return decorated
|
||||
|
||||
|
||||
def is_allow_transfer_owner(view):
|
||||
@wraps(view)
|
||||
def decorated(*args, **kwargs):
|
||||
features = FeatureService.get_features(current_user.current_tenant_id)
|
||||
if features.is_allow_transfer_workspace:
|
||||
return view(*args, **kwargs)
|
||||
|
||||
# otherwise, return 403
|
||||
abort(403)
|
||||
|
||||
return decorated
|
||||
|
||||
@ -175,6 +175,7 @@ class PluginInvokeToolApi(Resource):
|
||||
provider=payload.provider,
|
||||
tool_name=payload.tool,
|
||||
tool_parameters=payload.tool_parameters,
|
||||
credential_id=payload.credential_id,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@ -22,7 +22,7 @@ def get_user(tenant_id: str, user_id: str | None) -> Account | EndUser:
|
||||
user_id = "DEFAULT-USER"
|
||||
|
||||
if user_id == "DEFAULT-USER":
|
||||
user_model = session.query(EndUser).filter(EndUser.session_id == "DEFAULT-USER").first()
|
||||
user_model = session.query(EndUser).where(EndUser.session_id == "DEFAULT-USER").first()
|
||||
if not user_model:
|
||||
user_model = EndUser(
|
||||
tenant_id=tenant_id,
|
||||
@ -36,7 +36,7 @@ def get_user(tenant_id: str, user_id: str | None) -> Account | EndUser:
|
||||
else:
|
||||
user_model = AccountService.load_user(user_id)
|
||||
if not user_model:
|
||||
user_model = session.query(EndUser).filter(EndUser.id == user_id).first()
|
||||
user_model = session.query(EndUser).where(EndUser.id == user_id).first()
|
||||
if not user_model:
|
||||
raise ValueError("user not found")
|
||||
except Exception:
|
||||
@ -71,7 +71,7 @@ def get_user_tenant(view: Optional[Callable] = None):
|
||||
try:
|
||||
tenant_model = (
|
||||
db.session.query(Tenant)
|
||||
.filter(
|
||||
.where(
|
||||
Tenant.id == tenant_id,
|
||||
)
|
||||
.first()
|
||||
|
||||
@ -55,7 +55,7 @@ def enterprise_inner_api_user_auth(view):
|
||||
if signature_base64 != token:
|
||||
return view(*args, **kwargs)
|
||||
|
||||
kwargs["user"] = db.session.query(EndUser).filter(EndUser.id == user_id).first()
|
||||
kwargs["user"] = db.session.query(EndUser).where(EndUser.id == user_id).first()
|
||||
|
||||
return view(*args, **kwargs)
|
||||
|
||||
|
||||
@ -30,7 +30,7 @@ class MCPAppApi(Resource):
|
||||
|
||||
request_id = args.get("id")
|
||||
|
||||
server = db.session.query(AppMCPServer).filter(AppMCPServer.server_code == server_code).first()
|
||||
server = db.session.query(AppMCPServer).where(AppMCPServer.server_code == server_code).first()
|
||||
if not server:
|
||||
return helper.compact_generate_response(
|
||||
create_mcp_error_response(request_id, types.INVALID_REQUEST, "Server Not Found")
|
||||
@ -41,7 +41,7 @@ class MCPAppApi(Resource):
|
||||
create_mcp_error_response(request_id, types.INVALID_REQUEST, "Server is not active")
|
||||
)
|
||||
|
||||
app = db.session.query(App).filter(App.id == server.app_id).first()
|
||||
app = db.session.query(App).where(App.id == server.app_id).first()
|
||||
if not app:
|
||||
return helper.compact_generate_response(
|
||||
create_mcp_error_response(request_id, types.INVALID_REQUEST, "App Not Found")
|
||||
|
||||
@ -6,6 +6,6 @@ bp = Blueprint("service_api", __name__, url_prefix="/v1")
|
||||
api = ExternalApi(bp)
|
||||
|
||||
from . import index
|
||||
from .app import annotation, app, audio, completion, conversation, file, message, site, workflow
|
||||
from .app import annotation, app, audio, completion, conversation, file, file_preview, message, site, workflow
|
||||
from .dataset import dataset, document, hit_testing, metadata, segment, upload_file
|
||||
from .workspace import models
|
||||
|
||||
@ -34,7 +34,7 @@ class AnnotationReplyActionStatusApi(Resource):
|
||||
@validate_app_token
|
||||
def get(self, app_model: App, job_id, action):
|
||||
job_id = str(job_id)
|
||||
app_annotation_job_key = "{}_app_annotation_job_{}".format(action, str(job_id))
|
||||
app_annotation_job_key = f"{action}_app_annotation_job_{str(job_id)}"
|
||||
cache_result = redis_client.get(app_annotation_job_key)
|
||||
if cache_result is None:
|
||||
raise ValueError("The job does not exist.")
|
||||
@ -42,7 +42,7 @@ class AnnotationReplyActionStatusApi(Resource):
|
||||
job_status = cache_result.decode()
|
||||
error_msg = ""
|
||||
if job_status == "error":
|
||||
app_annotation_error_key = "{}_app_annotation_error_{}".format(action, str(job_id))
|
||||
app_annotation_error_key = f"{action}_app_annotation_error_{str(job_id)}"
|
||||
error_msg = redis_client.get(app_annotation_error_key).decode()
|
||||
|
||||
return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200
|
||||
|
||||
@ -1,7 +1,8 @@
|
||||
import logging
|
||||
|
||||
from flask import request
|
||||
from flask_restful import Resource, reqparse
|
||||
from werkzeug.exceptions import InternalServerError, NotFound
|
||||
from werkzeug.exceptions import BadRequest, InternalServerError, NotFound
|
||||
|
||||
import services
|
||||
from controllers.service_api import api
|
||||
@ -23,11 +24,13 @@ from core.errors.error import (
|
||||
ProviderTokenNotInitError,
|
||||
QuotaExceededError,
|
||||
)
|
||||
from core.helper.trace_id_helper import get_external_trace_id
|
||||
from core.model_runtime.errors.invoke import InvokeError
|
||||
from libs import helper
|
||||
from libs.helper import uuid_value
|
||||
from models.model import App, AppMode, EndUser
|
||||
from services.app_generate_service import AppGenerateService
|
||||
from services.errors.app import IsDraftWorkflowError, WorkflowIdFormatError, WorkflowNotFoundError
|
||||
from services.errors.llm import InvokeRateLimitError
|
||||
|
||||
|
||||
@ -45,6 +48,9 @@ class CompletionApi(Resource):
|
||||
parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json")
|
||||
|
||||
args = parser.parse_args()
|
||||
external_trace_id = get_external_trace_id(request)
|
||||
if external_trace_id:
|
||||
args["external_trace_id"] = external_trace_id
|
||||
|
||||
streaming = args["response_mode"] == "streaming"
|
||||
|
||||
@ -108,9 +114,13 @@ class ChatApi(Resource):
|
||||
parser.add_argument("conversation_id", type=uuid_value, location="json")
|
||||
parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json")
|
||||
parser.add_argument("auto_generate_name", type=bool, required=False, default=True, location="json")
|
||||
|
||||
parser.add_argument("workflow_id", type=str, required=False, location="json")
|
||||
args = parser.parse_args()
|
||||
|
||||
external_trace_id = get_external_trace_id(request)
|
||||
if external_trace_id:
|
||||
args["external_trace_id"] = external_trace_id
|
||||
|
||||
streaming = args["response_mode"] == "streaming"
|
||||
|
||||
try:
|
||||
@ -119,6 +129,12 @@ class ChatApi(Resource):
|
||||
)
|
||||
|
||||
return helper.compact_generate_response(response)
|
||||
except WorkflowNotFoundError as ex:
|
||||
raise NotFound(str(ex))
|
||||
except IsDraftWorkflowError as ex:
|
||||
raise BadRequest(str(ex))
|
||||
except WorkflowIdFormatError as ex:
|
||||
raise BadRequest(str(ex))
|
||||
except services.errors.conversation.ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
except services.errors.conversation.ConversationCompletedError:
|
||||
|
||||
@ -1,7 +1,9 @@
|
||||
import json
|
||||
|
||||
from flask_restful import Resource, marshal_with, reqparse
|
||||
from flask_restful.inputs import int_range
|
||||
from sqlalchemy.orm import Session
|
||||
from werkzeug.exceptions import NotFound
|
||||
from werkzeug.exceptions import BadRequest, NotFound
|
||||
|
||||
import services
|
||||
from controllers.service_api import api
|
||||
@ -15,6 +17,7 @@ from fields.conversation_fields import (
|
||||
simple_conversation_fields,
|
||||
)
|
||||
from fields.conversation_variable_fields import (
|
||||
conversation_variable_fields,
|
||||
conversation_variable_infinite_scroll_pagination_fields,
|
||||
)
|
||||
from libs.helper import uuid_value
|
||||
@ -120,7 +123,41 @@ class ConversationVariablesApi(Resource):
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
|
||||
|
||||
class ConversationVariableDetailApi(Resource):
|
||||
@validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON))
|
||||
@marshal_with(conversation_variable_fields)
|
||||
def put(self, app_model: App, end_user: EndUser, c_id, variable_id):
|
||||
"""Update a conversation variable's value"""
|
||||
app_mode = AppMode.value_of(app_model.mode)
|
||||
if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
|
||||
raise NotChatAppError()
|
||||
|
||||
conversation_id = str(c_id)
|
||||
variable_id = str(variable_id)
|
||||
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("value", required=True, location="json")
|
||||
args = parser.parse_args()
|
||||
|
||||
try:
|
||||
return ConversationService.update_conversation_variable(
|
||||
app_model, conversation_id, variable_id, end_user, json.loads(args["value"])
|
||||
)
|
||||
except services.errors.conversation.ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
except services.errors.conversation.ConversationVariableNotExistsError:
|
||||
raise NotFound("Conversation Variable Not Exists.")
|
||||
except services.errors.conversation.ConversationVariableTypeMismatchError as e:
|
||||
raise BadRequest(str(e))
|
||||
|
||||
|
||||
api.add_resource(ConversationRenameApi, "/conversations/<uuid:c_id>/name", endpoint="conversation_name")
api.add_resource(ConversationApi, "/conversations")
api.add_resource(ConversationDetailApi, "/conversations/<uuid:c_id>", endpoint="conversation_detail")
api.add_resource(ConversationVariablesApi, "/conversations/<uuid:c_id>/variables", endpoint="conversation_variables")
api.add_resource(
    ConversationVariableDetailApi,
    "/conversations/<uuid:c_id>/variables/<uuid:variable_id>",
    endpoint="conversation_variable_detail",
    methods=["PUT"],
)
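ConversationVariableDetailApi backs the new PUT route registered above; the value field is sent as a JSON-encoded string and parsed with json.loads before being handed to ConversationService.update_conversation_variable. A hedged usage sketch; the API key, host, and IDs are placeholders:

import json
import requests

API_KEY = "app-xxxxxxxx"  # placeholder Service API key
BASE = "https://api.example.com/v1"  # assumed Service API base URL
conversation_id = "11111111-2222-3333-4444-555555555555"  # placeholder conversation UUID
variable_id = "66666666-7777-8888-9999-000000000000"  # placeholder variable UUID

resp = requests.put(
    f"{BASE}/conversations/{conversation_id}/variables/{variable_id}",
    headers={"Authorization": f"Bearer {API_KEY}"},
    json={
        "user": "end-user-123",                # end-user identifier, read from the JSON body
        "value": json.dumps(["a", "b", "c"]),  # new value, JSON-encoded as the handler expects
    },
)
print(resp.json())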
|
||||
|
||||
@ -107,3 +107,15 @@ class UnsupportedFileTypeError(BaseHTTPException):
|
||||
error_code = "unsupported_file_type"
|
||||
description = "File type not allowed."
|
||||
code = 415
|
||||
|
||||
|
||||
class FileNotFoundError(BaseHTTPException):
|
||||
error_code = "file_not_found"
|
||||
description = "The requested file was not found."
|
||||
code = 404
|
||||
|
||||
|
||||
class FileAccessDeniedError(BaseHTTPException):
|
||||
error_code = "file_access_denied"
|
||||
description = "Access to the requested file is denied."
|
||||
code = 403
|
||||
|
||||
@ -20,18 +20,17 @@ class FileApi(Resource):
|
||||
@validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.FORM))
|
||||
@marshal_with(file_fields)
|
||||
def post(self, app_model: App, end_user: EndUser):
|
||||
file = request.files["file"]
|
||||
|
||||
# check file
|
||||
if "file" not in request.files:
|
||||
raise NoFileUploadedError()
|
||||
|
||||
if not file.mimetype:
|
||||
raise UnsupportedFileTypeError()
|
||||
|
||||
if len(request.files) > 1:
|
||||
raise TooManyFilesError()
|
||||
|
||||
file = request.files["file"]
|
||||
if not file.mimetype:
|
||||
raise UnsupportedFileTypeError()
|
||||
|
||||
if not file.filename:
|
||||
raise FilenameNotExistsError
|
||||
|
||||
|
||||
api/controllers/service_api/app/file_preview.py (new file, 186 additions)
@@ -0,0 +1,186 @@
|
||||
import logging
|
||||
from urllib.parse import quote
|
||||
|
||||
from flask import Response
|
||||
from flask_restful import Resource, reqparse
|
||||
|
||||
from controllers.service_api import api
|
||||
from controllers.service_api.app.error import (
|
||||
FileAccessDeniedError,
|
||||
FileNotFoundError,
|
||||
)
|
||||
from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token
|
||||
from extensions.ext_database import db
|
||||
from extensions.ext_storage import storage
|
||||
from models.model import App, EndUser, Message, MessageFile, UploadFile
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FilePreviewApi(Resource):
|
||||
"""
|
||||
Service API File Preview endpoint
|
||||
|
||||
Provides secure file preview/download functionality for external API users.
|
||||
Files can only be accessed if they belong to messages within the requesting app's context.
|
||||
"""
|
||||
|
||||
@validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY))
|
||||
def get(self, app_model: App, end_user: EndUser, file_id: str):
|
||||
"""
|
||||
Preview/Download a file that was uploaded via Service API
|
||||
|
||||
Args:
|
||||
app_model: The authenticated app model
|
||||
end_user: The authenticated end user (optional)
|
||||
file_id: UUID of the file to preview
|
||||
|
||||
Query Parameters:
|
||||
user: Optional user identifier
|
||||
as_attachment: Boolean, whether to download as attachment (default: false)
|
||||
|
||||
Returns:
|
||||
Stream response with file content
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: File does not exist
|
||||
FileAccessDeniedError: File access denied (not owned by app)
|
||||
"""
|
||||
file_id = str(file_id)
|
||||
|
||||
# Parse query parameters
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("as_attachment", type=bool, required=False, default=False, location="args")
|
||||
args = parser.parse_args()
|
||||
|
||||
# Validate file ownership and get file objects
|
||||
message_file, upload_file = self._validate_file_ownership(file_id, app_model.id)
|
||||
|
||||
# Get file content generator
|
||||
try:
|
||||
generator = storage.load(upload_file.key, stream=True)
|
||||
except Exception as e:
|
||||
raise FileNotFoundError(f"Failed to load file content: {str(e)}")
|
||||
|
||||
# Build response with appropriate headers
|
||||
response = self._build_file_response(generator, upload_file, args["as_attachment"])
|
||||
|
||||
return response
|
||||
|
||||
def _validate_file_ownership(self, file_id: str, app_id: str) -> tuple[MessageFile, UploadFile]:
|
||||
"""
|
||||
Validate that the file belongs to a message within the requesting app's context
|
||||
|
||||
Security validations performed:
|
||||
1. File exists in MessageFile table (was used in a conversation)
|
||||
2. Message belongs to the requesting app
|
||||
3. UploadFile record exists and is accessible
|
||||
4. File tenant matches app tenant (additional security layer)
|
||||
|
||||
Args:
|
||||
file_id: UUID of the file to validate
|
||||
app_id: UUID of the requesting app
|
||||
|
||||
Returns:
|
||||
Tuple of (MessageFile, UploadFile) if validation passes
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: File or related records not found
|
||||
FileAccessDeniedError: File does not belong to the app's context
|
||||
"""
|
||||
try:
|
||||
# Input validation
|
||||
if not file_id or not app_id:
|
||||
raise FileAccessDeniedError("Invalid file or app identifier")
|
||||
|
||||
# First, find the MessageFile that references this upload file
|
||||
message_file = db.session.query(MessageFile).where(MessageFile.upload_file_id == file_id).first()
|
||||
|
||||
if not message_file:
|
||||
raise FileNotFoundError("File not found in message context")
|
||||
|
||||
# Get the message and verify it belongs to the requesting app
|
||||
message = (
|
||||
db.session.query(Message).where(Message.id == message_file.message_id, Message.app_id == app_id).first()
|
||||
)
|
||||
|
||||
if not message:
|
||||
raise FileAccessDeniedError("File access denied: not owned by requesting app")
|
||||
|
||||
# Get the actual upload file record
|
||||
upload_file = db.session.query(UploadFile).where(UploadFile.id == file_id).first()
|
||||
|
||||
if not upload_file:
|
||||
raise FileNotFoundError("Upload file record not found")
|
||||
|
||||
# Additional security: verify tenant isolation
|
||||
app = db.session.query(App).where(App.id == app_id).first()
|
||||
if app and upload_file.tenant_id != app.tenant_id:
|
||||
raise FileAccessDeniedError("File access denied: tenant mismatch")
|
||||
|
||||
return message_file, upload_file
|
||||
|
||||
except (FileNotFoundError, FileAccessDeniedError):
|
||||
# Re-raise our custom exceptions
|
||||
raise
|
||||
except Exception as e:
|
||||
# Log unexpected errors for debugging
|
||||
logger.exception(
|
||||
"Unexpected error during file ownership validation",
|
||||
extra={"file_id": file_id, "app_id": app_id, "error": str(e)},
|
||||
)
|
||||
raise FileAccessDeniedError("File access validation failed")
|
||||
|
||||
def _build_file_response(self, generator, upload_file: UploadFile, as_attachment: bool = False) -> Response:
|
||||
"""
|
||||
Build Flask Response object with appropriate headers for file streaming
|
||||
|
||||
Args:
|
||||
generator: File content generator from storage
|
||||
upload_file: UploadFile database record
|
||||
as_attachment: Whether to set Content-Disposition as attachment
|
||||
|
||||
Returns:
|
||||
Flask Response object with streaming file content
|
||||
"""
|
||||
response = Response(
|
||||
generator,
|
||||
mimetype=upload_file.mime_type,
|
||||
direct_passthrough=True,
|
||||
headers={},
|
||||
)
|
||||
|
||||
# Add Content-Length if known
|
||||
if upload_file.size and upload_file.size > 0:
|
||||
response.headers["Content-Length"] = str(upload_file.size)
|
||||
|
||||
# Add Accept-Ranges header for audio/video files to support seeking
|
||||
if upload_file.mime_type in [
|
||||
"audio/mpeg",
|
||||
"audio/wav",
|
||||
"audio/mp4",
|
||||
"audio/ogg",
|
||||
"audio/flac",
|
||||
"audio/aac",
|
||||
"video/mp4",
|
||||
"video/webm",
|
||||
"video/quicktime",
|
||||
"audio/x-m4a",
|
||||
]:
|
||||
response.headers["Accept-Ranges"] = "bytes"
|
||||
|
||||
# Set Content-Disposition for downloads
|
||||
if as_attachment and upload_file.name:
|
||||
encoded_filename = quote(upload_file.name)
|
||||
response.headers["Content-Disposition"] = f"attachment; filename*=UTF-8''{encoded_filename}"
|
||||
# Override content-type for downloads to force download
|
||||
response.headers["Content-Type"] = "application/octet-stream"
|
||||
|
||||
# Add caching headers for performance
|
||||
response.headers["Cache-Control"] = "public, max-age=3600" # Cache for 1 hour
|
||||
|
||||
return response
|
||||
|
||||
|
||||
# Register the API endpoint
api.add_resource(FilePreviewApi, "/files/<uuid:file_id>/preview")
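The preview route registered above streams a previously uploaded file back to Service API callers, subject to the ownership checks in _validate_file_ownership. A minimal sketch of fetching a file as a download; the API key, host, and file id are placeholders, and the user query parameter matches WhereisUserArg.QUERY used by the endpoint:

import requests

API_KEY = "app-xxxxxxxx"  # placeholder Service API key
BASE = "https://api.example.com/v1"  # assumed Service API base URL
file_id = "11111111-2222-3333-4444-555555555555"  # placeholder UUID of an uploaded file

resp = requests.get(
    f"{BASE}/files/{file_id}/preview",
    headers={"Authorization": f"Bearer {API_KEY}"},
    params={"user": "end-user-123", "as_attachment": "true"},
    stream=True,
)
with open("downloaded_file", "wb") as f:
    for chunk in resp.iter_content(chunk_size=8192):
        f.write(chunk)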
|
||||
@ -15,7 +15,11 @@ from fields.message_fields import agent_thought_fields, feedback_fields
|
||||
from fields.raws import FilesContainedField
|
||||
from libs.helper import TimestampField, uuid_value
|
||||
from models.model import App, AppMode, EndUser
|
||||
from services.errors.message import SuggestedQuestionsAfterAnswerDisabledError
|
||||
from services.errors.message import (
|
||||
FirstMessageNotExistsError,
|
||||
MessageNotExistsError,
|
||||
SuggestedQuestionsAfterAnswerDisabledError,
|
||||
)
|
||||
from services.message_service import MessageService
|
||||
|
||||
|
||||
@ -65,7 +69,7 @@ class MessageListApi(Resource):
|
||||
)
|
||||
except services.errors.conversation.ConversationNotExistsError:
|
||||
raise NotFound("Conversation Not Exists.")
|
||||
except services.errors.message.FirstMessageNotExistsError:
|
||||
except FirstMessageNotExistsError:
|
||||
raise NotFound("First Message Not Exists.")
|
||||
|
||||
|
||||
@ -87,7 +91,7 @@ class MessageFeedbackApi(Resource):
|
||||
rating=args.get("rating"),
|
||||
content=args.get("content"),
|
||||
)
|
||||
except services.errors.message.MessageNotExistsError:
|
||||
except MessageNotExistsError:
|
||||
raise NotFound("Message Not Exists.")
|
||||
|
||||
return {"result": "success"}
|
||||
@ -117,7 +121,7 @@ class MessageSuggestedApi(Resource):
|
||||
questions = MessageService.get_suggested_questions_after_answer(
|
||||
app_model=app_model, user=end_user, message_id=message_id, invoke_from=InvokeFrom.SERVICE_API
|
||||
)
|
||||
except services.errors.message.MessageNotExistsError:
|
||||
except MessageNotExistsError:
|
||||
raise NotFound("Message Not Exists.")
|
||||
except SuggestedQuestionsAfterAnswerDisabledError:
|
||||
raise BadRequest("Suggested Questions Is Disabled.")
|
||||
|
||||
@@ -16,7 +16,7 @@ class AppSiteApi(Resource):
     @marshal_with(fields.site_fields)
     def get(self, app_model: App):
         """Retrieve app site info."""
-        site = db.session.query(Site).filter(Site.app_id == app_model.id).first()
+        site = db.session.query(Site).where(Site.app_id == app_model.id).first()

         if not site:
             raise Forbidden()

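Note on the hunk above: in SQLAlchemy 1.4+, Query.where() is a synonym for Query.filter(), so swapping .filter() for .where() is a pure rename that emits identical SQL. A self-contained sketch of the equivalence, using a stand-in model (not dify's Site):

from sqlalchemy import String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class Site(Base):  # stand-in declarative model for illustration only
    __tablename__ = "sites"
    id: Mapped[int] = mapped_column(primary_key=True)
    app_id: Mapped[str] = mapped_column(String(36))

engine = create_engine("sqlite://")  # throwaway in-memory database
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Site(app_id="abc"))
    session.commit()
    # Both calls emit the same SQL; .where() is simply the 2.0-style spelling.
    by_filter = session.query(Site).filter(Site.app_id == "abc").first()
    by_where = session.query(Site).where(Site.app_id == "abc").first()
    assert by_filter is by_where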
@@ -1,10 +1,11 @@
 import logging

 from dateutil.parser import isoparse
 from flask import request
 from flask_restful import Resource, fields, marshal_with, reqparse
 from flask_restful.inputs import int_range
 from sqlalchemy.orm import Session, sessionmaker
-from werkzeug.exceptions import InternalServerError
+from werkzeug.exceptions import BadRequest, InternalServerError, NotFound

 from controllers.service_api import api
 from controllers.service_api.app.error import (
@@ -23,6 +24,7 @@ from core.errors.error import (
     ProviderTokenNotInitError,
     QuotaExceededError,
 )
+from core.helper.trace_id_helper import get_external_trace_id
 from core.model_runtime.errors.invoke import InvokeError
 from core.workflow.entities.workflow_execution import WorkflowExecutionStatus
 from extensions.ext_database import db

@@ -32,6 +34,7 @@ from libs.helper import TimestampField
 from models.model import App, AppMode, EndUser
 from repositories.factory import DifyAPIRepositoryFactory
 from services.app_generate_service import AppGenerateService
+from services.errors.app import IsDraftWorkflowError, WorkflowIdFormatError, WorkflowNotFoundError
 from services.errors.llm import InvokeRateLimitError
 from services.workflow_app_service import WorkflowAppService

@@ -90,7 +93,9 @@ class WorkflowRunApi(Resource):
         parser.add_argument("files", type=list, required=False, location="json")
         parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
         args = parser.parse_args()
-
+        external_trace_id = get_external_trace_id(request)
+        if external_trace_id:
+            args["external_trace_id"] = external_trace_id
         streaming = args.get("response_mode") == "streaming"

         try:

@@ -116,6 +121,59 @@
         raise InternalServerError()


+class WorkflowRunByIdApi(Resource):
+    @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON, required=True))
+    def post(self, app_model: App, end_user: EndUser, workflow_id: str):
+        """
+        Run specific workflow by ID
+        """
+        app_mode = AppMode.value_of(app_model.mode)
+        if app_mode != AppMode.WORKFLOW:
+            raise NotWorkflowAppError()
+
+        parser = reqparse.RequestParser()
+        parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
+        parser.add_argument("files", type=list, required=False, location="json")
+        parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
+        args = parser.parse_args()
+
+        # Add workflow_id to args for AppGenerateService
+        args["workflow_id"] = workflow_id
+
+        external_trace_id = get_external_trace_id(request)
+        if external_trace_id:
+            args["external_trace_id"] = external_trace_id
+        streaming = args.get("response_mode") == "streaming"
+
+        try:
+            response = AppGenerateService.generate(
+                app_model=app_model, user=end_user, args=args, invoke_from=InvokeFrom.SERVICE_API, streaming=streaming
+            )
+
+            return helper.compact_generate_response(response)
+        except WorkflowNotFoundError as ex:
+            raise NotFound(str(ex))
+        except IsDraftWorkflowError as ex:
+            raise BadRequest(str(ex))
+        except WorkflowIdFormatError as ex:
+            raise BadRequest(str(ex))
+        except ProviderTokenNotInitError as ex:
+            raise ProviderNotInitializeError(ex.description)
+        except QuotaExceededError:
+            raise ProviderQuotaExceededError()
+        except ModelCurrentlyNotSupportError:
+            raise ProviderModelCurrentlyNotSupportError()
+        except InvokeRateLimitError as ex:
+            raise InvokeRateLimitHttpError(ex.description)
+        except InvokeError as e:
+            raise CompletionRequestError(e.description)
+        except ValueError as e:
+            raise e
+        except Exception:
+            logging.exception("internal server error.")
+            raise InternalServerError()
+
+
 class WorkflowTaskStopApi(Resource):
     @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON, required=True))
     def post(self, app_model: App, end_user: EndUser, task_id: str):

@@ -189,5 +247,6 @@ class WorkflowAppLogApi(Resource):

 api.add_resource(WorkflowRunApi, "/workflows/run")
 api.add_resource(WorkflowRunDetailApi, "/workflows/run/<string:workflow_run_id>")
+api.add_resource(WorkflowRunByIdApi, "/workflows/<string:workflow_id>/run")
 api.add_resource(WorkflowTaskStopApi, "/workflows/tasks/<string:task_id>/stop")
 api.add_resource(WorkflowAppLogApi, "/workflows/logs")
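A hedged usage sketch for the route added above (POST /workflows/<workflow_id>/run). The host, API key, and workflow UUID are placeholders; the "/v1" prefix assumes the standard service API mount point, and the "user" field follows the service API convention of resolving the end user from the JSON body, as the validate_app_token decorator in the new class indicates.

import requests  # illustration only

resp = requests.post(
    "https://dify.example.com/v1/workflows/11111111-2222-3333-4444-555555555555/run",  # placeholder host and workflow UUID
    headers={"Authorization": "Bearer app-xxxxxxxx"},  # placeholder service API token
    json={
        "inputs": {},                  # required: dict, non-null (see the parser in the new class)
        "response_mode": "blocking",   # or "streaming"
        "user": "end-user-123",        # end user identifier, fetched from the JSON body per the decorator
    },
)
print(resp.status_code)
print(resp.json() if resp.ok else resp.text)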
Some files were not shown because too many files have changed in this diff.