Mirror of https://github.com/langgenius/dify.git, synced 2026-04-01 19:37:50 +08:00.

Compare commits: deploy/age...feat/evalu, 212 commits.
Commits (SHA1):

268ae1751d 19530e880a dbdbb098d5 2c8b47ce44 cf50d7c7b5 d9a0665b2c b818cc0766 90f94be2b3 24111facdd 015cbf850b
873e13c2fb 424d34a9c0 688bf7e7a1 a6ffff3b39 023fc55bd5 351b909a53 fbd2d31624 b54a0dc1e4 6bec4f65c9 f27d669f87
74f87ce152 fcf04629d3 6b0c6d0cde 1063e021f2 303f548408 cc68f0e640 9b7b432e08 88863609e9 adc6c6c13b 2de818530b
7e4754392d 01c857a67a 2c2cc72150 f7b78b08fd f0e6f11c1c a19243068b 323c51e095 bbc3f90928 1344c3b280 5897b28355
15aa8071f8 097095a69b daebe26089 c58170f5b8 3a7885819d 5fc4dfaf7b 953bcc33b1 bc14ad6a8f cc89b57c1f 623c8ae803
dede190be2 a1513f06c3 3c7180bfd5 51f6ca2bed ae9a16a397 52a4bea88f 1aaba80211 944db46d4f 456684dfc3 40fa0f365c
2cb71ad443 92c472ccc7 b92b8becd1 8a277da278 7dd802201f 79b952ea08 397165a524 dc3f13991a 8ef657531e b40a4c27d3
f0be15ded8 7fc161f781 2b54d205fe 23d0d6a65d 1940d05e64 1660067d6e 0642475b85 905288423f 62376f507b 8cb634c9bc
768b41c3cf ca88516d54 51c8dad753 871a2a149f 60e381eff0 540906fb8a b642f5c3e5 b36b077d42 fe9c2b0e4b 548cadacff
768b3eb6f9 2f88da4a6d a8cdf6964c 985c3db4fd 9636472db7 0ad268aa7d a4ea33167d 0f13aabea8 1e76ef5ccb e6e3229d17
dccf8e723a c41ba7d627 a6e9316de3 a1171877a4 f06cc339cc 6bf8982559 364d7ebc40 7cc81e9a43 3409c519e2 5851b42af3
c5eae67ac9 865ee473ce 08e8145975 ec0f20de03 40591a7c50 32d394d65b 66fab8722c 559d326cbd abedf2506f d01428b5bc
5a8a68cab8 0de1f17e5c 17d07a5a43 3bdbea99a3 689761bfcb 2394e45ec7 01e6a3a9d9 07f4950cb3 368896d84d 408f650b0c
7c2e1fa3e2 1da66b9a8c 4953762f4e 97764c4a57 2ea85d3ba2 1f11300175 f317db525f 3fa0538f72 b7683aedb1 515036e758
22b382527f 2cfe4b5b86 6876c8041c 7de45584ce fcfc96ca05 5572d7c7e8 69c2b422de 496baa9335 e8657cc3de db0a2fe52e
f0ae8d6167 2514e181ba e08c06cbc3 8ca54ddf94 3e073404cc 0acabf5f73 38285aa1ac 5341cd015b c32eebf57d be2e6e9a14
554ba6b8f3 a69b8c1e96 6f3fcf2276 3df4bba280 875e2eac1b 7c0d2e1d98 a9336b74fd c3c73ceb1f 6318bf0a2a 5e1f252046
518937b87f e6ab9abf19 87a25e326c baf7d2c7c0 df3b960505 22dd0aa20c 52e7492cbc 26bc108bf1 a5cff32743 7e9d00a5a6
ff9cf6c7a4 56593f20b0 b7b9b003c9 d418dd8eec 59639ca9b2 66b8c42a25 61702fe346 43f0c780c3 30ebf2bfa9 7e3027b5f7
b3acf83090 36c3d6e48a f782ac6b3c feef2dd1fa a716d8789d 6816f89189 bfcac64a9d 664eb601a2 8e5cc4e0aa 9f28575903
4b9a26a5e6 7b85adf1cc c964708ebe 883eb498c0 4d3738d225 dd0dee739d 4d19914fcb 887c7710e9 7a722773c7 a763aff58b
c1011f4e5c f7afa103a5
@@ -64,7 +64,7 @@ export const useUpdateAccessMode = () => {
  // Component only adds UI behavior.
  updateAccessMode({ appId, mode }, {
    onSuccess: () => toast.success('...'),
    onSuccess: () => Toast.notify({ type: 'success', message: '...' }),
  })

  // Avoid putting invalidation knowledge in the component.
@@ -114,7 +114,10 @@ try {
  router.push(`/orders/${order.id}`)
}
catch (error) {
  toast.error(error instanceof Error ? error.message : 'Unknown error')
  Toast.notify({
    type: 'error',
    message: error instanceof Error ? error.message : 'Unknown error',
  })
}
```
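Taken together, the two hunks above migrate ad-hoc `toast.*` calls to the shared `Toast.notify` API, keep cache invalidation out of components, and repeat the same `error instanceof Error` narrowing at each call site. A minimal TypeScript sketch of how those pieces might fit together, assuming the hook wraps a TanStack Query mutation; the import path, query key, endpoint, and the `notifyError` helper are illustrative and not Dify's actual code:

```ts
import { useMutation, useQueryClient } from '@tanstack/react-query'
import Toast from '@/app/components/base/toast' // assumed location of the shared Toast module

// Hypothetical helper: one place for the unknown-error narrowing seen above.
export const notifyError = (error: unknown) => {
  Toast.notify({
    type: 'error',
    message: error instanceof Error ? error.message : 'Unknown error',
  })
}

type AccessModeArgs = { appId: string; mode: string }

export const useUpdateAccessMode = () => {
  const queryClient = useQueryClient()
  return useMutation({
    // Illustrative endpoint; the real service call lives elsewhere in the codebase.
    mutationFn: async ({ appId, mode }: AccessModeArgs) => {
      const res = await fetch(`/apps/${appId}/access-mode`, {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ mode }),
      })
      if (!res.ok)
        throw new Error(`Request failed with status ${res.status}`)
      return res.json()
    },
    // Invalidation knowledge stays in the hook, not in the component.
    onSuccess: (_data, { appId }) =>
      queryClient.invalidateQueries({ queryKey: ['access-mode', appId] }),
    onError: notifyError,
  })
}
```

A component can then call `updateAccessMode({ appId, mode }, { onSuccess: () => Toast.notify({ type: 'success', message: '...' }) })` exactly as in the first hunk, with no cache or error-shape knowledge of its own.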
@@ -7,7 +7,7 @@ cd web && pnpm install
pipx install uv

echo "alias start-api=\"cd $WORKSPACE_ROOT/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug\"" >> ~/.bashrc
echo "alias start-worker=\"cd $WORKSPACE_ROOT/api && uv run python -m celery -A app.celery worker -P threads -c 1 --loglevel INFO -Q dataset,dataset_summary,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention,workflow_based_app_execution\"" >> ~/.bashrc
echo "alias start-worker=\"cd $WORKSPACE_ROOT/api && uv run python -m celery -A app.celery worker -P threads -c 1 --loglevel INFO -Q dataset,dataset_summary,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention\"" >> ~/.bashrc
echo "alias start-web=\"cd $WORKSPACE_ROOT/web && pnpm dev:inspect\"" >> ~/.bashrc
echo "alias start-web-prod=\"cd $WORKSPACE_ROOT/web && pnpm build && pnpm start\"" >> ~/.bashrc
echo "alias start-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d\"" >> ~/.bashrc
.github/CODEOWNERS (vendored, 1 line changed)
@@ -36,7 +36,6 @@
/api/core/workflow/graph/ @laipz8200 @QuantumGhost
/api/core/workflow/graph_events/ @laipz8200 @QuantumGhost
/api/core/workflow/node_events/ @laipz8200 @QuantumGhost
/api/dify_graph/model_runtime/ @laipz8200 @QuantumGhost

# Backend - Workflow - Nodes (Agent, Iteration, Loop, LLM)
/api/core/workflow/nodes/agent/ @Nov1c444
.github/actions/setup-web/action.yml (vendored, 1 line changed)
@@ -6,7 +6,6 @@ runs:
  - name: Setup Vite+
    uses: voidzero-dev/setup-vp@20553a7a7429c429a74894104a2835d7fed28a72 # v1.3.0
    with:
      working-directory: web
      node-version-file: .nvmrc
      cache: true
      run-install: true
.github/workflows/api-tests.yml (vendored, 141 lines changed)
@@ -14,18 +14,17 @@ concurrency:
  cancel-in-progress: true

jobs:
  test:
    name: API Tests
  api-unit:
    name: API Unit Tests
    runs-on: ubuntu-latest
    env:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      COVERAGE_FILE: coverage-unit
    defaults:
      run:
        shell: bash
    strategy:
      matrix:
        python-version:
          - "3.11"
          - "3.12"

    steps:
@@ -36,7 +35,7 @@ jobs:
          persist-credentials: false

      - name: Setup UV and Python
        uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0
        uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
        with:
          enable-cache: true
          python-version: ${{ matrix.python-version }}
@@ -51,6 +50,52 @@ jobs:
      - name: Run dify config tests
        run: uv run --project api dev/pytest/pytest_config_tests.py

      - name: Run Unit Tests
        run: uv run --project api bash dev/pytest/pytest_unit_tests.sh

      - name: Upload unit coverage data
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: api-coverage-unit
          path: coverage-unit
          retention-days: 1

  api-integration:
    name: API Integration Tests
    runs-on: ubuntu-latest
    env:
      COVERAGE_FILE: coverage-integration
      STORAGE_TYPE: opendal
      OPENDAL_SCHEME: fs
      OPENDAL_FS_ROOT: /tmp/dify-storage
    defaults:
      run:
        shell: bash
    strategy:
      matrix:
        python-version:
          - "3.12"

    steps:
      - name: Checkout code
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          fetch-depth: 0
          persist-credentials: false

      - name: Setup UV and Python
        uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
        with:
          enable-cache: true
          python-version: ${{ matrix.python-version }}
          cache-dependency-glob: api/uv.lock

      - name: Check UV lockfile
        run: uv lock --project api --check

      - name: Install dependencies
        run: uv sync --project api --dev

      - name: Set up dotenvs
        run: |
          cp docker/.env.example docker/.env
@@ -74,23 +119,91 @@ jobs:
        run: |
          cp api/tests/integration_tests/.env.example api/tests/integration_tests/.env

      - name: Run API Tests
        env:
          STORAGE_TYPE: opendal
          OPENDAL_SCHEME: fs
          OPENDAL_FS_ROOT: /tmp/dify-storage
      - name: Run Integration Tests
        run: |
          uv run --project api pytest \
            -n auto \
            --timeout "${PYTEST_TIMEOUT:-180}" \
            api/tests/integration_tests/workflow \
            api/tests/integration_tests/tools \
            api/tests/test_containers_integration_tests \
            api/tests/unit_tests
            api/tests/test_containers_integration_tests

      - name: Upload integration coverage data
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: api-coverage-integration
          path: coverage-integration
          retention-days: 1

  api-coverage:
    name: API Coverage
    runs-on: ubuntu-latest
    needs:
      - api-unit
      - api-integration
    env:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      COVERAGE_FILE: .coverage
    defaults:
      run:
        shell: bash

    steps:
      - name: Checkout code
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          fetch-depth: 0
          persist-credentials: false

      - name: Setup UV and Python
        uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
        with:
          enable-cache: true
          python-version: "3.12"
          cache-dependency-glob: api/uv.lock

      - name: Install dependencies
        run: uv sync --project api --dev

      - name: Download coverage data
        uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
        with:
          path: coverage-data
          pattern: api-coverage-*
          merge-multiple: true

      - name: Combine coverage
        run: |
          set -euo pipefail

          echo "### API Coverage" >> "$GITHUB_STEP_SUMMARY"
          echo "" >> "$GITHUB_STEP_SUMMARY"
          echo "Merged backend coverage report generated for Codecov project status." >> "$GITHUB_STEP_SUMMARY"
          echo "" >> "$GITHUB_STEP_SUMMARY"

          unit_coverage="$(find coverage-data -type f -name coverage-unit -print -quit)"
          integration_coverage="$(find coverage-data -type f -name coverage-integration -print -quit)"
          : "${unit_coverage:?coverage-unit artifact not found}"
          : "${integration_coverage:?coverage-integration artifact not found}"

          report_file="$(mktemp)"
          uv run --project api coverage combine "$unit_coverage" "$integration_coverage"
          uv run --project api coverage report --show-missing | tee "$report_file"
          echo "Summary: \`$(tail -n 1 "$report_file")\`" >> "$GITHUB_STEP_SUMMARY"
          {
            echo ""
            echo "<details><summary>Coverage report</summary>"
            echo ""
            echo '```'
            cat "$report_file"
            echo '```'
            echo "</details>"
          } >> "$GITHUB_STEP_SUMMARY"
          uv run --project api coverage xml -o coverage.xml

      - name: Report coverage
        if: ${{ env.CODECOV_TOKEN != '' && matrix.python-version == '3.12' }}
        uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5.5.3
        if: ${{ env.CODECOV_TOKEN != '' }}
        uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
        with:
          files: ./coverage.xml
          disable_search: true
.github/workflows/autofix.yml (vendored, 38 lines changed)
@@ -2,6 +2,9 @@ name: autofix.ci
on:
  pull_request:
    branches: ["main"]
  merge_group:
    branches: ["main"]
    types: [checks_requested]
  push:
    branches: ["main"]
permissions:
@@ -12,9 +15,15 @@ jobs:
    if: github.repository == 'langgenius/dify'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      - name: Complete merge group check
        if: github.event_name == 'merge_group'
        run: echo "autofix.ci updates pull request branches, not merge group refs."

      - if: github.event_name != 'merge_group'
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

      - name: Check Docker Compose inputs
        if: github.event_name != 'merge_group'
        id: docker-compose-changes
        uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
        with:
@@ -24,30 +33,38 @@ jobs:
            docker/docker-compose-template.yaml
            docker/docker-compose.yaml
      - name: Check web inputs
        if: github.event_name != 'merge_group'
        id: web-changes
        uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
        with:
          files: |
            web/**
            package.json
            pnpm-lock.yaml
            pnpm-workspace.yaml
            .nvmrc
      - name: Check api inputs
        if: github.event_name != 'merge_group'
        id: api-changes
        uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
        with:
          files: |
            api/**
      - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
      - if: github.event_name != 'merge_group'
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: "3.11"

      - uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0
      - if: github.event_name != 'merge_group'
        uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0

      - name: Generate Docker Compose
        if: steps.docker-compose-changes.outputs.any_changed == 'true'
        if: github.event_name != 'merge_group' && steps.docker-compose-changes.outputs.any_changed == 'true'
        run: |
          cd docker
          ./generate_docker_compose

      - if: steps.api-changes.outputs.any_changed == 'true'
      - if: github.event_name != 'merge_group' && steps.api-changes.outputs.any_changed == 'true'
        run: |
          cd api
          uv sync --dev
@@ -59,13 +76,13 @@ jobs:
          uv run ruff format ..

      - name: count migration progress
        if: steps.api-changes.outputs.any_changed == 'true'
        if: github.event_name != 'merge_group' && steps.api-changes.outputs.any_changed == 'true'
        run: |
          cd api
          ./cnt_base.sh

      - name: ast-grep
        if: steps.api-changes.outputs.any_changed == 'true'
        if: github.event_name != 'merge_group' && steps.api-changes.outputs.any_changed == 'true'
        run: |
          # ast-grep exits 1 if no matches are found; allow idempotent runs.
          uvx --from ast-grep-cli ast-grep --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all || true
@@ -95,13 +112,14 @@ jobs:
          find . -name "*.py.bak" -type f -delete

      - name: Setup web environment
        if: steps.web-changes.outputs.any_changed == 'true'
        if: github.event_name != 'merge_group' && steps.web-changes.outputs.any_changed == 'true'
        uses: ./.github/actions/setup-web

      - name: ESLint autofix
        if: steps.web-changes.outputs.any_changed == 'true'
        if: github.event_name != 'merge_group' && steps.web-changes.outputs.any_changed == 'true'
        run: |
          cd web
          vp exec eslint --concurrency=2 --prune-suppressions --quiet || true

      - uses: autofix-ci/action@7a166d7532b277f34e16238930461bf77f9d7ed8 # v1.3.3
      - if: github.event_name != 'merge_group'
        uses: autofix-ci/action@7a166d7532b277f34e16238930461bf77f9d7ed8 # v1.3.3
.github/workflows/build-push.yml (vendored, 34 lines changed)
@@ -24,27 +24,39 @@ env:

jobs:
  build:
    runs-on: ${{ matrix.platform == 'linux/arm64' && 'arm64_runner' || 'ubuntu-latest' }}
    runs-on: ${{ matrix.runs_on }}
    if: github.repository == 'langgenius/dify'
    strategy:
      matrix:
        include:
          - service_name: "build-api-amd64"
            image_name_env: "DIFY_API_IMAGE_NAME"
            context: "api"
            artifact_context: "api"
            build_context: "{{defaultContext}}:api"
            file: "Dockerfile"
            platform: linux/amd64
            runs_on: ubuntu-latest
          - service_name: "build-api-arm64"
            image_name_env: "DIFY_API_IMAGE_NAME"
            context: "api"
            artifact_context: "api"
            build_context: "{{defaultContext}}:api"
            file: "Dockerfile"
            platform: linux/arm64
            runs_on: ubuntu-24.04-arm
          - service_name: "build-web-amd64"
            image_name_env: "DIFY_WEB_IMAGE_NAME"
            context: "web"
            artifact_context: "web"
            build_context: "{{defaultContext}}"
            file: "web/Dockerfile"
            platform: linux/amd64
            runs_on: ubuntu-latest
          - service_name: "build-web-arm64"
            image_name_env: "DIFY_WEB_IMAGE_NAME"
            context: "web"
            artifact_context: "web"
            build_context: "{{defaultContext}}"
            file: "web/Dockerfile"
            platform: linux/arm64
            runs_on: ubuntu-24.04-arm

    steps:
      - name: Prepare
@@ -58,9 +70,6 @@ jobs:
          username: ${{ env.DOCKERHUB_USER }}
          password: ${{ env.DOCKERHUB_TOKEN }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
@@ -74,11 +83,10 @@ jobs:
        id: build
        uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
        with:
          context: "{{defaultContext}}:${{ matrix.context }}"
          context: ${{ matrix.build_context }}
          file: ${{ matrix.file }}
          platforms: ${{ matrix.platform }}
          build-args: |
            COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
            ENABLE_PROD_SOURCEMAP=${{ matrix.context == 'web' && github.ref_name == 'deploy/dev' }}
          build-args: COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
          labels: ${{ steps.meta.outputs.labels }}
          outputs: type=image,name=${{ env[matrix.image_name_env] }},push-by-digest=true,name-canonical=true,push=true
          cache-from: type=gha,scope=${{ matrix.service_name }}
@@ -95,7 +103,7 @@ jobs:
      - name: Upload digest
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: digests-${{ matrix.context }}-${{ env.PLATFORM_PAIR }}
          name: digests-${{ matrix.artifact_context }}-${{ env.PLATFORM_PAIR }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1
.github/workflows/db-migration-test.yml (vendored, 4 lines changed)
@@ -19,7 +19,7 @@ jobs:
          persist-credentials: false

      - name: Setup UV and Python
        uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0
        uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
        with:
          enable-cache: true
          python-version: "3.12"
@@ -69,7 +69,7 @@ jobs:
          persist-credentials: false

      - name: Setup UV and Python
        uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0
        uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
        with:
          enable-cache: true
          python-version: "3.12"
.github/workflows/docker-build.yml (vendored, 30 lines changed)
@@ -6,7 +6,12 @@ on:
      - "main"
    paths:
      - api/Dockerfile
      - web/docker/**
      - web/Dockerfile
      - package.json
      - pnpm-lock.yaml
      - pnpm-workspace.yaml
      - .nvmrc

concurrency:
  group: docker-build-${{ github.head_ref || github.run_id }}
@@ -14,26 +19,31 @@ concurrency:

jobs:
  build-docker:
    runs-on: ubuntu-latest
    runs-on: ${{ matrix.runs_on }}
    strategy:
      matrix:
        include:
          - service_name: "api-amd64"
            platform: linux/amd64
            context: "api"
            runs_on: ubuntu-latest
            context: "{{defaultContext}}:api"
            file: "Dockerfile"
          - service_name: "api-arm64"
            platform: linux/arm64
            context: "api"
            runs_on: ubuntu-24.04-arm
            context: "{{defaultContext}}:api"
            file: "Dockerfile"
          - service_name: "web-amd64"
            platform: linux/amd64
            context: "web"
            runs_on: ubuntu-latest
            context: "{{defaultContext}}"
            file: "web/Dockerfile"
          - service_name: "web-arm64"
            platform: linux/arm64
            context: "web"
            runs_on: ubuntu-24.04-arm
            context: "{{defaultContext}}"
            file: "web/Dockerfile"
    steps:
      - name: Set up QEMU
        uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
@@ -41,8 +51,8 @@ jobs:
        uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
        with:
          push: false
          context: "{{defaultContext}}:${{ matrix.context }}"
          file: "${{ matrix.file }}"
          context: ${{ matrix.context }}
          file: ${{ matrix.file }}
          platforms: ${{ matrix.platform }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
.github/workflows/main-ci.yml (vendored, 377 lines changed)
@@ -3,10 +3,14 @@ name: Main CI Pipeline
on:
  pull_request:
    branches: ["main"]
  merge_group:
    branches: ["main"]
    types: [checks_requested]
  push:
    branches: ["main"]

permissions:
  actions: write
  contents: write
  pull-requests: write
  checks: write
@@ -17,12 +21,28 @@ concurrency:
  cancel-in-progress: true

jobs:
  pre_job:
    name: Skip Duplicate Checks
    runs-on: ubuntu-latest
    outputs:
      should_skip: ${{ steps.skip_check.outputs.should_skip || 'false' }}
    steps:
      - id: skip_check
        continue-on-error: true
        uses: fkirc/skip-duplicate-actions@f75f66ce1886f00957d99748a42c724f4330bdcf # v5.3.1
        with:
          cancel_others: 'true'
          concurrent_skipping: same_content_newer

  # Check which paths were changed to determine which tests to run
  check-changes:
    name: Check Changed Files
    needs: pre_job
    if: needs.pre_job.outputs.should_skip != 'true'
    runs-on: ubuntu-latest
    outputs:
      api-changed: ${{ steps.changes.outputs.api }}
      e2e-changed: ${{ steps.changes.outputs.e2e }}
      web-changed: ${{ steps.changes.outputs.web }}
      vdb-changed: ${{ steps.changes.outputs.vdb }}
      migration-changed: ${{ steps.changes.outputs.migration }}
@@ -34,49 +54,372 @@ jobs:
        filters: |
          api:
            - 'api/**'
            - 'docker/**'
            - '.github/workflows/api-tests.yml'
            - '.github/workflows/expose_service_ports.sh'
            - 'docker/.env.example'
            - 'docker/middleware.env.example'
            - 'docker/docker-compose.middleware.yaml'
            - 'docker/docker-compose-template.yaml'
            - 'docker/generate_docker_compose'
            - 'docker/ssrf_proxy/**'
            - 'docker/volumes/sandbox/conf/**'
          web:
            - 'web/**'
            - 'package.json'
            - 'pnpm-lock.yaml'
            - 'pnpm-workspace.yaml'
            - '.nvmrc'
            - '.github/workflows/web-tests.yml'
            - '.github/actions/setup-web/**'
          e2e:
            - 'api/**'
            - 'api/pyproject.toml'
            - 'api/uv.lock'
            - 'e2e/**'
            - 'web/**'
            - 'package.json'
            - 'pnpm-lock.yaml'
            - 'pnpm-workspace.yaml'
            - '.nvmrc'
            - 'docker/docker-compose.middleware.yaml'
            - 'docker/middleware.env.example'
            - '.github/workflows/web-e2e.yml'
            - '.github/actions/setup-web/**'
          vdb:
            - 'api/core/rag/datasource/**'
            - 'docker/**'
            - 'api/tests/integration_tests/vdb/**'
            - '.github/workflows/vdb-tests.yml'
            - '.github/workflows/expose_service_ports.sh'
            - 'docker/.env.example'
            - 'docker/middleware.env.example'
            - 'docker/docker-compose.yaml'
            - 'docker/docker-compose-template.yaml'
            - 'docker/generate_docker_compose'
            - 'docker/certbot/**'
            - 'docker/couchbase-server/**'
            - 'docker/elasticsearch/**'
            - 'docker/iris/**'
            - 'docker/nginx/**'
            - 'docker/pgvector/**'
            - 'docker/ssrf_proxy/**'
            - 'docker/startupscripts/**'
            - 'docker/tidb/**'
            - 'docker/volumes/**'
            - 'api/uv.lock'
            - 'api/pyproject.toml'
          migration:
            - 'api/migrations/**'
            - 'api/.env.example'
            - '.github/workflows/db-migration-test.yml'
            - '.github/workflows/expose_service_ports.sh'
            - 'docker/.env.example'
            - 'docker/middleware.env.example'
            - 'docker/docker-compose.middleware.yaml'
            - 'docker/docker-compose-template.yaml'
            - 'docker/generate_docker_compose'
            - 'docker/ssrf_proxy/**'
            - 'docker/volumes/sandbox/conf/**'

  # Run tests in parallel
  api-tests:
    name: API Tests
    needs: check-changes
    if: needs.check-changes.outputs.api-changed == 'true'
  # Run tests in parallel while always emitting stable required checks.
  api-tests-run:
    name: Run API Tests
    needs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.api-changed == 'true'
    uses: ./.github/workflows/api-tests.yml
    secrets: inherit

  web-tests:
    name: Web Tests
    needs: check-changes
    if: needs.check-changes.outputs.web-changed == 'true'
  api-tests-skip:
    name: Skip API Tests
    needs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.api-changed != 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Report skipped API tests
        run: echo "No API-related changes detected; skipping API tests."

  api-tests:
    name: API Tests
    if: ${{ always() }}
    needs:
      - pre_job
      - check-changes
      - api-tests-run
      - api-tests-skip
    runs-on: ubuntu-latest
    steps:
      - name: Finalize API Tests status
        env:
          SHOULD_SKIP_WORKFLOW: ${{ needs.pre_job.outputs.should_skip }}
          TESTS_CHANGED: ${{ needs.check-changes.outputs.api-changed }}
          RUN_RESULT: ${{ needs.api-tests-run.result }}
          SKIP_RESULT: ${{ needs.api-tests-skip.result }}
        run: |
          if [[ "$SHOULD_SKIP_WORKFLOW" == 'true' ]]; then
            echo "API tests were skipped because this workflow run duplicated a successful or newer run."
            exit 0
          fi

          if [[ "$TESTS_CHANGED" == 'true' ]]; then
            if [[ "$RUN_RESULT" == 'success' ]]; then
              echo "API tests ran successfully."
              exit 0
            fi

            echo "API tests were required but finished with result: $RUN_RESULT" >&2
            exit 1
          fi

          if [[ "$SKIP_RESULT" == 'success' ]]; then
            echo "API tests were skipped because no API-related files changed."
            exit 0
          fi

          echo "API tests were not required, but the skip job finished with result: $SKIP_RESULT" >&2
          exit 1

  web-tests-run:
    name: Run Web Tests
    needs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.web-changed == 'true'
    uses: ./.github/workflows/web-tests.yml
    secrets: inherit

  web-tests-skip:
    name: Skip Web Tests
    needs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.web-changed != 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Report skipped web tests
        run: echo "No web-related changes detected; skipping web tests."

  web-tests:
    name: Web Tests
    if: ${{ always() }}
    needs:
      - pre_job
      - check-changes
      - web-tests-run
      - web-tests-skip
    runs-on: ubuntu-latest
    steps:
      - name: Finalize Web Tests status
        env:
          SHOULD_SKIP_WORKFLOW: ${{ needs.pre_job.outputs.should_skip }}
          TESTS_CHANGED: ${{ needs.check-changes.outputs.web-changed }}
          RUN_RESULT: ${{ needs.web-tests-run.result }}
          SKIP_RESULT: ${{ needs.web-tests-skip.result }}
        run: |
          if [[ "$SHOULD_SKIP_WORKFLOW" == 'true' ]]; then
            echo "Web tests were skipped because this workflow run duplicated a successful or newer run."
            exit 0
          fi

          if [[ "$TESTS_CHANGED" == 'true' ]]; then
            if [[ "$RUN_RESULT" == 'success' ]]; then
              echo "Web tests ran successfully."
              exit 0
            fi

            echo "Web tests were required but finished with result: $RUN_RESULT" >&2
            exit 1
          fi

          if [[ "$SKIP_RESULT" == 'success' ]]; then
            echo "Web tests were skipped because no web-related files changed."
            exit 0
          fi

          echo "Web tests were not required, but the skip job finished with result: $SKIP_RESULT" >&2
          exit 1

  web-e2e-run:
    name: Run Web Full-Stack E2E
    needs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.e2e-changed == 'true'
    uses: ./.github/workflows/web-e2e.yml

  web-e2e-skip:
    name: Skip Web Full-Stack E2E
    needs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.e2e-changed != 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Report skipped web full-stack e2e
        run: echo "No E2E-related changes detected; skipping web full-stack E2E."

  web-e2e:
    name: Web Full-Stack E2E
    if: ${{ always() }}
    needs:
      - pre_job
      - check-changes
      - web-e2e-run
      - web-e2e-skip
    runs-on: ubuntu-latest
    steps:
      - name: Finalize Web Full-Stack E2E status
        env:
          SHOULD_SKIP_WORKFLOW: ${{ needs.pre_job.outputs.should_skip }}
          TESTS_CHANGED: ${{ needs.check-changes.outputs.e2e-changed }}
          RUN_RESULT: ${{ needs.web-e2e-run.result }}
          SKIP_RESULT: ${{ needs.web-e2e-skip.result }}
        run: |
          if [[ "$SHOULD_SKIP_WORKFLOW" == 'true' ]]; then
            echo "Web full-stack E2E was skipped because this workflow run duplicated a successful or newer run."
            exit 0
          fi

          if [[ "$TESTS_CHANGED" == 'true' ]]; then
            if [[ "$RUN_RESULT" == 'success' ]]; then
              echo "Web full-stack E2E ran successfully."
              exit 0
            fi

            echo "Web full-stack E2E was required but finished with result: $RUN_RESULT" >&2
            exit 1
          fi

          if [[ "$SKIP_RESULT" == 'success' ]]; then
            echo "Web full-stack E2E was skipped because no E2E-related files changed."
            exit 0
          fi

          echo "Web full-stack E2E was not required, but the skip job finished with result: $SKIP_RESULT" >&2
          exit 1

  style-check:
    name: Style Check
    needs: pre_job
    if: needs.pre_job.outputs.should_skip != 'true'
    uses: ./.github/workflows/style.yml

  vdb-tests-run:
    name: Run VDB Tests
    needs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.vdb-changed == 'true'
    uses: ./.github/workflows/vdb-tests.yml

  vdb-tests-skip:
    name: Skip VDB Tests
    needs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.vdb-changed != 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Report skipped VDB tests
        run: echo "No VDB-related changes detected; skipping VDB tests."

  vdb-tests:
    name: VDB Tests
    needs: check-changes
    if: needs.check-changes.outputs.vdb-changed == 'true'
    uses: ./.github/workflows/vdb-tests.yml
    if: ${{ always() }}
    needs:
      - pre_job
      - check-changes
      - vdb-tests-run
      - vdb-tests-skip
    runs-on: ubuntu-latest
    steps:
      - name: Finalize VDB Tests status
        env:
          SHOULD_SKIP_WORKFLOW: ${{ needs.pre_job.outputs.should_skip }}
          TESTS_CHANGED: ${{ needs.check-changes.outputs.vdb-changed }}
          RUN_RESULT: ${{ needs.vdb-tests-run.result }}
          SKIP_RESULT: ${{ needs.vdb-tests-skip.result }}
        run: |
          if [[ "$SHOULD_SKIP_WORKFLOW" == 'true' ]]; then
            echo "VDB tests were skipped because this workflow run duplicated a successful or newer run."
            exit 0
          fi

          if [[ "$TESTS_CHANGED" == 'true' ]]; then
            if [[ "$RUN_RESULT" == 'success' ]]; then
              echo "VDB tests ran successfully."
              exit 0
            fi

            echo "VDB tests were required but finished with result: $RUN_RESULT" >&2
            exit 1
          fi

          if [[ "$SKIP_RESULT" == 'success' ]]; then
            echo "VDB tests were skipped because no VDB-related files changed."
            exit 0
          fi

          echo "VDB tests were not required, but the skip job finished with result: $SKIP_RESULT" >&2
          exit 1

  db-migration-test-run:
    name: Run DB Migration Test
    needs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.migration-changed == 'true'
    uses: ./.github/workflows/db-migration-test.yml

  db-migration-test-skip:
    name: Skip DB Migration Test
    needs:
      - pre_job
      - check-changes
    if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.migration-changed != 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Report skipped DB migration tests
        run: echo "No migration-related changes detected; skipping DB migration tests."

  db-migration-test:
    name: DB Migration Test
    needs: check-changes
    if: needs.check-changes.outputs.migration-changed == 'true'
    uses: ./.github/workflows/db-migration-test.yml
    if: ${{ always() }}
    needs:
      - pre_job
      - check-changes
      - db-migration-test-run
      - db-migration-test-skip
    runs-on: ubuntu-latest
    steps:
      - name: Finalize DB Migration Test status
        env:
          SHOULD_SKIP_WORKFLOW: ${{ needs.pre_job.outputs.should_skip }}
          TESTS_CHANGED: ${{ needs.check-changes.outputs.migration-changed }}
          RUN_RESULT: ${{ needs.db-migration-test-run.result }}
          SKIP_RESULT: ${{ needs.db-migration-test-skip.result }}
        run: |
          if [[ "$SHOULD_SKIP_WORKFLOW" == 'true' ]]; then
            echo "DB migration tests were skipped because this workflow run duplicated a successful or newer run."
            exit 0
          fi

          if [[ "$TESTS_CHANGED" == 'true' ]]; then
            if [[ "$RUN_RESULT" == 'success' ]]; then
              echo "DB migration tests ran successfully."
              exit 0
            fi

            echo "DB migration tests were required but finished with result: $RUN_RESULT" >&2
            exit 1
          fi

          if [[ "$SKIP_RESULT" == 'success' ]]; then
            echo "DB migration tests were skipped because no migration-related files changed."
            exit 0
          fi

          echo "DB migration tests were not required, but the skip job finished with result: $SKIP_RESULT" >&2
          exit 1
.github/workflows/pyrefly-diff.yml (vendored, 15 lines changed)
@@ -22,7 +22,7 @@ jobs:
          fetch-depth: 0

      - name: Setup Python & UV
        uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0
        uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
        with:
          enable-cache: true

@@ -50,6 +50,17 @@ jobs:
        run: |
          diff -u /tmp/pyrefly_base.txt /tmp/pyrefly_pr.txt > pyrefly_diff.txt || true

      - name: Check if line counts match
        id: line_count_check
        run: |
          base_lines=$(wc -l < /tmp/pyrefly_base.txt)
          pr_lines=$(wc -l < /tmp/pyrefly_pr.txt)
          if [ "$base_lines" -eq "$pr_lines" ]; then
            echo "same=true" >> $GITHUB_OUTPUT
          else
            echo "same=false" >> $GITHUB_OUTPUT
          fi

      - name: Save PR number
        run: |
          echo ${{ github.event.pull_request.number }} > pr_number.txt
@@ -63,7 +74,7 @@ jobs:
            pr_number.txt

      - name: Comment PR with pyrefly diff
        if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
        if: ${{ github.event.pull_request.head.repo.full_name == github.repository && steps.line_count_check.outputs.same == 'false' }}
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/semantic-pull-request.yml (vendored, 7 lines changed)
@@ -7,6 +7,9 @@ on:
      - edited
      - reopened
      - synchronize
  merge_group:
    branches: ["main"]
    types: [checks_requested]

jobs:
  lint:
@@ -15,7 +18,11 @@ jobs:
      pull-requests: read
    runs-on: ubuntu-latest
    steps:
      - name: Complete merge group check
        if: github.event_name == 'merge_group'
        run: echo "Semantic PR title validation is handled on pull requests."
      - name: Check title
        if: github.event_name == 'pull_request'
        uses: amannn/action-semantic-pull-request@48f256284bd46cdaab1048c3721360e808335d50 # v6.1.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/style.yml (vendored, 12 lines changed)
@@ -33,7 +33,7 @@ jobs:

      - name: Setup UV and Python
        if: steps.changed-files.outputs.any_changed == 'true'
        uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0
        uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
        with:
          enable-cache: false
          python-version: "3.12"
@@ -49,7 +49,7 @@ jobs:

      - name: Run Type Checks
        if: steps.changed-files.outputs.any_changed == 'true'
        run: make type-check
        run: make type-check-core

      - name: Dotenv check
        if: steps.changed-files.outputs.any_changed == 'true'
@@ -77,6 +77,10 @@ jobs:
        with:
          files: |
            web/**
            package.json
            pnpm-lock.yaml
            pnpm-workspace.yaml
            .nvmrc
            .github/workflows/style.yml
            .github/actions/setup-web/**

@@ -90,9 +94,9 @@ jobs:
        uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
        with:
          path: web/.eslintcache
          key: ${{ runner.os }}-web-eslint-${{ hashFiles('web/package.json', 'web/pnpm-lock.yaml', 'web/eslint.config.mjs', 'web/eslint.constants.mjs', 'web/plugins/eslint/**') }}-${{ github.sha }}
          key: ${{ runner.os }}-web-eslint-${{ hashFiles('web/package.json', 'pnpm-lock.yaml', 'web/eslint.config.mjs', 'web/eslint.constants.mjs', 'web/plugins/eslint/**') }}-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-web-eslint-${{ hashFiles('web/package.json', 'web/pnpm-lock.yaml', 'web/eslint.config.mjs', 'web/eslint.constants.mjs', 'web/plugins/eslint/**') }}-
            ${{ runner.os }}-web-eslint-${{ hashFiles('web/package.json', 'pnpm-lock.yaml', 'web/eslint.config.mjs', 'web/eslint.constants.mjs', 'web/plugins/eslint/**') }}-

      - name: Web style check
        if: steps.changed-files.outputs.any_changed == 'true'
.github/workflows/tool-test-sdks.yaml (vendored, 3 lines changed)
@@ -6,6 +6,9 @@ on:
      - main
    paths:
      - sdks/**
      - package.json
      - pnpm-lock.yaml
      - pnpm-workspace.yaml

concurrency:
  group: sdk-tests-${{ github.head_ref || github.run_id }}
.github/workflows/translate-i18n-claude.yml (vendored, 722 lines changed)
@@ -1,26 +1,24 @@
name: Translate i18n Files with Claude Code

# Note: claude-code-action doesn't support push events directly.
# Push events are handled by trigger-i18n-sync.yml which sends repository_dispatch.
# See: https://github.com/langgenius/dify/issues/30743

# Push events are bridged by trigger-i18n-sync.yml via repository_dispatch.
on:
  repository_dispatch:
    types: [i18n-sync]
  workflow_dispatch:
    inputs:
      files:
        description: 'Specific files to translate (space-separated, e.g., "app common"). Leave empty for all files.'
        description: 'Specific files to translate (space-separated, e.g., "app common"). Required for full mode; leave empty in incremental mode to use en-US files changed since HEAD~1.'
        required: false
        type: string
      languages:
        description: 'Specific languages to translate (space-separated, e.g., "zh-Hans ja-JP"). Leave empty for all supported languages.'
        description: 'Specific languages to translate (space-separated, e.g., "zh-Hans ja-JP"). Leave empty for all supported target languages except en-US.'
        required: false
        type: string
      mode:
        description: 'Sync mode: incremental (only changes) or full (re-check all keys)'
        description: 'Sync mode: incremental (compare with previous en-US revision) or full (sync all keys in scope)'
        required: false
        default: 'incremental'
        default: incremental
        type: choice
        options:
          - incremental
@@ -30,11 +28,15 @@ permissions:
  contents: write
  pull-requests: write

concurrency:
  group: translate-i18n-${{ github.event_name }}-${{ github.ref }}
  cancel-in-progress: false

jobs:
  translate:
    if: github.repository == 'langgenius/dify'
    runs-on: ubuntu-latest
    timeout-minutes: 60
    timeout-minutes: 120

    steps:
      - name: Checkout repository
@@ -51,380 +53,374 @@ jobs:
      - name: Setup web environment
        uses: ./.github/actions/setup-web

      - name: Detect changed files and generate diff
        id: detect_changes
      - name: Prepare sync context
        id: context
        shell: bash
        run: |
          if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
            # Manual trigger
            if [ -n "${{ github.event.inputs.files }}" ]; then
              echo "CHANGED_FILES=${{ github.event.inputs.files }}" >> $GITHUB_OUTPUT
            else
              # Get all JSON files in en-US directory
              files=$(ls web/i18n/en-US/*.json 2>/dev/null | xargs -n1 basename | sed 's/.json$//' | tr '\n' ' ')
              echo "CHANGED_FILES=$files" >> $GITHUB_OUTPUT
            fi
            echo "TARGET_LANGS=${{ github.event.inputs.languages }}" >> $GITHUB_OUTPUT
            echo "SYNC_MODE=${{ github.event.inputs.mode || 'incremental' }}" >> $GITHUB_OUTPUT
          DEFAULT_TARGET_LANGS=$(awk "
            /value: '/ {
              value=\$2
              gsub(/[',]/, \"\", value)
            }
            /supported: true/ && value != \"en-US\" {
              printf \"%s \", value
            }
          " web/i18n-config/languages.ts | sed 's/[[:space:]]*$//')

            # For manual trigger with incremental mode, get diff from last commit
            # For full mode, we'll do a complete check anyway
            if [ "${{ github.event.inputs.mode }}" == "full" ]; then
              echo "Full mode: will check all keys" > /tmp/i18n-diff.txt
              echo "DIFF_AVAILABLE=false" >> $GITHUB_OUTPUT
            else
              git diff HEAD~1..HEAD -- 'web/i18n/en-US/*.json' > /tmp/i18n-diff.txt 2>/dev/null || echo "" > /tmp/i18n-diff.txt
              if [ -s /tmp/i18n-diff.txt ]; then
                echo "DIFF_AVAILABLE=true" >> $GITHUB_OUTPUT
              else
                echo "DIFF_AVAILABLE=false" >> $GITHUB_OUTPUT
              fi
            fi
          elif [ "${{ github.event_name }}" == "repository_dispatch" ]; then
            # Triggered by push via trigger-i18n-sync.yml workflow
            # Validate required payload fields
            if [ -z "${{ github.event.client_payload.changed_files }}" ]; then
              echo "Error: repository_dispatch payload missing required 'changed_files' field" >&2
              exit 1
            fi
            echo "CHANGED_FILES=${{ github.event.client_payload.changed_files }}" >> $GITHUB_OUTPUT
            echo "TARGET_LANGS=" >> $GITHUB_OUTPUT
            echo "SYNC_MODE=${{ github.event.client_payload.sync_mode || 'incremental' }}" >> $GITHUB_OUTPUT
          generate_changes_json() {
            node <<'NODE'
          const { execFileSync } = require('node:child_process')
          const fs = require('node:fs')
          const path = require('node:path')

            # Decode the base64-encoded diff from the trigger workflow
            if [ -n "${{ github.event.client_payload.diff_base64 }}" ]; then
              if ! echo "${{ github.event.client_payload.diff_base64 }}" | base64 -d > /tmp/i18n-diff.txt 2>&1; then
                echo "Warning: Failed to decode base64 diff payload" >&2
                echo "" > /tmp/i18n-diff.txt
                echo "DIFF_AVAILABLE=false" >> $GITHUB_OUTPUT
              elif [ -s /tmp/i18n-diff.txt ]; then
                echo "DIFF_AVAILABLE=true" >> $GITHUB_OUTPUT
              else
                echo "DIFF_AVAILABLE=false" >> $GITHUB_OUTPUT
              fi
          const repoRoot = process.cwd()
          const baseSha = process.env.BASE_SHA || ''
          const headSha = process.env.HEAD_SHA || ''
          const files = (process.env.CHANGED_FILES || '').split(/\s+/).filter(Boolean)

          const englishPath = fileStem => path.join(repoRoot, 'web', 'i18n', 'en-US', `${fileStem}.json`)

          const readCurrentJson = (fileStem) => {
            const filePath = englishPath(fileStem)
            if (!fs.existsSync(filePath))
              return null

            return JSON.parse(fs.readFileSync(filePath, 'utf8'))
          }

          const readBaseJson = (fileStem) => {
            if (!baseSha)
              return null

            try {
              const relativePath = `web/i18n/en-US/${fileStem}.json`
              const content = execFileSync('git', ['show', `${baseSha}:${relativePath}`], { encoding: 'utf8' })
              return JSON.parse(content)
            }
            catch (error) {
              return null
            }
          }

          const compareJson = (beforeValue, afterValue) => JSON.stringify(beforeValue) === JSON.stringify(afterValue)

          const changes = {}

          for (const fileStem of files) {
            const currentJson = readCurrentJson(fileStem)
            const beforeJson = readBaseJson(fileStem) || {}
            const afterJson = currentJson || {}
            const added = {}
            const updated = {}
            const deleted = []

            for (const [key, value] of Object.entries(afterJson)) {
              if (!(key in beforeJson)) {
                added[key] = value
                continue
              }

              if (!compareJson(beforeJson[key], value)) {
                updated[key] = {
                  before: beforeJson[key],
                  after: value,
                }
              }
            }

            for (const key of Object.keys(beforeJson)) {
              if (!(key in afterJson))
                deleted.push(key)
            }

            changes[fileStem] = {
              fileDeleted: currentJson === null,
              added,
              updated,
              deleted,
            }
          }

          fs.writeFileSync(
            '/tmp/i18n-changes.json',
            JSON.stringify({
              baseSha,
              headSha,
              files,
              changes,
            })
          )
          NODE
          }

if [ "${{ github.event_name }}" = "repository_dispatch" ]; then
|
||||
BASE_SHA="${{ github.event.client_payload.base_sha }}"
|
||||
HEAD_SHA="${{ github.event.client_payload.head_sha }}"
|
||||
CHANGED_FILES="${{ github.event.client_payload.changed_files }}"
|
||||
TARGET_LANGS="$DEFAULT_TARGET_LANGS"
|
||||
SYNC_MODE="${{ github.event.client_payload.sync_mode || 'incremental' }}"
|
||||
|
||||
if [ -n "${{ github.event.client_payload.changes_base64 }}" ]; then
|
||||
printf '%s' '${{ github.event.client_payload.changes_base64 }}' | base64 -d > /tmp/i18n-changes.json
|
||||
CHANGES_AVAILABLE="true"
|
||||
CHANGES_SOURCE="embedded"
|
||||
elif [ -n "$BASE_SHA" ] && [ -n "$CHANGED_FILES" ]; then
|
||||
export BASE_SHA HEAD_SHA CHANGED_FILES
|
||||
generate_changes_json
|
||||
CHANGES_AVAILABLE="true"
|
||||
CHANGES_SOURCE="recomputed"
|
||||
else
|
||||
echo "" > /tmp/i18n-diff.txt
|
||||
echo "DIFF_AVAILABLE=false" >> $GITHUB_OUTPUT
|
||||
printf '%s' '{"baseSha":"","headSha":"","files":[],"changes":{}}' > /tmp/i18n-changes.json
|
||||
CHANGES_AVAILABLE="false"
|
||||
CHANGES_SOURCE="unavailable"
|
||||
fi
|
||||
else
|
||||
echo "Unsupported event type: ${{ github.event_name }}"
|
||||
exit 1
|
||||
BASE_SHA=""
|
||||
HEAD_SHA=$(git rev-parse HEAD)
|
||||
if [ -n "${{ github.event.inputs.languages }}" ]; then
|
||||
TARGET_LANGS="${{ github.event.inputs.languages }}"
|
||||
else
|
||||
TARGET_LANGS="$DEFAULT_TARGET_LANGS"
|
||||
fi
|
||||
SYNC_MODE="${{ github.event.inputs.mode || 'incremental' }}"
|
||||
if [ -n "${{ github.event.inputs.files }}" ]; then
|
||||
CHANGED_FILES="${{ github.event.inputs.files }}"
|
||||
elif [ "$SYNC_MODE" = "incremental" ]; then
|
||||
BASE_SHA=$(git rev-parse HEAD~1 2>/dev/null || true)
|
||||
if [ -n "$BASE_SHA" ]; then
|
||||
CHANGED_FILES=$(git diff --name-only "$BASE_SHA" "$HEAD_SHA" -- 'web/i18n/en-US/*.json' 2>/dev/null | sed -n 's@^.*/@@p' | sed 's/\.json$//' | tr '\n' ' ' | sed 's/[[:space:]]*$//')
|
||||
else
|
||||
CHANGED_FILES=$(find web/i18n/en-US -maxdepth 1 -type f -name '*.json' -print | sed -n 's@^.*/@@p' | sed 's/\.json$//' | sort | tr '\n' ' ' | sed 's/[[:space:]]*$//')
|
||||
fi
|
||||
elif [ "$SYNC_MODE" = "full" ]; then
|
||||
echo "workflow_dispatch full mode requires the files input to stay within CI limits." >&2
|
||||
exit 1
|
||||
else
|
||||
CHANGED_FILES=""
|
||||
fi
|
||||
|
||||
if [ "$SYNC_MODE" = "incremental" ] && [ -n "$CHANGED_FILES" ]; then
|
||||
export BASE_SHA HEAD_SHA CHANGED_FILES
|
||||
generate_changes_json
|
||||
CHANGES_AVAILABLE="true"
|
||||
CHANGES_SOURCE="local"
|
||||
else
|
||||
printf '%s' '{"baseSha":"","headSha":"","files":[],"changes":{}}' > /tmp/i18n-changes.json
|
||||
CHANGES_AVAILABLE="false"
|
||||
CHANGES_SOURCE="unavailable"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Truncate diff if too large (keep first 50KB)
|
||||
if [ -f /tmp/i18n-diff.txt ]; then
|
||||
head -c 50000 /tmp/i18n-diff.txt > /tmp/i18n-diff-truncated.txt
|
||||
mv /tmp/i18n-diff-truncated.txt /tmp/i18n-diff.txt
|
||||
FILE_ARGS=""
|
||||
if [ -n "$CHANGED_FILES" ]; then
|
||||
FILE_ARGS="--file $CHANGED_FILES"
|
||||
fi
|
||||
|
||||
echo "Detected files: $(cat $GITHUB_OUTPUT | grep CHANGED_FILES || echo 'none')"
|
||||
LANG_ARGS=""
|
||||
if [ -n "$TARGET_LANGS" ]; then
|
||||
LANG_ARGS="--lang $TARGET_LANGS"
|
||||
fi
|
||||
|
||||
{
|
||||
echo "DEFAULT_TARGET_LANGS=$DEFAULT_TARGET_LANGS"
|
||||
echo "BASE_SHA=$BASE_SHA"
|
||||
echo "HEAD_SHA=$HEAD_SHA"
|
||||
echo "CHANGED_FILES=$CHANGED_FILES"
|
||||
echo "TARGET_LANGS=$TARGET_LANGS"
|
||||
echo "SYNC_MODE=$SYNC_MODE"
|
||||
echo "CHANGES_AVAILABLE=$CHANGES_AVAILABLE"
|
||||
echo "CHANGES_SOURCE=$CHANGES_SOURCE"
|
||||
echo "FILE_ARGS=$FILE_ARGS"
|
||||
echo "LANG_ARGS=$LANG_ARGS"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
echo "Files: ${CHANGED_FILES:-<none>}"
|
||||
echo "Languages: ${TARGET_LANGS:-<none>}"
|
||||
echo "Mode: $SYNC_MODE"
|
||||
|
||||
      - name: Run Claude Code for Translation Sync
        if: steps.detect_changes.outputs.CHANGED_FILES != ''
        uses: anthropics/claude-code-action@ff9acae5886d41a99ed4ec14b7dc147d55834722 # v1.0.77
        if: steps.context.outputs.CHANGED_FILES != ''
        uses: anthropics/claude-code-action@88c168b39e7e64da0286d812b6e9fbebb6708185 # v1.0.82
        with:
          anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
          github_token: ${{ secrets.GITHUB_TOKEN }}
          # Allow github-actions bot to trigger this workflow via repository_dispatch
          # See: https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
          allowed_bots: 'github-actions[bot]'
          show_full_output: ${{ github.event_name == 'workflow_dispatch' }}
          prompt: |
            You are a professional i18n synchronization engineer for the Dify project.
            Your task is to keep all language translations in sync with the English source (en-US).
            You are the i18n sync agent for the Dify repository.
            Your job is to keep translations synchronized with the English source files under `${{ github.workspace }}/web/i18n/en-US/`.

            ## CRITICAL TOOL RESTRICTIONS
            - Use **Read** tool to read files (NOT cat or bash)
            - Use **Edit** tool to modify JSON files (NOT node, jq, or bash scripts)
            - Use **Bash** ONLY for: git commands, gh commands, pnpm commands
            - Run bash commands ONE BY ONE, never combine with && or ||
            - NEVER use `$()` command substitution - it's not supported. Split into separate commands instead.
            Use absolute paths at all times:
            - Repo root: `${{ github.workspace }}`
            - Web directory: `${{ github.workspace }}/web`
            - Language config: `${{ github.workspace }}/web/i18n-config/languages.ts`

            ## WORKING DIRECTORY & ABSOLUTE PATHS
            Claude Code sandbox working directory may vary. Always use absolute paths:
            - For pnpm: `pnpm --dir ${{ github.workspace }}/web <command>`
            - For git: `git -C ${{ github.workspace }} <command>`
            - For gh: `gh --repo ${{ github.repository }} <command>`
            - For file paths: `${{ github.workspace }}/web/i18n/`
            Inputs:
            - Files in scope: `${{ steps.context.outputs.CHANGED_FILES }}`
            - Target languages: `${{ steps.context.outputs.TARGET_LANGS }}`
            - Sync mode: `${{ steps.context.outputs.SYNC_MODE }}`
            - Base SHA: `${{ steps.context.outputs.BASE_SHA }}`
            - Head SHA: `${{ steps.context.outputs.HEAD_SHA }}`
            - Scoped file args: `${{ steps.context.outputs.FILE_ARGS }}`
            - Scoped language args: `${{ steps.context.outputs.LANG_ARGS }}`
            - Structured change set available: `${{ steps.context.outputs.CHANGES_AVAILABLE }}`
            - Structured change set source: `${{ steps.context.outputs.CHANGES_SOURCE }}`
            - Structured change set file: `/tmp/i18n-changes.json`

            ## EFFICIENCY RULES
            - **ONE Edit per language file** - batch all key additions into a single Edit
            - Insert new keys at the beginning of JSON (after `{`), lint:fix will sort them
            - Translate ALL keys for a language mentally first, then do ONE Edit

## Context
|
||||
- Changed/target files: ${{ steps.detect_changes.outputs.CHANGED_FILES }}
|
||||
- Target languages (empty means all supported): ${{ steps.detect_changes.outputs.TARGET_LANGS }}
|
||||
- Sync mode: ${{ steps.detect_changes.outputs.SYNC_MODE }}
|
||||
- Translation files are located in: ${{ github.workspace }}/web/i18n/{locale}/{filename}.json
|
||||
- Language configuration is in: ${{ github.workspace }}/web/i18n-config/languages.ts
|
||||
- Git diff is available: ${{ steps.detect_changes.outputs.DIFF_AVAILABLE }}
|
||||
|
||||
## CRITICAL DESIGN: Verify First, Then Sync
|
||||
|
||||
You MUST follow this three-phase approach:
|
||||
|
||||
═══════════════════════════════════════════════════════════════
|
||||
║ PHASE 1: VERIFY - Analyze and Generate Change Report ║
|
||||
═══════════════════════════════════════════════════════════════
|
||||
|
||||
### Step 1.1: Analyze Git Diff (for incremental mode)
|
||||
Use the Read tool to read `/tmp/i18n-diff.txt` to see the git diff.
|
||||
|
||||
Parse the diff to categorize changes:
|
||||
- Lines with `+` (not `+++`): Added or modified values
|
||||
- Lines with `-` (not `---`): Removed or old values
|
||||
- Identify specific keys for each category:
|
||||
* ADD: Keys that appear only in `+` lines (new keys)
|
||||
* UPDATE: Keys that appear in both `-` and `+` lines (value changed)
|
||||
* DELETE: Keys that appear only in `-` lines (removed keys)
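For example, a hypothetical hunk in the diff (key names illustrative) would be categorized like this:

```
-  "common.save": "Save",
+  "common.save": "Save Changes",
+  "common.newFeature": "Try the new feature",
-  "common.obsolete": "Old label",
```

Here `common.newFeature` is an ADD (only in `+` lines), `common.save` is an UPDATE (appears in both `-` and `+`), and `common.obsolete` is a DELETE (only in `-` lines).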

### Step 1.2: Read Language Configuration
Use the Read tool to read `${{ github.workspace }}/web/i18n-config/languages.ts`.
Extract all languages with `supported: true`.
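Entries in that file look roughly like `{ value: 'zh-Hans', supported: true }` (field names other than `supported` are illustrative here); only locales flagged `supported: true` become sync targets.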

### Step 1.3: Run i18n:check for Each Language
```bash
pnpm --dir ${{ github.workspace }}/web install --frozen-lockfile
```
```bash
pnpm --dir ${{ github.workspace }}/web run i18n:check
```

This will report:
- Missing keys (need to ADD)
- Extra keys (need to DELETE)

### Step 1.4: Generate Change Report

Create a structured report identifying:
```
╔══════════════════════════════════════════════════════════════╗
║ I18N SYNC CHANGE REPORT ║
╠══════════════════════════════════════════════════════════════╣
║ Files to process: [list] ║
║ Languages to sync: [list] ║
╠══════════════════════════════════════════════════════════════╣
║ ADD (New Keys): ║
║ - [filename].[key]: "English value" ║
║ ... ║
╠══════════════════════════════════════════════════════════════╣
║ UPDATE (Modified Keys - MUST re-translate): ║
║ - [filename].[key]: "Old value" → "New value" ║
║ ... ║
╠══════════════════════════════════════════════════════════════╣
║ DELETE (Extra Keys): ║
║ - [language]/[filename].[key] ║
║ ... ║
╚══════════════════════════════════════════════════════════════╝
```

**IMPORTANT**: For UPDATE detection, compare git diff to find keys where
the English value changed. These MUST be re-translated even if target
language already has a translation (it's now stale!).

═══════════════════════════════════════════════════════════════
║ PHASE 2: SYNC - Execute Changes Based on Report ║
═══════════════════════════════════════════════════════════════

### Step 2.1: Process ADD Operations (BATCH per language file)

**CRITICAL WORKFLOW for efficiency:**
1. First, translate ALL new keys for ALL languages mentally
2. Then, for EACH language file, do ONE Edit operation:
- Read the file once
- Insert ALL new keys at the beginning (right after the opening `{`)
- Don't worry about alphabetical order - lint:fix will sort them later

Example Edit (adding 3 keys to zh-Hans/app.json):
```
old_string: '{\n "accessControl"'
new_string: '{\n "newKey1": "translation1",\n "newKey2": "translation2",\n "newKey3": "translation3",\n "accessControl"'
```

**IMPORTANT**:
- ONE Edit per language file (not one Edit per key!)
- Always use the Edit tool. NEVER use bash scripts, node, or jq.

### Step 2.2: Process UPDATE Operations

**IMPORTANT: Special handling for zh-Hans and ja-JP**
If zh-Hans or ja-JP files were ALSO modified in the same push:
- Run: `git -C ${{ github.workspace }} diff HEAD~1 --name-only` and check for zh-Hans or ja-JP files
- If found, it means someone manually translated them. Apply these rules:

1. **Missing keys**: Still ADD them (completeness required)
2. **Existing translations**: Compare with the NEW English value:
- If translation is **completely wrong** or **unrelated** → Update it
- If translation is **roughly correct** (captures the meaning) → Keep it, respect manual work
- When in doubt, **keep the manual translation**

Example:
- English changed: "Save" → "Save Changes"
- Manual translation: "保存更改" → Keep it (correct meaning)
- Manual translation: "删除" → Update it (completely wrong)

For other languages:
Use Edit tool to replace the old value with the new translation.
You can batch multiple updates in one Edit if they are adjacent.

### Step 2.3: Process DELETE Operations
For extra keys reported by i18n:check:
- Run: `pnpm --dir ${{ github.workspace }}/web run i18n:check --auto-remove`
- Or manually remove from target language JSON files

## Translation Guidelines

- PRESERVE all placeholders exactly as-is:
- `{{variable}}` - Mustache interpolation
- `${variable}` - Template literal
- `<tag>content</tag>` - HTML tags
- `_one`, `_other` - Pluralization suffixes (these are KEY suffixes, not values)

**CRITICAL: Variable names and tag names MUST stay in English - NEVER translate them**

✅ CORRECT examples:
- English: "{{count}} items" → Japanese: "{{count}} 個のアイテム"
- English: "{{name}} updated" → Korean: "{{name}} 업데이트됨"
- English: "<email>{{email}}</email>" → Chinese: "<email>{{email}}</email>"
- English: "<CustomLink>Marketplace</CustomLink>" → Japanese: "<CustomLink>マーケットプレイス</CustomLink>"

❌ WRONG examples (NEVER do this - will break the application):
- "{{count}}" → "{{カウント}}" ❌ (variable name translated to Japanese)
- "{{name}}" → "{{이름}}" ❌ (variable name translated to Korean)
- "{{email}}" → "{{邮箱}}" ❌ (variable name translated to Chinese)
- "<email>" → "<メール>" ❌ (tag name translated)
- "<CustomLink>" → "<自定义链接>" ❌ (component name translated)

- Use appropriate language register (formal/informal) based on existing translations
- Match existing translation style in each language
- Technical terms: check existing conventions per language
- For CJK languages: no spaces between characters unless necessary
- For RTL languages (ar-TN, fa-IR): ensure proper text handling

## Output Format Requirements
- Alphabetical key ordering (if original file uses it)
- 2-space indentation
- Trailing newline at end of file
- Valid JSON (use proper escaping for special characters)
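A minimal conforming locale file (keys illustrative) would therefore look like:

```
{
  "appName": "My App",
  "save": "Save"
}
```

with two-space indentation, alphabetically ordered keys, and a trailing newline.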

═══════════════════════════════════════════════════════════════
║ PHASE 3: RE-VERIFY - Confirm All Issues Resolved ║
═══════════════════════════════════════════════════════════════

### Step 3.1: Run Lint Fix (IMPORTANT!)
```bash
pnpm --dir ${{ github.workspace }}/web lint:fix --quiet -- 'i18n/**/*.json'
```
This ensures:
- JSON keys are sorted alphabetically (jsonc/sort-keys rule)
- Valid i18n keys (dify-i18n/valid-i18n-keys rule)
- No extra keys (dify-i18n/no-extra-keys rule)

### Step 3.2: Run Final i18n Check
```bash
pnpm --dir ${{ github.workspace }}/web run i18n:check
```

### Step 3.3: Fix Any Remaining Issues
If check reports issues:
- Go back to PHASE 2 for unresolved items
- Repeat until check passes

### Step 3.4: Generate Final Summary
```
╔══════════════════════════════════════════════════════════════╗
║ SYNC COMPLETED SUMMARY ║
╠══════════════════════════════════════════════════════════════╣
║ Language │ Added │ Updated │ Deleted │ Status ║
╠══════════════════════════════════════════════════════════════╣
║ zh-Hans │ 5 │ 2 │ 1 │ ✓ Complete ║
║ ja-JP │ 5 │ 2 │ 1 │ ✓ Complete ║
║ ... │ ... │ ... │ ... │ ... ║
╠══════════════════════════════════════════════════════════════╣
║ i18n:check │ PASSED - All keys in sync ║
╚══════════════════════════════════════════════════════════════╝
```

## Mode-Specific Behavior

**SYNC_MODE = "incremental"** (default):
- Focus on keys identified from git diff
- Also check i18n:check output for any missing/extra keys
- Efficient for small changes

**SYNC_MODE = "full"**:
- Compare ALL keys between en-US and each language
- Run i18n:check to identify all discrepancies
- Use for first-time sync or fixing historical issues

## Important Notes

1. Always run i18n:check BEFORE and AFTER making changes
2. The check script is the source of truth for missing/extra keys
3. For UPDATE scenario: git diff is the source of truth for changed values
4. Create a single commit with all translation changes
5. If any translation fails, continue with others and report failures

═══════════════════════════════════════════════════════════════
║ PHASE 4: COMMIT AND CREATE PR ║
═══════════════════════════════════════════════════════════════

After all translations are complete and verified:

### Step 4.1: Check for changes
```bash
git -C ${{ github.workspace }} status --porcelain
```

If there are changes:

### Step 4.2: Create a new branch and commit
Run these git commands ONE BY ONE (not combined with &&).
**IMPORTANT**: Do NOT use `$()` command substitution. Use two separate commands:

1. First, get the timestamp:
```bash
date +%Y%m%d-%H%M%S
```
(Note the output, e.g., "20260115-143052")

2. Then create branch using the timestamp value:
```bash
git -C ${{ github.workspace }} checkout -b chore/i18n-sync-20260115-143052
```
(Replace "20260115-143052" with the actual timestamp from step 1)

3. Stage changes:
```bash
git -C ${{ github.workspace }} add web/i18n/
```

4. Commit:
```bash
git -C ${{ github.workspace }} commit -m "chore(i18n): sync translations with en-US - Mode: ${{ steps.detect_changes.outputs.SYNC_MODE }}"
```

5. Push:
```bash
git -C ${{ github.workspace }} push origin HEAD
```

### Step 4.3: Create Pull Request
```bash
gh pr create --repo ${{ github.repository }} --title "chore(i18n): sync translations with en-US" --body "## Summary

This PR was automatically generated to sync i18n translation files.

### Changes
- Mode: ${{ steps.detect_changes.outputs.SYNC_MODE }}
- Files processed: ${{ steps.detect_changes.outputs.CHANGED_FILES }}

### Verification
- [x] \`i18n:check\` passed
- [x] \`lint:fix\` applied

🤖 Generated with Claude Code GitHub Action" --base main
```
Tool rules:
- Use Read for repository files.
- Use Edit for JSON updates.
- Use Bash only for `pnpm`.
- Do not use Bash for `git`, `gh`, or branch management.

Required execution plan:
1. Resolve target languages.
- Use the provided `Target languages` value as the source of truth.
- If it is unexpectedly empty, read `${{ github.workspace }}/web/i18n-config/languages.ts` and use every language with `supported: true` except `en-US`.
2. Stay strictly in scope.
- Only process the files listed in `Files in scope`.
- Only process the resolved target languages, never `en-US`.
- Do not touch unrelated i18n files.
- Do not modify `${{ github.workspace }}/web/i18n/en-US/`.
3. Resolve source changes.
- If `Structured change set available` is `true`, read `/tmp/i18n-changes.json` and use it as the source of truth for file-level and key-level changes (an illustrative example is sketched after this plan).
- For each file entry:
- `added` contains new English keys that need translations.
- `updated` contains stale keys whose English source changed; re-translate using the `after` value.
- `deleted` contains keys that should be removed from locale files.
- `fileDeleted: true` means the English file no longer exists; remove the matching locale file if present.
- Read the current English JSON file for any file that still exists so wording, placeholders, and surrounding terminology stay accurate.
- If `Structured change set available` is `false`, treat this as a scoped full sync and use the current English files plus scoped checks as the source of truth.
4. Run a scoped pre-check before editing:
- `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }}`
- Use this command as the source of truth for missing and extra keys inside the current scope.
5. Apply translations.
- For every target language and scoped file:
- If `fileDeleted` is `true`, remove the locale file if it exists and skip the rest of that file.
- If the locale file does not exist yet, create it with `Write` and then continue with `Edit` as needed.
- ADD missing keys.
- UPDATE stale translations when the English value changed.
- DELETE removed keys. Prefer `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }} --auto-remove` for extra keys so deletions stay in scope.
- Preserve placeholders exactly: `{{variable}}`, `${variable}`, HTML tags, component tags, and variable names.
- Match the existing terminology and register used by each locale.
- Prefer one Edit per file when stable, but prioritize correctness over batching.
6. Verify only the edited files.
- Run `pnpm --dir ${{ github.workspace }}/web lint:fix --quiet -- <relative edited i18n file paths>`
- Run `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }}`
- If verification fails, fix the remaining problems before continuing.
7. Stop after the scoped locale files are updated and verification passes.
- Do not create branches, commits, or pull requests.
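For reference, a minimal `/tmp/i18n-changes.json` matching the shape written by the trigger workflow (all key names and values here are illustrative) looks like:

```
{
  "baseSha": "<base sha>",
  "headSha": "<head sha>",
  "files": ["app"],
  "changes": {
    "app": {
      "fileDeleted": false,
      "added": { "newKey": "New label" },
      "updated": { "title": { "before": "Save", "after": "Save Changes" } },
      "deleted": ["oldKey"]
    }
  }
}
```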
claude_args: |
--max-turns 150
--allowedTools "Read,Write,Edit,Bash(git *),Bash(git:*),Bash(gh *),Bash(gh:*),Bash(pnpm *),Bash(pnpm:*),Bash(date *),Bash(date:*),Glob,Grep"
--max-turns 120
--allowedTools "Read,Write,Edit,Bash(pnpm *),Bash(pnpm:*),Glob,Grep"

- name: Prepare branch metadata
id: pr_meta
if: steps.context.outputs.CHANGED_FILES != ''
shell: bash
run: |
if [ -z "$(git -C "${{ github.workspace }}" status --porcelain -- web/i18n/)" ]; then
echo "has_changes=false" >> "$GITHUB_OUTPUT"
exit 0
fi

SCOPE_HASH=$(printf '%s|%s|%s' "${{ steps.context.outputs.CHANGED_FILES }}" "${{ steps.context.outputs.TARGET_LANGS }}" "${{ steps.context.outputs.SYNC_MODE }}" | sha256sum | cut -c1-8)
HEAD_SHORT=$(printf '%s' "${{ steps.context.outputs.HEAD_SHA }}" | cut -c1-12)
BRANCH_NAME="chore/i18n-sync-${HEAD_SHORT}-${SCOPE_HASH}"

{
echo "has_changes=true"
echo "branch_name=$BRANCH_NAME"
} >> "$GITHUB_OUTPUT"
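Because SCOPE_HASH and HEAD_SHORT are derived only from the scoped inputs and the head SHA, the branch name is deterministic: a rerun over the same push and scope would produce something like `chore/i18n-sync-0123456789ab-1a2b3c4d` again (values illustrative), which is what lets later steps update the existing PR instead of opening a duplicate.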

- name: Commit translation changes
if: steps.pr_meta.outputs.has_changes == 'true'
shell: bash
run: |
git -C "${{ github.workspace }}" checkout -B "${{ steps.pr_meta.outputs.branch_name }}"
git -C "${{ github.workspace }}" add web/i18n/
git -C "${{ github.workspace }}" commit -m "chore(i18n): sync translations with en-US"

- name: Push translation branch
if: steps.pr_meta.outputs.has_changes == 'true'
shell: bash
run: |
if git -C "${{ github.workspace }}" ls-remote --exit-code --heads origin "${{ steps.pr_meta.outputs.branch_name }}" >/dev/null 2>&1; then
git -C "${{ github.workspace }}" push --force-with-lease origin "${{ steps.pr_meta.outputs.branch_name }}"
else
git -C "${{ github.workspace }}" push --set-upstream origin "${{ steps.pr_meta.outputs.branch_name }}"
fi

- name: Create or update translation PR
if: steps.pr_meta.outputs.has_changes == 'true'
env:
BRANCH_NAME: ${{ steps.pr_meta.outputs.branch_name }}
FILES_IN_SCOPE: ${{ steps.context.outputs.CHANGED_FILES }}
TARGET_LANGS: ${{ steps.context.outputs.TARGET_LANGS }}
SYNC_MODE: ${{ steps.context.outputs.SYNC_MODE }}
CHANGES_SOURCE: ${{ steps.context.outputs.CHANGES_SOURCE }}
BASE_SHA: ${{ steps.context.outputs.BASE_SHA }}
HEAD_SHA: ${{ steps.context.outputs.HEAD_SHA }}
REPO_NAME: ${{ github.repository }}
shell: bash
run: |
PR_BODY_FILE=/tmp/i18n-pr-body.md
LANG_COUNT=$(printf '%s\n' "$TARGET_LANGS" | wc -w | tr -d ' ')
if [ "$LANG_COUNT" = "0" ]; then
LANG_COUNT="0"
fi
export LANG_COUNT

node <<'NODE' > "$PR_BODY_FILE"
const fs = require('node:fs')

const changesPath = '/tmp/i18n-changes.json'
const changes = fs.existsSync(changesPath)
? JSON.parse(fs.readFileSync(changesPath, 'utf8'))
: { changes: {} }

const filesInScope = (process.env.FILES_IN_SCOPE || '').split(/\s+/).filter(Boolean)
const lines = [
'## Summary',
'',
`- **Files synced**: \`${process.env.FILES_IN_SCOPE || '<none>'}\``,
`- **Languages updated**: ${process.env.TARGET_LANGS || '<none>'} (${process.env.LANG_COUNT} languages)`,
`- **Sync mode**: ${process.env.SYNC_MODE}${process.env.BASE_SHA ? ` (base: \`${process.env.BASE_SHA.slice(0, 10)}\`, head: \`${process.env.HEAD_SHA.slice(0, 10)}\`)` : ` (head: \`${process.env.HEAD_SHA.slice(0, 10)}\`)`}`,
'',
'### Key changes',
]

for (const fileName of filesInScope) {
const fileChange = changes.changes?.[fileName] || { added: {}, updated: {}, deleted: [], fileDeleted: false }
const addedKeys = Object.keys(fileChange.added || {})
const updatedKeys = Object.keys(fileChange.updated || {})
const deletedKeys = fileChange.deleted || []
lines.push(`- \`${fileName}\`: +${addedKeys.length} / ~${updatedKeys.length} / -${deletedKeys.length}${fileChange.fileDeleted ? ' (file deleted in en-US)' : ''}`)
}

lines.push(
'',
'## Verification',
'',
`- \`pnpm --dir web run i18n:check --file ${process.env.FILES_IN_SCOPE} --lang ${process.env.TARGET_LANGS}\``,
`- \`pnpm --dir web lint:fix --quiet -- <edited i18n files>\``,
'',
'## Notes',
'',
'- This PR was generated from structured en-US key changes produced by `trigger-i18n-sync.yml`.',
`- Structured change source: ${process.env.CHANGES_SOURCE || 'unknown'}.`,
'- Branch name is deterministic for the head SHA and scope, so reruns update the same PR instead of opening duplicates.',
'',
'🤖 Generated with [Claude Code](https://claude.com/claude-code)'
)

process.stdout.write(lines.join('\n'))
NODE

EXISTING_PR_NUMBER=$(gh pr list --repo "$REPO_NAME" --head "$BRANCH_NAME" --state open --json number --jq '.[0].number')

if [ -n "$EXISTING_PR_NUMBER" ] && [ "$EXISTING_PR_NUMBER" != "null" ]; then
gh pr edit "$EXISTING_PR_NUMBER" --repo "$REPO_NAME" --title "chore(i18n): sync translations with en-US" --body-file "$PR_BODY_FILE"
else
gh pr create --repo "$REPO_NAME" --head "$BRANCH_NAME" --base main --title "chore(i18n): sync translations with en-US" --body-file "$PR_BODY_FILE"
fi

.github/workflows/trigger-i18n-sync.yml (vendored, 171 lines changed)
@@ -1,9 +1,5 @@
name: Trigger i18n Sync on Push

# This workflow bridges the push event to repository_dispatch
# because claude-code-action doesn't support push events directly.
# See: https://github.com/langgenius/dify/issues/30743

on:
push:
branches: [main]
@@ -13,6 +9,10 @@ on:
permissions:
contents: write

concurrency:
group: trigger-i18n-sync-${{ github.ref }}
cancel-in-progress: true

jobs:
trigger:
if: github.repository == 'langgenius/dify'
@@ -25,42 +25,147 @@ jobs:
with:
fetch-depth: 0

- name: Detect changed files and generate diff
- name: Detect changed files and build structured change set
id: detect
shell: bash
run: |
BEFORE_SHA="${{ github.event.before }}"
# Handle edge case: force push may have null/zero SHA
if [ -z "$BEFORE_SHA" ] || [ "$BEFORE_SHA" = "0000000000000000000000000000000000000000" ]; then
BEFORE_SHA="HEAD~1"
BASE_SHA="${{ github.event.before }}"
if [ -z "$BASE_SHA" ] || [ "$BASE_SHA" = "0000000000000000000000000000000000000000" ]; then
BASE_SHA=$(git rev-parse HEAD~1 2>/dev/null || true)
fi
HEAD_SHA="${{ github.sha }}"

# Detect changed i18n files
changed=$(git diff --name-only "$BEFORE_SHA" "${{ github.sha }}" -- 'web/i18n/en-US/*.json' 2>/dev/null | xargs -n1 basename 2>/dev/null | sed 's/.json$//' | tr '\n' ' ' || echo "")
echo "changed_files=$changed" >> $GITHUB_OUTPUT

# Generate diff for context
git diff "$BEFORE_SHA" "${{ github.sha }}" -- 'web/i18n/en-US/*.json' > /tmp/i18n-diff.txt 2>/dev/null || echo "" > /tmp/i18n-diff.txt

# Truncate if too large (keep first 50KB to match receiving workflow)
head -c 50000 /tmp/i18n-diff.txt > /tmp/i18n-diff-truncated.txt
mv /tmp/i18n-diff-truncated.txt /tmp/i18n-diff.txt

# Base64 encode the diff for safe JSON transport (portable, single-line)
diff_base64=$(base64 < /tmp/i18n-diff.txt | tr -d '\n')
echo "diff_base64=$diff_base64" >> $GITHUB_OUTPUT

if [ -n "$changed" ]; then
echo "has_changes=true" >> $GITHUB_OUTPUT
echo "Detected changed files: $changed"
if [ -n "$BASE_SHA" ]; then
CHANGED_FILES=$(git diff --name-only "$BASE_SHA" "$HEAD_SHA" -- 'web/i18n/en-US/*.json' 2>/dev/null | sed -n 's@^.*/@@p' | sed 's/\.json$//' | tr '\n' ' ' | sed 's/[[:space:]]*$//')
else
echo "has_changes=false" >> $GITHUB_OUTPUT
echo "No i18n changes detected"
CHANGED_FILES=$(find web/i18n/en-US -maxdepth 1 -type f -name '*.json' -print | sed -n 's@^.*/@@p' | sed 's/\.json$//' | sort | tr '\n' ' ' | sed 's/[[:space:]]*$//')
fi

export BASE_SHA HEAD_SHA CHANGED_FILES
node <<'NODE'
const { execFileSync } = require('node:child_process')
const fs = require('node:fs')
const path = require('node:path')

const repoRoot = process.cwd()
const baseSha = process.env.BASE_SHA || ''
const headSha = process.env.HEAD_SHA || ''
const files = (process.env.CHANGED_FILES || '').split(/\s+/).filter(Boolean)

const englishPath = fileStem => path.join(repoRoot, 'web', 'i18n', 'en-US', `${fileStem}.json`)

const readCurrentJson = (fileStem) => {
const filePath = englishPath(fileStem)
if (!fs.existsSync(filePath))
return null

return JSON.parse(fs.readFileSync(filePath, 'utf8'))
}

const readBaseJson = (fileStem) => {
if (!baseSha)
return null

try {
const relativePath = `web/i18n/en-US/${fileStem}.json`
const content = execFileSync('git', ['show', `${baseSha}:${relativePath}`], { encoding: 'utf8' })
return JSON.parse(content)
}
catch (error) {
return null
}
}

const compareJson = (beforeValue, afterValue) => JSON.stringify(beforeValue) === JSON.stringify(afterValue)

const changes = {}

for (const fileStem of files) {
const beforeJson = readBaseJson(fileStem) || {}
const afterJson = readCurrentJson(fileStem) || {}
const added = {}
const updated = {}
const deleted = []

for (const [key, value] of Object.entries(afterJson)) {
if (!(key in beforeJson)) {
added[key] = value
continue
}

if (!compareJson(beforeJson[key], value)) {
updated[key] = {
before: beforeJson[key],
after: value,
}
}
}

for (const key of Object.keys(beforeJson)) {
if (!(key in afterJson))
deleted.push(key)
}

changes[fileStem] = {
fileDeleted: readCurrentJson(fileStem) === null,
added,
updated,
deleted,
}
}

fs.writeFileSync(
'/tmp/i18n-changes.json',
JSON.stringify({
baseSha,
headSha,
files,
changes,
})
)
NODE

if [ -n "$CHANGED_FILES" ]; then
echo "has_changes=true" >> "$GITHUB_OUTPUT"
else
echo "has_changes=false" >> "$GITHUB_OUTPUT"
fi

echo "base_sha=$BASE_SHA" >> "$GITHUB_OUTPUT"
echo "head_sha=$HEAD_SHA" >> "$GITHUB_OUTPUT"
echo "changed_files=$CHANGED_FILES" >> "$GITHUB_OUTPUT"

- name: Trigger i18n sync workflow
if: steps.detect.outputs.has_changes == 'true'
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
BASE_SHA: ${{ steps.detect.outputs.base_sha }}
HEAD_SHA: ${{ steps.detect.outputs.head_sha }}
CHANGED_FILES: ${{ steps.detect.outputs.changed_files }}
with:
token: ${{ secrets.GITHUB_TOKEN }}
event-type: i18n-sync
client-payload: '{"changed_files": "${{ steps.detect.outputs.changed_files }}", "diff_base64": "${{ steps.detect.outputs.diff_base64 }}", "sync_mode": "incremental", "trigger_sha": "${{ github.sha }}"}'
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const fs = require('fs')

const changesJson = fs.readFileSync('/tmp/i18n-changes.json', 'utf8')
const changesBase64 = Buffer.from(changesJson).toString('base64')
const maxEmbeddedChangesChars = 48000
const changesEmbedded = changesBase64.length <= maxEmbeddedChangesChars

if (!changesEmbedded) {
console.log(`Structured change set too large to embed safely (${changesBase64.length} chars). Downstream workflow will regenerate it from git history.`)
}

await github.rest.repos.createDispatchEvent({
owner: context.repo.owner,
repo: context.repo.repo,
event_type: 'i18n-sync',
client_payload: {
changed_files: process.env.CHANGED_FILES,
changes_base64: changesEmbedded ? changesBase64 : '',
changes_embedded: changesEmbedded,
sync_mode: 'incremental',
base_sha: process.env.BASE_SHA,
head_sha: process.env.HEAD_SHA,
},
})

.github/workflows/vdb-tests-full.yml (vendored, new file, 95 lines)
@@ -0,0 +1,95 @@
name: Run Full VDB Tests

on:
schedule:
- cron: '0 3 * * 1'
workflow_dispatch:

permissions:
contents: read

concurrency:
group: vdb-tests-full-${{ github.ref || github.run_id }}
cancel-in-progress: true

jobs:
test:
name: Full VDB Tests
if: github.repository == 'langgenius/dify'
runs-on: ubuntu-latest
strategy:
matrix:
python-version:
- "3.12"

steps:
- name: Checkout code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false

- name: Free Disk Space
uses: endersonmenezes/free-disk-space@7901478139cff6e9d44df5972fd8ab8fcade4db1 # v3.2.2
with:
remove_dotnet: true
remove_haskell: true
remove_tool_cache: true

- name: Setup UV and Python
uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0
with:
enable-cache: true
python-version: ${{ matrix.python-version }}
cache-dependency-glob: api/uv.lock

- name: Check UV lockfile
run: uv lock --project api --check

- name: Install dependencies
run: uv sync --project api --dev

- name: Set up dotenvs
run: |
cp docker/.env.example docker/.env
cp docker/middleware.env.example docker/middleware.env

- name: Expose Service Ports
run: sh .github/workflows/expose_service_ports.sh

# - name: Set up Vector Store (TiDB)
#   uses: hoverkraft-tech/compose-action@v2.0.2
#   with:
#     compose-file: docker/tidb/docker-compose.yaml
#     services: |
#       tidb
#       tiflash

- name: Set up Full Vector Store Matrix
uses: hoverkraft-tech/compose-action@4894d2492015c1774ee5a13a95b1072093087ec3 # v2.5.0
with:
compose-file: |
docker/docker-compose.yaml
services: |
weaviate
qdrant
couchbase-server
etcd
minio
milvus-standalone
pgvecto-rs
pgvector
chroma
elasticsearch
oceanbase

- name: setup test config
run: |
echo $(pwd)
ls -lah .
cp api/tests/integration_tests/.env.example api/tests/integration_tests/.env

# - name: Check VDB Ready (TiDB)
#   run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py

- name: Test Vector Stores
run: uv run --project api bash dev/pytest/pytest_vdb.sh

.github/workflows/vdb-tests.yml (vendored, 28 lines changed)
@@ -1,20 +1,22 @@
name: Run VDB Tests
name: Run VDB Smoke Tests

on:
workflow_call:

permissions:
contents: read

concurrency:
group: vdb-tests-${{ github.head_ref || github.run_id }}
cancel-in-progress: true

jobs:
test:
name: VDB Tests
name: VDB Smoke Tests
runs-on: ubuntu-latest
strategy:
matrix:
python-version:
- "3.11"
- "3.12"

steps:
@@ -31,7 +33,7 @@ jobs:
remove_tool_cache: true

- name: Setup UV and Python
uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
with:
enable-cache: true
python-version: ${{ matrix.python-version }}
@@ -59,23 +61,18 @@ jobs:
#       tidb
#       tiflash

- name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase, OceanBase)
- name: Set up Vector Stores for Smoke Coverage
uses: hoverkraft-tech/compose-action@4894d2492015c1774ee5a13a95b1072093087ec3 # v2.5.0
with:
compose-file: |
docker/docker-compose.yaml
services: |
db_postgres
redis
weaviate
qdrant
couchbase-server
etcd
minio
milvus-standalone
pgvecto-rs
pgvector
chroma
elasticsearch
oceanbase

- name: setup test config
run: |
@@ -87,4 +84,9 @@ jobs:
#   run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py

- name: Test Vector Stores
run: uv run --project api bash dev/pytest/pytest_vdb.sh
run: |
uv run --project api pytest --timeout "${PYTEST_TIMEOUT:-180}" \
api/tests/integration_tests/vdb/chroma \
api/tests/integration_tests/vdb/pgvector \
api/tests/integration_tests/vdb/qdrant \
api/tests/integration_tests/vdb/weaviate

.github/workflows/web-e2e.yml (vendored, new file, 68 lines)
@@ -0,0 +1,68 @@
name: Web Full-Stack E2E

on:
workflow_call:

permissions:
contents: read

concurrency:
group: web-e2e-${{ github.head_ref || github.run_id }}
cancel-in-progress: true

jobs:
test:
name: Web Full-Stack E2E
runs-on: ubuntu-latest
defaults:
run:
shell: bash

steps:
- name: Checkout code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false

- name: Setup web dependencies
uses: ./.github/actions/setup-web

- name: Setup UV and Python
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
with:
enable-cache: true
python-version: "3.12"
cache-dependency-glob: api/uv.lock

- name: Install API dependencies
run: uv sync --project api --dev

- name: Install Playwright browser
working-directory: ./e2e
run: vp run e2e:install

- name: Run isolated source-api and built-web Cucumber E2E tests
working-directory: ./e2e
env:
E2E_ADMIN_EMAIL: e2e-admin@example.com
E2E_ADMIN_NAME: E2E Admin
E2E_ADMIN_PASSWORD: E2eAdmin12345
E2E_FORCE_WEB_BUILD: "1"
E2E_INIT_PASSWORD: E2eInit12345
run: vp run e2e:full

- name: Upload Cucumber report
if: ${{ !cancelled() }}
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: cucumber-report
path: e2e/cucumber-report
retention-days: 7

- name: Upload E2E logs
if: ${{ !cancelled() }}
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: e2e-logs
path: e2e/.logs
retention-days: 7

.github/workflows/web-tests.yml (vendored, 38 lines changed)
@@ -22,8 +22,8 @@ jobs:
strategy:
fail-fast: false
matrix:
shardIndex: [1, 2, 3, 4, 5, 6]
shardTotal: [6]
shardIndex: [1, 2, 3, 4]
shardTotal: [4]
defaults:
run:
shell: bash
@@ -66,7 +66,6 @@ jobs:
- name: Checkout code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 0
persist-credentials: false

- name: Setup web environment
@@ -84,40 +83,9 @@ jobs:

- name: Report coverage
if: ${{ env.CODECOV_TOKEN != '' }}
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5.5.3
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
with:
directory: web/coverage
flags: web
env:
CODECOV_TOKEN: ${{ env.CODECOV_TOKEN }}

web-build:
name: Web Build
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./web

steps:
- name: Checkout code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false

- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
with:
files: |
web/**
.github/workflows/web-tests.yml
.github/actions/setup-web/**

- name: Setup web environment
if: steps.changed-files.outputs.any_changed == 'true'
uses: ./.github/actions/setup-web

- name: Web build check
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: vp run build

.gitignore (vendored, 5 lines changed)
@@ -209,10 +209,10 @@ api/.vscode
.history

.idea/
web/migration/

# pnpm
/.pnpm-store
/node_modules

# plugin migrate
plugins.jsonl
@@ -222,7 +222,6 @@ mise.toml


# AI Assistant
.sisyphus/
.roo/
/.claude/worktrees/
api/.env.backup
@@ -241,4 +240,4 @@ scripts/stress-test/reports/
*.local.md

# Code Agent Folder
.qoder/*
.qoder/*

Makefile (13 lines changed)
@@ -24,8 +24,8 @@ prepare-docker:
# Step 2: Prepare web environment
prepare-web:
@echo "🌐 Setting up web environment..."
@cp -n web/.env.example web/.env 2>/dev/null || echo "Web .env already exists"
@cd web && pnpm install
@cp -n web/.env.example web/.env.local 2>/dev/null || echo "Web .env.local already exists"
@pnpm install
@echo "✅ Web environment prepared (not started)"

# Step 3: Prepare API environment
@@ -74,6 +74,12 @@ type-check:
@uv --directory api run mypy --exclude-gitignore --exclude 'tests/' --exclude 'migrations/' --check-untyped-defs --disable-error-code=import-untyped .
@echo "✅ Type checks complete"

type-check-core:
@echo "📝 Running core type checks (basedpyright + mypy)..."
@./dev/basedpyright-check $(PATH_TO_CHECK)
@uv --directory api run mypy --exclude-gitignore --exclude 'tests/' --exclude 'migrations/' --check-untyped-defs --disable-error-code=import-untyped .
@echo "✅ Core type checks complete"

test:
@echo "🧪 Running backend unit tests..."
@if [ -n "$(TARGET_TESTS)" ]; then \
@@ -87,7 +93,7 @@ test:
# Build Docker images
build-web:
@echo "Building web Docker image: $(WEB_IMAGE):$(VERSION)..."
docker build -t $(WEB_IMAGE):$(VERSION) ./web
docker build -f web/Dockerfile -t $(WEB_IMAGE):$(VERSION) .
@echo "Web Docker image built successfully: $(WEB_IMAGE):$(VERSION)"

build-api:
@@ -133,6 +139,7 @@ help:
@echo " make check - Check code with ruff"
@echo " make lint - Format, fix, and lint code (ruff, imports, dotenv)"
@echo " make type-check - Run type checks (basedpyright, pyrefly, mypy)"
@echo " make type-check-core - Run core type checks (basedpyright, mypy)"
@echo " make test - Run backend unit tests (or TARGET_TESTS=./api/tests/<target_tests>)"
@echo ""
@echo "Docker Build Targets:"
@@ -53,7 +53,11 @@
<a href="./docs/tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./docs/vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./docs/de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="./docs/it-IT/README.md"><img alt="README in Italiano" src="https://img.shields.io/badge/Italiano-d9d9d9"></a>
<a href="./docs/pt-BR/README.md"><img alt="README em Português do Brasil" src="https://img.shields.io/badge/Portugu%C3%AAs%20do%20Brasil-d9d9d9"></a>
<a href="./docs/sl-SI/README.md"><img alt="README Slovenščina" src="https://img.shields.io/badge/Sloven%C5%A1%C4%8Dina-d9d9d9"></a>
<a href="./docs/bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="./docs/hi-IN/README.md"><img alt="README in हिन्दी" src="https://img.shields.io/badge/Hindi-d9d9d9"></a>
</p>

Dify is an open-source LLM app development platform. Its intuitive interface combines AI workflow, RAG pipeline, agent capabilities, model management, observability features (including [Opik](https://www.comet.com/docs/opik/integrations/dify), [Langfuse](https://docs.langfuse.com), and [Arize Phoenix](https://docs.arize.com/phoenix)) and more, letting you quickly go from prototype to production. Here's a list of the core features:

@@ -33,9 +33,6 @@ TRIGGER_URL=http://localhost:5001
# The time in seconds after the signature is rejected
FILES_ACCESS_TIMEOUT=300

# Collaboration mode toggle
ENABLE_COLLABORATION_MODE=false

# Access token expiration time in minutes
ACCESS_TOKEN_EXPIRE_MINUTES=60

@@ -130,7 +127,8 @@ ALIYUN_OSS_AUTH_VERSION=v1
ALIYUN_OSS_REGION=your-region
# Don't start with '/'. OSS doesn't support leading slash in object names.
ALIYUN_OSS_PATH=your-path
ALIYUN_CLOUDBOX_ID=your-cloudbox-id
# Optional CloudBox ID for Aliyun OSS, DO NOT enable it if you are not using CloudBox.
#ALIYUN_CLOUDBOX_ID=your-cloudbox-id

# Google Storage configuration
GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name
@@ -647,11 +645,6 @@ INNER_API_KEY_FOR_PLUGIN=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y
MARKETPLACE_ENABLED=true
MARKETPLACE_API_URL=https://marketplace.dify.ai

# Creators Platform configuration
CREATORS_PLATFORM_FEATURES_ENABLED=true
CREATORS_PLATFORM_API_URL=https://creators.dify.ai
CREATORS_PLATFORM_OAUTH_CLIENT_ID=

# Endpoint configuration
ENDPOINT_URL_TEMPLATE=http://localhost:5002/e/{hook_id}

@@ -747,34 +740,6 @@ SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30
SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000

# Sandbox Dify CLI configuration
# Directory containing dify CLI binaries (dify-cli-<os>-<arch>). Defaults to api/bin when unset.
SANDBOX_DIFY_CLI_ROOT=

# CLI API URL for sandbox (dify-sandbox or e2b) to call back to Dify API.
# This URL must be accessible from the sandbox environment.
# For local development: use http://localhost:5001 or http://127.0.0.1:5001
# For middleware docker stack (api on host): keep localhost/127.0.0.1 and use agentbox via 127.0.0.1:2222
# For Docker deployment: use http://api:5001 (internal Docker network)
# For external sandbox (e.g., e2b): use a publicly accessible URL
CLI_API_URL=http://localhost:5001

# Base URL for storage file ticket API endpoints (upload/download).
# Used by sandbox containers (internal or external like e2b) that need an absolute,
# routable address to reach the Dify API file endpoints.
# Required for sandbox runtime file access.
# For local development: http://localhost:5001
# For all-in-one Docker deployment with nginx: http://localhost
# For public/remote sandbox environments (e.g., e2b): use a public domain or IP
FILES_API_URL=http://localhost:5001

# Optional defaults for SSH sandbox provider setup (for manual config/CLI usage).
# Middleware/local dev usually uses 127.0.0.1:2222; full docker deployment usually uses agentbox:22.
SSH_SANDBOX_HOST=127.0.0.1
SSH_SANDBOX_PORT=2222
SSH_SANDBOX_USERNAME=agentbox
SSH_SANDBOX_PASSWORD=agentbox
SSH_SANDBOX_BASE_WORKING_PATH=/workspace/sandboxes

# Redis URL used for event bus between API and
# celery worker

@@ -1,242 +1,14 @@
[importlinter]
root_packages =
core
dify_graph
constants
context
configs
controllers
extensions
factories
libs
models
tasks
services
include_external_packages = True

[importlinter:contract:workflow]
name = Workflow
type=layers
layers =
graph_engine
graph_events
graph
nodes
node_events
runtime
entities
containers =
dify_graph
ignore_imports =
dify_graph.nodes.base.node -> dify_graph.graph_events
dify_graph.nodes.iteration.iteration_node -> dify_graph.graph_events
dify_graph.nodes.loop.loop_node -> dify_graph.graph_events

dify_graph.nodes.iteration.iteration_node -> dify_graph.graph_engine
dify_graph.nodes.loop.loop_node -> dify_graph.graph_engine
# TODO(QuantumGhost): fix the import violation later
dify_graph.entities.pause_reason -> dify_graph.nodes.human_input.entities

dify_graph.nodes.base.node -> core.workflow.node_factory
dify_graph.nodes.tool.tool_node -> core.workflow.node_factory
dify_graph.file.file_manager -> models.model
dify_graph.file.file_manager -> models.tools
dify_graph.file.file_manager -> extensions.ext_database

[importlinter:contract:workflow-infrastructure-dependencies]
name = Workflow Infrastructure Dependencies
type = forbidden
source_modules =
dify_graph
forbidden_modules =
extensions.ext_database
extensions.ext_redis
allow_indirect_imports = True
ignore_imports =
dify_graph.nodes.llm.node -> extensions.ext_database
dify_graph.model_runtime.model_providers.__base.ai_model -> extensions.ext_redis
dify_graph.model_runtime.model_providers.model_provider_factory -> extensions.ext_redis
dify_graph.file.file_manager -> extensions.ext_database
dify_graph.nodes.llm.llm_utils -> extensions.ext_database

[importlinter:contract:workflow-external-imports]
name = Workflow External Imports
type = forbidden
source_modules =
dify_graph
forbidden_modules =
configs
controllers
extensions
models
services
tasks
core.agent
core.app
core.base
core.callback_handler
core.datasource
core.db
core.entities
core.errors
core.extension
core.external_data_tool
core.file
core.helper
core.hosting_configuration
core.indexing_runner
core.llm_generator
core.logging
core.mcp
core.memory
core.moderation
core.ops
core.plugin
core.prompt
core.provider_manager
core.rag
core.repositories
core.schemas
core.tools
core.trigger
core.variables
ignore_imports =
dify_graph.nodes.llm.llm_utils -> core.model_manager
dify_graph.nodes.llm.protocols -> core.model_manager
dify_graph.nodes.llm.llm_utils -> dify_graph.model_runtime.model_providers.__base.large_language_model
dify_graph.nodes.llm.node -> core.tools.signature
dify_graph.nodes.tool.tool_node -> core.callback_handler.workflow_tool_callback_handler
dify_graph.nodes.tool.tool_node -> core.tools.tool_engine
dify_graph.nodes.tool.tool_node -> core.tools.tool_manager
dify_graph.nodes.parameter_extractor.parameter_extractor_node -> core.prompt.advanced_prompt_transform
dify_graph.nodes.parameter_extractor.parameter_extractor_node -> core.prompt.simple_prompt_transform
dify_graph.nodes.parameter_extractor.parameter_extractor_node -> dify_graph.model_runtime.model_providers.__base.large_language_model
dify_graph.nodes.question_classifier.question_classifier_node -> core.prompt.simple_prompt_transform
dify_graph.nodes.parameter_extractor.parameter_extractor_node -> core.model_manager
dify_graph.nodes.question_classifier.question_classifier_node -> core.model_manager
dify_graph.nodes.tool.tool_node -> core.tools.utils.message_transformer
dify_graph.nodes.llm.node -> core.llm_generator.output_parser.errors
dify_graph.nodes.llm.node -> core.llm_generator.output_parser.file_ref
dify_graph.nodes.llm.node -> core.llm_generator.output_parser.structured_output
dify_graph.nodes.llm.node -> core.model_manager
dify_graph.nodes.llm.entities -> core.prompt.entities.advanced_prompt_entities
dify_graph.nodes.llm.node -> core.prompt.entities.advanced_prompt_entities
dify_graph.nodes.llm.node -> core.prompt.utils.prompt_message_util
dify_graph.nodes.parameter_extractor.entities -> core.prompt.entities.advanced_prompt_entities
dify_graph.nodes.parameter_extractor.parameter_extractor_node -> core.prompt.entities.advanced_prompt_entities
dify_graph.nodes.parameter_extractor.parameter_extractor_node -> core.prompt.utils.prompt_message_util
dify_graph.nodes.question_classifier.entities -> core.prompt.entities.advanced_prompt_entities
dify_graph.nodes.question_classifier.question_classifier_node -> core.prompt.utils.prompt_message_util
dify_graph.nodes.llm.node -> models.dataset
dify_graph.nodes.llm.file_saver -> core.tools.signature
dify_graph.nodes.llm.file_saver -> core.tools.tool_file_manager
dify_graph.nodes.tool.tool_node -> core.tools.errors
dify_graph.nodes.llm.node -> extensions.ext_database
dify_graph.nodes.llm.node -> models.model
dify_graph.nodes.llm.node -> configs
dify_graph.nodes.llm.node -> core.agent.entities
dify_graph.nodes.llm.node -> core.agent.patterns
dify_graph.nodes.llm.node -> core.app.entities.app_invoke_entities
dify_graph.nodes.llm.node -> core.helper.code_executor
dify_graph.nodes.llm.node -> core.memory.base
dify_graph.nodes.llm.node -> core.sandbox
dify_graph.nodes.llm.node -> core.sandbox.bash.session
dify_graph.nodes.llm.node -> core.sandbox.entities.config
dify_graph.nodes.llm.node -> core.skill.assembler
dify_graph.nodes.llm.node -> core.skill.constants
dify_graph.nodes.llm.node -> core.skill.entities.skill_bundle
dify_graph.nodes.llm.node -> core.skill.entities.skill_document
dify_graph.nodes.llm.node -> core.skill.entities.skill_metadata
dify_graph.nodes.llm.node -> core.skill.entities.tool_dependencies
dify_graph.nodes.llm.node -> core.tools.tool_file_manager
dify_graph.nodes.llm.node -> core.tools.tool_manager
dify_graph.nodes.tool.tool_node -> services
dify_graph.model_runtime.model_providers.__base.ai_model -> configs
dify_graph.model_runtime.model_providers.__base.ai_model -> extensions.ext_redis
dify_graph.model_runtime.model_providers.__base.large_language_model -> configs
dify_graph.model_runtime.model_providers.__base.text_embedding_model -> core.entities.embedding_type
dify_graph.model_runtime.model_providers.model_provider_factory -> configs
dify_graph.model_runtime.model_providers.model_provider_factory -> extensions.ext_redis
dify_graph.model_runtime.model_providers.model_provider_factory -> models.provider_ids
dify_graph.file.file_manager -> configs
dify_graph.file.file_manager -> extensions.ext_database
dify_graph.file.file_manager -> models.model
dify_graph.file.file_manager -> models.tools
dify_graph.nodes.llm.llm_utils -> core.app.llm.model_access
dify_graph.nodes.llm.llm_utils -> core.app.llm.quota
dify_graph.nodes.llm.llm_utils -> core.memory
dify_graph.nodes.llm.llm_utils -> core.memory.base
dify_graph.nodes.llm.llm_utils -> extensions.ext_database
dify_graph.nodes.llm.llm_utils -> models.model
dify_graph.nodes.llm.llm_utils -> core.prompt.entities.advanced_prompt_entities
dify_graph.nodes.llm.entities -> core.agent.entities
dify_graph.nodes.base.node -> core.workflow.node_factory
dify_graph.nodes.tool.tool_node -> core.workflow.node_factory

[importlinter:contract:rsc]
name = RSC
type = layers
layers =
graph_engine
response_coordinator
containers =
dify_graph.graph_engine

[importlinter:contract:worker]
name = Worker
type = layers
layers =
graph_engine
worker
containers =
dify_graph.graph_engine

[importlinter:contract:graph-engine-architecture]
name = Graph Engine Architecture
type = layers
layers =
graph_engine
orchestration
command_processing
event_management
error_handler
graph_traversal
graph_state_manager
worker_management
domain
containers =
dify_graph.graph_engine

[importlinter:contract:domain-isolation]
name = Domain Model Isolation
type = forbidden
source_modules =
dify_graph.graph_engine.domain
forbidden_modules =
dify_graph.graph_engine.worker_management
dify_graph.graph_engine.command_channels
dify_graph.graph_engine.layers
dify_graph.graph_engine.protocols

[importlinter:contract:worker-management]
name = Worker Management
type = forbidden
source_modules =
dify_graph.graph_engine.worker_management
forbidden_modules =
dify_graph.graph_engine.orchestration
dify_graph.graph_engine.command_processing
dify_graph.graph_engine.event_management

[importlinter:contract:graph-traversal-components]
name = Graph Traversal Components
type = layers
layers =
edge_processor
skip_propagator
containers =
dify_graph.graph_engine.graph_traversal

[importlinter:contract:command-channels]
name = Command Channels Independence
type = independence
modules =
dify_graph.graph_engine.command_channels.in_memory_channel
dify_graph.graph_engine.command_channels.redis_channel

@@ -100,7 +100,7 @@ ignore = [
"configs/*" = [
"N802", # invalid-function-name
]
"dify_graph/model_runtime/callbacks/base_callback.py" = ["T201"]
"graphon/model_runtime/callbacks/base_callback.py" = ["T201"]
"core/workflow/callbacks/workflow_logging_callback.py" = ["T201"]
"libs/gmpy2_pkcs10aep_cipher.py" = [
"N803", # invalid-argument-name

@@ -40,6 +40,8 @@ The scripts resolve paths relative to their location, so you can run them from a
./dev/start-web
```

`./dev/setup` and `./dev/start-web` install JavaScript dependencies through the repository root workspace, so you do not need a separate `cd web && pnpm install` step.

1. Set up your application by visiting `http://localhost:3000`.

1. Start the worker service (async and scheduler tasks, runs from `api`).
@@ -54,86 +56,6 @@ The scripts resolve paths relative to their location, so you can run them from a
./dev/start-beat
```

### Manual commands

<details>
<summary>Show manual setup and run steps</summary>

These commands assume you start from the repository root.

1. Start the docker-compose stack.

The backend requires middleware, including PostgreSQL, Redis, and Weaviate, which can be started together using `docker-compose`.

```bash
cp docker/middleware.env.example docker/middleware.env
# Use mysql or another vector database profile if you are not using postgres/weaviate.
docker compose -f docker/docker-compose.middleware.yaml --profile postgresql --profile weaviate -p dify up -d
```

1. Copy env files.

```bash
cp api/.env.example api/.env
cp web/.env.example web/.env.local
```

1. Install UV if needed.

```bash
pip install uv
# Or on macOS
brew install uv
```

1. Install API dependencies.

```bash
cd api
uv sync --group dev
```

1. Install web dependencies.

```bash
cd web
pnpm install
cd ..
```

1. Start backend (runs migrations first, in a new terminal).

```bash
cd api
uv run flask db upgrade
uv run flask run --host 0.0.0.0 --port=5001 --debug
```

1. Start Dify [web](../web) service (in a new terminal).

```bash
cd web
pnpm dev:inspect
```

1. Set up your application by visiting `http://localhost:3000`.

1. Optional: start the worker service (async tasks, in a new terminal).

```bash
cd api
uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention,workflow_based_app_execution
```

1. Optional: start Celery Beat (scheduled tasks, in a new terminal).

```bash
cd api
uv run celery -A app.celery beat
```

</details>

### Environment notes

> [!IMPORTANT]

@@ -1,9 +0,0 @@
Summary:
- Application configuration definitions, including file access settings.

Invariants:
- File access settings drive signed URL expiration and base URLs.

Tests:
- Config parsing tests under tests/unit_tests/configs.
@@ -1,9 +0,0 @@
Summary:
- Registers file-related API namespaces and routes for the files service.
- Includes app-assets and sandbox archive proxy controllers.

Invariants:
- files_ns must include all file controller modules to register routes.

Tests:
- Coverage via controller unit tests and route registration smoke checks.
@@ -1,14 +0,0 @@
Summary:
- App assets download proxy endpoint (signed URL verification, stream from storage).

Invariants:
- Validates AssetPath fields (UUIDs, asset_type allowlist).
- Verifies tenant-scoped signature and expiration before reading storage.
- URL uses expires_at/nonce/sign query params.

Edge Cases:
- Missing files return NotFound.
- Invalid signature or expired link returns Forbidden.

Tests:
- Verify signature validation and invalid/expired cases.
@@ -1,13 +0,0 @@
Summary:
- App assets upload proxy endpoint (signed URL verification, upload to storage).

Invariants:
- Validates AssetPath fields (UUIDs, asset_type allowlist).
- Verifies tenant-scoped signature and expiration before writing storage.
- URL uses expires_at/nonce/sign query params.

Edge Cases:
- Invalid signature or expired link returns Forbidden.

Tests:
- Verify signature validation and invalid/expired cases.
@@ -1,14 +0,0 @@
Summary:
- Sandbox archive upload/download proxy endpoints (signed URL verification, stream to storage).

Invariants:
- Validates tenant_id and sandbox_id UUIDs.
- Verifies tenant-scoped signature and expiration before storage access.
- URL uses expires_at/nonce/sign query params.

Edge Cases:
- Missing archive returns NotFound.
- Invalid signature or expired link returns Forbidden.

Tests:
- Add unit tests for signature validation if needed.
@@ -1,9 +0,0 @@
Summary:
- Collects file assets and emits FileAsset entries with storage keys.

Invariants:
- Storage keys are derived via AppAssetStorage for draft files.

Tests:
- Covered by asset build pipeline tests.
@@ -1,14 +0,0 @@
Summary:
- Builds skill artifacts from markdown assets and uploads resolved outputs.

Invariants:
- Reads draft asset content via AppAssetStorage refs.
- Writes resolved artifacts via AppAssetStorage refs.
- FileAsset storage keys are derived via AppAssetStorage.

Edge Cases:
- Missing or invalid JSON content yields empty skill content/metadata.

Tests:
- Build pipeline unit tests covering compile/upload paths.
@@ -1,9 +0,0 @@
Summary:
- Converts AppAssetFileTree to FileAsset items for packaging.

Invariants:
- Storage keys for assets are derived via AppAssetStorage.

Tests:
- Used in packaging/service tests for asset bundles.
@@ -1,14 +0,0 @@
# Zip Packager Notes

## Purpose
- Builds a ZIP archive of asset contents stored via the configured storage backend.

## Key Decisions
- Packaging writes assets into an in-memory zip buffer returned as bytes.
- Asset fetch + zip writing are executed via a thread pool with a lock guarding `ZipFile` writes.

## Edge Cases
- ZIP writes are serialized by the lock; storage reads still run in parallel.

## Tests/Verification
- None yet.
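
The packaging pattern these notes describe — parallel storage reads with serialized `ZipFile` writes into an in-memory buffer — can be sketched as follows. This is a minimal sketch, not the shipped packager: the `storage.load(key) -> bytes` interface and the worker count are assumptions.

```python
import io
import zipfile
from concurrent.futures import ThreadPoolExecutor
from threading import Lock


def package_assets(storage, items: dict[str, str]) -> bytes:
    """Zip asset contents into an in-memory buffer and return the bytes.

    `items` maps archive names to storage keys. Storage reads run in a
    thread pool; ZipFile is not thread-safe, so writes are serialized
    behind a lock (assumed interface: storage.load(key) -> bytes).
    """
    buffer = io.BytesIO()
    write_lock = Lock()
    with zipfile.ZipFile(buffer, "w", zipfile.ZIP_DEFLATED) as archive:

        def fetch_and_write(arcname: str, key: str) -> None:
            data = storage.load(key)  # storage reads run in parallel
            with write_lock:  # zip writes are serialized by the lock
                archive.writestr(arcname, data)

        with ThreadPoolExecutor(max_workers=8) as pool:
            futures = [pool.submit(fetch_and_write, name, key) for name, key in items.items()]
            for future in futures:
                future.result()  # re-raise any fetch/write error
    return buffer.getvalue()
```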
@@ -1,9 +0,0 @@
Summary:
- Builds AssetItem entries for asset trees using AssetPath-derived storage keys.

Invariants:
- Uses AssetPath to compute draft storage keys.

Tests:
- Covered by asset parsing and packaging tests.
@@ -1,20 +0,0 @@
Summary:
- Defines AssetPath facade + typed asset path classes for app-asset storage access.
- Maps asset paths to storage keys and generates presigned or signed-proxy URLs.
- Signs proxy URLs using tenant private keys and enforces expiration.
- Exposes app_asset_storage singleton for reuse.

Invariants:
- AssetPathBase fields (tenant_id/app_id/resource_id/node_id) must be UUIDs.
- AssetPath.from_components enforces valid types and resolved node_id presence.
- Storage keys are derived internally via AssetPathBase.get_storage_key; callers never supply raw paths.
- AppAssetStorage.storage returns the cached presign wrapper (not the raw storage).

Edge Cases:
- Storage backends without presign support must fall back to signed proxy URLs.
- Signed proxy verification enforces expiration and tenant-scoped signing keys.
- Upload URLs also fall back to signed proxy endpoints when presign is unsupported.
- load_or_none treats SilentStorage "File Not Found" bytes as missing.

Tests:
- Unit tests for ref validation, storage key mapping, and signed URL verification.
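
The signed-proxy scheme in these notes (expires_at/nonce/sign query params, tenant-scoped keys, expiration enforced on verify) could look roughly like the sketch below. The HMAC construction and the `tenant_key` argument are assumptions; the real code signs with tenant private keys through its own key management.

```python
import hashlib
import hmac
import secrets
import time
from urllib.parse import urlencode


def sign_proxy_url(base_url: str, storage_key: str, tenant_key: bytes, ttl: int = 300) -> str:
    """Build a proxy URL carrying expires_at/nonce/sign query params."""
    expires_at = int(time.time()) + ttl
    nonce = secrets.token_hex(16)
    message = f"{storage_key}:{expires_at}:{nonce}".encode()
    sign = hmac.new(tenant_key, message, hashlib.sha256).hexdigest()
    query = urlencode({"expires_at": expires_at, "nonce": nonce, "sign": sign})
    return f"{base_url}/{storage_key}?{query}"


def verify_proxy_url(storage_key: str, expires_at: int, nonce: str, sign: str, tenant_key: bytes) -> bool:
    """Reject expired links first, then compare signatures in constant time."""
    if time.time() > expires_at:
        return False
    message = f"{storage_key}:{expires_at}:{nonce}".encode()
    expected = hmac.new(tenant_key, message, hashlib.sha256).hexdigest()
    return hmac.compare_digest(expected, sign)
```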
@@ -1,10 +0,0 @@
Summary:
- Extracts asset files from a zip and persists them into app asset storage.

Invariants:
- Rejects path traversal/absolute/backslash paths.
- Saves extracted files via AppAssetStorage draft refs.

Tests:
- Zip security edge cases and tree construction tests.
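
A minimal sketch of the zip-security invariant above (reject traversal, absolute, and backslash paths before extraction); the function name is hypothetical:

```python
import zipfile
from pathlib import PurePosixPath


def safe_member_names(archive: zipfile.ZipFile) -> list[str]:
    """Return member names that pass the path-safety checks, or raise."""
    names: list[str] = []
    for name in archive.namelist():
        if "\\" in name:  # backslash paths are rejected outright
            raise ValueError(f"unsafe path in zip: {name}")
        path = PurePosixPath(name)
        if path.is_absolute() or ".." in path.parts:  # absolute or traversal
            raise ValueError(f"unsafe path in zip: {name}")
        names.append(name)
    return names
```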
@@ -1,9 +0,0 @@
Summary:
- Downloads published app asset zip into sandbox and extracts it.

Invariants:
- Uses AppAssetStorage to generate download URLs for build zips (internal URL).

Tests:
- Sandbox initialization integration tests.
@@ -1,12 +0,0 @@
Summary:
- Downloads draft/resolved assets into sandbox for draft execution.

Invariants:
- Uses AppAssetStorage to generate download URLs for draft/resolved refs (internal URL).

Edge Cases:
- No nodes -> returns early.

Tests:
- Sandbox draft initialization tests.
@@ -1,9 +0,0 @@
Summary:
- Sandbox lifecycle wrapper (ready/cancel/fail signals, mount/unmount, release).

Invariants:
- wait_ready raises with the original initialization error as the cause.
- release always attempts unmount and environment release, logging failures.

Tests:
- Covered by sandbox lifecycle/unit tests and workflow execution error handling.
@@ -1,2 +0,0 @@
Summary:
- Sandbox security helper modules.
@@ -1,13 +0,0 @@
Summary:
- Generates and verifies signed URLs for sandbox archive upload/download.

Invariants:
- tenant_id and sandbox_id must be UUIDs.
- Signatures are tenant-scoped and include operation, expiry, and nonce.

Edge Cases:
- Missing tenant private key raises ValueError.
- Expired or tampered signatures are rejected.

Tests:
- Add unit tests if sandbox archive signature behavior expands.
@@ -1,12 +0,0 @@
Summary:
- Manages sandbox archive uploads/downloads for workspace persistence.

Invariants:
- Archive storage key is sandbox/<tenant_id>/<sandbox_id>.tar.gz.
- Signed URLs are tenant-scoped and use the external files URL.

Edge Cases:
- Missing archive skips mount.

Tests:
- Covered indirectly via sandbox integration tests.
@@ -1,9 +0,0 @@
Summary:
- Loads/saves skill bundles to app asset storage.

Invariants:
- Skill bundles use AppAssetStorage refs and JSON serialization.

Tests:
- Covered by skill bundle build/load unit tests.
@@ -1,16 +0,0 @@
# E2B Sandbox Provider Notes

## Purpose
- Implements the E2B-backed `VirtualEnvironment` provider and bootstraps sandbox metadata, file I/O, and command execution.

## Key Decisions
- Sandbox metadata is gathered during `_construct_environment` using the E2B SDK before returning `Metadata`.
- Architecture/OS detection uses a single `uname -m -s` call split by whitespace to reduce round-trips.
- Command execution streams stdout/stderr through `QueueTransportReadCloser`; stdin is unsupported.

## Edge Cases
- `release_environment` raises when sandbox termination fails.
- `execute_command` runs in a background thread; consumers must read stdout/stderr until EOF.

## Tests/Verification
- None yet. Add targeted service tests when behavior changes.
@@ -1,14 +0,0 @@
Summary:
- App asset CRUD, publish/build pipeline, and presigned URL generation.

Invariants:
- Asset storage access goes through AppAssetStorage + AssetPath, using the app_asset_storage singleton.
- Tree operations require tenant/app scoping and lock for mutation.
- Asset zips are packaged via raw storage with storage keys from AppAssetStorage.

Edge Cases:
- File nodes larger than the preview limit are rejected.
- Deletion runs asynchronously; storage failures are logged.

Tests:
- Unit tests for storage URL generation and publish/build flows.
@@ -1,10 +0,0 @@
Summary:
- Imports app bundles, including asset extraction into app asset storage.

Invariants:
- Asset imports respect zip security checks and tenant/app scoping.
- Draft asset packaging uses AppAssetStorage for key mapping.

Tests:
- Bundle import unit tests and zip validation coverage.
@@ -1,6 +0,0 @@
Summary:
- Unit tests for AppAssetStorage ref validation, key mapping, and signing.

Tests:
- Covers valid/invalid refs, signature verify, expiration handling, and proxy URL generation.
api/app.py
@@ -1,6 +1,5 @@
from __future__ import annotations

import os
import sys
from typing import TYPE_CHECKING, cast

@@ -17,15 +16,10 @@ def is_db_command() -> bool:


# create app
flask_app = None
socketio_app = None

if is_db_command():
    from app_factory import create_migrations_app

    app = create_migrations_app()
    socketio_app = app
    flask_app = app
else:
    # Gunicorn and Celery handle monkey patching automatically in production by
    # specifying the `gevent` worker class. Manual monkey patching is not required here.
@@ -36,15 +30,8 @@ else:

    from app_factory import create_app

    socketio_app, flask_app = create_app()
    app = flask_app
    celery = cast("Celery", flask_app.extensions["celery"])
    app = create_app()
    celery = cast("Celery", app.extensions["celery"])

if __name__ == "__main__":
    from gevent import pywsgi
    from geventwebsocket.handler import WebSocketHandler  # type: ignore[reportMissingTypeStubs]

    host = os.environ.get("HOST", "0.0.0.0")
    port = int(os.environ.get("PORT", 5001))
    server = pywsgi.WSGIServer((host, port), socketio_app, handler_class=WebSocketHandler)
    server.serve_forever()
    app.run(host="0.0.0.0", port=5001)

@@ -1,7 +1,6 @@
import logging
import time

import socketio  # type: ignore[reportMissingTypeStubs]
from flask import request
from opentelemetry.trace import get_current_span
from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID
@@ -11,7 +10,6 @@ from contexts.wrapper import RecyclableContextVar
from controllers.console.error import UnauthorizedAndForceLogout
from core.logging.context import init_request_context
from dify_app import DifyApp
from extensions.ext_socketio import sio
from services.enterprise.enterprise_service import EnterpriseService
from services.feature_service import LicenseStatus

@@ -124,18 +122,14 @@ def create_flask_app_with_configs() -> DifyApp:
    return dify_app


def create_app() -> tuple[socketio.WSGIApp, DifyApp]:
def create_app() -> DifyApp:
    start_time = time.perf_counter()
    app = create_flask_app_with_configs()
    initialize_extensions(app)

    sio.app = app
    socketio_app = socketio.WSGIApp(sio, app)

    end_time = time.perf_counter()
    if dify_config.DEBUG:
        logger.info("Finished create_app (%s ms)", round((end_time - start_time) * 1000, 2))
    return socketio_app, app
    return app


def initialize_extensions(app: DifyApp):
@@ -149,6 +143,7 @@ def initialize_extensions(app: DifyApp):
        ext_commands,
        ext_compress,
        ext_database,
        ext_enterprise_telemetry,
        ext_fastopenapi,
        ext_forward_refs,
        ext_hosting_provider,
@@ -199,6 +194,7 @@ def initialize_extensions(app: DifyApp):
        ext_commands,
        ext_fastopenapi,
        ext_otel,
        ext_enterprise_telemetry,
        ext_request_logging,
        ext_session_factory,
    ]

Binary files not shown.
@@ -11,7 +11,7 @@ from configs import dify_config
from core.helper import encrypter
from core.plugin.entities.plugin_daemon import CredentialType
from core.plugin.impl.plugin import PluginInstaller
from core.tools.utils.system_encryption import encrypt_system_params as encrypt_system_oauth_params
from core.tools.utils.system_oauth_encryption import encrypt_system_oauth_params
from extensions.ext_database import db
from models import Tenant
from models.oauth import DatasourceOauthParamConfig, DatasourceProvider

@@ -2,14 +2,13 @@ import logging
from pathlib import Path
from typing import Any

from pydantic import Field
from pydantic.fields import FieldInfo
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict, TomlConfigSettingsSource

from libs.file_utils import search_file_upwards

from .deploy import DeploymentConfig
from .enterprise import EnterpriseFeatureConfig
from .enterprise import EnterpriseFeatureConfig, EnterpriseTelemetryConfig
from .extra import ExtraServiceConfig
from .feature import FeatureConfig
from .middleware import MiddlewareConfig
@@ -74,6 +73,8 @@ class DifyConfig(
    # Enterprise feature configs
    # **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
    EnterpriseFeatureConfig,
    # Enterprise telemetry configs
    EnterpriseTelemetryConfig,
):
    model_config = SettingsConfigDict(
        # read from dotenv format config file
@@ -83,17 +84,6 @@ class DifyConfig(
        extra="ignore",
    )

    SANDBOX_DIFY_CLI_ROOT: str | None = Field(
        default=None,
        description=(
            "Filesystem directory containing dify CLI binaries named dify-cli-<os>-<arch>. "
            "Defaults to api/bin when unset."
        ),
    )
    DIFY_PORT: int = Field(
        default=5001,
        description="Port used by Dify to communicate with the host machine.",
    )
    # Before adding any config,
    # please consider to arrange it in the proper config group of existed or added
    # for better readability and maintainability.

@@ -22,3 +22,52 @@ class EnterpriseFeatureConfig(BaseSettings):
    ENTERPRISE_REQUEST_TIMEOUT: int = Field(
        ge=1, description="Maximum timeout in seconds for enterprise requests", default=5
    )


class EnterpriseTelemetryConfig(BaseSettings):
    """
    Configuration for enterprise telemetry.
    """

    ENTERPRISE_TELEMETRY_ENABLED: bool = Field(
        description="Enable enterprise telemetry collection (also requires ENTERPRISE_ENABLED=true).",
        default=False,
    )

    ENTERPRISE_OTLP_ENDPOINT: str = Field(
        description="Enterprise OTEL collector endpoint.",
        default="",
    )

    ENTERPRISE_OTLP_HEADERS: str = Field(
        description="Auth headers for OTLP export (key=value,key2=value2).",
        default="",
    )

    ENTERPRISE_OTLP_PROTOCOL: str = Field(
        description="OTLP protocol: 'http' or 'grpc' (default: http).",
        default="http",
    )

    ENTERPRISE_OTLP_API_KEY: str = Field(
        description="Bearer token for enterprise OTLP export authentication.",
        default="",
    )

    ENTERPRISE_INCLUDE_CONTENT: bool = Field(
        description="Include input/output content in traces (privacy toggle).",
        # The default is False to avoid accidentally logging PII data in traces.
        default=False,
    )

    ENTERPRISE_SERVICE_NAME: str = Field(
        description="Service name for OTEL resource.",
        default="dify",
    )

    ENTERPRISE_OTEL_SAMPLING_RATE: float = Field(
        description="Sampling rate for enterprise traces (0.0 to 1.0, default 1.0 = 100%).",
        default=1.0,
        ge=0.0,
        le=1.0,
    )

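
The `key=value,key2=value2` format that `ENTERPRISE_OTLP_HEADERS` documents implies a small parser along these lines (a sketch; the shipped exporter setup may parse differently):

```python
def parse_otlp_headers(raw: str) -> dict[str, str]:
    """Parse 'key=value,key2=value2' into a headers dict, skipping blanks."""
    headers: dict[str, str] = {}
    for pair in raw.split(","):
        pair = pair.strip()
        if not pair or "=" not in pair:
            continue  # tolerate trailing commas and malformed pairs
        key, _, value = pair.partition("=")
        headers[key.strip()] = value.strip()
    return headers


assert parse_otlp_headers("authorization=Bearer abc, x-team=ops") == {
    "authorization": "Bearer abc",
    "x-team": "ops",
}
```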
@@ -271,17 +271,6 @@ class PluginConfig(BaseSettings):
    )


class CliApiConfig(BaseSettings):
    """
    Configuration for CLI API (for dify-cli to call back from external sandbox environments)
    """

    CLI_API_URL: str = Field(
        description="CLI API URL for external sandbox (e.g., e2b) to call back.",
        default="http://localhost:5001",
    )


class MarketplaceConfig(BaseSettings):
    """
    Configuration for marketplace
@@ -298,27 +287,6 @@ class MarketplaceConfig(BaseSettings):
    )


class CreatorsPlatformConfig(BaseSettings):
    """
    Configuration for creators platform
    """

    CREATORS_PLATFORM_FEATURES_ENABLED: bool = Field(
        description="Enable or disable creators platform features",
        default=True,
    )

    CREATORS_PLATFORM_API_URL: HttpUrl = Field(
        description="Creators Platform API URL",
        default=HttpUrl("https://creators.dify.ai"),
    )

    CREATORS_PLATFORM_OAUTH_CLIENT_ID: str = Field(
        description="OAuth client_id for the Creators Platform app registered in Dify",
        default="",
    )


class EndpointConfig(BaseSettings):
    """
    Configuration for various application endpoints and URLs
@@ -373,15 +341,6 @@ class FileAccessConfig(BaseSettings):
        default="",
    )

    FILES_API_URL: str = Field(
        description="Base URL for storage file ticket API endpoints."
        " Used by sandbox containers (internal or external like e2b) that need"
        " an absolute, routable address to upload/download files via the API."
        " For all-in-one Docker deployments, set to http://localhost."
        " For public sandbox environments, set to a public domain or IP.",
        default="",
    )

    FILES_ACCESS_TIMEOUT: int = Field(
        description="Expiration time in seconds for file access URLs",
        default=300,
@@ -1315,13 +1274,6 @@ class PositionConfig(BaseSettings):
        return {item.strip() for item in self.POSITION_TOOL_EXCLUDES.split(",") if item.strip() != ""}


class CollaborationConfig(BaseSettings):
    ENABLE_COLLABORATION_MODE: bool = Field(
        description="Whether to enable collaboration mode features across the workspace",
        default=False,
    )


class LoginConfig(BaseSettings):
    ENABLE_EMAIL_CODE_LOGIN: bool = Field(
        description="whether to enable email code login",
@@ -1423,9 +1375,7 @@ class FeatureConfig(
    TriggerConfig,
    AsyncWorkflowConfig,
    PluginConfig,
    CliApiConfig,
    MarketplaceConfig,
    CreatorsPlatformConfig,
    DataSetConfig,
    EndpointConfig,
    FileAccessConfig,
@@ -1449,7 +1399,6 @@ class FeatureConfig(
    WorkflowConfig,
    WorkflowNodeExecutionConfig,
    WorkspaceConfig,
    CollaborationConfig,
    LoginConfig,
    AccountConfig,
    SwaggerUIConfig,

@@ -7,15 +7,16 @@ UUID_NIL = "00000000-0000-0000-0000-000000000000"

DEFAULT_FILE_NUMBER_LIMITS = 3

IMAGE_EXTENSIONS = convert_to_lower_and_upper_set({"jpg", "jpeg", "png", "webp", "gif", "svg"})
_IMAGE_EXTENSION_BASE: frozenset[str] = frozenset(("jpg", "jpeg", "png", "webp", "gif", "svg"))
_VIDEO_EXTENSION_BASE: frozenset[str] = frozenset(("mp4", "mov", "mpeg", "webm"))
_AUDIO_EXTENSION_BASE: frozenset[str] = frozenset(("mp3", "m4a", "wav", "amr", "mpga"))

VIDEO_EXTENSIONS = convert_to_lower_and_upper_set({"mp4", "mov", "mpeg", "webm"})
IMAGE_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_IMAGE_EXTENSION_BASE))
VIDEO_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_VIDEO_EXTENSION_BASE))
AUDIO_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_AUDIO_EXTENSION_BASE))

AUDIO_EXTENSIONS = convert_to_lower_and_upper_set({"mp3", "m4a", "wav", "amr", "mpga"})

_doc_extensions: set[str]
if dify_config.ETL_TYPE == "Unstructured":
    _doc_extensions = {
_UNSTRUCTURED_DOCUMENT_EXTENSION_BASE: frozenset[str] = frozenset(
    (
        "txt",
        "markdown",
        "md",
@@ -35,11 +36,10 @@ if dify_config.ETL_TYPE == "Unstructured":
        "pptx",
        "xml",
        "epub",
    }
    if dify_config.UNSTRUCTURED_API_URL:
        _doc_extensions.add("ppt")
else:
    _doc_extensions = {
    )
)
_DEFAULT_DOCUMENT_EXTENSION_BASE: frozenset[str] = frozenset(
    (
        "txt",
        "markdown",
        "md",
@@ -53,8 +53,17 @@ else:
        "csv",
        "vtt",
        "properties",
    }
DOCUMENT_EXTENSIONS: set[str] = convert_to_lower_and_upper_set(_doc_extensions)
    )
)

_doc_extensions: set[str]
if dify_config.ETL_TYPE == "Unstructured":
    _doc_extensions = set(_UNSTRUCTURED_DOCUMENT_EXTENSION_BASE)
    if dify_config.UNSTRUCTURED_API_URL:
        _doc_extensions.add("ppt")
else:
    _doc_extensions = set(_DEFAULT_DOCUMENT_EXTENSION_BASE)
DOCUMENT_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_doc_extensions))

# console
COOKIE_NAME_ACCESS_TOKEN = "access_token"

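
`convert_to_lower_and_upper_set` itself is not shown in this diff; the behavior implied by the usage above (every extension present in both cases, so `"JPG"` matches as well as `"jpg"`) would look roughly like this sketch:

```python
from collections.abc import Iterable


def convert_to_lower_and_upper_set(extensions: Iterable[str]) -> set[str]:
    """Return each extension in both lowercase and uppercase form."""
    result: set[str] = set()
    for ext in extensions:
        result.add(ext.lower())
        result.add(ext.upper())
    return result


assert convert_to_lower_and_upper_set({"jpg"}) == {"jpg", "JPG"}
```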
@@ -1,74 +1,36 @@
"""
Core Context - Framework-agnostic context management.
Application-layer context adapters.

This module provides context management that is independent of any specific
web framework. Framework-specific implementations register their context
capture functions at application initialization time.

This ensures the workflow layer remains completely decoupled from Flask
or any other web framework.
Concrete execution-context implementations live here so `graphon` only
depends on injected context managers rather than framework state capture.
"""

import contextvars
from collections.abc import Callable

from dify_graph.context.execution_context import (
from context.execution_context import (
    AppContext,
    ContextProviderNotFoundError,
    ExecutionContext,
    ExecutionContextBuilder,
    IExecutionContext,
    NullAppContext,
    capture_current_context,
    read_context,
    register_context,
    register_context_capturer,
    reset_context_provider,
)

# Global capturer function - set by framework-specific modules
_capturer: Callable[[], IExecutionContext] | None = None


def register_context_capturer(capturer: Callable[[], IExecutionContext]) -> None:
    """
    Register a context capture function.

    This should be called by framework-specific modules (e.g., Flask)
    during application initialization.

    Args:
        capturer: Function that captures current context and returns IExecutionContext
    """
    global _capturer
    _capturer = capturer


def capture_current_context() -> IExecutionContext:
    """
    Capture current execution context.

    This function uses the registered context capturer. If no capturer
    is registered, it returns a minimal context with only contextvars
    (suitable for non-framework environments like tests or standalone scripts).

    Returns:
        IExecutionContext with captured context
    """
    if _capturer is None:
        # No framework registered - return minimal context
        return ExecutionContext(
            app_context=NullAppContext(),
            context_vars=contextvars.copy_context(),
        )

    return _capturer()


def reset_context_provider() -> None:
    """
    Reset the context capturer.

    This is primarily useful for testing to ensure a clean state.
    """
    global _capturer
    _capturer = None

from context.models import SandboxContext

__all__ = [
    "AppContext",
    "ContextProviderNotFoundError",
    "ExecutionContext",
    "ExecutionContextBuilder",
    "IExecutionContext",
    "NullAppContext",
    "SandboxContext",
    "capture_current_context",
    "read_context",
    "register_context",
    "register_context_capturer",
    "reset_context_provider",
]

@@ -1,5 +1,8 @@
"""
Execution Context - Abstracted context management for workflow execution.
Application-layer execution context adapters.

Concrete context capture lives outside `graphon` so the graph package only
consumes injected context managers when it needs to preserve thread-local state.
"""

import contextvars
@@ -16,33 +19,33 @@ class AppContext(ABC):
    """
    Abstract application context interface.

    This abstraction allows workflow execution to work with or without Flask
    by providing a common interface for application context management.
    Application adapters can implement this to restore framework-specific state
    such as Flask app context around worker execution.
    """

    @abstractmethod
    def get_config(self, key: str, default: Any = None) -> Any:
        """Get configuration value by key."""
        pass
        raise NotImplementedError

    @abstractmethod
    def get_extension(self, name: str) -> Any:
        """Get Flask extension by name (e.g., 'db', 'cache')."""
        pass
        """Get application extension by name."""
        raise NotImplementedError

    @abstractmethod
    def enter(self) -> AbstractContextManager[None]:
        """Enter the application context."""
        pass
        raise NotImplementedError


@runtime_checkable
class IExecutionContext(Protocol):
    """
    Protocol for execution context.
    Protocol for enterable execution context objects.

    This protocol defines the interface that all execution contexts must implement,
    allowing both ExecutionContext and FlaskExecutionContext to be used interchangeably.
    Concrete implementations may carry extra framework state, but callers only
    depend on standard context-manager behavior plus optional user metadata.
    """

    def __enter__(self) -> "IExecutionContext":
@@ -62,14 +65,10 @@ class IExecutionContext(Protocol):
@final
class ExecutionContext:
    """
    Execution context for workflow execution in worker threads.
    Generic execution context used by application-layer adapters.

    This class encapsulates all context needed for workflow execution:
    - Application context (Flask app or standalone)
    - Context variables for Python contextvars
    - User information (optional)

    It is designed to be serializable and passable to worker threads.
    It restores captured `contextvars` and optionally enters an application
    context before the worker executes graph logic.
    """

    def __init__(
@@ -78,14 +77,6 @@ class ExecutionContext:
        context_vars: contextvars.Context | None = None,
        user: Any = None,
    ) -> None:
        """
        Initialize execution context.

        Args:
            app_context: Application context (Flask or standalone)
            context_vars: Python contextvars to preserve
            user: User object (optional)
        """
        self._app_context = app_context
        self._context_vars = context_vars
        self._user = user
@@ -98,27 +89,21 @@ class ExecutionContext:

    @property
    def context_vars(self) -> contextvars.Context | None:
        """Get context variables."""
        """Get captured context variables."""
        return self._context_vars

    @property
    def user(self) -> Any:
        """Get user object."""
        """Get captured user object."""
        return self._user

    @contextmanager
    def enter(self) -> Generator[None, None, None]:
        """
        Enter this execution context.

        This is a convenience method that creates a context manager.
        """
        # Restore context variables if provided
        """Enter this execution context."""
        if self._context_vars:
            for var, val in self._context_vars.items():
                var.set(val)

        # Enter app context if available
        if self._app_context is not None:
            with self._app_context.enter():
                yield
@@ -141,18 +126,10 @@ class ExecutionContext:

class NullAppContext(AppContext):
    """
    Null implementation of AppContext for non-Flask environments.

    This is used when running without Flask (e.g., in tests or standalone mode).
    Null application context for non-framework environments.
    """

    def __init__(self, config: dict[str, Any] | None = None) -> None:
        """
        Initialize null app context.

        Args:
            config: Optional configuration dictionary
        """
        self._config = config or {}
        self._extensions: dict[str, Any] = {}

@@ -165,7 +142,7 @@ class NullAppContext(AppContext):
        return self._extensions.get(name)

    def set_extension(self, name: str, extension: Any) -> None:
        """Set extension by name."""
        """Register an extension for tests or standalone execution."""
        self._extensions[name] = extension

    @contextmanager
@@ -176,9 +153,7 @@ class NullAppContext(AppContext):

class ExecutionContextBuilder:
    """
    Builder for creating ExecutionContext instances.

    This provides a fluent API for building execution contexts.
    Builder for creating `ExecutionContext` instances.
    """

    def __init__(self) -> None:
@@ -211,63 +186,42 @@ class ExecutionContextBuilder:


_capturer: Callable[[], IExecutionContext] | None = None

# Tenant-scoped providers using tuple keys for clarity and constant-time lookup.
# Key mapping:
#   (name, tenant_id) -> provider
#   - name: namespaced identifier (recommend prefixing, e.g. "workflow.sandbox")
#   - tenant_id: tenant identifier string
# Value:
#   provider: Callable[[], BaseModel] returning the typed context value
# Type-safety note:
#   - This registry cannot enforce that all providers for a given name return the same BaseModel type.
#   - Implementors SHOULD provide typed wrappers around register/read (like Go's context best practice),
#     e.g. def register_sandbox_ctx(tenant_id: str, p: Callable[[], SandboxContext]) and
#     def read_sandbox_ctx(tenant_id: str) -> SandboxContext.
_tenant_context_providers: dict[tuple[str, str], Callable[[], BaseModel]] = {}

T = TypeVar("T", bound=BaseModel)


class ContextProviderNotFoundError(KeyError):
    """Raised when a tenant-scoped context provider is missing for a given (name, tenant_id)."""
    """Raised when a tenant-scoped context provider is missing."""

    pass


def register_context_capturer(capturer: Callable[[], IExecutionContext]) -> None:
    """Register a single enterable execution context capturer (e.g., Flask)."""
    """Register an enterable execution context capturer."""
    global _capturer
    _capturer = capturer


def register_context(name: str, tenant_id: str, provider: Callable[[], BaseModel]) -> None:
    """Register a tenant-specific provider for a named context.

    Tip: use a namespaced "name" (e.g., "workflow.sandbox") to avoid key collisions.
    Consider adding a typed wrapper for this registration in your feature module.
    """
    """Register a tenant-specific provider for a named context."""
    _tenant_context_providers[(name, tenant_id)] = provider


def read_context(name: str, *, tenant_id: str) -> BaseModel:
    """
    Read a context value for a specific tenant.

    Raises KeyError if the provider for (name, tenant_id) is not registered.
    """
    prov = _tenant_context_providers.get((name, tenant_id))
    if prov is None:
    """Read a context value for a specific tenant."""
    provider = _tenant_context_providers.get((name, tenant_id))
    if provider is None:
        raise ContextProviderNotFoundError(f"Context provider '{name}' not registered for tenant '{tenant_id}'")
    return prov()
    return provider()


def capture_current_context() -> IExecutionContext:
    """
    Capture current execution context from the calling environment.

    If a capturer is registered (e.g., Flask), use it. Otherwise, return a minimal
    context with NullAppContext + copy of current contextvars.
    If no framework adapter is registered, return a minimal context that only
    restores `contextvars`.
    """
    if _capturer is None:
        return ExecutionContext(
@@ -278,7 +232,22 @@ def capture_current_context() -> IExecutionContext:


def reset_context_provider() -> None:
    """Reset the capturer and all tenant-scoped context providers (primarily for tests)."""
    """Reset the capturer and tenant-scoped providers."""
    global _capturer
    _capturer = None
    _tenant_context_providers.clear()


__all__ = [
    "AppContext",
    "ContextProviderNotFoundError",
    "ExecutionContext",
    "ExecutionContextBuilder",
    "IExecutionContext",
    "NullAppContext",
    "capture_current_context",
    "read_context",
    "register_context",
    "register_context_capturer",
    "reset_context_provider",
]
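
A short usage sketch of the capture/restore flow defined above, assuming `capture_current_context` is importable as shown and that the returned object honors the context-manager protocol declared by `IExecutionContext`:

```python
import contextvars
import threading

from context.execution_context import capture_current_context

request_id: contextvars.ContextVar[str] = contextvars.ContextVar("request_id")


def handle_request() -> None:
    request_id.set("req-123")
    # With no framework capturer registered, this returns an ExecutionContext
    # wrapping NullAppContext plus a copy of the current contextvars.
    ctx = capture_current_context()

    def worker() -> None:
        with ctx:  # restores contextvars (and app context, if any) in the thread
            assert request_id.get() == "req-123"

    thread = threading.Thread(target=worker)
    thread.start()
    thread.join()
```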
@@ -10,11 +10,7 @@ from typing import Any, final

from flask import Flask, current_app, g

from dify_graph.context import register_context_capturer
from dify_graph.context.execution_context import (
    AppContext,
    IExecutionContext,
)
from context.execution_context import AppContext, IExecutionContext, register_context_capturer


@final

api/context/models.py (new file)
@@ -0,0 +1,13 @@
from __future__ import annotations

from pydantic import AnyHttpUrl, BaseModel


class SandboxContext(BaseModel):
    """Typed context for sandbox integration. All fields optional by design."""

    sandbox_url: AnyHttpUrl | None = None
    sandbox_token: str | None = None  # optional, if later needed for auth


__all__ = ["SandboxContext"]
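
Following the typed-wrapper advice in the registry comments above, a sandbox-specific wrapper might look like this sketch (the `workflow.sandbox` name comes from the registry comment; the wrapper functions themselves are hypothetical):

```python
from collections.abc import Callable

from context.execution_context import read_context, register_context
from context.models import SandboxContext

_NAME = "workflow.sandbox"  # namespaced key, per the registry guidance


def register_sandbox_ctx(tenant_id: str, provider: Callable[[], SandboxContext]) -> None:
    register_context(_NAME, tenant_id, provider)


def read_sandbox_ctx(tenant_id: str) -> SandboxContext:
    value = read_context(_NAME, tenant_id=tenant_id)
    assert isinstance(value, SandboxContext)  # narrow from BaseModel
    return value


# Usage: register a provider once, read it wherever the tenant is known.
register_sandbox_ctx("tenant-1", lambda: SandboxContext(sandbox_url="http://sandbox.local"))
ctx = read_sandbox_ctx("tenant-1")
```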
@@ -6,7 +6,6 @@ from contexts.wrapper import RecyclableContextVar

if TYPE_CHECKING:
    from core.datasource.__base.datasource_provider import DatasourcePluginProviderController
    from core.plugin.entities.plugin_daemon import PluginModelProviderEntity
    from core.tools.plugin_tool.provider import PluginToolProviderController
    from core.trigger.provider import PluginTriggerProviderController

@@ -20,14 +19,6 @@ plugin_tool_providers: RecyclableContextVar[dict[str, "PluginToolProviderControl

plugin_tool_providers_lock: RecyclableContextVar[Lock] = RecyclableContextVar(ContextVar("plugin_tool_providers_lock"))

plugin_model_providers: RecyclableContextVar[list["PluginModelProviderEntity"] | None] = RecyclableContextVar(
    ContextVar("plugin_model_providers")
)

plugin_model_providers_lock: RecyclableContextVar[Lock] = RecyclableContextVar(
    ContextVar("plugin_model_providers_lock")
)

datasource_plugin_providers: RecyclableContextVar[dict[str, "DatasourcePluginProviderController"]] = (
    RecyclableContextVar(ContextVar("datasource_plugin_providers"))
)

@@ -1,27 +0,0 @@
from flask import Blueprint
from flask_restx import Namespace

from libs.external_api import ExternalApi

bp = Blueprint("cli_api", __name__, url_prefix="/cli/api")

api = ExternalApi(
    bp,
    version="1.0",
    title="CLI API",
    description="APIs for Dify CLI to call back from external sandbox environments (e.g., e2b)",
)

# Create namespace
cli_api_ns = Namespace("cli_api", description="CLI API operations", path="/")

from .dify_cli import cli_api as _plugin

api.add_namespace(cli_api_ns)

__all__ = [
    "_plugin",
    "api",
    "bp",
    "cli_api_ns",
]
@@ -1,192 +0,0 @@
from flask import abort
from flask_restx import Resource
from pydantic import BaseModel

from controllers.cli_api import cli_api_ns
from controllers.cli_api.dify_cli.wraps import get_cli_user_tenant, plugin_data
from controllers.cli_api.wraps import cli_api_only
from controllers.console.wraps import setup_required
from core.app.entities.app_invoke_entities import InvokeFrom
from core.plugin.backwards_invocation.app import PluginAppBackwardsInvocation
from core.plugin.backwards_invocation.base import BaseBackwardsInvocationResponse
from core.plugin.backwards_invocation.model import PluginModelBackwardsInvocation
from core.plugin.backwards_invocation.tool import PluginToolBackwardsInvocation
from core.plugin.entities.request import (
    RequestInvokeApp,
    RequestInvokeLLM,
    RequestInvokeTool,
    RequestRequestUploadFile,
)
from core.sandbox.bash.dify_cli import DifyCliToolConfig
from core.session.cli_api import CliContext
from core.skill.entities import ToolInvocationRequest
from core.tools.entities.tool_entities import ToolProviderType
from core.tools.tool_manager import ToolManager
from dify_graph.file.helpers import get_signed_file_url_for_plugin
from libs.helper import length_prefixed_response
from models.account import Account
from models.model import EndUser, Tenant


class FetchToolItem(BaseModel):
    tool_type: str
    tool_provider: str
    tool_name: str
    credential_id: str | None = None


class FetchToolBatchRequest(BaseModel):
    tools: list[FetchToolItem]


@cli_api_ns.route("/invoke/llm")
class CliInvokeLLMApi(Resource):
    @cli_api_only
    @get_cli_user_tenant
    @setup_required
    @plugin_data(payload_type=RequestInvokeLLM)
    def post(
        self,
        user_model: Account | EndUser,
        tenant_model: Tenant,
        payload: RequestInvokeLLM,
        cli_context: CliContext,
    ):
        def generator():
            response = PluginModelBackwardsInvocation.invoke_llm(user_model.id, tenant_model, payload)
            return PluginModelBackwardsInvocation.convert_to_event_stream(response)

        return length_prefixed_response(0xF, generator())


@cli_api_ns.route("/invoke/tool")
class CliInvokeToolApi(Resource):
    @cli_api_only
    @get_cli_user_tenant
    @setup_required
    @plugin_data(payload_type=RequestInvokeTool)
    def post(
        self,
        user_model: Account | EndUser,
        tenant_model: Tenant,
        payload: RequestInvokeTool,
        cli_context: CliContext,
    ):
        tool_type = ToolProviderType.value_of(payload.tool_type)

        request = ToolInvocationRequest(
            tool_type=tool_type,
            provider=payload.provider,
            tool_name=payload.tool,
            credential_id=payload.credential_id,
        )
        if cli_context.tool_access and not cli_context.tool_access.is_allowed(request):
            abort(403, description=f"Access denied for tool: {payload.provider}/{payload.tool}")

        def generator():
            return PluginToolBackwardsInvocation.convert_to_event_stream(
                PluginToolBackwardsInvocation.invoke_tool(
                    tenant_id=tenant_model.id,
                    user_id=user_model.id,
                    tool_type=tool_type,
                    provider=payload.provider,
                    tool_name=payload.tool,
                    tool_parameters=payload.tool_parameters,
                    credential_id=payload.credential_id,
                ),
            )

        return length_prefixed_response(0xF, generator())


@cli_api_ns.route("/invoke/app")
class CliInvokeAppApi(Resource):
    @cli_api_only
    @get_cli_user_tenant
    @setup_required
    @plugin_data(payload_type=RequestInvokeApp)
    def post(
        self,
        user_model: Account | EndUser,
        tenant_model: Tenant,
        payload: RequestInvokeApp,
        cli_context: CliContext,
    ):
        response = PluginAppBackwardsInvocation.invoke_app(
            app_id=payload.app_id,
            user_id=user_model.id,
            tenant_id=tenant_model.id,
            conversation_id=payload.conversation_id,
            query=payload.query,
            stream=payload.response_mode == "streaming",
            inputs=payload.inputs,
            files=payload.files,
        )

        return length_prefixed_response(0xF, PluginAppBackwardsInvocation.convert_to_event_stream(response))


@cli_api_ns.route("/upload/file/request")
class CliUploadFileRequestApi(Resource):
    @cli_api_only
    @get_cli_user_tenant
    @setup_required
    @plugin_data(payload_type=RequestRequestUploadFile)
    def post(
        self,
        user_model: Account | EndUser,
        tenant_model: Tenant,
        payload: RequestRequestUploadFile,
        cli_context: CliContext,
    ):
        url = get_signed_file_url_for_plugin(
            filename=payload.filename,
            mimetype=payload.mimetype,
            tenant_id=tenant_model.id,
            user_id=user_model.id,
        )
        return BaseBackwardsInvocationResponse(data={"url": url}).model_dump()


@cli_api_ns.route("/fetch/tools/batch")
class CliFetchToolsBatchApi(Resource):
    @cli_api_only
    @get_cli_user_tenant
    @setup_required
    @plugin_data(payload_type=FetchToolBatchRequest)
    def post(
        self,
        user_model: Account | EndUser,
        tenant_model: Tenant,
        payload: FetchToolBatchRequest,
        cli_context: CliContext,
    ):
        tools: list[dict] = []

        for item in payload.tools:
            provider_type = ToolProviderType.value_of(item.tool_type)

            request = ToolInvocationRequest(
                tool_type=provider_type,
                provider=item.tool_provider,
                tool_name=item.tool_name,
                credential_id=item.credential_id,
            )
            if cli_context.tool_access and not cli_context.tool_access.is_allowed(request):
                abort(403, description=f"Access denied for tool: {item.tool_provider}/{item.tool_name}")

            try:
                tool_runtime = ToolManager.get_tool_runtime(
                    tenant_id=tenant_model.id,
                    provider_type=provider_type,
                    provider_id=item.tool_provider,
                    tool_name=item.tool_name,
                    invoke_from=InvokeFrom.AGENT,
                    credential_id=item.credential_id,
                )
                tool_config = DifyCliToolConfig.create_from_tool(tool_runtime)
                tools.append(tool_config.model_dump())
            except Exception:
                continue

        return BaseBackwardsInvocationResponse(data={"tools": tools}).model_dump()
@@ -1,137 +0,0 @@
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar

from flask import current_app, g, request
from flask_login import user_logged_in
from pydantic import BaseModel
from sqlalchemy.orm import Session

from core.session.cli_api import CliApiSession, CliContext
from extensions.ext_database import db
from libs.login import current_user
from models.account import Tenant
from models.model import DefaultEndUserSessionID, EndUser

P = ParamSpec("P")
R = TypeVar("R")


class TenantUserPayload(BaseModel):
    tenant_id: str
    user_id: str


def get_user(tenant_id: str, user_id: str | None) -> EndUser:
    """
    Get the current user.

    NOTE: user_id is not trusted; it could be maliciously set to any value.
    As a result, it can only be treated as an end user id.
    """
    if not user_id:
        user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID
    is_anonymous = user_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID
    try:
        with Session(db.engine) as session:
            user_model = None

            if is_anonymous:
                user_model = (
                    session.query(EndUser)
                    .where(
                        EndUser.session_id == user_id,
                        EndUser.tenant_id == tenant_id,
                    )
                    .first()
                )
            else:
                user_model = (
                    session.query(EndUser)
                    .where(
                        EndUser.id == user_id,
                        EndUser.tenant_id == tenant_id,
                    )
                    .first()
                )

            if not user_model:
                user_model = EndUser(
                    tenant_id=tenant_id,
                    type="service_api",
                    is_anonymous=is_anonymous,
                    session_id=user_id,
                )
                session.add(user_model)
                session.commit()
                session.refresh(user_model)

    except Exception:
        raise ValueError("user not found")

    return user_model


def get_cli_user_tenant(view_func: Callable[P, R]):
    @wraps(view_func)
    def decorated_view(*args: P.args, **kwargs: P.kwargs):
        session: CliApiSession | None = getattr(g, "cli_api_session", None)
        if session is None:
            raise ValueError("session not found")

        user_id = session.user_id
        tenant_id = session.tenant_id
        cli_context = CliContext.model_validate(session.context)

        if not user_id:
            user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID

        try:
            tenant_model = (
                db.session.query(Tenant)
                .where(
                    Tenant.id == tenant_id,
                )
                .first()
            )
        except Exception:
            raise ValueError("tenant not found")

        if not tenant_model:
            raise ValueError("tenant not found")

        kwargs["tenant_model"] = tenant_model
        kwargs["user_model"] = get_user(tenant_id, user_id)
        kwargs["cli_context"] = cli_context

        current_app.login_manager._update_request_context_with_user(kwargs["user_model"])  # type: ignore
        user_logged_in.send(current_app._get_current_object(), user=current_user)  # type: ignore

        return view_func(*args, **kwargs)

    return decorated_view


def plugin_data(view: Callable[P, R] | None = None, *, payload_type: type[BaseModel]):
    def decorator(view_func: Callable[P, R]):
        @wraps(view_func)
        def decorated_view(*args: P.args, **kwargs: P.kwargs):
            try:
                data = request.get_json()
            except Exception:
                raise ValueError("invalid json")

            try:
                payload = payload_type.model_validate(data)
            except Exception as e:
                raise ValueError(f"invalid payload: {str(e)}")

            kwargs["payload"] = payload
            return view_func(*args, **kwargs)

        return decorated_view

    if view is None:
        return decorator
    else:
        return decorator(view)
@@ -1,56 +0,0 @@
import hashlib
import hmac
import time
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar

from flask import abort, g, request

from core.session.cli_api import CliApiSessionManager

P = ParamSpec("P")
R = TypeVar("R")

SIGNATURE_TTL_SECONDS = 300


def _verify_signature(session_secret: str, timestamp: str, body: bytes, signature: str) -> bool:
    expected = hmac.new(
        session_secret.encode(),
        f"{timestamp}.".encode() + body,
        hashlib.sha256,
    ).hexdigest()
    return hmac.compare_digest(f"sha256={expected}", signature)


def cli_api_only(view: Callable[P, R]):
    @wraps(view)
    def decorated(*args: P.args, **kwargs: P.kwargs):
        session_id = request.headers.get("X-Cli-Api-Session-Id")
        timestamp = request.headers.get("X-Cli-Api-Timestamp")
        signature = request.headers.get("X-Cli-Api-Signature")

        if not session_id or not timestamp or not signature:
            abort(401)

        try:
            ts = int(timestamp)
            if abs(time.time() - ts) > SIGNATURE_TTL_SECONDS:
                abort(401)
        except ValueError:
            abort(401)

        session = CliApiSessionManager().get(session_id)
        if not session:
            abort(401)

        body = request.get_data()
        if not _verify_signature(session.secret, timestamp, body, signature):
            abort(401)

        g.cli_api_session = session

        return view(*args, **kwargs)

    return decorated
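
The client side that matches `_verify_signature` above would produce the three `X-Cli-Api-*` headers like this (a sketch of what dify-cli would send; only the HMAC construction is taken from the server code):

```python
import hashlib
import hmac
import time


def sign_cli_request(session_id: str, session_secret: str, body: bytes) -> dict[str, str]:
    """Produce the headers checked by cli_api_only for a given request body."""
    timestamp = str(int(time.time()))
    digest = hmac.new(
        session_secret.encode(),
        f"{timestamp}.".encode() + body,  # same "<ts>." + body message as the server
        hashlib.sha256,
    ).hexdigest()
    return {
        "X-Cli-Api-Session-Id": session_id,
        "X-Cli-Api-Timestamp": timestamp,
        "X-Cli-Api-Signature": f"sha256={digest}",
    }
```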
@@ -2,9 +2,9 @@ from __future__ import annotations

from typing import Any, TypeAlias

from graphon.file import helpers as file_helpers
from pydantic import BaseModel, ConfigDict, computed_field

from dify_graph.file import helpers as file_helpers
from models.model import IconType

JSONValue: TypeAlias = str | int | float | bool | None | dict[str, Any] | list[Any]

@@ -4,8 +4,8 @@ from urllib.parse import quote

from flask import Response

HTML_MIME_TYPES = frozenset({"text/html", "application/xhtml+xml"})
HTML_EXTENSIONS = frozenset({"html", "htm"})
HTML_MIME_TYPES: frozenset[str] = frozenset(("text/html", "application/xhtml+xml"))
HTML_EXTENSIONS: frozenset[str] = frozenset(("html", "htm"))


def _normalize_mime_type(mime_type: str | None) -> str:

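
These constants presumably feed an "is this HTML?" check when serving files; a hypothetical sketch of how they would combine (the helper name and the check itself are assumptions, not the code behind `_normalize_mime_type`):

```python
HTML_MIME_TYPES: frozenset[str] = frozenset(("text/html", "application/xhtml+xml"))
HTML_EXTENSIONS: frozenset[str] = frozenset(("html", "htm"))


def _looks_like_html(mime_type: str | None, filename: str) -> bool:
    """Hypothetical helper: treat a file as HTML by MIME type or extension."""
    normalized = (mime_type or "").split(";", 1)[0].strip().lower()
    extension = filename.rsplit(".", 1)[-1].lower() if "." in filename else ""
    return normalized in HTML_MIME_TYPES or extension in HTML_EXTENSIONS


assert _looks_like_html("text/html; charset=utf-8", "page.bin")
assert _looks_like_html(None, "index.HTM")
```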
@@ -32,7 +32,6 @@ for module_name in RESOURCE_MODULES:

# Ensure resource modules are imported so route decorators are evaluated.
# Import other controllers
# Sandbox file browser
from . import (
    admin,
    apikey,
@@ -42,7 +41,6 @@ from . import (
    init_validate,
    notification,
    ping,
    sandbox_files,
    setup,
    spec,
    version,
@@ -54,7 +52,6 @@ from .app import (
    agent,
    annotation,
    app,
    app_asset,
    audio,
    completion,
    conversation,
@@ -65,11 +62,9 @@ from .app import (
    model_config,
    ops_trace,
    site,
    skills,
    statistic,
    workflow,
    workflow_app_log,
    workflow_comment,
    workflow_draft_variable,
    workflow_run,
    workflow_statistic,
@@ -121,7 +116,6 @@ from .explore import (
    saved_message,
    trial,
)
from .socketio import workflow as socketio_workflow  # pyright: ignore[reportUnusedImport]

# Import tag controllers
from .tag import tags
@@ -136,7 +130,6 @@ from .workspace import (
    model_providers,
    models,
    plugin,
    sandbox_providers,
    tool_providers,
    trigger_providers,
    workspace,
@@ -155,7 +148,6 @@ __all__ = [
    "api",
    "apikey",
    "app",
    "app_asset",
    "audio",
    "banner",
    "billing",
@@ -206,12 +198,9 @@ __all__ = [
    "rag_pipeline_import",
    "rag_pipeline_workflow",
    "recommended_app",
    "sandbox_files",
    "sandbox_providers",
    "saved_message",
    "setup",
    "site",
    "skills",
    "spec",
    "statistic",
    "tags",
@@ -222,7 +211,6 @@ __all__ = [
    "website",
    "workflow",
    "workflow_app_log",
    "workflow_comment",
    "workflow_draft_variable",
    "workflow_run",
    "workflow_statistic",


@@ -2,7 +2,7 @@ import flask_restx
from flask_restx import Resource, fields, marshal_with
from flask_restx._http import HTTPStatus
from sqlalchemy import delete, func, select
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import Forbidden

from extensions.ext_database import db

@@ -34,7 +34,7 @@ api_key_list_model = console_ns.model(


def _get_resource(resource_id, tenant_id, resource_model):
    with Session(db.engine) as session:
    with sessionmaker(db.engine).begin() as session:
        resource = session.execute(
            select(resource_model).filter_by(id=resource_id, tenant_id=tenant_id)
        ).scalar_one_or_none()
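
The change above is the recurring pattern throughout this diff: plain `Session(db.engine)` blocks give way to `sessionmaker(db.engine).begin()`, whose context manager commits on clean exit and rolls back on exception, so the explicit `session.commit()` calls disappear. A minimal runnable sketch of the two styles, assuming only a local SQLite engine as a stand-in for `db.engine`:

import sqlalchemy
from sqlalchemy import text
from sqlalchemy.orm import Session, sessionmaker

engine = sqlalchemy.create_engine("sqlite:///:memory:")  # stand-in for db.engine

# Old style: plain session; commit must be called explicitly.
with Session(engine) as session:
    session.execute(text("SELECT 1"))
    session.commit()

# New style: begin() yields a transactional session that commits
# automatically on clean exit and rolls back on exception.
with sessionmaker(engine).begin() as session:
    session.execute(text("SELECT 1"))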

@@ -1,14 +1,15 @@
import logging
import uuid
from datetime import datetime
from enum import StrEnum
from typing import Any, Literal, TypeAlias

from flask import request
from flask_restx import Resource
from graphon.enums import WorkflowExecutionStatus
from graphon.file import helpers as file_helpers
from pydantic import AliasChoices, BaseModel, ConfigDict, Field, computed_field, field_validator
from sqlalchemy import select
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import BadRequest

from controllers.common.helpers import FileInfo

@@ -27,13 +28,10 @@ from controllers.console.wraps import (
from core.ops.ops_trace_manager import OpsTraceManager
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from core.trigger.constants import TRIGGER_NODE_TYPES
from dify_graph.enums import WorkflowExecutionStatus
from dify_graph.file import helpers as file_helpers
from extensions.ext_database import db
from libs.login import current_account_with_tenant, login_required
from models import App, DatasetPermissionEnum, Workflow
from models.model import AppMode, IconType
from models.workflow_features import WorkflowFeatures
from models.model import IconType
from services.app_dsl_service import AppDslService, ImportMode
from services.app_service import AppService
from services.enterprise.enterprise_service import EnterpriseService

@@ -61,11 +59,6 @@ register_enum_models(console_ns, IconType)
_logger = logging.getLogger(__name__)


class RuntimeType(StrEnum):
    CLASSIC = "classic"
    SANDBOXED = "sandboxed"


class AppListQuery(BaseModel):
    page: int = Field(default=1, ge=1, le=99999, description="Page number (1-99999)")
    limit: int = Field(default=20, ge=1, le=100, description="Page size (1-100)")

@@ -130,11 +123,6 @@ class AppExportQuery(BaseModel):
    workflow_id: str | None = Field(default=None, description="Specific workflow ID to export")


class AppExportBundleQuery(BaseModel):
    include_secret: bool = Field(default=False, description="Include secrets in export")
    workflow_id: str | None = Field(default=None, description="Specific workflow ID to export")


class AppNamePayload(BaseModel):
    name: str = Field(..., min_length=1, description="Name to check")

@@ -360,7 +348,6 @@ class AppPartial(ResponseModel):
    create_user_name: str | None = None
    author_name: str | None = None
    has_draft_trigger: bool | None = None
    runtime_type: RuntimeType = RuntimeType.CLASSIC

    @computed_field(return_type=str | None)  # type: ignore
    @property

@@ -510,7 +497,6 @@ class AppListApi(Resource):
            str(app.id) for app in app_pagination.items if app.mode in {"workflow", "advanced-chat"}
        ]
        draft_trigger_app_ids: set[str] = set()
        sandbox_app_ids: set[str] = set()
        if workflow_capable_app_ids:
            draft_workflows = (
                db.session.execute(

@@ -525,10 +511,6 @@ class AppListApi(Resource):
            )
            trigger_node_types = TRIGGER_NODE_TYPES
            for workflow in draft_workflows:
                # Check sandbox feature
                if workflow.get_feature(WorkflowFeatures.SANDBOX).enabled:
                    sandbox_app_ids.add(str(workflow.app_id))

                node_id = None
                try:
                    for node_id, node_data in workflow.walk_nodes():

@@ -541,7 +523,6 @@ class AppListApi(Resource):

        for app in app_pagination.items:
            app.has_draft_trigger = str(app.id) in draft_trigger_app_ids
            app.runtime_type = RuntimeType.SANDBOXED if str(app.id) in sandbox_app_ids else RuntimeType.CLASSIC

        pagination_model = AppPagination.model_validate(app_pagination, from_attributes=True)
        return pagination_model.model_dump(mode="json"), 200

@@ -661,7 +642,7 @@ class AppCopyApi(Resource):

        args = CopyAppPayload.model_validate(console_ns.payload or {})

        with Session(db.engine) as session:
        with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
            import_service = AppDslService(session)
            yaml_content = import_service.export_dsl(app_model=app_model, include_secret=True)
            result = import_service.import_app(

@@ -674,7 +655,6 @@ class AppCopyApi(Resource):
                icon=args.icon,
                icon_background=args.icon_background,
            )
            session.commit()

            # Inherit web app permission from original app
            if result.app_id and FeatureService.get_system_features().webapp_auth.enabled:

@@ -723,89 +703,6 @@ class AppExportApi(Resource):
        return payload.model_dump(mode="json")


@console_ns.route("/apps/<uuid:app_id>/export-bundle")
class AppExportBundleApi(Resource):
    @get_app_model
    @setup_required
    @login_required
    @account_initialization_required
    @edit_permission_required
    def get(self, app_model):
        from services.app_bundle_service import AppBundleService

        args = AppExportBundleQuery.model_validate(request.args.to_dict(flat=True))
        current_user, _ = current_account_with_tenant()

        result = AppBundleService.export_bundle(
            app_model=app_model,
            account_id=str(current_user.id),
            include_secret=args.include_secret,
            workflow_id=args.workflow_id,
        )

        return result.model_dump(mode="json")


@console_ns.route("/apps/<uuid:app_id>/publish-to-creators-platform")
class AppPublishToCreatorsPlatformApi(Resource):
    @get_app_model
    @setup_required
    @login_required
    @account_initialization_required
    @edit_permission_required
    def post(self, app_model):
        """Export the app DSL and upload it to the Creators Platform, returning a redirect URL.

        Classic apps export as YAML; sandboxed apps export as a ZIP bundle.
        """
        import httpx

        from configs import dify_config
        from core.helper.creators import get_redirect_url, upload_dsl
        from services.app_bundle_service import AppBundleService
        from services.workflow_service import WorkflowService

        if not dify_config.CREATORS_PLATFORM_FEATURES_ENABLED:
            return {"message": "Creators Platform is not enabled"}, 403

        current_user, _ = current_account_with_tenant()

        # Determine if the app is sandboxed by checking the draft workflow's sandbox feature
        is_sandboxed = False
        if app_model.mode in {"workflow", "advanced-chat"}:
            draft_workflow = WorkflowService().get_draft_workflow(app_model)
            if draft_workflow and draft_workflow.get_feature(WorkflowFeatures.SANDBOX).enabled:
                is_sandboxed = True

        if is_sandboxed:
            # Sandboxed app: export as ZIP bundle
            bundle_result = AppBundleService.export_bundle(
                app_model=app_model,
                account_id=str(current_user.id),
                include_secret=False,
            )
            download_response = httpx.get(bundle_result.download_url, timeout=60, follow_redirects=True)
            download_response.raise_for_status()
            file_bytes = download_response.content
            filename = bundle_result.filename
        else:
            # Classic app: export as YAML
            dsl_content = AppDslService.export_dsl(
                app_model=app_model,
                include_secret=False,
            )
            file_bytes = dsl_content.encode("utf-8")
            filename = f"{app_model.name}.yml"

        # Upload to Creators Platform
        claim_code = upload_dsl(file_bytes, filename=filename)

        # Generate redirect URL (with optional OAuth code)
        redirect_url = get_redirect_url(str(current_user.id), claim_code)

        return {"redirect_url": redirect_url}
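
A hedged sketch of driving the publish endpoint from a client; the console API prefix and cookie auth are assumptions for illustration, not confirmed by this diff:

import httpx

BASE = "https://cloud.example.com/console/api"  # hypothetical deployment URL
APP_ID = "00000000-0000-0000-0000-000000000000"  # hypothetical app id

# The server decides YAML vs. ZIP bundle from the draft workflow's sandbox feature.
resp = httpx.post(f"{BASE}/apps/{APP_ID}/publish-to-creators-platform", cookies={"session": "..."})
resp.raise_for_status()
print(resp.json()["redirect_url"])  # the UI would redirect the browser here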
@console_ns.route("/apps/<uuid:app_id>/name")
|
||||
class AppNameApi(Resource):
|
||||
@console_ns.doc("check_app_name")
|
||||
@ -929,26 +826,3 @@ class AppTraceApi(Resource):
|
||||
)
|
||||
|
||||
return {"result": "success"}
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/upgrade-runtime")
|
||||
class AppRuntimeUpgradeApi(Resource):
|
||||
@console_ns.doc("upgrade_app_runtime")
|
||||
@console_ns.doc(description="Clone the app and upgrade the clone to sandboxed runtime")
|
||||
@console_ns.doc(params={"app_id": "Application ID to upgrade"})
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model(mode=[AppMode.WORKFLOW, AppMode.ADVANCED_CHAT])
|
||||
@edit_permission_required
|
||||
def post(self, app_model):
|
||||
"""Upgrade app runtime by cloning to sandboxed mode"""
|
||||
current_user, _ = current_account_with_tenant()
|
||||
|
||||
with Session(db.engine) as session:
|
||||
from services.app_runtime_upgrade_service import AppRuntimeUpgradeService
|
||||
|
||||
result = AppRuntimeUpgradeService(session).upgrade(app_model, current_user)
|
||||
session.commit()
|
||||
|
||||
return result, 200
|
||||
|
||||

@@ -1,333 +0,0 @@
from flask import request
from flask_restx import Resource
from pydantic import BaseModel, Field, field_validator

from controllers.console import console_ns
from controllers.console.app.error import (
    AppAssetNodeNotFoundError,
    AppAssetPathConflictError,
)
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.app.entities.app_asset_entities import BatchUploadNode
from libs.login import current_account_with_tenant, login_required
from models import App
from models.model import AppMode
from services.app_asset_service import AppAssetService
from services.errors.app_asset import (
    AppAssetNodeNotFoundError as ServiceNodeNotFoundError,
)
from services.errors.app_asset import (
    AppAssetParentNotFoundError,
)
from services.errors.app_asset import (
    AppAssetPathConflictError as ServicePathConflictError,
)

DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"


class CreateFolderPayload(BaseModel):
    name: str = Field(..., min_length=1, max_length=255)
    parent_id: str | None = None


class CreateFilePayload(BaseModel):
    name: str = Field(..., min_length=1, max_length=255)
    parent_id: str | None = None

    @field_validator("name", mode="before")
    @classmethod
    def strip_name(cls, v: str) -> str:
        return v.strip() if isinstance(v, str) else v

    @field_validator("parent_id", mode="before")
    @classmethod
    def empty_to_none(cls, v: str | None) -> str | None:
        return v or None


class GetUploadUrlPayload(BaseModel):
    name: str = Field(..., min_length=1, max_length=255)
    size: int = Field(..., ge=0)
    parent_id: str | None = None

    @field_validator("name", mode="before")
    @classmethod
    def strip_name(cls, v: str) -> str:
        return v.strip() if isinstance(v, str) else v

    @field_validator("parent_id", mode="before")
    @classmethod
    def empty_to_none(cls, v: str | None) -> str | None:
        return v or None


class BatchUploadPayload(BaseModel):
    children: list[BatchUploadNode] = Field(..., min_length=1)
    parent_id: str | None = None

    @field_validator("parent_id", mode="before")
    @classmethod
    def empty_to_none(cls, v: str | None) -> str | None:
        return v or None


class UpdateFileContentPayload(BaseModel):
    content: str


class RenameNodePayload(BaseModel):
    name: str = Field(..., min_length=1, max_length=255)


class MoveNodePayload(BaseModel):
    parent_id: str | None = None


class ReorderNodePayload(BaseModel):
    after_node_id: str | None = Field(default=None, description="Place after this node, None for first position")


def reg(cls: type[BaseModel]) -> None:
    console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))


reg(CreateFolderPayload)
reg(CreateFilePayload)
reg(GetUploadUrlPayload)
reg(BatchUploadNode)
reg(BatchUploadPayload)
reg(UpdateFileContentPayload)
reg(RenameNodePayload)
reg(MoveNodePayload)
reg(ReorderNodePayload)
@console_ns.route("/apps/<string:app_id>/assets/tree")
|
||||
class AppAssetTreeResource(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||
def get(self, app_model: App):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
tree = AppAssetService.get_asset_tree(app_model, current_user.id)
|
||||
return {"children": [view.model_dump() for view in tree.transform()]}
|
||||
|
||||
|
||||
@console_ns.route("/apps/<string:app_id>/assets/folders")
|
||||
class AppAssetFolderResource(Resource):
|
||||
@console_ns.expect(console_ns.models[CreateFolderPayload.__name__])
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||
def post(self, app_model: App):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
payload = CreateFolderPayload.model_validate(console_ns.payload or {})
|
||||
|
||||
try:
|
||||
node = AppAssetService.create_folder(app_model, current_user.id, payload.name, payload.parent_id)
|
||||
return node.model_dump(), 201
|
||||
except AppAssetParentNotFoundError:
|
||||
raise AppAssetNodeNotFoundError()
|
||||
except ServicePathConflictError:
|
||||
raise AppAssetPathConflictError()
|
||||
|
||||
|
||||
@console_ns.route("/apps/<string:app_id>/assets/files/<string:node_id>")
|
||||
class AppAssetFileDetailResource(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||
def get(self, app_model: App, node_id: str):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
try:
|
||||
content = AppAssetService.get_file_content(app_model, current_user.id, node_id)
|
||||
return {"content": content.decode("utf-8", errors="replace")}
|
||||
except ServiceNodeNotFoundError:
|
||||
raise AppAssetNodeNotFoundError()
|
||||
|
||||
@console_ns.expect(console_ns.models[UpdateFileContentPayload.__name__])
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||
def put(self, app_model: App, node_id: str):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
|
||||
file = request.files.get("file")
|
||||
if file:
|
||||
content = file.read()
|
||||
else:
|
||||
payload = UpdateFileContentPayload.model_validate(console_ns.payload or {})
|
||||
content = payload.content.encode("utf-8")
|
||||
|
||||
try:
|
||||
node = AppAssetService.update_file_content(app_model, current_user.id, node_id, content)
|
||||
return node.model_dump()
|
||||
except ServiceNodeNotFoundError:
|
||||
raise AppAssetNodeNotFoundError()
|
||||
|
||||
|
||||
@console_ns.route("/apps/<string:app_id>/assets/nodes/<string:node_id>")
|
||||
class AppAssetNodeResource(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||
def delete(self, app_model: App, node_id: str):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
try:
|
||||
AppAssetService.delete_node(app_model, current_user.id, node_id)
|
||||
return {"result": "success"}, 200
|
||||
except ServiceNodeNotFoundError:
|
||||
raise AppAssetNodeNotFoundError()
|
||||
|
||||
|
||||
@console_ns.route("/apps/<string:app_id>/assets/nodes/<string:node_id>/rename")
|
||||
class AppAssetNodeRenameResource(Resource):
|
||||
@console_ns.expect(console_ns.models[RenameNodePayload.__name__])
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||
def post(self, app_model: App, node_id: str):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
payload = RenameNodePayload.model_validate(console_ns.payload or {})
|
||||
|
||||
try:
|
||||
node = AppAssetService.rename_node(app_model, current_user.id, node_id, payload.name)
|
||||
return node.model_dump()
|
||||
except ServiceNodeNotFoundError:
|
||||
raise AppAssetNodeNotFoundError()
|
||||
except ServicePathConflictError:
|
||||
raise AppAssetPathConflictError()
|
||||
|
||||
|
||||
@console_ns.route("/apps/<string:app_id>/assets/nodes/<string:node_id>/move")
|
||||
class AppAssetNodeMoveResource(Resource):
|
||||
@console_ns.expect(console_ns.models[MoveNodePayload.__name__])
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||
def post(self, app_model: App, node_id: str):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
payload = MoveNodePayload.model_validate(console_ns.payload or {})
|
||||
|
||||
try:
|
||||
node = AppAssetService.move_node(app_model, current_user.id, node_id, payload.parent_id)
|
||||
return node.model_dump()
|
||||
except ServiceNodeNotFoundError:
|
||||
raise AppAssetNodeNotFoundError()
|
||||
except AppAssetParentNotFoundError:
|
||||
raise AppAssetNodeNotFoundError()
|
||||
except ServicePathConflictError:
|
||||
raise AppAssetPathConflictError()
|
||||
|
||||
|
||||
@console_ns.route("/apps/<string:app_id>/assets/nodes/<string:node_id>/reorder")
|
||||
class AppAssetNodeReorderResource(Resource):
|
||||
@console_ns.expect(console_ns.models[ReorderNodePayload.__name__])
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||
def post(self, app_model: App, node_id: str):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
payload = ReorderNodePayload.model_validate(console_ns.payload or {})
|
||||
|
||||
try:
|
||||
node = AppAssetService.reorder_node(app_model, current_user.id, node_id, payload.after_node_id)
|
||||
return node.model_dump()
|
||||
except ServiceNodeNotFoundError:
|
||||
raise AppAssetNodeNotFoundError()
|
||||
|
||||
|
||||
@console_ns.route("/apps/<string:app_id>/assets/files/<string:node_id>/download-url")
|
||||
class AppAssetFileDownloadUrlResource(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||
def get(self, app_model: App, node_id: str):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
try:
|
||||
download_url = AppAssetService.get_file_download_url(app_model, current_user.id, node_id)
|
||||
return {"download_url": download_url}
|
||||
except ServiceNodeNotFoundError:
|
||||
raise AppAssetNodeNotFoundError()
|
||||
|
||||
|
||||
@console_ns.route("/apps/<string:app_id>/assets/files/upload")
|
||||
class AppAssetFileUploadUrlResource(Resource):
|
||||
@console_ns.expect(console_ns.models[GetUploadUrlPayload.__name__])
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||
def post(self, app_model: App):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
payload = GetUploadUrlPayload.model_validate(console_ns.payload or {})
|
||||
|
||||
try:
|
||||
node, upload_url = AppAssetService.get_file_upload_url(
|
||||
app_model, current_user.id, payload.name, payload.size, payload.parent_id
|
||||
)
|
||||
return {"node": node.model_dump(), "upload_url": upload_url}, 201
|
||||
except AppAssetParentNotFoundError:
|
||||
raise AppAssetNodeNotFoundError()
|
||||
except ServicePathConflictError:
|
||||
raise AppAssetPathConflictError()
|
||||
|
||||
|
||||
@console_ns.route("/apps/<string:app_id>/assets/batch-upload")
|
||||
class AppAssetBatchUploadResource(Resource):
|
||||
@console_ns.expect(console_ns.models[BatchUploadPayload.__name__])
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||
def post(self, app_model: App):
|
||||
"""
|
||||
Create nodes from tree structure and return upload URLs.
|
||||
|
||||
Input:
|
||||
{
|
||||
"parent_id": "optional-target-folder-id",
|
||||
"children": [
|
||||
{"name": "folder1", "node_type": "folder", "children": [
|
||||
{"name": "file1.txt", "node_type": "file", "size": 1024}
|
||||
]},
|
||||
{"name": "root.txt", "node_type": "file", "size": 512}
|
||||
]
|
||||
}
|
||||
|
||||
Output:
|
||||
{
|
||||
"children": [
|
||||
{"id": "xxx", "name": "folder1", "node_type": "folder", "children": [
|
||||
{"id": "yyy", "name": "file1.txt", "node_type": "file", "size": 1024, "upload_url": "..."}
|
||||
]},
|
||||
{"id": "zzz", "name": "root.txt", "node_type": "file", "size": 512, "upload_url": "..."}
|
||||
]
|
||||
}
|
||||
"""
|
||||
current_user, _ = current_account_with_tenant()
|
||||
payload = BatchUploadPayload.model_validate(console_ns.payload or {})
|
||||
|
||||
try:
|
||||
result_children = AppAssetService.batch_create_from_tree(
|
||||
app_model,
|
||||
current_user.id,
|
||||
payload.children,
|
||||
parent_id=payload.parent_id,
|
||||
)
|
||||
return {"children": [child.model_dump() for child in result_children]}, 201
|
||||
except AppAssetParentNotFoundError:
|
||||
raise AppAssetNodeNotFoundError()
|
||||
except ServicePathConflictError:
|
||||
raise AppAssetPathConflictError()
|
||||
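
Following the Input/Output contract in the batch-upload docstring, a client-side sketch under stated assumptions (the presigned `upload_url`s accept a plain PUT; BASE and APP_ID are hypothetical):

import httpx

BASE = "https://cloud.example.com/console/api"  # hypothetical
APP_ID = "app-id"  # hypothetical

blobs = {"file1.txt": b"x" * 1024, "root.txt": b"y" * 512}
payload = {
    "parent_id": None,
    "children": [
        {"name": "folder1", "node_type": "folder", "children": [
            {"name": "file1.txt", "node_type": "file", "size": 1024},
        ]},
        {"name": "root.txt", "node_type": "file", "size": 512},
    ],
}
created = httpx.post(f"{BASE}/apps/{APP_ID}/assets/batch-upload", json=payload).json()

def upload_tree(nodes: list[dict]) -> None:
    # Walk the returned tree and push each file's bytes to its upload_url.
    for node in nodes:
        if node["node_type"] == "file":
            httpx.put(node["upload_url"], content=blobs[node["name"]])
        else:
            upload_tree(node.get("children", []))

upload_tree(created["children"])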

@@ -1,6 +1,6 @@
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker

from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import (

@@ -51,14 +51,6 @@ class AppImportPayload(BaseModel):
    app_id: str | None = Field(None)


class AppImportBundleConfirmPayload(BaseModel):
    name: str | None = None
    description: str | None = None
    icon_type: str | None = None
    icon: str | None = None
    icon_background: str | None = None


console_ns.schema_model(
    AppImportPayload.__name__, AppImportPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
)

@@ -79,7 +71,7 @@ class AppImportApi(Resource):
        args = AppImportPayload.model_validate(console_ns.payload)

        # Create service with session
        with Session(db.engine) as session:
        with sessionmaker(db.engine).begin() as session:
            import_service = AppDslService(session)
            # Import app
            account = current_user

@@ -95,7 +87,6 @@ class AppImportApi(Resource):
                icon_background=args.icon_background,
                app_id=args.app_id,
            )
            session.commit()
            if result.app_id and FeatureService.get_system_features().webapp_auth.enabled:
                # update web app setting as private
                EnterpriseService.WebAppAuth.update_app_access_mode(result.app_id, "private")

@@ -120,12 +111,11 @@ class AppImportConfirmApi(Resource):
        current_user, _ = current_account_with_tenant()

        # Create service with session
        with Session(db.engine) as session:
        with sessionmaker(db.engine).begin() as session:
            import_service = AppDslService(session)
            # Confirm import
            account = current_user
            result = import_service.confirm_import(import_id=import_id, account=account)
            session.commit()

        # Return appropriate status code based on result
        if result.status == ImportStatus.FAILED:

@@ -142,73 +132,8 @@ class AppImportCheckDependenciesApi(Resource):
    @marshal_with(app_import_check_dependencies_model)
    @edit_permission_required
    def get(self, app_model: App):
        with Session(db.engine) as session:
        with sessionmaker(db.engine).begin() as session:
            import_service = AppDslService(session)
            result = import_service.check_dependencies(app_model=app_model)

        return result.model_dump(mode="json"), 200


@console_ns.route("/apps/imports-bundle/prepare")
class AppImportBundlePrepareApi(Resource):
    """Step 1: Get an upload URL for a bundle import."""

    @setup_required
    @login_required
    @account_initialization_required
    @edit_permission_required
    def post(self):
        from services.app_bundle_service import AppBundleService

        current_user, current_tenant_id = current_account_with_tenant()

        result = AppBundleService.prepare_import(
            tenant_id=current_tenant_id,
            account_id=current_user.id,
        )

        return {"import_id": result.import_id, "upload_url": result.upload_url}, 200


@console_ns.route("/apps/imports-bundle/<string:import_id>/confirm")
class AppImportBundleConfirmApi(Resource):
    """Step 2: Confirm the bundle import after upload."""

    @setup_required
    @login_required
    @account_initialization_required
    @marshal_with(app_import_model)
    @cloud_edition_billing_resource_check("apps")
    @edit_permission_required
    def post(self, import_id: str):
        from flask import request

        from core.app.entities.app_bundle_entities import BundleFormatError
        from services.app_bundle_service import AppBundleService

        current_user, _ = current_account_with_tenant()

        args = AppImportBundleConfirmPayload.model_validate(request.get_json() or {})

        try:
            result = AppBundleService.confirm_import(
                import_id=import_id,
                account=current_user,
                name=args.name,
                description=args.description,
                icon_type=args.icon_type,
                icon=args.icon,
                icon_background=args.icon_background,
            )
        except BundleFormatError as e:
            return {"error": str(e)}, 400

        if result.app_id and FeatureService.get_system_features().webapp_auth.enabled:
            EnterpriseService.WebAppAuth.update_app_access_mode(result.app_id, "private")

        status = result.status
        if status == ImportStatus.FAILED:
            return result.model_dump(mode="json"), 400
        elif status == ImportStatus.PENDING:
            return result.model_dump(mode="json"), 202
        return result.model_dump(mode="json"), 200

@@ -2,6 +2,7 @@ import logging

from flask import request
from flask_restx import Resource, fields
from graphon.model_runtime.errors.invoke import InvokeError
from pydantic import BaseModel, Field
from werkzeug.exceptions import InternalServerError

@@ -22,7 +23,6 @@ from controllers.console.app.error import (
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from dify_graph.model_runtime.errors.invoke import InvokeError
from libs.login import login_required
from models import App, AppMode
from services.audio_service import AudioService

@@ -3,6 +3,7 @@ from typing import Any, Literal

from flask import request
from flask_restx import Resource
from graphon.model_runtime.errors.invoke import InvokeError
from pydantic import BaseModel, Field, field_validator
from werkzeug.exceptions import InternalServerError, NotFound

@@ -26,7 +27,6 @@ from core.errors.error import (
    QuotaExceededError,
)
from core.helper.trace_id_helper import get_external_trace_id
from dify_graph.model_runtime.errors.invoke import InvokeError
from libs import helper
from libs.helper import uuid_value
from libs.login import current_user, login_required

@@ -2,7 +2,7 @@ from flask import request
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field
from sqlalchemy import select
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker

from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model

@@ -69,7 +69,7 @@ class ConversationVariablesApi(Resource):
        page_size = 100
        stmt = stmt.limit(page_size).offset((page - 1) * page_size)

        with Session(db.engine) as session:
        with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
            rows = session.scalars(stmt).all()

        return {
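
The limit/offset arithmetic above pins the page size at 100, so page n starts at row (n - 1) * 100; a quick check:

page_size = 100
for page in (1, 2, 3):
    offset = (page - 1) * page_size
    print(f"page {page}: LIMIT {page_size} OFFSET {offset}")  # page 3 -> OFFSET 200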

@@ -110,6 +110,8 @@ class TracingConfigCheckError(BaseHTTPException):


class InvokeRateLimitError(BaseHTTPException):
    """Raised when an invocation hits the rate limit."""

    error_code = "rate_limit_error"
    description = "Rate Limit Error"
    code = 429

@@ -119,21 +121,3 @@ class NeedAddIdsError(BaseHTTPException):
    error_code = "need_add_ids"
    description = "Need to add ids."
    code = 400


class AppAssetNodeNotFoundError(BaseHTTPException):
    error_code = "app_asset_node_not_found"
    description = "App asset node not found."
    code = 404


class AppAssetFileRequiredError(BaseHTTPException):
    error_code = "app_asset_file_required"
    description = "File is required."
    code = 400


class AppAssetPathConflictError(BaseHTTPException):
    error_code = "app_asset_path_conflict"
    description = "Path already exists."
    code = 409

@@ -1,7 +1,7 @@
from collections.abc import Sequence
from typing import Any

from flask_restx import Resource
from graphon.model_runtime.errors.invoke import InvokeError
from pydantic import BaseModel, Field

from controllers.console import console_ns

@@ -17,14 +17,8 @@ from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError
from core.helper.code_executor.code_node_provider import CodeNodeProvider
from core.helper.code_executor.javascript.javascript_code_provider import JavascriptCodeProvider
from core.helper.code_executor.python3.python3_code_provider import Python3CodeProvider
from core.llm_generator.context_models import (
    AvailableVarPayload,
    CodeContextPayload,
    ParameterInfoPayload,
)
from core.llm_generator.entities import RuleCodeGeneratePayload, RuleGeneratePayload, RuleStructuredOutputPayload
from core.llm_generator.llm_generator import LLMGenerator
from dify_graph.model_runtime.errors.invoke import InvokeError
from extensions.ext_database import db
from libs.login import current_account_with_tenant, login_required
from models import App

@@ -47,34 +41,6 @@ class InstructionTemplatePayload(BaseModel):
    type: str = Field(..., description="Instruction template type")


class ContextGeneratePayload(BaseModel):
    """Payload for generating an extractor code node."""

    language: str = Field(default="python3", description="Code language (python3/javascript)")
    prompt_messages: list[dict[str, Any]] = Field(
        ..., description="Multi-turn conversation history; the last message is the current instruction"
    )
    model_config_data: dict[str, Any] = Field(..., alias="model_config", description="Model configuration")
    available_vars: list[AvailableVarPayload] = Field(..., description="Available variables from upstream nodes")
    parameter_info: ParameterInfoPayload = Field(..., description="Target parameter metadata from the frontend")
    code_context: CodeContextPayload = Field(description="Existing code node context for incremental generation")


class SuggestedQuestionsPayload(BaseModel):
    """Payload for generating suggested questions."""

    language: str = Field(
        default="English", description="Language for generated questions (e.g. English, Chinese, Japanese)"
    )
    model_config_data: dict[str, Any] = Field(
        default_factory=dict,
        alias="model_config",
        description="Model configuration (optional, uses system default if not provided)",
    )
    available_vars: list[AvailableVarPayload] = Field(..., description="Available variables from upstream nodes")
    parameter_info: ParameterInfoPayload = Field(..., description="Target parameter metadata from the frontend")


def reg(cls: type[BaseModel]):
    console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))

@@ -84,8 +50,6 @@ reg(RuleCodeGeneratePayload)
reg(RuleStructuredOutputPayload)
reg(InstructionGeneratePayload)
reg(InstructionTemplatePayload)
reg(ContextGeneratePayload)
reg(SuggestedQuestionsPayload)
reg(ModelConfig)

@@ -299,70 +263,3 @@ class InstructionGenerationTemplateApi(Resource):
                return {"data": INSTRUCTION_GENERATE_TEMPLATE_CODE}
            case _:
                raise ValueError(f"Invalid type: {args.type}")


@console_ns.route("/context-generate")
class ContextGenerateApi(Resource):
    @console_ns.doc("generate_with_context")
    @console_ns.doc(description="Generate with multi-turn conversation context")
    @console_ns.expect(console_ns.models[ContextGeneratePayload.__name__])
    @console_ns.response(200, "Content generated successfully")
    @console_ns.response(400, "Invalid request parameters or workflow not found")
    @console_ns.response(402, "Provider quota exceeded")
    @setup_required
    @login_required
    @account_initialization_required
    def post(self):
        from core.llm_generator.utils import deserialize_prompt_messages

        args = ContextGeneratePayload.model_validate(console_ns.payload)
        _, current_tenant_id = current_account_with_tenant()

        try:
            return LLMGenerator.generate_with_context(
                tenant_id=current_tenant_id,
                language=args.language,
                prompt_messages=deserialize_prompt_messages(args.prompt_messages),
                model_config=args.model_config_data,
                available_vars=args.available_vars,
                parameter_info=args.parameter_info,
                code_context=args.code_context,
            )
        except ProviderTokenNotInitError as ex:
            raise ProviderNotInitializeError(ex.description)
        except QuotaExceededError:
            raise ProviderQuotaExceededError()
        except ModelCurrentlyNotSupportError:
            raise ProviderModelCurrentlyNotSupportError()
        except InvokeError as e:
            raise CompletionRequestError(e.description)


@console_ns.route("/context-generate/suggested-questions")
class SuggestedQuestionsApi(Resource):
    @console_ns.doc("generate_suggested_questions")
    @console_ns.doc(description="Generate suggested questions for context generation")
    @console_ns.expect(console_ns.models[SuggestedQuestionsPayload.__name__])
    @console_ns.response(200, "Questions generated successfully")
    @setup_required
    @login_required
    @account_initialization_required
    def post(self):
        args = SuggestedQuestionsPayload.model_validate(console_ns.payload)
        _, current_tenant_id = current_account_with_tenant()
        try:
            return LLMGenerator.generate_suggested_questions(
                tenant_id=current_tenant_id,
                language=args.language,
                available_vars=args.available_vars,
                parameter_info=args.parameter_info,
                model_config=args.model_config_data,
            )
        except ProviderTokenNotInitError as ex:
            raise ProviderNotInitializeError(ex.description)
        except QuotaExceededError:
            raise ProviderQuotaExceededError()
        except ModelCurrentlyNotSupportError:
            raise ProviderModelCurrentlyNotSupportError()
        except InvokeError as e:
            raise CompletionRequestError(e.description)

@@ -3,6 +3,7 @@ from typing import Literal

from flask import request
from flask_restx import Resource, fields, marshal_with
from graphon.model_runtime.errors.invoke import InvokeError
from pydantic import BaseModel, Field, field_validator
from sqlalchemy import exists, func, select
from werkzeug.exceptions import InternalServerError, NotFound

@@ -24,7 +25,6 @@ from controllers.console.wraps import (
)
from core.app.entities.app_invoke_entities import InvokeFrom
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from dify_graph.model_runtime.errors.invoke import InvokeError
from extensions.ext_database import db
from fields.raws import FilesContainedField
from libs.helper import TimestampField, uuid_value

@@ -213,7 +213,6 @@ message_detail_model = console_ns.model(
        "status": fields.String,
        "error": fields.String,
        "parent_message_id": fields.String,
        "generation_detail": fields.Raw,
    },
)

@@ -88,6 +88,7 @@ class ModelConfigResource(Resource):
                    tenant_id=current_tenant_id,
                    app_id=app_model.id,
                    agent_tool=agent_tool_entity,
                    user_id=current_user.id,
                )
                manager = ToolParameterConfigurationManager(
                    tenant_id=current_tenant_id,

@@ -127,6 +128,7 @@ class ModelConfigResource(Resource):
                        tenant_id=current_tenant_id,
                        app_id=app_model.id,
                        agent_tool=agent_tool_entity,
                        user_id=current_user.id,
                    )
                except Exception:
                    continue

@@ -1,38 +0,0 @@
from flask import request
from flask_restx import Resource

from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, current_account_with_tenant, setup_required
from libs.login import login_required
from models import App
from models.model import AppMode
from services.skill_service import SkillService


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nodes/llm/skills")
class NodeSkillsApi(Resource):
    """Extract tool dependencies from an LLM node's skill prompts.

    The client sends the full node ``data`` object in the request body.
    The server builds a ``SkillBundle`` in real time from the current draft
    ``.md`` assets and resolves transitive tool dependencies; no cached
    bundle is used.
    """

    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
    def post(self, app_model: App):
        current_user, _ = current_account_with_tenant()
        node_data = request.get_json(force=True)
        if not isinstance(node_data, dict):
            return {"tool_dependencies": []}

        tool_deps = SkillService.extract_tool_dependencies(
            app=app_model,
            node_data=node_data,
            user_id=current_user.id,
        )
        return {"tool_dependencies": [d.model_dump() for d in tool_deps]}
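
A sketch of exercising this (now deleted) endpoint; the node ``data`` shape is a guess for illustration, since the real schema comes from the workflow editor:

import httpx

BASE = "https://cloud.example.com/console/api"  # hypothetical
APP_ID = "app-id"  # hypothetical

node_data = {"type": "llm", "prompt_template": [{"role": "system", "text": "Use the search skill."}]}
resp = httpx.post(f"{BASE}/apps/{APP_ID}/workflows/draft/nodes/llm/skills", json=node_data)
print(resp.json()["tool_dependencies"])  # resolved transitively from the draft .md assets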

@@ -5,8 +5,12 @@ from typing import Any

from flask import abort, request
from flask_restx import Resource, fields, marshal_with
from graphon.enums import NodeType
from graphon.file import File
from graphon.graph_engine.manager import GraphEngineManager
from graphon.model_runtime.utils.encoders import jsonable_encoder
from pydantic import BaseModel, Field, field_validator
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import BadRequest, Forbidden, InternalServerError, NotFound

import services

@@ -20,6 +24,7 @@ from core.app.app_config.features.file_upload.manager import FileUploadConfigManager
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.apps.workflow.app_generator import SKIP_PREPARE_USER_INPUTS_KEY
from core.app.entities.app_invoke_entities import InvokeFrom
from core.app.file_access import DatabaseFileAccessController
from core.helper.trace_id_helper import get_external_trace_id
from core.plugin.impl.exc import PluginInvokeError
from core.trigger.constants import TRIGGER_SCHEDULE_NODE_TYPE

@@ -29,15 +34,10 @@ from core.trigger.debug.event_selectors import (
    create_event_poller,
    select_trigger_debug_events,
)
from dify_graph.enums import NodeType
from dify_graph.file.models import File
from dify_graph.graph_engine.manager import GraphEngineManager
from dify_graph.model_runtime.utils.encoders import jsonable_encoder
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from factories import file_factory, variable_factory
from fields.member_fields import simple_account_fields
from fields.online_user_fields import online_user_list_fields
from fields.workflow_fields import workflow_fields, workflow_pagination_fields
from libs import helper
from libs.datetime_utils import naive_utc_now

@@ -46,15 +46,13 @@ from libs.login import current_account_with_tenant, login_required
from models import App
from models.model import AppMode
from models.workflow import Workflow
from repositories.workflow_collaboration_repository import WORKFLOW_ONLINE_USERS_PREFIX
from services.app_generate_service import AppGenerateService
from services.errors.app import IsDraftWorkflowError, WorkflowHashNotEqualError, WorkflowNotFoundError
from services.errors.llm import InvokeRateLimitError
from services.workflow.entities import NestedNodeGraphRequest, NestedNodeParameterSchema
from services.workflow.nested_node_graph_service import NestedNodeGraphService
from services.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError, WorkflowService

logger = logging.getLogger(__name__)
_file_access_controller = DatabaseFileAccessController()
LISTENING_RETRY_IN = 2000
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
RESTORE_SOURCE_WORKFLOW_MUST_BE_PUBLISHED_MESSAGE = "source workflow must be published"

@@ -167,14 +165,6 @@ class WorkflowUpdatePayload(BaseModel):
    marked_comment: str | None = Field(default=None, max_length=100)


class WorkflowFeaturesPayload(BaseModel):
    features: dict[str, Any] = Field(..., description="Workflow feature configuration")


class WorkflowOnlineUsersQuery(BaseModel):
    workflow_ids: str = Field(..., description="Comma-separated workflow IDs")


class DraftWorkflowTriggerRunPayload(BaseModel):
    node_id: str

@@ -183,15 +173,6 @@ class DraftWorkflowTriggerRunAllPayload(BaseModel):
    node_ids: list[str]


class NestedNodeGraphPayload(BaseModel):
    """Request payload for generating a nested node graph."""

    parent_node_id: str = Field(description="ID of the parent node that uses the extracted value")
    parameter_key: str = Field(description="Key of the parameter being extracted")
    context_source: list[str] = Field(description="Variable selector for the context source")
    parameter_schema: dict[str, Any] = Field(description="Schema of the parameter to extract")


def reg(cls: type[BaseModel]):
    console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))

@@ -207,11 +188,8 @@ reg(DefaultBlockConfigQuery)
reg(ConvertToWorkflowPayload)
reg(WorkflowListQuery)
reg(WorkflowUpdatePayload)
reg(WorkflowFeaturesPayload)
reg(WorkflowOnlineUsersQuery)
reg(DraftWorkflowTriggerRunPayload)
reg(DraftWorkflowTriggerRunAllPayload)
reg(NestedNodeGraphPayload)


# TODO(QuantumGhost): Refactor existing node run API to handle file parameter parsing

@@ -228,6 +206,7 @@ def _parse_file(workflow: Workflow, files: list[dict] | None = None) -> Sequence
        mappings=files,
        tenant_id=workflow.tenant_id,
        config=file_extra_config,
        access_controller=_file_access_controller,
    )
    return file_objs

@@ -856,14 +835,13 @@ class PublishedWorkflowApi(Resource):
        """
        Publish workflow
        """
        from services.app_bundle_service import AppBundleService

        current_user, _ = current_account_with_tenant()

        args = PublishWorkflowPayload.model_validate(console_ns.payload or {})

        with Session(db.engine) as session:
            workflow = AppBundleService.publish(
        workflow_service = WorkflowService()
        with sessionmaker(db.engine).begin() as session:
            workflow = workflow_service.publish_workflow(
                session=session,
                app_model=app_model,
                account=current_user,

@@ -880,8 +858,6 @@ class PublishedWorkflowApi(Resource):

            workflow_created_at = TimestampField().format(workflow.created_at)

            session.commit()

        return {
            "result": "success",
            "created_at": workflow_created_at,

@@ -974,31 +950,6 @@ class ConvertToWorkflowApi(Resource):
        }


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/features")
class WorkflowFeaturesApi(Resource):
    """Update draft workflow features."""

    @console_ns.expect(console_ns.models[WorkflowFeaturesPayload.__name__])
    @console_ns.doc("update_workflow_features")
    @console_ns.doc(description="Update draft workflow features")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.response(200, "Workflow features updated successfully")
    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
    def post(self, app_model: App):
        current_user, _ = current_account_with_tenant()

        args = WorkflowFeaturesPayload.model_validate(console_ns.payload or {})
        features = args.features

        workflow_service = WorkflowService()
        workflow_service.update_draft_workflow_features(app_model=app_model, features=features, account=current_user)

        return {"result": "success"}


@console_ns.route("/apps/<uuid:app_id>/workflows")
class PublishedAllWorkflowApi(Resource):
    @console_ns.expect(console_ns.models[WorkflowListQuery.__name__])

@@ -1029,7 +980,7 @@ class PublishedAllWorkflowApi(Resource):
            raise Forbidden()

        workflow_service = WorkflowService()
        with Session(db.engine) as session:
        with sessionmaker(db.engine).begin() as session:
            workflows, has_more = workflow_service.get_all_published_workflow(
                session=session,
                app_model=app_model,

@@ -1119,7 +1070,7 @@ class WorkflowByIdApi(Resource):
        workflow_service = WorkflowService()

        # Create a session and manage the transaction
        with Session(db.engine, expire_on_commit=False) as session:
        with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
            workflow = workflow_service.update_workflow(
                session=session,
                workflow_id=workflow_id,

@@ -1131,9 +1082,6 @@ class WorkflowByIdApi(Resource):
            if not workflow:
                raise NotFound("Workflow not found")

            # Commit the transaction in the controller
            session.commit()

            return workflow

    @setup_required

@@ -1148,13 +1096,11 @@ class WorkflowByIdApi(Resource):
        workflow_service = WorkflowService()

        # Create a session and manage the transaction
        with Session(db.engine) as session:
        with sessionmaker(db.engine).begin() as session:
            try:
                workflow_service.delete_workflow(
                    session=session, workflow_id=workflow_id, tenant_id=app_model.tenant_id
                )
                # Commit the transaction in the controller
                session.commit()
            except WorkflowInUseError as e:
                abort(400, description=str(e))
            except DraftWorkflowDeletionError as e:

@@ -1413,83 +1359,3 @@ class DraftWorkflowTriggerRunAllApi(Resource):
                "status": "error",
            }
        ), 400


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nested-node-graph")
class NestedNodeGraphApi(Resource):
    """
    API for generating Nested Node LLM graph structures.

    This endpoint creates a complete graph structure containing an LLM node
    configured to extract values from list[PromptMessage] variables.
    """

    @console_ns.doc("generate_nested_node_graph")
    @console_ns.doc(description="Generate a Nested Node LLM graph structure")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.expect(console_ns.models[NestedNodeGraphPayload.__name__])
    @console_ns.response(200, "Nested node graph generated successfully")
    @console_ns.response(400, "Invalid request parameters")
    @console_ns.response(403, "Permission denied")
    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
    @edit_permission_required
    def post(self, app_model: App):
        """
        Generate a Nested Node LLM graph structure.

        Returns a complete graph structure containing a single LLM node
        configured for extracting values from list[PromptMessage] context.
        """

        payload = NestedNodeGraphPayload.model_validate(console_ns.payload or {})

        parameter_schema = NestedNodeParameterSchema(
            name=payload.parameter_schema.get("name", payload.parameter_key),
            type=payload.parameter_schema.get("type", "string"),
            description=payload.parameter_schema.get("description", ""),
        )

        request = NestedNodeGraphRequest(
            parent_node_id=payload.parent_node_id,
            parameter_key=payload.parameter_key,
            context_source=payload.context_source,
            parameter_schema=parameter_schema,
        )

        with Session(db.engine) as session:
            service = NestedNodeGraphService(session)
            response = service.generate_nested_node_graph(tenant_id=app_model.tenant_id, request=request)

        return response.model_dump()
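
Mirroring the NestedNodeGraphPayload fields, the request body this (deleted) route validated looks like the following; the concrete values are illustrative only:

payload = {
    "parent_node_id": "node-123",                       # node that consumes the extracted value
    "parameter_key": "query",                           # parameter being extracted
    "context_source": ["llm_node", "prompt_messages"],  # variable selector for the context
    "parameter_schema": {"name": "query", "type": "string", "description": "value to extract"},
}
# POST .../workflows/draft/nested-node-graph with this JSON returns the generated
# graph structure from NestedNodeGraphService.generate_nested_node_graph().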
@console_ns.route("/apps/workflows/online-users")
|
||||
class WorkflowOnlineUsersApi(Resource):
|
||||
@console_ns.expect(console_ns.models[WorkflowOnlineUsersQuery.__name__])
|
||||
@console_ns.doc("get_workflow_online_users")
|
||||
@console_ns.doc(description="Get workflow online users")
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@marshal_with(online_user_list_fields)
|
||||
def get(self):
|
||||
args = WorkflowOnlineUsersQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
||||
|
||||
workflow_ids = [workflow_id.strip() for workflow_id in args.workflow_ids.split(",") if workflow_id.strip()]
|
||||
|
||||
results = []
|
||||
for workflow_id in workflow_ids:
|
||||
users_json = redis_client.hgetall(f"{WORKFLOW_ONLINE_USERS_PREFIX}{workflow_id}")
|
||||
|
||||
users = []
|
||||
for _, user_info_json in users_json.items():
|
||||
try:
|
||||
users.append(json.loads(user_info_json))
|
||||
except Exception:
|
||||
continue
|
||||
results.append({"workflow_id": workflow_id, "users": users})
|
||||
|
||||
return {"data": results}
|
||||
|
||||
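
The online-users endpoint reads one Redis hash per workflow and JSON-decodes each field. A minimal sketch of the write side this implies; the prefix value and field layout are assumptions, not taken from this diff:

import json

import redis

r = redis.Redis()
WORKFLOW_ONLINE_USERS_PREFIX = "workflow_online_users:"  # assumed value of the imported constant

def mark_online(workflow_id: str, user_id: str, name: str) -> None:
    # One hash per workflow, one JSON-encoded field per user, matching the
    # hgetall() + json.loads() loop in the controller above.
    key = f"{WORKFLOW_ONLINE_USERS_PREFIX}{workflow_id}"
    r.hset(key, user_id, json.dumps({"id": user_id, "name": name}))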

@@ -3,13 +3,13 @@ from datetime import datetime
from dateutil.parser import isoparse
from flask import request
from flask_restx import Resource, marshal_with
from graphon.enums import WorkflowExecutionStatus
from pydantic import BaseModel, Field, field_validator
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker

from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from dify_graph.enums import WorkflowExecutionStatus
from extensions.ext_database import db
from fields.workflow_app_log_fields import (
    build_workflow_app_log_pagination_model,

@@ -87,7 +87,7 @@ class WorkflowAppLogApi(Resource):

        # get paginated workflow app logs
        workflow_app_service = WorkflowAppService()
        with Session(db.engine) as session:
        with sessionmaker(db.engine).begin() as session:
            workflow_app_log_pagination = workflow_app_service.get_paginate_workflow_app_logs(
                session=session,
                app_model=app_model,

@@ -124,7 +124,7 @@ class WorkflowArchivedLogApi(Resource):
        args = WorkflowAppLogQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore

        workflow_app_service = WorkflowAppService()
        with Session(db.engine) as session:
        with sessionmaker(db.engine).begin() as session:
            workflow_app_log_pagination = workflow_app_service.get_paginate_workflow_archive_logs(
                session=session,
                app_model=app_model,
@@ -1,322 +0,0 @@
import logging

from flask_restx import Resource, marshal_with
from pydantic import BaseModel, Field, TypeAdapter

from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from fields.member_fields import AccountWithRole
from fields.workflow_comment_fields import (
    workflow_comment_basic_fields,
    workflow_comment_create_fields,
    workflow_comment_detail_fields,
    workflow_comment_reply_create_fields,
    workflow_comment_reply_update_fields,
    workflow_comment_resolve_fields,
    workflow_comment_update_fields,
)
from libs.login import current_user, login_required
from models import App
from services.account_service import TenantService
from services.workflow_comment_service import WorkflowCommentService

logger = logging.getLogger(__name__)
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"


class WorkflowCommentCreatePayload(BaseModel):
    position_x: float = Field(..., description="Comment X position")
    position_y: float = Field(..., description="Comment Y position")
    content: str = Field(..., description="Comment content")
    mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")


class WorkflowCommentUpdatePayload(BaseModel):
    content: str = Field(..., description="Comment content")
    position_x: float | None = Field(default=None, description="Comment X position")
    position_y: float | None = Field(default=None, description="Comment Y position")
    mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")


class WorkflowCommentReplyCreatePayload(BaseModel):
    content: str = Field(..., description="Reply content")
    mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")


class WorkflowCommentReplyUpdatePayload(BaseModel):
    content: str = Field(..., description="Reply content")
    mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")


class WorkflowCommentMentionUsersResponse(BaseModel):
    users: list[AccountWithRole] = Field(description="Mentionable users")


for model in (
    WorkflowCommentCreatePayload,
    WorkflowCommentUpdatePayload,
    WorkflowCommentReplyCreatePayload,
    WorkflowCommentReplyUpdatePayload,
):
    console_ns.schema_model(model.__name__, model.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))

for model in (AccountWithRole, WorkflowCommentMentionUsersResponse):
    console_ns.schema_model(model.__name__, model.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
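The registration loops above bridge Pydantic v2 and flask-restx: Pydantic emits JSON Schema with `#/$defs/{model}` references by default, while the Swagger 2.0 document flask-restx serves resolves `#/definitions/{model}`, hence the explicit `ref_template`. The same pattern in isolation (namespace and model names are illustrative):

    from flask_restx import Namespace
    from pydantic import BaseModel, Field

    ns = Namespace("demo")

    class CommentPayload(BaseModel):
        content: str = Field(..., description="Comment content")

    # Re-point schema refs at Swagger 2.0's definitions section so
    # flask-restx can resolve nested models.
    ns.schema_model(
        CommentPayload.__name__,
        CommentPayload.model_json_schema(ref_template="#/definitions/{model}"),
    )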

workflow_comment_basic_model = console_ns.model("WorkflowCommentBasic", workflow_comment_basic_fields)
workflow_comment_detail_model = console_ns.model("WorkflowCommentDetail", workflow_comment_detail_fields)
workflow_comment_create_model = console_ns.model("WorkflowCommentCreate", workflow_comment_create_fields)
workflow_comment_update_model = console_ns.model("WorkflowCommentUpdate", workflow_comment_update_fields)
workflow_comment_resolve_model = console_ns.model("WorkflowCommentResolve", workflow_comment_resolve_fields)
workflow_comment_reply_create_model = console_ns.model(
    "WorkflowCommentReplyCreate", workflow_comment_reply_create_fields
)
workflow_comment_reply_update_model = console_ns.model(
    "WorkflowCommentReplyUpdate", workflow_comment_reply_update_fields
)
workflow_comment_mention_users_model = console_ns.models[WorkflowCommentMentionUsersResponse.__name__]


@console_ns.route("/apps/<uuid:app_id>/workflow/comments")
class WorkflowCommentListApi(Resource):
    """API for listing and creating workflow comments."""

    @console_ns.doc("list_workflow_comments")
    @console_ns.doc(description="Get all comments for a workflow")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.response(200, "Comments retrieved successfully", workflow_comment_basic_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_basic_model, envelope="data")
    def get(self, app_model: App):
        """Get all comments for a workflow."""
        comments = WorkflowCommentService.get_comments(tenant_id=current_user.current_tenant_id, app_id=app_model.id)

        return comments

    @console_ns.doc("create_workflow_comment")
    @console_ns.doc(description="Create a new workflow comment")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.expect(console_ns.models[WorkflowCommentCreatePayload.__name__])
    @console_ns.response(201, "Comment created successfully", workflow_comment_create_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_create_model)
    def post(self, app_model: App):
        """Create a new workflow comment."""
        payload = WorkflowCommentCreatePayload.model_validate(console_ns.payload or {})

        result = WorkflowCommentService.create_comment(
            tenant_id=current_user.current_tenant_id,
            app_id=app_model.id,
            created_by=current_user.id,
            content=payload.content,
            position_x=payload.position_x,
            position_y=payload.position_y,
            mentioned_user_ids=payload.mentioned_user_ids,
        )

        return result, 201


@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>")
class WorkflowCommentDetailApi(Resource):
    """API for managing individual workflow comments."""

    @console_ns.doc("get_workflow_comment")
    @console_ns.doc(description="Get a specific workflow comment")
    @console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
    @console_ns.response(200, "Comment retrieved successfully", workflow_comment_detail_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_detail_model)
    def get(self, app_model: App, comment_id: str):
        """Get a specific workflow comment."""
        comment = WorkflowCommentService.get_comment(
            tenant_id=current_user.current_tenant_id, app_id=app_model.id, comment_id=comment_id
        )

        return comment

    @console_ns.doc("update_workflow_comment")
    @console_ns.doc(description="Update a workflow comment")
    @console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
    @console_ns.expect(console_ns.models[WorkflowCommentUpdatePayload.__name__])
    @console_ns.response(200, "Comment updated successfully", workflow_comment_update_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_update_model)
    def put(self, app_model: App, comment_id: str):
        """Update a workflow comment."""
        payload = WorkflowCommentUpdatePayload.model_validate(console_ns.payload or {})

        result = WorkflowCommentService.update_comment(
            tenant_id=current_user.current_tenant_id,
            app_id=app_model.id,
            comment_id=comment_id,
            user_id=current_user.id,
            content=payload.content,
            position_x=payload.position_x,
            position_y=payload.position_y,
            mentioned_user_ids=payload.mentioned_user_ids,
        )

        return result

    @console_ns.doc("delete_workflow_comment")
    @console_ns.doc(description="Delete a workflow comment")
    @console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
    @console_ns.response(204, "Comment deleted successfully")
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    def delete(self, app_model: App, comment_id: str):
        """Delete a workflow comment."""
        WorkflowCommentService.delete_comment(
            tenant_id=current_user.current_tenant_id,
            app_id=app_model.id,
            comment_id=comment_id,
            user_id=current_user.id,
        )

        return {"result": "success"}, 204


@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/resolve")
class WorkflowCommentResolveApi(Resource):
    """API for resolving and reopening workflow comments."""

    @console_ns.doc("resolve_workflow_comment")
    @console_ns.doc(description="Resolve a workflow comment")
    @console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
    @console_ns.response(200, "Comment resolved successfully", workflow_comment_resolve_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_resolve_model)
    def post(self, app_model: App, comment_id: str):
        """Resolve a workflow comment."""
        comment = WorkflowCommentService.resolve_comment(
            tenant_id=current_user.current_tenant_id,
            app_id=app_model.id,
            comment_id=comment_id,
            user_id=current_user.id,
        )

        return comment


@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/replies")
class WorkflowCommentReplyApi(Resource):
    """API for managing comment replies."""

    @console_ns.doc("create_workflow_comment_reply")
    @console_ns.doc(description="Add a reply to a workflow comment")
    @console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
    @console_ns.expect(console_ns.models[WorkflowCommentReplyCreatePayload.__name__])
    @console_ns.response(201, "Reply created successfully", workflow_comment_reply_create_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_reply_create_model)
    def post(self, app_model: App, comment_id: str):
        """Add a reply to a workflow comment."""
        # Validate comment access first
        WorkflowCommentService.validate_comment_access(
            comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
        )

        payload = WorkflowCommentReplyCreatePayload.model_validate(console_ns.payload or {})

        result = WorkflowCommentService.create_reply(
            comment_id=comment_id,
            content=payload.content,
            created_by=current_user.id,
            mentioned_user_ids=payload.mentioned_user_ids,
        )

        return result, 201


@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/replies/<string:reply_id>")
class WorkflowCommentReplyDetailApi(Resource):
    """API for managing individual comment replies."""

    @console_ns.doc("update_workflow_comment_reply")
    @console_ns.doc(description="Update a comment reply")
    @console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID", "reply_id": "Reply ID"})
    @console_ns.expect(console_ns.models[WorkflowCommentReplyUpdatePayload.__name__])
    @console_ns.response(200, "Reply updated successfully", workflow_comment_reply_update_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_reply_update_model)
    def put(self, app_model: App, comment_id: str, reply_id: str):
        """Update a comment reply."""
        # Validate comment access first
        WorkflowCommentService.validate_comment_access(
            comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
        )

        payload = WorkflowCommentReplyUpdatePayload.model_validate(console_ns.payload or {})

        reply = WorkflowCommentService.update_reply(
            reply_id=reply_id,
            user_id=current_user.id,
            content=payload.content,
            mentioned_user_ids=payload.mentioned_user_ids,
        )

        return reply

    @console_ns.doc("delete_workflow_comment_reply")
    @console_ns.doc(description="Delete a comment reply")
    @console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID", "reply_id": "Reply ID"})
    @console_ns.response(204, "Reply deleted successfully")
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    def delete(self, app_model: App, comment_id: str, reply_id: str):
        """Delete a comment reply."""
        # Validate comment access first
        WorkflowCommentService.validate_comment_access(
            comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
        )

        WorkflowCommentService.delete_reply(reply_id=reply_id, user_id=current_user.id)

        return {"result": "success"}, 204


@console_ns.route("/apps/<uuid:app_id>/workflow/comments/mention-users")
class WorkflowCommentMentionUsersApi(Resource):
    """API for getting mentionable users for workflow comments."""

    @console_ns.doc("workflow_comment_mention_users")
    @console_ns.doc(description="Get all users in current tenant for mentions")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.response(200, "Mentionable users retrieved successfully", workflow_comment_mention_users_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    def get(self, app_model: App):
        """Get all users in current tenant for mentions."""
        members = TenantService.get_tenant_members(current_user.current_tenant)
        member_models = TypeAdapter(list[AccountWithRole]).validate_python(members, from_attributes=True)
        response = WorkflowCommentMentionUsersResponse(users=member_models)
        return response.model_dump(mode="json"), 200
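The mention-users handler in the deleted file converted ORM rows to Pydantic models via `TypeAdapter`; `validate_python(..., from_attributes=True)` reads fields off object attributes rather than requiring mappings. A reduced sketch (`Member` stands in for the ORM row type):

    from pydantic import BaseModel, TypeAdapter

    class Member:  # stand-in for an ORM row
        def __init__(self, name: str, role: str):
            self.name = name
            self.role = role

    class AccountOut(BaseModel):
        name: str
        role: str

    rows = [Member("alice", "owner"), Member("bob", "editor")]
    # from_attributes=True pulls values from attributes, not dict keys.
    accounts = TypeAdapter(list[AccountOut]).validate_python(rows, from_attributes=True)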
@@ -5,8 +5,12 @@ from typing import Any, NoReturn, ParamSpec, TypeVar

from flask import Response, request
from flask_restx import Resource, fields, marshal, marshal_with
+from graphon.file import helpers as file_helpers
+from graphon.variables.segment_group import SegmentGroup
+from graphon.variables.segments import ArrayFileSegment, FileSegment, Segment
+from graphon.variables.types import SegmentType
from pydantic import BaseModel, Field
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker

from controllers.console import console_ns
from controllers.console.app.error import (
@@ -15,22 +19,19 @@ from controllers.console.app.error import (
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from controllers.web.error import InvalidArgumentError, NotFoundError
-from dify_graph.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
-from dify_graph.file import helpers as file_helpers
-from dify_graph.variables.segment_group import SegmentGroup
-from dify_graph.variables.segments import ArrayFileSegment, ArrayPromptMessageSegment, FileSegment, Segment
-from dify_graph.variables.types import SegmentType
+from core.app.file_access import DatabaseFileAccessController
+from core.workflow.variable_prefixes import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from extensions.ext_database import db
from factories import variable_factory
from factories.file_factory import build_from_mapping, build_from_mappings
-from libs.login import current_account_with_tenant, current_user, login_required
+from factories.variable_factory import build_segment_with_type
+from libs.login import current_user, login_required
from models import App, AppMode
from models.workflow import WorkflowDraftVariable
-from services.sandbox.sandbox_service import SandboxService
from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService
from services.workflow_service import WorkflowService

logger = logging.getLogger(__name__)
+_file_access_controller = DatabaseFileAccessController()
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"


@@ -44,16 +45,6 @@ class WorkflowDraftVariableUpdatePayload(BaseModel):
    value: Any | None = Field(default=None, description="Variable value")


-class ConversationVariableUpdatePayload(BaseModel):
-    conversation_variables: list[dict[str, Any]] = Field(
-        ..., description="Conversation variables for the draft workflow"
-    )
-
-
-class EnvironmentVariableUpdatePayload(BaseModel):
-    environment_variables: list[dict[str, Any]] = Field(..., description="Environment variables for the draft workflow")
-
-
console_ns.schema_model(
    WorkflowDraftVariableListQuery.__name__,
    WorkflowDraftVariableListQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
@@ -62,14 +53,6 @@ console_ns.schema_model(
    WorkflowDraftVariableUpdatePayload.__name__,
    WorkflowDraftVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
-console_ns.schema_model(
-    ConversationVariableUpdatePayload.__name__,
-    ConversationVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
-)
-console_ns.schema_model(
-    EnvironmentVariableUpdatePayload.__name__,
-    EnvironmentVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
-)


def _convert_values_to_json_serializable_object(value: Segment):
@@ -77,8 +60,6 @@ def _convert_values_to_json_serializable_object(value: Segment):
        return value.value.model_dump()
    elif isinstance(value, ArrayFileSegment):
        return [i.model_dump() for i in value.value]
-    elif isinstance(value, ArrayPromptMessageSegment):
-        return value.to_object()
    elif isinstance(value, SegmentGroup):
        return [_convert_values_to_json_serializable_object(i) for i in value.value]
    else:
@@ -263,7 +244,7 @@ class WorkflowVariableCollectionApi(Resource):
            raise DraftWorkflowNotExist()

        # fetch draft workflow by app_model
-        with Session(bind=db.engine, expire_on_commit=False) as session:
+        with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
            draft_var_srv = WorkflowDraftVariableService(
                session=session,
            )
@@ -281,8 +262,6 @@ class WorkflowVariableCollectionApi(Resource):
    @console_ns.response(204, "Workflow variables deleted successfully")
    @_api_prerequisite
    def delete(self, app_model: App):
-        current_user, _ = current_account_with_tenant()
-        SandboxService.delete_draft_storage(app_model.tenant_id, app_model.id, current_user.id)
        draft_var_srv = WorkflowDraftVariableService(
            session=db.session(),
        )
@@ -319,7 +298,7 @@ class NodeVariableCollectionApi(Resource):
    @marshal_with(workflow_draft_variable_list_model)
    def get(self, app_model: App, node_id: str):
        validate_node_id(node_id)
-        with Session(bind=db.engine, expire_on_commit=False) as session:
+        with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
            draft_var_srv = WorkflowDraftVariableService(
                session=session,
            )
@@ -412,14 +391,22 @@ class VariableApi(Resource):
        if variable.value_type == SegmentType.FILE:
            if not isinstance(raw_value, dict):
                raise InvalidArgumentError(description=f"expected dict for file, got {type(raw_value)}")
-            raw_value = build_from_mapping(mapping=raw_value, tenant_id=app_model.tenant_id)
+            raw_value = build_from_mapping(
+                mapping=raw_value,
+                tenant_id=app_model.tenant_id,
+                access_controller=_file_access_controller,
+            )
        elif variable.value_type == SegmentType.ARRAY_FILE:
            if not isinstance(raw_value, list):
                raise InvalidArgumentError(description=f"expected list for files, got {type(raw_value)}")
            if len(raw_value) > 0 and not isinstance(raw_value[0], dict):
                raise InvalidArgumentError(description=f"expected dict for files[0], got {type(raw_value)}")
-            raw_value = build_from_mappings(mappings=raw_value, tenant_id=app_model.tenant_id)
-        new_value = variable_factory.build_segment_with_type(variable.value_type, raw_value)
+            raw_value = build_from_mappings(
+                mappings=raw_value,
+                tenant_id=app_model.tenant_id,
+                access_controller=_file_access_controller,
+            )
+        new_value = build_segment_with_type(variable.value_type, raw_value)
        draft_var_srv.update_variable(variable, name=new_name, value=new_value)
        db.session.commit()
        return variable
@@ -478,7 +465,7 @@ class VariableResetApi(Resource):


def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList:
-    with Session(bind=db.engine, expire_on_commit=False) as session:
+    with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
        draft_var_srv = WorkflowDraftVariableService(
            session=session,
        )
@@ -516,35 +503,6 @@ class ConversationVariableCollectionApi(Resource):
        db.session.commit()
        return _get_variable_list(app_model, CONVERSATION_VARIABLE_NODE_ID)
-
-    @console_ns.expect(console_ns.models[ConversationVariableUpdatePayload.__name__])
-    @console_ns.doc("update_conversation_variables")
-    @console_ns.doc(description="Update conversation variables for workflow draft")
-    @console_ns.doc(params={"app_id": "Application ID"})
-    @console_ns.response(200, "Conversation variables updated successfully")
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @edit_permission_required
-    @get_app_model(mode=AppMode.ADVANCED_CHAT)
-    def post(self, app_model: App):
-        payload = ConversationVariableUpdatePayload.model_validate(console_ns.payload or {})
-
-        workflow_service = WorkflowService()
-
-        conversation_variables_list = payload.conversation_variables
-        conversation_variables = [
-            variable_factory.build_conversation_variable_from_mapping(obj) for obj in conversation_variables_list
-        ]
-
-        current_user, _ = current_account_with_tenant()
-        workflow_service.update_draft_workflow_conversation_variables(
-            app_model=app_model,
-            account=current_user,
-            conversation_variables=conversation_variables,
-        )
-
-        return {"result": "success"}


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/system-variables")
class SystemVariableCollectionApi(Resource):
@@ -596,32 +554,3 @@ class EnvironmentVariableCollectionApi(Resource):
        )

        return {"items": env_vars_list}
-
-    @console_ns.expect(console_ns.models[EnvironmentVariableUpdatePayload.__name__])
-    @console_ns.doc("update_environment_variables")
-    @console_ns.doc(description="Update environment variables for workflow draft")
-    @console_ns.doc(params={"app_id": "Application ID"})
-    @console_ns.response(200, "Environment variables updated successfully")
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @edit_permission_required
-    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
-    def post(self, app_model: App):
-        payload = EnvironmentVariableUpdatePayload.model_validate(console_ns.payload or {})
-        current_user, _ = current_account_with_tenant()
-
-        workflow_service = WorkflowService()
-
-        environment_variables_list = payload.environment_variables
-        environment_variables = [
-            variable_factory.build_environment_variable_from_mapping(obj) for obj in environment_variables_list
-        ]
-
-        workflow_service.update_draft_workflow_environment_variables(
-            app_model=app_model,
-            account=current_user,
-            environment_variables=environment_variables,
-        )
-
-        return {"result": "success"}
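The `build_from_mapping`/`build_from_mappings` hunks above thread a module-level `DatabaseFileAccessController` into file construction, so access checks happen where files are built. The factory shape below is an assumption inferred from the call sites in this diff, not the real `factories.file_factory` code:

    # Hypothetical sketch; only the call sites above are authoritative.
    class FileAccessController:
        def can_access(self, tenant_id: str, file_id: str) -> bool:
            raise NotImplementedError

    def build_from_mapping(*, mapping: dict, tenant_id: str,
                           access_controller: FileAccessController):
        file_id = mapping["upload_file_id"]  # assumed mapping key
        if not access_controller.can_access(tenant_id, file_id):
            raise PermissionError(f"tenant {tenant_id} cannot read file {file_id}")
        return {"id": file_id, "tenant_id": tenant_id}  # placeholder result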
@@ -1,8 +1,10 @@
from datetime import UTC, datetime, timedelta
-from typing import Literal, cast
+from typing import Literal, TypedDict, cast

from flask import request
from flask_restx import Resource, fields, marshal_with
+from graphon.entities.pause_reason import HumanInputRequired
+from graphon.enums import WorkflowExecutionStatus
from pydantic import BaseModel, Field, field_validator
from sqlalchemy import select
from sqlalchemy.orm import sessionmaker
@@ -12,8 +14,7 @@ from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.web.error import NotFoundError
-from dify_graph.entities.pause_reason import HumanInputRequired
-from dify_graph.enums import WorkflowExecutionStatus
+from core.workflow.human_input_forms import load_form_tokens_by_form_id as _load_form_tokens_by_form_id
from extensions.ext_database import db
from fields.end_user_fields import simple_end_user_fields
from fields.member_fields import simple_account_fields
@@ -172,6 +173,23 @@ console_ns.schema_model(
)


+class HumanInputPauseTypeResponse(TypedDict):
+    type: Literal["human_input"]
+    form_id: str
+    backstage_input_url: str | None
+
+
+class PausedNodeResponse(TypedDict):
+    node_id: str
+    node_title: str
+    pause_type: HumanInputPauseTypeResponse
+
+
+class WorkflowPauseDetailsResponse(TypedDict):
+    paused_at: str | None
+    paused_nodes: list[PausedNodeResponse]
+
+
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs")
class AdvancedChatAppWorkflowRunListApi(Resource):
    @console_ns.doc("get_advanced_chat_workflow_runs")
@@ -489,18 +507,22 @@ class ConsoleWorkflowPauseDetailsApi(Resource):
        # Check if workflow is suspended
        is_paused = workflow_run.status == WorkflowExecutionStatus.PAUSED
        if not is_paused:
-            return {
+            empty_response: WorkflowPauseDetailsResponse = {
                "paused_at": None,
                "paused_nodes": [],
-            }, 200
+            }
+            return empty_response, 200

        pause_entity = workflow_run_repo.get_workflow_pause(workflow_run_id)
        pause_reasons = pause_entity.get_pause_reasons() if pause_entity else []
+        form_tokens_by_form_id = _load_form_tokens_by_form_id(
+            [reason.form_id for reason in pause_reasons if isinstance(reason, HumanInputRequired)]
+        )

        # Build response
        paused_at = pause_entity.paused_at if pause_entity else None
-        paused_nodes = []
-        response = {
+        paused_nodes: list[PausedNodeResponse] = []
+        response: WorkflowPauseDetailsResponse = {
            "paused_at": paused_at.isoformat() + "Z" if paused_at else None,
            "paused_nodes": paused_nodes,
        }
@@ -514,7 +536,9 @@ class ConsoleWorkflowPauseDetailsApi(Resource):
                    "pause_type": {
                        "type": "human_input",
                        "form_id": reason.form_id,
-                        "backstage_input_url": _build_backstage_input_url(reason.form_token),
+                        "backstage_input_url": _build_backstage_input_url(
+                            form_tokens_by_form_id.get(reason.form_id)
+                        ),
                    },
                }
            )
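The new `TypedDict` declarations give the pause-details responses a statically checked shape while remaining plain dicts at runtime. A minimal illustration of what the annotations buy (names shortened):

    from typing import Literal, TypedDict

    class PauseType(TypedDict):
        type: Literal["human_input"]
        form_id: str

    class PauseDetails(TypedDict):
        paused_at: str | None
        paused_nodes: list[PauseType]

    # A type checker now flags missing or misspelled keys; at runtime
    # this is still an ordinary dict, so Flask serializes it unchanged.
    resp: PauseDetails = {"paused_at": None, "paused_nodes": []}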
@@ -4,7 +4,7 @@ from flask import request
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel
from sqlalchemy import select
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import NotFound

from configs import dify_config
@@ -64,7 +64,7 @@ class WebhookTriggerApi(Resource):

        node_id = args.node_id

-        with Session(db.engine) as session:
+        with sessionmaker(db.engine).begin() as session:
            # Get webhook trigger for this app and node
            webhook_trigger = (
                session.query(WorkflowWebhookTrigger)
@@ -95,7 +95,7 @@ class AppTriggersApi(Resource):
        assert isinstance(current_user, Account)
        assert current_user.current_tenant_id is not None

-        with Session(db.engine) as session:
+        with sessionmaker(db.engine).begin() as session:
            # Get all triggers for this app using select API
            triggers = (
                session.execute(
@@ -137,7 +137,7 @@ class AppTriggerEnableApi(Resource):
        assert current_user.current_tenant_id is not None

        trigger_id = args.trigger_id
-        with Session(db.engine) as session:
+        with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
            # Find the trigger using select
            trigger = session.execute(
                select(AppTrigger).where(
@@ -153,9 +153,6 @@ class AppTriggerEnableApi(Resource):
            # Update status based on enable_trigger boolean
            trigger.status = AppTriggerStatus.ENABLED if args.enable_trigger else AppTriggerStatus.DISABLED

-            session.commit()
-            session.refresh(trigger)
-
            # Add computed icon field
            url_prefix = dify_config.CONSOLE_API_URL + "/console/api/workspaces/current/tool-provider/builtin/"
            if trigger.trigger_type == "trigger-plugin":
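Note the pairing in the trigger-enable hunk: once `sessionmaker(...).begin()` commits on block exit, the explicit `session.commit()`/`session.refresh(trigger)` pair is redundant, and `expire_on_commit=False` keeps loaded attributes readable after the block. A sketch of the resulting shape (the status strings are placeholders for the `AppTriggerStatus` values):

    from sqlalchemy.orm import sessionmaker

    def set_trigger_status(engine, trigger_id, enabled: bool):
        # expire_on_commit=False: attributes on `trigger` stay populated
        # after the implicit commit, so no session.refresh() is needed.
        with sessionmaker(engine, expire_on_commit=False).begin() as session:
            trigger = session.get(AppTrigger, trigger_id)  # AppTrigger as above
            trigger.status = "enabled" if enabled else "disabled"
        return trigger  # commit already happened; object remains usable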
@@ -4,11 +4,11 @@ from typing import Concatenate, ParamSpec, TypeVar

from flask import jsonify, request
from flask_restx import Resource
+from graphon.model_runtime.utils.encoders import jsonable_encoder
from pydantic import BaseModel
from werkzeug.exceptions import BadRequest, NotFound

from controllers.console.wraps import account_initialization_required, setup_required
-from dify_graph.model_runtime.utils.encoders import jsonable_encoder
from libs.login import current_account_with_tenant, login_required
from models import Account
from models.model import OAuthProviderApp

@@ -36,7 +36,7 @@ class Subscription(Resource):
    @only_edition_cloud
    def get(self):
        current_user, current_tenant_id = current_account_with_tenant()
-        args = SubscriptionQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore
+        args = SubscriptionQuery.model_validate(request.args.to_dict(flat=True))
        BillingService.is_tenant_owner_or_admin(current_user)
        return BillingService.get_subscription(args.plan, args.interval, current_user.email, current_tenant_id)


@@ -31,7 +31,7 @@ class ComplianceApi(Resource):
    @only_edition_cloud
    def get(self):
        current_user, current_tenant_id = current_account_with_tenant()
-        args = ComplianceDownloadQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore
+        args = ComplianceDownloadQuery.model_validate(request.args.to_dict(flat=True))

        ip_address = extract_remote_ip(request)
        device_info = request.headers.get("User-Agent", "Unknown device")
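Both billing hunks drop a stale `# type: ignore` from the query-validation call; the pattern itself is unchanged. For reference, validating flat query strings with a Pydantic model (the route and fields here are illustrative):

    from flask import Flask, request
    from pydantic import BaseModel

    app = Flask(__name__)

    class SubscriptionQuery(BaseModel):
        plan: str
        interval: str = "month"

    @app.get("/subscription")
    def subscription():
        # flat=True keeps one value per key, matching the model's fields.
        args = SubscriptionQuery.model_validate(request.args.to_dict(flat=True))
        return {"plan": args.plan, "interval": args.interval}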
@@ -6,7 +6,7 @@ from flask import request
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field
from sqlalchemy import select
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import NotFound

from controllers.common.schema import get_or_create_model, register_schema_model
@@ -159,7 +159,7 @@ class DataSourceApi(Resource):
    @account_initialization_required
    def patch(self, binding_id, action: Literal["enable", "disable"]):
        binding_id = str(binding_id)
-        with Session(db.engine) as session:
+        with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
            data_source_binding = session.execute(
                select(DataSourceOauthBinding).filter_by(id=binding_id)
            ).scalar_one_or_none()
@@ -211,7 +211,7 @@ class DataSourceNotionListApi(Resource):
        if not credential:
            raise NotFound("Credential not found.")
        exist_page_ids = []
-        with Session(db.engine) as session:
+        with sessionmaker(db.engine).begin() as session:
            # import notion in the exist dataset
            if query.dataset_id:
                dataset = DatasetService.get_dataset(query.dataset_id)
@@ -2,6 +2,7 @@ from typing import Any, cast

from flask import request
from flask_restx import Resource, fields, marshal, marshal_with
+from graphon.model_runtime.entities.model_entities import ModelType
from pydantic import BaseModel, Field, field_validator
from sqlalchemy import func, select
from werkzeug.exceptions import Forbidden, NotFound
@@ -25,13 +26,12 @@ from controllers.console.wraps import (
)
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
from core.indexing_runner import IndexingRunner
-from core.provider_manager import ProviderManager
+from core.plugin.impl.model_runtime_factory import create_plugin_provider_manager
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.extractor.entity.datasource_type import DatasourceType
from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo
from core.rag.index_processor.constant.index_type import IndexTechniqueType
from core.rag.retrieval.retrieval_methods import RetrievalMethod
-from dify_graph.model_runtime.entities.model_entities import ModelType
from extensions.ext_database import db
from fields.app_fields import app_detail_kernel_fields, related_app_list
from fields.dataset_fields import (
@@ -332,7 +332,7 @@ class DatasetListApi(Resource):
        )

        # check embedding setting
-        provider_manager = ProviderManager()
+        provider_manager = create_plugin_provider_manager(tenant_id=current_tenant_id)
        configurations = provider_manager.get_configurations(tenant_id=current_tenant_id)

        embedding_models = configurations.get_models(model_type=ModelType.TEXT_EMBEDDING, only_active=True)
@@ -446,7 +446,7 @@ class DatasetApi(Resource):
        data.update({"partial_member_list": part_users_list})

        # check embedding setting
-        provider_manager = ProviderManager()
+        provider_manager = create_plugin_provider_manager(tenant_id=current_tenant_id)
        configurations = provider_manager.get_configurations(tenant_id=current_tenant_id)

        embedding_models = configurations.get_models(model_type=ModelType.TEXT_EMBEDDING, only_active=True)
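`ProviderManager()` → `create_plugin_provider_manager(tenant_id=...)` here, and `ModelManager()` → `ModelManager.for_tenant(tenant_id=...)` in the hunks that follow, both move tenant scoping into construction instead of leaving the manager global. A generic sketch of the idea; the real signatures live in `core.plugin.impl.model_runtime_factory` and `core.model_manager` and may differ:

    class TenantScopedManager:
        def __init__(self, tenant_id: str | None = None) -> None:
            self._tenant_id = tenant_id

        @classmethod
        def for_tenant(cls, tenant_id: str) -> "TenantScopedManager":
            # Bind the tenant once up front instead of threading
            # tenant_id through every later call.
            return cls(tenant_id=tenant_id)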
@@ -9,6 +9,8 @@ from uuid import UUID
import sqlalchemy as sa
from flask import request, send_file
from flask_restx import Resource, fields, marshal, marshal_with
+from graphon.model_runtime.entities.model_entities import ModelType
+from graphon.model_runtime.errors.invoke import InvokeAuthorizationError
from pydantic import BaseModel, Field
from sqlalchemy import asc, desc, func, select
from werkzeug.exceptions import Forbidden, NotFound
@@ -28,8 +30,6 @@ from core.plugin.impl.exc import PluginDaemonClientSideError
from core.rag.extractor.entity.datasource_type import DatasourceType
from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo
from core.rag.index_processor.constant.index_type import IndexTechniqueType
-from dify_graph.model_runtime.entities.model_entities import ModelType
-from dify_graph.model_runtime.errors.invoke import InvokeAuthorizationError
from extensions.ext_database import db
from fields.dataset_fields import dataset_fields
from fields.document_fields import (
@@ -454,7 +454,7 @@ class DatasetInitApi(Resource):
        if knowledge_config.embedding_model is None or knowledge_config.embedding_model_provider is None:
            raise ValueError("embedding model and embedding model provider are required for high quality indexing.")
        try:
-            model_manager = ModelManager()
+            model_manager = ModelManager.for_tenant(tenant_id=current_tenant_id)
            model_manager.get_model_instance(
                tenant_id=current_tenant_id,
                provider=knowledge_config.embedding_model_provider,
@@ -2,6 +2,7 @@ import uuid

from flask import request
from flask_restx import Resource, marshal
+from graphon.model_runtime.entities.model_entities import ModelType
from pydantic import BaseModel, Field
from sqlalchemy import String, cast, func, or_, select
from sqlalchemy.dialects.postgresql import JSONB
@@ -27,7 +28,6 @@ from controllers.console.wraps import (
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
from core.model_manager import ModelManager
from core.rag.index_processor.constant.index_type import IndexTechniqueType
-from dify_graph.model_runtime.entities.model_entities import ModelType
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from fields.segment_fields import child_chunk_fields, segment_fields
@@ -283,7 +283,7 @@ class DatasetDocumentSegmentApi(Resource):
        if dataset.indexing_technique == IndexTechniqueType.HIGH_QUALITY:
            # check embedding model setting
            try:
-                model_manager = ModelManager()
+                model_manager = ModelManager.for_tenant(tenant_id=current_tenant_id)
                model_manager.get_model_instance(
                    tenant_id=current_tenant_id,
                    provider=dataset.embedding_model_provider,
@@ -336,7 +336,7 @@ class DatasetDocumentSegmentAddApi(Resource):
        # check embedding model setting
        if dataset.indexing_technique == IndexTechniqueType.HIGH_QUALITY:
            try:
-                model_manager = ModelManager()
+                model_manager = ModelManager.for_tenant(tenant_id=current_tenant_id)
                model_manager.get_model_instance(
                    tenant_id=current_tenant_id,
                    provider=dataset.embedding_model_provider,
@@ -387,7 +387,7 @@ class DatasetDocumentSegmentUpdateApi(Resource):
        if dataset.indexing_technique == IndexTechniqueType.HIGH_QUALITY:
            # check embedding model setting
            try:
-                model_manager = ModelManager()
+                model_manager = ModelManager.for_tenant(tenant_id=current_tenant_id)
                model_manager.get_model_instance(
                    tenant_id=current_tenant_id,
                    provider=dataset.embedding_model_provider,
@@ -572,7 +572,7 @@ class ChildChunkAddApi(Resource):
        # check embedding model setting
        if dataset.indexing_technique == IndexTechniqueType.HIGH_QUALITY:
            try:
-                model_manager = ModelManager()
+                model_manager = ModelManager.for_tenant(tenant_id=current_tenant_id)
                model_manager.get_model_instance(
                    tenant_id=current_tenant_id,
                    provider=dataset.embedding_model_provider,
@@ -25,7 +25,7 @@ from libs.login import current_account_with_tenant, login_required
from services.dataset_service import DatasetService
from services.external_knowledge_service import ExternalDatasetService
from services.hit_testing_service import HitTestingService
-from services.knowledge_service import ExternalDatasetTestService
+from services.knowledge_service import BedrockRetrievalSetting, ExternalDatasetTestService


def _build_dataset_detail_model():
@@ -86,7 +86,7 @@ class ExternalHitTestingPayload(BaseModel):


class BedrockRetrievalPayload(BaseModel):
-    retrieval_setting: dict[str, object]
+    retrieval_setting: "BedrockRetrievalSetting"
    query: str
    knowledge_id: str
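The final hunk tightens `BedrockRetrievalPayload.retrieval_setting` from an untyped `dict[str, object]` to the `BedrockRetrievalSetting` model now exported by `services.knowledge_service`, so malformed settings fail at request validation instead of inside the service. Before/after in miniature (the stand-in fields on `BedrockRetrievalSetting` are assumptions):

    from pydantic import BaseModel

    class BedrockRetrievalSetting(BaseModel):  # stand-in definition
        top_k: int = 5
        score_threshold: float | None = None

    class BedrockRetrievalPayload(BaseModel):
        retrieval_setting: BedrockRetrievalSetting  # was: dict[str, object]
        query: str
        knowledge_id: str

    # Bad settings are now rejected at the API boundary:
    BedrockRetrievalPayload.model_validate(
        {"retrieval_setting": {"top_k": 3}, "query": "q", "knowledge_id": "kb-1"}
    )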
Some files were not shown because too many files have changed in this diff.