mirror of
https://github.com/langgenius/dify.git
synced 2026-05-09 20:08:06 +08:00
Compare commits
28 Commits
feat/cli
...
4-17-lint-
| Author | SHA1 | Date | |
|---|---|---|---|
| b60498b2a7 | |||
| 3b24d8d2d1 | |||
| 051ba99cd2 | |||
| dc83e8aa09 | |||
| 77f8f2babb | |||
| 77d6c108e7 | |||
| c2a5962023 | |||
| d583b1b835 | |||
| da00de6688 | |||
| a633387e9b | |||
| df389eba1c | |||
| 5cae61eb5a | |||
| ba8e0681d5 | |||
| de123a8695 | |||
| 21e5962f98 | |||
| db60e649b8 | |||
| e561788809 | |||
| 3cd6ef4464 | |||
| 39dc636b02 | |||
| 4f03b7193e | |||
| 0d921cd21d | |||
| 1a7e46368e | |||
| 8c8ad02a6f | |||
| 66c91604b1 | |||
| 3aa578ceac | |||
| 1ccb763e24 | |||
| bcb6eb484f | |||
| 4dae959c28 |
@ -200,7 +200,7 @@ When assigned to test a directory/path, test **ALL content** within that path:
|
||||
|
||||
- ✅ **Import real project components** directly (including base components and siblings)
|
||||
- ✅ **Only mock**: API services (`@/service/*`), `next/navigation`, complex context providers
|
||||
- ❌ **DO NOT mock** base components (`@/app/components/base/*`)
|
||||
- ❌ **DO NOT mock** base components (`@/app/components/base/*`) or dify-ui primitives (`@langgenius/dify-ui/*`)
|
||||
- ❌ **DO NOT mock** sibling/child components in the same directory
|
||||
|
||||
> See [Test Structure Template](#test-structure-template) for correct import/mock patterns.
|
||||
@ -325,12 +325,12 @@ For more detailed information, refer to:
|
||||
### Reference Examples in Codebase
|
||||
|
||||
- `web/utils/classnames.spec.ts` - Utility function tests
|
||||
- `web/app/components/base/button/index.spec.tsx` - Component tests
|
||||
- `web/app/components/base/radio/__tests__/index.spec.tsx` - Component tests
|
||||
- `web/__mocks__/provider-context.ts` - Mock factory example
|
||||
|
||||
### Project Configuration
|
||||
|
||||
- `web/vitest.config.ts` - Vitest configuration
|
||||
- `web/vite.config.ts` - Vite/Vitest configuration
|
||||
- `web/vitest.setup.ts` - Test environment setup
|
||||
- `web/scripts/analyze-component.js` - Component analysis tool
|
||||
- Modules are not mocked automatically. Global mocks live in `web/vitest.setup.ts` (for example `react-i18next`, `next/image`); mock other modules like `ky` or `mime` locally in test files.
|
||||
|
||||
@ -36,7 +36,7 @@ Use this checklist when generating or reviewing tests for Dify frontend componen
|
||||
|
||||
### Integration vs Mocking
|
||||
|
||||
- [ ] **DO NOT mock base components** (`Loading`, `Button`, `Tooltip`, etc.)
|
||||
- [ ] **DO NOT mock base components or dify-ui primitives** (base `Loading`, `Input`, `Badge`; dify-ui `Button`, `Tooltip`, `Dialog`, etc.)
|
||||
- [ ] Import real project components instead of mocking
|
||||
- [ ] Only mock: API calls, complex context providers, third-party libs with side effects
|
||||
- [ ] Prefer integration testing when using single spec file
|
||||
@ -73,7 +73,7 @@ Use this checklist when generating or reviewing tests for Dify frontend componen
|
||||
|
||||
### Mocks
|
||||
|
||||
- [ ] **DO NOT mock base components** (`@/app/components/base/*`)
|
||||
- [ ] **DO NOT mock base components or dify-ui primitives** (`@/app/components/base/*` or `@langgenius/dify-ui/*`)
|
||||
- [ ] `vi.clearAllMocks()` in `beforeEach` (not `afterEach`)
|
||||
- [ ] Shared mock state reset in `beforeEach`
|
||||
- [ ] i18n uses global mock (auto-loaded in `web/vitest.setup.ts`); only override locally for custom translations
|
||||
|
||||
@ -2,29 +2,27 @@
|
||||
|
||||
## ⚠️ Important: What NOT to Mock
|
||||
|
||||
### DO NOT Mock Base Components
|
||||
### DO NOT Mock Base Components or dify-ui Primitives
|
||||
|
||||
**Never mock components from `@/app/components/base/`** such as:
|
||||
**Never mock components from `@/app/components/base/` or from `@langgenius/dify-ui/*`** such as:
|
||||
|
||||
- `Loading`, `Spinner`
|
||||
- `Button`, `Input`, `Select`
|
||||
- `Tooltip`, `Modal`, `Dropdown`
|
||||
- `Icon`, `Badge`, `Tag`
|
||||
- Legacy base (`@/app/components/base/*`): `Loading`, `Spinner`, `Input`, `Badge`, `Tag`
|
||||
- dify-ui primitives (`@langgenius/dify-ui/*`): `Button`, `Tooltip`, `Dialog`, `Popover`, `DropdownMenu`, `ContextMenu`, `Select`, `AlertDialog`, `Toast`
|
||||
|
||||
**Why?**
|
||||
|
||||
- Base components will have their own dedicated tests
|
||||
- These components have their own dedicated tests
|
||||
- Mocking them creates false positives (tests pass but real integration fails)
|
||||
- Using real components tests actual integration behavior
|
||||
|
||||
```typescript
|
||||
// ❌ WRONG: Don't mock base components
|
||||
// ❌ WRONG: Don't mock base components or dify-ui primitives
|
||||
vi.mock('@/app/components/base/loading', () => () => <div>Loading</div>)
|
||||
vi.mock('@/app/components/base/button', () => ({ children }: any) => <button>{children}</button>)
|
||||
vi.mock('@langgenius/dify-ui/button', () => ({ Button: ({ children }: any) => <button>{children}</button> }))
|
||||
|
||||
// ✅ CORRECT: Import and use real base components
|
||||
// ✅ CORRECT: Import and use the real components
|
||||
import Loading from '@/app/components/base/loading'
|
||||
import Button from '@/app/components/base/button'
|
||||
import { Button } from '@langgenius/dify-ui/button'
|
||||
// They will render normally in tests
|
||||
```
|
||||
|
||||
@ -319,7 +317,7 @@ const renderWithQueryClient = (ui: React.ReactElement) => {
|
||||
|
||||
### ✅ DO
|
||||
|
||||
1. **Use real base components** - Import from `@/app/components/base/` directly
|
||||
1. **Use real base components and dify-ui primitives** - Import from `@/app/components/base/` or `@langgenius/dify-ui/*` directly
|
||||
1. **Use real project components** - Prefer importing over mocking
|
||||
1. **Use real Zustand stores** - Set test state via `store.setState()`
|
||||
1. **Reset mocks in `beforeEach`**, not `afterEach`
|
||||
@ -330,7 +328,7 @@ const renderWithQueryClient = (ui: React.ReactElement) => {
|
||||
|
||||
### ❌ DON'T
|
||||
|
||||
1. **Don't mock base components** (`Loading`, `Button`, `Tooltip`, etc.)
|
||||
1. **Don't mock base components or dify-ui primitives** (`Loading`, `Input`, `Button`, `Tooltip`, `Dialog`, etc.)
|
||||
1. **Don't mock Zustand store modules** - Use real stores with `setState()`
|
||||
1. Don't mock components you can import directly
|
||||
1. Don't create overly simplified mocks that miss conditional logic
|
||||
@ -342,7 +340,7 @@ const renderWithQueryClient = (ui: React.ReactElement) => {
|
||||
```
|
||||
Need to use a component in test?
|
||||
│
|
||||
├─ Is it from @/app/components/base/*?
|
||||
├─ Is it from @/app/components/base/* or @langgenius/dify-ui/*?
|
||||
│ └─ YES → Import real component, DO NOT mock
|
||||
│
|
||||
├─ Is it a project component?
|
||||
|
||||
19
.github/workflows/anti-slop.yml
vendored
19
.github/workflows/anti-slop.yml
vendored
@ -1,19 +0,0 @@
|
||||
name: Anti-Slop PR Check
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened, edited, synchronize]
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
anti-slop:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: peakoss/anti-slop@85daca1880e9e1af197fc06ea03349daf08f4202 # v0.2.1
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
close-pr: false
|
||||
failure-add-pr-labels: "needs-revision"
|
||||
8
.github/workflows/api-tests.yml
vendored
8
.github/workflows/api-tests.yml
vendored
@ -35,7 +35,7 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup UV and Python
|
||||
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
|
||||
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
|
||||
with:
|
||||
enable-cache: true
|
||||
python-version: ${{ matrix.python-version }}
|
||||
@ -84,7 +84,7 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup UV and Python
|
||||
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
|
||||
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
|
||||
with:
|
||||
enable-cache: true
|
||||
python-version: ${{ matrix.python-version }}
|
||||
@ -105,7 +105,7 @@ jobs:
|
||||
run: sh .github/workflows/expose_service_ports.sh
|
||||
|
||||
- name: Set up Sandbox
|
||||
uses: hoverkraft-tech/compose-action@4894d2492015c1774ee5a13a95b1072093087ec3 # v2.5.0
|
||||
uses: hoverkraft-tech/compose-action@d2bee4f07e8ca410d6b196d00f90c12e7d48c33a # v2.6.0
|
||||
with:
|
||||
compose-file: |
|
||||
docker/docker-compose.middleware.yaml
|
||||
@ -156,7 +156,7 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup UV and Python
|
||||
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
|
||||
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
|
||||
with:
|
||||
enable-cache: true
|
||||
python-version: "3.12"
|
||||
|
||||
10
.github/workflows/autofix.yml
vendored
10
.github/workflows/autofix.yml
vendored
@ -25,7 +25,7 @@ jobs:
|
||||
- name: Check Docker Compose inputs
|
||||
if: github.event_name != 'merge_group'
|
||||
id: docker-compose-changes
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
uses: tj-actions/changed-files@9426d40962ed5378910ee2e21d5f8c6fcbf2dd96 # v47.0.6
|
||||
with:
|
||||
files: |
|
||||
docker/generate_docker_compose
|
||||
@ -35,7 +35,7 @@ jobs:
|
||||
- name: Check web inputs
|
||||
if: github.event_name != 'merge_group'
|
||||
id: web-changes
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
uses: tj-actions/changed-files@9426d40962ed5378910ee2e21d5f8c6fcbf2dd96 # v47.0.6
|
||||
with:
|
||||
files: |
|
||||
web/**
|
||||
@ -48,7 +48,7 @@ jobs:
|
||||
- name: Check api inputs
|
||||
if: github.event_name != 'merge_group'
|
||||
id: api-changes
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
uses: tj-actions/changed-files@9426d40962ed5378910ee2e21d5f8c6fcbf2dd96 # v47.0.6
|
||||
with:
|
||||
files: |
|
||||
api/**
|
||||
@ -58,7 +58,7 @@ jobs:
|
||||
python-version: "3.11"
|
||||
|
||||
- if: github.event_name != 'merge_group'
|
||||
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
|
||||
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
|
||||
|
||||
- name: Generate Docker Compose
|
||||
if: github.event_name != 'merge_group' && steps.docker-compose-changes.outputs.any_changed == 'true'
|
||||
@ -123,4 +123,4 @@ jobs:
|
||||
vp exec eslint --concurrency=2 --prune-suppressions --quiet || true
|
||||
|
||||
- if: github.event_name != 'merge_group'
|
||||
uses: autofix-ci/action@7a166d7532b277f34e16238930461bf77f9d7ed8 # v1.3.3
|
||||
uses: autofix-ci/action@c5b2d67aa2274e7b5a18224e8171550871fc7e4a # v1.3.4
|
||||
|
||||
8
.github/workflows/db-migration-test.yml
vendored
8
.github/workflows/db-migration-test.yml
vendored
@ -19,7 +19,7 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup UV and Python
|
||||
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
|
||||
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
|
||||
with:
|
||||
enable-cache: true
|
||||
python-version: "3.12"
|
||||
@ -40,7 +40,7 @@ jobs:
|
||||
cp middleware.env.example middleware.env
|
||||
|
||||
- name: Set up Middlewares
|
||||
uses: hoverkraft-tech/compose-action@4894d2492015c1774ee5a13a95b1072093087ec3 # v2.5.0
|
||||
uses: hoverkraft-tech/compose-action@d2bee4f07e8ca410d6b196d00f90c12e7d48c33a # v2.6.0
|
||||
with:
|
||||
compose-file: |
|
||||
docker/docker-compose.middleware.yaml
|
||||
@ -69,7 +69,7 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup UV and Python
|
||||
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
|
||||
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
|
||||
with:
|
||||
enable-cache: true
|
||||
python-version: "3.12"
|
||||
@ -94,7 +94,7 @@ jobs:
|
||||
sed -i 's/DB_USERNAME=postgres/DB_USERNAME=mysql/' middleware.env
|
||||
|
||||
- name: Set up Middlewares
|
||||
uses: hoverkraft-tech/compose-action@4894d2492015c1774ee5a13a95b1072093087ec3 # v2.5.0
|
||||
uses: hoverkraft-tech/compose-action@d2bee4f07e8ca410d6b196d00f90c12e7d48c33a # v2.6.0
|
||||
with:
|
||||
compose-file: |
|
||||
docker/docker-compose.middleware.yaml
|
||||
|
||||
2
.github/workflows/pyrefly-diff.yml
vendored
2
.github/workflows/pyrefly-diff.yml
vendored
@ -22,7 +22,7 @@ jobs:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Python & UV
|
||||
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
|
||||
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
|
||||
with:
|
||||
enable-cache: true
|
||||
|
||||
|
||||
@ -24,7 +24,7 @@ jobs:
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Setup Python & UV
|
||||
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
|
||||
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
|
||||
with:
|
||||
enable-cache: true
|
||||
|
||||
|
||||
2
.github/workflows/pyrefly-type-coverage.yml
vendored
2
.github/workflows/pyrefly-type-coverage.yml
vendored
@ -22,7 +22,7 @@ jobs:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Python & UV
|
||||
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
|
||||
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
|
||||
with:
|
||||
enable-cache: true
|
||||
|
||||
|
||||
12
.github/workflows/style.yml
vendored
12
.github/workflows/style.yml
vendored
@ -25,7 +25,7 @@ jobs:
|
||||
|
||||
- name: Check changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
uses: tj-actions/changed-files@9426d40962ed5378910ee2e21d5f8c6fcbf2dd96 # v47.0.6
|
||||
with:
|
||||
files: |
|
||||
api/**
|
||||
@ -33,7 +33,7 @@ jobs:
|
||||
|
||||
- name: Setup UV and Python
|
||||
if: steps.changed-files.outputs.any_changed == 'true'
|
||||
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
|
||||
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
|
||||
with:
|
||||
enable-cache: false
|
||||
python-version: "3.12"
|
||||
@ -73,7 +73,7 @@ jobs:
|
||||
|
||||
- name: Check changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
uses: tj-actions/changed-files@9426d40962ed5378910ee2e21d5f8c6fcbf2dd96 # v47.0.6
|
||||
with:
|
||||
files: |
|
||||
web/**
|
||||
@ -95,7 +95,7 @@ jobs:
|
||||
- name: Restore ESLint cache
|
||||
if: steps.changed-files.outputs.any_changed == 'true'
|
||||
id: eslint-cache-restore
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
|
||||
with:
|
||||
path: .eslintcache
|
||||
key: ${{ runner.os }}-eslint-${{ hashFiles('pnpm-lock.yaml', 'eslint.config.mjs', 'web/eslint.config.mjs', 'web/eslint.constants.mjs', 'web/plugins/eslint/**') }}-${{ github.sha }}
|
||||
@ -124,7 +124,7 @@ jobs:
|
||||
|
||||
- name: Save ESLint cache
|
||||
if: steps.changed-files.outputs.any_changed == 'true' && success() && steps.eslint-cache-restore.outputs.cache-hit != 'true'
|
||||
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
uses: actions/cache/save@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
|
||||
with:
|
||||
path: .eslintcache
|
||||
key: ${{ steps.eslint-cache-restore.outputs.cache-primary-key }}
|
||||
@ -142,7 +142,7 @@ jobs:
|
||||
|
||||
- name: Check changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
uses: tj-actions/changed-files@9426d40962ed5378910ee2e21d5f8c6fcbf2dd96 # v47.0.6
|
||||
with:
|
||||
files: |
|
||||
**.sh
|
||||
|
||||
2
.github/workflows/tool-test-sdks.yaml
vendored
2
.github/workflows/tool-test-sdks.yaml
vendored
@ -30,7 +30,7 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
|
||||
with:
|
||||
node-version: 22
|
||||
cache: ''
|
||||
|
||||
2
.github/workflows/translate-i18n-claude.yml
vendored
2
.github/workflows/translate-i18n-claude.yml
vendored
@ -158,7 +158,7 @@ jobs:
|
||||
|
||||
- name: Run Claude Code for Translation Sync
|
||||
if: steps.context.outputs.CHANGED_FILES != ''
|
||||
uses: anthropics/claude-code-action@b47fd721da662d48c5680e154ad16a73ed74d2e0 # v1.0.93
|
||||
uses: anthropics/claude-code-action@38ec876110f9fbf8b950c79f534430740c3ac009 # v1.0.101
|
||||
with:
|
||||
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
4
.github/workflows/vdb-tests-full.yml
vendored
4
.github/workflows/vdb-tests-full.yml
vendored
@ -36,7 +36,7 @@ jobs:
|
||||
remove_tool_cache: true
|
||||
|
||||
- name: Setup UV and Python
|
||||
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
|
||||
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
|
||||
with:
|
||||
enable-cache: true
|
||||
python-version: ${{ matrix.python-version }}
|
||||
@ -65,7 +65,7 @@ jobs:
|
||||
# tiflash
|
||||
|
||||
- name: Set up Full Vector Store Matrix
|
||||
uses: hoverkraft-tech/compose-action@4894d2492015c1774ee5a13a95b1072093087ec3 # v2.5.0
|
||||
uses: hoverkraft-tech/compose-action@d2bee4f07e8ca410d6b196d00f90c12e7d48c33a # v2.6.0
|
||||
with:
|
||||
compose-file: |
|
||||
docker/docker-compose.yaml
|
||||
|
||||
4
.github/workflows/vdb-tests.yml
vendored
4
.github/workflows/vdb-tests.yml
vendored
@ -33,7 +33,7 @@ jobs:
|
||||
remove_tool_cache: true
|
||||
|
||||
- name: Setup UV and Python
|
||||
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
|
||||
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
|
||||
with:
|
||||
enable-cache: true
|
||||
python-version: ${{ matrix.python-version }}
|
||||
@ -62,7 +62,7 @@ jobs:
|
||||
# tiflash
|
||||
|
||||
- name: Set up Vector Stores for Smoke Coverage
|
||||
uses: hoverkraft-tech/compose-action@4894d2492015c1774ee5a13a95b1072093087ec3 # v2.5.0
|
||||
uses: hoverkraft-tech/compose-action@d2bee4f07e8ca410d6b196d00f90c12e7d48c33a # v2.6.0
|
||||
with:
|
||||
compose-file: |
|
||||
docker/docker-compose.yaml
|
||||
|
||||
2
.github/workflows/web-e2e.yml
vendored
2
.github/workflows/web-e2e.yml
vendored
@ -28,7 +28,7 @@ jobs:
|
||||
uses: ./.github/actions/setup-web
|
||||
|
||||
- name: Setup UV and Python
|
||||
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
|
||||
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
|
||||
with:
|
||||
enable-cache: true
|
||||
python-version: "3.12"
|
||||
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@ -237,6 +237,10 @@ scripts/stress-test/reports/
|
||||
.playwright-mcp/
|
||||
.serena/
|
||||
|
||||
# vitest browser mode attachments (failure screenshots, traces, etc.)
|
||||
.vitest-attachments/
|
||||
**/__screenshots__/
|
||||
|
||||
# settings
|
||||
*.local.json
|
||||
*.local.md
|
||||
|
||||
@ -159,7 +159,6 @@ def initialize_extensions(app: DifyApp):
|
||||
ext_logstore,
|
||||
ext_mail,
|
||||
ext_migrate,
|
||||
ext_oauth_bearer,
|
||||
ext_orjson,
|
||||
ext_otel,
|
||||
ext_proxy_fix,
|
||||
@ -204,7 +203,6 @@ def initialize_extensions(app: DifyApp):
|
||||
ext_enterprise_telemetry,
|
||||
ext_request_logging,
|
||||
ext_session_factory,
|
||||
ext_oauth_bearer,
|
||||
]
|
||||
for ext in extensions:
|
||||
short_name = ext.__name__.split(".")[-1]
|
||||
|
||||
@ -499,35 +499,6 @@ class HttpConfig(BaseSettings):
|
||||
def WEB_API_CORS_ALLOW_ORIGINS(self) -> list[str]:
|
||||
return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(",")
|
||||
|
||||
inner_OPENAPI_CORS_ALLOW_ORIGINS: str = Field(
|
||||
description=(
|
||||
"Comma-separated allowlist for /openapi/v1/* CORS. "
|
||||
"Default empty = same-origin only. Browser-cookie routes within "
|
||||
"the group reject cross-origin OPTIONS regardless of this list."
|
||||
),
|
||||
validation_alias=AliasChoices("OPENAPI_CORS_ALLOW_ORIGINS"),
|
||||
default="",
|
||||
)
|
||||
|
||||
@computed_field
|
||||
def OPENAPI_CORS_ALLOW_ORIGINS(self) -> list[str]:
|
||||
return [o for o in self.inner_OPENAPI_CORS_ALLOW_ORIGINS.split(",") if o]
|
||||
|
||||
inner_OPENAPI_KNOWN_CLIENT_IDS: str = Field(
|
||||
description=(
|
||||
"Comma-separated client_id values accepted at "
|
||||
"POST /openapi/v1/oauth/device/code. New CLIs / SDKs added here "
|
||||
"without code changes. Unknown client_id returns 400 unsupported_client."
|
||||
),
|
||||
validation_alias=AliasChoices("OPENAPI_KNOWN_CLIENT_IDS"),
|
||||
default="difyctl",
|
||||
)
|
||||
|
||||
@computed_field # type: ignore[misc]
|
||||
@property
|
||||
def OPENAPI_KNOWN_CLIENT_IDS(self) -> frozenset[str]:
|
||||
return frozenset(c for c in self.inner_OPENAPI_KNOWN_CLIENT_IDS.split(",") if c)
|
||||
|
||||
HTTP_REQUEST_MAX_CONNECT_TIMEOUT: int = Field(
|
||||
ge=1, description="Maximum connection timeout in seconds for HTTP requests", default=10
|
||||
)
|
||||
@ -903,17 +874,6 @@ class AuthConfig(BaseSettings):
|
||||
default=86400,
|
||||
)
|
||||
|
||||
ENABLE_OAUTH_BEARER: bool = Field(
|
||||
description="Enable OAuth bearer authentication (device-flow + Service API /v1/* bearer middleware).",
|
||||
default=True,
|
||||
)
|
||||
|
||||
OPENAPI_RATE_LIMIT_PER_TOKEN: PositiveInt = Field(
|
||||
description="Per-token rate limit on /openapi/v1/* (requests per minute). "
|
||||
"Bucket keyed on sha256(token), shared across api replicas via Redis.",
|
||||
default=60,
|
||||
)
|
||||
|
||||
|
||||
class ModerationConfig(BaseSettings):
|
||||
"""
|
||||
@ -1188,14 +1148,6 @@ class CeleryScheduleTasksConfig(BaseSettings):
|
||||
description="Enable scheduled workflow run cleanup task",
|
||||
default=False,
|
||||
)
|
||||
ENABLE_CLEAN_OAUTH_ACCESS_TOKENS_TASK: bool = Field(
|
||||
description="Enable scheduled cleanup of revoked/expired OAuth access-token rows past retention.",
|
||||
default=True,
|
||||
)
|
||||
OAUTH_ACCESS_TOKEN_RETENTION_DAYS: PositiveInt = Field(
|
||||
description="Days to retain revoked OAuth access-token rows before deletion.",
|
||||
default=30,
|
||||
)
|
||||
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: bool = Field(
|
||||
description="Enable mail clean document notify task",
|
||||
default=False,
|
||||
|
||||
@ -595,13 +595,25 @@ class ChangeEmailSendEmailApi(Resource):
|
||||
account = None
|
||||
user_email = None
|
||||
email_for_sending = args.email.lower()
|
||||
if args.phase is not None and args.phase == "new_email":
|
||||
# Default to the initial phase; any legacy/unexpected client input is
|
||||
# coerced back to `old_email` so we never trust the caller to declare
|
||||
# later phases without a verified predecessor token.
|
||||
send_phase = AccountService.CHANGE_EMAIL_PHASE_OLD
|
||||
if args.phase is not None and args.phase == AccountService.CHANGE_EMAIL_PHASE_NEW:
|
||||
send_phase = AccountService.CHANGE_EMAIL_PHASE_NEW
|
||||
if args.token is None:
|
||||
raise InvalidTokenError()
|
||||
|
||||
reset_data = AccountService.get_change_email_data(args.token)
|
||||
if reset_data is None:
|
||||
raise InvalidTokenError()
|
||||
|
||||
# The token used to request a new-email code must come from the
|
||||
# old-email verification step. This prevents the bypass described
|
||||
# in GHSA-4q3w-q5mc-45rq where the phase-1 token was reused here.
|
||||
token_phase = reset_data.get(AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY)
|
||||
if token_phase != AccountService.CHANGE_EMAIL_PHASE_OLD_VERIFIED:
|
||||
raise InvalidTokenError()
|
||||
user_email = reset_data.get("email", "")
|
||||
|
||||
if user_email.lower() != current_user.email.lower():
|
||||
@ -620,7 +632,7 @@ class ChangeEmailSendEmailApi(Resource):
|
||||
email=email_for_sending,
|
||||
old_email=user_email,
|
||||
language=language,
|
||||
phase=args.phase,
|
||||
phase=send_phase,
|
||||
)
|
||||
return {"result": "success", "data": token}
|
||||
|
||||
@ -655,12 +667,31 @@ class ChangeEmailCheckApi(Resource):
|
||||
AccountService.add_change_email_error_rate_limit(user_email)
|
||||
raise EmailCodeError()
|
||||
|
||||
# Only advance tokens that were minted by the matching send-code step;
|
||||
# refuse tokens that have already progressed or lack a phase marker so
|
||||
# the chain `old_email -> old_email_verified -> new_email -> new_email_verified`
|
||||
# is strictly enforced.
|
||||
phase_transitions = {
|
||||
AccountService.CHANGE_EMAIL_PHASE_OLD: AccountService.CHANGE_EMAIL_PHASE_OLD_VERIFIED,
|
||||
AccountService.CHANGE_EMAIL_PHASE_NEW: AccountService.CHANGE_EMAIL_PHASE_NEW_VERIFIED,
|
||||
}
|
||||
token_phase = token_data.get(AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY)
|
||||
if not isinstance(token_phase, str):
|
||||
raise InvalidTokenError()
|
||||
refreshed_phase = phase_transitions.get(token_phase)
|
||||
if refreshed_phase is None:
|
||||
raise InvalidTokenError()
|
||||
|
||||
# Verified, revoke the first token
|
||||
AccountService.revoke_change_email_token(args.token)
|
||||
|
||||
# Refresh token data by generating a new token
|
||||
# Refresh token data by generating a new token that carries the
|
||||
# upgraded phase so later steps can check it.
|
||||
_, new_token = AccountService.generate_change_email_token(
|
||||
user_email, code=args.code, old_email=token_data.get("old_email"), additional_data={}
|
||||
user_email,
|
||||
code=args.code,
|
||||
old_email=token_data.get("old_email"),
|
||||
additional_data={AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY: refreshed_phase},
|
||||
)
|
||||
|
||||
AccountService.reset_change_email_error_rate_limit(user_email)
|
||||
@ -690,13 +721,29 @@ class ChangeEmailResetApi(Resource):
|
||||
if not reset_data:
|
||||
raise InvalidTokenError()
|
||||
|
||||
AccountService.revoke_change_email_token(args.token)
|
||||
# Only tokens that completed both verification phases may be used to
|
||||
# change the email. This closes GHSA-4q3w-q5mc-45rq where a token from
|
||||
# the initial send-code step could be replayed directly here.
|
||||
token_phase = reset_data.get(AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY)
|
||||
if token_phase != AccountService.CHANGE_EMAIL_PHASE_NEW_VERIFIED:
|
||||
raise InvalidTokenError()
|
||||
|
||||
# Bind the new email to the token that was mailed and verified, so a
|
||||
# verified token cannot be reused with a different `new_email` value.
|
||||
token_email = reset_data.get("email")
|
||||
normalized_token_email = token_email.lower() if isinstance(token_email, str) else token_email
|
||||
if normalized_token_email != normalized_new_email:
|
||||
raise InvalidTokenError()
|
||||
|
||||
old_email = reset_data.get("old_email", "")
|
||||
current_user, _ = current_account_with_tenant()
|
||||
if current_user.email.lower() != old_email.lower():
|
||||
raise AccountNotFound()
|
||||
|
||||
# Revoke only after all checks pass so failed attempts don't burn a
|
||||
# legitimately verified token.
|
||||
AccountService.revoke_change_email_token(args.token)
|
||||
|
||||
updated_account = AccountService.update_account_email(current_user, email=normalized_new_email)
|
||||
|
||||
AccountService.send_change_email_completed_notify_email(
|
||||
|
||||
@ -1,41 +0,0 @@
|
||||
from flask import Blueprint
|
||||
from flask_restx import Namespace
|
||||
|
||||
from libs.device_flow_security import attach_anti_framing
|
||||
from libs.external_api import ExternalApi
|
||||
|
||||
bp = Blueprint("openapi", __name__, url_prefix="/openapi/v1")
|
||||
attach_anti_framing(bp)
|
||||
|
||||
api = ExternalApi(
|
||||
bp,
|
||||
version="1.0",
|
||||
title="OpenAPI",
|
||||
description="User-scoped programmatic API (bearer auth)",
|
||||
)
|
||||
|
||||
openapi_ns = Namespace("openapi", description="User-scoped operations", path="/")
|
||||
|
||||
from . import (
|
||||
account,
|
||||
app_run,
|
||||
apps,
|
||||
apps_permitted,
|
||||
index,
|
||||
oauth_device,
|
||||
oauth_device_sso,
|
||||
workspaces,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"account",
|
||||
"app_run",
|
||||
"apps",
|
||||
"apps_permitted",
|
||||
"index",
|
||||
"oauth_device",
|
||||
"oauth_device_sso",
|
||||
"workspaces",
|
||||
]
|
||||
|
||||
api.add_namespace(openapi_ns)
|
||||
@ -1,33 +0,0 @@
|
||||
"""Audit emission for openapi app-run endpoints.
|
||||
|
||||
Pattern: logger.info with extra={"audit": True, "event": "app.run.openapi", ...}
|
||||
matches the existing oauth_device convention. The EE OTel exporter consults
|
||||
its own allowlist to decide whether to ship the line.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
EVENT_APP_RUN_OPENAPI = "app.run.openapi"
|
||||
|
||||
|
||||
def emit_app_run(*, app_id: str, tenant_id: str, caller_kind: str, mode: str) -> None:
|
||||
logger.info(
|
||||
"audit: %s app_id=%s tenant_id=%s caller_kind=%s mode=%s",
|
||||
EVENT_APP_RUN_OPENAPI,
|
||||
app_id,
|
||||
tenant_id,
|
||||
caller_kind,
|
||||
mode,
|
||||
extra={
|
||||
"audit": True,
|
||||
"event": EVENT_APP_RUN_OPENAPI,
|
||||
"app_id": app_id,
|
||||
"tenant_id": tenant_id,
|
||||
"caller_kind": caller_kind,
|
||||
"mode": mode,
|
||||
},
|
||||
)
|
||||
@ -1,143 +0,0 @@
|
||||
"""Server-side JSON Schema derivation from Dify `user_input_form`."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, cast
|
||||
|
||||
from controllers.service_api.app.error import AppUnavailableError
|
||||
from models import App
|
||||
from models.model import AppMode
|
||||
|
||||
JSON_SCHEMA_DRAFT = "https://json-schema.org/draft/2020-12/schema"
|
||||
|
||||
EMPTY_INPUT_SCHEMA: dict[str, Any] = {
|
||||
"$schema": JSON_SCHEMA_DRAFT,
|
||||
"type": "object",
|
||||
"properties": {},
|
||||
"required": [],
|
||||
}
|
||||
|
||||
_CHAT_FAMILY = frozenset({AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT})
|
||||
|
||||
|
||||
def _file_object_shape() -> dict[str, Any]:
|
||||
"""Single-file value shape. Forward-compat placeholder; refine when file-API contract pins."""
|
||||
return {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {"type": "string"},
|
||||
"transfer_method": {"type": "string"},
|
||||
"url": {"type": "string"},
|
||||
"upload_file_id": {"type": "string"},
|
||||
},
|
||||
"additionalProperties": True,
|
||||
}
|
||||
|
||||
|
||||
def _row_to_schema(row_type: str, row: dict[str, Any]) -> dict[str, Any] | None:
|
||||
label = row.get("label") or row.get("variable", "")
|
||||
base: dict[str, Any] = {"title": label} if label else {}
|
||||
|
||||
if row_type in ("text-input", "paragraph"):
|
||||
out = {"type": "string"} | base
|
||||
max_length = row.get("max_length")
|
||||
if isinstance(max_length, int) and max_length > 0:
|
||||
out["maxLength"] = max_length
|
||||
return out
|
||||
|
||||
if row_type == "select":
|
||||
return {"type": "string"} | base | {"enum": list(row.get("options") or [])}
|
||||
|
||||
if row_type == "number":
|
||||
return {"type": "number"} | base
|
||||
|
||||
if row_type == "file":
|
||||
return _file_object_shape() | base
|
||||
|
||||
if row_type == "file-list":
|
||||
return {
|
||||
"type": "array",
|
||||
"items": _file_object_shape(),
|
||||
} | base
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _form_to_jsonschema(form: list[dict[str, Any]]) -> tuple[dict[str, Any], list[str]]:
    """Translate a user_input_form row list into (properties, required-list).

    Each row is a single-key dict such as ``{"text-input": {variable, label,
    required, ...}}``. Malformed rows, rows without a variable name, and
    unknown variable types are silently skipped (forward-compat).
    """
    props: dict[str, Any] = {}
    mandatory: list[str] = []
    for entry in form:
        # A well-formed row is exactly one {type: body} pair.
        if not (isinstance(entry, dict) and len(entry) == 1):
            continue
        row_type, body = next(iter(entry.items()))
        if not isinstance(body, dict):
            continue
        var_name = body.get("variable")
        if not var_name:
            continue
        fragment = _row_to_schema(row_type, body)
        if fragment is None:
            continue
        props[var_name] = fragment
        if body.get("required"):
            mandatory.append(var_name)
    return props, mandatory
|
||||
|
||||
|
||||
def resolve_app_config(app: App) -> tuple[dict[str, Any], list[dict[str, Any]]]:
    """Resolve `(features_dict, user_input_form)` for parameters / schema derivation.

    Workflow-backed modes (advanced-chat, workflow) read from the app's
    published workflow; all other modes read from `app_model_config`.

    Raises `AppUnavailableError` on misconfigured apps.
    """
    if app.mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
        workflow = app.workflow
        # No published workflow → the app cannot serve requests.
        if workflow is None:
            raise AppUnavailableError()
        return (
            workflow.features_dict,
            cast(list[dict[str, Any]], workflow.user_input_form(to_old_structure=True)),
        )

    app_model_config = app.app_model_config
    if app_model_config is None:
        raise AppUnavailableError()
    features_dict = cast(dict[str, Any], app_model_config.to_dict())
    return features_dict, cast(list[dict[str, Any]], features_dict.get("user_input_form", []))
|
||||
|
||||
|
||||
def build_input_schema(app: App) -> dict[str, Any]:
    """Derive Draft 2020-12 JSON Schema from `user_input_form` + app mode.

    chat / agent-chat / advanced-chat: top-level `query` (required, minLength=1) + `inputs` object.
    completion / workflow: `inputs` object only.
    Raises `AppUnavailableError` on misconfigured apps.
    """
    _, form_rows = resolve_app_config(app)
    form_props, form_required = _form_to_jsonschema(form_rows)

    top_props: dict[str, Any] = {}
    top_required: list[str] = []

    # Conversational modes take a mandatory non-empty query string.
    if app.mode in _CHAT_FAMILY:
        top_props["query"] = {"type": "string", "minLength": 1}
        top_required.append("query")

    top_props["inputs"] = {
        "type": "object",
        "properties": form_props,
        "required": form_required,
        "additionalProperties": False,
    }
    top_required.append("inputs")

    return {
        "$schema": JSON_SCHEMA_DRAFT,
        "type": "object",
        "properties": top_props,
        "required": top_required,
    }
|
||||
@ -1,112 +0,0 @@
|
||||
"""Shared response substructures for openapi endpoints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Literal
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
# Server-side cap on `limit` query param for any /openapi/v1/* list endpoint.
|
||||
# Sibling endpoints (`/apps`, `/account/sessions`, future routes) all clamp to
|
||||
# this; do not introduce per-endpoint caps without raising the constant.
|
||||
MAX_PAGE_LIMIT = 200
|
||||
|
||||
|
||||
class UsageInfo(BaseModel):
    """Token accounting for a single model invocation."""

    # Counts default to 0 so callers may omit usage when it is unknown.
    prompt_tokens: int = 0
    completion_tokens: int = 0
    total_tokens: int = 0
|
||||
|
||||
|
||||
class MessageMetadata(BaseModel):
    """Per-message metadata: optional token usage plus retrieval citations."""

    usage: UsageInfo | None = None
    # default_factory keeps the mutable default explicit (same semantics as `= []`).
    retriever_resources: list[dict[str, Any]] = Field(default_factory=list)
|
||||
|
||||
|
||||
class PaginationEnvelope[T](BaseModel):
|
||||
"""Canonical pagination envelope for `/openapi/v1/*` list endpoints."""
|
||||
|
||||
page: int
|
||||
limit: int
|
||||
total: int
|
||||
has_more: bool
|
||||
data: list[T]
|
||||
|
||||
@classmethod
|
||||
def build(cls, *, page: int, limit: int, total: int, items: list[T]) -> PaginationEnvelope[T]:
|
||||
return cls(page=page, limit=limit, total=total, has_more=page * limit < total, data=items)
|
||||
|
||||
|
||||
class AppListRow(BaseModel):
    """One row inside the GET /apps pagination envelope."""

    id: str
    name: str
    description: str | None = None
    mode: str
    # Each tag is serialised as {"name": ...}.
    tags: list[dict[str, str]] = []
    # ISO-8601 string, or None when the app has never been updated.
    updated_at: str | None = None
    created_by_name: str | None = None
    workspace_id: str | None = None
    workspace_name: str | None = None
|
||||
|
||||
|
||||
class AppInfoResponse(BaseModel):
    """Basic app metadata shared by info-style responses."""

    id: str
    name: str
    description: str | None = None
    mode: str
    author: str | None = None
    # Each tag is serialised as {"name": ...}.
    tags: list[dict[str, str]] = []
|
||||
|
||||
|
||||
class AppDescribeInfo(AppInfoResponse):
    """`info` block of GET /apps/<id>/describe — basic info plus API status."""

    # ISO-8601 string, or None when never updated.
    updated_at: str | None = None
    service_api_enabled: bool
|
||||
|
||||
|
||||
class AppDescribeResponse(BaseModel):
    """GET /apps/<id>/describe body; blocks not requested via `?fields=` remain None."""

    info: AppDescribeInfo | None = None
    parameters: dict[str, Any] | None = None
    input_schema: dict[str, Any] | None = None
|
||||
|
||||
|
||||
class ChatMessageResponse(BaseModel):
    """Blocking-mode response body for chat-family runs."""

    event: str
    task_id: str
    id: str
    message_id: str
    conversation_id: str
    mode: str
    answer: str
    metadata: MessageMetadata = Field(default_factory=MessageMetadata)
    # Unix epoch seconds.
    created_at: int
|
||||
|
||||
|
||||
class CompletionMessageResponse(BaseModel):
    """Blocking-mode response body for completion runs (no conversation)."""

    event: str
    task_id: str
    id: str
    message_id: str
    mode: str
    answer: str
    metadata: MessageMetadata = Field(default_factory=MessageMetadata)
    # Unix epoch seconds.
    created_at: int
|
||||
|
||||
|
||||
class WorkflowRunData(BaseModel):
    """Result payload for one workflow run."""

    id: str
    workflow_id: str
    status: str
    outputs: dict[str, Any] = Field(default_factory=dict)
    # Populated only on failed runs.
    error: str | None = None
    elapsed_time: float | None = None
    total_tokens: int | None = None
    total_steps: int | None = None
    # Unix epoch seconds; None when the run has not reached that stage.
    created_at: int | None = None
    finished_at: int | None = None
|
||||
|
||||
|
||||
class WorkflowRunResponse(BaseModel):
    """Blocking-mode response body for workflow runs."""

    workflow_run_id: str
    task_id: str
    # Discriminator pinned to "workflow" for this response shape.
    mode: Literal["workflow"] = "workflow"
    data: WorkflowRunData
|
||||
@ -1,236 +0,0 @@
|
||||
"""User-scoped account endpoints. /account is the bearer-authed
|
||||
identity read; /account/sessions and /account/sessions/<id> manage
|
||||
the user's active OAuth tokens.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import UTC, datetime
|
||||
|
||||
from flask import g, request
|
||||
from flask_restx import Resource
|
||||
from sqlalchemy import and_, select, update
|
||||
from werkzeug.exceptions import BadRequest, NotFound
|
||||
|
||||
from controllers.openapi import openapi_ns
|
||||
from controllers.openapi._models import MAX_PAGE_LIMIT, PaginationEnvelope
|
||||
from extensions.ext_database import db
|
||||
from extensions.ext_redis import redis_client
|
||||
from libs.oauth_bearer import (
|
||||
ACCEPT_USER_ANY,
|
||||
TOKEN_CACHE_KEY_FMT,
|
||||
AuthContext,
|
||||
SubjectType,
|
||||
validate_bearer,
|
||||
)
|
||||
from libs.rate_limit import (
|
||||
LIMIT_ME_PER_ACCOUNT,
|
||||
LIMIT_ME_PER_EMAIL,
|
||||
enforce,
|
||||
)
|
||||
from models import Account, OAuthAccessToken, Tenant, TenantAccountJoin
|
||||
|
||||
|
||||
@openapi_ns.route("/account")
|
||||
class AccountApi(Resource):
|
||||
@validate_bearer(accept=ACCEPT_USER_ANY)
|
||||
def get(self):
|
||||
ctx = g.auth_ctx
|
||||
|
||||
if ctx.subject_type == SubjectType.EXTERNAL_SSO:
|
||||
enforce(LIMIT_ME_PER_EMAIL, key=f"subject:{ctx.subject_email}")
|
||||
else:
|
||||
enforce(LIMIT_ME_PER_ACCOUNT, key=f"account:{ctx.account_id}")
|
||||
|
||||
if ctx.subject_type == SubjectType.EXTERNAL_SSO:
|
||||
return {
|
||||
"subject_type": ctx.subject_type,
|
||||
"subject_email": ctx.subject_email,
|
||||
"subject_issuer": ctx.subject_issuer,
|
||||
"account": None,
|
||||
"workspaces": [],
|
||||
"default_workspace_id": None,
|
||||
}
|
||||
|
||||
account = (
|
||||
db.session.query(Account).filter(Account.id == ctx.account_id).one_or_none() if ctx.account_id else None
|
||||
)
|
||||
memberships = _load_memberships(ctx.account_id) if ctx.account_id else []
|
||||
default_ws_id = _pick_default_workspace(memberships)
|
||||
|
||||
return {
|
||||
"subject_type": ctx.subject_type,
|
||||
"subject_email": ctx.subject_email or (account.email if account else None),
|
||||
"account": _account_payload(account) if account else None,
|
||||
"workspaces": [_workspace_payload(m) for m in memberships],
|
||||
"default_workspace_id": default_ws_id,
|
||||
}
|
||||
|
||||
|
||||
@openapi_ns.route("/account/sessions/self")
|
||||
class AccountSessionsSelfApi(Resource):
|
||||
@validate_bearer(accept=ACCEPT_USER_ANY)
|
||||
def delete(self):
|
||||
ctx = g.auth_ctx
|
||||
_require_oauth_subject(ctx)
|
||||
_revoke_token_by_id(str(ctx.token_id))
|
||||
return {"status": "revoked"}, 200
|
||||
|
||||
|
||||
@openapi_ns.route("/account/sessions")
|
||||
class AccountSessionsApi(Resource):
|
||||
@validate_bearer(accept=ACCEPT_USER_ANY)
|
||||
def get(self):
|
||||
ctx = g.auth_ctx
|
||||
now = datetime.now(UTC)
|
||||
page = int(request.args.get("page", "1"))
|
||||
limit = min(int(request.args.get("limit", "100")), MAX_PAGE_LIMIT)
|
||||
|
||||
all_rows = db.session.execute(
|
||||
select(
|
||||
OAuthAccessToken.id,
|
||||
OAuthAccessToken.prefix,
|
||||
OAuthAccessToken.client_id,
|
||||
OAuthAccessToken.device_label,
|
||||
OAuthAccessToken.created_at,
|
||||
OAuthAccessToken.last_used_at,
|
||||
OAuthAccessToken.expires_at,
|
||||
)
|
||||
.where(
|
||||
and_(
|
||||
*_subject_match(ctx),
|
||||
OAuthAccessToken.revoked_at.is_(None),
|
||||
OAuthAccessToken.token_hash.is_not(None),
|
||||
OAuthAccessToken.expires_at > now,
|
||||
)
|
||||
)
|
||||
.order_by(OAuthAccessToken.created_at.desc())
|
||||
).all()
|
||||
|
||||
total = len(all_rows)
|
||||
sliced = all_rows[(page - 1) * limit : page * limit]
|
||||
|
||||
items = [
|
||||
{
|
||||
"id": str(r.id),
|
||||
"prefix": r.prefix,
|
||||
"client_id": r.client_id,
|
||||
"device_label": r.device_label,
|
||||
"created_at": _iso(r.created_at),
|
||||
"last_used_at": _iso(r.last_used_at),
|
||||
"expires_at": _iso(r.expires_at),
|
||||
}
|
||||
for r in sliced
|
||||
]
|
||||
|
||||
return (
|
||||
PaginationEnvelope.build(page=page, limit=limit, total=total, items=items).model_dump(mode="json"),
|
||||
200,
|
||||
)
|
||||
|
||||
|
||||
@openapi_ns.route("/account/sessions/<string:session_id>")
|
||||
class AccountSessionByIdApi(Resource):
|
||||
@validate_bearer(accept=ACCEPT_USER_ANY)
|
||||
def delete(self, session_id: str):
|
||||
ctx = g.auth_ctx
|
||||
_require_oauth_subject(ctx)
|
||||
|
||||
# Subject-match guard. 404 (not 403) on cross-subject so the
|
||||
# endpoint doesn't leak token IDs that belong to other subjects.
|
||||
owns = db.session.execute(
|
||||
select(OAuthAccessToken.id).where(
|
||||
and_(
|
||||
OAuthAccessToken.id == session_id,
|
||||
*_subject_match(ctx),
|
||||
)
|
||||
)
|
||||
).first()
|
||||
if owns is None:
|
||||
raise NotFound("session not found")
|
||||
|
||||
_revoke_token_by_id(session_id)
|
||||
return {"status": "revoked"}, 200
|
||||
|
||||
|
||||
def _subject_match(ctx: AuthContext) -> tuple:
    """Where-clauses that scope a query to the bearer's subject. Works
    for both account (account_id) and external_sso (email + issuer).
    """
    if ctx.subject_type == SubjectType.ACCOUNT:
        return (OAuthAccessToken.account_id == str(ctx.account_id),)
    # External-SSO tokens are identified by email + issuer and must not be
    # linked to any local account row.
    return (
        OAuthAccessToken.subject_email == ctx.subject_email,
        OAuthAccessToken.subject_issuer == ctx.subject_issuer,
        OAuthAccessToken.account_id.is_(None),
    )
|
||||
|
||||
|
||||
def _require_oauth_subject(ctx: AuthContext) -> None:
    """Reject bearers that were not issued via OAuth (e.g. PATs) with 400."""
    if not ctx.source.startswith("oauth"):
        raise BadRequest(
            "this endpoint revokes OAuth bearer tokens; use /openapi/v1/personal-access-tokens/self for PATs"
        )
|
||||
|
||||
|
||||
def _revoke_token_by_id(token_id: str) -> None:
    """Revoke one OAuth token and evict its auth-cache entry.

    Idempotent: revoking an already-revoked id is a no-op.
    """
    # Snapshot pre-revoke hash for cache invalidation; UPDATE WHERE
    # makes double-revoke idempotent.
    row = (
        db.session.query(OAuthAccessToken.token_hash)
        .filter(
            OAuthAccessToken.id == token_id,
            OAuthAccessToken.revoked_at.is_(None),
        )
        .one_or_none()
    )
    pre_revoke_hash = row[0] if row else None

    # Null the hash so the token can never validate again, even if clocks skew.
    stmt = (
        update(OAuthAccessToken)
        .where(
            OAuthAccessToken.id == token_id,
            OAuthAccessToken.revoked_at.is_(None),
        )
        .values(revoked_at=datetime.now(UTC), token_hash=None)
    )
    db.session.execute(stmt)
    db.session.commit()

    # Only after the DB commit: drop the cached validation entry.
    if pre_revoke_hash:
        redis_client.delete(TOKEN_CACHE_KEY_FMT.format(hash=pre_revoke_hash))
|
||||
|
||||
|
||||
def _iso(dt: datetime | None) -> str | None:
|
||||
if dt is None:
|
||||
return None
|
||||
if dt.tzinfo is None:
|
||||
dt = dt.replace(tzinfo=UTC)
|
||||
return dt.isoformat().replace("+00:00", "Z")
|
||||
|
||||
|
||||
def _load_memberships(account_id):
    """Return all (TenantAccountJoin, Tenant) pairs for the given account."""
    return (
        db.session.query(TenantAccountJoin, Tenant)
        .join(Tenant, Tenant.id == TenantAccountJoin.tenant_id)
        .filter(TenantAccountJoin.account_id == account_id)
        .all()
    )
|
||||
|
||||
|
||||
def _pick_default_workspace(memberships) -> str | None:
|
||||
if not memberships:
|
||||
return None
|
||||
for join, tenant in memberships:
|
||||
if getattr(join, "current", False):
|
||||
return str(tenant.id)
|
||||
return str(memberships[0][1].id)
|
||||
|
||||
|
||||
def _workspace_payload(row) -> dict:
|
||||
join, tenant = row
|
||||
return {"id": str(tenant.id), "name": tenant.name, "role": getattr(join, "role", "")}
|
||||
|
||||
|
||||
def _account_payload(account) -> dict:
|
||||
return {"id": str(account.id), "email": account.email, "name": account.name}
|
||||
@ -1,198 +0,0 @@
|
||||
"""POST /openapi/v1/apps/<app_id>/run — mode-agnostic runner."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from collections.abc import Callable, Iterator, Mapping
|
||||
from contextlib import contextmanager
|
||||
from typing import Any, Literal
|
||||
from uuid import UUID
|
||||
|
||||
from flask import request
|
||||
from flask_restx import Resource
|
||||
from pydantic import BaseModel, ValidationError, field_validator
|
||||
from werkzeug.exceptions import BadRequest, HTTPException, InternalServerError, NotFound, UnprocessableEntity
|
||||
|
||||
import services
|
||||
from controllers.openapi import openapi_ns
|
||||
from controllers.openapi._audit import emit_app_run
|
||||
from controllers.openapi._models import (
|
||||
ChatMessageResponse,
|
||||
CompletionMessageResponse,
|
||||
WorkflowRunResponse,
|
||||
)
|
||||
from controllers.openapi.auth.composition import OAUTH_BEARER_PIPELINE
|
||||
from controllers.service_api.app.error import (
|
||||
AppUnavailableError,
|
||||
CompletionRequestError,
|
||||
ConversationCompletedError,
|
||||
ProviderModelCurrentlyNotSupportError,
|
||||
ProviderNotInitializeError,
|
||||
ProviderQuotaExceededError,
|
||||
)
|
||||
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from core.errors.error import (
|
||||
ModelCurrentlyNotSupportError,
|
||||
ProviderTokenNotInitError,
|
||||
QuotaExceededError,
|
||||
)
|
||||
from graphon.model_runtime.errors.invoke import InvokeError
|
||||
from libs import helper
|
||||
from libs.helper import UUIDStrOrEmpty
|
||||
from libs.oauth_bearer import Scope
|
||||
from models.model import App, AppMode
|
||||
from services.app_generate_service import AppGenerateService
|
||||
from services.errors.app import (
|
||||
IsDraftWorkflowError,
|
||||
WorkflowIdFormatError,
|
||||
WorkflowNotFoundError,
|
||||
)
|
||||
from services.errors.llm import InvokeRateLimitError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AppRunRequest(BaseModel):
    """Request body for POST /apps/<app_id>/run, shared by all app modes."""

    inputs: dict[str, Any]
    # Required for chat-family apps, forbidden for workflow apps (enforced
    # per-mode in the runner, not here).
    query: str | None = None
    files: list[dict[str, Any]] | None = None
    response_mode: Literal["blocking", "streaming"] | None = None
    conversation_id: UUIDStrOrEmpty | None = None
    auto_generate_name: bool = True
    workflow_id: str | None = None

    @field_validator("conversation_id", mode="before")
    @classmethod
    def _normalize_conv(cls, value: str | UUID | None) -> str | None:
        """Collapse blank strings to None; anything else must be a valid UUID."""
        if isinstance(value, str):
            value = value.strip()
        if not value:
            return None
        try:
            return helper.uuid_value(value)
        except ValueError as exc:
            raise ValueError("conversation_id must be a valid UUID") from exc
|
||||
|
||||
|
||||
@contextmanager
def _translate_service_errors() -> Iterator[None]:
    """Map service/domain exceptions to the HTTP errors this API exposes.

    Fix: every re-raise now chains the original exception (`raise ... from ex`,
    flake8-bugbear B904) so tracebacks keep the root cause instead of logging
    "During handling of the above exception, another exception occurred".
    """
    try:
        yield
    except WorkflowNotFoundError as ex:
        raise NotFound(str(ex)) from ex
    except (IsDraftWorkflowError, WorkflowIdFormatError) as ex:
        raise BadRequest(str(ex)) from ex
    except services.errors.conversation.ConversationNotExistsError as ex:
        raise NotFound("Conversation Not Exists.") from ex
    except services.errors.conversation.ConversationCompletedError as ex:
        raise ConversationCompletedError() from ex
    except services.errors.app_model_config.AppModelConfigBrokenError as ex:
        logger.exception("App model config broken.")
        raise AppUnavailableError() from ex
    except ProviderTokenNotInitError as ex:
        raise ProviderNotInitializeError(ex.description) from ex
    except QuotaExceededError as ex:
        raise ProviderQuotaExceededError() from ex
    except ModelCurrentlyNotSupportError as ex:
        raise ProviderModelCurrentlyNotSupportError() from ex
    except InvokeRateLimitError as ex:
        raise InvokeRateLimitHttpError(ex.description) from ex
    except InvokeError as ex:
        raise CompletionRequestError(ex.description) from ex
|
||||
|
||||
|
||||
def _unpack_blocking(response: Any) -> Mapping[str, Any]:
|
||||
if isinstance(response, tuple):
|
||||
response = response[0]
|
||||
if not isinstance(response, Mapping):
|
||||
raise InternalServerError("blocking generate returned non-mapping response")
|
||||
return response
|
||||
|
||||
|
||||
def _generate(app: App, caller: Any, args: dict[str, Any], streaming: bool):
    """Invoke the shared generate service with OPENAPI provenance."""
    return AppGenerateService.generate(
        app_model=app,
        user=caller,
        args=args,
        invoke_from=InvokeFrom.OPENAPI,
        streaming=streaming,
    )
|
||||
|
||||
|
||||
def _run_chat(app: App, caller: Any, payload: AppRunRequest, streaming: bool):
    """Run a chat-family app; requires a non-blank `query`.

    Returns `(stream, None)` when streaming, else `(None, body_dict)`.
    """
    query = payload.query
    if query is None or not query.strip():
        raise UnprocessableEntity("query_required_for_chat")
    args = payload.model_dump(exclude_none=True)
    with _translate_service_errors():
        result = _generate(app, caller, args, streaming)
    if streaming:
        return result, None
    body = ChatMessageResponse.model_validate(_unpack_blocking(result))
    return None, body.model_dump(mode="json")
|
||||
|
||||
|
||||
def _run_completion(app: App, caller: Any, payload: AppRunRequest, streaming: bool):
    """Run a completion app; chat-only knobs are neutralised.

    Returns `(stream, None)` when streaming, else `(None, body_dict)`.
    """
    args = payload.model_dump(exclude_none=True)
    # Completion runs never auto-name conversations and always carry a query key.
    args["auto_generate_name"] = False
    args.setdefault("query", "")
    with _translate_service_errors():
        result = _generate(app, caller, args, streaming)
    if streaming:
        return result, None
    body = CompletionMessageResponse.model_validate(_unpack_blocking(result))
    return None, body.model_dump(mode="json")
|
||||
|
||||
|
||||
def _run_workflow(app: App, caller: Any, payload: AppRunRequest, streaming: bool):
    """Run a workflow app; chat-only fields are rejected or stripped.

    Returns `(stream, None)` when streaming, else `(None, body_dict)`.
    """
    if payload.query is not None:
        raise UnprocessableEntity("query_not_supported_for_workflow")
    chat_only_fields = {"query", "conversation_id", "auto_generate_name"}
    args = payload.model_dump(exclude=chat_only_fields, exclude_none=True)
    with _translate_service_errors():
        result = _generate(app, caller, args, streaming)
    if streaming:
        return result, None
    body = WorkflowRunResponse.model_validate(_unpack_blocking(result))
    return None, body.model_dump(mode="json")
|
||||
|
||||
|
||||
# Mode → runner dispatch table. Modes absent here are rejected by AppRunApi
# with 422 `mode_not_runnable`.
_DISPATCH: dict[AppMode, Callable[[App, Any, AppRunRequest, bool], tuple[Any, dict[str, Any] | None]]] = {
    AppMode.CHAT: _run_chat,
    AppMode.AGENT_CHAT: _run_chat,
    AppMode.ADVANCED_CHAT: _run_chat,
    AppMode.COMPLETION: _run_completion,
    AppMode.WORKFLOW: _run_workflow,
}
|
||||
|
||||
|
||||
@openapi_ns.route("/apps/<string:app_id>/run")
|
||||
class AppRunApi(Resource):
|
||||
@OAUTH_BEARER_PIPELINE.guard(scope=Scope.APPS_RUN)
|
||||
def post(self, app_id: str, app_model: App, caller, caller_kind: str):
|
||||
body = request.get_json(silent=True) or {}
|
||||
body.pop("user", None)
|
||||
try:
|
||||
payload = AppRunRequest.model_validate(body)
|
||||
except ValidationError as exc:
|
||||
raise UnprocessableEntity(exc.json())
|
||||
|
||||
handler = _DISPATCH.get(app_model.mode)
|
||||
if handler is None:
|
||||
raise UnprocessableEntity("mode_not_runnable")
|
||||
|
||||
streaming = payload.response_mode == "streaming"
|
||||
try:
|
||||
stream_obj, blocking_body = handler(app_model, caller, payload, streaming)
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception:
|
||||
logger.exception("internal server error.")
|
||||
raise InternalServerError()
|
||||
|
||||
emit_app_run(
|
||||
app_id=app_model.id,
|
||||
tenant_id=app_model.tenant_id,
|
||||
caller_kind=caller_kind,
|
||||
mode=str(app_model.mode),
|
||||
)
|
||||
|
||||
if streaming:
|
||||
return helper.compact_generate_response(stream_obj)
|
||||
return blocking_body, 200
|
||||
@ -1,315 +0,0 @@
|
||||
"""GET /openapi/v1/apps and per-app reads.
|
||||
|
||||
Decorator order: `method_decorators` is innermost-first. `validate_bearer`
|
||||
is last → outermost → sets `g.auth_ctx` before `require_scope` reads it.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid as _uuid
|
||||
from typing import Any
|
||||
|
||||
import sqlalchemy as sa
|
||||
from flask import g, request
|
||||
from flask_restx import Resource
|
||||
from pydantic import BaseModel, ConfigDict, Field, ValidationError, field_validator
|
||||
from werkzeug.exceptions import Conflict, NotFound, UnprocessableEntity
|
||||
|
||||
from controllers.common.fields import Parameters
|
||||
from controllers.openapi import openapi_ns
|
||||
from controllers.openapi._input_schema import EMPTY_INPUT_SCHEMA, build_input_schema, resolve_app_config
|
||||
from controllers.openapi._models import (
|
||||
MAX_PAGE_LIMIT,
|
||||
AppDescribeInfo,
|
||||
AppDescribeResponse,
|
||||
AppListRow,
|
||||
PaginationEnvelope,
|
||||
)
|
||||
from controllers.service_api.app.error import AppUnavailableError
|
||||
from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict
|
||||
from extensions.ext_database import db
|
||||
from libs.oauth_bearer import (
|
||||
ACCEPT_USER_ANY,
|
||||
AuthContext,
|
||||
Scope,
|
||||
SubjectType,
|
||||
require_scope,
|
||||
require_workspace_member,
|
||||
validate_bearer,
|
||||
)
|
||||
from models import App, Tenant
|
||||
from models.model import AppMode
|
||||
from services.app_service import AppService
|
||||
from services.tag_service import TagService
|
||||
|
||||
# Shared decorator stack for read-only app endpoints. flask-restx applies
# method_decorators innermost-first, so validate_bearer (listed last) runs
# outermost and sets g.auth_ctx before require_scope reads it.
_APPS_READ_DECORATORS = [
    require_scope(Scope.APPS_READ),
    validate_bearer(accept=ACCEPT_USER_ANY),
]

# Allow-listed members of the `?fields=` query param on /apps/<id>/describe.
_ALLOWED_DESCRIBE_FIELDS: frozenset[str] = frozenset({"info", "parameters", "input_schema"})
|
||||
|
||||
|
||||
class AppDescribeQuery(BaseModel):
    """`?fields=` allow-list for GET /apps/<id>/describe.

    Empty / omitted → all blocks. Unknown member → ValidationError → 422.
    """

    model_config = ConfigDict(extra="forbid")

    fields: set[str] | None = None
    workspace_id: str | None = None

    @field_validator("workspace_id", mode="before")
    @classmethod
    def _validate_workspace_id(cls, v: object) -> str | None:
        """Accept a UUID string or nothing; blank collapses to None."""
        if v is None or v == "":
            return None
        if not isinstance(v, str):
            raise ValueError("workspace_id must be a string")
        try:
            _uuid.UUID(v)
        except ValueError as exc:
            # Fix: chain the original error (B904) so validation output keeps context.
            raise ValueError("workspace_id must be a valid UUID") from exc
        return v

    @field_validator("fields", mode="before")
    @classmethod
    def _parse_fields(cls, v: object) -> set[str] | None:
        """Parse a comma-separated list, rejecting unknown members."""
        if v is None or v == "":
            return None
        if not isinstance(v, str):
            raise ValueError("fields must be a comma-separated string")
        members = {m.strip() for m in v.split(",") if m.strip()}
        unknown = members - _ALLOWED_DESCRIBE_FIELDS
        if unknown:
            raise ValueError(f"unknown field(s): {sorted(unknown)}")
        return members
|
||||
|
||||
|
||||
# Stand-in parameters block used when an app's config is broken, so the
# describe endpoint degrades gracefully instead of failing the whole response.
_EMPTY_PARAMETERS: dict[str, Any] = {
    "opening_statement": None,
    "suggested_questions": [],
    "user_input_form": [],
    "file_upload": None,
    "system_parameters": {},
}
|
||||
|
||||
|
||||
class AppReadResource(Resource):
    """Base for per-app read endpoints; subclasses call `_load()` for SSO/membership/exists checks."""

    method_decorators = _APPS_READ_DECORATORS

    def _load(self, app_id: str, workspace_id: str | None = None) -> tuple[App, AuthContext]:
        """Resolve `app_id` (UUID, or app name scoped to `workspace_id`) to an App.

        Raises NotFound for missing/non-account subjects, UnprocessableEntity
        for name lookup without a workspace, Conflict for ambiguous names.
        """
        ctx = g.auth_ctx
        # Only local accounts may read apps; answer 404 (not 403) to avoid
        # confirming app existence to other subject types.
        if ctx.subject_type != SubjectType.ACCOUNT or ctx.account_id is None:
            raise NotFound("app not found")

        try:
            parsed_uuid = _uuid.UUID(app_id)
            is_uuid = True
        except ValueError:
            parsed_uuid = None
            is_uuid = False

        if is_uuid:
            app = db.session.get(App, str(parsed_uuid))  # normalised dashed form
            if not app or app.status != "normal":
                raise NotFound("app not found")
        else:
            # Name-based lookup must be scoped to one workspace to be unambiguous.
            if not workspace_id:
                raise UnprocessableEntity("workspace_id is required for name-based lookup")
            matches = list(
                db.session.execute(
                    sa.select(App).where(
                        App.name == app_id,
                        App.tenant_id == workspace_id,
                        App.status == "normal",
                    )
                ).scalars()
            )
            if len(matches) == 0:
                raise NotFound("app not found")
            if len(matches) > 1:
                # Duplicate names: report 409 with a table of candidate UUIDs.
                lines = [f"app name {app_id!r} is ambiguous — re-run with a UUID:\n\n"]
                lines.append(f" {'ID':<36} {'MODE':<12} NAME\n")
                for m in matches:
                    lines.append(f" {str(m.id):<36} {str(m.mode.value):<12} {m.name}\n")
                raise Conflict("".join(lines))
            app = matches[0]

        # Membership check happens after resolution so it applies to both paths.
        require_workspace_member(ctx, str(app.tenant_id))
        return app, ctx
|
||||
|
||||
|
||||
def parameters_payload(app: App) -> dict:
    """Mirrors service_api/app/app.py::AppParameterApi response body."""
    features_dict, user_input_form = resolve_app_config(app)
    parameters = get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form)
    # Round-trip through the Parameters model to normalise the shape.
    return Parameters.model_validate(parameters).model_dump(mode="json")
|
||||
|
||||
|
||||
@openapi_ns.route("/apps/<string:app_id>/describe")
|
||||
class AppDescribeApi(AppReadResource):
|
||||
def get(self, app_id: str):
|
||||
try:
|
||||
query = AppDescribeQuery.model_validate(request.args.to_dict(flat=True))
|
||||
except ValidationError as exc:
|
||||
raise UnprocessableEntity(exc.json())
|
||||
|
||||
app, _ = self._load(app_id, workspace_id=query.workspace_id)
|
||||
|
||||
requested = query.fields
|
||||
want_info = requested is None or "info" in requested
|
||||
want_params = requested is None or "parameters" in requested
|
||||
want_schema = requested is None or "input_schema" in requested
|
||||
|
||||
info = (
|
||||
AppDescribeInfo(
|
||||
id=str(app.id),
|
||||
name=app.name,
|
||||
mode=app.mode,
|
||||
description=app.description,
|
||||
tags=[{"name": t.name} for t in app.tags],
|
||||
author=app.author_name,
|
||||
updated_at=app.updated_at.isoformat() if app.updated_at else None,
|
||||
service_api_enabled=bool(app.enable_api),
|
||||
)
|
||||
if want_info
|
||||
else None
|
||||
)
|
||||
|
||||
parameters: dict[str, Any] | None = None
|
||||
input_schema: dict[str, Any] | None = None
|
||||
if want_params:
|
||||
try:
|
||||
parameters = parameters_payload(app)
|
||||
except AppUnavailableError:
|
||||
parameters = dict(_EMPTY_PARAMETERS)
|
||||
if want_schema:
|
||||
try:
|
||||
input_schema = build_input_schema(app)
|
||||
except AppUnavailableError:
|
||||
input_schema = dict(EMPTY_INPUT_SCHEMA)
|
||||
|
||||
return (
|
||||
AppDescribeResponse(
|
||||
info=info,
|
||||
parameters=parameters,
|
||||
input_schema=input_schema,
|
||||
).model_dump(mode="json", exclude_none=False),
|
||||
200,
|
||||
)
|
||||
|
||||
|
||||
class AppListQuery(BaseModel):
    """`mode` is a closed enum — unknown values 422 instead of silently-empty data."""

    workspace_id: str
    page: int = Field(1, ge=1)
    # Clamped server-side by the shared list-endpoint cap.
    limit: int = Field(20, ge=1, le=MAX_PAGE_LIMIT)
    mode: AppMode | None = None
    # `name` may also be a UUID, which triggers a direct-lookup shortcut.
    name: str | None = Field(None, max_length=200)
    tag: str | None = Field(None, max_length=100)
|
||||
|
||||
|
||||
@openapi_ns.route("/apps")
|
||||
class AppListApi(Resource):
|
||||
method_decorators = _APPS_READ_DECORATORS
|
||||
|
||||
def get(self):
|
||||
ctx = g.auth_ctx
|
||||
if ctx.subject_type != SubjectType.ACCOUNT or ctx.account_id is None:
|
||||
return PaginationEnvelope[AppListRow].build(page=1, limit=0, total=0, items=[]).model_dump(mode="json"), 200
|
||||
|
||||
try:
|
||||
query = AppListQuery.model_validate(request.args.to_dict(flat=True))
|
||||
except ValidationError as exc:
|
||||
raise UnprocessableEntity(exc.json())
|
||||
|
||||
workspace_id = query.workspace_id
|
||||
require_workspace_member(ctx, workspace_id)
|
||||
|
||||
empty = (
|
||||
PaginationEnvelope[AppListRow]
|
||||
.build(page=query.page, limit=query.limit, total=0, items=[])
|
||||
.model_dump(mode="json"),
|
||||
200,
|
||||
)
|
||||
|
||||
if query.name:
|
||||
try:
|
||||
parsed_uuid = _uuid.UUID(query.name)
|
||||
except ValueError:
|
||||
parsed_uuid = None
|
||||
else:
|
||||
parsed_uuid = None
|
||||
|
||||
if parsed_uuid is not None:
|
||||
app = db.session.get(App, str(parsed_uuid))
|
||||
if not app or app.status != "normal" or str(app.tenant_id) != workspace_id:
|
||||
return empty
|
||||
tenant_name = db.session.execute(
|
||||
sa.select(Tenant.name).where(Tenant.id == workspace_id)
|
||||
).scalar_one_or_none()
|
||||
item = AppListRow(
|
||||
id=str(app.id),
|
||||
name=app.name,
|
||||
description=app.description,
|
||||
mode=app.mode,
|
||||
tags=[{"name": t.name} for t in app.tags],
|
||||
updated_at=app.updated_at.isoformat() if app.updated_at else None,
|
||||
created_by_name=getattr(app, "author_name", None),
|
||||
workspace_id=str(workspace_id),
|
||||
workspace_name=tenant_name,
|
||||
)
|
||||
env = PaginationEnvelope[AppListRow].build(page=1, limit=1, total=1, items=[item])
|
||||
return env.model_dump(mode="json"), 200
|
||||
|
||||
tag_ids: list[str] | None = None
|
||||
if query.tag:
|
||||
tags = TagService.get_tag_by_tag_name("app", workspace_id, query.tag)
|
||||
if not tags:
|
||||
return empty
|
||||
tag_ids = [tag.id for tag in tags]
|
||||
|
||||
args: dict[str, Any] = {
|
||||
"page": query.page,
|
||||
"limit": query.limit,
|
||||
"mode": query.mode.value if query.mode else "",
|
||||
"name": query.name,
|
||||
"status": "normal",
|
||||
}
|
||||
if tag_ids:
|
||||
args["tag_ids"] = tag_ids
|
||||
|
||||
pagination = AppService().get_paginate_apps(ctx.account_id, workspace_id, args)
|
||||
if pagination is None:
|
||||
return empty
|
||||
|
||||
tenant_name: str | None = None
|
||||
if pagination.items:
|
||||
tenant_name = db.session.execute(
|
||||
sa.select(Tenant.name).where(Tenant.id == workspace_id)
|
||||
).scalar_one_or_none()
|
||||
|
||||
items = [
|
||||
AppListRow(
|
||||
id=str(r.id),
|
||||
name=r.name,
|
||||
description=r.description,
|
||||
mode=r.mode,
|
||||
tags=[{"name": t.name} for t in r.tags],
|
||||
updated_at=r.updated_at.isoformat() if r.updated_at else None,
|
||||
created_by_name=getattr(r, "author_name", None),
|
||||
workspace_id=str(workspace_id),
|
||||
workspace_name=tenant_name,
|
||||
)
|
||||
for r in pagination.items
|
||||
]
|
||||
env = PaginationEnvelope[AppListRow].build(
|
||||
page=query.page, limit=query.limit, total=int(pagination.total), items=items
|
||||
)
|
||||
return env.model_dump(mode="json"), 200
|
||||
@ -1,101 +0,0 @@
|
||||
"""GET /openapi/v1/apps/permitted — external-subject app discovery (EE only)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlalchemy as sa
|
||||
from flask import request
|
||||
from flask_restx import Resource
|
||||
from pydantic import BaseModel, ConfigDict, Field, ValidationError
|
||||
from werkzeug.exceptions import UnprocessableEntity
|
||||
|
||||
from controllers.openapi import openapi_ns
|
||||
from controllers.openapi._models import (
|
||||
MAX_PAGE_LIMIT,
|
||||
AppListRow,
|
||||
PaginationEnvelope,
|
||||
)
|
||||
from extensions.ext_database import db
|
||||
from libs.device_flow_security import enterprise_only
|
||||
from libs.oauth_bearer import (
|
||||
ACCEPT_USER_EXT_SSO,
|
||||
Scope,
|
||||
require_scope,
|
||||
validate_bearer,
|
||||
)
|
||||
from models import App, Tenant
|
||||
from models.model import AppMode
|
||||
from services.enterprise.app_permitted_service import list_permitted_apps
|
||||
|
||||
|
||||
class AppPermittedListQuery(BaseModel):
    """Strict (`extra='forbid'`) — rejects `workspace_id`/`tag`/etc. that are valid on /apps but not here."""

    model_config = ConfigDict(extra="forbid")

    # 1-based page index.
    page: int = Field(1, ge=1)
    # Page size, capped by the shared MAX_PAGE_LIMIT constant.
    limit: int = Field(20, ge=1, le=MAX_PAGE_LIMIT)
    # Optional filter by app mode; None means "all modes".
    mode: AppMode | None = None
    # Optional name filter; bounded to avoid pathological query strings.
    name: str | None = Field(None, max_length=200)
|
||||
|
||||
|
||||
@openapi_ns.route("/apps/permitted")
class AppPermittedListApi(Resource):
    """GET /apps/permitted — list apps the external subject may access (EE only).

    The EE-side allow-list (`list_permitted_apps`) is authoritative for ids,
    ordering, and totals; this handler only hydrates local rows.
    """

    method_decorators = [
        require_scope(Scope.APPS_READ_PERMITTED),
        validate_bearer(accept=ACCEPT_USER_EXT_SSO),
        enterprise_only,
    ]

    def get(self):
        """Return a PaginationEnvelope of AppListRow for the permitted apps.

        Raises:
            UnprocessableEntity: when the query string fails strict validation.
        """
        try:
            query = AppPermittedListQuery.model_validate(request.args.to_dict(flat=True))
        except ValidationError as exc:
            # Chain the cause (B904) so tracebacks keep the pydantic detail.
            raise UnprocessableEntity(exc.json()) from exc

        page_result = list_permitted_apps(
            page=query.page,
            limit=query.limit,
            mode=query.mode.value if query.mode else None,
            name=query.name,
        )

        # No permitted ids on this page — still report the EE-side total.
        if not page_result.app_ids:
            env = PaginationEnvelope[AppListRow].build(
                page=query.page, limit=query.limit, total=page_result.total, items=[]
            )
            return env.model_dump(mode="json"), 200

        # Bulk-load local app rows and their tenants (one query each).
        apps_by_id = {
            str(a.id): a
            for a in db.session.execute(sa.select(App).where(App.id.in_(page_result.app_ids))).scalars().all()
        }
        tenant_ids = list({a.tenant_id for a in apps_by_id.values()})
        tenants_by_id = {
            str(t.id): t for t in db.session.execute(sa.select(Tenant).where(Tenant.id.in_(tenant_ids))).scalars().all()
        }

        items: list[AppListRow] = []
        # Iterate in the EE-provided order; drop ids with no local "normal" row.
        for app_id in page_result.app_ids:
            app = apps_by_id.get(app_id)
            if not app or app.status != "normal":
                continue
            tenant = tenants_by_id.get(str(app.tenant_id))
            items.append(
                AppListRow(
                    id=str(app.id),
                    name=app.name,
                    description=app.description,
                    mode=app.mode,
                    tags=[],  # tenant-scoped; not surfaced cross-tenant
                    updated_at=app.updated_at.isoformat() if app.updated_at else None,
                    created_by_name=None,  # cross-tenant author leak prevention
                    workspace_id=str(app.tenant_id),
                    workspace_name=tenant.name if tenant else None,
                )
            )

        # total/has_more reflect the EE-side allow-list; len(items) may be < limit when local rows are dropped.
        env = PaginationEnvelope[AppListRow].build(
            page=query.page, limit=query.limit, total=page_result.total, items=items
        )
        return env.model_dump(mode="json"), 200
|
||||
@ -1,3 +0,0 @@
|
||||
from controllers.openapi.auth.composition import OAUTH_BEARER_PIPELINE
|
||||
|
||||
__all__ = ["OAUTH_BEARER_PIPELINE"]
|
||||
@ -1,43 +0,0 @@
|
||||
"""`OAUTH_BEARER_PIPELINE` — the auth scheme for openapi `/run` endpoints.
|
||||
|
||||
Endpoints attach via `@OAUTH_BEARER_PIPELINE.guard(scope=…)`. No alternative
|
||||
paths. Read endpoints (`/apps`, `/info`, `/parameters`, `/describe`) skip
|
||||
the pipeline and use `validate_bearer + require_scope + require_workspace_member`
|
||||
inline — they don't need `AppAuthzCheck`/`CallerMount`.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from controllers.openapi.auth.pipeline import Pipeline
|
||||
from controllers.openapi.auth.steps import (
|
||||
AppAuthzCheck,
|
||||
AppResolver,
|
||||
BearerCheck,
|
||||
CallerMount,
|
||||
ScopeCheck,
|
||||
WorkspaceMembershipCheck,
|
||||
)
|
||||
from controllers.openapi.auth.strategies import (
|
||||
AccountMounter,
|
||||
AclStrategy,
|
||||
AppAuthzStrategy,
|
||||
EndUserMounter,
|
||||
MembershipStrategy,
|
||||
)
|
||||
from services.feature_service import FeatureService
|
||||
|
||||
|
||||
def _resolve_app_authz_strategy() -> AppAuthzStrategy:
    """Pick the app-authorization strategy for the current deployment.

    Webapp-auth enabled (EE) → per-app ACL; otherwise → tenant membership.
    Evaluated per request so feature-flag changes take effect immediately.
    """
    webapp_auth_enabled = FeatureService.get_system_features().webapp_auth.enabled
    return AclStrategy() if webapp_auth_enabled else MembershipStrategy()
|
||||
|
||||
|
||||
# Module-level singleton: the one auth scheme for openapi /run endpoints.
# Step order matters — identity resolution before scope check, app resolution
# before membership/authz, caller mounting last.
OAUTH_BEARER_PIPELINE = Pipeline(
    BearerCheck(),
    ScopeCheck(),
    AppResolver(),
    WorkspaceMembershipCheck(),
    AppAuthzCheck(_resolve_app_authz_strategy),
    CallerMount(AccountMounter(), EndUserMounter()),
)
|
||||
@ -1,46 +0,0 @@
|
||||
"""Mutable per-request context for the openapi auth pipeline.
|
||||
|
||||
Every field starts None / empty and is filled in by a step. The pipeline
|
||||
is the only thing that should construct or mutate Context — handlers
|
||||
read populated values via the decorator's kwargs unpacking.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Literal, Protocol
|
||||
|
||||
from flask import Request
|
||||
|
||||
from libs.oauth_bearer import Scope, SubjectType
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from models import App, Tenant
|
||||
|
||||
|
||||
@dataclass
class Context:
    """Per-request auth state: built by Pipeline.guard, filled in by steps."""

    # Fixed at construction by Pipeline.guard.
    request: Request
    required_scope: Scope
    # Populated by BearerCheck from the authenticated bearer.
    subject_type: SubjectType | None = None
    subject_email: str | None = None
    subject_issuer: str | None = None
    account_id: uuid.UUID | None = None
    scopes: frozenset[Scope] = field(default_factory=frozenset)
    token_id: uuid.UUID | None = None
    token_hash: str | None = None
    # Cached membership verdicts carried on the token (tenant_id -> bool).
    cached_verified_tenants: dict[str, bool] | None = None
    source: str | None = None
    expires_at: datetime | None = None
    # Populated by AppResolver from the route's app_id.
    app: App | None = None
    tenant: Tenant | None = None
    # Populated by CallerMount (Account or EndUser instance).
    caller: object | None = None
    caller_kind: Literal["account", "end_user"] | None = None
|
||||
|
||||
|
||||
class Step(Protocol):
    """One responsibility. Mutate ctx or raise to short-circuit."""

    # Structural interface: any callable taking a Context qualifies.
    def __call__(self, ctx: Context) -> None: ...
|
||||
@ -1,41 +0,0 @@
|
||||
"""Pipeline IS the auth scheme.
|
||||
|
||||
`Pipeline.guard(scope=…)` is the only attachment point for endpoints —
|
||||
that is the design lock-in: forgetting an auth layer is structurally
|
||||
impossible because there is no "sometimes wrap, sometimes don't" choice.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import wraps
|
||||
|
||||
from flask import request
|
||||
|
||||
from controllers.openapi.auth.context import Context, Step
|
||||
from libs.oauth_bearer import Scope
|
||||
|
||||
|
||||
class Pipeline:
    """Ordered sequence of auth steps; `guard` is the sole attachment point.

    Endpoints can only opt in via the decorator, so skipping an auth layer
    is structurally impossible.
    """

    def __init__(self, *steps: Step) -> None:
        self._steps = steps

    def run(self, ctx: Context) -> None:
        """Execute every step in order; any step may raise to short-circuit."""
        for one_step in self._steps:
            one_step(ctx)

    def guard(self, *, scope: Scope):
        """Decorator factory: run the pipeline, then inject resolved kwargs."""

        def decorator(view):
            @wraps(view)
            def decorated(*args, **kwargs):
                ctx = Context(request=request, required_scope=scope)
                self.run(ctx)
                # Hand the handler exactly what the steps resolved.
                resolved = {
                    "app_model": ctx.app,
                    "caller": ctx.caller,
                    "caller_kind": ctx.caller_kind,
                }
                kwargs.update(resolved)
                return view(*args, **kwargs)

            return decorated

        return decorator
|
||||
@ -1,131 +0,0 @@
|
||||
"""Pipeline steps. Each is one responsibility.
|
||||
|
||||
`BearerCheck` is the only step that touches the token registry; downstream
|
||||
steps see only the populated `Context`.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
|
||||
from werkzeug.exceptions import BadRequest, Forbidden, NotFound, Unauthorized
|
||||
|
||||
from configs import dify_config
|
||||
from controllers.openapi.auth.context import Context
|
||||
from controllers.openapi.auth.strategies import AppAuthzStrategy, CallerMounter
|
||||
from extensions.ext_database import db
|
||||
from libs.oauth_bearer import (
|
||||
InvalidBearerError,
|
||||
Scope,
|
||||
SubjectType,
|
||||
_extract_bearer, # type: ignore[attr-defined]
|
||||
check_workspace_membership,
|
||||
get_authenticator,
|
||||
)
|
||||
from models import App, Tenant, TenantStatus
|
||||
|
||||
|
||||
class BearerCheck:
    """Resolve bearer → populate identity fields on the context.

    Rate-limiting is enforced inside `BearerAuthenticator.authenticate`,
    so there is no separate rate-limit step in the pipeline.
    """

    def __call__(self, ctx: Context) -> None:
        token = _extract_bearer(ctx.request)
        if not token:
            raise Unauthorized("bearer required")

        try:
            authn = get_authenticator().authenticate(token)
        except InvalidBearerError as e:
            # Chain the cause (B904) so logs keep the original failure reason.
            raise Unauthorized(str(e)) from e

        # Copy every identity/claims field onto the context; downstream
        # steps read only ctx, never the token registry.
        ctx.subject_type = authn.subject_type
        ctx.subject_email = authn.subject_email
        ctx.subject_issuer = authn.subject_issuer
        ctx.account_id = authn.account_id
        ctx.scopes = frozenset(authn.scopes)
        ctx.source = authn.source
        ctx.token_id = authn.token_id
        ctx.expires_at = authn.expires_at
        ctx.token_hash = authn.token_hash
        # Defensive copy — the authenticator's dict must not be shared state.
        ctx.cached_verified_tenants = dict(authn.verified_tenants)
|
||||
|
||||
|
||||
class ScopeCheck:
    """Verify that the bearer's granted scopes cover the endpoint's requirement.

    Scopes were populated by BearerCheck; FULL acts as a wildcard grant.
    """

    def __call__(self, ctx: Context) -> None:
        granted = ctx.scopes
        if Scope.FULL not in granted and ctx.required_scope not in granted:
            raise Forbidden("insufficient_scope")
|
||||
|
||||
|
||||
class AppResolver:
    """Resolve the route's app_id into ctx.app + ctx.tenant.

    Every endpoint using the OAuth bearer pipeline must declare
    ``<string:app_id>`` in its route — that is the design lock-in (no body /
    header coupling).
    """

    def __call__(self, ctx: Context) -> None:
        view_args = ctx.request.view_args or {}
        app_id = view_args.get("app_id")
        if not app_id:
            raise BadRequest("app_id is required in path")

        app = db.session.get(App, app_id)
        # Non-normal apps (deleted/archived) are indistinguishable from missing.
        if app is None or app.status != "normal":
            raise NotFound("app not found")
        if not app.enable_api:
            raise Forbidden("service_api_disabled")

        tenant = db.session.get(Tenant, app.tenant_id)
        if tenant is None or tenant.status == TenantStatus.ARCHIVE:
            raise Forbidden("workspace unavailable")

        ctx.app = app
        ctx.tenant = tenant
|
||||
|
||||
|
||||
class WorkspaceMembershipCheck:
    """Layer 0 — workspace membership gate.

    CE-only (skipped when ENTERPRISE_ENABLED). Account-subject bearers
    (dfoa_) only — SSO subjects skip.
    """

    def __call__(self, ctx: Context) -> None:
        # EE delegates workspace access control to its own authz layer.
        if dify_config.ENTERPRISE_ENABLED:
            return
        # Only account-subject bearers carry a workspace membership to check.
        if ctx.subject_type != SubjectType.ACCOUNT:
            return
        # Invariant checks: these fields must have been set by earlier steps.
        if ctx.account_id is None or ctx.tenant is None:
            raise Unauthorized("account_id or tenant unset — BearerCheck or AppResolver did not run")
        if ctx.token_hash is None:
            raise Unauthorized("token_hash unset — BearerCheck did not run")

        # Raises on non-membership; cached verdicts avoid a DB hit per call.
        check_workspace_membership(
            account_id=ctx.account_id,
            tenant_id=ctx.tenant.id,
            token_hash=ctx.token_hash,
            cached_verdicts=ctx.cached_verified_tenants or {},
        )
|
||||
|
||||
|
||||
class AppAuthzCheck:
    """Gate app access through a lazily resolved authorization strategy."""

    def __init__(self, resolve_strategy: Callable[[], AppAuthzStrategy]) -> None:
        # Resolved per request so feature-flag flips take effect immediately.
        self._resolve = resolve_strategy

    def __call__(self, ctx: Context) -> None:
        strategy = self._resolve()
        if not strategy.authorize(ctx):
            raise Forbidden("subject_no_app_access")
|
||||
|
||||
|
||||
class CallerMount:
    """Mount the calling principal via the first mounter that applies."""

    def __init__(self, *mounters: CallerMounter) -> None:
        self._mounters = mounters

    def __call__(self, ctx: Context) -> None:
        subject = ctx.subject_type
        if subject is None:
            raise Unauthorized("subject_type unset — BearerCheck did not run")
        for mounter in self._mounters:
            if not mounter.applies_to(subject):
                continue
            # First applicable mounter wins; later ones are never consulted.
            mounter.mount(ctx)
            return
        raise Unauthorized("no caller mounter for subject type")
|
||||
@ -1,115 +0,0 @@
|
||||
"""Strategy classes for the openapi auth pipeline.
|
||||
|
||||
App authorization (Acl/Membership) and caller mounting (Account/EndUser)
|
||||
vary along independent axes; each strategy is one class so the pipeline
|
||||
composition stays a flat list.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from typing import Protocol
|
||||
|
||||
from flask import current_app
|
||||
from flask_login import user_logged_in
|
||||
from sqlalchemy import select
|
||||
|
||||
from controllers.openapi.auth.context import Context
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from extensions.ext_database import db
|
||||
from libs.oauth_bearer import SubjectType
|
||||
from models import Account, TenantAccountJoin
|
||||
from services.end_user_service import EndUserService
|
||||
from services.enterprise.enterprise_service import EnterpriseService
|
||||
|
||||
|
||||
class AppAuthzStrategy(Protocol):
    """Structural interface: decide whether the context's subject may access ctx.app."""

    def authorize(self, ctx: Context) -> bool: ...
|
||||
|
||||
|
||||
class AclStrategy:
    """Per-app ACL via the workspace-auth inner API.

    Used when webapp-auth is enabled (EE deployment). The inner-API
    allowlist is the source of truth.
    """

    def authorize(self, ctx: Context) -> bool:
        email = ctx.subject_email
        app = ctx.app
        # Without an identity or a resolved app there is nothing to authorize.
        if email is None or app is None:
            return False
        return EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(
            user_id=email,
            app_id=app.id,
        )
|
||||
|
||||
|
||||
class MembershipStrategy:
    """Tenant-membership fallback.

    Used when webapp-auth is disabled (CE deployment). Account-bearing
    subjects pass if they have a TenantAccountJoin row; EXTERNAL_SSO is
    denied (it requires the webapp-auth surface).
    """

    def authorize(self, ctx: Context) -> bool:
        # SSO subjects cannot hold CE tenant memberships — always denied.
        if ctx.subject_type == SubjectType.EXTERNAL_SSO:
            return False
        tenant = ctx.tenant
        if tenant is None:
            return False
        return _has_tenant_membership(ctx.account_id, tenant.id)
|
||||
|
||||
|
||||
def _has_tenant_membership(account_id: uuid.UUID | str | None, tenant_id: str) -> bool:
    """Return True iff *account_id* has a TenantAccountJoin row in *tenant_id*.

    Uses ``.limit(1)`` before ``scalar_one_or_none``: on the unlimited select,
    ``scalar_one_or_none`` raises ``MultipleResultsFound`` if duplicate join
    rows ever exist, and an existence probe should never fail on duplicates.
    """
    if not account_id:
        return False
    row = db.session.execute(
        select(TenantAccountJoin.id)
        .where(
            TenantAccountJoin.tenant_id == tenant_id,
            TenantAccountJoin.account_id == account_id,
        )
        .limit(1)
    ).scalar_one_or_none()
    return row is not None
|
||||
|
||||
|
||||
def _login_as(user) -> None:
    """Set Flask-Login request user so downstream services see the caller."""
    # NOTE(review): _update_request_context_with_user is a private Flask-Login
    # API — confirm it still exists when upgrading flask-login.
    current_app.login_manager._update_request_context_with_user(user)
    # Fire the standard login signal so listeners (e.g. last-seen updates) run.
    user_logged_in.send(current_app._get_current_object(), user=user)
|
||||
|
||||
|
||||
class CallerMounter(Protocol):
    """Strategy interface: mount one kind of caller onto the context."""

    # True when this mounter handles the given subject type.
    def applies_to(self, subject_type: SubjectType) -> bool: ...

    # Attach the resolved principal to ctx.caller / ctx.caller_kind.
    def mount(self, ctx: Context) -> None: ...
|
||||
|
||||
|
||||
class AccountMounter:
    """Mount a console Account as the caller for ACCOUNT-subject bearers."""

    def applies_to(self, subject_type: SubjectType) -> bool:
        return subject_type == SubjectType.ACCOUNT

    def mount(self, ctx: Context) -> None:
        resolved_id = ctx.account_id
        if resolved_id is None:
            raise RuntimeError("AccountMounter: account_id unset — BearerCheck did not run")
        account = db.session.get(Account, resolved_id)
        if account is None:
            raise RuntimeError("AccountMounter: account row missing for resolved bearer")
        # Bind the resolved tenant so tenant-scoped services see it.
        account.current_tenant = ctx.tenant
        _login_as(account)
        ctx.caller = account
        ctx.caller_kind = "account"
|
||||
|
||||
|
||||
class EndUserMounter:
    """Mount an EndUser row as the caller for EXTERNAL_SSO-subject bearers."""

    def applies_to(self, subject_type: SubjectType) -> bool:
        return subject_type == SubjectType.EXTERNAL_SSO

    def mount(self, ctx: Context) -> None:
        tenant = ctx.tenant
        app = ctx.app
        email = ctx.subject_email
        if tenant is None or app is None or email is None:
            raise RuntimeError("EndUserMounter: tenant/app/subject_email unset — earlier steps did not run")
        # Idempotent: reuses an existing end-user row for this subject if present.
        end_user = EndUserService.get_or_create_end_user_by_type(
            InvokeFrom.OPENAPI,
            tenant_id=tenant.id,
            app_id=app.id,
            user_id=email,
        )
        _login_as(end_user)
        ctx.caller = end_user
        ctx.caller_kind = "end_user"
|
||||
@ -1,9 +0,0 @@
|
||||
from flask_restx import Resource
|
||||
|
||||
from controllers.openapi import openapi_ns
|
||||
|
||||
|
||||
@openapi_ns.route("/_health")
class HealthApi(Resource):
    """Unauthenticated liveness probe for the openapi namespace."""

    def get(self):
        """Return a constant payload confirming the process is serving."""
        return {"ok": True}
|
||||
@ -1,392 +0,0 @@
|
||||
"""Device-flow endpoints under /openapi/v1/oauth/device/*. Two
|
||||
sub-groups in one module:
|
||||
|
||||
Protocol (RFC 8628, public + rate-limited):
|
||||
POST /oauth/device/code
|
||||
POST /oauth/device/token
|
||||
GET /oauth/device/lookup
|
||||
|
||||
Approval (account branch, console-cookie authed):
|
||||
POST /oauth/device/approve
|
||||
POST /oauth/device/deny
|
||||
|
||||
SSO branch lives in oauth_device_sso.py.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from flask import request
|
||||
from flask_login import login_required
|
||||
from flask_restx import Resource
|
||||
from pydantic import BaseModel, ValidationError
|
||||
from werkzeug.exceptions import BadRequest
|
||||
|
||||
from configs import dify_config
|
||||
from controllers.console.wraps import account_initialization_required, setup_required
|
||||
from controllers.openapi import openapi_ns
|
||||
from extensions.ext_database import db
|
||||
from extensions.ext_redis import redis_client
|
||||
from libs.helper import extract_remote_ip
|
||||
from libs.login import current_account_with_tenant
|
||||
from libs.oauth_bearer import SubjectType, bearer_feature_required
|
||||
from libs.rate_limit import (
|
||||
LIMIT_APPROVE_CONSOLE,
|
||||
LIMIT_DEVICE_CODE_PER_IP,
|
||||
LIMIT_LOOKUP_PUBLIC,
|
||||
rate_limit,
|
||||
)
|
||||
from services.oauth_device_flow import (
|
||||
ACCOUNT_ISSUER_SENTINEL,
|
||||
DEFAULT_POLL_INTERVAL_SECONDS,
|
||||
DEVICE_FLOW_TTL_SECONDS,
|
||||
PREFIX_OAUTH_ACCOUNT,
|
||||
DeviceFlowRedis,
|
||||
DeviceFlowStatus,
|
||||
InvalidTransitionError,
|
||||
SlowDownDecision,
|
||||
StateNotFoundError,
|
||||
mint_oauth_token,
|
||||
oauth_ttl_days,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Request / query schemas
|
||||
# =========================================================================
|
||||
|
||||
|
||||
class DeviceCodeRequest(BaseModel):
    """POST /oauth/device/code body (RFC 8628 device-authorization request)."""

    client_id: str
    device_label: str
|
||||
|
||||
|
||||
class DevicePollRequest(BaseModel):
    """POST /oauth/device/token body (RFC 8628 token poll)."""

    device_code: str
    client_id: str
|
||||
|
||||
|
||||
class DeviceLookupQuery(BaseModel):
    """GET /oauth/device/lookup query string."""

    user_code: str
|
||||
|
||||
|
||||
class DeviceMutateRequest(BaseModel):
    """POST body for the approve/deny endpoints."""

    user_code: str
|
||||
|
||||
|
||||
def _validate_json[M: BaseModel](model: type[M]) -> M:
|
||||
body = request.get_json(silent=True) or {}
|
||||
try:
|
||||
return model.model_validate(body)
|
||||
except ValidationError as exc:
|
||||
raise BadRequest(str(exc))
|
||||
|
||||
|
||||
def _validate_query[M: BaseModel](model: type[M]) -> M:
|
||||
try:
|
||||
return model.model_validate(request.args.to_dict(flat=True))
|
||||
except ValidationError as exc:
|
||||
raise BadRequest(str(exc))
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Protocol endpoints — RFC 8628 (public + per-IP rate limit)
|
||||
# =========================================================================
|
||||
|
||||
|
||||
@openapi_ns.route("/oauth/device/code")
class OAuthDeviceCodeApi(Resource):
    """RFC 8628 device-authorization request — public, per-IP rate limited."""

    @rate_limit(LIMIT_DEVICE_CODE_PER_IP)
    def post(self):
        payload = _validate_json(DeviceCodeRequest)
        client_id = payload.client_id
        device_label = payload.device_label

        # Closed client allow-list: unknown clients cannot start a flow.
        if client_id not in dify_config.OPENAPI_KNOWN_CLIENT_IDS:
            return {"error": "unsupported_client"}, 400

        store = DeviceFlowRedis(redis_client)
        # Creation IP is recorded so the poll handler can audit cross-IP polls.
        ip = extract_remote_ip(request)
        device_code, user_code, expires_in = store.start(client_id, device_label, created_ip=ip)

        # Standard RFC 8628 response shape.
        return {
            "device_code": device_code,
            "user_code": user_code,
            "verification_uri": _verification_uri(),
            "expires_in": expires_in,
            "interval": DEFAULT_POLL_INTERVAL_SECONDS,
        }, 200
|
||||
|
||||
|
||||
@openapi_ns.route("/oauth/device/token")
class OAuthDeviceTokenApi(Resource):
    """RFC 8628 poll."""

    def post(self):
        payload = _validate_json(DevicePollRequest)
        device_code = payload.device_code

        store = DeviceFlowRedis(redis_client)

        # slow_down beats every other branch — polling-too-fast clients
        # see only that response regardless of underlying state.
        if store.record_poll(device_code, DEFAULT_POLL_INTERVAL_SECONDS) is SlowDownDecision.SLOW_DOWN:
            return {"error": "slow_down"}, 400

        state = store.load_by_device_code(device_code)
        # Unknown or expired device_code — RFC error code "expired_token".
        if state is None:
            return {"error": "expired_token"}, 400

        # Still waiting for the user to approve/deny in the browser.
        if state.status is DeviceFlowStatus.PENDING:
            return {"error": "authorization_pending"}, 400

        # Terminal state: consume is one-shot; None here means it expired
        # or was already consumed between the load above and now.
        terminal = store.consume_on_poll(device_code)
        if terminal is None:
            return {"error": "expired_token"}, 400

        if terminal.status is DeviceFlowStatus.DENIED:
            return {"error": "access_denied"}, 400

        poll_payload = terminal.poll_payload or {}
        # Approved state must carry the pre-rendered token payload; a missing
        # token indicates a storage bug, surfaced to the client as expiry.
        if "token" not in poll_payload:
            logger.error("device_flow: approved state missing poll_payload for %s", device_code)
            return {"error": "expired_token"}, 400

        _audit_cross_ip_if_needed(state)
        return poll_payload, 200
|
||||
|
||||
|
||||
@openapi_ns.route("/oauth/device/lookup")
class OAuthDeviceLookupApi(Resource):
    """Read-only — public for pre-validate before login. user_code is
    high-entropy + short-TTL; per-IP rate limit blocks enumeration.
    """

    @rate_limit(LIMIT_LOOKUP_PUBLIC)
    def get(self):
        payload = _validate_query(DeviceLookupQuery)
        # user_code is case-insensitive on input; stored normalized upper-case.
        user_code = payload.user_code.strip().upper()

        store = DeviceFlowRedis(redis_client)
        found = store.load_by_user_code(user_code)
        if found is None:
            return {"valid": False, "expires_in_remaining": 0, "client_id": None}, 200

        _device_code, state = found
        # Already approved/denied codes are reported invalid (single-use).
        if state.status is not DeviceFlowStatus.PENDING:
            return {"valid": False, "expires_in_remaining": 0, "client_id": state.client_id}, 200

        # NOTE(review): this returns the full flow TTL constant, not the actual
        # seconds remaining for this code — confirm whether that is intended.
        return {
            "valid": True,
            "expires_in_remaining": DEVICE_FLOW_TTL_SECONDS,
            "client_id": state.client_id,
        }, 200
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Approval endpoints — account branch (cookie-authed)
|
||||
# =========================================================================
|
||||
|
||||
|
||||
# Per-device_code mutex key used by the approve handler's SET NX guard
# (prevents double-mint on concurrent approvals).
_APPROVE_GUARD_KEY_FMT = "device_code:{code}:approving"
# Guard auto-expires: bounds how long a crashed approver can block retries.
_APPROVE_GUARD_TTL_SECONDS = 10
|
||||
|
||||
|
||||
@openapi_ns.route("/oauth/device/approve")
class DeviceApproveApi(Resource):
    """Console-cookie-authed approval: mints a dfoa_ token and stashes the
    poll payload for the device's next /oauth/device/token poll."""

    @setup_required
    @login_required
    @account_initialization_required
    @bearer_feature_required
    @rate_limit(LIMIT_APPROVE_CONSOLE)
    def post(self):
        payload = _validate_json(DeviceMutateRequest)
        # Normalize to the stored upper-case form.
        user_code = payload.user_code.strip().upper()

        # NOTE(review): `tenant` here is used as a tenant id downstream
        # (str(tenant) comparisons, tenant_id=tenant) — confirm against
        # current_account_with_tenant's return type.
        account, tenant = current_account_with_tenant()
        store = DeviceFlowRedis(redis_client)

        found = store.load_by_user_code(user_code)
        if found is None:
            return {"error": "expired_or_unknown"}, 404
        device_code, state = found
        if state.status is not DeviceFlowStatus.PENDING:
            return {"error": "already_resolved"}, 409

        # SET NX guard — without it, two in-flight approves both pass
        # PENDING, both mint, and the second upsert silently rotates the
        # first caller into an already-revoked token.
        guard_key = _APPROVE_GUARD_KEY_FMT.format(code=device_code)
        if not redis_client.set(guard_key, "1", nx=True, ex=_APPROVE_GUARD_TTL_SECONDS):
            return {"error": "approve_in_progress"}, 409

        try:
            ttl_days = oauth_ttl_days(tenant_id=tenant)
            # Mint the durable token row first; Redis approval references it.
            mint = mint_oauth_token(
                db.session,
                redis_client,
                subject_email=account.email,
                subject_issuer=ACCOUNT_ISSUER_SENTINEL,
                account_id=str(account.id),
                client_id=state.client_id,
                device_label=state.device_label,
                prefix=PREFIX_OAUTH_ACCOUNT,
                ttl_days=ttl_days,
            )

            # Pre-rendered so the unauthenticated poll handler needs no DB reads.
            poll_payload = _build_account_poll_payload(account, tenant, mint)
            try:
                store.approve(
                    device_code,
                    subject_email=account.email,
                    account_id=str(account.id),
                    subject_issuer=ACCOUNT_ISSUER_SENTINEL,
                    minted_token=mint.token,
                    token_id=str(mint.token_id),
                    poll_payload=poll_payload,
                )
            except (StateNotFoundError, InvalidTransitionError):
                # Row minted but state vanished — roll forward; the orphan
                # token is revocable via auth devices list / Authorized Apps.
                logger.exception("device_flow: approve raced on %s", device_code)
                return {"error": "state_lost"}, 409
        finally:
            # Always release the guard, success or failure.
            redis_client.delete(guard_key)

        _emit_approve_audit(state, account, tenant, mint)
        return {"status": "approved"}, 200
|
||||
|
||||
|
||||
@openapi_ns.route("/oauth/device/deny")
class DeviceDenyApi(Resource):
    """Console-cookie-authed denial of a pending device-flow request."""

    @setup_required
    @login_required
    @account_initialization_required
    @bearer_feature_required
    @rate_limit(LIMIT_APPROVE_CONSOLE)
    def post(self):
        payload = _validate_json(DeviceMutateRequest)
        normalized_code = payload.user_code.strip().upper()

        store = DeviceFlowRedis(redis_client)
        found = store.load_by_user_code(normalized_code)
        if found is None:
            return {"error": "expired_or_unknown"}, 404

        device_code, state = found
        if state.status is not DeviceFlowStatus.PENDING:
            return {"error": "already_resolved"}, 409

        try:
            store.deny(device_code)
        except (StateNotFoundError, InvalidTransitionError):
            # A concurrent approve/deny/expiry won the race — report conflict.
            logger.exception("device_flow: deny raced on %s", device_code)
            return {"error": "state_lost"}, 409

        _emit_deny_audit(state)
        return {"status": "denied"}, 200
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Helpers
|
||||
# =========================================================================
|
||||
|
||||
|
||||
def _verification_uri() -> str:
    """Absolute URL of the /device approval page shown to the end user.

    Prefers the configured console URL; falls back to the request's host.
    """
    configured = getattr(dify_config, "CONSOLE_WEB_URL", None)
    if configured:
        return f"{configured.rstrip('/')}/device"
    return f"{request.host_url.rstrip('/')}/device"
|
||||
|
||||
|
||||
def _audit_cross_ip_if_needed(state) -> None:
    """Emit an audit log when the polling IP differs from the creation IP.

    Observability only — the poll is not rejected; both IPs must be known
    for the comparison to fire.
    """
    poll_ip = extract_remote_ip(request)
    if state.created_ip and poll_ip and poll_ip != state.created_ip:
        logger.warning(
            "audit: oauth.device_code_cross_ip_poll token_id=%s creation_ip=%s poll_ip=%s",
            state.token_id,
            state.created_ip,
            poll_ip,
            extra={
                "audit": True,
                "token_id": state.token_id,
                "creation_ip": state.created_ip,
                "poll_ip": poll_ip,
            },
        )
|
||||
|
||||
|
||||
def _build_account_poll_payload(account, tenant, mint) -> dict:
    """Pre-render the poll-response body so the unauthenticated poll
    handler doesn't re-query accounts/tenants for authz data.
    """
    # Local import mirrors the module convention of deferring model imports here.
    from models import Tenant, TenantAccountJoin

    # All workspaces the account belongs to, with their membership rows.
    rows = (
        db.session.query(Tenant, TenantAccountJoin)
        .join(TenantAccountJoin, TenantAccountJoin.tenant_id == Tenant.id)
        .filter(TenantAccountJoin.account_id == account.id)
        .all()
    )
    workspaces = [{"id": str(t.id), "name": t.name, "role": getattr(m, "role", "")} for t, m in rows]
    # Prefer active session tenant → DB-flagged current join → first membership.
    # NOTE(review): `tenant` is stringified for comparison against workspace
    # ids, so it appears to be a tenant id — confirm at the call site.
    default_ws_id = None
    if tenant and any(w["id"] == str(tenant) for w in workspaces):
        default_ws_id = str(tenant)
    if default_ws_id is None:
        for _t, m in rows:
            if getattr(m, "current", False):
                default_ws_id = str(m.tenant_id)
                break
    if default_ws_id is None and workspaces:
        default_ws_id = workspaces[0]["id"]

    return {
        "token": mint.token,
        "expires_at": mint.expires_at.isoformat(),
        "subject_type": SubjectType.ACCOUNT,
        "account": {"id": str(account.id), "email": account.email, "name": account.name},
        "workspaces": workspaces,
        "default_workspace_id": default_ws_id,
        "token_id": str(mint.token_id),
    }
|
||||
|
||||
|
||||
def _emit_approve_audit(state, account, tenant, mint) -> None:
    """Emit the structured audit record for a successful device-flow approval.

    WARNING level with ``extra["audit"]=True`` so audit pipelines can filter it.
    """
    logger.warning(
        "audit: oauth.device_flow_approved token_id=%s subject=%s client_id=%s device_label=%s rotated=? expires_at=%s",
        mint.token_id,
        account.email,
        state.client_id,
        state.device_label,
        mint.expires_at,
        extra={
            "audit": True,
            "event": "oauth.device_flow_approved",
            "token_id": str(mint.token_id),
            "subject_type": SubjectType.ACCOUNT,
            "subject_email": account.email,
            "account_id": str(account.id),
            "tenant_id": tenant,
            "client_id": state.client_id,
            "device_label": state.device_label,
            "scopes": ["full"],
            "expires_at": mint.expires_at.isoformat(),
        },
    )
|
||||
|
||||
|
||||
def _emit_deny_audit(state) -> None:
    """Emit the structured audit record for a device-flow denial."""
    logger.warning(
        "audit: oauth.device_flow_denied client_id=%s device_label=%s",
        state.client_id,
        state.device_label,
        extra={
            "audit": True,
            "event": "oauth.device_flow_denied",
            "client_id": state.client_id,
            "device_label": state.device_label,
        },
    )
|
||||
@ -1,287 +0,0 @@
|
||||
"""SSO-branch device-flow endpoints under /openapi/v1/oauth/device/*.
|
||||
EE-only. Browser flow:
|
||||
|
||||
GET /oauth/device/sso-initiate → 302 to IdP authorize URL
|
||||
GET /oauth/device/sso-complete → ACS callback, sets approval-grant cookie
|
||||
GET /oauth/device/approval-context → SPA reads cookie claims (idempotent)
|
||||
POST /oauth/device/approve-external → mints dfoe_ token + clears cookie
|
||||
|
||||
Function-based (raw @bp.route) rather than Resource classes because the
|
||||
handlers do redirects + cookie kwargs that don't fit the Resource shape.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import secrets
|
||||
|
||||
from flask import jsonify, make_response, redirect, request
|
||||
from werkzeug.exceptions import (
|
||||
BadGateway,
|
||||
BadRequest,
|
||||
Conflict,
|
||||
Forbidden,
|
||||
NotFound,
|
||||
Unauthorized,
|
||||
)
|
||||
|
||||
from controllers.openapi import bp
|
||||
from extensions.ext_database import db
|
||||
from extensions.ext_redis import redis_client
|
||||
from libs import jws
|
||||
from libs.device_flow_security import (
|
||||
APPROVAL_GRANT_COOKIE_NAME,
|
||||
ApprovalGrantClaims,
|
||||
approval_grant_cleared_cookie_kwargs,
|
||||
approval_grant_cookie_kwargs,
|
||||
consume_approval_grant_nonce,
|
||||
consume_sso_assertion_nonce,
|
||||
enterprise_only,
|
||||
mint_approval_grant,
|
||||
verify_approval_grant,
|
||||
)
|
||||
from libs.oauth_bearer import SubjectType
|
||||
from libs.rate_limit import (
|
||||
LIMIT_APPROVE_EXT_PER_EMAIL,
|
||||
LIMIT_SSO_INITIATE_PER_IP,
|
||||
enforce,
|
||||
rate_limit,
|
||||
)
|
||||
from services.enterprise.enterprise_service import EnterpriseService
|
||||
from services.oauth_device_flow import (
|
||||
PREFIX_OAUTH_EXTERNAL_SSO,
|
||||
DeviceFlowRedis,
|
||||
DeviceFlowStatus,
|
||||
InvalidTransitionError,
|
||||
StateNotFoundError,
|
||||
mint_oauth_token,
|
||||
oauth_ttl_days,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Matches DEVICE_FLOW_TTL_SECONDS so the signed state can't outlive the
|
||||
# device_code it references.
|
||||
STATE_ENVELOPE_TTL_SECONDS = 15 * 60
|
||||
|
||||
# Canonical sso-complete path. IdP-side ACS callback URL must point here.
|
||||
_SSO_COMPLETE_PATH = "/openapi/v1/oauth/device/sso-complete"
|
||||
|
||||
|
||||
@bp.route("/oauth/device/sso-initiate", methods=["GET"])
@enterprise_only
@rate_limit(LIMIT_SSO_INITIATE_PER_IP)
def sso_initiate():
    """Start the SSO branch: 302 the browser to the IdP authorize URL.

    Validates that ``user_code`` names a PENDING device-flow state, signs a
    short-lived state envelope binding that code to this flow, and asks the
    enterprise service for the IdP URL to redirect to.
    """
    # user_code is case-insensitive on the wire; normalize before lookup.
    user_code = (request.args.get("user_code") or "").strip().upper()
    if not user_code:
        raise BadRequest("user_code required")

    store = DeviceFlowRedis(redis_client)
    found = store.load_by_user_code(user_code)
    if found is None:
        raise BadRequest("invalid_user_code")
    _, state = found
    # Same error for unknown and non-PENDING codes — no state oracle.
    if state.status is not DeviceFlowStatus.PENDING:
        raise BadRequest("invalid_user_code")

    keyset = jws.KeySet.from_shared_secret()
    signed_state = jws.sign(
        keyset,
        payload={
            # NOTE(review): empty redirect_url/app_code/return_to appear to
            # satisfy a shared envelope schema — confirm against the
            # EnterpriseService contract.
            "redirect_url": "",
            "app_code": "",
            "intent": "device_flow",
            "user_code": user_code,
            "nonce": secrets.token_urlsafe(16),
            "return_to": "",
            # ACS callback must land on the canonical sso-complete route.
            "idp_callback_url": f"{request.host_url.rstrip('/')}{_SSO_COMPLETE_PATH}",
        },
        aud=jws.AUD_STATE_ENVELOPE,
        ttl_seconds=STATE_ENVELOPE_TTL_SECONDS,
    )

    try:
        reply = EnterpriseService.initiate_device_flow_sso(signed_state)
    except Exception as e:
        # Upstream failure, not caller error → 502.
        logger.warning("sso-initiate: enterprise call failed: %s", e)
        raise BadGateway("sso_initiate_failed") from e

    url = (reply or {}).get("url")
    if not url:
        raise BadGateway("sso_initiate_missing_url")

    # Clear stale approval-grant — defends against cross-tab/back-button mixing.
    resp = redirect(url, code=302)
    resp.set_cookie(**approval_grant_cleared_cookie_kwargs())
    return resp
|
||||
|
||||
|
||||
@bp.route("/oauth/device/sso-complete", methods=["GET"])
@enterprise_only
def sso_complete():
    """ACS callback: verify the IdP assertion and hand the SPA a grant cookie.

    On success the browser is redirected to the device page with the signed
    approval-grant cookie set; the SPA then reads it via approval-context and
    finishes via approve-external.
    """
    blob = request.args.get("sso_assertion")
    if not blob:
        raise BadRequest("sso_assertion required")

    keyset = jws.KeySet.from_shared_secret()

    try:
        claims = jws.verify(keyset, blob, expected_aud=jws.AUD_EXT_SUBJECT_ASSERTION)
    except jws.VerifyError as e:
        logger.warning("sso-complete: rejected assertion: %s", e)
        raise BadRequest("invalid_sso_assertion") from e

    # Single-use: a replayed assertion fails its nonce check here.
    if not consume_sso_assertion_nonce(redis_client, claims.get("nonce", "")):
        raise BadRequest("invalid_sso_assertion")

    # Normalization matches sso_initiate so the lookup key is identical.
    user_code = (claims.get("user_code") or "").strip().upper()
    store = DeviceFlowRedis(redis_client)
    found = store.load_by_user_code(user_code)
    if found is None:
        raise Conflict("user_code_not_pending")
    _, state = found
    if state.status is not DeviceFlowStatus.PENDING:
        raise Conflict("user_code_not_pending")

    iss = request.host_url.rstrip("/")
    # Bind the verified identity + user_code into a short-lived signed cookie.
    cookie_value, _ = mint_approval_grant(
        keyset=keyset,
        iss=iss,
        subject_email=claims["email"],
        subject_issuer=claims["issuer"],
        user_code=user_code,
    )

    resp = redirect("/device?sso_verified=1", code=302)
    resp.set_cookie(**approval_grant_cookie_kwargs(cookie_value))
    return resp
|
||||
|
||||
|
||||
@bp.route("/oauth/device/approval-context", methods=["GET"])
@enterprise_only
def approval_context():
    """Idempotent read of the approval-grant cookie claims for the SPA.

    Does not consume the nonce — the grant stays usable afterwards. The
    returned ``csrf_token`` must be echoed in the ``X-CSRF-Token`` header on
    the follow-up approve-external POST.
    """
    token = request.cookies.get(APPROVAL_GRANT_COOKIE_NAME)
    if not token:
        raise Unauthorized("no_session")

    keyset = jws.KeySet.from_shared_secret()
    try:
        claims = verify_approval_grant(keyset, token)
    except jws.VerifyError as e:
        # Expired/forged cookie is indistinguishable from no session.
        logger.warning("approval-context: bad cookie: %s", e)
        raise Unauthorized("no_session") from e

    return jsonify(
        {
            "subject_email": claims.subject_email,
            "subject_issuer": claims.subject_issuer,
            "user_code": claims.user_code,
            "csrf_token": claims.csrf_token,
            "expires_at": claims.expires_at.isoformat(),
        }
    ), 200
|
||||
|
||||
|
||||
@bp.route("/oauth/device/approve-external", methods=["POST"])
@enterprise_only
def approve_external():
    """Mint a PREFIX_OAUTH_EXTERNAL_SSO token and approve the device flow.

    Order matters: cheap cookie/CSRF/body checks run before any state is
    touched, and the single-use grant nonce is only burned once the
    device_code is confirmed PENDING — a rejected request leaves the grant
    usable for a retry.
    """
    token = request.cookies.get(APPROVAL_GRANT_COOKIE_NAME)
    if not token:
        raise Unauthorized("invalid_session")

    keyset = jws.KeySet.from_shared_secret()
    try:
        claims: ApprovalGrantClaims = verify_approval_grant(keyset, token)
    except jws.VerifyError as e:
        logger.warning("approve-external: bad cookie: %s", e)
        raise Unauthorized("invalid_session") from e

    # Rate-limit keyed by the verified subject, not the caller IP.
    enforce(LIMIT_APPROVE_EXT_PER_EMAIL, key=f"subject:{claims.subject_email}")

    # Double-submit check: header must echo the cookie-bound csrf_token.
    csrf_header = request.headers.get("X-CSRF-Token", "")
    if not csrf_header or csrf_header != claims.csrf_token:
        raise Forbidden("csrf_mismatch")

    data = request.get_json(silent=True) or {}
    # The body must name the same user_code the grant was minted for.
    body_user_code = (data.get("user_code") or "").strip().upper()
    if body_user_code != claims.user_code:
        raise BadRequest("user_code_mismatch")

    store = DeviceFlowRedis(redis_client)
    found = store.load_by_user_code(claims.user_code)
    if found is None:
        raise NotFound("user_code_not_pending")
    device_code, state = found
    if state.status is not DeviceFlowStatus.PENDING:
        raise Conflict("user_code_not_pending")

    # Burn the single-use nonce; a replayed grant fails here with 401.
    if not consume_approval_grant_nonce(redis_client, claims.nonce):
        raise Unauthorized("session_already_consumed")

    # tenant_id=None: external-SSO subjects are not bound to a workspace.
    ttl_days = oauth_ttl_days(tenant_id=None)
    mint = mint_oauth_token(
        db.session,
        redis_client,
        subject_email=claims.subject_email,
        subject_issuer=claims.subject_issuer,
        account_id=None,
        client_id=state.client_id,
        device_label=state.device_label,
        prefix=PREFIX_OAUTH_EXTERNAL_SSO,
        ttl_days=ttl_days,
    )

    # Payload the polling device receives; account fields are None/empty
    # because external-SSO subjects carry no Dify account.
    poll_payload = {
        "token": mint.token,
        "expires_at": mint.expires_at.isoformat(),
        "subject_type": SubjectType.EXTERNAL_SSO,
        "subject_email": claims.subject_email,
        "subject_issuer": claims.subject_issuer,
        "account": None,
        "workspaces": [],
        "default_workspace_id": None,
        "token_id": str(mint.token_id),
    }

    try:
        store.approve(
            device_code,
            subject_email=claims.subject_email,
            account_id=None,
            subject_issuer=claims.subject_issuer,
            minted_token=mint.token,
            token_id=str(mint.token_id),
            poll_payload=poll_payload,
        )
    except (StateNotFoundError, InvalidTransitionError) as e:
        # State expired or transitioned concurrently — surface as 409.
        logger.exception("approve-external: state transition raced")
        raise Conflict("state_lost") from e

    _emit_approve_external_audit(state, claims, mint)

    # Grant is consumed; clear the cookie so the SPA can't resubmit it.
    resp = make_response(jsonify({"status": "approved"}), 200)
    resp.set_cookie(**approval_grant_cleared_cookie_kwargs())
    return resp
|
||||
|
||||
|
||||
def _emit_approve_external_audit(state, claims, mint) -> None:
    """Write the structured audit record for an external-SSO approval."""
    # Duplicate identity/token data into ``extra`` for structured consumers.
    audit_extra = {
        "audit": True,
        "event": "oauth.device_flow_approved",
        "subject_type": SubjectType.EXTERNAL_SSO,
        "subject_email": claims.subject_email,
        "subject_issuer": claims.subject_issuer,
        "token_id": str(mint.token_id),
        "client_id": state.client_id,
        "device_label": state.device_label,
        "scopes": ["apps:run"],
        "expires_at": mint.expires_at.isoformat(),
    }
    logger.warning(
        "audit: oauth.device_flow_approved subject_type=%s subject_email=%s subject_issuer=%s token_id=%s",
        SubjectType.EXTERNAL_SSO,
        claims.subject_email,
        claims.subject_issuer,
        mint.token_id,
        extra=audit_extra,
    )
|
||||
@ -1,89 +0,0 @@
|
||||
"""User-scoped workspace reads under /openapi/v1/workspaces. Bearer-authed
|
||||
counterparts to the cookie-authed /console/api/workspaces endpoints.
|
||||
|
||||
Account bearers (dfoa_) see every tenant they're a member of. External
|
||||
SSO bearers (dfoe_) have no account_id and so see an empty list — that
|
||||
matches /openapi/v1/account.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from itertools import starmap
|
||||
|
||||
from flask import g
|
||||
from flask_restx import Resource
|
||||
from sqlalchemy import select
|
||||
from werkzeug.exceptions import NotFound
|
||||
|
||||
from controllers.openapi import openapi_ns
|
||||
from extensions.ext_database import db
|
||||
from libs.oauth_bearer import (
|
||||
ACCEPT_USER_ANY,
|
||||
SubjectType,
|
||||
validate_bearer,
|
||||
)
|
||||
from models import Tenant, TenantAccountJoin
|
||||
|
||||
|
||||
@openapi_ns.route("/workspaces")
class WorkspacesApi(Resource):
    """GET /workspaces — list every tenant the bearer's account belongs to."""

    @validate_bearer(accept=ACCEPT_USER_ANY)
    def get(self):
        ctx = g.auth_ctx
        # Non-account bearers (external SSO) have no account_id → empty list,
        # mirroring the /openapi/v1/account behavior.
        if ctx.subject_type != SubjectType.ACCOUNT or not ctx.account_id:
            return {"workspaces": []}, 200

        # Join tenant rows to this account's membership rows; oldest first.
        rows = db.session.execute(
            select(Tenant, TenantAccountJoin)
            .join(TenantAccountJoin, TenantAccountJoin.tenant_id == Tenant.id)
            .where(TenantAccountJoin.account_id == str(ctx.account_id))
            .order_by(Tenant.created_at.asc())
        ).all()

        return {"workspaces": list(starmap(_workspace_summary, rows))}, 200
|
||||
|
||||
|
||||
@openapi_ns.route("/workspaces/<string:workspace_id>")
class WorkspaceByIdApi(Resource):
    """GET /workspaces/<id> — detail view, scoped to the bearer's memberships."""

    @validate_bearer(accept=ACCEPT_USER_ANY)
    def get(self, workspace_id: str):
        ctx = g.auth_ctx
        # External SSO + missing account → never a member of anything; 404.
        if ctx.subject_type != SubjectType.ACCOUNT or not ctx.account_id:
            raise NotFound("workspace not found")

        # Membership is enforced in the query itself: no join row, no result.
        row = db.session.execute(
            select(Tenant, TenantAccountJoin)
            .join(TenantAccountJoin, TenantAccountJoin.tenant_id == Tenant.id)
            .where(
                Tenant.id == workspace_id,
                TenantAccountJoin.account_id == str(ctx.account_id),
            )
        ).first()
        # 404 (not 403) on non-member so workspace IDs don't leak across tenants.
        if row is None:
            raise NotFound("workspace not found")

        tenant, membership = row
        return _workspace_detail(tenant, membership), 200
|
||||
|
||||
|
||||
def _workspace_summary(tenant: Tenant, membership: TenantAccountJoin) -> dict:
|
||||
return {
|
||||
"id": str(tenant.id),
|
||||
"name": tenant.name,
|
||||
"role": getattr(membership, "role", ""),
|
||||
"status": tenant.status,
|
||||
"current": getattr(membership, "current", False),
|
||||
}
|
||||
|
||||
|
||||
def _workspace_detail(tenant: Tenant, membership: TenantAccountJoin) -> dict:
|
||||
return {
|
||||
"id": str(tenant.id),
|
||||
"name": tenant.name,
|
||||
"role": getattr(membership, "role", ""),
|
||||
"status": tenant.status,
|
||||
"current": getattr(membership, "current", False),
|
||||
"created_at": tenant.created_at.isoformat() if tenant.created_at else None,
|
||||
}
|
||||
@ -685,8 +685,6 @@ class WorkflowAppGenerateTaskPipeline(GraphRuntimeStateSupport):
|
||||
match invoke_from:
|
||||
case InvokeFrom.SERVICE_API:
|
||||
created_from = WorkflowAppLogCreatedFrom.SERVICE_API
|
||||
case InvokeFrom.OPENAPI:
|
||||
created_from = WorkflowAppLogCreatedFrom.OPENAPI
|
||||
case InvokeFrom.EXPLORE:
|
||||
created_from = WorkflowAppLogCreatedFrom.INSTALLED_APP
|
||||
case InvokeFrom.WEB_APP:
|
||||
|
||||
@ -24,7 +24,6 @@ class UserFrom(StrEnum):
|
||||
|
||||
class InvokeFrom(StrEnum):
|
||||
SERVICE_API = "service-api"
|
||||
OPENAPI = "openapi"
|
||||
WEB_APP = "web-app"
|
||||
TRIGGER = "trigger"
|
||||
EXPLORE = "explore"
|
||||
|
||||
@ -70,12 +70,32 @@ class ProviderManager:
|
||||
Request-bound managers may carry caller identity in that runtime, and the
|
||||
resulting ``ProviderConfiguration`` objects must reuse it for downstream
|
||||
model-type and schema lookups.
|
||||
|
||||
Configuration assembly is cached per manager instance so call chains that
|
||||
share one request-scoped manager can reuse the same provider graph instead
|
||||
of rebuilding it for every lookup. Call ``clear_configurations_cache()``
|
||||
when a long-lived manager needs to observe writes performed within the same
|
||||
instance scope.
|
||||
"""
|
||||
|
||||
decoding_rsa_key: Any | None
|
||||
decoding_cipher_rsa: Any | None
|
||||
_model_runtime: ModelRuntime
|
||||
_configurations_cache: dict[str, ProviderConfigurations]
|
||||
|
||||
def __init__(self, model_runtime: ModelRuntime):
|
||||
self.decoding_rsa_key = None
|
||||
self.decoding_cipher_rsa = None
|
||||
self._model_runtime = model_runtime
|
||||
self._configurations_cache = {}
|
||||
|
||||
def clear_configurations_cache(self, tenant_id: str | None = None) -> None:
|
||||
"""Drop assembled provider configurations cached on this manager instance."""
|
||||
if tenant_id is None:
|
||||
self._configurations_cache.clear()
|
||||
return
|
||||
|
||||
self._configurations_cache.pop(tenant_id, None)
|
||||
|
||||
def get_configurations(self, tenant_id: str) -> ProviderConfigurations:
|
||||
"""
|
||||
@ -114,6 +134,10 @@ class ProviderManager:
|
||||
:param tenant_id:
|
||||
:return:
|
||||
"""
|
||||
cached_configurations = self._configurations_cache.get(tenant_id)
|
||||
if cached_configurations is not None:
|
||||
return cached_configurations
|
||||
|
||||
# Get all provider records of the workspace
|
||||
provider_name_to_provider_records_dict = self._get_all_providers(tenant_id)
|
||||
|
||||
@ -273,6 +297,8 @@ class ProviderManager:
|
||||
|
||||
provider_configurations[str(provider_id_entity)] = provider_configuration
|
||||
|
||||
self._configurations_cache[tenant_id] = provider_configurations
|
||||
|
||||
# Return the encapsulated object
|
||||
return provider_configurations
|
||||
|
||||
|
||||
@ -139,8 +139,10 @@ class Jieba(BaseKeyword):
|
||||
"__data__": {"index_id": self.dataset.id, "summary": None, "table": keyword_table},
|
||||
}
|
||||
dataset_keyword_table = self.dataset.dataset_keyword_table
|
||||
keyword_data_source_type = dataset_keyword_table.data_source_type
|
||||
keyword_data_source_type = dataset_keyword_table.data_source_type if dataset_keyword_table else "file"
|
||||
if keyword_data_source_type == "database":
|
||||
if dataset_keyword_table is None:
|
||||
return
|
||||
dataset_keyword_table.keyword_table = dumps_with_sets(keyword_table_dict)
|
||||
db.session.commit()
|
||||
else:
|
||||
|
||||
@ -1,4 +1,5 @@
|
||||
import re
|
||||
from collections.abc import Callable
|
||||
from operator import itemgetter
|
||||
from typing import cast
|
||||
|
||||
@ -80,12 +81,14 @@ class JiebaKeywordTableHandler:
|
||||
|
||||
def extract_tags(self, sentence: str, top_k: int | None = 20, **kwargs):
|
||||
# Basic frequency-based keyword extraction as a fallback when TF-IDF is unavailable.
|
||||
top_k = kwargs.pop("topK", top_k)
|
||||
top_k = cast(int | None, kwargs.pop("topK", top_k))
|
||||
if top_k is None:
|
||||
top_k = 20
|
||||
cut = getattr(jieba, "cut", None)
|
||||
if self._lcut:
|
||||
tokens = self._lcut(sentence)
|
||||
elif callable(cut):
|
||||
tokens = list(cut(sentence))
|
||||
tokens = list(cast(Callable[[str], list[str]], cut)(sentence))
|
||||
else:
|
||||
tokens = re.findall(r"\w+", sentence)
|
||||
|
||||
@ -108,7 +111,7 @@ class JiebaKeywordTableHandler:
|
||||
sentence=text,
|
||||
topK=max_keywords_per_chunk,
|
||||
)
|
||||
# jieba.analyse.extract_tags returns list[Any] when withFlag is False by default.
|
||||
# jieba.analyse.extract_tags returns an untyped list when withFlag is False by default.
|
||||
keywords = cast(list[str], keywords)
|
||||
|
||||
return set(self._expand_tokens_with_subtokens(set(keywords)))
|
||||
|
||||
@ -158,7 +158,7 @@ class RetrievalService:
|
||||
)
|
||||
|
||||
if futures:
|
||||
for future in concurrent.futures.as_completed(futures, timeout=3600):
|
||||
for _ in concurrent.futures.as_completed(futures, timeout=3600):
|
||||
if exceptions:
|
||||
for f in futures:
|
||||
f.cancel()
|
||||
|
||||
@ -94,6 +94,7 @@ class ExtractProcessor:
|
||||
cls, extract_setting: ExtractSetting, is_automatic: bool = False, file_path: str | None = None
|
||||
) -> list[Document]:
|
||||
if extract_setting.datasource_type == DatasourceType.FILE:
|
||||
upload_file = extract_setting.upload_file
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
upload_file = extract_setting.upload_file
|
||||
if not file_path:
|
||||
@ -104,6 +105,7 @@ class ExtractProcessor:
|
||||
storage.download(upload_file.key, file_path)
|
||||
input_file = Path(file_path)
|
||||
file_extension = input_file.suffix.lower()
|
||||
assert upload_file is not None, "upload_file is required"
|
||||
etl_type = dify_config.ETL_TYPE
|
||||
extractor: BaseExtractor | None = None
|
||||
if etl_type == "Unstructured":
|
||||
|
||||
@ -28,7 +28,7 @@ class FunctionCallMultiDatasetRouter:
|
||||
SystemPromptMessage(content="You are a helpful AI assistant."),
|
||||
UserPromptMessage(content=query),
|
||||
]
|
||||
result: LLMResult = model_instance.invoke_llm(
|
||||
result: LLMResult = model_instance.invoke_llm( # pyright: ignore[reportCallIssue, reportArgumentType]
|
||||
prompt_messages=prompt_messages,
|
||||
tools=dataset_tools,
|
||||
stream=False,
|
||||
|
||||
@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import codecs
|
||||
import re
|
||||
from collections.abc import Collection
|
||||
from collections.abc import Set as AbstractSet
|
||||
from typing import Any, Literal
|
||||
|
||||
from core.model_manager import ModelInstance
|
||||
@ -21,8 +21,8 @@ class EnhanceRecursiveCharacterTextSplitter(RecursiveCharacterTextSplitter):
|
||||
def from_encoder[T: EnhanceRecursiveCharacterTextSplitter](
|
||||
cls: type[T],
|
||||
embedding_model_instance: ModelInstance | None,
|
||||
allowed_special: Literal["all"] | set[str] = set(),
|
||||
disallowed_special: Literal["all"] | Collection[str] = "all",
|
||||
allowed_special: Literal["all"] | AbstractSet[str] = frozenset(),
|
||||
disallowed_special: Literal["all"] | AbstractSet[str] = "all",
|
||||
**kwargs: Any,
|
||||
) -> T:
|
||||
def _token_encoder(texts: list[str]) -> list[int]:
|
||||
@ -40,6 +40,7 @@ class EnhanceRecursiveCharacterTextSplitter(RecursiveCharacterTextSplitter):
|
||||
|
||||
return [len(text) for text in texts]
|
||||
|
||||
_ = _token_encoder # kept for future token-length wiring
|
||||
return cls(length_function=_character_encoder, **kwargs)
|
||||
|
||||
|
||||
|
||||
@ -4,7 +4,8 @@ import copy
|
||||
import logging
|
||||
import re
|
||||
from abc import ABC, abstractmethod
|
||||
from collections.abc import Callable, Collection, Iterable, Sequence, Set
|
||||
from collections.abc import Callable, Iterable, Sequence
|
||||
from collections.abc import Set as AbstractSet
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Literal
|
||||
|
||||
@ -187,8 +188,8 @@ class TokenTextSplitter(TextSplitter):
|
||||
self,
|
||||
encoding_name: str = "gpt2",
|
||||
model_name: str | None = None,
|
||||
allowed_special: Literal["all"] | Set[str] = set(),
|
||||
disallowed_special: Literal["all"] | Collection[str] = "all",
|
||||
allowed_special: Literal["all"] | AbstractSet[str] = frozenset(),
|
||||
disallowed_special: Literal["all"] | AbstractSet[str] = "all",
|
||||
**kwargs: Any,
|
||||
):
|
||||
"""Create a new TextSplitter."""
|
||||
@ -207,8 +208,8 @@ class TokenTextSplitter(TextSplitter):
|
||||
else:
|
||||
enc = tiktoken.get_encoding(encoding_name)
|
||||
self._tokenizer = enc
|
||||
self._allowed_special = allowed_special
|
||||
self._disallowed_special = disallowed_special
|
||||
self._allowed_special: Literal["all"] | AbstractSet[str] = allowed_special
|
||||
self._disallowed_special: Literal["all"] | AbstractSet[str] = disallowed_special
|
||||
|
||||
def split_text(self, text: str) -> list[str]:
|
||||
def _encode(_text: str) -> list[int]:
|
||||
|
||||
@ -41,6 +41,10 @@ def safe_json_value(v):
|
||||
return v.hex()
|
||||
elif isinstance(v, memoryview):
|
||||
return v.tobytes().hex()
|
||||
elif isinstance(v, np.integer):
|
||||
return int(v)
|
||||
elif isinstance(v, np.floating):
|
||||
return float(v)
|
||||
elif isinstance(v, np.ndarray):
|
||||
return v.tolist()
|
||||
elif isinstance(v, dict):
|
||||
|
||||
@ -105,7 +105,7 @@ class Article:
|
||||
|
||||
|
||||
def extract_using_readabilipy(html: str):
|
||||
json_article: dict[str, Any] = simple_json_from_html_string(html, use_readability=True)
|
||||
json_article: dict[str, Any] = simple_json_from_html_string(html, use_readability=False)
|
||||
article = Article(
|
||||
title=json_article.get("title") or "",
|
||||
author=json_article.get("byline") or "",
|
||||
|
||||
@ -8,8 +8,6 @@ AUTHENTICATED_HEADERS: tuple[str, ...] = (*SERVICE_API_HEADERS, HEADER_NAME_CSRF
|
||||
FILES_HEADERS: tuple[str, ...] = (*BASE_CORS_HEADERS, HEADER_NAME_CSRF_TOKEN)
|
||||
EMBED_HEADERS: tuple[str, ...] = ("Content-Type", HEADER_NAME_APP_CODE)
|
||||
EXPOSED_HEADERS: tuple[str, ...] = ("X-Version", "X-Env", "X-Trace-Id")
|
||||
OPENAPI_HEADERS: tuple[str, ...] = ("Authorization", "Content-Type", HEADER_NAME_CSRF_TOKEN)
|
||||
OPENAPI_MAX_AGE_SECONDS: int = 600
|
||||
|
||||
|
||||
def _apply_cors_once(bp, /, **cors_kwargs):
|
||||
@ -31,7 +29,6 @@ def init_app(app: DifyApp):
|
||||
from controllers.files import bp as files_bp
|
||||
from controllers.inner_api import bp as inner_api_bp
|
||||
from controllers.mcp import bp as mcp_bp
|
||||
from controllers.openapi import bp as openapi_bp
|
||||
from controllers.service_api import bp as service_api_bp
|
||||
from controllers.trigger import bp as trigger_bp
|
||||
from controllers.web import bp as web_bp
|
||||
@ -44,22 +41,6 @@ def init_app(app: DifyApp):
|
||||
)
|
||||
app.register_blueprint(service_api_bp)
|
||||
|
||||
# User-scoped programmatic API. Default empty allowlist = same-origin
|
||||
# only; expand via OPENAPI_CORS_ALLOW_ORIGINS for third-party
|
||||
# integrations. supports_credentials so cookie-authed approve/deny
|
||||
# work; cross-origin OPTIONS without an allowed origin will fail
|
||||
# the same as on the console blueprint.
|
||||
_apply_cors_once(
|
||||
openapi_bp,
|
||||
resources={r"/*": {"origins": dify_config.OPENAPI_CORS_ALLOW_ORIGINS}},
|
||||
supports_credentials=True,
|
||||
allow_headers=list(OPENAPI_HEADERS),
|
||||
methods=["GET", "POST", "PATCH", "DELETE", "OPTIONS"],
|
||||
expose_headers=list(EXPOSED_HEADERS),
|
||||
max_age=OPENAPI_MAX_AGE_SECONDS,
|
||||
)
|
||||
app.register_blueprint(openapi_bp)
|
||||
|
||||
_apply_cors_once(
|
||||
web_bp,
|
||||
resources={
|
||||
|
||||
@ -222,12 +222,6 @@ def init_app(app: DifyApp) -> Celery:
|
||||
"task": "schedule.clean_workflow_runs_task.clean_workflow_runs_task",
|
||||
"schedule": crontab(minute="0", hour="0"),
|
||||
}
|
||||
if dify_config.ENABLE_CLEAN_OAUTH_ACCESS_TOKENS_TASK:
|
||||
imports.append("schedule.clean_oauth_access_tokens_task")
|
||||
beat_schedule["clean_oauth_access_tokens_task"] = {
|
||||
"task": "schedule.clean_oauth_access_tokens_task.clean_oauth_access_tokens_task",
|
||||
"schedule": crontab(minute="0", hour="5", day_of_month=f"*/{day}"),
|
||||
}
|
||||
if dify_config.ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK:
|
||||
imports.append("schedule.workflow_schedule_task")
|
||||
beat_schedule["workflow_schedule_task"] = {
|
||||
|
||||
@ -12,7 +12,7 @@ from constants import HEADER_NAME_APP_CODE
|
||||
from dify_app import DifyApp
|
||||
from extensions.ext_database import db
|
||||
from libs.passport import PassportService
|
||||
from libs.token import extract_access_token, extract_console_cookie_token, extract_webapp_passport
|
||||
from libs.token import extract_access_token, extract_webapp_passport
|
||||
from models import Account, Tenant, TenantAccountJoin
|
||||
from models.model import AppMCPServer, EndUser
|
||||
from services.account_service import AccountService
|
||||
@ -84,24 +84,6 @@ def load_user_from_request(request_from_flask_login: Request) -> LoginUser | Non
|
||||
|
||||
logged_in_account = AccountService.load_logged_in_account(account_id=user_id)
|
||||
return logged_in_account
|
||||
elif request.blueprint == "openapi":
|
||||
# Account-branch device-flow approval routes (approve / deny /
|
||||
# approval-context) sit under @login_required and authenticate via
|
||||
# the console session cookie. Cookie-only on purpose — bearer
|
||||
# tokens (dfoa_/dfoe_) live on the Authorization header and are
|
||||
# validated by AppPipeline, not flask-login.
|
||||
cookie_token = extract_console_cookie_token(request)
|
||||
if not cookie_token:
|
||||
return None
|
||||
try:
|
||||
decoded = PassportService().verify(cookie_token)
|
||||
except Exception:
|
||||
return None
|
||||
user_id = decoded.get("user_id")
|
||||
source = decoded.get("token_source")
|
||||
if source or not user_id:
|
||||
return None
|
||||
return AccountService.load_logged_in_account(account_id=user_id)
|
||||
elif request.blueprint == "web":
|
||||
app_code = request.headers.get(HEADER_NAME_APP_CODE)
|
||||
webapp_token = extract_webapp_passport(app_code, request) if app_code else None
|
||||
|
||||
@ -1,23 +0,0 @@
|
||||
"""Bind the bearer authenticator at startup. Must run after ext_database
|
||||
and ext_redis (needs both factories).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from configs import dify_config
|
||||
from dify_app import DifyApp
|
||||
from extensions.ext_database import db
|
||||
from extensions.ext_redis import redis_client
|
||||
from libs.oauth_bearer import build_and_bind
|
||||
|
||||
|
||||
def is_enabled() -> bool:
    """Extension gate: only initialize when ENABLE_OAUTH_BEARER is set."""
    return dify_config.ENABLE_OAUTH_BEARER
|
||||
|
||||
|
||||
def init_app(app: DifyApp) -> None:
    """Bind the bearer authenticator to the DB session and Redis client.

    ``app`` is unused here but required by the extension init protocol —
    TODO confirm against the extension loader.
    """

    # scoped_session isn't a context manager; request teardown closes it.
    def session_factory():
        return db.session

    build_and_bind(session_factory=session_factory, redis_client=redis_client)
|
||||
@ -1,196 +0,0 @@
|
||||
"""Device-flow security primitives: enterprise_only gate, approval-grant
|
||||
cookie mint/verify/consume, and anti-framing headers.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import secrets
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from functools import wraps
|
||||
|
||||
from flask import Blueprint
|
||||
from werkzeug.exceptions import NotFound
|
||||
|
||||
from libs import jws
|
||||
from libs.token import is_secure
|
||||
from services.feature_service import FeatureService, LicenseStatus
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# enterprise_only decorator
|
||||
# ============================================================================
|
||||
|
||||
|
||||
# Fail-closed: any non-EE-active status (default NONE on CE, plus INACTIVE / EXPIRED / LOST)
|
||||
# is denied. Future LicenseStatus values default to denial unless explicitly admitted.
|
||||
_EE_ENABLED_STATUSES = {LicenseStatus.ACTIVE, LicenseStatus.EXPIRING}
|
||||
|
||||
|
||||
def enterprise_only[**P, R](view: Callable[P, R]) -> Callable[P, R]:
    """404 on CE, passthrough on EE. Apply before rate-limit so CE
    responses don't consume the bucket.

    Preserves the wrapped view's signature via ParamSpec; raises
    werkzeug NotFound so CE deployments can't even detect the route.
    """

    @wraps(view)
    def decorated(*args: P.args, **kwargs: P.kwargs):
        # Re-read features on every request so a license change takes
        # effect without a restart; fail closed on any non-EE status.
        settings = FeatureService.get_system_features()
        if settings.license.status not in _EE_ENABLED_STATUSES:
            raise NotFound()
        return view(*args, **kwargs)

    return decorated
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# approval_grant cookie
|
||||
# ============================================================================
|
||||
|
||||
|
||||
# Cookie carrying the signed approval grant between sso-complete and
# approve-external; path-scoped to the device-flow URL prefix only.
APPROVAL_GRANT_COOKIE_NAME = "device_approval_grant"
APPROVAL_GRANT_COOKIE_PATH = "/openapi/v1/oauth/device"
APPROVAL_GRANT_COOKIE_TTL_SECONDS = 300  # 5 min
NONCE_TTL_SECONDS = 600  # 2x cookie TTL — defeats clock-skew late replay
# Redis key templates for the single-use nonce markers.
NONCE_KEY_FMT = "device_approval_grant_nonce:{nonce}"
SSO_ASSERTION_NONCE_KEY_FMT = "sso_assertion_nonce:{nonce}"
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class ApprovalGrantClaims:
    """Decoded claims of the short-lived approval-grant cookie."""

    # Identity asserted by the IdP.
    subject_email: str
    subject_issuer: str
    # Device-flow user_code this grant is bound to.
    user_code: str
    # Single-use value, burned via consume_approval_grant_nonce().
    nonce: str
    # Double-submit value the client echoes in the X-CSRF-Token header.
    csrf_token: str
    # Expiry instant (UTC) matching the JWS exp claim.
    expires_at: datetime
|
||||
|
||||
|
||||
def mint_approval_grant(
    *,
    keyset: jws.KeySet,
    iss: str,
    subject_email: str,
    subject_issuer: str,
    user_code: str,
) -> tuple[str, ApprovalGrantClaims]:
    """Use ``approval_grant_cookie_kwargs`` to set the cookie — single
    source of truth for Path/HttpOnly/Secure/SameSite.

    Returns (signed cookie value, decoded claims) with a fresh nonce and
    CSRF token; expiry mirrors the cookie TTL.
    """
    expires_at = datetime.now(UTC) + timedelta(seconds=APPROVAL_GRANT_COOKIE_TTL_SECONDS)
    # Draw nonce before csrf_token — keeps the CSPRNG call order stable.
    nonce = _random_opaque()
    csrf_token = _random_opaque()

    claims = ApprovalGrantClaims(
        subject_email=subject_email,
        subject_issuer=subject_issuer,
        user_code=user_code,
        nonce=nonce,
        csrf_token=csrf_token,
        expires_at=expires_at,
    )
    token = jws.sign(
        keyset,
        {
            "iss": iss,
            "subject_email": subject_email,
            "subject_issuer": subject_issuer,
            "user_code": user_code,
            "nonce": nonce,
            "csrf_token": csrf_token,
        },
        aud=jws.AUD_APPROVAL_GRANT,
        ttl_seconds=APPROVAL_GRANT_COOKIE_TTL_SECONDS,
    )
    return token, claims
|
||||
|
||||
|
||||
def verify_approval_grant(keyset: jws.KeySet, token: str) -> ApprovalGrantClaims:
    """Sig + aud + exp only — nonce consumption is the caller's job."""
    decoded = jws.verify(keyset, token, expected_aud=jws.AUD_APPROVAL_GRANT)
    # Rehydrate exp (epoch seconds) into an aware UTC datetime.
    expires_at = datetime.fromtimestamp(decoded["exp"], tz=UTC)
    return ApprovalGrantClaims(
        subject_email=decoded["subject_email"],
        subject_issuer=decoded["subject_issuer"],
        user_code=decoded["user_code"],
        nonce=decoded["nonce"],
        csrf_token=decoded["csrf_token"],
        expires_at=expires_at,
    )
|
||||
|
||||
|
||||
def consume_approval_grant_nonce(redis_client, nonce: str) -> bool:
    """Atomically burn *nonce*; False means empty or already consumed."""
    if not nonce:
        return False
    # SET NX is first-writer-wins: the first caller stores the key and gets
    # a truthy reply; replays see the existing key and get a falsy one.
    created = redis_client.set(
        NONCE_KEY_FMT.format(nonce=nonce),
        "1",
        nx=True,
        ex=NONCE_TTL_SECONDS,
    )
    return bool(created)
|
||||
|
||||
|
||||
def consume_sso_assertion_nonce(redis_client, nonce: str) -> bool:
    """Atomically claim an SSO-assertion nonce.

    True on first use; False for an empty nonce or a replay. Mirrors
    ``consume_approval_grant_nonce`` but uses a separate key namespace.
    """
    if not nonce:
        return False
    key = SSO_ASSERTION_NONCE_KEY_FMT.format(nonce=nonce)
    claimed = redis_client.set(key, "1", nx=True, ex=NONCE_TTL_SECONDS)
    return bool(claimed)
|
||||
|
||||
|
||||
def approval_grant_cookie_kwargs(value: str) -> dict:
    """Keyword args for setting the approval-grant cookie.

    ``secure`` follows is_secure() so HTTP-only deployments don't
    silently drop the cookie.
    """
    kwargs = {
        "key": APPROVAL_GRANT_COOKIE_NAME,
        "value": value,
        "max_age": APPROVAL_GRANT_COOKIE_TTL_SECONDS,
        "path": APPROVAL_GRANT_COOKIE_PATH,
    }
    kwargs.update(secure=is_secure(), httponly=True, samesite="Lax")
    return kwargs
|
||||
|
||||
|
||||
def approval_grant_cleared_cookie_kwargs() -> dict:
    """Keyword args that clear the approval-grant cookie.

    Identical attributes to ``approval_grant_cookie_kwargs`` (Path/Secure/
    HttpOnly/SameSite must match for the browser to delete it) with an
    empty value and max_age=0.
    """
    cleared = approval_grant_cookie_kwargs("")
    cleared["max_age"] = 0
    return cleared
|
||||
|
||||
|
||||
def _random_opaque() -> str:
|
||||
return secrets.token_urlsafe(16)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Anti-framing headers
|
||||
# ============================================================================
|
||||
|
||||
|
||||
# Headers applied to every response of a protected blueprint: the legacy
# X-Frame-Options header plus its CSP successor, both denying all framing.
_ANTI_FRAMING_HEADERS = {
    "X-Frame-Options": "DENY",
    "Content-Security-Policy": "frame-ancestors 'none'",
}
|
||||
|
||||
|
||||
def attach_anti_framing(bp: Blueprint) -> None:
    """Register an after_request hook that adds X-Frame-Options + CSP to
    every response from ``bp`` (CI invariant #4). Existing header values
    are left untouched.
    """

    @bp.after_request
    def _apply_headers(response):  # pyright: ignore[reportUnusedFunction]
        for header_name, header_value in _ANTI_FRAMING_HEADERS.items():
            if header_name not in response.headers:
                response.headers[header_name] = header_value
        return response
|
||||
@ -542,18 +542,3 @@ class RateLimiter:
|
||||
|
||||
self._redis_client.zadd(key, {member: current_time})
|
||||
self._redis_client.expire(key, self.time_window * 2)
|
||||
|
||||
def seconds_until_available(self, email: str) -> int:
    """Seconds until the oldest in-window entry expires, freeing a slot.

    Defensive floor of 1 second. Caller should only invoke this after
    is_rate_limited() returned True.
    """
    key = self._get_key(email)
    entries = cast(Any, self._redis_client).zrange(key, 0, 0, withscores=True)
    if not entries:
        # Window emptied between the limit check and this call.
        return 1
    _member, oldest_score = entries[0]
    slot_frees_at = int(oldest_score) + self.time_window
    return max(slot_frees_at - int(time.time()), 1)
|
||||
|
||||
108
api/libs/jws.py
108
api/libs/jws.py
@ -1,108 +0,0 @@
|
||||
"""HS256 compact JWS keyed on the shared Dify SECRET_KEY. Used by the SSO
|
||||
state envelope, external subject assertion, and approval-grant cookie —
|
||||
all three share one key-set so api ↔ enterprise can verify each other.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
import jwt
|
||||
|
||||
from configs import dify_config
|
||||
|
||||
# Audience ("aud") values — one per token use-case, so a token minted for
# one surface can never be replayed against another (verify() pins aud).
AUD_STATE_ENVELOPE = "api.sso.state_envelope"
AUD_EXT_SUBJECT_ASSERTION = "api.device_flow.external_subject_assertion"
AUD_APPROVAL_GRANT = "api.device_flow.approval_grant"

# Key id ("kid" header) of the current shared-secret signing key.
ACTIVE_KID_V1 = "dify-shared-v1"
|
||||
|
||||
|
||||
class KeySetError(Exception):
    """Raised for a malformed key-set (missing/empty active kid, lookup miss)."""

    pass
|
||||
|
||||
|
||||
class KeySet:
    """kid → HMAC-secret map with one designated signing ("active") kid.

    ``from_entries`` reserves multi-kid construction for rotation slots.
    """

    def __init__(self, entries: dict[str, bytes], active_kid: str) -> None:
        if active_kid not in entries:
            raise KeySetError(f"active kid {active_kid!r} missing from key-set")
        if not entries[active_kid]:
            raise KeySetError(f"active kid {active_kid!r} has empty secret")
        # Defensive copy with each secret coerced to immutable bytes.
        self._entries: dict[str, bytes] = {kid: bytes(secret) for kid, secret in entries.items()}
        self._active_kid = active_kid

    @classmethod
    def from_shared_secret(cls) -> KeySet:
        """Single-kid key-set derived from the shared Dify SECRET_KEY."""
        secret = dify_config.SECRET_KEY
        if not secret:
            raise KeySetError("dify_config.SECRET_KEY is empty; cannot build key-set")
        return cls({ACTIVE_KID_V1: secret.encode("utf-8")}, ACTIVE_KID_V1)

    @classmethod
    def from_entries(cls, entries: dict[str, bytes], active_kid: str) -> KeySet:
        """Explicit constructor for multi-kid (rotation) key-sets."""
        return cls(entries, active_kid)

    @property
    def active_kid(self) -> str:
        """kid used for signing new tokens."""
        return self._active_kid

    def lookup(self, kid: str) -> bytes | None:
        """Secret for *kid*, or None when the kid is unknown."""
        return self._entries.get(kid)
|
||||
|
||||
|
||||
def sign(keyset: KeySet, payload: dict, aud: str, ttl_seconds: int) -> str:
    """HS256-sign *payload* under the key-set's active kid.

    ``iat`` + ``exp`` are injected here; callers must not set them (nor
    ``aud``), and ttl_seconds must be positive.
    """
    if {"aud", "iat", "exp"} & payload.keys():
        raise ValueError("reserved claim present in payload (aud/iat/exp)")
    if ttl_seconds <= 0:
        raise ValueError("ttl_seconds must be positive")

    active = keyset.active_kid
    secret = keyset.lookup(active)
    if secret is None:
        raise KeySetError(f"active kid {active!r} lookup miss")

    issued_at = datetime.now(UTC)
    claims = dict(payload)
    claims.update(aud=aud, iat=issued_at, exp=issued_at + timedelta(seconds=ttl_seconds))
    # kid in the header lets verify() pick the right secret during rotation.
    return jwt.encode(claims, secret, algorithm="HS256", headers={"kid": active, "typ": "JWT"})
|
||||
|
||||
|
||||
class VerifyError(Exception):
    """Raised when a token fails header decode, kid, signature, aud, or exp checks."""

    pass
|
||||
|
||||
|
||||
def verify(keyset: KeySet, token: str, expected_aud: str) -> dict:
    """Verify an HS256 compact JWS against *keyset* and return its claims.

    Unknown kid is rejected — never fall back to the active kid, since
    a past kid value would otherwise be forgeable by anyone who saw it.
    All failure modes surface as VerifyError.
    """
    try:
        unverified_header = jwt.get_unverified_header(token)
    except jwt.PyJWTError as e:
        raise VerifyError(f"decode header: {e}") from e

    kid = unverified_header.get("kid")
    if not kid:
        raise VerifyError("no kid in header")
    secret = keyset.lookup(kid)
    if secret is None:
        raise VerifyError(f"unknown kid {kid!r}")

    try:
        return jwt.decode(token, secret, algorithms=["HS256"], audience=expected_aud)
    except jwt.ExpiredSignatureError as e:
        raise VerifyError("token expired") from e
    except jwt.InvalidAudienceError as e:
        raise VerifyError("aud mismatch") from e
    except jwt.PyJWTError as e:
        raise VerifyError(f"decode: {e}") from e
|
||||
@ -1,608 +0,0 @@
|
||||
"""OAuth bearer primitives.
|
||||
|
||||
To add a token kind: write a Resolver, add a SubjectType + Accepts member,
|
||||
append a TokenKind to build_registry, and update _SUBJECT_TO_ACCEPT.
|
||||
Authenticator + validate_bearer stay untouched.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import uuid
|
||||
from collections.abc import Callable, Iterable
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import UTC, datetime
|
||||
from enum import StrEnum
|
||||
from functools import wraps
|
||||
from typing import Literal, ParamSpec, Protocol, TypeVar
|
||||
|
||||
from flask import g, request
|
||||
from sqlalchemy import select, update
|
||||
from sqlalchemy.orm import Session
|
||||
from werkzeug.exceptions import Forbidden, ServiceUnavailable, Unauthorized
|
||||
|
||||
from configs import dify_config
|
||||
from extensions.ext_database import db
|
||||
from extensions.ext_redis import redis_client
|
||||
from libs.rate_limit import enforce_bearer_rate_limit
|
||||
from models import Account, OAuthAccessToken, TenantAccountJoin
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Contract — types, enums, protocols
|
||||
# ============================================================================
|
||||
|
||||
|
||||
class SubjectType(StrEnum):
    """Who a bearer speaks for: a Dify account or an external SSO subject."""

    ACCOUNT = "account"
    EXTERNAL_SSO = "external_sso"
|
||||
|
||||
|
||||
class Scope(StrEnum):
    """Catalog of bearer scopes recognised by the openapi surface.

    `FULL` is the catch-all carried by `dfoa_` account tokens — it satisfies
    any per-route `require_scope`. `dfoe_` tokens carry the per-feature scopes
    (`APPS_RUN`, `APPS_READ_PERMITTED`).
    """

    FULL = "full"  # satisfies every require_scope check (see require_scope)
    APPS_READ = "apps:read"
    APPS_READ_PERMITTED = "apps:read:permitted"
    APPS_RUN = "apps:run"
|
||||
|
||||
|
||||
class Accepts(StrEnum):
    """Subject types a route is willing to accept as caller."""

    USER_ACCOUNT = "user_account"  # bearer backed by a Dify account (dfoa_)
    USER_EXT_SSO = "user_ext_sso"  # bearer for an external SSO subject (dfoe_)
|
||||
|
||||
|
||||
# Ready-made accept-sets for validate_bearer(accept=...).
ACCEPT_USER_ANY: frozenset[Accepts] = frozenset({Accepts.USER_ACCOUNT, Accepts.USER_EXT_SSO})
ACCEPT_USER_EXT_SSO: frozenset[Accepts] = frozenset({Accepts.USER_EXT_SSO})

# Maps an authenticated subject type to the Accepts member that admits it.
_SUBJECT_TO_ACCEPT: dict[SubjectType, Accepts] = {
    SubjectType.ACCOUNT: Accepts.USER_ACCOUNT,
    SubjectType.EXTERNAL_SSO: Accepts.USER_EXT_SSO,
}
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class AuthContext:
    """Attached to ``g.auth_ctx``. ``scopes`` / ``subject_type`` / ``source``
    come from the TokenKind, not the DB — corrupt rows can't elevate scope.

    `verified_tenants` is a snapshot of the Layer-0 verdict cache at
    authenticate time. Per-request mutations write through to Redis via
    `record_layer0_verdict`; this snapshot is not updated in place (frozen).
    """

    subject_type: SubjectType  # fixed by the matching TokenKind
    subject_email: str | None
    subject_issuer: str | None
    account_id: uuid.UUID | None  # None for external-SSO subjects
    scopes: frozenset[Scope]  # fixed by the TokenKind, never read from the DB
    token_id: uuid.UUID
    source: str  # TokenKind.source label, e.g. "oauth_account"
    expires_at: datetime | None
    token_hash: str  # sha256 hex of the raw bearer; also the Redis cache key input
    verified_tenants: dict[str, bool] = field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class ResolvedRow:
|
||||
subject_email: str | None
|
||||
subject_issuer: str | None
|
||||
account_id: uuid.UUID | None
|
||||
token_id: uuid.UUID
|
||||
expires_at: datetime | None
|
||||
verified_tenants: dict[str, bool] = field(default_factory=dict)
|
||||
|
||||
def to_cache(self) -> dict:
|
||||
return {
|
||||
"subject_email": self.subject_email,
|
||||
"subject_issuer": self.subject_issuer,
|
||||
"account_id": str(self.account_id) if self.account_id else None,
|
||||
"token_id": str(self.token_id),
|
||||
"expires_at": self.expires_at.isoformat() if self.expires_at else None,
|
||||
"verified_tenants": dict(self.verified_tenants),
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_cache(cls, data: dict) -> ResolvedRow:
|
||||
return cls(
|
||||
subject_email=data["subject_email"],
|
||||
subject_issuer=data["subject_issuer"],
|
||||
account_id=uuid.UUID(data["account_id"]) if data["account_id"] else None,
|
||||
token_id=uuid.UUID(data["token_id"]),
|
||||
expires_at=datetime.fromisoformat(data["expires_at"]) if data["expires_at"] else None,
|
||||
verified_tenants=_coerce_verified_tenants(data.get("verified_tenants")),
|
||||
)
|
||||
|
||||
|
||||
def _coerce_verified_tenants(raw: object) -> dict[str, bool]:
|
||||
"""Tolerate legacy entries that stored 'ok'/'denied' string verdicts.
|
||||
|
||||
TODO(post-v1.0): remove once the AuthContext cache TTL has fully cycled
|
||||
on all live deployments (60s TTL → safe to drop one release after rollout).
|
||||
"""
|
||||
if not isinstance(raw, dict):
|
||||
return {}
|
||||
out: dict[str, bool] = {}
|
||||
for k, v in raw.items():
|
||||
if isinstance(v, bool):
|
||||
out[k] = v
|
||||
elif v == "ok":
|
||||
out[k] = True
|
||||
elif v == "denied":
|
||||
out[k] = False
|
||||
return out
|
||||
|
||||
|
||||
class Resolver(Protocol):
    """Structural interface: map a sha256 token hash to a live row, or None."""

    def resolve(self, token_hash: str) -> ResolvedRow | None:  # pragma: no cover - contract
        ...
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class TokenKind:
    """One bearer-token family: prefix, fixed scope-set, and its row resolver."""

    prefix: str  # e.g. "dfoa_"; must be unique within a registry
    subject_type: SubjectType
    scopes: frozenset[Scope]  # granted wholesale to every token of this kind
    source: str  # label copied onto AuthContext.source
    resolver: Resolver

    def matches(self, token: str) -> bool:
        # Prefix dispatch only; cryptographic/liveness checks live in the resolver.
        return token.startswith(self.prefix)
|
||||
|
||||
|
||||
class InvalidBearerError(Exception):
    """Token missing, unknown prefix, or no live row (surfaced as 401 by validate_bearer)."""


class TokenExpiredError(Exception):
    """Hard-expire bookkeeping is the resolver's job before raising."""
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Registry
|
||||
# ============================================================================
|
||||
|
||||
|
||||
class TokenKindRegistry:
    """Ordered lookup table of bearer token kinds, keyed by unique prefix."""

    def __init__(self, kinds: Iterable[TokenKind]) -> None:
        self._kinds: tuple[TokenKind, ...] = tuple(kinds)
        seen: set[str] = set()
        for kind in self._kinds:
            if kind.prefix in seen:
                prefixes = [k.prefix for k in self._kinds]
                raise ValueError(f"duplicate prefix in registry: {prefixes}")
            seen.add(kind.prefix)

    def find(self, token: str) -> TokenKind | None:
        """First kind whose prefix matches *token*, or None."""
        return next((kind for kind in self._kinds if kind.matches(token)), None)

    def kinds(self) -> tuple[TokenKind, ...]:
        """All registered kinds, in registration order."""
        return self._kinds
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Authenticator
|
||||
# ============================================================================
|
||||
|
||||
|
||||
def sha256_hex(token: str) -> str:
    """Hex-encoded SHA-256 digest of *token* (UTF-8)."""
    digest = hashlib.sha256()
    digest.update(token.encode("utf-8"))
    return digest.hexdigest()
|
||||
|
||||
|
||||
class BearerAuthenticator:
    """Front door for bearer auth: prefix dispatch → resolver → AuthContext."""

    def __init__(self, registry: TokenKindRegistry) -> None:
        self._registry = registry

    @property
    def registry(self) -> TokenKindRegistry:
        # Exposed read-only for wiring/introspection.
        return self._registry

    def authenticate(self, token: str) -> AuthContext:
        """Identity + per-token rate limit (single source).

        Both the openapi pipeline (`BearerCheck`) and the decorator
        (`validate_bearer`) call this — rate-limit fires exactly once per
        request regardless of which path hosts the route.

        Raises InvalidBearerError on unknown prefix or dead/unknown row.
        """
        kind = self._registry.find(token)
        if kind is None:
            raise InvalidBearerError("unknown token prefix")
        token_hash = sha256_hex(token)
        row = kind.resolver.resolve(token_hash)
        if row is None:
            raise InvalidBearerError("token unknown or revoked")
        # Rate-limit only after the token proves live, keyed on its hash.
        enforce_bearer_rate_limit(token_hash)
        return AuthContext(
            subject_type=kind.subject_type,
            subject_email=row.subject_email,
            subject_issuer=row.subject_issuer,
            account_id=row.account_id,
            scopes=kind.scopes,  # from the TokenKind, never the DB row
            token_id=row.token_id,
            source=kind.source,
            expires_at=row.expires_at,
            token_hash=token_hash,
            verified_tenants=dict(row.verified_tenants),  # copy into the frozen ctx
        )
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# OAuth access token resolver (PAT resolver would be a sibling class)
|
||||
# ============================================================================
|
||||
|
||||
# Redis cache of resolved tokens, keyed on sha256(bearer).
TOKEN_CACHE_KEY_FMT = "auth:token:{hash}"
POSITIVE_TTL_SECONDS = 60  # live-row entries
NEGATIVE_TTL_SECONDS = 10  # "invalid" entries expire sooner than positive ones
AUDIT_OAUTH_EXPIRED = "oauth.token_expired"  # audit event name emitted on hard expiry

# Which side of the account/external-SSO split a _VariantResolver enforces.
ScopeVariant = Literal["account", "external_sso"]
|
||||
|
||||
|
||||
class OAuthAccessTokenResolver:
    """``.for_account()`` / ``.for_external_sso()`` are variant-scoped views
    sharing DB + cache plumbing.

    Caches resolved rows in Redis under ``auth:token:{sha256}`` with a short
    positive TTL and an even shorter negative ("invalid") TTL.
    """

    def __init__(
        self,
        session_factory,
        redis_client,
        positive_ttl: int = POSITIVE_TTL_SECONDS,
        negative_ttl: int = NEGATIVE_TTL_SECONDS,
    ) -> None:
        self.session_factory = session_factory
        self._redis = redis_client
        self._positive_ttl = positive_ttl
        self._negative_ttl = negative_ttl

    def for_account(self) -> Resolver:
        # Variant view admitting only account-backed ("dfoa_") rows.
        return _VariantResolver(self, variant="account")

    def for_external_sso(self) -> Resolver:
        # Variant view admitting only accountless SSO ("dfoe_") rows.
        return _VariantResolver(self, variant="external_sso")

    def _cache_key(self, token_hash: str) -> str:
        # One key per token hash, shared by all variants.
        return TOKEN_CACHE_KEY_FMT.format(hash=token_hash)

    def cache_get(self, token_hash: str) -> ResolvedRow | None | Literal["invalid"]:
        """Cache lookup: None = miss, "invalid" = cached negative, row = hit.

        Malformed entries are logged and treated as a miss (next caller
        rebuilds from the DB).
        """
        raw = self._redis.get(self._cache_key(token_hash))
        if raw is None:
            return None
        text = raw.decode() if isinstance(raw, (bytes, bytearray)) else raw
        if text == "invalid":
            return "invalid"
        try:
            return ResolvedRow.from_cache(json.loads(text))
        except (ValueError, KeyError):
            logger.warning("auth:token cache entry malformed; treating as miss")
            return None

    def cache_set_positive(self, token_hash: str, row: ResolvedRow) -> None:
        """Cache a live row for the positive TTL."""
        self._redis.setex(
            self._cache_key(token_hash),
            self._positive_ttl,
            json.dumps(row.to_cache()),
        )

    def cache_set_negative(self, token_hash: str) -> None:
        """Cache an "invalid" marker for the (shorter) negative TTL."""
        self._redis.setex(self._cache_key(token_hash), self._negative_ttl, "invalid")

    def hard_expire(self, session: Session, row_id: uuid.UUID | str, token_hash: str) -> None:
        """Atomic CAS — only the worker that flips revoked_at emits audit;
        replays are idempotent.
        """
        # CAS via the revoked_at IS NULL predicate; token_hash is also
        # cleared so the row can no longer be found by hash.
        stmt = (
            update(OAuthAccessToken)
            .where(OAuthAccessToken.id == row_id, OAuthAccessToken.revoked_at.is_(None))
            .values(revoked_at=datetime.now(UTC), token_hash=None)
        )
        result = session.execute(stmt)
        session.commit()
        if result.rowcount == 1:
            # Exactly one winner of the CAS emits the audit line.
            logger.warning(
                "audit: %s token_id=%s",
                AUDIT_OAUTH_EXPIRED,
                row_id,
                extra={"audit": True, "token_id": str(row_id)},
            )
        # Drop any stale positive entry, then pin a fresh negative one.
        self._redis.delete(self._cache_key(token_hash))
        self.cache_set_negative(token_hash)
|
||||
|
||||
|
||||
class _VariantResolver:
    """Variant-scoped view over OAuthAccessTokenResolver: cache-first
    resolution that only admits rows matching its account/SSO variant.
    """

    def __init__(self, parent: OAuthAccessTokenResolver, variant: ScopeVariant) -> None:
        self._parent = parent
        self._variant = variant

    def resolve(self, token_hash: str) -> ResolvedRow | None:
        """Cache → DB resolution; returns None for dead/mismatched tokens."""
        cached = self._parent.cache_get(token_hash)
        if cached == "invalid":
            return None
        if cached is not None and not isinstance(cached, str):
            # Cache hit — still enforce the variant before trusting it.
            if not self._matches_variant(cached):
                return None
            return cached

        # Flask-SQLAlchemy's scoped_session is request-bound and not a
        # context manager; use it directly.
        session = self._parent.session_factory()
        row = self._load_from_db(session, token_hash)
        if row is None:
            self._parent.cache_set_negative(token_hash)
            return None

        now = datetime.now(UTC)
        if row.expires_at is not None and row.expires_at <= now:
            # Past expiry: revoke + audit + negative-cache, then deny.
            self._parent.hard_expire(session, row.id, token_hash)
            return None

        if not self._matches_variant_model(row):
            # A live row whose prefix disagrees with its account linkage is
            # an internal invariant violation — log loudly, deny quietly.
            logger.error(
                "internal_state_invariant: account_id/prefix mismatch token_id=%s prefix=%s",
                row.id,
                row.prefix,
            )
            return None

        resolved = ResolvedRow(
            subject_email=row.subject_email,
            subject_issuer=row.subject_issuer,
            account_id=uuid.UUID(str(row.account_id)) if row.account_id else None,
            token_id=uuid.UUID(str(row.id)),
            expires_at=row.expires_at,
        )
        self._parent.cache_set_positive(token_hash, resolved)
        return resolved

    def _matches_variant(self, row: ResolvedRow) -> bool:
        # Cached-row check: account variant needs an account_id, SSO must not have one.
        has_account = row.account_id is not None
        if self._variant == "account":
            return has_account
        return not has_account

    def _matches_variant_model(self, row: OAuthAccessToken) -> bool:
        # DB-row check: additionally pins the expected token prefix.
        has_account = row.account_id is not None
        if self._variant == "account":
            return has_account and row.prefix == "dfoa_"
        return (not has_account) and row.prefix == "dfoe_"

    def _load_from_db(self, session: Session, token_hash: str) -> OAuthAccessToken | None:
        # Only non-revoked rows are findable by hash.
        return (
            session.query(OAuthAccessToken)
            .filter(
                OAuthAccessToken.token_hash == token_hash,
                OAuthAccessToken.revoked_at.is_(None),
            )
            .one_or_none()
        )
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Layer 0 — workspace membership cache + helper
|
||||
# ============================================================================
|
||||
|
||||
|
||||
def record_layer0_verdict(token_hash: str, tenant_id: str, verdict: bool) -> None:
    """Merge a Layer-0 membership verdict into the AuthContext cache entry at
    `auth:token:{hash}`.

    Best-effort write-through: silently no-ops when the entry is missing,
    negative ("invalid"), malformed, or already expired — the next request
    rebuilds it via authenticate() and re-runs Layer 0.
    """
    cache_key = TOKEN_CACHE_KEY_FMT.format(hash=token_hash)
    cached = redis_client.get(cache_key)
    if cached is None:
        return
    payload = cached.decode() if isinstance(cached, (bytes, bytearray)) else cached
    if payload == "invalid":
        return
    try:
        entry = json.loads(payload)
    except (ValueError, KeyError):
        return
    remaining_ttl = redis_client.ttl(cache_key)
    if remaining_ttl <= 0:
        return
    # Preserve the entry's remaining TTL — don't extend the cache window.
    verdicts = entry.setdefault("verified_tenants", {})
    verdicts[tenant_id] = verdict
    redis_client.setex(cache_key, remaining_ttl, json.dumps(entry))
|
||||
|
||||
|
||||
def check_workspace_membership(
    *,
    account_id: uuid.UUID | str,
    tenant_id: str,
    token_hash: str,
    cached_verdicts: dict[str, bool],
) -> None:
    """Layer-0 enforcement core. Raises `Forbidden` on deny, returns on allow.

    Shared by the pipeline step (`WorkspaceMembershipCheck`) and the
    inline helper (`require_workspace_member`). Caller is responsible for
    short-circuiting on EE / SSO subjects before invoking — this function
    runs the membership + active-status checks unconditionally.
    """
    # Fast path: reuse a verdict already cached for this (token, tenant) pair.
    cached = cached_verdicts.get(tenant_id)
    if cached is True:
        return
    if cached is False:
        raise Forbidden("workspace_membership_revoked")

    # Membership: a tenant_account_joins row must link account → tenant.
    join = db.session.execute(
        select(TenantAccountJoin.id).where(
            TenantAccountJoin.account_id == account_id,
            TenantAccountJoin.tenant_id == tenant_id,
        )
    ).scalar_one_or_none()
    if join is None:
        record_layer0_verdict(token_hash, tenant_id, False)
        raise Forbidden("workspace_membership_revoked")

    # The account row must still report status == "active".
    status = db.session.execute(select(Account.status).where(Account.id == account_id)).scalar_one_or_none()
    if status != "active":
        record_layer0_verdict(token_hash, tenant_id, False)
        raise Forbidden("workspace_membership_revoked")

    # Allow — persist the positive verdict for subsequent requests.
    record_layer0_verdict(token_hash, tenant_id, True)
|
||||
|
||||
|
||||
def require_workspace_member(ctx: AuthContext, tenant_id: str) -> None:
    """AuthContext-flavoured wrapper around `check_workspace_membership`.

    No-op on EE (gateway RBAC owns tenant isolation) and for SSO subjects
    (no `tenant_account_joins` row by definition).
    """
    if dify_config.ENTERPRISE_ENABLED:
        return
    is_account_subject = ctx.subject_type == SubjectType.ACCOUNT and ctx.account_id is not None
    if not is_account_subject:
        return
    check_workspace_membership(
        account_id=ctx.account_id,
        tenant_id=tenant_id,
        token_hash=ctx.token_hash,
        cached_verdicts=ctx.verified_tenants,
    )
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Decorator — route-level bearer gate
|
||||
# ============================================================================
|
||||
|
||||
|
||||
# Process-wide singleton, installed once by the app factory via bind_authenticator().
_authenticator: BearerAuthenticator | None = None


def bind_authenticator(authenticator: BearerAuthenticator) -> None:
    """Install the process-wide authenticator (called once at startup)."""
    global _authenticator
    _authenticator = authenticator


def get_authenticator() -> BearerAuthenticator:
    """Return the bound authenticator; RuntimeError if wiring was skipped."""
    if _authenticator is None:
        raise RuntimeError("BearerAuthenticator not bound; call bind_authenticator at startup")
    return _authenticator
|
||||
|
||||
|
||||
def _extract_bearer(req) -> str | None:
|
||||
header = req.headers.get("Authorization", "")
|
||||
scheme, _, value = header.partition(" ")
|
||||
if scheme.lower() != "bearer" or not value:
|
||||
return None
|
||||
return value.strip()
|
||||
|
||||
|
||||
_DP = ParamSpec("_DP")
_DR = TypeVar("_DR")


def validate_bearer(*, accept: frozenset[Accepts]) -> Callable[[Callable[_DP, _DR]], Callable[_DP, _DR]]:
    """Route decorator gating access to user-level OAuth bearers.

    Opt-in: omitting it leaves the route unauthenticated.

    Resolves user-level OAuth bearers (``dfoa_`` / ``dfoe_``). Legacy
    ``app-`` keys belong to ``service_api/wraps.py:validate_app_token``
    and are rejected here as the wrong auth scheme for this surface.

    On success the AuthContext is published as ``g.auth_ctx`` for
    downstream decorators (e.g. ``require_scope``) and the handler.

    Raises:
        Unauthorized: missing bearer or resolution failure.
        ServiceUnavailable: authenticator never bound (feature flag off).
        Forbidden: authenticated subject type not in *accept*.
    """

    def wrap(fn: Callable[_DP, _DR]) -> Callable[_DP, _DR]:
        @wraps(fn)
        def inner(*args: _DP.args, **kwargs: _DP.kwargs) -> _DR:
            token = _extract_bearer(request)
            if token is None:
                raise Unauthorized("missing bearer token")

            # Fail fast with a 503 (not a RuntimeError) when the feature
            # flag left the authenticator unbound.
            if _authenticator is None:
                raise ServiceUnavailable("bearer_auth_disabled: set ENABLE_OAUTH_BEARER=true to enable")

            try:
                ctx = get_authenticator().authenticate(token)
            except InvalidBearerError as e:
                # Fix: chain the cause (`from e`) so logs retain the
                # resolver's reason; the bare raise dropped it (B904).
                raise Unauthorized(str(e)) from e

            if _SUBJECT_TO_ACCEPT[ctx.subject_type] not in accept:
                raise Forbidden("token subject type not accepted here")

            g.auth_ctx = ctx
            return fn(*args, **kwargs)

        return inner

    return wrap
|
||||
|
||||
|
||||
def bearer_feature_required[**P, R](fn: Callable[P, R]) -> Callable[P, R]:
|
||||
"""503 if ENABLE_OAUTH_BEARER is off — minted tokens would be unusable
|
||||
without the authenticator, so fail fast instead of approving silently.
|
||||
"""
|
||||
|
||||
@wraps(fn)
|
||||
def inner(*args: P.args, **kwargs: P.kwargs) -> R:
|
||||
if not dify_config.ENABLE_OAUTH_BEARER:
|
||||
raise ServiceUnavailable("bearer_auth_disabled: set ENABLE_OAUTH_BEARER=true to enable")
|
||||
return fn(*args, **kwargs)
|
||||
|
||||
return inner
|
||||
|
||||
|
||||
def require_scope(scope: Scope) -> Callable:
    """Route-level scope gate — must run AFTER validate_bearer so that
    g.auth_ctx is set. Raises Forbidden('insufficient_scope: <scope>')
    when the bearer lacks both the requested scope and `Scope.FULL`.
    """

    def wrap(fn: Callable) -> Callable:
        @wraps(fn)
        def inner(*args, **kwargs):
            ctx = getattr(g, "auth_ctx", None)
            if ctx is None:
                # Decorator misordering is a programming error, not a 403.
                raise RuntimeError(
                    "require_scope used without validate_bearer; stack @validate_bearer above @require_scope"
                )
            granted = ctx.scopes
            if scope not in granted and Scope.FULL not in granted:
                raise Forbidden(f"insufficient_scope: {scope}")
            return fn(*args, **kwargs)

        return inner

    return wrap
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Wiring — called once from the app factory
|
||||
# ============================================================================
|
||||
|
||||
|
||||
def build_registry(session_factory, redis_client) -> TokenKindRegistry:
    """Declare the two user-level OAuth bearer kinds (``dfoa_`` / ``dfoe_``)."""
    oauth = OAuthAccessTokenResolver(session_factory, redis_client)
    account_kind = TokenKind(
        prefix="dfoa_",
        subject_type=SubjectType.ACCOUNT,
        scopes=frozenset({Scope.FULL}),
        source="oauth_account",
        resolver=oauth.for_account(),
    )
    external_sso_kind = TokenKind(
        prefix="dfoe_",
        subject_type=SubjectType.EXTERNAL_SSO,
        scopes=frozenset({Scope.APPS_RUN, Scope.APPS_READ_PERMITTED}),
        source="oauth_external_sso",
        resolver=oauth.for_external_sso(),
    )
    return TokenKindRegistry([account_kind, external_sso_kind])
|
||||
|
||||
|
||||
def build_and_bind(session_factory, redis_client) -> BearerAuthenticator:
    """App-factory wiring: build the registry, bind the singleton, return it."""
    authenticator = BearerAuthenticator(build_registry(session_factory, redis_client))
    bind_authenticator(authenticator)
    return authenticator
|
||||
@ -1,140 +0,0 @@
|
||||
"""Typed rate-limit decorator over ``libs.helper.RateLimiter`` (sliding-
|
||||
window Redis ZSET). Apply after auth decorators so scopes can read
|
||||
``g.auth_ctx``. Use :func:`enforce` when the bucket key is computed
|
||||
in-handler. RFC-8628 ``slow_down`` is inline — its response shape isn't
|
||||
generic 429.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
from enum import StrEnum
|
||||
from functools import wraps
|
||||
from typing import ParamSpec, TypeVar
|
||||
|
||||
from flask import g, jsonify, make_response, request, session
|
||||
from werkzeug.exceptions import TooManyRequests
|
||||
|
||||
from configs import dify_config
|
||||
from libs.helper import RateLimiter, extract_remote_ip
|
||||
|
||||
|
||||
class RateLimitScope(StrEnum):
    """Dimension a rate-limit bucket is keyed on (see ``_one_key``)."""

    IP = "ip"  # remote address
    SESSION = "session"  # Flask session id
    ACCOUNT = "account"  # g.auth_ctx.account_id
    SUBJECT_EMAIL = "subject_email"  # g.auth_ctx.subject_email
    TOKEN_ID = "token_id"  # g.auth_ctx.token_id
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class RateLimit:
    """Declarative spec: at most ``limit`` hits per ``window`` per bucket."""

    limit: int
    window: timedelta
    scopes: tuple[RateLimitScope, ...]  # composite bucket key joins each scope's fragment
|
||||
|
||||
|
||||
# Per-endpoint limit specs. Names encode endpoint + bucketing dimension.
LIMIT_DEVICE_CODE_PER_IP = RateLimit(60, timedelta(hours=1), (RateLimitScope.IP,))
LIMIT_SSO_INITIATE_PER_IP = RateLimit(60, timedelta(hours=1), (RateLimitScope.IP,))
LIMIT_APPROVE_EXT_PER_EMAIL = RateLimit(10, timedelta(hours=1), (RateLimitScope.SUBJECT_EMAIL,))
LIMIT_APPROVE_CONSOLE = RateLimit(10, timedelta(hours=1), (RateLimitScope.SESSION,))
LIMIT_LOOKUP_PUBLIC = RateLimit(60, timedelta(minutes=5), (RateLimitScope.IP,))
LIMIT_ME_PER_ACCOUNT = RateLimit(60, timedelta(minutes=1), (RateLimitScope.ACCOUNT,))
LIMIT_ME_PER_EMAIL = RateLimit(60, timedelta(minutes=1), (RateLimitScope.SUBJECT_EMAIL,))
# The only configurable limit; consumed by enforce_bearer_rate_limit.
LIMIT_BEARER_PER_TOKEN = RateLimit(
    limit=dify_config.OPENAPI_RATE_LIMIT_PER_TOKEN,
    window=timedelta(minutes=1),
    scopes=(RateLimitScope.TOKEN_ID,),  # bucket key composed by caller from sha256(token)
)
|
||||
|
||||
|
||||
def _one_key(scope: RateLimitScope) -> str:
    """Bucket-key fragment for a single scope.

    Auth-context scopes fall back to an 'anon' bucket when no (or an
    incomplete) AuthContext is present; g is only touched in the branches
    that need it so IP/session scopes work outside an app context.
    """
    if scope is RateLimitScope.IP:
        return f"ip:{extract_remote_ip(request) or 'unknown'}"
    if scope is RateLimitScope.SESSION:
        return f"session:{session.get('_id', 'anon')}"
    if scope is RateLimitScope.ACCOUNT:
        ctx = getattr(g, "auth_ctx", None)
        return f"account:{ctx.account_id}" if ctx and ctx.account_id else "account:anon"
    if scope is RateLimitScope.SUBJECT_EMAIL:
        ctx = getattr(g, "auth_ctx", None)
        return f"subject:{ctx.subject_email}" if ctx and ctx.subject_email else "subject:anon"
    if scope is RateLimitScope.TOKEN_ID:
        ctx = getattr(g, "auth_ctx", None)
        return f"token:{ctx.token_id}" if ctx and ctx.token_id else "token:anon"
|
||||
|
||||
|
||||
def _composite_key(scopes: tuple[RateLimitScope, ...]) -> str:
    """Join the per-scope fragments with '|' into one bucket key."""
    fragments = [_one_key(scope) for scope in scopes]
    return "|".join(fragments)
|
||||
|
||||
|
||||
def _limiter_prefix(scopes: tuple[RateLimitScope, ...]) -> str:
|
||||
return "rl:" + "+".join(s.value for s in scopes)
|
||||
|
||||
|
||||
def _build_limiter(spec: RateLimit) -> RateLimiter:
    """Materialise a Redis sliding-window RateLimiter from a declarative spec."""
    window_seconds = int(spec.window.total_seconds())
    return RateLimiter(
        prefix=_limiter_prefix(spec.scopes),
        max_attempts=spec.limit,
        time_window=window_seconds,
    )
|
||||
|
||||
|
||||
_P = ParamSpec("_P")
_R = TypeVar("_R")


def rate_limit(spec: RateLimit) -> Callable[[Callable[_P, _R]], Callable[_P, _R]]:
    """Decorator form of the limiter.

    Apply after the auth decorators whose context the scopes read, so
    account/subject/token buckets see ``g.auth_ctx``.
    """
    limiter = _build_limiter(spec)

    def wrap(fn: Callable[_P, _R]) -> Callable[_P, _R]:
        @wraps(fn)
        def guarded(*args: _P.args, **kwargs: _P.kwargs) -> _R:
            bucket = _composite_key(spec.scopes)
            if limiter.is_rate_limited(bucket):
                raise TooManyRequests("rate_limited")
            limiter.increment_rate_limit(bucket)
            return fn(*args, **kwargs)

        return guarded

    return wrap
|
||||
|
||||
|
||||
def enforce(spec: RateLimit, *, key: str) -> None:
|
||||
"""Imperative form — caller composes the bucket key to match scope
|
||||
semantics (the key is opaque here).
|
||||
"""
|
||||
limiter = _build_limiter(spec)
|
||||
if limiter.is_rate_limited(key):
|
||||
raise TooManyRequests("rate_limited")
|
||||
limiter.increment_rate_limit(key)
|
||||
|
||||
|
||||
def enforce_bearer_rate_limit(token_hash: str) -> None:
|
||||
"""Per-token rate limit on /openapi/v1/* bearer-authed routes.
|
||||
|
||||
Bucket key = ``token:<sha256_hex>`` so the same token shares one
|
||||
bucket across api replicas (Redis-backed sliding window).
|
||||
"""
|
||||
limiter = _build_limiter(LIMIT_BEARER_PER_TOKEN)
|
||||
key = f"token:{token_hash}"
|
||||
if limiter.is_rate_limited(key):
|
||||
retry_after = limiter.seconds_until_available(key)
|
||||
response = make_response(
|
||||
jsonify({"error": "rate_limited", "retry_after_ms": retry_after * 1000}),
|
||||
429,
|
||||
)
|
||||
response.headers["Retry-After"] = str(retry_after)
|
||||
raise TooManyRequests(response=response)
|
||||
limiter.increment_rate_limit(key)
|
||||
@ -72,15 +72,11 @@ def extract_csrf_token_from_cookie(request: Request) -> str | None:
|
||||
return request.cookies.get(_real_cookie_name(COOKIE_NAME_CSRF_TOKEN))
|
||||
|
||||
|
||||
def extract_console_cookie_token(request: Request) -> str | None:
|
||||
"""Cookie-only console session token. Used by /openapi/v1/oauth/device/*
|
||||
approval routes, which must not fall through to the Authorization header
|
||||
(that's where dfoa_/dfoe_ bearers live — they aren't JWTs)."""
|
||||
return request.cookies.get(_real_cookie_name(COOKIE_NAME_ACCESS_TOKEN))
|
||||
|
||||
|
||||
def extract_access_token(request: Request) -> str | None:
|
||||
return extract_console_cookie_token(request) or _try_extract_from_header(request)
|
||||
def _try_extract_from_cookie(request: Request) -> str | None:
|
||||
return request.cookies.get(_real_cookie_name(COOKIE_NAME_ACCESS_TOKEN))
|
||||
|
||||
return _try_extract_from_cookie(request) or _try_extract_from_header(request)
|
||||
|
||||
|
||||
def extract_webapp_access_token(request: Request) -> str | None:
|
||||
|
||||
@ -1,104 +0,0 @@
|
||||
"""add oauth_access_tokens table
|
||||
|
||||
Revision ID: d4a5e1f3c9b7
|
||||
Revises: 227822d22895, b69ca54b9208, 2a3aebbbf4bb
|
||||
Create Date: 2026-04-23 22:00:00.000000
|
||||
|
||||
Merges the three open heads at time of authoring (add_workflow_comments_table,
|
||||
add_chatbot_color_theme, add_app_tracing) into a single parent so the new
|
||||
oauth_access_tokens table sits on a definite linear chain thereafter.
|
||||
|
||||
Table stores user-level OAuth bearer tokens minted via the device-flow grant
|
||||
(difyctl auth login). PAT storage (personal_access_tokens) is a separate
|
||||
table not added in this migration.
|
||||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "d4a5e1f3c9b7"
|
||||
down_revision = ("227822d22895", "b69ca54b9208", "2a3aebbbf4bb")
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.create_table(
|
||||
"oauth_access_tokens",
|
||||
sa.Column(
|
||||
"id",
|
||||
postgresql.UUID(as_uuid=True),
|
||||
server_default=sa.text("gen_random_uuid()"),
|
||||
nullable=False,
|
||||
primary_key=True,
|
||||
),
|
||||
sa.Column("subject_email", sa.Text(), nullable=False),
|
||||
sa.Column("subject_issuer", sa.Text(), nullable=True),
|
||||
sa.Column("account_id", postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.Column("client_id", sa.String(length=64), nullable=False),
|
||||
sa.Column("device_label", sa.Text(), nullable=False),
|
||||
sa.Column("prefix", sa.String(length=8), nullable=False),
|
||||
sa.Column("token_hash", sa.String(length=64), nullable=True, unique=True),
|
||||
sa.Column(
|
||||
"created_at",
|
||||
sa.TIMESTAMP(timezone=True),
|
||||
server_default=sa.text("NOW()"),
|
||||
nullable=False,
|
||||
),
|
||||
sa.Column("last_used_at", sa.TIMESTAMP(timezone=True), nullable=True),
|
||||
sa.Column("expires_at", sa.TIMESTAMP(timezone=True), nullable=False),
|
||||
sa.Column("revoked_at", sa.TIMESTAMP(timezone=True), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["account_id"],
|
||||
["accounts.id"],
|
||||
name="fk_oauth_access_tokens_account_id",
|
||||
ondelete="SET NULL",
|
||||
),
|
||||
)
|
||||
|
||||
op.create_index(
|
||||
"idx_oauth_subject_email",
|
||||
"oauth_access_tokens",
|
||||
["subject_email"],
|
||||
postgresql_where=sa.text("revoked_at IS NULL"),
|
||||
)
|
||||
op.create_index(
|
||||
"idx_oauth_account",
|
||||
"oauth_access_tokens",
|
||||
["account_id"],
|
||||
postgresql_where=sa.text("revoked_at IS NULL AND account_id IS NOT NULL"),
|
||||
)
|
||||
op.create_index(
|
||||
"idx_oauth_client",
|
||||
"oauth_access_tokens",
|
||||
["subject_email", "client_id"],
|
||||
postgresql_where=sa.text("revoked_at IS NULL"),
|
||||
)
|
||||
op.create_index(
|
||||
"idx_oauth_token_hash",
|
||||
"oauth_access_tokens",
|
||||
["token_hash"],
|
||||
postgresql_where=sa.text("revoked_at IS NULL"),
|
||||
)
|
||||
# Partial unique index — rotate-in-place keyed on (subject, client, device).
|
||||
# The app always writes a non-NULL subject_issuer (account flow uses a
|
||||
# sentinel, external-SSO uses the verified IdP issuer); without that the
|
||||
# composite key would never collide because Postgres treats NULLs as
|
||||
# distinct in unique indices.
|
||||
op.create_index(
|
||||
"uq_oauth_active_per_device",
|
||||
"oauth_access_tokens",
|
||||
["subject_email", "subject_issuer", "client_id", "device_label"],
|
||||
unique=True,
|
||||
postgresql_where=sa.text("revoked_at IS NULL"),
|
||||
)
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_index("uq_oauth_active_per_device", table_name="oauth_access_tokens")
|
||||
op.drop_index("idx_oauth_token_hash", table_name="oauth_access_tokens")
|
||||
op.drop_index("idx_oauth_client", table_name="oauth_access_tokens")
|
||||
op.drop_index("idx_oauth_account", table_name="oauth_access_tokens")
|
||||
op.drop_index("idx_oauth_subject_email", table_name="oauth_access_tokens")
|
||||
op.drop_table("oauth_access_tokens")
|
||||
@ -73,7 +73,7 @@ from .model import (
|
||||
TrialApp,
|
||||
UploadFile,
|
||||
)
|
||||
from .oauth import DatasourceOauthParamConfig, DatasourceProvider, OAuthAccessToken
|
||||
from .oauth import DatasourceOauthParamConfig, DatasourceProvider
|
||||
from .provider import (
|
||||
LoadBalancingModelConfig,
|
||||
Provider,
|
||||
@ -177,7 +177,6 @@ __all__ = [
|
||||
"MessageChain",
|
||||
"MessageFeedback",
|
||||
"MessageFile",
|
||||
"OAuthAccessToken",
|
||||
"OperationLog",
|
||||
"PinnedConversation",
|
||||
"Provider",
|
||||
|
||||
@ -84,35 +84,3 @@ class DatasourceOauthTenantParamConfig(TypeBase):
|
||||
onupdate=func.current_timestamp(),
|
||||
init=False,
|
||||
)
|
||||
|
||||
|
||||
class OAuthAccessToken(TypeBase):
|
||||
"""Device-flow bearer. account_id NOT NULL ⇒ dfoa_ (Dify account,
|
||||
subject_issuer = "dify:account" sentinel); account_id NULL +
|
||||
subject_issuer = verified IdP issuer ⇒ dfoe_ (external SSO, EE-only).
|
||||
subject_issuer is non-NULL for all rows the app writes — Postgres
|
||||
treats NULLs as distinct in unique indices, so the partial unique
|
||||
index on (subject_email, subject_issuer, client_id, device_label)
|
||||
WHERE revoked_at IS NULL would otherwise fail to rotate in place.
|
||||
"""
|
||||
|
||||
__tablename__ = "oauth_access_tokens"
|
||||
__table_args__ = (sa.PrimaryKeyConstraint("id", name="oauth_access_tokens_pkey"),)
|
||||
|
||||
id: Mapped[str] = mapped_column(
|
||||
StringUUID, insert_default=lambda: str(uuidv7()), default_factory=lambda: str(uuidv7()), init=False
|
||||
)
|
||||
subject_email: Mapped[str] = mapped_column(sa.Text, nullable=False)
|
||||
client_id: Mapped[str] = mapped_column(sa.String(64), nullable=False)
|
||||
device_label: Mapped[str] = mapped_column(sa.Text, nullable=False)
|
||||
prefix: Mapped[str] = mapped_column(sa.String(8), nullable=False)
|
||||
expires_at: Mapped[datetime] = mapped_column(sa.DateTime(timezone=True), nullable=False)
|
||||
subject_issuer: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None)
|
||||
account_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None)
|
||||
token_hash: Mapped[str | None] = mapped_column(sa.String(64), nullable=True, default=None)
|
||||
last_used_at: Mapped[datetime | None] = mapped_column(sa.DateTime(timezone=True), nullable=True, default=None)
|
||||
revoked_at: Mapped[datetime | None] = mapped_column(sa.DateTime(timezone=True), nullable=True, default=None)
|
||||
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
sa.DateTime(timezone=True), nullable=False, server_default=func.now(), init=False
|
||||
)
|
||||
|
||||
@ -1206,7 +1206,6 @@ class WorkflowAppLogCreatedFrom(StrEnum):
|
||||
SERVICE_API = "service-api"
|
||||
WEB_APP = "web-app"
|
||||
INSTALLED_APP = "installed-app"
|
||||
OPENAPI = "openapi"
|
||||
|
||||
@classmethod
|
||||
def value_of(cls, value: str) -> "WorkflowAppLogCreatedFrom":
|
||||
|
||||
@ -6,7 +6,7 @@ requires-python = "~=3.12.0"
|
||||
dependencies = [
|
||||
# Legacy: mature and widely deployed
|
||||
"bleach>=6.3.0",
|
||||
"boto3>=1.42.88",
|
||||
"boto3>=1.42.91",
|
||||
"celery>=5.6.3",
|
||||
"croniter>=6.2.2",
|
||||
"flask-cors>=6.0.2",
|
||||
@ -30,7 +30,7 @@ dependencies = [
|
||||
"flask-migrate>=4.1.0,<5.0.0",
|
||||
"flask-orjson>=2.0.0,<3.0.0",
|
||||
"flask-restx>=1.3.2,<2.0.0",
|
||||
"google-cloud-aiplatform>=1.147.0,<2.0.0",
|
||||
"google-cloud-aiplatform>=1.148.1,<2.0.0",
|
||||
"httpx[socks]>=0.28.1,<1.0.0",
|
||||
"opentelemetry-distro>=0.62b0,<1.0.0",
|
||||
"opentelemetry-instrumentation-celery>=0.62b0,<1.0.0",
|
||||
@ -46,7 +46,7 @@ dependencies = [
|
||||
"fastopenapi[flask]~=0.7.0",
|
||||
"graphon~=0.2.2",
|
||||
"httpx-sse~=0.4.0",
|
||||
"json-repair~=0.59.2",
|
||||
"json-repair~=0.59.4",
|
||||
]
|
||||
# Before adding new dependency, consider place it in
|
||||
# alphabet order (a-z) and suitable group.
|
||||
@ -174,7 +174,7 @@ dev = [
|
||||
"pytest-timeout>=2.4.0",
|
||||
"pytest-xdist>=3.8.0",
|
||||
"pyrefly>=0.61.1",
|
||||
"xinference-client>=2.4.0",
|
||||
"xinference-client>=2.5.0",
|
||||
]
|
||||
|
||||
############################################################
|
||||
@ -183,13 +183,13 @@ dev = [
|
||||
############################################################
|
||||
storage = [
|
||||
"azure-storage-blob>=12.28.0",
|
||||
"bce-python-sdk>=0.9.69",
|
||||
"bce-python-sdk>=0.9.70",
|
||||
"cos-python-sdk-v5>=1.9.41",
|
||||
"esdk-obs-python>=3.22.2",
|
||||
"google-cloud-storage>=3.10.1",
|
||||
"opendal>=0.46.0",
|
||||
"oss2>=2.19.1",
|
||||
"supabase>=2.18.1",
|
||||
"supabase>=2.28.3",
|
||||
"tos>=2.9.0",
|
||||
]
|
||||
|
||||
@ -266,7 +266,7 @@ vdb-vastbase = ["dify-vdb-vastbase"]
|
||||
vdb-vikingdb = ["dify-vdb-vikingdb"]
|
||||
vdb-weaviate = ["dify-vdb-weaviate"]
|
||||
# Optional client used by some tests / integrations (not a vector backend plugin)
|
||||
vdb-xinference = ["xinference-client>=2.4.0"]
|
||||
vdb-xinference = ["xinference-client>=2.5.0"]
|
||||
|
||||
trace-all = [
|
||||
"dify-trace-aliyun",
|
||||
|
||||
@ -1,54 +0,0 @@
|
||||
"""DELETE oauth_access_tokens past retention. Revocation is UPDATE
|
||||
(token_id stays for audits) so rows accumulate across re-logins, and
|
||||
expired-but-never-presented rows have no hard-expire trigger — both get
|
||||
pruned here. Spec: docs/specs/v1.0/server/tokens.md §Hard-expire.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import time
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
import click
|
||||
from sqlalchemy import delete, or_, select
|
||||
|
||||
import app
|
||||
from configs import dify_config
|
||||
from extensions.ext_database import db
|
||||
from models.oauth import OAuthAccessToken
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
DELETE_BATCH_SIZE = 500
|
||||
|
||||
|
||||
@app.celery.task(queue="retention")
|
||||
def clean_oauth_access_tokens_task():
|
||||
click.echo(click.style("Start clean oauth_access_tokens.", fg="green"))
|
||||
retention_days = int(dify_config.OAUTH_ACCESS_TOKEN_RETENTION_DAYS)
|
||||
cutoff = datetime.now(UTC) - timedelta(days=retention_days)
|
||||
start_at = time.perf_counter()
|
||||
|
||||
candidates = or_(
|
||||
OAuthAccessToken.revoked_at < cutoff,
|
||||
# Zombies: expired but never re-presented, so middleware never flipped them.
|
||||
(OAuthAccessToken.revoked_at.is_(None)) & (OAuthAccessToken.expires_at < cutoff),
|
||||
)
|
||||
|
||||
total = 0
|
||||
while True:
|
||||
ids = db.session.scalars(select(OAuthAccessToken.id).where(candidates).limit(DELETE_BATCH_SIZE)).all()
|
||||
if not ids:
|
||||
break
|
||||
db.session.execute(delete(OAuthAccessToken).where(OAuthAccessToken.id.in_(ids)))
|
||||
db.session.commit()
|
||||
total += len(ids)
|
||||
|
||||
end_at = time.perf_counter()
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Cleaned {total} oauth_access_tokens rows older than {retention_days}d in {end_at - start_at:.2f}s",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
@ -112,6 +112,14 @@ REFRESH_TOKEN_EXPIRY = timedelta(days=dify_config.REFRESH_TOKEN_EXPIRE_DAYS)
|
||||
|
||||
|
||||
class AccountService:
|
||||
# Phase-bound token metadata for the change-email flow. Tokens carry the
|
||||
# current phase so that downstream endpoints can enforce proper progression
|
||||
CHANGE_EMAIL_TOKEN_PHASE_KEY = "email_change_phase"
|
||||
CHANGE_EMAIL_PHASE_OLD = "old_email"
|
||||
CHANGE_EMAIL_PHASE_OLD_VERIFIED = "old_email_verified"
|
||||
CHANGE_EMAIL_PHASE_NEW = "new_email"
|
||||
CHANGE_EMAIL_PHASE_NEW_VERIFIED = "new_email_verified"
|
||||
|
||||
reset_password_rate_limiter = RateLimiter(prefix="reset_password_rate_limit", max_attempts=1, time_window=60 * 1)
|
||||
email_register_rate_limiter = RateLimiter(prefix="email_register_rate_limit", max_attempts=1, time_window=60 * 1)
|
||||
email_code_login_rate_limiter = RateLimiter(
|
||||
@ -576,13 +584,20 @@ class AccountService:
|
||||
raise ValueError("Email must be provided.")
|
||||
if not phase:
|
||||
raise ValueError("phase must be provided.")
|
||||
if phase not in (cls.CHANGE_EMAIL_PHASE_OLD, cls.CHANGE_EMAIL_PHASE_NEW):
|
||||
raise ValueError("phase must be one of old_email or new_email.")
|
||||
|
||||
if cls.change_email_rate_limiter.is_rate_limited(account_email):
|
||||
from controllers.console.auth.error import EmailChangeRateLimitExceededError
|
||||
|
||||
raise EmailChangeRateLimitExceededError(int(cls.change_email_rate_limiter.time_window / 60))
|
||||
|
||||
code, token = cls.generate_change_email_token(account_email, account, old_email=old_email)
|
||||
code, token = cls.generate_change_email_token(
|
||||
account_email,
|
||||
account,
|
||||
old_email=old_email,
|
||||
additional_data={cls.CHANGE_EMAIL_TOKEN_PHASE_KEY: phase},
|
||||
)
|
||||
|
||||
send_change_mail_task.delay(
|
||||
language=language,
|
||||
|
||||
@ -37,7 +37,7 @@ class AppService:
|
||||
Get app list with pagination
|
||||
:param user_id: user id
|
||||
:param tenant_id: tenant id
|
||||
:param args: request args. Optional keys: status (e.g. "normal") restricts App.status.
|
||||
:param args: request args
|
||||
:return:
|
||||
"""
|
||||
filters = [App.tenant_id == tenant_id, App.is_universal == False]
|
||||
@ -53,8 +53,6 @@ class AppService:
|
||||
elif args["mode"] == "agent-chat":
|
||||
filters.append(App.mode == AppMode.AGENT_CHAT)
|
||||
|
||||
if args.get("status"):
|
||||
filters.append(App.status == args["status"])
|
||||
if args.get("is_created_by_me", False):
|
||||
filters.append(App.created_by == user_id)
|
||||
if args.get("name"):
|
||||
|
||||
@ -1,44 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
|
||||
from werkzeug.exceptions import ServiceUnavailable
|
||||
|
||||
from services.enterprise.enterprise_service import EnterpriseService
|
||||
from services.errors.enterprise import EnterpriseAPIError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class PermittedAppsPage:
|
||||
app_ids: list[str]
|
||||
total: int
|
||||
has_more: bool
|
||||
|
||||
|
||||
def list_permitted_apps(
|
||||
*,
|
||||
page: int,
|
||||
limit: int,
|
||||
mode: str | None = None,
|
||||
name: str | None = None,
|
||||
) -> PermittedAppsPage:
|
||||
try:
|
||||
body = EnterpriseService.WebAppAuth.list_externally_accessible_apps(
|
||||
page=page, limit=limit, mode=mode, name=name
|
||||
)
|
||||
except EnterpriseAPIError as exc:
|
||||
logger.warning(
|
||||
"permitted_apps EE call failed: status=%s message=%s",
|
||||
getattr(exc, "status_code", None),
|
||||
str(exc),
|
||||
)
|
||||
raise ServiceUnavailable("permitted_apps_unavailable") from exc
|
||||
|
||||
return PermittedAppsPage(
|
||||
app_ids=[row["appId"] for row in body.get("data", [])],
|
||||
total=int(body.get("total", 0)),
|
||||
has_more=bool(body.get("hasMore", False)),
|
||||
)
|
||||
@ -106,15 +106,6 @@ class EnterpriseService:
|
||||
def get_workspace_info(cls, tenant_id: str):
|
||||
return EnterpriseRequest.send_request("GET", f"/workspace/{tenant_id}/info")
|
||||
|
||||
@classmethod
|
||||
def initiate_device_flow_sso(cls, signed_state: str) -> dict:
|
||||
return EnterpriseRequest.send_request(
|
||||
"POST",
|
||||
"/device-flow/sso-initiate",
|
||||
json={"signed_state": signed_state},
|
||||
raise_for_status=True,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def join_default_workspace(cls, *, account_id: str) -> DefaultWorkspaceJoinResult:
|
||||
"""
|
||||
@ -243,32 +234,6 @@ class EnterpriseService:
|
||||
params = {"appId": app_id}
|
||||
EnterpriseRequest.send_request("DELETE", "/webapp/clean", params=params)
|
||||
|
||||
@classmethod
|
||||
def list_externally_accessible_apps(
|
||||
cls,
|
||||
*,
|
||||
page: int,
|
||||
limit: int,
|
||||
mode: str | None = None,
|
||||
name: str | None = None,
|
||||
) -> dict:
|
||||
"""Call EE InnerListExternallyAccessibleApps; returns raw camelCase response.
|
||||
|
||||
Response shape: ``{"data": [{"appId", "tenantId", "mode", "name", "updatedAt"}],
|
||||
"total": int, "hasMore": bool}``.
|
||||
"""
|
||||
body: dict[str, str | int] = {"page": page, "limit": limit}
|
||||
if mode is not None:
|
||||
body["mode"] = mode
|
||||
if name is not None:
|
||||
body["name"] = name
|
||||
return EnterpriseRequest.send_request(
|
||||
"POST",
|
||||
"/webapp/externally-accessible-apps",
|
||||
json=body,
|
||||
timeout=5.0,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_cached_license_status(cls) -> LicenseStatus | None:
|
||||
"""Get enterprise license status with Redis caching to reduce HTTP calls.
|
||||
|
||||
@ -1,10 +1,10 @@
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
from typing import Any, TypedDict
|
||||
from typing import Any, TypedDict, cast
|
||||
|
||||
from core.app.app_config.entities import ModelConfig
|
||||
from core.rag.datasource.retrieval_service import RetrievalService
|
||||
from core.rag.datasource.retrieval_service import DefaultRetrievalModelDict, RetrievalService
|
||||
from core.rag.index_processor.constant.query_type import QueryType
|
||||
from core.rag.models.document import Document
|
||||
from core.rag.retrieval.dataset_retrieval import DatasetRetrieval
|
||||
@ -36,6 +36,10 @@ default_retrieval_model = {
|
||||
}
|
||||
|
||||
|
||||
class HitTestingRetrievalModelDict(DefaultRetrievalModelDict, total=False):
|
||||
metadata_filtering_conditions: dict[str, Any]
|
||||
|
||||
|
||||
class HitTestingService:
|
||||
@classmethod
|
||||
def retrieve(
|
||||
@ -51,17 +55,18 @@ class HitTestingService:
|
||||
start = time.perf_counter()
|
||||
|
||||
# get retrieval model , if the model is not setting , using default
|
||||
if not retrieval_model:
|
||||
retrieval_model = dataset.retrieval_model or default_retrieval_model
|
||||
assert isinstance(retrieval_model, dict)
|
||||
resolved_retrieval_model = cast(
|
||||
HitTestingRetrievalModelDict,
|
||||
retrieval_model or dataset.retrieval_model or default_retrieval_model,
|
||||
)
|
||||
document_ids_filter = None
|
||||
metadata_filtering_conditions = retrieval_model.get("metadata_filtering_conditions", {})
|
||||
if metadata_filtering_conditions and query:
|
||||
metadata_filtering_conditions_raw = resolved_retrieval_model.get("metadata_filtering_conditions", {})
|
||||
if metadata_filtering_conditions_raw and query:
|
||||
dataset_retrieval = DatasetRetrieval()
|
||||
|
||||
from core.rag.entities import MetadataFilteringCondition
|
||||
|
||||
metadata_filtering_conditions = MetadataFilteringCondition.model_validate(metadata_filtering_conditions)
|
||||
metadata_filtering_conditions = MetadataFilteringCondition.model_validate(metadata_filtering_conditions_raw)
|
||||
|
||||
metadata_filter_document_ids, metadata_condition = dataset_retrieval.get_metadata_filter_condition(
|
||||
dataset_ids=[dataset.id],
|
||||
@ -78,19 +83,21 @@ class HitTestingService:
|
||||
if metadata_condition and not document_ids_filter:
|
||||
return cls.compact_retrieve_response(query, [])
|
||||
all_documents = RetrievalService.retrieve(
|
||||
retrieval_method=RetrievalMethod(retrieval_model.get("search_method", RetrievalMethod.SEMANTIC_SEARCH)),
|
||||
retrieval_method=RetrievalMethod(
|
||||
resolved_retrieval_model.get("search_method", RetrievalMethod.SEMANTIC_SEARCH)
|
||||
),
|
||||
dataset_id=dataset.id,
|
||||
query=query,
|
||||
attachment_ids=attachment_ids,
|
||||
top_k=retrieval_model.get("top_k", 4),
|
||||
score_threshold=retrieval_model.get("score_threshold", 0.0)
|
||||
if retrieval_model["score_threshold_enabled"]
|
||||
top_k=resolved_retrieval_model.get("top_k", 4),
|
||||
score_threshold=resolved_retrieval_model.get("score_threshold", 0.0)
|
||||
if resolved_retrieval_model["score_threshold_enabled"]
|
||||
else 0.0,
|
||||
reranking_model=retrieval_model.get("reranking_model", None)
|
||||
if retrieval_model["reranking_enable"]
|
||||
reranking_model=resolved_retrieval_model.get("reranking_model", None)
|
||||
if resolved_retrieval_model["reranking_enable"]
|
||||
else None,
|
||||
reranking_mode=retrieval_model.get("reranking_mode") or "reranking_model",
|
||||
weights=retrieval_model.get("weights", None),
|
||||
reranking_mode=resolved_retrieval_model.get("reranking_mode") or "reranking_model",
|
||||
weights=resolved_retrieval_model.get("weights", None),
|
||||
document_ids_filter=document_ids_filter,
|
||||
)
|
||||
|
||||
|
||||
@ -1,467 +0,0 @@
|
||||
"""Device-flow service layer: Redis state machine, OAuth token mint
|
||||
(DB upsert + plaintext generation), and TTL policy. Specs:
|
||||
docs/specs/v1.0/server/{device-flow.md, tokens.md}.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import secrets
|
||||
import time
|
||||
import uuid
|
||||
from dataclasses import asdict, dataclass, field
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from enum import StrEnum
|
||||
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.dialects.postgresql import insert as pg_insert
|
||||
from sqlalchemy.orm import Session, scoped_session
|
||||
|
||||
from libs.oauth_bearer import TOKEN_CACHE_KEY_FMT
|
||||
from models.oauth import OAuthAccessToken
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Redis state machine — device_code + user_code ephemeral state
|
||||
# ============================================================================
|
||||
|
||||
|
||||
_DEVICE_CODE_KEY_PREFIX = "device_code:"
|
||||
_USER_CODE_KEY_PREFIX = "user_code:"
|
||||
DEVICE_CODE_KEY_FMT = _DEVICE_CODE_KEY_PREFIX + "{code}"
|
||||
USER_CODE_KEY_FMT = _USER_CODE_KEY_PREFIX + "{code}"
|
||||
|
||||
# Atomic GET → status-check → DEL(both keys). Two concurrent pollers must
|
||||
# not both observe APPROVED — only the winner gets the plaintext token,
|
||||
# the loser sees nil and the caller maps that to expired_token.
|
||||
_CONSUME_ON_POLL_LUA = """
|
||||
local raw = redis.call('GET', KEYS[1])
|
||||
if not raw then return nil end
|
||||
local ok, decoded = pcall(cjson.decode, raw)
|
||||
if not ok then return nil end
|
||||
if decoded.status == 'pending' then return nil end
|
||||
if decoded.user_code then
|
||||
redis.call('DEL', ARGV[1] .. decoded.user_code)
|
||||
end
|
||||
redis.call('DEL', KEYS[1])
|
||||
return raw
|
||||
"""
|
||||
|
||||
DEVICE_FLOW_TTL_SECONDS = 15 * 60 # RFC 8628 expires_in
|
||||
APPROVED_TTL_SECONDS_MIN = 60 # plaintext-token lifetime floor
|
||||
|
||||
USER_CODE_ALPHABET = "ABCDEFGHJKLMNPQRSTUVWXY3456789" # ambiguous chars dropped
|
||||
USER_CODE_SEGMENT_LEN = 4
|
||||
USER_CODE_MAX_CLAIM_ATTEMPTS = 5
|
||||
|
||||
DEFAULT_POLL_INTERVAL_SECONDS = 5 # RFC 8628 minimum
|
||||
|
||||
|
||||
class DeviceFlowStatus(StrEnum):
|
||||
PENDING = "pending"
|
||||
APPROVED = "approved"
|
||||
DENIED = "denied"
|
||||
|
||||
|
||||
class SlowDownDecision(StrEnum):
|
||||
OK = "ok"
|
||||
SLOW_DOWN = "slow_down"
|
||||
|
||||
|
||||
@dataclass
|
||||
class DeviceFlowState:
|
||||
"""``minted_token`` is plaintext between approve and the next poll;
|
||||
DEL'd after the poll reads it.
|
||||
"""
|
||||
|
||||
user_code: str
|
||||
client_id: str
|
||||
device_label: str
|
||||
status: DeviceFlowStatus
|
||||
subject_email: str | None = None
|
||||
account_id: str | None = None
|
||||
subject_issuer: str | None = None
|
||||
minted_token: str | None = None
|
||||
token_id: str | None = None
|
||||
created_at: str = ""
|
||||
created_ip: str = ""
|
||||
last_poll_at: str = ""
|
||||
poll_payload: dict | None = field(default=None)
|
||||
|
||||
def to_json(self) -> str:
|
||||
return json.dumps(asdict(self))
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, raw: str) -> DeviceFlowState:
|
||||
data = json.loads(raw)
|
||||
if "status" in data:
|
||||
data["status"] = DeviceFlowStatus(data["status"])
|
||||
return cls(**data)
|
||||
|
||||
|
||||
def _random_device_code() -> str:
|
||||
return "dc_" + secrets.token_urlsafe(24)
|
||||
|
||||
|
||||
def _random_user_code_segment() -> str:
|
||||
return "".join(secrets.choice(USER_CODE_ALPHABET) for _ in range(USER_CODE_SEGMENT_LEN))
|
||||
|
||||
|
||||
def _random_user_code() -> str:
|
||||
return f"{_random_user_code_segment()}-{_random_user_code_segment()}"
|
||||
|
||||
|
||||
class StateNotFoundError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class InvalidTransitionError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class UserCodeExhaustedError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class DeviceFlowRedis:
|
||||
def __init__(self, redis_client) -> None:
|
||||
self._redis = redis_client
|
||||
self._consume_on_poll_script = redis_client.register_script(_CONSUME_ON_POLL_LUA)
|
||||
|
||||
def start(self, client_id: str, device_label: str, created_ip: str) -> tuple[str, str, int]:
|
||||
device_code = _random_device_code()
|
||||
user_code = self._claim_user_code(device_code)
|
||||
state = DeviceFlowState(
|
||||
user_code=user_code,
|
||||
client_id=client_id,
|
||||
device_label=device_label,
|
||||
status=DeviceFlowStatus.PENDING,
|
||||
created_at=datetime.now(UTC).isoformat(),
|
||||
created_ip=created_ip,
|
||||
)
|
||||
self._redis.setex(
|
||||
DEVICE_CODE_KEY_FMT.format(code=device_code),
|
||||
DEVICE_FLOW_TTL_SECONDS,
|
||||
state.to_json(),
|
||||
)
|
||||
return device_code, user_code, DEVICE_FLOW_TTL_SECONDS
|
||||
|
||||
def _claim_user_code(self, device_code: str) -> str:
|
||||
for _ in range(USER_CODE_MAX_CLAIM_ATTEMPTS):
|
||||
user_code = _random_user_code()
|
||||
key = USER_CODE_KEY_FMT.format(code=user_code)
|
||||
ok = self._redis.set(key, device_code, nx=True, ex=DEVICE_FLOW_TTL_SECONDS)
|
||||
if ok:
|
||||
return user_code
|
||||
raise UserCodeExhaustedError("could not allocate a unique user_code in 5 attempts")
|
||||
|
||||
def load_by_user_code(self, user_code: str) -> tuple[str, DeviceFlowState] | None:
|
||||
raw_dc = self._redis.get(USER_CODE_KEY_FMT.format(code=user_code))
|
||||
if not raw_dc:
|
||||
return None
|
||||
device_code = raw_dc.decode() if isinstance(raw_dc, (bytes, bytearray)) else raw_dc
|
||||
state = self._load_state(device_code)
|
||||
if state is None:
|
||||
return None
|
||||
return device_code, state
|
||||
|
||||
def load_by_device_code(self, device_code: str) -> DeviceFlowState | None:
|
||||
return self._load_state(device_code)
|
||||
|
||||
def _load_state(self, device_code: str) -> DeviceFlowState | None:
|
||||
raw = self._redis.get(DEVICE_CODE_KEY_FMT.format(code=device_code))
|
||||
if not raw:
|
||||
return None
|
||||
text_ = raw.decode() if isinstance(raw, (bytes, bytearray)) else raw
|
||||
try:
|
||||
return DeviceFlowState.from_json(text_)
|
||||
except (ValueError, KeyError):
|
||||
logger.exception("device_flow: corrupt state for %s", device_code)
|
||||
return None
|
||||
|
||||
def approve(
|
||||
self,
|
||||
device_code: str,
|
||||
subject_email: str,
|
||||
account_id: str | None,
|
||||
minted_token: str,
|
||||
token_id: str,
|
||||
subject_issuer: str | None = None,
|
||||
poll_payload: dict | None = None,
|
||||
) -> None:
|
||||
state = self._load_state(device_code)
|
||||
if state is None:
|
||||
raise StateNotFoundError(device_code)
|
||||
if state.status is not DeviceFlowStatus.PENDING:
|
||||
raise InvalidTransitionError(f"cannot approve {state.status}")
|
||||
|
||||
state.status = DeviceFlowStatus.APPROVED
|
||||
state.subject_email = subject_email
|
||||
state.account_id = account_id
|
||||
state.subject_issuer = subject_issuer
|
||||
state.minted_token = minted_token
|
||||
state.token_id = token_id
|
||||
state.poll_payload = poll_payload
|
||||
|
||||
new_ttl = self._remaining_ttl(device_code, floor=APPROVED_TTL_SECONDS_MIN)
|
||||
self._redis.setex(DEVICE_CODE_KEY_FMT.format(code=device_code), new_ttl, state.to_json())
|
||||
|
||||
def deny(self, device_code: str) -> None:
|
||||
state = self._load_state(device_code)
|
||||
if state is None:
|
||||
raise StateNotFoundError(device_code)
|
||||
if state.status is not DeviceFlowStatus.PENDING:
|
||||
raise InvalidTransitionError(f"cannot deny {state.status}")
|
||||
state.status = DeviceFlowStatus.DENIED
|
||||
self._redis.setex(
|
||||
DEVICE_CODE_KEY_FMT.format(code=device_code),
|
||||
self._remaining_ttl(device_code, floor=1),
|
||||
state.to_json(),
|
||||
)
|
||||
|
||||
def consume_on_poll(self, device_code: str) -> DeviceFlowState | None:
|
||||
"""Race-safe via Lua EVAL: GET + status-check + DEL execute in a
|
||||
single Redis transaction so only one of N concurrent pollers
|
||||
observes the APPROVED state. Losers get None, mapped to
|
||||
expired_token by the caller.
|
||||
"""
|
||||
raw = self._consume_on_poll_script(
|
||||
keys=[DEVICE_CODE_KEY_FMT.format(code=device_code)],
|
||||
args=[_USER_CODE_KEY_PREFIX],
|
||||
)
|
||||
if raw is None:
|
||||
return None
|
||||
text_ = raw.decode() if isinstance(raw, (bytes, bytearray)) else raw
|
||||
try:
|
||||
return DeviceFlowState.from_json(text_)
|
||||
except (ValueError, KeyError):
|
||||
logger.exception("device_flow: corrupt state on consume %s", device_code)
|
||||
return None
|
||||
|
||||
def record_poll(self, device_code: str, interval_seconds: int) -> SlowDownDecision:
    """Record a poll timestamp and decide whether the client polled too fast.

    Returns SLOW_DOWN when the previous poll happened less than
    *interval_seconds* ago, otherwise OK. The timestamp is refreshed on
    every call (read happens before the write).
    """
    now = time.time()
    last_poll_key = f"device_code:{device_code}:last_poll"
    previous_raw = self._redis.get(last_poll_key)
    self._redis.setex(last_poll_key, DEVICE_FLOW_TTL_SECONDS, str(now))

    if previous_raw is None:
        # First poll we have seen for this code — nothing to compare against.
        return SlowDownDecision.OK

    previous_text = previous_raw.decode() if isinstance(previous_raw, (bytes, bytearray)) else previous_raw
    try:
        previous_ts = float(previous_text)
    except ValueError:
        # Unparseable stored timestamp — don't punish the client for our bad data.
        return SlowDownDecision.OK

    return SlowDownDecision.SLOW_DOWN if now - previous_ts < interval_seconds else SlowDownDecision.OK
|
||||
|
||||
def _remaining_ttl(self, device_code: str, floor: int) -> int:
    """``max(remaining, floor)`` — guarantees the CLI has at least
    ``floor`` seconds to poll after a near-expiry approve.
    """
    remaining = self._redis.ttl(DEVICE_CODE_KEY_FMT.format(code=device_code))
    # Redis reports negative TTLs for missing keys / keys without expiry;
    # in either case fall back to the floor.
    if remaining is None or remaining < 0:
        return floor
    return max(int(remaining), floor)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Token mint — generate + upsert
|
||||
# ============================================================================
|
||||
|
||||
|
||||
# Random-body size for minted bearer tokens.
OAUTH_BODY_BYTES = 32  # ~256 bits entropy

# Token prefixes distinguish how the bearer was minted.
PREFIX_OAUTH_ACCOUNT = "dfoa_"  # first-party account flow
PREFIX_OAUTH_EXTERNAL_SSO = "dfoe_"  # external-SSO flow

# Sentinel issuer for account-flow rows. Postgres' default partial unique
# index treats NULLs as distinct, which would let two live `dfoa_` rows
# share (email, client, device) and break rotate-in-place. Storing a
# non-empty literal makes the composite key collide as intended.
ACCOUNT_ISSUER_SENTINEL = "dify:account"
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class MintResult:
    """Plaintext token surfaces to the caller once."""

    # Plaintext bearer token — only its SHA-256 hash is persisted.
    token: str
    # Primary key of the persisted oauth_access_tokens row.
    token_id: uuid.UUID
    # Expiry timestamp of the minted token (UTC).
    expires_at: datetime
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class UpsertOutcome:
    """Result of the token upsert: row id plus rotation bookkeeping."""

    # Primary key of the live oauth_access_tokens row (inserted or rotated).
    token_id: uuid.UUID
    # True when an existing live row was rotated in place.
    rotated: bool
    # Hash of the replaced token (for Redis invalidation), else None.
    old_hash: str | None
|
||||
|
||||
|
||||
def generate_token(prefix: str) -> str:
    """Return a fresh bearer token: *prefix* plus a URL-safe random body."""
    body = secrets.token_urlsafe(OAUTH_BODY_BYTES)
    return f"{prefix}{body}"
|
||||
|
||||
|
||||
def sha256_hex(token: str) -> str:
    """Return the lowercase hex SHA-256 digest of *token* (UTF-8 encoded)."""
    hasher = hashlib.sha256()
    hasher.update(token.encode("utf-8"))
    return hasher.hexdigest()
|
||||
|
||||
|
||||
def mint_oauth_token(
    # Accept either Session or Flask-SQLAlchemy's request-scoped wrapper —
    # the wrapper proxies the same execute/commit surface.
    session: Session | scoped_session,
    redis_client,
    *,
    subject_email: str,
    subject_issuer: str | None,
    account_id: str | None,
    client_id: str,
    device_label: str,
    prefix: str,
    ttl_days: int,
) -> MintResult:
    """Generate a bearer token, persist its hash, and return the plaintext once.

    Live row rotates in place via partial unique index
    ``uq_oauth_active_per_device``; hard-expired rows are excluded by the
    index predicate so re-login INSERTs fresh. Pre-rotate Redis entry is
    deleted so stale AuthContext drops immediately.

    Raises:
        ValueError: unknown *prefix*, or a *subject_issuer* inconsistent
            with the prefix's flow.
    """
    if prefix == PREFIX_OAUTH_ACCOUNT:
        # Account flow always writes the sentinel — caller may pass None
        # (for clarity) or the sentinel itself; nothing else is valid.
        if subject_issuer not in (None, ACCOUNT_ISSUER_SENTINEL):
            raise ValueError(f"account-flow token must use ACCOUNT_ISSUER_SENTINEL, got {subject_issuer!r}")
        subject_issuer = ACCOUNT_ISSUER_SENTINEL
    elif prefix == PREFIX_OAUTH_EXTERNAL_SSO:
        # Defense in depth: enterprise canonicalises + rejects empty,
        # but a regression there must not yield a NULL composite key here.
        if not subject_issuer or not subject_issuer.strip():
            raise ValueError("external-SSO token requires non-empty subject_issuer")
    else:
        raise ValueError(f"unknown oauth prefix: {prefix!r}")

    # Plaintext is surfaced to the caller exactly once; only the hash is stored.
    token = generate_token(prefix)
    new_hash = sha256_hex(token)
    expires_at = datetime.now(UTC) + timedelta(days=ttl_days)

    outcome = _upsert(
        session,
        subject_email=subject_email,
        subject_issuer=subject_issuer,
        account_id=account_id,
        client_id=client_id,
        device_label=device_label,
        prefix=prefix,
        new_hash=new_hash,
        expires_at=expires_at,
    )

    # On rotation, drop the replaced token's cached AuthContext so the old
    # bearer stops authenticating immediately instead of at cache expiry.
    if outcome.rotated and outcome.old_hash:
        redis_client.delete(TOKEN_CACHE_KEY_FMT.format(hash=outcome.old_hash))

    return MintResult(token=token, token_id=outcome.token_id, expires_at=expires_at)
|
||||
|
||||
|
||||
def _upsert(
    session: Session | scoped_session,
    *,
    subject_email: str,
    subject_issuer: str | None,
    account_id: str | None,
    client_id: str,
    device_label: str,
    prefix: str,
    new_hash: str,
    expires_at: datetime,
) -> UpsertOutcome:
    """Insert or rotate-in-place the live token row for one
    (subject_email, subject_issuer, client_id, device_label) tuple.

    Returns the row id plus whether a prior live row was replaced (and its
    hash, so the caller can invalidate the Redis token cache).
    """
    # Snapshot prior live row's hash for Redis invalidation post-rotate.
    # subject_issuer is always non-null here (account flow uses sentinel,
    # external-SSO is validated upstream), so equality matches the index.
    prior = session.execute(
        select(OAuthAccessToken.id, OAuthAccessToken.token_hash)
        .where(
            OAuthAccessToken.subject_email == subject_email,
            OAuthAccessToken.subject_issuer == subject_issuer,
            OAuthAccessToken.client_id == client_id,
            OAuthAccessToken.device_label == device_label,
            OAuthAccessToken.revoked_at.is_(None),
        )
        .limit(1)
    ).first()
    old_hash = prior.token_hash if prior else None

    insert_stmt = pg_insert(OAuthAccessToken).values(
        subject_email=subject_email,
        subject_issuer=subject_issuer,
        account_id=account_id,
        client_id=client_id,
        device_label=device_label,
        prefix=prefix,
        token_hash=new_hash,
        expires_at=expires_at,
    )
    # Conflict target mirrors the partial unique index over non-revoked rows;
    # on rotate we refresh hash/prefix/expiry and reset usage bookkeeping.
    upsert_stmt = insert_stmt.on_conflict_do_update(
        index_elements=["subject_email", "subject_issuer", "client_id", "device_label"],
        index_where=OAuthAccessToken.revoked_at.is_(None),
        set_={
            "token_hash": insert_stmt.excluded.token_hash,
            "prefix": insert_stmt.excluded.prefix,
            "account_id": insert_stmt.excluded.account_id,
            "expires_at": insert_stmt.excluded.expires_at,
            "created_at": func.now(),
            "last_used_at": None,
        },
    ).returning(OAuthAccessToken.id)
    row = session.execute(upsert_stmt).first()
    session.commit()

    if row is None:
        # RETURNING always yields a row on success; absence is a hard bug.
        raise RuntimeError("oauth_token upsert returned no row")
    token_id = uuid.UUID(str(row.id))
    return UpsertOutcome(
        token_id=token_id,
        rotated=prior is not None,
        old_hash=old_hash,
    )
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# TTL policy — days new OAuth tokens live
|
||||
# ============================================================================
|
||||
|
||||
|
||||
# Fallback when the env var is unset or unparseable.
DEFAULT_OAUTH_TTL_DAYS = 14
# Clamp bounds applied to the configured value.
MIN_TTL_DAYS = 1
MAX_TTL_DAYS = 365

# Environment variable consulted by oauth_ttl_days().
_TTL_ENV_VAR = "OAUTH_TTL_DAYS"
|
||||
|
||||
|
||||
def oauth_ttl_days(tenant_id: str | None = None) -> int:
    """``OAUTH_TTL_DAYS`` env, else default. EE tenant-level lookup
    is deferred; when it lands it wins over the env (Redis-cached 60s).
    """
    _ = tenant_id  # reserved for the future EE tenant-level override

    configured = os.environ.get(_TTL_ENV_VAR)
    if configured is None:
        return DEFAULT_OAUTH_TTL_DAYS

    try:
        days = int(configured)
    except ValueError:
        # Misconfiguration is logged, never fatal — fall back to the default.
        logger.warning(
            "%s=%r is not an int; falling back to %d",
            _TTL_ENV_VAR,
            configured,
            DEFAULT_OAUTH_TTL_DAYS,
        )
        return DEFAULT_OAUTH_TTL_DAYS

    # Clamp into [MIN_TTL_DAYS, MAX_TTL_DAYS], logging when the bound bites.
    if days < MIN_TTL_DAYS:
        logger.warning("%s=%d below min %d; clamping", _TTL_ENV_VAR, days, MIN_TTL_DAYS)
        return MIN_TTL_DAYS
    if days > MAX_TTL_DAYS:
        logger.warning("%s=%d above max %d; clamping", _TTL_ENV_VAR, days, MAX_TTL_DAYS)
        return MAX_TTL_DAYS
    return days
|
||||
@ -1,125 +0,0 @@
|
||||
"""Shared fixtures for /openapi/v1/* integration tests."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import uuid
|
||||
from collections.abc import Generator
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
import pytest
|
||||
from flask import Flask
|
||||
|
||||
from extensions.ext_database import db
|
||||
from extensions.ext_redis import redis_client
|
||||
from models import Account, App, OAuthAccessToken, Tenant, TenantAccountJoin
|
||||
from models.account import AccountStatus
|
||||
|
||||
|
||||
def _sha256(token: str) -> str:
|
||||
return hashlib.sha256(token.encode("utf-8")).hexdigest()
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def disable_enterprise(monkeypatch):
    """Default to CE behaviour for /openapi/v1 tests. Tests that exercise the
    EE branch override this with their own monkeypatch in-test."""
    from configs import dify_config

    # monkeypatch restores the attribute automatically after each test.
    monkeypatch.setattr(dify_config, "ENTERPRISE_ENABLED", False)
|
||||
|
||||
|
||||
@pytest.fixture
def workspace_account(flask_app: Flask) -> Generator[tuple[Account, Tenant, TenantAccountJoin], None, None]:
    """Create a tenant, an active owner account, and the membership join row.

    Yields (account, tenant, join); all three rows are deleted on teardown.
    """
    with flask_app.app_context():
        tenant = Tenant(name="t1", status="normal")
        account = Account(email="u@example.com", name="u")
        db.session.add_all([tenant, account])
        db.session.commit()
        # NOTE(review): status is set after the first commit — presumably to
        # override a default applied at insert; confirm against the model.
        account.status = AccountStatus.ACTIVE
        join = TenantAccountJoin(tenant_id=tenant.id, account_id=account.id, role="owner")
        db.session.add(join)
        db.session.commit()
        yield account, tenant, join
        # Teardown in dependency order: join first, then its parents.
        db.session.delete(join)
        db.session.delete(account)
        db.session.delete(tenant)
        db.session.commit()
|
||||
|
||||
|
||||
@pytest.fixture
def app_in_workspace(flask_app: Flask, workspace_account) -> Generator[App, None, None]:
    """A chat-mode App row in the workspace_account tenant; deleted on teardown."""
    _, tenant, _ = workspace_account
    with flask_app.app_context():
        app = App(tenant_id=tenant.id, name="a", mode="chat", status="normal", enable_site=True, enable_api=True)
        db.session.add(app)
        db.session.commit()
        yield app
        db.session.delete(app)
        db.session.commit()
|
||||
|
||||
|
||||
@pytest.fixture
def mint_token(flask_app: Flask):
    """Factory fixture; tracks minted rows and deletes them on teardown so
    the auth-related test runs don't accumulate `oauth_access_tokens` rows."""
    minted: list[OAuthAccessToken] = []

    def _mint(
        token: str,
        *,
        account_id: str | None,
        prefix: str,
        subject_email: str,
        subject_issuer: str | None,
    ) -> OAuthAccessToken:
        # Persist only the SHA-256 hash, mirroring production token storage.
        with flask_app.app_context():
            row = OAuthAccessToken(
                token_hash=_sha256(token),
                prefix=prefix,
                account_id=account_id,
                subject_email=subject_email,
                subject_issuer=subject_issuer,
                client_id="difyctl",
                device_label="test-device",
                expires_at=datetime.now(UTC) + timedelta(hours=1),
            )
            db.session.add(row)
            db.session.commit()
            minted.append(row)
            return row

    yield _mint

    with flask_app.app_context():
        # merge() reattaches rows created in earlier, now-closed sessions
        # before deleting them.
        for row in minted:
            db.session.delete(db.session.merge(row))
        db.session.commit()
|
||||
|
||||
|
||||
@pytest.fixture
def account_token(workspace_account, mint_token) -> str:
    """Mint and return a plaintext `dfoa_` account bearer for workspace_account."""
    account, _, _ = workspace_account
    token = "dfoa_" + uuid.uuid4().hex
    mint_token(
        token,
        account_id=account.id,
        prefix="dfoa_",
        subject_email=account.email,
        subject_issuer="dify:account",
    )
    return token
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def _flush_auth_redis(flask_app: Flask) -> Generator[None, None, None]:
    """Clear auth and rate-limit Redis keys both before and after each test."""

    def _clear() -> None:
        with flask_app.app_context():
            for pattern in ("auth:*", "rl:*"):
                for key in redis_client.keys(pattern):
                    redis_client.delete(key)

    _clear()
    yield
    _clear()
|
||||
@ -1,252 +0,0 @@
|
||||
"""Integration tests for POST /openapi/v1/apps/<id>/run."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from collections.abc import Generator
|
||||
|
||||
import pytest
|
||||
from flask import Flask
|
||||
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from extensions.ext_database import db
|
||||
from models import App
|
||||
|
||||
|
||||
def test_run_chat_dispatches_to_chat_handler(flask_app, account_token, app_in_workspace, monkeypatch):
    """Blocking chat run dispatches with OPENAPI invoke_from and strips body.user."""
    captured = {}

    def _fake_generate(*, app_model, user, args, invoke_from, streaming):
        # Record what the controller forwarded so the assertions can inspect it.
        captured["mode"] = app_model.mode
        captured["args"] = args
        captured["invoke_from"] = invoke_from
        return {
            "event": "message",
            "task_id": "t",
            "id": "m",
            "message_id": "m",
            "conversation_id": "c",
            "mode": "chat",
            "answer": "ok",
            "created_at": 0,
        }

    monkeypatch.setattr(
        "controllers.openapi.app_run.AppGenerateService.generate", staticmethod(_fake_generate)
    )
    client = flask_app.test_client()
    res = client.post(
        f"/openapi/v1/apps/{app_in_workspace.id}/run",
        json={"inputs": {}, "query": "hi", "response_mode": "blocking", "user": "spoof@x.com"},
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 200
    assert res.get_json()["mode"] == "chat"
    assert captured["mode"] == "chat"
    assert captured["invoke_from"] == InvokeFrom.OPENAPI
    assert "user" not in captured["args"], "server must strip body.user; identity comes from bearer"
|
||||
|
||||
|
||||
@pytest.fixture
def app_with_mode(flask_app: Flask, workspace_account):
    """Factory that creates an App row in the workspace_account tenant with
    a specified mode. Tracks rows for teardown.
    """
    _, tenant, _ = workspace_account
    created: list[App] = []

    def _make(mode: str) -> App:
        with flask_app.app_context():
            app = App(
                tenant_id=tenant.id,
                name=f"a-{mode}",
                mode=mode,
                status="normal",
                enable_site=True,
                enable_api=True,
            )
            db.session.add(app)
            db.session.commit()
            # Refresh then detach so the row is usable outside this context.
            db.session.refresh(app)
            db.session.expunge(app)
            created.append(app)
            return app

    yield _make

    with flask_app.app_context():
        # merge() reattaches the expunged rows before deletion.
        for app in created:
            db.session.delete(db.session.merge(app))
        db.session.commit()
|
||||
|
||||
|
||||
def test_run_chat_without_query_returns_422(flask_app, account_token, app_in_workspace):
    """Chat apps require `query`; omitting it yields 422 query_required_for_chat.

    Fix: dropped the `monkeypatch` fixture — this test patches nothing.
    """
    client = flask_app.test_client()
    res = client.post(
        f"/openapi/v1/apps/{app_in_workspace.id}/run",
        json={"inputs": {}, "response_mode": "blocking"},
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 422
    assert b"query_required_for_chat" in res.data
|
||||
|
||||
|
||||
def test_run_completion_dispatches_to_completion_handler(
    flask_app, account_token, app_with_mode, monkeypatch
):
    """Completion apps run without `query` and report mode == "completion"."""
    app = app_with_mode("completion")

    captured: dict = {}

    def _fake_generate(*, app_model, user, args, invoke_from, streaming):
        # Record dispatch details for the assertions below.
        captured["mode"] = app_model.mode
        captured["args"] = args
        return {
            "event": "message",
            "task_id": "t",
            "id": "m",
            "message_id": "m",
            "mode": "completion",
            "answer": "ok",
            "created_at": 0,
        }

    monkeypatch.setattr(
        "controllers.openapi.app_run.AppGenerateService.generate", staticmethod(_fake_generate)
    )
    client = flask_app.test_client()
    res = client.post(
        f"/openapi/v1/apps/{app.id}/run",
        json={"inputs": {}, "response_mode": "blocking"},
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 200
    assert res.get_json()["mode"] == "completion"
    assert captured["mode"] == "completion"
|
||||
|
||||
|
||||
def test_run_workflow_with_query_returns_422(flask_app, account_token, app_with_mode):
    """Workflow apps reject `query` with 422 query_not_supported_for_workflow.

    Fix: dropped the `monkeypatch` fixture — this test patches nothing.
    """
    app = app_with_mode("workflow")
    client = flask_app.test_client()
    res = client.post(
        f"/openapi/v1/apps/{app.id}/run",
        json={"inputs": {}, "query": "hi", "response_mode": "blocking"},
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 422
    assert b"query_not_supported_for_workflow" in res.data
|
||||
|
||||
|
||||
def test_run_workflow_no_query_dispatches_to_workflow_handler(
    flask_app, account_token, app_with_mode, monkeypatch
):
    """Workflow run without `query` dispatches and surfaces workflow_run_id."""

    app = app_with_mode("workflow")

    def _fake_generate(*, app_model, user, args, invoke_from, streaming):
        # Minimal workflow-shaped payload the controller is expected to wrap.
        return {
            "workflow_run_id": "wfr",
            "task_id": "t",
            "data": {"id": "wf-d", "workflow_id": "wf", "status": "succeeded"},
        }

    monkeypatch.setattr(
        "controllers.openapi.app_run.AppGenerateService.generate", staticmethod(_fake_generate)
    )
    client = flask_app.test_client()
    res = client.post(
        f"/openapi/v1/apps/{app.id}/run",
        json={"inputs": {}, "response_mode": "blocking"},
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 200
    body = res.get_json()
    assert body["mode"] == "workflow"
    assert body["workflow_run_id"] == "wfr"
|
||||
|
||||
|
||||
def test_run_unsupported_mode_returns_422(flask_app, account_token, app_with_mode):
    """Non-runnable app modes (e.g. "channel") yield 422 mode_not_runnable.

    Fix: dropped the `monkeypatch` fixture — this test patches nothing.
    """
    app = app_with_mode("channel")
    client = flask_app.test_client()
    res = client.post(
        f"/openapi/v1/apps/{app.id}/run",
        json={"inputs": {}, "response_mode": "blocking"},
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 422
    assert b"mode_not_runnable" in res.data
|
||||
|
||||
|
||||
def test_run_without_bearer_returns_401(flask_app, app_in_workspace):
    """Missing Authorization header is rejected with 401."""
    client = flask_app.test_client()
    res = client.post(
        f"/openapi/v1/apps/{app_in_workspace.id}/run",
        json={"inputs": {}, "query": "hi"},
    )
    assert res.status_code == 401
|
||||
|
||||
|
||||
def test_run_with_insufficient_scope_returns_403(
    flask_app, account_token, app_in_workspace, monkeypatch
):
    """Stub the authenticator to return an AuthContext with empty scopes."""
    from libs import oauth_bearer

    real_authenticate = oauth_bearer.BearerAuthenticator.authenticate

    def _stub_authenticate(self, token: str):
        # Run the real authentication, then strip every scope from the context.
        ctx = real_authenticate(self, token)
        from dataclasses import replace

        return replace(ctx, scopes=frozenset())

    monkeypatch.setattr(oauth_bearer.BearerAuthenticator, "authenticate", _stub_authenticate)

    client = flask_app.test_client()
    res = client.post(
        f"/openapi/v1/apps/{app_in_workspace.id}/run",
        json={"inputs": {}, "query": "hi"},
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 403
|
||||
|
||||
|
||||
def test_run_with_unknown_app_returns_404(flask_app, account_token):
    """Running a random (nonexistent) app id yields 404."""
    client = flask_app.test_client()
    res = client.post(
        f"/openapi/v1/apps/{uuid.uuid4()}/run",
        json={"inputs": {}, "query": "hi"},
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 404
|
||||
|
||||
|
||||
def test_run_streaming_returns_event_stream(
    flask_app, account_token, app_in_workspace, monkeypatch
):
    """streaming response_mode produces a text/event-stream body with SSE events."""

    def _stream() -> Generator[str, None, None]:
        # A single SSE frame is enough to assert the content type and framing.
        yield "event: message\ndata: {\"x\": 1}\n\n"

    monkeypatch.setattr(
        "controllers.openapi.app_run.AppGenerateService.generate",
        staticmethod(lambda **kw: _stream()),
    )

    client = flask_app.test_client()
    res = client.post(
        f"/openapi/v1/apps/{app_in_workspace.id}/run",
        json={"inputs": {}, "query": "hi", "response_mode": "streaming"},
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 200
    assert res.headers["Content-Type"].startswith("text/event-stream")
    assert b"event: message" in res.data
|
||||
|
||||
|
||||
def test_run_without_inputs_returns_422(flask_app, account_token, app_in_workspace):
    """Omitting the required `inputs` field yields 422."""
    client = flask_app.test_client()
    res = client.post(
        f"/openapi/v1/apps/{app_in_workspace.id}/run",
        json={"query": "hi"},
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 422
|
||||
@ -1,210 +0,0 @@
|
||||
"""Integration tests for /openapi/v1/apps* read surface."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from flask.testing import FlaskClient
|
||||
|
||||
from models import App
|
||||
|
||||
|
||||
def test_apps_bare_id_route_404(test_client, app_in_workspace, account_token):
    """GET /apps/<id> (no sub-resource) is not a registered route → 404."""
    resp = test_client.get(
        f"/openapi/v1/apps/{app_in_workspace.id}",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert resp.status_code == 404
|
||||
|
||||
|
||||
def test_apps_parameters_route_404(test_client, app_in_workspace, account_token):
    """GET /apps/<id>/parameters is not exposed on this surface → 404."""
    resp = test_client.get(
        f"/openapi/v1/apps/{app_in_workspace.id}/parameters",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert resp.status_code == 404
|
||||
|
||||
|
||||
def test_apps_info_route_404(test_client, app_in_workspace, account_token):
    """GET /apps/<id>/info is not exposed on this surface → 404."""
    resp = test_client.get(
        f"/openapi/v1/apps/{app_in_workspace.id}/info",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert resp.status_code == 404
|
||||
|
||||
|
||||
def test_apps_describe_returns_merged_shape(
    test_client: FlaskClient,
    app_in_workspace: App,
    account_token: str,
):
    """/describe merges app info and parameters into one response."""
    res = test_client.get(
        f"/openapi/v1/apps/{app_in_workspace.id}/describe",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 200
    body = res.json
    assert body["info"]["id"] == app_in_workspace.id
    assert body["info"]["mode"] == "chat"
    assert isinstance(body["parameters"], dict)
|
||||
|
||||
|
||||
def test_apps_describe_full_includes_input_schema(
    test_client: FlaskClient,
    app_in_workspace: App,
    account_token: str,
):
    """Default /describe includes a JSON Schema draft 2020-12 input_schema."""
    res = test_client.get(
        f"/openapi/v1/apps/{app_in_workspace.id}/describe",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 200
    body = res.json
    assert body["info"] is not None
    assert body["parameters"] is not None
    assert body["input_schema"] is not None
    assert body["input_schema"]["$schema"] == "https://json-schema.org/draft/2020-12/schema"
|
||||
|
||||
|
||||
def test_apps_describe_fields_info_only(
    test_client: FlaskClient,
    app_in_workspace: App,
    account_token: str,
):
    """`fields=info` returns only the info section; others are null."""
    res = test_client.get(
        f"/openapi/v1/apps/{app_in_workspace.id}/describe?fields=info",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 200
    body = res.json
    assert body["info"] is not None
    assert body["parameters"] is None
    assert body["input_schema"] is None
|
||||
|
||||
|
||||
def test_apps_describe_fields_parameters_only(
    test_client: FlaskClient,
    app_in_workspace: App,
    account_token: str,
):
    """`fields=parameters` returns only the parameters section; others are null."""
    res = test_client.get(
        f"/openapi/v1/apps/{app_in_workspace.id}/describe?fields=parameters",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 200
    body = res.json
    assert body["info"] is None
    assert body["parameters"] is not None
    assert body["input_schema"] is None
|
||||
|
||||
|
||||
def test_apps_describe_fields_input_schema_only(
    test_client: FlaskClient,
    app_in_workspace: App,
    account_token: str,
):
    """`fields=input_schema` returns only the schema section; others are null."""
    res = test_client.get(
        f"/openapi/v1/apps/{app_in_workspace.id}/describe?fields=input_schema",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 200
    body = res.json
    assert body["info"] is None
    assert body["parameters"] is None
    assert body["input_schema"] is not None
|
||||
|
||||
|
||||
def test_apps_describe_fields_combined(
    test_client: FlaskClient,
    app_in_workspace: App,
    account_token: str,
):
    """A comma-separated `fields` list selects exactly the named sections."""
    res = test_client.get(
        f"/openapi/v1/apps/{app_in_workspace.id}/describe?fields=info,input_schema",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 200
    body = res.json
    assert body["info"] is not None
    assert body["parameters"] is None
    assert body["input_schema"] is not None
|
||||
|
||||
|
||||
def test_apps_describe_fields_unknown_returns_422(
    test_client: FlaskClient,
    app_in_workspace: App,
    account_token: str,
):
    """An unrecognised `fields` value is rejected with 422."""
    res = test_client.get(
        f"/openapi/v1/apps/{app_in_workspace.id}/describe?fields=garbage",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 422
|
||||
|
||||
|
||||
def test_apps_describe_fields_extra_param_returns_422(
    test_client: FlaskClient,
    app_in_workspace: App,
    account_token: str,
):
    """Unexpected extra query parameters on /describe are rejected with 422."""
    res = test_client.get(
        f"/openapi/v1/apps/{app_in_workspace.id}/describe?fields=info&page=1",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 422
|
||||
|
||||
|
||||
def test_apps_list_returns_pagination_envelope(
    test_client: FlaskClient,
    workspace_account,
    app_in_workspace: App,
    account_token: str,
):
    """/apps list returns page/limit/total plus the seeded app in data."""
    _, tenant, _ = workspace_account
    res = test_client.get(
        f"/openapi/v1/apps?workspace_id={tenant.id}&page=1&limit=20",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 200
    body = res.json
    assert body["page"] == 1
    assert body["limit"] == 20
    assert body["total"] >= 1
    assert any(d["id"] == app_in_workspace.id for d in body["data"])
|
||||
|
||||
|
||||
def test_apps_list_requires_workspace_id(test_client: FlaskClient, account_token: str):
    """Listing apps without `workspace_id` is a 400 bad request."""
    res = test_client.get("/openapi/v1/apps", headers={"Authorization": f"Bearer {account_token}"})
    assert res.status_code == 400
|
||||
|
||||
|
||||
def test_apps_list_tag_no_match_returns_empty_data_not_400(
    test_client: FlaskClient,
    workspace_account,
    app_in_workspace: App,
    account_token: str,
):
    """A tag filter with no matches yields an empty data list, not an error."""
    _, tenant, _ = workspace_account
    res = test_client.get(
        f"/openapi/v1/apps?workspace_id={tenant.id}&tag=nonexistent",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 200
    assert res.json["data"] == []
|
||||
|
||||
|
||||
def test_account_sessions_returns_envelope(
    test_client: FlaskClient,
    account_token: str,
):
    """/account/sessions uses the canonical pagination envelope, not `sessions`."""
    res = test_client.get("/openapi/v1/account/sessions", headers={"Authorization": f"Bearer {account_token}"})
    assert res.status_code == 200
    body = res.json
    # canonical envelope shape
    assert isinstance(body["data"], list)
    assert "page" in body
    assert "limit" in body
    assert "total" in body
    assert "has_more" in body
    # the bearer's own minted session must appear
    assert any(s["prefix"] == "dfoa_" for s in body["data"])
    # legacy "sessions" key must NOT appear
    assert "sessions" not in body
|
||||
@ -1,127 +0,0 @@
|
||||
"""Integration tests for the /openapi/v1 bearer auth surface.
|
||||
|
||||
Layer 0 (workspace membership), per-token rate limit, and read-scope (`apps:read`)
|
||||
acceptance/rejection on app-scoped routes.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Generator
|
||||
|
||||
import pytest
|
||||
from flask import Flask
|
||||
from flask.testing import FlaskClient
|
||||
|
||||
from extensions.ext_database import db
|
||||
from models import App, Tenant
|
||||
|
||||
|
||||
def test_info_accepts_account_bearer_with_apps_read_scope(
    test_client: FlaskClient,
    app_in_workspace: App,
    account_token: str,
) -> None:
    """An account bearer with apps:read can fetch /apps/<id>/info in its tenant."""
    res = test_client.get(
        f"/openapi/v1/apps/{app_in_workspace.id}/info",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 200
    assert res.json["id"] == app_in_workspace.id
|
||||
|
||||
|
||||
@pytest.fixture
def other_workspace_app(flask_app: Flask) -> Generator[App, None, None]:
    """A fresh app under a *different* tenant — caller has no membership row."""
    with flask_app.app_context():
        other_tenant = Tenant(name="other", status="normal")
        db.session.add(other_tenant)
        db.session.commit()
        app = App(
            tenant_id=other_tenant.id,
            name="b",
            mode="chat",
            status="normal",
            enable_site=True,
            enable_api=True,
        )
        db.session.add(app)
        db.session.commit()
        yield app
        # Teardown: app first, then its tenant.
        db.session.delete(app)
        db.session.delete(other_tenant)
        db.session.commit()
|
||||
|
||||
|
||||
def test_layer0_denies_account_bearer_without_membership(
    test_client: FlaskClient,
    account_token: str,
    other_workspace_app: App,
) -> None:
    """Account A bearer hitting an app under tenant B — Layer 0 denies on CE."""
    res = test_client.get(
        f"/openapi/v1/apps/{other_workspace_app.id}/info",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 403
    assert res.json.get("message") == "workspace_membership_revoked"
|
||||
|
||||
|
||||
def test_layer0_skipped_when_enterprise_enabled(
    test_client: FlaskClient,
    account_token: str,
    other_workspace_app: App,
    monkeypatch,
) -> None:
    """On EE, Layer 0 short-circuits — gateway RBAC owns tenant isolation.

    /info uses validate_bearer + require_workspace_member inline (no
    AppAuthzCheck), so a cross-tenant bearer reaches the app lookup and
    gets 200 — gateway is expected to enforce isolation upstream.
    """
    from configs import dify_config

    # Override the conftest autouse default for this test only.
    monkeypatch.setattr(dify_config, "ENTERPRISE_ENABLED", True)

    res = test_client.get(
        f"/openapi/v1/apps/{other_workspace_app.id}/info",
        headers={"Authorization": f"Bearer {account_token}"},
    )
    assert res.status_code == 200
    assert res.json.get("message") != "workspace_membership_revoked"
|
||||
|
||||
|
||||
def test_rate_limit_returns_429_after_60_requests(
    test_client: FlaskClient,
    account_token: str,
) -> None:
    """61st sequential GET to /account on the same bearer → 429 with Retry-After."""
    headers = {"Authorization": f"Bearer {account_token}"}
    # First 60 requests must all succeed (the per-token budget).
    for i in range(60):
        r = test_client.get("/openapi/v1/account", headers=headers)
        assert r.status_code == 200, f"unexpected fail at i={i}"

    r = test_client.get("/openapi/v1/account", headers=headers)
    assert r.status_code == 429
    assert r.headers.get("Retry-After"), "Retry-After header missing"
    assert int(r.headers["Retry-After"]) >= 1
    body = r.json or {}
    assert body.get("error") == "rate_limited"
    assert isinstance(body.get("retry_after_ms"), int)
    assert body["retry_after_ms"] >= 1000
|
||||
|
||||
|
||||
def test_rate_limit_bucket_shared_across_surfaces(
|
||||
test_client: FlaskClient,
|
||||
app_in_workspace: App,
|
||||
account_token: str,
|
||||
) -> None:
|
||||
"""30 calls to /account + 30 calls to /apps/<id>/info on same token → 61st 429s."""
|
||||
headers = {"Authorization": f"Bearer {account_token}"}
|
||||
for _ in range(30):
|
||||
assert test_client.get("/openapi/v1/account", headers=headers).status_code == 200
|
||||
for _ in range(30):
|
||||
assert test_client.get(f"/openapi/v1/apps/{app_in_workspace.id}/info", headers=headers).status_code == 200
|
||||
|
||||
r = test_client.get("/openapi/v1/account", headers=headers)
|
||||
assert r.status_code == 429
|
||||
@ -68,7 +68,10 @@ class TestChangeEmailSend:
|
||||
mock_features.return_value = SimpleNamespace(enable_change_email=True)
|
||||
mock_account = _build_account("current@example.com", "acc1")
|
||||
mock_current_account.return_value = (mock_account, None)
|
||||
mock_get_change_data.return_value = {"email": "current@example.com"}
|
||||
mock_get_change_data.return_value = {
|
||||
"email": "current@example.com",
|
||||
AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY: AccountService.CHANGE_EMAIL_PHASE_OLD_VERIFIED,
|
||||
}
|
||||
mock_send_email.return_value = "token-abc"
|
||||
|
||||
with app.test_request_context(
|
||||
@ -85,12 +88,55 @@ class TestChangeEmailSend:
|
||||
email="new@example.com",
|
||||
old_email="current@example.com",
|
||||
language="en-US",
|
||||
phase="new_email",
|
||||
phase=AccountService.CHANGE_EMAIL_PHASE_NEW,
|
||||
)
|
||||
mock_extract_ip.assert_called_once()
|
||||
mock_is_ip_limit.assert_called_once_with("127.0.0.1")
|
||||
mock_csrf.assert_called_once()
|
||||
|
||||
@patch("controllers.console.wraps.db")
|
||||
@patch("controllers.console.workspace.account.current_account_with_tenant")
|
||||
@patch("controllers.console.workspace.account.AccountService.get_change_email_data")
|
||||
@patch("controllers.console.workspace.account.AccountService.send_change_email_email")
|
||||
@patch("controllers.console.workspace.account.AccountService.is_email_send_ip_limit", return_value=False)
|
||||
@patch("controllers.console.workspace.account.extract_remote_ip", return_value="127.0.0.1")
|
||||
@patch("libs.login.check_csrf_token", return_value=None)
|
||||
@patch("controllers.console.wraps.FeatureService.get_system_features")
|
||||
def test_should_reject_new_email_phase_when_token_phase_is_not_old_verified(
|
||||
self,
|
||||
mock_features,
|
||||
mock_csrf,
|
||||
mock_extract_ip,
|
||||
mock_is_ip_limit,
|
||||
mock_send_email,
|
||||
mock_get_change_data,
|
||||
mock_current_account,
|
||||
mock_db,
|
||||
app,
|
||||
):
|
||||
"""GHSA-4q3w-q5mc-45rq: a phase-1 token must not unlock the new-email send step."""
|
||||
from controllers.console.auth.error import InvalidTokenError
|
||||
|
||||
_mock_wraps_db(mock_db)
|
||||
mock_features.return_value = SimpleNamespace(enable_change_email=True)
|
||||
mock_account = _build_account("current@example.com", "acc1")
|
||||
mock_current_account.return_value = (mock_account, None)
|
||||
mock_get_change_data.return_value = {
|
||||
"email": "current@example.com",
|
||||
AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY: AccountService.CHANGE_EMAIL_PHASE_OLD,
|
||||
}
|
||||
|
||||
with app.test_request_context(
|
||||
"/account/change-email",
|
||||
method="POST",
|
||||
json={"email": "New@Example.com", "language": "en-US", "phase": "new_email", "token": "token-123"},
|
||||
):
|
||||
_set_logged_in_user(_build_account("tester@example.com", "tester"))
|
||||
with pytest.raises(InvalidTokenError):
|
||||
ChangeEmailSendEmailApi().post()
|
||||
|
||||
mock_send_email.assert_not_called()
|
||||
|
||||
|
||||
class TestChangeEmailValidity:
|
||||
@patch("controllers.console.wraps.db")
|
||||
@ -122,7 +168,12 @@ class TestChangeEmailValidity:
|
||||
mock_account = _build_account("user@example.com", "acc2")
|
||||
mock_current_account.return_value = (mock_account, None)
|
||||
mock_is_rate_limit.return_value = False
|
||||
mock_get_data.return_value = {"email": "user@example.com", "code": "1234", "old_email": "old@example.com"}
|
||||
mock_get_data.return_value = {
|
||||
"email": "user@example.com",
|
||||
"code": "1234",
|
||||
"old_email": "old@example.com",
|
||||
AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY: AccountService.CHANGE_EMAIL_PHASE_OLD,
|
||||
}
|
||||
mock_generate_token.return_value = (None, "new-token")
|
||||
|
||||
with app.test_request_context(
|
||||
@ -138,11 +189,169 @@ class TestChangeEmailValidity:
|
||||
mock_add_rate.assert_not_called()
|
||||
mock_revoke_token.assert_called_once_with("token-123")
|
||||
mock_generate_token.assert_called_once_with(
|
||||
"user@example.com", code="1234", old_email="old@example.com", additional_data={}
|
||||
"user@example.com",
|
||||
code="1234",
|
||||
old_email="old@example.com",
|
||||
additional_data={
|
||||
AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY: AccountService.CHANGE_EMAIL_PHASE_OLD_VERIFIED,
|
||||
},
|
||||
)
|
||||
mock_reset_rate.assert_called_once_with("user@example.com")
|
||||
mock_csrf.assert_called_once()
|
||||
|
||||
@patch("controllers.console.wraps.db")
|
||||
@patch("controllers.console.workspace.account.current_account_with_tenant")
|
||||
@patch("controllers.console.workspace.account.AccountService.reset_change_email_error_rate_limit")
|
||||
@patch("controllers.console.workspace.account.AccountService.generate_change_email_token")
|
||||
@patch("controllers.console.workspace.account.AccountService.revoke_change_email_token")
|
||||
@patch("controllers.console.workspace.account.AccountService.add_change_email_error_rate_limit")
|
||||
@patch("controllers.console.workspace.account.AccountService.get_change_email_data")
|
||||
@patch("controllers.console.workspace.account.AccountService.is_change_email_error_rate_limit")
|
||||
@patch("libs.login.check_csrf_token", return_value=None)
|
||||
@patch("controllers.console.wraps.FeatureService.get_system_features")
|
||||
def test_should_upgrade_new_phase_token_to_new_verified(
|
||||
self,
|
||||
mock_features,
|
||||
mock_csrf,
|
||||
mock_is_rate_limit,
|
||||
mock_get_data,
|
||||
mock_add_rate,
|
||||
mock_revoke_token,
|
||||
mock_generate_token,
|
||||
mock_reset_rate,
|
||||
mock_current_account,
|
||||
mock_db,
|
||||
app,
|
||||
):
|
||||
_mock_wraps_db(mock_db)
|
||||
mock_features.return_value = SimpleNamespace(enable_change_email=True)
|
||||
mock_current_account.return_value = (_build_account("old@example.com", "acc"), None)
|
||||
mock_is_rate_limit.return_value = False
|
||||
mock_get_data.return_value = {
|
||||
"email": "new@example.com",
|
||||
"code": "1234",
|
||||
"old_email": "old@example.com",
|
||||
AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY: AccountService.CHANGE_EMAIL_PHASE_NEW,
|
||||
}
|
||||
mock_generate_token.return_value = (None, "new-verified-token")
|
||||
|
||||
with app.test_request_context(
|
||||
"/account/change-email/validity",
|
||||
method="POST",
|
||||
json={"email": "new@example.com", "code": "1234", "token": "token-123"},
|
||||
):
|
||||
_set_logged_in_user(_build_account("tester@example.com", "tester"))
|
||||
response = ChangeEmailCheckApi().post()
|
||||
|
||||
assert response == {"is_valid": True, "email": "new@example.com", "token": "new-verified-token"}
|
||||
mock_generate_token.assert_called_once_with(
|
||||
"new@example.com",
|
||||
code="1234",
|
||||
old_email="old@example.com",
|
||||
additional_data={
|
||||
AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY: AccountService.CHANGE_EMAIL_PHASE_NEW_VERIFIED,
|
||||
},
|
||||
)
|
||||
|
||||
@patch("controllers.console.wraps.db")
|
||||
@patch("controllers.console.workspace.account.current_account_with_tenant")
|
||||
@patch("controllers.console.workspace.account.AccountService.reset_change_email_error_rate_limit")
|
||||
@patch("controllers.console.workspace.account.AccountService.generate_change_email_token")
|
||||
@patch("controllers.console.workspace.account.AccountService.revoke_change_email_token")
|
||||
@patch("controllers.console.workspace.account.AccountService.add_change_email_error_rate_limit")
|
||||
@patch("controllers.console.workspace.account.AccountService.get_change_email_data")
|
||||
@patch("controllers.console.workspace.account.AccountService.is_change_email_error_rate_limit")
|
||||
@patch("libs.login.check_csrf_token", return_value=None)
|
||||
@patch("controllers.console.wraps.FeatureService.get_system_features")
|
||||
def test_should_reject_validity_when_token_phase_is_unknown(
|
||||
self,
|
||||
mock_features,
|
||||
mock_csrf,
|
||||
mock_is_rate_limit,
|
||||
mock_get_data,
|
||||
mock_add_rate,
|
||||
mock_revoke_token,
|
||||
mock_generate_token,
|
||||
mock_reset_rate,
|
||||
mock_current_account,
|
||||
mock_db,
|
||||
app,
|
||||
):
|
||||
"""A token whose phase marker is a string but not a known transition must be rejected."""
|
||||
from controllers.console.auth.error import InvalidTokenError
|
||||
|
||||
_mock_wraps_db(mock_db)
|
||||
mock_features.return_value = SimpleNamespace(enable_change_email=True)
|
||||
mock_current_account.return_value = (_build_account("old@example.com", "acc"), None)
|
||||
mock_is_rate_limit.return_value = False
|
||||
mock_get_data.return_value = {
|
||||
"email": "user@example.com",
|
||||
"code": "1234",
|
||||
"old_email": "old@example.com",
|
||||
AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY: "something_else",
|
||||
}
|
||||
|
||||
with app.test_request_context(
|
||||
"/account/change-email/validity",
|
||||
method="POST",
|
||||
json={"email": "user@example.com", "code": "1234", "token": "token-123"},
|
||||
):
|
||||
_set_logged_in_user(_build_account("tester@example.com", "tester"))
|
||||
with pytest.raises(InvalidTokenError):
|
||||
ChangeEmailCheckApi().post()
|
||||
|
||||
mock_revoke_token.assert_not_called()
|
||||
mock_generate_token.assert_not_called()
|
||||
|
||||
@patch("controllers.console.wraps.db")
|
||||
@patch("controllers.console.workspace.account.current_account_with_tenant")
|
||||
@patch("controllers.console.workspace.account.AccountService.reset_change_email_error_rate_limit")
|
||||
@patch("controllers.console.workspace.account.AccountService.generate_change_email_token")
|
||||
@patch("controllers.console.workspace.account.AccountService.revoke_change_email_token")
|
||||
@patch("controllers.console.workspace.account.AccountService.add_change_email_error_rate_limit")
|
||||
@patch("controllers.console.workspace.account.AccountService.get_change_email_data")
|
||||
@patch("controllers.console.workspace.account.AccountService.is_change_email_error_rate_limit")
|
||||
@patch("libs.login.check_csrf_token", return_value=None)
|
||||
@patch("controllers.console.wraps.FeatureService.get_system_features")
|
||||
def test_should_reject_validity_when_token_has_no_phase(
|
||||
self,
|
||||
mock_features,
|
||||
mock_csrf,
|
||||
mock_is_rate_limit,
|
||||
mock_get_data,
|
||||
mock_add_rate,
|
||||
mock_revoke_token,
|
||||
mock_generate_token,
|
||||
mock_reset_rate,
|
||||
mock_current_account,
|
||||
mock_db,
|
||||
app,
|
||||
):
|
||||
"""A token minted without a phase marker (e.g. a hand-crafted token) must not validate."""
|
||||
from controllers.console.auth.error import InvalidTokenError
|
||||
|
||||
_mock_wraps_db(mock_db)
|
||||
mock_features.return_value = SimpleNamespace(enable_change_email=True)
|
||||
mock_current_account.return_value = (_build_account("old@example.com", "acc"), None)
|
||||
mock_is_rate_limit.return_value = False
|
||||
mock_get_data.return_value = {
|
||||
"email": "user@example.com",
|
||||
"code": "1234",
|
||||
"old_email": "old@example.com",
|
||||
}
|
||||
|
||||
with app.test_request_context(
|
||||
"/account/change-email/validity",
|
||||
method="POST",
|
||||
json={"email": "user@example.com", "code": "1234", "token": "token-123"},
|
||||
):
|
||||
_set_logged_in_user(_build_account("tester@example.com", "tester"))
|
||||
with pytest.raises(InvalidTokenError):
|
||||
ChangeEmailCheckApi().post()
|
||||
|
||||
mock_revoke_token.assert_not_called()
|
||||
mock_generate_token.assert_not_called()
|
||||
|
||||
|
||||
class TestChangeEmailReset:
|
||||
@patch("controllers.console.wraps.db")
|
||||
@ -175,7 +384,11 @@ class TestChangeEmailReset:
|
||||
mock_current_account.return_value = (current_user, None)
|
||||
mock_is_freeze.return_value = False
|
||||
mock_check_unique.return_value = True
|
||||
mock_get_data.return_value = {"old_email": "OLD@example.com"}
|
||||
mock_get_data.return_value = {
|
||||
"email": "new@example.com",
|
||||
"old_email": "OLD@example.com",
|
||||
AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY: AccountService.CHANGE_EMAIL_PHASE_NEW_VERIFIED,
|
||||
}
|
||||
mock_account_after_update = _build_account("new@example.com", "acc3-updated")
|
||||
mock_update_account.return_value = mock_account_after_update
|
||||
|
||||
@ -194,6 +407,155 @@ class TestChangeEmailReset:
|
||||
mock_send_notify.assert_called_once_with(email="new@example.com")
|
||||
mock_csrf.assert_called_once()
|
||||
|
||||
@patch("controllers.console.wraps.db")
|
||||
@patch("controllers.console.workspace.account.current_account_with_tenant")
|
||||
@patch("controllers.console.workspace.account.AccountService.send_change_email_completed_notify_email")
|
||||
@patch("controllers.console.workspace.account.AccountService.update_account_email")
|
||||
@patch("controllers.console.workspace.account.AccountService.revoke_change_email_token")
|
||||
@patch("controllers.console.workspace.account.AccountService.get_change_email_data")
|
||||
@patch("controllers.console.workspace.account.AccountService.check_email_unique")
|
||||
@patch("controllers.console.workspace.account.AccountService.is_account_in_freeze")
|
||||
@patch("libs.login.check_csrf_token", return_value=None)
|
||||
@patch("controllers.console.wraps.FeatureService.get_system_features")
|
||||
def test_should_reject_reset_when_token_phase_is_not_new_verified(
|
||||
self,
|
||||
mock_features,
|
||||
mock_csrf,
|
||||
mock_is_freeze,
|
||||
mock_check_unique,
|
||||
mock_get_data,
|
||||
mock_revoke_token,
|
||||
mock_update_account,
|
||||
mock_send_notify,
|
||||
mock_current_account,
|
||||
mock_db,
|
||||
app,
|
||||
):
|
||||
"""GHSA-4q3w-q5mc-45rq PoC: phase-1 token must not be usable against /reset."""
|
||||
from controllers.console.auth.error import InvalidTokenError
|
||||
|
||||
_mock_wraps_db(mock_db)
|
||||
mock_features.return_value = SimpleNamespace(enable_change_email=True)
|
||||
current_user = _build_account("old@example.com", "acc3")
|
||||
mock_current_account.return_value = (current_user, None)
|
||||
mock_is_freeze.return_value = False
|
||||
mock_check_unique.return_value = True
|
||||
# Simulate a token straight out of step #1 (phase=old_email) — exactly
|
||||
# the replay used in the advisory PoC.
|
||||
mock_get_data.return_value = {
|
||||
"email": "old@example.com",
|
||||
"old_email": "old@example.com",
|
||||
AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY: AccountService.CHANGE_EMAIL_PHASE_OLD,
|
||||
}
|
||||
|
||||
with app.test_request_context(
|
||||
"/account/change-email/reset",
|
||||
method="POST",
|
||||
json={"new_email": "attacker@example.com", "token": "token-from-step1"},
|
||||
):
|
||||
_set_logged_in_user(_build_account("tester@example.com", "tester"))
|
||||
with pytest.raises(InvalidTokenError):
|
||||
ChangeEmailResetApi().post()
|
||||
|
||||
mock_revoke_token.assert_not_called()
|
||||
mock_update_account.assert_not_called()
|
||||
mock_send_notify.assert_not_called()
|
||||
|
||||
@patch("controllers.console.wraps.db")
|
||||
@patch("controllers.console.workspace.account.current_account_with_tenant")
|
||||
@patch("controllers.console.workspace.account.AccountService.send_change_email_completed_notify_email")
|
||||
@patch("controllers.console.workspace.account.AccountService.update_account_email")
|
||||
@patch("controllers.console.workspace.account.AccountService.revoke_change_email_token")
|
||||
@patch("controllers.console.workspace.account.AccountService.get_change_email_data")
|
||||
@patch("controllers.console.workspace.account.AccountService.check_email_unique")
|
||||
@patch("controllers.console.workspace.account.AccountService.is_account_in_freeze")
|
||||
@patch("libs.login.check_csrf_token", return_value=None)
|
||||
@patch("controllers.console.wraps.FeatureService.get_system_features")
|
||||
def test_should_reject_reset_when_token_email_differs_from_payload_new_email(
|
||||
self,
|
||||
mock_features,
|
||||
mock_csrf,
|
||||
mock_is_freeze,
|
||||
mock_check_unique,
|
||||
mock_get_data,
|
||||
mock_revoke_token,
|
||||
mock_update_account,
|
||||
mock_send_notify,
|
||||
mock_current_account,
|
||||
mock_db,
|
||||
app,
|
||||
):
|
||||
"""A verified token for address A must not be replayed to change to address B."""
|
||||
from controllers.console.auth.error import InvalidTokenError
|
||||
|
||||
_mock_wraps_db(mock_db)
|
||||
mock_features.return_value = SimpleNamespace(enable_change_email=True)
|
||||
current_user = _build_account("old@example.com", "acc3")
|
||||
mock_current_account.return_value = (current_user, None)
|
||||
mock_is_freeze.return_value = False
|
||||
mock_check_unique.return_value = True
|
||||
mock_get_data.return_value = {
|
||||
"email": "verified@example.com",
|
||||
"old_email": "old@example.com",
|
||||
AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY: AccountService.CHANGE_EMAIL_PHASE_NEW_VERIFIED,
|
||||
}
|
||||
|
||||
with app.test_request_context(
|
||||
"/account/change-email/reset",
|
||||
method="POST",
|
||||
json={"new_email": "attacker@example.com", "token": "token-verified"},
|
||||
):
|
||||
_set_logged_in_user(_build_account("tester@example.com", "tester"))
|
||||
with pytest.raises(InvalidTokenError):
|
||||
ChangeEmailResetApi().post()
|
||||
|
||||
mock_revoke_token.assert_not_called()
|
||||
mock_update_account.assert_not_called()
|
||||
mock_send_notify.assert_not_called()
|
||||
|
||||
|
||||
class TestAccountServiceSendChangeEmailEmail:
|
||||
"""Service-level coverage for the phase-bound changes in `send_change_email_email`."""
|
||||
|
||||
def test_should_raise_value_error_for_invalid_phase(self):
|
||||
with pytest.raises(ValueError, match="phase must be one of"):
|
||||
AccountService.send_change_email_email(
|
||||
email="user@example.com",
|
||||
old_email="user@example.com",
|
||||
phase="old_email_verified",
|
||||
)
|
||||
|
||||
@patch("services.account_service.send_change_mail_task")
|
||||
@patch("services.account_service.AccountService.change_email_rate_limiter")
|
||||
@patch("services.account_service.AccountService.generate_change_email_token")
|
||||
def test_should_stamp_phase_into_generated_token(
|
||||
self,
|
||||
mock_generate_token,
|
||||
mock_rate_limiter,
|
||||
mock_mail_task,
|
||||
):
|
||||
mock_rate_limiter.is_rate_limited.return_value = False
|
||||
mock_generate_token.return_value = ("123456", "the-token")
|
||||
|
||||
returned = AccountService.send_change_email_email(
|
||||
email="user@example.com",
|
||||
old_email="user@example.com",
|
||||
language="en-US",
|
||||
phase=AccountService.CHANGE_EMAIL_PHASE_NEW,
|
||||
)
|
||||
|
||||
assert returned == "the-token"
|
||||
mock_generate_token.assert_called_once_with(
|
||||
"user@example.com",
|
||||
None,
|
||||
old_email="user@example.com",
|
||||
additional_data={
|
||||
AccountService.CHANGE_EMAIL_TOKEN_PHASE_KEY: AccountService.CHANGE_EMAIL_PHASE_NEW,
|
||||
},
|
||||
)
|
||||
mock_mail_task.delay.assert_called_once()
|
||||
mock_rate_limiter.increment_rate_limit.assert_called_once_with("user@example.com")
|
||||
|
||||
|
||||
class TestAccountDeletionFeedback:
|
||||
@patch("controllers.console.wraps.db")
|
||||
|
||||
@ -1,54 +0,0 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from controllers.openapi.auth.composition import OAUTH_BEARER_PIPELINE, _resolve_app_authz_strategy
|
||||
from controllers.openapi.auth.pipeline import Pipeline
|
||||
from controllers.openapi.auth.steps import (
|
||||
AppAuthzCheck,
|
||||
AppResolver,
|
||||
BearerCheck,
|
||||
CallerMount,
|
||||
ScopeCheck,
|
||||
WorkspaceMembershipCheck,
|
||||
)
|
||||
from controllers.openapi.auth.strategies import (
|
||||
AccountMounter,
|
||||
AclStrategy,
|
||||
EndUserMounter,
|
||||
MembershipStrategy,
|
||||
)
|
||||
|
||||
|
||||
def test_pipeline_is_composed():
|
||||
assert isinstance(OAUTH_BEARER_PIPELINE, Pipeline)
|
||||
|
||||
|
||||
def test_pipeline_step_order():
|
||||
"""BearerCheck → ScopeCheck → AppResolver → WorkspaceMembershipCheck →
|
||||
AppAuthzCheck → CallerMount. Rate-limit is enforced inside
|
||||
`BearerAuthenticator.authenticate`, not as a separate pipeline step."""
|
||||
steps = OAUTH_BEARER_PIPELINE._steps
|
||||
assert isinstance(steps[0], BearerCheck)
|
||||
assert isinstance(steps[1], ScopeCheck)
|
||||
assert isinstance(steps[2], AppResolver)
|
||||
assert isinstance(steps[3], WorkspaceMembershipCheck)
|
||||
assert isinstance(steps[4], AppAuthzCheck)
|
||||
assert isinstance(steps[5], CallerMount)
|
||||
|
||||
|
||||
def test_caller_mount_has_both_mounters():
|
||||
cm = OAUTH_BEARER_PIPELINE._steps[5]
|
||||
kinds = {type(m) for m in cm._mounters}
|
||||
assert AccountMounter in kinds
|
||||
assert EndUserMounter in kinds
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.composition.FeatureService")
|
||||
def test_strategy_resolver_picks_acl_when_enabled(fs):
|
||||
fs.get_system_features.return_value.webapp_auth.enabled = True
|
||||
assert isinstance(_resolve_app_authz_strategy(), AclStrategy)
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.composition.FeatureService")
|
||||
def test_strategy_resolver_picks_membership_when_disabled(fs):
|
||||
fs.get_system_features.return_value.webapp_auth.enabled = False
|
||||
assert isinstance(_resolve_app_authz_strategy(), MembershipStrategy)
|
||||
@ -1,21 +0,0 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from controllers.openapi.auth.context import Context
|
||||
|
||||
|
||||
def test_context_starts_unpopulated():
|
||||
ctx = Context(request=MagicMock(), required_scope="apps:run")
|
||||
assert ctx.subject_type is None
|
||||
assert ctx.subject_email is None
|
||||
assert ctx.account_id is None
|
||||
assert ctx.scopes == frozenset()
|
||||
assert ctx.app is None
|
||||
assert ctx.tenant is None
|
||||
assert ctx.caller is None
|
||||
assert ctx.caller_kind is None
|
||||
|
||||
|
||||
def test_context_fields_are_mutable():
|
||||
ctx = Context(request=MagicMock(), required_scope="apps:run")
|
||||
ctx.scopes = frozenset({"full"})
|
||||
assert "full" in ctx.scopes
|
||||
@ -1,61 +0,0 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from flask import Flask
|
||||
|
||||
from controllers.openapi.auth.context import Context
|
||||
from controllers.openapi.auth.pipeline import Pipeline
|
||||
|
||||
|
||||
def test_run_invokes_each_step_in_order():
|
||||
calls = []
|
||||
|
||||
class S:
|
||||
def __init__(self, tag):
|
||||
self.tag = tag
|
||||
|
||||
def __call__(self, ctx):
|
||||
calls.append(self.tag)
|
||||
|
||||
Pipeline(S("a"), S("b"), S("c")).run(Context(request=MagicMock(), required_scope="x"))
|
||||
assert calls == ["a", "b", "c"]
|
||||
|
||||
|
||||
def test_run_short_circuits_on_raise():
|
||||
calls = []
|
||||
|
||||
class Boom:
|
||||
def __call__(self, ctx):
|
||||
raise RuntimeError("boom")
|
||||
|
||||
class Tail:
|
||||
def __call__(self, ctx):
|
||||
calls.append("ran")
|
||||
|
||||
with pytest.raises(RuntimeError):
|
||||
Pipeline(Boom(), Tail()).run(Context(request=MagicMock(), required_scope="x"))
|
||||
assert calls == []
|
||||
|
||||
|
||||
def test_guard_decorator_runs_pipeline_and_unpacks_handler_kwargs():
|
||||
seen = {}
|
||||
|
||||
class FakeStep:
|
||||
def __call__(self, ctx):
|
||||
ctx.app = "APP"
|
||||
ctx.caller = "CALLER"
|
||||
ctx.caller_kind = "account"
|
||||
|
||||
pipeline = Pipeline(FakeStep())
|
||||
|
||||
@pipeline.guard(scope="apps:run")
|
||||
def handler(app_model, caller, caller_kind):
|
||||
seen["app_model"] = app_model
|
||||
seen["caller"] = caller
|
||||
seen["caller_kind"] = caller_kind
|
||||
return "ok"
|
||||
|
||||
app = Flask(__name__)
|
||||
with app.test_request_context("/x", method="POST"):
|
||||
assert handler() == "ok"
|
||||
assert seen == {"app_model": "APP", "caller": "CALLER", "caller_kind": "account"}
|
||||
@ -1,64 +0,0 @@
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from werkzeug.exceptions import BadRequest, Forbidden, NotFound
|
||||
|
||||
from controllers.openapi.auth.context import Context
|
||||
from controllers.openapi.auth.steps import AppResolver
|
||||
from models import TenantStatus
|
||||
|
||||
|
||||
def _ctx(view_args):
|
||||
req = MagicMock()
|
||||
req.view_args = view_args
|
||||
return Context(request=req, required_scope="apps:run")
|
||||
|
||||
|
||||
def _app(*, status="normal", enable_api=True):
|
||||
return SimpleNamespace(id="app1", tenant_id="t1", status=status, enable_api=enable_api)
|
||||
|
||||
|
||||
def _tenant(*, status=TenantStatus.NORMAL):
|
||||
return SimpleNamespace(id="t1", status=status)
|
||||
|
||||
|
||||
def test_resolver_rejects_missing_path_param():
|
||||
with pytest.raises(BadRequest):
|
||||
AppResolver()(_ctx({}))
|
||||
|
||||
|
||||
def test_resolver_rejects_none_view_args():
|
||||
with pytest.raises(BadRequest):
|
||||
AppResolver()(_ctx(None))
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.steps.db")
|
||||
def test_resolver_404_when_app_missing(db):
|
||||
db.session.get.side_effect = [None]
|
||||
with pytest.raises(NotFound):
|
||||
AppResolver()(_ctx({"app_id": "x"}))
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.steps.db")
|
||||
def test_resolver_403_when_disabled(db):
|
||||
db.session.get.side_effect = [_app(enable_api=False)]
|
||||
with pytest.raises(Forbidden) as exc:
|
||||
AppResolver()(_ctx({"app_id": "x"}))
|
||||
assert "service_api_disabled" in str(exc.value.description)
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.steps.db")
|
||||
def test_resolver_403_when_tenant_archived(db):
|
||||
db.session.get.side_effect = [_app(), _tenant(status=TenantStatus.ARCHIVE)]
|
||||
with pytest.raises(Forbidden):
|
||||
AppResolver()(_ctx({"app_id": "x"}))
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.steps.db")
|
||||
def test_resolver_populates_app_and_tenant(db):
|
||||
db.session.get.side_effect = [_app(), _tenant()]
|
||||
ctx = _ctx({"app_id": "x"})
|
||||
AppResolver()(ctx)
|
||||
assert ctx.app.id == "app1"
|
||||
assert ctx.tenant.id == "t1"
|
||||
@ -1,53 +0,0 @@
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from werkzeug.exceptions import Forbidden
|
||||
|
||||
from controllers.openapi.auth.context import Context
|
||||
from controllers.openapi.auth.steps import AppAuthzCheck
|
||||
from controllers.openapi.auth.strategies import AclStrategy, MembershipStrategy
|
||||
from libs.oauth_bearer import SubjectType
|
||||
|
||||
|
||||
def _ctx(*, subject_type, account_id="acc1"):
|
||||
c = Context(request=MagicMock(), required_scope="apps:run")
|
||||
c.subject_type = subject_type
|
||||
c.subject_email = "alice@example.com"
|
||||
c.account_id = account_id
|
||||
c.app = SimpleNamespace(id="app1")
|
||||
c.tenant = SimpleNamespace(id="t1")
|
||||
return c
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.strategies.EnterpriseService")
|
||||
def test_acl_strategy_calls_inner_api(ent):
|
||||
ent.WebAppAuth.is_user_allowed_to_access_webapp.return_value = True
|
||||
assert AclStrategy().authorize(_ctx(subject_type=SubjectType.ACCOUNT)) is True
|
||||
ent.WebAppAuth.is_user_allowed_to_access_webapp.assert_called_once_with(
|
||||
user_id="alice@example.com",
|
||||
app_id="app1",
|
||||
)
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.strategies._has_tenant_membership")
|
||||
def test_membership_strategy_uses_join_lookup(member):
|
||||
member.return_value = True
|
||||
assert MembershipStrategy().authorize(_ctx(subject_type=SubjectType.ACCOUNT)) is True
|
||||
member.assert_called_once_with("acc1", "t1")
|
||||
|
||||
|
||||
def test_membership_strategy_rejects_external_sso():
|
||||
assert MembershipStrategy().authorize(_ctx(subject_type=SubjectType.EXTERNAL_SSO, account_id=None)) is False
|
||||
|
||||
|
||||
def test_app_authz_check_raises_when_strategy_denies():
|
||||
deny = SimpleNamespace(authorize=lambda c: False)
|
||||
with pytest.raises(Forbidden) as exc:
|
||||
AppAuthzCheck(lambda: deny)(_ctx(subject_type=SubjectType.ACCOUNT))
|
||||
assert "subject_no_app_access" in str(exc.value.description)
|
||||
|
||||
|
||||
def test_app_authz_check_passes_when_strategy_allows():
|
||||
allow = SimpleNamespace(authorize=lambda c: True)
|
||||
AppAuthzCheck(lambda: allow)(_ctx(subject_type=SubjectType.ACCOUNT))
|
||||
@ -1,56 +0,0 @@
|
||||
import uuid
|
||||
from datetime import UTC, datetime
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from werkzeug.exceptions import Unauthorized
|
||||
|
||||
from controllers.openapi.auth.context import Context
|
||||
from controllers.openapi.auth.steps import BearerCheck
|
||||
from libs.oauth_bearer import AuthContext, InvalidBearerError, Scope, SubjectType
|
||||
|
||||
|
||||
def _ctx(headers):
|
||||
req = MagicMock()
|
||||
req.headers = headers
|
||||
return Context(request=req, required_scope="apps:run")
|
||||
|
||||
|
||||
def test_bearer_check_rejects_missing_header():
|
||||
with pytest.raises(Unauthorized):
|
||||
BearerCheck()(_ctx({}))
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.steps.get_authenticator")
|
||||
def test_bearer_check_rejects_unknown_prefix(get_auth):
|
||||
get_auth.return_value.authenticate.side_effect = InvalidBearerError("unknown token prefix")
|
||||
with pytest.raises(Unauthorized):
|
||||
BearerCheck()(_ctx({"Authorization": "Bearer xxx_abc"}))
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.steps.get_authenticator")
|
||||
def test_bearer_check_populates_context(get_auth):
|
||||
tok_id = uuid.uuid4()
|
||||
authn = AuthContext(
|
||||
subject_type=SubjectType.ACCOUNT,
|
||||
subject_email="a@x.com",
|
||||
subject_issuer=None,
|
||||
account_id=None,
|
||||
scopes=frozenset({Scope.FULL}),
|
||||
token_id=tok_id,
|
||||
source="oauth-account",
|
||||
expires_at=datetime.now(UTC),
|
||||
token_hash="hash-1",
|
||||
verified_tenants={},
|
||||
)
|
||||
get_auth.return_value.authenticate.return_value = authn
|
||||
|
||||
ctx = _ctx({"Authorization": "Bearer dfoa_abc"})
|
||||
BearerCheck()(ctx)
|
||||
|
||||
assert ctx.subject_type == SubjectType.ACCOUNT
|
||||
assert ctx.subject_email == "a@x.com"
|
||||
assert ctx.scopes == frozenset({Scope.FULL})
|
||||
assert ctx.source == "oauth-account"
|
||||
assert ctx.token_id == tok_id
|
||||
assert ctx.token_hash == "hash-1"
|
||||
@ -1,157 +0,0 @@
|
||||
"""Unit tests for WorkspaceMembershipCheck (Layer 0)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from werkzeug.exceptions import Forbidden
|
||||
|
||||
from controllers.openapi.auth.context import Context
|
||||
from controllers.openapi.auth.steps import WorkspaceMembershipCheck
|
||||
from libs.oauth_bearer import SubjectType
|
||||
|
||||
|
||||
def _ctx(*, subject_type, account_id, tenant_id, cached_verified_tenants=None, token_hash=None) -> Context:
|
||||
c = Context(request=MagicMock(), required_scope="apps:read")
|
||||
c.subject_type = subject_type
|
||||
c.account_id = account_id
|
||||
c.tenant = SimpleNamespace(id=tenant_id) if tenant_id else None
|
||||
c.cached_verified_tenants = cached_verified_tenants
|
||||
c.token_hash = token_hash
|
||||
return c
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def step():
|
||||
return WorkspaceMembershipCheck()
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.steps.dify_config")
|
||||
@patch("libs.oauth_bearer.record_layer0_verdict")
|
||||
@patch("libs.oauth_bearer.db")
|
||||
def test_skips_when_enterprise_enabled(mock_db, mock_record, mock_cfg, step):
|
||||
mock_cfg.ENTERPRISE_ENABLED = True
|
||||
ctx = _ctx(
|
||||
subject_type=SubjectType.ACCOUNT,
|
||||
account_id=str(uuid.uuid4()),
|
||||
tenant_id=str(uuid.uuid4()),
|
||||
cached_verified_tenants={},
|
||||
token_hash="hash-1",
|
||||
)
|
||||
step(ctx) # no raise
|
||||
mock_db.session.execute.assert_not_called()
|
||||
mock_record.assert_not_called()
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.steps.dify_config")
|
||||
@patch("libs.oauth_bearer.record_layer0_verdict")
|
||||
@patch("libs.oauth_bearer.db")
|
||||
def test_skips_for_external_sso(mock_db, mock_record, mock_cfg, step):
|
||||
mock_cfg.ENTERPRISE_ENABLED = False
|
||||
ctx = _ctx(
|
||||
subject_type=SubjectType.EXTERNAL_SSO,
|
||||
account_id=None,
|
||||
tenant_id=str(uuid.uuid4()),
|
||||
cached_verified_tenants={},
|
||||
token_hash="hash-1",
|
||||
)
|
||||
step(ctx) # no raise
|
||||
mock_db.session.execute.assert_not_called()
|
||||
mock_record.assert_not_called()
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.steps.dify_config")
|
||||
@patch("libs.oauth_bearer.record_layer0_verdict")
|
||||
@patch("libs.oauth_bearer.db")
|
||||
def test_uses_cached_ok(mock_db, mock_record, mock_cfg, step):
|
||||
mock_cfg.ENTERPRISE_ENABLED = False
|
||||
ctx = _ctx(
|
||||
subject_type=SubjectType.ACCOUNT,
|
||||
account_id="a1",
|
||||
tenant_id="t1",
|
||||
cached_verified_tenants={"t1": True},
|
||||
token_hash="hash-1",
|
||||
)
|
||||
step(ctx)
|
||||
mock_db.session.execute.assert_not_called()
|
||||
mock_record.assert_not_called()
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.steps.dify_config")
|
||||
@patch("libs.oauth_bearer.record_layer0_verdict")
|
||||
@patch("libs.oauth_bearer.db")
|
||||
def test_uses_cached_denied(mock_db, mock_record, mock_cfg, step):
|
||||
mock_cfg.ENTERPRISE_ENABLED = False
|
||||
ctx = _ctx(
|
||||
subject_type=SubjectType.ACCOUNT,
|
||||
account_id="a1",
|
||||
tenant_id="t1",
|
||||
cached_verified_tenants={"t1": False},
|
||||
token_hash="hash-1",
|
||||
)
|
||||
with pytest.raises(Forbidden, match="workspace_membership_revoked"):
|
||||
step(ctx)
|
||||
mock_db.session.execute.assert_not_called()
|
||||
mock_record.assert_not_called()
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.steps.dify_config")
|
||||
@patch("libs.oauth_bearer.record_layer0_verdict")
|
||||
@patch("libs.oauth_bearer.db")
|
||||
def test_denies_when_no_membership(mock_db, mock_record, mock_cfg, step):
|
||||
mock_cfg.ENTERPRISE_ENABLED = False
|
||||
mock_db.session.execute.return_value.scalar_one_or_none.return_value = None
|
||||
ctx = _ctx(
|
||||
subject_type=SubjectType.ACCOUNT,
|
||||
account_id="a1",
|
||||
tenant_id="t1",
|
||||
cached_verified_tenants={},
|
||||
token_hash="hash-1",
|
||||
)
|
||||
with pytest.raises(Forbidden, match="workspace_membership_revoked"):
|
||||
step(ctx)
|
||||
mock_record.assert_called_once_with("hash-1", "t1", False)
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.steps.dify_config")
|
||||
@patch("libs.oauth_bearer.record_layer0_verdict")
|
||||
@patch("libs.oauth_bearer.db")
|
||||
def test_denies_when_account_inactive(mock_db, mock_record, mock_cfg, step):
|
||||
mock_cfg.ENTERPRISE_ENABLED = False
|
||||
mock_db.session.execute.side_effect = [
|
||||
MagicMock(scalar_one_or_none=MagicMock(return_value="join-id")),
|
||||
MagicMock(scalar_one_or_none=MagicMock(return_value="banned")),
|
||||
]
|
||||
ctx = _ctx(
|
||||
subject_type=SubjectType.ACCOUNT,
|
||||
account_id="a1",
|
||||
tenant_id="t1",
|
||||
cached_verified_tenants={},
|
||||
token_hash="hash-1",
|
||||
)
|
||||
with pytest.raises(Forbidden, match="workspace_membership_revoked"):
|
||||
step(ctx)
|
||||
mock_record.assert_called_once_with("hash-1", "t1", False)
|
||||
|
||||
|
||||
@patch("controllers.openapi.auth.steps.dify_config")
|
||||
@patch("libs.oauth_bearer.record_layer0_verdict")
|
||||
@patch("libs.oauth_bearer.db")
|
||||
def test_allows_active_member(mock_db, mock_record, mock_cfg, step):
|
||||
mock_cfg.ENTERPRISE_ENABLED = False
|
||||
mock_db.session.execute.side_effect = [
|
||||
MagicMock(scalar_one_or_none=MagicMock(return_value="join-id")),
|
||||
MagicMock(scalar_one_or_none=MagicMock(return_value="active")),
|
||||
]
|
||||
ctx = _ctx(
|
||||
subject_type=SubjectType.ACCOUNT,
|
||||
account_id="a1",
|
||||
tenant_id="t1",
|
||||
cached_verified_tenants={},
|
||||
token_hash="hash-1",
|
||||
)
|
||||
step(ctx) # no raise
|
||||
mock_record.assert_called_once_with("hash-1", "t1", True)
|
||||
@ -1,77 +0,0 @@
|
||||
from types import SimpleNamespace
from unittest.mock import MagicMock, patch

import pytest
from werkzeug.exceptions import Unauthorized

from controllers.openapi.auth.context import Context
from controllers.openapi.auth.steps import CallerMount
from controllers.openapi.auth.strategies import AccountMounter, EndUserMounter
from core.app.entities.app_invoke_entities import InvokeFrom
from libs.oauth_bearer import SubjectType


def _ctx(*, subject_type, account_id=None, subject_email=None):
    """Pipeline Context pre-populated with a fixed app/tenant pair."""
    context = Context(request=MagicMock(), required_scope="apps:run")
    context.subject_type = subject_type
    context.account_id = account_id
    context.subject_email = subject_email
    context.app = SimpleNamespace(id="app1")
    context.tenant = SimpleNamespace(id="t1")
    return context


@patch("controllers.openapi.auth.strategies._login_as")
@patch("controllers.openapi.auth.strategies.db")
def test_account_mounter(mock_db, mock_login):
    """AccountMounter loads the account row, binds the tenant, and logs it in."""
    account = SimpleNamespace()
    mock_db.session.get.return_value = account
    context = _ctx(subject_type=SubjectType.ACCOUNT, account_id="acc1")

    AccountMounter().mount(context)

    assert context.caller is account
    assert context.caller.current_tenant is context.tenant
    assert context.caller_kind == "account"
    mock_login.assert_called_once_with(account)


@patch("controllers.openapi.auth.strategies._login_as")
@patch("controllers.openapi.auth.strategies.EndUserService")
def test_end_user_mounter(mock_service, mock_login):
    """EndUserMounter provisions an end-user keyed by the subject email."""
    end_user = SimpleNamespace()
    mock_service.get_or_create_end_user_by_type.return_value = end_user
    context = _ctx(subject_type=SubjectType.EXTERNAL_SSO, subject_email="a@x.com")

    EndUserMounter().mount(context)

    mock_service.get_or_create_end_user_by_type.assert_called_once_with(
        InvokeFrom.OPENAPI,
        tenant_id="t1",
        app_id="app1",
        user_id="a@x.com",
    )
    assert context.caller is end_user
    assert context.caller_kind == "end_user"


def test_caller_mount_dispatches_by_subject_type():
    """CallerMount invokes the first strategy whose applies_to() matches."""
    calls = {}

    class _Recorder:
        def __init__(self, subject_type, tag):
            self._subject_type = subject_type
            self._tag = tag

        def applies_to(self, subject_type):
            return subject_type == self._subject_type

        def mount(self, ctx):
            calls["who"] = self._tag

    mount_step = CallerMount(
        _Recorder(SubjectType.ACCOUNT, "acct"),
        _Recorder(SubjectType.EXTERNAL_SSO, "sso"),
    )
    mount_step(_ctx(subject_type=SubjectType.EXTERNAL_SSO))
    assert calls == {"who": "sso"}


def test_caller_mount_raises_when_none_applies():
    """No matching strategy is an authentication failure, not a silent pass."""
    with pytest.raises(Unauthorized):
        CallerMount()(_ctx(subject_type=SubjectType.ACCOUNT))
|
||||
@ -1,27 +0,0 @@
|
||||
from unittest.mock import MagicMock

import pytest
from werkzeug.exceptions import Forbidden

from controllers.openapi.auth.context import Context
from controllers.openapi.auth.steps import ScopeCheck


def _ctx(scopes, required):
    """Context whose granted scope set and required scope are both fixed."""
    context = Context(request=MagicMock(), required_scope=required)
    context.scopes = frozenset(scopes)
    return context


def test_scope_check_passes_on_full():
    """The wildcard "full" scope satisfies any requirement."""
    ScopeCheck()(_ctx({"full"}, "apps:run"))


def test_scope_check_passes_on_explicit_match():
    """An exact scope grant satisfies its own requirement."""
    ScopeCheck()(_ctx({"apps:run"}, "apps:run"))


def test_scope_check_rejects_when_missing():
    """A token lacking the required scope is rejected with a named reason."""
    with pytest.raises(Forbidden) as exc_info:
        ScopeCheck()(_ctx({"apps:read"}, "apps:run"))
    assert "insufficient_scope" in str(exc_info.value.description)
|
||||
@ -1,15 +0,0 @@
|
||||
import pytest

from controllers.openapi.auth.pipeline import Pipeline


@pytest.fixture
def bypass_pipeline(monkeypatch):
    """Stub Pipeline.run so decorated endpoints skip real authentication.

    The module-level @OAUTH_BEARER_PIPELINE.guard(...) decorator captures
    the real pipeline object at import time, so patching the module
    attribute is ineffective; patching Pipeline.run on the class is the
    only bypass that actually takes effect.
    """
    monkeypatch.setattr(Pipeline, "run", lambda _self, _ctx: None)
|
||||
@ -1,138 +0,0 @@
|
||||
"""User-scoped identity + session endpoints under /openapi/v1/account."""
|
||||
|
||||
import builtins
|
||||
|
||||
import pytest
|
||||
from flask import Flask
|
||||
from flask.views import MethodView
|
||||
|
||||
from controllers.openapi import bp as openapi_bp
|
||||
from controllers.openapi.account import (
|
||||
AccountApi,
|
||||
AccountSessionByIdApi,
|
||||
AccountSessionsApi,
|
||||
AccountSessionsSelfApi,
|
||||
)
|
||||
|
||||
if not hasattr(builtins, "MethodView"):
|
||||
builtins.MethodView = MethodView # type: ignore[attr-defined]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def openapi_app() -> Flask:
|
||||
app = Flask(__name__)
|
||||
app.config["TESTING"] = True
|
||||
app.register_blueprint(openapi_bp)
|
||||
return app
|
||||
|
||||
|
||||
def _rule(app: Flask, path: str):
|
||||
return next(r for r in app.url_map.iter_rules() if r.rule == path)
|
||||
|
||||
|
||||
def test_account_route_registered(openapi_app: Flask):
|
||||
rules = {r.rule for r in openapi_app.url_map.iter_rules()}
|
||||
assert "/openapi/v1/account" in rules
|
||||
|
||||
|
||||
def test_account_dispatches_to_class(openapi_app: Flask):
|
||||
rule = _rule(openapi_app, "/openapi/v1/account")
|
||||
assert openapi_app.view_functions[rule.endpoint].view_class is AccountApi
|
||||
|
||||
|
||||
def test_account_sessions_self_route_registered(openapi_app: Flask):
|
||||
rules = {r.rule for r in openapi_app.url_map.iter_rules()}
|
||||
assert "/openapi/v1/account/sessions/self" in rules
|
||||
|
||||
|
||||
def test_sessions_self_dispatches_to_class(openapi_app: Flask):
|
||||
rule = _rule(openapi_app, "/openapi/v1/account/sessions/self")
|
||||
assert openapi_app.view_functions[rule.endpoint].view_class is AccountSessionsSelfApi
|
||||
|
||||
|
||||
def test_account_methods(openapi_app: Flask):
|
||||
rule = _rule(openapi_app, "/openapi/v1/account")
|
||||
assert "GET" in rule.methods
|
||||
|
||||
|
||||
def test_sessions_self_methods(openapi_app: Flask):
|
||||
rule = _rule(openapi_app, "/openapi/v1/account/sessions/self")
|
||||
assert "DELETE" in rule.methods
|
||||
|
||||
|
||||
def test_sessions_list_route_registered(openapi_app: Flask):
|
||||
rules = {r.rule for r in openapi_app.url_map.iter_rules()}
|
||||
assert "/openapi/v1/account/sessions" in rules
|
||||
|
||||
|
||||
def test_sessions_list_dispatches_to_sessions_api(openapi_app: Flask):
|
||||
rule = _rule(openapi_app, "/openapi/v1/account/sessions")
|
||||
assert openapi_app.view_functions[rule.endpoint].view_class is AccountSessionsApi
|
||||
assert "GET" in rule.methods
|
||||
|
||||
|
||||
def test_session_by_id_route_registered(openapi_app: Flask):
|
||||
rules = {r.rule for r in openapi_app.url_map.iter_rules()}
|
||||
assert "/openapi/v1/account/sessions/<string:session_id>" in rules
|
||||
|
||||
|
||||
def test_session_by_id_dispatches_to_correct_class(openapi_app: Flask):
|
||||
rule = _rule(openapi_app, "/openapi/v1/account/sessions/<string:session_id>")
|
||||
assert openapi_app.view_functions[rule.endpoint].view_class is AccountSessionByIdApi
|
||||
assert "DELETE" in rule.methods
|
||||
|
||||
|
||||
def test_subject_match_for_account_filters_by_account_id():
|
||||
"""Account subject scopes queries via account_id."""
|
||||
import uuid as _uuid
|
||||
|
||||
from controllers.openapi.account import _subject_match
|
||||
from libs.oauth_bearer import AuthContext, SubjectType
|
||||
|
||||
aid = _uuid.uuid4()
|
||||
ctx = AuthContext(
|
||||
subject_type=SubjectType.ACCOUNT,
|
||||
subject_email="user@example.com",
|
||||
subject_issuer="dify:account",
|
||||
account_id=aid,
|
||||
scopes=frozenset({"full"}),
|
||||
token_id=_uuid.uuid4(),
|
||||
source="oauth_account",
|
||||
expires_at=None,
|
||||
token_hash="h1",
|
||||
verified_tenants={},
|
||||
)
|
||||
clauses = _subject_match(ctx)
|
||||
# One predicate, on account_id
|
||||
assert len(clauses) == 1
|
||||
assert "account_id" in str(clauses[0])
|
||||
|
||||
|
||||
def test_subject_match_for_external_sso_filters_by_email_and_issuer():
|
||||
"""External SSO subject scopes via (subject_email, subject_issuer)
|
||||
AND account_id IS NULL — so a same-email account row from a
|
||||
federated tenant cannot be revoked through an SSO bearer.
|
||||
"""
|
||||
import uuid as _uuid
|
||||
|
||||
from controllers.openapi.account import _subject_match
|
||||
from libs.oauth_bearer import AuthContext, SubjectType
|
||||
|
||||
ctx = AuthContext(
|
||||
subject_type=SubjectType.EXTERNAL_SSO,
|
||||
subject_email="sso@partner.com",
|
||||
subject_issuer="https://idp.partner.com",
|
||||
account_id=None,
|
||||
scopes=frozenset({"apps:run"}),
|
||||
token_id=_uuid.uuid4(),
|
||||
source="oauth_external_sso",
|
||||
expires_at=None,
|
||||
token_hash="h1",
|
||||
verified_tenants={},
|
||||
)
|
||||
clauses = _subject_match(ctx)
|
||||
assert len(clauses) == 3
|
||||
rendered = " ".join(str(c) for c in clauses)
|
||||
assert "subject_email" in rendered
|
||||
assert "subject_issuer" in rendered
|
||||
assert "account_id IS NULL" in rendered
|
||||
@ -1,48 +0,0 @@
|
||||
"""Unit tests for AppDescribeQuery (`?fields=` allow-list)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from pydantic import ValidationError
|
||||
|
||||
from controllers.openapi.apps import AppDescribeQuery
|
||||
|
||||
|
||||
def test_no_fields_returns_none() -> None:
|
||||
q = AppDescribeQuery.model_validate({})
|
||||
assert q.fields is None
|
||||
|
||||
|
||||
def test_empty_string_returns_none() -> None:
|
||||
q = AppDescribeQuery.model_validate({"fields": ""})
|
||||
assert q.fields is None
|
||||
|
||||
|
||||
def test_single_field() -> None:
|
||||
q = AppDescribeQuery.model_validate({"fields": "info"})
|
||||
assert q.fields == {"info"}
|
||||
|
||||
|
||||
def test_comma_list() -> None:
|
||||
q = AppDescribeQuery.model_validate({"fields": "info,parameters"})
|
||||
assert q.fields == {"info", "parameters"}
|
||||
|
||||
|
||||
def test_whitespace_tolerant() -> None:
|
||||
q = AppDescribeQuery.model_validate({"fields": " info , input_schema "})
|
||||
assert q.fields == {"info", "input_schema"}
|
||||
|
||||
|
||||
def test_unknown_member_rejected() -> None:
|
||||
with pytest.raises(ValidationError):
|
||||
AppDescribeQuery.model_validate({"fields": "garbage"})
|
||||
|
||||
|
||||
def test_unknown_among_known_rejected() -> None:
|
||||
with pytest.raises(ValidationError):
|
||||
AppDescribeQuery.model_validate({"fields": "info,garbage"})
|
||||
|
||||
|
||||
def test_extra_param_forbidden() -> None:
|
||||
with pytest.raises(ValidationError):
|
||||
AppDescribeQuery.model_validate({"fields": "info", "page": "1"})
|
||||
@ -1,105 +0,0 @@
|
||||
"""Unit tests for AppListQuery — the /apps query-param validator.
|
||||
|
||||
Runs against the model directly, not the HTTP layer. Pins:
|
||||
- defaults match the plan (page=1, limit=20).
|
||||
- workspace_id is required.
|
||||
- numeric bounds enforced (page >= 1, limit in [1, MAX_PAGE_LIMIT]).
|
||||
- mode validates against the AppMode enum.
|
||||
- name and tag have length caps.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from pydantic import ValidationError
|
||||
|
||||
from controllers.openapi._models import MAX_PAGE_LIMIT
|
||||
from controllers.openapi.apps import AppListQuery
|
||||
|
||||
|
||||
def test_defaults():
|
||||
q = AppListQuery.model_validate({"workspace_id": "ws-1"})
|
||||
assert q.workspace_id == "ws-1"
|
||||
assert q.page == 1
|
||||
assert q.limit == 20
|
||||
assert q.mode is None
|
||||
assert q.name is None
|
||||
assert q.tag is None
|
||||
|
||||
|
||||
def test_workspace_id_required():
|
||||
with pytest.raises(ValidationError):
|
||||
AppListQuery.model_validate({})
|
||||
|
||||
|
||||
def test_page_must_be_positive():
|
||||
with pytest.raises(ValidationError):
|
||||
AppListQuery.model_validate({"workspace_id": "ws-1", "page": 0})
|
||||
with pytest.raises(ValidationError):
|
||||
AppListQuery.model_validate({"workspace_id": "ws-1", "page": -1})
|
||||
|
||||
|
||||
def test_page_rejects_non_integer_string():
|
||||
with pytest.raises(ValidationError):
|
||||
AppListQuery.model_validate({"workspace_id": "ws-1", "page": "abc"})
|
||||
|
||||
|
||||
def test_limit_must_be_positive():
|
||||
with pytest.raises(ValidationError):
|
||||
AppListQuery.model_validate({"workspace_id": "ws-1", "limit": 0})
|
||||
with pytest.raises(ValidationError):
|
||||
AppListQuery.model_validate({"workspace_id": "ws-1", "limit": -1})
|
||||
|
||||
|
||||
def test_limit_caps_at_max_page_limit():
|
||||
# Boundary accepts.
|
||||
q = AppListQuery.model_validate({"workspace_id": "ws-1", "limit": MAX_PAGE_LIMIT})
|
||||
assert q.limit == MAX_PAGE_LIMIT
|
||||
|
||||
# Just over rejects.
|
||||
with pytest.raises(ValidationError):
|
||||
AppListQuery.model_validate({"workspace_id": "ws-1", "limit": MAX_PAGE_LIMIT + 1})
|
||||
|
||||
|
||||
def test_mode_whitelisted_against_app_mode():
|
||||
# Valid mode passes.
|
||||
q = AppListQuery.model_validate({"workspace_id": "ws-1", "mode": "chat"})
|
||||
assert q.mode is not None
|
||||
assert q.mode.value == "chat"
|
||||
|
||||
# Invalid mode rejects.
|
||||
with pytest.raises(ValidationError):
|
||||
AppListQuery.model_validate({"workspace_id": "ws-1", "mode": "not-a-mode"})
|
||||
|
||||
|
||||
def test_name_length_capped():
|
||||
AppListQuery.model_validate({"workspace_id": "ws-1", "name": "x" * 200})
|
||||
with pytest.raises(ValidationError):
|
||||
AppListQuery.model_validate({"workspace_id": "ws-1", "name": "x" * 201})
|
||||
|
||||
|
||||
def test_tag_length_capped():
|
||||
AppListQuery.model_validate({"workspace_id": "ws-1", "tag": "x" * 100})
|
||||
with pytest.raises(ValidationError):
|
||||
AppListQuery.model_validate({"workspace_id": "ws-1", "tag": "x" * 101})
|
||||
|
||||
|
||||
def test_all_fields_accept_valid_values():
|
||||
"""Pin the happy-path acceptance for every field in one place."""
|
||||
q = AppListQuery.model_validate(
|
||||
{
|
||||
"workspace_id": "ws-1",
|
||||
"page": 5,
|
||||
"limit": 50,
|
||||
"mode": "workflow",
|
||||
"name": "search",
|
||||
"tag": "prod",
|
||||
}
|
||||
)
|
||||
assert q.workspace_id == "ws-1"
|
||||
assert q.page == 5
|
||||
assert q.limit == 50
|
||||
assert q.mode is not None
|
||||
assert q.mode.value == "workflow"
|
||||
assert q.name == "search"
|
||||
assert q.tag == "prod"
|
||||
@ -1,55 +0,0 @@
|
||||
"""Unit tests for app payload-rendering helpers — independent of
|
||||
HTTP plumbing or DB. Pin the response shapes that are CLI contracts.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from types import SimpleNamespace
|
||||
|
||||
import pytest
|
||||
|
||||
from controllers.openapi.apps import ( # pyright: ignore[reportPrivateUsage]
|
||||
_EMPTY_PARAMETERS,
|
||||
parameters_payload,
|
||||
)
|
||||
from controllers.service_api.app.error import AppUnavailableError
|
||||
|
||||
|
||||
def _fake_app(**overrides):
|
||||
base = {
|
||||
"id": "app1",
|
||||
"name": "X",
|
||||
"description": "d",
|
||||
"mode": "chat",
|
||||
"author_name": "alice",
|
||||
"tags": [SimpleNamespace(name="prod")],
|
||||
"updated_at": None,
|
||||
"enable_api": True,
|
||||
"workflow": None,
|
||||
"app_model_config": None,
|
||||
}
|
||||
base.update(overrides)
|
||||
return SimpleNamespace(**base)
|
||||
|
||||
|
||||
def test_parameters_payload_raises_app_unavailable_when_no_config():
|
||||
with pytest.raises(AppUnavailableError):
|
||||
parameters_payload(_fake_app(mode="chat", app_model_config=None))
|
||||
|
||||
|
||||
def test_empty_parameters_constant_matches_describe_fallback_shape():
|
||||
"""The fallback dict served by /describe when an app has no config
|
||||
must match the spec's stated keys (opening_statement, suggested_questions,
|
||||
user_input_form, file_upload, system_parameters)."""
|
||||
assert set(_EMPTY_PARAMETERS.keys()) == {
|
||||
"opening_statement",
|
||||
"suggested_questions",
|
||||
"user_input_form",
|
||||
"file_upload",
|
||||
"system_parameters",
|
||||
}
|
||||
assert _EMPTY_PARAMETERS["suggested_questions"] == []
|
||||
assert _EMPTY_PARAMETERS["user_input_form"] == []
|
||||
assert _EMPTY_PARAMETERS["opening_statement"] is None
|
||||
assert _EMPTY_PARAMETERS["file_upload"] is None
|
||||
assert _EMPTY_PARAMETERS["system_parameters"] == {}
|
||||
@ -1,45 +0,0 @@
|
||||
import pytest
from werkzeug.exceptions import InternalServerError

from controllers.openapi.app_run import (
    _DISPATCH,
    AppRunRequest,
    _unpack_blocking,
)
from models.model import AppMode


def test_dispatch_covers_runnable_modes():
    """The dispatch table covers every runnable mode — no more, no less."""
    runnable_modes = {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT, AppMode.COMPLETION, AppMode.WORKFLOW}
    assert set(_DISPATCH) == runnable_modes


def test_unpack_blocking_passes_through_mapping():
    assert _unpack_blocking({"a": 1}) == {"a": 1}


def test_unpack_blocking_unwraps_tuple():
    # Flask-style (body, status) tuples collapse to just the body.
    assert _unpack_blocking(({"a": 1}, 200)) == {"a": 1}


def test_unpack_blocking_rejects_non_mapping():
    with pytest.raises(InternalServerError):
        _unpack_blocking("not a mapping")


def test_app_run_request_strips_blank_conversation_id():
    request_payload = AppRunRequest(inputs={}, conversation_id=" ")
    assert request_payload.conversation_id is None


def test_app_run_request_rejects_invalid_uuid_conversation_id():
    from pydantic import ValidationError

    with pytest.raises(ValidationError, match="conversation_id must be a valid UUID"):
        AppRunRequest(inputs={}, conversation_id="not-a-uuid")


def test_app_run_request_accepts_valid_uuid_conversation_id():
    import uuid as _uuid

    conversation_id = str(_uuid.uuid4())
    request_payload = AppRunRequest(inputs={}, conversation_id=conversation_id)
    assert request_payload.conversation_id == conversation_id
|
||||
@ -1,51 +0,0 @@
|
||||
"""Unit tests for AppPermittedListQuery — the /apps/permitted query validator.
|
||||
|
||||
Strict ConfigDict(extra='forbid'): cross-tenant tag/workspace_id are
|
||||
unresolvable, so the model must reject them as 422 instead of silently
|
||||
dropping them. Mode/name/page/limit have the same shape as AppListQuery.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from pydantic import ValidationError
|
||||
|
||||
from controllers.openapi.apps_permitted import AppPermittedListQuery
|
||||
|
||||
|
||||
def test_query_defaults_match_apps_list():
|
||||
q = AppPermittedListQuery.model_validate({})
|
||||
assert q.page == 1
|
||||
assert q.limit == 20
|
||||
assert q.mode is None
|
||||
assert q.name is None
|
||||
|
||||
|
||||
def test_query_rejects_workspace_id():
|
||||
"""workspace_id is meaningless for /permitted (cross-tenant); rejecting it
|
||||
forces CLI authors to drop the param rather than send it silently."""
|
||||
with pytest.raises(ValidationError):
|
||||
AppPermittedListQuery.model_validate({"workspace_id": "ws-1"})
|
||||
|
||||
|
||||
def test_query_rejects_tag():
|
||||
"""Tags are tenant-scoped; cross-tenant tag resolution is undefined."""
|
||||
with pytest.raises(ValidationError):
|
||||
AppPermittedListQuery.model_validate({"tag": "prod"})
|
||||
|
||||
|
||||
def test_query_validates_mode_against_app_mode():
|
||||
with pytest.raises(ValidationError):
|
||||
AppPermittedListQuery.model_validate({"mode": "not-a-mode"})
|
||||
|
||||
|
||||
def test_query_clamps_limit_at_max():
|
||||
with pytest.raises(ValidationError):
|
||||
AppPermittedListQuery.model_validate({"limit": 500})
|
||||
|
||||
|
||||
def test_query_accepts_valid_mode():
|
||||
"""Pin the happy path: AppMode values pass."""
|
||||
q = AppPermittedListQuery.model_validate({"mode": "chat"})
|
||||
assert q.mode is not None
|
||||
assert q.mode.value == "chat"
|
||||
@ -1,19 +0,0 @@
|
||||
import logging

from controllers.openapi._audit import EVENT_APP_RUN_OPENAPI, emit_app_run


def test_event_constant():
    """The audit event name is a wire contract — pin it."""
    assert EVENT_APP_RUN_OPENAPI == "app.run.openapi"


def test_emit_app_run_logs_with_audit_extra(caplog):
    """emit_app_run attaches structured audit fields to the log record."""
    with caplog.at_level(logging.INFO, logger="controllers.openapi._audit"):
        emit_app_run(app_id="app1", tenant_id="t1", caller_kind="account", mode="chat")

    audit_record = next(r for r in caplog.records if r.message and "app.run.openapi" in r.message)
    assert audit_record.audit is True
    assert audit_record.event == EVENT_APP_RUN_OPENAPI
    assert audit_record.app_id == "app1"
    assert audit_record.tenant_id == "t1"
    assert audit_record.caller_kind == "account"
    assert audit_record.mode == "chat"
|
||||
@ -1,127 +0,0 @@
|
||||
"""CORS posture for /openapi/v1/* — default empty allowlist (same-origin),
|
||||
expandable via OPENAPI_CORS_ALLOW_ORIGINS. Cross-origin requests from
|
||||
disallowed origins do not receive the Access-Control-Allow-Origin
|
||||
header, which the browser then blocks.
|
||||
|
||||
Tests use a fresh Blueprint + Flask-CORS per case because the production
|
||||
blueprint is a module-level singleton and can't be reconfigured once
|
||||
registered.
|
||||
"""
|
||||
|
||||
import builtins
|
||||
|
||||
from flask import Blueprint, Flask
|
||||
from flask.views import MethodView
|
||||
from flask_cors import CORS
|
||||
from flask_restx import Resource
|
||||
|
||||
from configs import dify_config
|
||||
from extensions.ext_blueprints import OPENAPI_HEADERS, OPENAPI_MAX_AGE_SECONDS
|
||||
from libs.external_api import ExternalApi
|
||||
|
||||
if not hasattr(builtins, "MethodView"):
|
||||
builtins.MethodView = MethodView # type: ignore[attr-defined]
|
||||
|
||||
|
||||
def _make_app(allowed_origins: list[str], blueprint_name: str) -> Flask:
|
||||
"""Build a Flask app with a fresh openapi-style blueprint mirroring
|
||||
production CORS settings, parameterised on the origin allowlist.
|
||||
"""
|
||||
bp = Blueprint(blueprint_name, __name__, url_prefix="/openapi/v1")
|
||||
api = ExternalApi(bp, version="1.0", title="OpenAPI Test", description="")
|
||||
|
||||
@api.route("/_health")
|
||||
class _Health(Resource):
|
||||
def get(self):
|
||||
return {"ok": True}
|
||||
|
||||
CORS(
|
||||
bp,
|
||||
resources={r"/*": {"origins": allowed_origins}},
|
||||
supports_credentials=True,
|
||||
allow_headers=list(OPENAPI_HEADERS),
|
||||
methods=["GET", "POST", "PATCH", "DELETE", "OPTIONS"],
|
||||
expose_headers=["X-Version"],
|
||||
max_age=OPENAPI_MAX_AGE_SECONDS,
|
||||
)
|
||||
|
||||
app = Flask(__name__)
|
||||
app.config["TESTING"] = True
|
||||
app.register_blueprint(bp)
|
||||
return app
|
||||
|
||||
|
||||
def test_default_openapi_cors_allowlist_is_empty():
|
||||
"""Default config admits no cross-origin until operator opts in."""
|
||||
assert dify_config.OPENAPI_CORS_ALLOW_ORIGINS == []
|
||||
|
||||
|
||||
def test_preflight_allowed_origin_returns_cors_headers():
|
||||
app = _make_app(["https://app.example.com"], "openapi_t1")
|
||||
client = app.test_client()
|
||||
response = client.options(
|
||||
"/openapi/v1/_health",
|
||||
headers={
|
||||
"Origin": "https://app.example.com",
|
||||
"Access-Control-Request-Method": "GET",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.headers.get("Access-Control-Allow-Origin") == "https://app.example.com"
|
||||
assert response.headers.get("Access-Control-Max-Age") == str(OPENAPI_MAX_AGE_SECONDS)
|
||||
|
||||
|
||||
def test_preflight_disallowed_origin_omits_cors_headers():
|
||||
app = _make_app(["https://app.example.com"], "openapi_t2")
|
||||
client = app.test_client()
|
||||
response = client.options(
|
||||
"/openapi/v1/_health",
|
||||
headers={
|
||||
"Origin": "https://attacker.example",
|
||||
"Access-Control-Request-Method": "GET",
|
||||
},
|
||||
)
|
||||
|
||||
# flask-cors omits Allow-Origin for disallowed origins; browser blocks.
|
||||
assert "Access-Control-Allow-Origin" not in response.headers
|
||||
|
||||
|
||||
def test_preflight_with_default_empty_allowlist_omits_cors_headers():
|
||||
app = _make_app([], "openapi_t3")
|
||||
client = app.test_client()
|
||||
response = client.options(
|
||||
"/openapi/v1/_health",
|
||||
headers={
|
||||
"Origin": "https://app.example.com",
|
||||
"Access-Control-Request-Method": "GET",
|
||||
},
|
||||
)
|
||||
|
||||
assert "Access-Control-Allow-Origin" not in response.headers
|
||||
|
||||
|
||||
def test_same_origin_request_succeeds_without_origin_header():
|
||||
app = _make_app(["https://app.example.com"], "openapi_t4")
|
||||
client = app.test_client()
|
||||
# Browsers don't send Origin on same-origin GETs.
|
||||
response = client.get("/openapi/v1/_health")
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response.get_json() == {"ok": True}
|
||||
|
||||
|
||||
def test_authorization_header_is_in_allow_headers():
|
||||
"""Bearer-authed routes need Authorization in the preflight response."""
|
||||
app = _make_app(["https://app.example.com"], "openapi_t5")
|
||||
client = app.test_client()
|
||||
response = client.options(
|
||||
"/openapi/v1/_health",
|
||||
headers={
|
||||
"Origin": "https://app.example.com",
|
||||
"Access-Control-Request-Method": "GET",
|
||||
"Access-Control-Request-Headers": "Authorization",
|
||||
},
|
||||
)
|
||||
|
||||
allow_headers = response.headers.get("Access-Control-Allow-Headers", "").lower()
|
||||
assert "authorization" in allow_headers
|
||||
@ -1,52 +0,0 @@
|
||||
"""Account-branch device-flow approve/deny under /openapi/v1."""
|
||||
|
||||
import builtins
|
||||
|
||||
import pytest
|
||||
from flask import Flask
|
||||
from flask.views import MethodView
|
||||
|
||||
from controllers.openapi import bp as openapi_bp
|
||||
from controllers.openapi.oauth_device import DeviceApproveApi, DeviceDenyApi
|
||||
|
||||
if not hasattr(builtins, "MethodView"):
|
||||
builtins.MethodView = MethodView # type: ignore[attr-defined]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def openapi_app() -> Flask:
|
||||
app = Flask(__name__)
|
||||
app.config["TESTING"] = True
|
||||
app.register_blueprint(openapi_bp)
|
||||
return app
|
||||
|
||||
|
||||
def _rule(app: Flask, path: str):
|
||||
return next(r for r in app.url_map.iter_rules() if r.rule == path)
|
||||
|
||||
|
||||
def test_approve_route_registered(openapi_app: Flask):
|
||||
rules = {r.rule for r in openapi_app.url_map.iter_rules()}
|
||||
assert "/openapi/v1/oauth/device/approve" in rules
|
||||
|
||||
|
||||
def test_deny_route_registered(openapi_app: Flask):
|
||||
rules = {r.rule for r in openapi_app.url_map.iter_rules()}
|
||||
assert "/openapi/v1/oauth/device/deny" in rules
|
||||
|
||||
|
||||
def test_approve_dispatches_to_class(openapi_app: Flask):
    """The approve URL must resolve to the DeviceApproveApi MethodView."""
    endpoint = _rule(openapi_app, "/openapi/v1/oauth/device/approve").endpoint
    assert openapi_app.view_functions[endpoint].view_class is DeviceApproveApi
|
||||
|
||||
|
||||
def test_deny_dispatches_to_class(openapi_app: Flask):
    """The deny URL must resolve to the DeviceDenyApi MethodView."""
    endpoint = _rule(openapi_app, "/openapi/v1/oauth/device/deny").endpoint
    assert openapi_app.view_functions[endpoint].view_class is DeviceDenyApi
|
||||
|
||||
|
||||
def test_approve_and_deny_methods(openapi_app: Flask):
    """Both device-flow endpoints accept POST."""
    for path in (
        "/openapi/v1/oauth/device/approve",
        "/openapi/v1/oauth/device/deny",
    ):
        assert "POST" in _rule(openapi_app, path).methods
|
||||
@ -1,47 +0,0 @@
|
||||
"""POST /openapi/v1/oauth/device/code is the canonical RFC 8628 device
|
||||
authorization endpoint.
|
||||
|
||||
Tests verify URL routing without invoking the handler — invoking would
|
||||
require Redis, which the unit-test runtime does not initialise.
|
||||
"""
|
||||
|
||||
import builtins
|
||||
|
||||
import pytest
|
||||
from flask import Flask
|
||||
from flask.views import MethodView
|
||||
|
||||
from controllers.openapi import bp as openapi_bp
|
||||
from controllers.openapi.oauth_device import OAuthDeviceCodeApi
|
||||
|
||||
if not hasattr(builtins, "MethodView"):
|
||||
builtins.MethodView = MethodView # type: ignore[attr-defined]
|
||||
|
||||
|
||||
@pytest.fixture
def openapi_app() -> Flask:
    """Bare Flask application carrying just the openapi blueprint."""
    application = Flask(__name__)
    application.config["TESTING"] = True
    application.register_blueprint(openapi_bp)
    return application
|
||||
|
||||
|
||||
def test_openapi_route_registered(openapi_app: Flask):
    """The device-code endpoint must be mounted under /openapi/v1."""
    registered = [item.rule for item in openapi_app.url_map.iter_rules()]
    assert "/openapi/v1/oauth/device/code" in registered
|
||||
|
||||
|
||||
def test_route_dispatches_to_class(openapi_app: Flask):
    """The device-code URL must dispatch to OAuthDeviceCodeApi."""
    target = "/openapi/v1/oauth/device/code"
    matched = next(item for item in openapi_app.url_map.iter_rules() if item.rule == target)
    assert openapi_app.view_functions[matched.endpoint].view_class is OAuthDeviceCodeApi
|
||||
|
||||
|
||||
def test_route_accepts_post(openapi_app: Flask):
    """RFC 8628 device authorization requests are POSTs."""
    target = "/openapi/v1/oauth/device/code"
    matched = next(item for item in openapi_app.url_map.iter_rules() if item.rule == target)
    assert "POST" in matched.methods
|
||||
|
||||
|
||||
def test_known_client_ids_default_includes_difyctl():
    """The bundled CLI client id ships in the default known-clients setting."""
    from configs import dify_config

    known_ids = dify_config.OPENAPI_KNOWN_CLIENT_IDS
    assert "difyctl" in known_ids
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user