mirror of
https://github.com/langgenius/dify.git
synced 2026-01-19 11:45:05 +08:00
Compare commits
39 Commits
f7aef50c25
...
feat/summa
| Author | SHA1 | Date | |
|---|---|---|---|
| 62c3f14570 | |||
| 7b66bbc35a | |||
| 77366f33a4 | |||
| e3b0918dd9 | |||
| fad6fa141d | |||
| 30821fd26c | |||
| 1a9fdd9a65 | |||
| de610cbf39 | |||
| 6903c31b84 | |||
| b2cc9b255d | |||
| e9f0e1e839 | |||
| cd497a8c52 | |||
| 7aab4529e6 | |||
| 4bff0cd0ab | |||
| d97f2df85c | |||
| fde8efa4a2 | |||
| c98870c3f4 | |||
| b06c7c8f33 | |||
| 1a2fce7055 | |||
| f02adc26e5 | |||
| 2b021e8752 | |||
| 1126a2aa95 | |||
| 4a197b9458 | |||
| 772ff636ec | |||
| ab1c5a2027 | |||
| 33e99f069b | |||
| 52af829f1f | |||
| 7c3ce7b1e6 | |||
| 0ef8b5a0ca | |||
| 2bfc54314e | |||
| bdd8d5b470 | |||
| 4955de5905 | |||
| 3bee2ee067 | |||
| 328897f81c | |||
| 830a7fb034 | |||
| 01a7dbcee8 | |||
| 4fe8d2491e | |||
| 5c2ae922bc | |||
| 13eec13a14 |
4
.github/workflows/autofix.yml
vendored
4
.github/workflows/autofix.yml
vendored
@ -16,14 +16,14 @@ jobs:
|
||||
|
||||
- name: Check Docker Compose inputs
|
||||
id: docker-compose-changes
|
||||
uses: tj-actions/changed-files@v46
|
||||
uses: tj-actions/changed-files@v47
|
||||
with:
|
||||
files: |
|
||||
docker/generate_docker_compose
|
||||
docker/.env.example
|
||||
docker/docker-compose-template.yaml
|
||||
docker/docker-compose.yaml
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.11"
|
||||
|
||||
|
||||
2
.github/workflows/build-push.yml
vendored
2
.github/workflows/build-push.yml
vendored
@ -112,7 +112,7 @@ jobs:
|
||||
context: "web"
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
path: /tmp/digests
|
||||
pattern: digests-${{ matrix.context }}-*
|
||||
|
||||
2
.github/workflows/deploy-agent-dev.yml
vendored
2
.github/workflows/deploy-agent-dev.yml
vendored
@ -19,7 +19,7 @@ jobs:
|
||||
github.event.workflow_run.head_branch == 'deploy/agent-dev'
|
||||
steps:
|
||||
- name: Deploy to server
|
||||
uses: appleboy/ssh-action@v0.1.8
|
||||
uses: appleboy/ssh-action@v1
|
||||
with:
|
||||
host: ${{ secrets.AGENT_DEV_SSH_HOST }}
|
||||
username: ${{ secrets.SSH_USER }}
|
||||
|
||||
2
.github/workflows/deploy-dev.yml
vendored
2
.github/workflows/deploy-dev.yml
vendored
@ -16,7 +16,7 @@ jobs:
|
||||
github.event.workflow_run.head_branch == 'deploy/dev'
|
||||
steps:
|
||||
- name: Deploy to server
|
||||
uses: appleboy/ssh-action@v0.1.8
|
||||
uses: appleboy/ssh-action@v1
|
||||
with:
|
||||
host: ${{ secrets.SSH_HOST }}
|
||||
username: ${{ secrets.SSH_USER }}
|
||||
|
||||
2
.github/workflows/deploy-hitl.yml
vendored
2
.github/workflows/deploy-hitl.yml
vendored
@ -20,7 +20,7 @@ jobs:
|
||||
)
|
||||
steps:
|
||||
- name: Deploy to server
|
||||
uses: appleboy/ssh-action@v0.1.8
|
||||
uses: appleboy/ssh-action@v1
|
||||
with:
|
||||
host: ${{ secrets.HITL_SSH_HOST }}
|
||||
username: ${{ secrets.SSH_USER }}
|
||||
|
||||
2
.github/workflows/stale.yml
vendored
2
.github/workflows/stale.yml
vendored
@ -18,7 +18,7 @@ jobs:
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/stale@v5
|
||||
- uses: actions/stale@v10
|
||||
with:
|
||||
days-before-issue-stale: 15
|
||||
days-before-issue-close: 3
|
||||
|
||||
21
.github/workflows/style.yml
vendored
21
.github/workflows/style.yml
vendored
@ -65,6 +65,9 @@ jobs:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./web
|
||||
permissions:
|
||||
checks: write
|
||||
pull-requests: read
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
@ -90,7 +93,7 @@ jobs:
|
||||
uses: actions/setup-node@v6
|
||||
if: steps.changed-files.outputs.any_changed == 'true'
|
||||
with:
|
||||
node-version: 22
|
||||
node-version: 24
|
||||
cache: pnpm
|
||||
cache-dependency-path: ./web/pnpm-lock.yaml
|
||||
|
||||
@ -103,7 +106,16 @@ jobs:
|
||||
if: steps.changed-files.outputs.any_changed == 'true'
|
||||
working-directory: ./web
|
||||
run: |
|
||||
pnpm run lint
|
||||
pnpm run lint:ci
|
||||
# pnpm run lint:report
|
||||
# continue-on-error: true
|
||||
|
||||
# - name: Annotate Code
|
||||
# if: steps.changed-files.outputs.any_changed == 'true' && github.event_name == 'pull_request'
|
||||
# uses: DerLev/eslint-annotations@51347b3a0abfb503fc8734d5ae31c4b151297fae
|
||||
# with:
|
||||
# eslint-report: web/eslint_report.json
|
||||
# github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Web type check
|
||||
if: steps.changed-files.outputs.any_changed == 'true'
|
||||
@ -115,11 +127,6 @@ jobs:
|
||||
working-directory: ./web
|
||||
run: pnpm run knip
|
||||
|
||||
- name: Web build check
|
||||
if: steps.changed-files.outputs.any_changed == 'true'
|
||||
working-directory: ./web
|
||||
run: pnpm run build
|
||||
|
||||
superlinter:
|
||||
name: SuperLinter
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
8
.github/workflows/tool-test-sdks.yaml
vendored
8
.github/workflows/tool-test-sdks.yaml
vendored
@ -16,10 +16,6 @@ jobs:
|
||||
name: unit test for Node.js SDK
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [16, 18, 20, 22]
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: sdks/nodejs-client
|
||||
@ -29,10 +25,10 @@ jobs:
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
node-version: 24
|
||||
cache: ''
|
||||
cache-dependency-path: 'pnpm-lock.yaml'
|
||||
|
||||
|
||||
2
.github/workflows/translate-i18n-claude.yml
vendored
2
.github/workflows/translate-i18n-claude.yml
vendored
@ -57,7 +57,7 @@ jobs:
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 'lts/*'
|
||||
node-version: 24
|
||||
cache: pnpm
|
||||
cache-dependency-path: ./web/pnpm-lock.yaml
|
||||
|
||||
|
||||
2
.github/workflows/trigger-i18n-sync.yml
vendored
2
.github/workflows/trigger-i18n-sync.yml
vendored
@ -21,7 +21,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
|
||||
47
.github/workflows/web-tests.yml
vendored
47
.github/workflows/web-tests.yml
vendored
@ -31,7 +31,7 @@ jobs:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 22
|
||||
node-version: 24
|
||||
cache: pnpm
|
||||
cache-dependency-path: ./web/pnpm-lock.yaml
|
||||
|
||||
@ -366,3 +366,48 @@ jobs:
|
||||
path: web/coverage
|
||||
retention-days: 30
|
||||
if-no-files-found: error
|
||||
|
||||
web-build:
|
||||
name: Web Build
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./web
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Check changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@v47
|
||||
with:
|
||||
files: |
|
||||
web/**
|
||||
.github/workflows/web-tests.yml
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
package_json_file: web/package.json
|
||||
run_install: false
|
||||
|
||||
- name: Setup NodeJS
|
||||
uses: actions/setup-node@v6
|
||||
if: steps.changed-files.outputs.any_changed == 'true'
|
||||
with:
|
||||
node-version: 24
|
||||
cache: pnpm
|
||||
cache-dependency-path: ./web/pnpm-lock.yaml
|
||||
|
||||
- name: Web dependencies
|
||||
if: steps.changed-files.outputs.any_changed == 'true'
|
||||
working-directory: ./web
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Web build check
|
||||
if: steps.changed-files.outputs.any_changed == 'true'
|
||||
working-directory: ./web
|
||||
run: pnpm run build
|
||||
|
||||
@ -12,12 +12,8 @@ The codebase is split into:
|
||||
|
||||
## Backend Workflow
|
||||
|
||||
- Read `api/AGENTS.md` for details
|
||||
- Run backend CLI commands through `uv run --project api <command>`.
|
||||
|
||||
- Before submission, all backend modifications must pass local checks: `make lint`, `make type-check`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh`.
|
||||
|
||||
- Use Makefile targets for linting and formatting; `make lint` and `make type-check` cover the required checks.
|
||||
|
||||
- Integration tests are CI-only and are not expected to run in the local environment.
|
||||
|
||||
## Frontend Workflow
|
||||
|
||||
12
Makefile
12
Makefile
@ -61,7 +61,8 @@ check:
|
||||
|
||||
lint:
|
||||
@echo "🔧 Running ruff format, check with fixes, import linter, and dotenv-linter..."
|
||||
@uv run --project api --dev sh -c 'ruff format ./api && ruff check --fix ./api'
|
||||
@uv run --project api --dev ruff format ./api
|
||||
@uv run --project api --dev ruff check --fix ./api
|
||||
@uv run --directory api --dev lint-imports
|
||||
@uv run --project api --dev dotenv-linter ./api/.env.example ./web/.env.example
|
||||
@echo "✅ Linting complete"
|
||||
@ -73,7 +74,12 @@ type-check:
|
||||
|
||||
test:
|
||||
@echo "🧪 Running backend unit tests..."
|
||||
@uv run --project api --dev dev/pytest/pytest_unit_tests.sh
|
||||
@if [ -n "$(TARGET_TESTS)" ]; then \
|
||||
echo "Target: $(TARGET_TESTS)"; \
|
||||
uv run --project api --dev pytest $(TARGET_TESTS); \
|
||||
else \
|
||||
uv run --project api --dev dev/pytest/pytest_unit_tests.sh; \
|
||||
fi
|
||||
@echo "✅ Tests complete"
|
||||
|
||||
# Build Docker images
|
||||
@ -125,7 +131,7 @@ help:
|
||||
@echo " make check - Check code with ruff"
|
||||
@echo " make lint - Format, fix, and lint code (ruff, imports, dotenv)"
|
||||
@echo " make type-check - Run type checking with basedpyright"
|
||||
@echo " make test - Run backend unit tests"
|
||||
@echo " make test - Run backend unit tests (or TARGET_TESTS=./api/tests/<target_tests>)"
|
||||
@echo ""
|
||||
@echo "Docker Build Targets:"
|
||||
@echo " make build-web - Build web Docker image"
|
||||
|
||||
0
agent-notes/.gitkeep
Normal file
0
agent-notes/.gitkeep
Normal file
248
api/AGENTS.md
248
api/AGENTS.md
@ -1,62 +1,236 @@
|
||||
# Agent Skill Index
|
||||
# API Agent Guide
|
||||
|
||||
## Agent Notes (must-check)
|
||||
|
||||
Before you start work on any backend file under `api/`, you MUST check whether a related note exists under:
|
||||
|
||||
- `agent-notes/<same-relative-path-as-target-file>.md`
|
||||
|
||||
Rules:
|
||||
|
||||
- **Path mapping**: for a target file `<path>/<name>.py`, the note must be `agent-notes/<path>/<name>.py.md` (same folder structure, same filename, plus `.md`).
|
||||
- **Before working**:
|
||||
- If the note exists, read it first and follow any constraints/decisions recorded there.
|
||||
- If the note conflicts with the current code, or references an "origin" file/path that has been deleted, renamed, or migrated, treat the **code as the single source of truth** and update the note to match reality.
|
||||
- If the note does not exist, create it with a short architecture/intent summary and any relevant invariants/edge cases.
|
||||
- **During working**:
|
||||
- Keep the note in sync as you discover constraints, make decisions, or change approach.
|
||||
- If you move/rename a file, migrate its note to the new mapped path (and fix any outdated references inside the note).
|
||||
- Record non-obvious edge cases, trade-offs, and the test/verification plan as you go (not just at the end).
|
||||
- Keep notes **coherent**: integrate new findings into the relevant sections and rewrite for clarity; avoid append-only “recent fix” / changelog-style additions unless the note is explicitly intended to be a changelog.
|
||||
- **When finishing work**:
|
||||
- Update the related note(s) to reflect what changed, why, and any new edge cases/tests.
|
||||
- If a file is deleted, remove or clearly deprecate the corresponding note so it cannot be mistaken as current guidance.
|
||||
- Keep notes concise and accurate; they are meant to prevent repeated rediscovery.
|
||||
|
||||
## Skill Index
|
||||
|
||||
Start with the section that best matches your need. Each entry lists the problems it solves plus key files/concepts so you know what to expect before opening it.
|
||||
|
||||
______________________________________________________________________
|
||||
### Platform Foundations
|
||||
|
||||
## Platform Foundations
|
||||
|
||||
- **[Infrastructure Overview](agent_skills/infra.md)**\
|
||||
When to read this:
|
||||
#### [Infrastructure Overview](agent_skills/infra.md)
|
||||
|
||||
- **When to read this**
|
||||
- You need to understand where a feature belongs in the architecture.
|
||||
- You’re wiring storage, Redis, vector stores, or OTEL.
|
||||
- You’re about to add CLI commands or async jobs.\
|
||||
What it covers: configuration stack (`configs/app_config.py`, remote settings), storage entry points (`extensions/ext_storage.py`, `core/file/file_manager.py`), Redis conventions (`extensions/ext_redis.py`), plugin runtime topology, vector-store factory (`core/rag/datasource/vdb/*`), observability hooks, SSRF proxy usage, and core CLI commands.
|
||||
- You’re about to add CLI commands or async jobs.
|
||||
- **What it covers**
|
||||
- Configuration stack (`configs/app_config.py`, remote settings)
|
||||
- Storage entry points (`extensions/ext_storage.py`, `core/file/file_manager.py`)
|
||||
- Redis conventions (`extensions/ext_redis.py`)
|
||||
- Plugin runtime topology
|
||||
- Vector-store factory (`core/rag/datasource/vdb/*`)
|
||||
- Observability hooks
|
||||
- SSRF proxy usage
|
||||
- Core CLI commands
|
||||
|
||||
- **[Coding Style](agent_skills/coding_style.md)**\
|
||||
When to read this:
|
||||
### Plugin & Extension Development
|
||||
|
||||
- You’re writing or reviewing backend code and need the authoritative checklist.
|
||||
- You’re unsure about Pydantic validators, SQLAlchemy session usage, or logging patterns.
|
||||
- You want the exact lint/type/test commands used in PRs.\
|
||||
Includes: Ruff & BasedPyright commands, no-annotation policy, session examples (`with Session(db.engine, ...)`), `@field_validator` usage, logging expectations, and the rule set for file size, helpers, and package management.
|
||||
|
||||
______________________________________________________________________
|
||||
|
||||
## Plugin & Extension Development
|
||||
|
||||
- **[Plugin Systems](agent_skills/plugin.md)**\
|
||||
When to read this:
|
||||
#### [Plugin Systems](agent_skills/plugin.md)
|
||||
|
||||
- **When to read this**
|
||||
- You’re building or debugging a marketplace plugin.
|
||||
- You need to know how manifests, providers, daemons, and migrations fit together.\
|
||||
What it covers: plugin manifests (`core/plugin/entities/plugin.py`), installation/upgrade flows (`services/plugin/plugin_service.py`, CLI commands), runtime adapters (`core/plugin/impl/*` for tool/model/datasource/trigger/endpoint/agent), daemon coordination (`core/plugin/entities/plugin_daemon.py`), and how provider registries surface capabilities to the rest of the platform.
|
||||
- You need to know how manifests, providers, daemons, and migrations fit together.
|
||||
- **What it covers**
|
||||
- Plugin manifests (`core/plugin/entities/plugin.py`)
|
||||
- Installation/upgrade flows (`services/plugin/plugin_service.py`, CLI commands)
|
||||
- Runtime adapters (`core/plugin/impl/*` for tool/model/datasource/trigger/endpoint/agent)
|
||||
- Daemon coordination (`core/plugin/entities/plugin_daemon.py`)
|
||||
- How provider registries surface capabilities to the rest of the platform
|
||||
|
||||
- **[Plugin OAuth](agent_skills/plugin_oauth.md)**\
|
||||
When to read this:
|
||||
#### [Plugin OAuth](agent_skills/plugin_oauth.md)
|
||||
|
||||
- **When to read this**
|
||||
- You must integrate OAuth for a plugin or datasource.
|
||||
- You’re handling credential encryption or refresh flows.\
|
||||
Topics: credential storage, encryption helpers (`core/helper/provider_encryption.py`), OAuth client bootstrap (`services/plugin/oauth_service.py`, `services/plugin/plugin_parameter_service.py`), and how console/API layers expose the flows.
|
||||
- You’re handling credential encryption or refresh flows.
|
||||
- **Topics**
|
||||
- Credential storage
|
||||
- Encryption helpers (`core/helper/provider_encryption.py`)
|
||||
- OAuth client bootstrap (`services/plugin/oauth_service.py`, `services/plugin/plugin_parameter_service.py`)
|
||||
- How console/API layers expose the flows
|
||||
|
||||
______________________________________________________________________
|
||||
### Workflow Entry & Execution
|
||||
|
||||
## Workflow Entry & Execution
|
||||
#### [Trigger Concepts](agent_skills/trigger.md)
|
||||
|
||||
- **[Trigger Concepts](agent_skills/trigger.md)**\
|
||||
When to read this:
|
||||
- **When to read this**
|
||||
- You’re debugging why a workflow didn’t start.
|
||||
- You’re adding a new trigger type or hook.
|
||||
- You need to trace async execution, draft debugging, or webhook/schedule pipelines.\
|
||||
Details: Start-node taxonomy, webhook & schedule internals (`core/workflow/nodes/trigger_*`, `services/trigger/*`), async orchestration (`services/async_workflow_service.py`, Celery queues), debug event bus, and storage/logging interactions.
|
||||
- You need to trace async execution, draft debugging, or webhook/schedule pipelines.
|
||||
- **Details**
|
||||
- Start-node taxonomy
|
||||
- Webhook & schedule internals (`core/workflow/nodes/trigger_*`, `services/trigger/*`)
|
||||
- Async orchestration (`services/async_workflow_service.py`, Celery queues)
|
||||
- Debug event bus
|
||||
- Storage/logging interactions
|
||||
|
||||
______________________________________________________________________
|
||||
## General Reminders
|
||||
|
||||
## Additional Notes for Agents
|
||||
|
||||
- All skill docs assume you follow the coding style guide—run Ruff/BasedPyright/tests listed there before submitting changes.
|
||||
- All skill docs assume you follow the coding style rules below—run the lint/type/test commands before submitting changes.
|
||||
- When you cannot find an answer in these briefs, search the codebase using the paths referenced (e.g., `core/plugin/impl/tool.py`, `services/dataset_service.py`).
|
||||
- If you run into cross-cutting concerns (tenancy, configuration, storage), check the infrastructure guide first; it links to most supporting modules.
|
||||
- Keep multi-tenancy and configuration central: everything flows through `configs.dify_config` and `tenant_id`.
|
||||
- When touching plugins or triggers, consult both the system overview and the specialised doc to ensure you adjust lifecycle, storage, and observability consistently.
|
||||
|
||||
## Coding Style
|
||||
|
||||
This is the default standard for backend code in this repo. Follow it for new code and use it as the checklist when reviewing changes.
|
||||
|
||||
### Linting & Formatting
|
||||
|
||||
- Use Ruff for formatting and linting (follow `.ruff.toml`).
|
||||
- Keep each line under 120 characters (including spaces).
|
||||
|
||||
### Naming Conventions
|
||||
|
||||
- Use `snake_case` for variables and functions.
|
||||
- Use `PascalCase` for classes.
|
||||
- Use `UPPER_CASE` for constants.
|
||||
|
||||
### Typing & Class Layout
|
||||
|
||||
- Code should usually include type annotations that match the repo’s current Python version (avoid untyped public APIs and “mystery” values).
|
||||
- Prefer modern typing forms (e.g. `list[str]`, `dict[str, int]`) and avoid `Any` unless there’s a strong reason.
|
||||
- For classes, declare member variables at the top of the class body (before `__init__`) so the class shape is obvious at a glance:
|
||||
|
||||
```python
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class Example:
|
||||
user_id: str
|
||||
created_at: datetime
|
||||
|
||||
def __init__(self, user_id: str, created_at: datetime) -> None:
|
||||
self.user_id = user_id
|
||||
self.created_at = created_at
|
||||
```
|
||||
|
||||
### General Rules
|
||||
|
||||
- Use Pydantic v2 conventions.
|
||||
- Use `uv` for Python package management in this repo (usually with `--project api`).
|
||||
- Prefer simple functions over small “utility classes” for lightweight helpers.
|
||||
- Avoid implementing dunder methods unless it’s clearly needed and matches existing patterns.
|
||||
- Never start long-running services as part of agent work (`uv run app.py`, `flask run`, etc.); running tests is allowed.
|
||||
- Keep files below ~800 lines; split when necessary.
|
||||
- Keep code readable and explicit—avoid clever hacks.
|
||||
|
||||
### Architecture & Boundaries
|
||||
|
||||
- Mirror the layered architecture: controller → service → core/domain.
|
||||
- Reuse existing helpers in `core/`, `services/`, and `libs/` before creating new abstractions.
|
||||
- Optimise for observability: deterministic control flow, clear logging, actionable errors.
|
||||
|
||||
### Logging & Errors
|
||||
|
||||
- Never use `print`; use a module-level logger:
|
||||
- `logger = logging.getLogger(__name__)`
|
||||
- Include tenant/app/workflow identifiers in log context when relevant.
|
||||
- Raise domain-specific exceptions (`services/errors`, `core/errors`) and translate them into HTTP responses in controllers.
|
||||
- Log retryable events at `warning`, terminal failures at `error`.
|
||||
|
||||
### SQLAlchemy Patterns
|
||||
|
||||
- Models inherit from `models.base.TypeBase`; do not create ad-hoc metadata or engines.
|
||||
- Open sessions with context managers:
|
||||
|
||||
```python
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
stmt = select(Workflow).where(
|
||||
Workflow.id == workflow_id,
|
||||
Workflow.tenant_id == tenant_id,
|
||||
)
|
||||
workflow = session.execute(stmt).scalar_one_or_none()
|
||||
```
|
||||
|
||||
- Prefer SQLAlchemy expressions; avoid raw SQL unless necessary.
|
||||
- Always scope queries by `tenant_id` and protect write paths with safeguards (`FOR UPDATE`, row counts, etc.).
|
||||
- Introduce repository abstractions only for very large tables (e.g., workflow executions) or when alternative storage strategies are required.
|
||||
|
||||
### Storage & External I/O
|
||||
|
||||
- Access storage via `extensions.ext_storage.storage`.
|
||||
- Use `core.helper.ssrf_proxy` for outbound HTTP fetches.
|
||||
- Background tasks that touch storage must be idempotent, and should log relevant object identifiers.
|
||||
|
||||
### Pydantic Usage
|
||||
|
||||
- Define DTOs with Pydantic v2 models and forbid extras by default.
|
||||
- Use `@field_validator` / `@model_validator` for domain rules.
|
||||
|
||||
Example:
|
||||
|
||||
```python
|
||||
from pydantic import BaseModel, ConfigDict, HttpUrl, field_validator
|
||||
|
||||
|
||||
class TriggerConfig(BaseModel):
|
||||
endpoint: HttpUrl
|
||||
secret: str
|
||||
|
||||
model_config = ConfigDict(extra="forbid")
|
||||
|
||||
@field_validator("secret")
|
||||
def ensure_secret_prefix(cls, value: str) -> str:
|
||||
if not value.startswith("dify_"):
|
||||
raise ValueError("secret must start with dify_")
|
||||
return value
|
||||
```
|
||||
|
||||
### Generics & Protocols
|
||||
|
||||
- Use `typing.Protocol` to define behavioural contracts (e.g., cache interfaces).
|
||||
- Apply generics (`TypeVar`, `Generic`) for reusable utilities like caches or providers.
|
||||
- Validate dynamic inputs at runtime when generics cannot enforce safety alone.
|
||||
|
||||
### Tooling & Checks
|
||||
|
||||
Quick checks while iterating:
|
||||
|
||||
- Format: `make format`
|
||||
- Lint (includes auto-fix): `make lint`
|
||||
- Type check: `make type-check`
|
||||
- Targeted tests: `make test TARGET_TESTS=./api/tests/<target_tests>`
|
||||
|
||||
Before opening a PR / submitting:
|
||||
|
||||
- `make lint`
|
||||
- `make type-check`
|
||||
- `make test`
|
||||
|
||||
### Controllers & Services
|
||||
|
||||
- Controllers: parse input via Pydantic, invoke services, return serialised responses; no business logic.
|
||||
- Services: coordinate repositories, providers, background tasks; keep side effects explicit.
|
||||
- Document non-obvious behaviour with concise comments.
|
||||
|
||||
### Miscellaneous
|
||||
|
||||
- Use `configs.dify_config` for configuration—never read environment variables directly.
|
||||
- Maintain tenant awareness end-to-end; `tenant_id` must flow through every layer touching shared resources.
|
||||
- Queue async work through `services/async_workflow_service`; implement tasks under `tasks/` with explicit queue selection.
|
||||
- Keep experimental scripts under `dev/`; do not ship them in production builds.
|
||||
|
||||
@ -1,115 +0,0 @@
|
||||
## Linter
|
||||
|
||||
- Always follow `.ruff.toml`.
|
||||
- Run `uv run ruff check --fix --unsafe-fixes`.
|
||||
- Keep each line under 100 characters (including spaces).
|
||||
|
||||
## Code Style
|
||||
|
||||
- `snake_case` for variables and functions.
|
||||
- `PascalCase` for classes.
|
||||
- `UPPER_CASE` for constants.
|
||||
|
||||
## Rules
|
||||
|
||||
- Use Pydantic v2 standard.
|
||||
- Use `uv` for package management.
|
||||
- Do not override dunder methods like `__init__`, `__iadd__`, etc.
|
||||
- Never launch services (`uv run app.py`, `flask run`, etc.); running tests under `tests/` is allowed.
|
||||
- Prefer simple functions over classes for lightweight helpers.
|
||||
- Keep files below 800 lines; split when necessary.
|
||||
- Keep code readable—no clever hacks.
|
||||
- Never use `print`; log with `logger = logging.getLogger(__name__)`.
|
||||
|
||||
## Guiding Principles
|
||||
|
||||
- Mirror the project’s layered architecture: controller → service → core/domain.
|
||||
- Reuse existing helpers in `core/`, `services/`, and `libs/` before creating new abstractions.
|
||||
- Optimise for observability: deterministic control flow, clear logging, actionable errors.
|
||||
|
||||
## SQLAlchemy Patterns
|
||||
|
||||
- Models inherit from `models.base.Base`; never create ad-hoc metadata or engines.
|
||||
|
||||
- Open sessions with context managers:
|
||||
|
||||
```python
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
stmt = select(Workflow).where(
|
||||
Workflow.id == workflow_id,
|
||||
Workflow.tenant_id == tenant_id,
|
||||
)
|
||||
workflow = session.execute(stmt).scalar_one_or_none()
|
||||
```
|
||||
|
||||
- Use SQLAlchemy expressions; avoid raw SQL unless necessary.
|
||||
|
||||
- Introduce repository abstractions only for very large tables (e.g., workflow executions) to support alternative storage strategies.
|
||||
|
||||
- Always scope queries by `tenant_id` and protect write paths with safeguards (`FOR UPDATE`, row counts, etc.).
|
||||
|
||||
## Storage & External IO
|
||||
|
||||
- Access storage via `extensions.ext_storage.storage`.
|
||||
- Use `core.helper.ssrf_proxy` for outbound HTTP fetches.
|
||||
- Background tasks that touch storage must be idempotent and log the relevant object identifiers.
|
||||
|
||||
## Pydantic Usage
|
||||
|
||||
- Define DTOs with Pydantic v2 models and forbid extras by default.
|
||||
|
||||
- Use `@field_validator` / `@model_validator` for domain rules.
|
||||
|
||||
- Example:
|
||||
|
||||
```python
|
||||
from pydantic import BaseModel, ConfigDict, HttpUrl, field_validator
|
||||
|
||||
class TriggerConfig(BaseModel):
|
||||
endpoint: HttpUrl
|
||||
secret: str
|
||||
|
||||
model_config = ConfigDict(extra="forbid")
|
||||
|
||||
@field_validator("secret")
|
||||
def ensure_secret_prefix(cls, value: str) -> str:
|
||||
if not value.startswith("dify_"):
|
||||
raise ValueError("secret must start with dify_")
|
||||
return value
|
||||
```
|
||||
|
||||
## Generics & Protocols
|
||||
|
||||
- Use `typing.Protocol` to define behavioural contracts (e.g., cache interfaces).
|
||||
- Apply generics (`TypeVar`, `Generic`) for reusable utilities like caches or providers.
|
||||
- Validate dynamic inputs at runtime when generics cannot enforce safety alone.
|
||||
|
||||
## Error Handling & Logging
|
||||
|
||||
- Raise domain-specific exceptions (`services/errors`, `core/errors`) and translate to HTTP responses in controllers.
|
||||
- Declare `logger = logging.getLogger(__name__)` at module top.
|
||||
- Include tenant/app/workflow identifiers in log context.
|
||||
- Log retryable events at `warning`, terminal failures at `error`.
|
||||
|
||||
## Tooling & Checks
|
||||
|
||||
- Format/lint: `uv run --project api --dev ruff format ./api` and `uv run --project api --dev ruff check --fix --unsafe-fixes ./api`.
|
||||
- Type checks: `uv run --directory api --dev basedpyright`.
|
||||
- Tests: `uv run --project api --dev dev/pytest/pytest_unit_tests.sh`.
|
||||
- Run all of the above before submitting your work.
|
||||
|
||||
## Controllers & Services
|
||||
|
||||
- Controllers: parse input via Pydantic, invoke services, return serialised responses; no business logic.
|
||||
- Services: coordinate repositories, providers, background tasks; keep side effects explicit.
|
||||
- Avoid repositories unless necessary; direct SQLAlchemy usage is preferred for typical tables.
|
||||
- Document non-obvious behaviour with concise comments.
|
||||
|
||||
## Miscellaneous
|
||||
|
||||
- Use `configs.dify_config` for configuration—never read environment variables directly.
|
||||
- Maintain tenant awareness end-to-end; `tenant_id` must flow through every layer touching shared resources.
|
||||
- Queue async work through `services/async_workflow_service`; implement tasks under `tasks/` with explicit queue selection.
|
||||
- Keep experimental scripts under `dev/`; do not ship them in production builds.
|
||||
@ -3,6 +3,7 @@ import datetime
|
||||
import json
|
||||
import logging
|
||||
import secrets
|
||||
import time
|
||||
from typing import Any
|
||||
|
||||
import click
|
||||
@ -46,6 +47,8 @@ from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpi
|
||||
from services.plugin.data_migration import PluginDataMigration
|
||||
from services.plugin.plugin_migration import PluginMigration
|
||||
from services.plugin.plugin_service import PluginService
|
||||
from services.retention.conversation.messages_clean_policy import create_message_clean_policy
|
||||
from services.retention.conversation.messages_clean_service import MessagesCleanService
|
||||
from services.retention.workflow_run.clear_free_plan_expired_workflow_run_logs import WorkflowRunCleanup
|
||||
from tasks.remove_app_and_related_data_task import delete_draft_variables_batch
|
||||
|
||||
@ -2172,3 +2175,79 @@ def migrate_oss(
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
click.echo(click.style(f"Failed to update DB storage_type: {str(e)}", fg="red"))
|
||||
|
||||
|
||||
@click.command("clean-expired-messages", help="Clean expired messages.")
|
||||
@click.option(
|
||||
"--start-from",
|
||||
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
|
||||
required=True,
|
||||
help="Lower bound (inclusive) for created_at.",
|
||||
)
|
||||
@click.option(
|
||||
"--end-before",
|
||||
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
|
||||
required=True,
|
||||
help="Upper bound (exclusive) for created_at.",
|
||||
)
|
||||
@click.option("--batch-size", default=1000, show_default=True, help="Batch size for selecting messages.")
|
||||
@click.option(
|
||||
"--graceful-period",
|
||||
default=21,
|
||||
show_default=True,
|
||||
help="Graceful period in days after subscription expiration, will be ignored when billing is disabled.",
|
||||
)
|
||||
@click.option("--dry-run", is_flag=True, default=False, help="Show messages logs would be cleaned without deleting")
|
||||
def clean_expired_messages(
|
||||
batch_size: int,
|
||||
graceful_period: int,
|
||||
start_from: datetime.datetime,
|
||||
end_before: datetime.datetime,
|
||||
dry_run: bool,
|
||||
):
|
||||
"""
|
||||
Clean expired messages and related data for tenants based on clean policy.
|
||||
"""
|
||||
click.echo(click.style("clean_messages: start clean messages.", fg="green"))
|
||||
|
||||
start_at = time.perf_counter()
|
||||
|
||||
try:
|
||||
# Create policy based on billing configuration
|
||||
# NOTE: graceful_period will be ignored when billing is disabled.
|
||||
policy = create_message_clean_policy(graceful_period_days=graceful_period)
|
||||
|
||||
# Create and run the cleanup service
|
||||
service = MessagesCleanService.from_time_range(
|
||||
policy=policy,
|
||||
start_from=start_from,
|
||||
end_before=end_before,
|
||||
batch_size=batch_size,
|
||||
dry_run=dry_run,
|
||||
)
|
||||
stats = service.run()
|
||||
|
||||
end_at = time.perf_counter()
|
||||
click.echo(
|
||||
click.style(
|
||||
f"clean_messages: completed successfully\n"
|
||||
f" - Latency: {end_at - start_at:.2f}s\n"
|
||||
f" - Batches processed: {stats['batches']}\n"
|
||||
f" - Total messages scanned: {stats['total_messages']}\n"
|
||||
f" - Messages filtered: {stats['filtered_messages']}\n"
|
||||
f" - Messages deleted: {stats['total_deleted']}",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
end_at = time.perf_counter()
|
||||
logger.exception("clean_messages failed")
|
||||
click.echo(
|
||||
click.style(
|
||||
f"clean_messages: failed after {end_at - start_at:.2f}s - {str(e)}",
|
||||
fg="red",
|
||||
)
|
||||
)
|
||||
raise
|
||||
|
||||
click.echo(click.style("messages cleanup completed.", fg="green"))
|
||||
|
||||
@ -30,6 +30,11 @@ class TagBindingRemovePayload(BaseModel):
|
||||
type: Literal["knowledge", "app"] | None = Field(default=None, description="Tag type")
|
||||
|
||||
|
||||
class TagListQueryParam(BaseModel):
|
||||
type: Literal["knowledge", "app", ""] = Field("", description="Tag type filter")
|
||||
keyword: str | None = Field(None, description="Search keyword")
|
||||
|
||||
|
||||
register_schema_models(
|
||||
console_ns,
|
||||
TagBasePayload,
|
||||
@ -43,12 +48,15 @@ class TagListApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@console_ns.doc(
|
||||
params={"type": 'Tag type filter. Can be "knowledge" or "app".', "keyword": "Search keyword for tag name."}
|
||||
)
|
||||
@marshal_with(dataset_tag_fields)
|
||||
def get(self):
|
||||
_, current_tenant_id = current_account_with_tenant()
|
||||
tag_type = request.args.get("type", type=str, default="")
|
||||
keyword = request.args.get("keyword", default=None, type=str)
|
||||
tags = TagService.get_tags(tag_type, current_tenant_id, keyword)
|
||||
raw_args = request.args.to_dict()
|
||||
param = TagListQueryParam.model_validate(raw_args)
|
||||
tags = TagService.get_tags(param.type, current_tenant_id, param.keyword)
|
||||
|
||||
return tags, 200
|
||||
|
||||
|
||||
@ -71,8 +71,8 @@ class LLMGenerator:
|
||||
response: LLMResult = model_instance.invoke_llm(
|
||||
prompt_messages=list(prompts), model_parameters={"max_tokens": 500, "temperature": 1}, stream=False
|
||||
)
|
||||
answer = cast(str, response.message.content)
|
||||
if answer is None:
|
||||
answer = response.message.get_text_content()
|
||||
if answer == "":
|
||||
return ""
|
||||
try:
|
||||
result_dict = json.loads(answer)
|
||||
@ -184,7 +184,7 @@ class LLMGenerator:
|
||||
prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False
|
||||
)
|
||||
|
||||
rule_config["prompt"] = cast(str, response.message.content)
|
||||
rule_config["prompt"] = response.message.get_text_content()
|
||||
|
||||
except InvokeError as e:
|
||||
error = str(e)
|
||||
@ -237,13 +237,11 @@ class LLMGenerator:
|
||||
|
||||
return rule_config
|
||||
|
||||
rule_config["prompt"] = cast(str, prompt_content.message.content)
|
||||
rule_config["prompt"] = prompt_content.message.get_text_content()
|
||||
|
||||
if not isinstance(prompt_content.message.content, str):
|
||||
raise NotImplementedError("prompt content is not a string")
|
||||
parameter_generate_prompt = parameter_template.format(
|
||||
inputs={
|
||||
"INPUT_TEXT": prompt_content.message.content,
|
||||
"INPUT_TEXT": prompt_content.message.get_text_content(),
|
||||
},
|
||||
remove_template_variables=False,
|
||||
)
|
||||
@ -253,7 +251,7 @@ class LLMGenerator:
|
||||
statement_generate_prompt = statement_template.format(
|
||||
inputs={
|
||||
"TASK_DESCRIPTION": instruction,
|
||||
"INPUT_TEXT": prompt_content.message.content,
|
||||
"INPUT_TEXT": prompt_content.message.get_text_content(),
|
||||
},
|
||||
remove_template_variables=False,
|
||||
)
|
||||
@ -263,7 +261,7 @@ class LLMGenerator:
|
||||
parameter_content: LLMResult = model_instance.invoke_llm(
|
||||
prompt_messages=list(parameter_messages), model_parameters=model_parameters, stream=False
|
||||
)
|
||||
rule_config["variables"] = re.findall(r'"\s*([^"]+)\s*"', cast(str, parameter_content.message.content))
|
||||
rule_config["variables"] = re.findall(r'"\s*([^"]+)\s*"', parameter_content.message.get_text_content())
|
||||
except InvokeError as e:
|
||||
error = str(e)
|
||||
error_step = "generate variables"
|
||||
@ -272,7 +270,7 @@ class LLMGenerator:
|
||||
statement_content: LLMResult = model_instance.invoke_llm(
|
||||
prompt_messages=list(statement_messages), model_parameters=model_parameters, stream=False
|
||||
)
|
||||
rule_config["opening_statement"] = cast(str, statement_content.message.content)
|
||||
rule_config["opening_statement"] = statement_content.message.get_text_content()
|
||||
except InvokeError as e:
|
||||
error = str(e)
|
||||
error_step = "generate conversation opener"
|
||||
@ -315,7 +313,7 @@ class LLMGenerator:
|
||||
prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False
|
||||
)
|
||||
|
||||
generated_code = cast(str, response.message.content)
|
||||
generated_code = response.message.get_text_content()
|
||||
return {"code": generated_code, "language": code_language, "error": ""}
|
||||
|
||||
except InvokeError as e:
|
||||
@ -351,7 +349,7 @@ class LLMGenerator:
|
||||
raise TypeError("Expected LLMResult when stream=False")
|
||||
response = result
|
||||
|
||||
answer = cast(str, response.message.content)
|
||||
answer = response.message.get_text_content()
|
||||
return answer.strip()
|
||||
|
||||
@classmethod
|
||||
@ -375,10 +373,7 @@ class LLMGenerator:
|
||||
prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False
|
||||
)
|
||||
|
||||
raw_content = response.message.content
|
||||
|
||||
if not isinstance(raw_content, str):
|
||||
raise ValueError(f"LLM response content must be a string, got: {type(raw_content)}")
|
||||
raw_content = response.message.get_text_content()
|
||||
|
||||
try:
|
||||
parsed_content = json.loads(raw_content)
|
||||
|
||||
@ -189,8 +189,7 @@ class WorkflowEntry:
|
||||
)
|
||||
|
||||
try:
|
||||
# run node
|
||||
generator = node.run()
|
||||
generator = cls._traced_node_run(node)
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
"error while running node, workflow_id=%s, node_id=%s, node_type=%s, node_version=%s",
|
||||
@ -323,8 +322,7 @@ class WorkflowEntry:
|
||||
tenant_id=tenant_id,
|
||||
)
|
||||
|
||||
# run node
|
||||
generator = node.run()
|
||||
generator = cls._traced_node_run(node)
|
||||
|
||||
return node, generator
|
||||
except Exception as e:
|
||||
@ -430,3 +428,26 @@ class WorkflowEntry:
|
||||
input_value = current_variable.value | input_value
|
||||
|
||||
variable_pool.add([variable_node_id] + variable_key_list, input_value)
|
||||
|
||||
@staticmethod
|
||||
def _traced_node_run(node: Node) -> Generator[GraphNodeEventBase, None, None]:
|
||||
"""
|
||||
Wraps a node's run method with OpenTelemetry tracing and returns a generator.
|
||||
"""
|
||||
# Wrap node.run() with ObservabilityLayer hooks to produce node-level spans
|
||||
layer = ObservabilityLayer()
|
||||
layer.on_graph_start()
|
||||
node.ensure_execution_id()
|
||||
|
||||
def _gen():
|
||||
error: Exception | None = None
|
||||
layer.on_node_run_start(node)
|
||||
try:
|
||||
yield from node.run()
|
||||
except Exception as exc:
|
||||
error = exc
|
||||
raise
|
||||
finally:
|
||||
layer.on_node_run_end(node, error)
|
||||
|
||||
return _gen()
|
||||
|
||||
@ -6,6 +6,7 @@ from .create_site_record_when_app_created import handle as handle_create_site_re
|
||||
from .delete_tool_parameters_cache_when_sync_draft_workflow import (
|
||||
handle as handle_delete_tool_parameters_cache_when_sync_draft_workflow,
|
||||
)
|
||||
from .queue_credential_sync_when_tenant_created import handle as handle_queue_credential_sync_when_tenant_created
|
||||
from .sync_plugin_trigger_when_app_created import handle as handle_sync_plugin_trigger_when_app_created
|
||||
from .sync_webhook_when_app_created import handle as handle_sync_webhook_when_app_created
|
||||
from .sync_workflow_schedule_when_app_published import handle as handle_sync_workflow_schedule_when_app_published
|
||||
@ -30,6 +31,7 @@ __all__ = [
|
||||
"handle_create_installed_app_when_app_created",
|
||||
"handle_create_site_record_when_app_created",
|
||||
"handle_delete_tool_parameters_cache_when_sync_draft_workflow",
|
||||
"handle_queue_credential_sync_when_tenant_created",
|
||||
"handle_sync_plugin_trigger_when_app_created",
|
||||
"handle_sync_webhook_when_app_created",
|
||||
"handle_sync_workflow_schedule_when_app_published",
|
||||
|
||||
@ -0,0 +1,19 @@
|
||||
from configs import dify_config
|
||||
from events.tenant_event import tenant_was_created
|
||||
from services.enterprise.workspace_sync import WorkspaceSyncService
|
||||
|
||||
|
||||
@tenant_was_created.connect
|
||||
def handle(sender, **kwargs):
|
||||
"""Queue credential sync when a tenant/workspace is created."""
|
||||
# Only queue sync tasks if plugin manager (enterprise feature) is enabled
|
||||
if not dify_config.ENTERPRISE_ENABLED:
|
||||
return
|
||||
|
||||
tenant = sender
|
||||
|
||||
# Determine source from kwargs if available, otherwise use generic
|
||||
source = kwargs.get("source", "tenant_created")
|
||||
|
||||
# Queue credential sync task to Redis for enterprise backend to process
|
||||
WorkspaceSyncService.queue_credential_sync(tenant.id, source=source)
|
||||
@ -4,6 +4,7 @@ from dify_app import DifyApp
|
||||
def init_app(app: DifyApp):
|
||||
from commands import (
|
||||
add_qdrant_index,
|
||||
clean_expired_messages,
|
||||
clean_workflow_runs,
|
||||
cleanup_orphaned_draft_variables,
|
||||
clear_free_plan_tenant_expired_logs,
|
||||
@ -58,6 +59,7 @@ def init_app(app: DifyApp):
|
||||
transform_datasource_credentials,
|
||||
install_rag_pipeline_plugins,
|
||||
clean_workflow_runs,
|
||||
clean_expired_messages,
|
||||
]
|
||||
for cmd in cmds_to_register:
|
||||
app.cli.add_command(cmd)
|
||||
|
||||
@ -115,7 +115,18 @@ def build_from_mappings(
|
||||
# TODO(QuantumGhost): Performance concern - each mapping triggers a separate database query.
|
||||
# Implement batch processing to reduce database load when handling multiple files.
|
||||
# Filter out None/empty mappings to avoid errors
|
||||
valid_mappings = [m for m in mappings if m and m.get("transfer_method")]
|
||||
def is_valid_mapping(m: Mapping[str, Any]) -> bool:
|
||||
if not m or not m.get("transfer_method"):
|
||||
return False
|
||||
# For REMOTE_URL transfer method, ensure url or remote_url is provided and not None
|
||||
transfer_method = m.get("transfer_method")
|
||||
if transfer_method == FileTransferMethod.REMOTE_URL:
|
||||
url = m.get("url") or m.get("remote_url")
|
||||
if not url:
|
||||
return False
|
||||
return True
|
||||
|
||||
valid_mappings = [m for m in mappings if is_valid_mapping(m)]
|
||||
files = [
|
||||
build_from_mapping(
|
||||
mapping=mapping,
|
||||
|
||||
@ -0,0 +1,33 @@
|
||||
"""feat: add created_at id index to messages
|
||||
|
||||
Revision ID: 3334862ee907
|
||||
Revises: 905527cc8fd3
|
||||
Create Date: 2026-01-12 17:29:44.846544
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import models as models
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '3334862ee907'
|
||||
down_revision = '905527cc8fd3'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('messages', schema=None) as batch_op:
|
||||
batch_op.create_index('message_created_at_id_idx', ['created_at', 'id'], unique=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('messages', schema=None) as batch_op:
|
||||
batch_op.drop_index('message_created_at_id_idx')
|
||||
|
||||
# ### end Alembic commands ###
|
||||
@ -968,6 +968,7 @@ class Message(Base):
|
||||
Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"),
|
||||
Index("message_created_at_idx", "created_at"),
|
||||
Index("message_app_mode_idx", "app_mode"),
|
||||
Index("message_created_at_id_idx", "created_at", "id"),
|
||||
)
|
||||
|
||||
id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()))
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "dify-api"
|
||||
version = "1.11.3"
|
||||
version = "1.11.4"
|
||||
requires-python = ">=3.11,<3.13"
|
||||
|
||||
dependencies = [
|
||||
|
||||
@ -1,90 +1,62 @@
|
||||
import datetime
|
||||
import logging
|
||||
import time
|
||||
|
||||
import click
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
|
||||
import app
|
||||
from configs import dify_config
|
||||
from enums.cloud_plan import CloudPlan
|
||||
from extensions.ext_database import db
|
||||
from extensions.ext_redis import redis_client
|
||||
from models.model import (
|
||||
App,
|
||||
Message,
|
||||
MessageAgentThought,
|
||||
MessageAnnotation,
|
||||
MessageChain,
|
||||
MessageFeedback,
|
||||
MessageFile,
|
||||
)
|
||||
from models.web import SavedMessage
|
||||
from services.feature_service import FeatureService
|
||||
from services.retention.conversation.messages_clean_policy import create_message_clean_policy
|
||||
from services.retention.conversation.messages_clean_service import MessagesCleanService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@app.celery.task(queue="dataset")
|
||||
@app.celery.task(queue="retention")
|
||||
def clean_messages():
|
||||
click.echo(click.style("Start clean messages.", fg="green"))
|
||||
start_at = time.perf_counter()
|
||||
plan_sandbox_clean_message_day = datetime.datetime.now() - datetime.timedelta(
|
||||
days=dify_config.PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING
|
||||
)
|
||||
while True:
|
||||
try:
|
||||
# Main query with join and filter
|
||||
messages = (
|
||||
db.session.query(Message)
|
||||
.where(Message.created_at < plan_sandbox_clean_message_day)
|
||||
.order_by(Message.created_at.desc())
|
||||
.limit(100)
|
||||
.all()
|
||||
)
|
||||
"""
|
||||
Clean expired messages based on clean policy.
|
||||
|
||||
except SQLAlchemyError:
|
||||
raise
|
||||
if not messages:
|
||||
break
|
||||
for message in messages:
|
||||
app = db.session.query(App).filter_by(id=message.app_id).first()
|
||||
if not app:
|
||||
logger.warning(
|
||||
"Expected App record to exist, but none was found, app_id=%s, message_id=%s",
|
||||
message.app_id,
|
||||
message.id,
|
||||
)
|
||||
continue
|
||||
features_cache_key = f"features:{app.tenant_id}"
|
||||
plan_cache = redis_client.get(features_cache_key)
|
||||
if plan_cache is None:
|
||||
features = FeatureService.get_features(app.tenant_id)
|
||||
redis_client.setex(features_cache_key, 600, features.billing.subscription.plan)
|
||||
plan = features.billing.subscription.plan
|
||||
else:
|
||||
plan = plan_cache.decode()
|
||||
if plan == CloudPlan.SANDBOX:
|
||||
# clean related message
|
||||
db.session.query(MessageFeedback).where(MessageFeedback.message_id == message.id).delete(
|
||||
synchronize_session=False
|
||||
)
|
||||
db.session.query(MessageAnnotation).where(MessageAnnotation.message_id == message.id).delete(
|
||||
synchronize_session=False
|
||||
)
|
||||
db.session.query(MessageChain).where(MessageChain.message_id == message.id).delete(
|
||||
synchronize_session=False
|
||||
)
|
||||
db.session.query(MessageAgentThought).where(MessageAgentThought.message_id == message.id).delete(
|
||||
synchronize_session=False
|
||||
)
|
||||
db.session.query(MessageFile).where(MessageFile.message_id == message.id).delete(
|
||||
synchronize_session=False
|
||||
)
|
||||
db.session.query(SavedMessage).where(SavedMessage.message_id == message.id).delete(
|
||||
synchronize_session=False
|
||||
)
|
||||
db.session.query(Message).where(Message.id == message.id).delete()
|
||||
db.session.commit()
|
||||
end_at = time.perf_counter()
|
||||
click.echo(click.style(f"Cleaned messages from db success latency: {end_at - start_at}", fg="green"))
|
||||
This task uses MessagesCleanService to efficiently clean messages in batches.
|
||||
The behavior depends on BILLING_ENABLED configuration:
|
||||
- BILLING_ENABLED=True: only delete messages from sandbox tenants (with whitelist/grace period)
|
||||
- BILLING_ENABLED=False: delete all messages within the time range
|
||||
"""
|
||||
click.echo(click.style("clean_messages: start clean messages.", fg="green"))
|
||||
start_at = time.perf_counter()
|
||||
|
||||
try:
|
||||
# Create policy based on billing configuration
|
||||
policy = create_message_clean_policy(
|
||||
graceful_period_days=dify_config.SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD,
|
||||
)
|
||||
|
||||
# Create and run the cleanup service
|
||||
service = MessagesCleanService.from_days(
|
||||
policy=policy,
|
||||
days=dify_config.SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS,
|
||||
batch_size=dify_config.SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE,
|
||||
)
|
||||
stats = service.run()
|
||||
|
||||
end_at = time.perf_counter()
|
||||
click.echo(
|
||||
click.style(
|
||||
f"clean_messages: completed successfully\n"
|
||||
f" - Latency: {end_at - start_at:.2f}s\n"
|
||||
f" - Batches processed: {stats['batches']}\n"
|
||||
f" - Total messages scanned: {stats['total_messages']}\n"
|
||||
f" - Messages filtered: {stats['filtered_messages']}\n"
|
||||
f" - Messages deleted: {stats['total_deleted']}",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
end_at = time.perf_counter()
|
||||
logger.exception("clean_messages failed")
|
||||
click.echo(
|
||||
click.style(
|
||||
f"clean_messages: failed after {end_at - start_at:.2f}s - {str(e)}",
|
||||
fg="red",
|
||||
)
|
||||
)
|
||||
raise
|
||||
|
||||
58
api/services/enterprise/workspace_sync.py
Normal file
58
api/services/enterprise/workspace_sync.py
Normal file
@ -0,0 +1,58 @@
|
||||
import json
|
||||
import logging
|
||||
import uuid
|
||||
from datetime import UTC, datetime
|
||||
|
||||
from redis import RedisError
|
||||
|
||||
from extensions.ext_redis import redis_client
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
WORKSPACE_SYNC_QUEUE = "enterprise:workspace:sync:queue"
|
||||
WORKSPACE_SYNC_PROCESSING = "enterprise:workspace:sync:processing"
|
||||
|
||||
|
||||
class WorkspaceSyncService:
|
||||
"""Service to publish workspace sync tasks to Redis queue for enterprise backend consumption"""
|
||||
|
||||
@staticmethod
|
||||
def queue_credential_sync(workspace_id: str, *, source: str) -> bool:
|
||||
"""
|
||||
Queue a credential sync task for a newly created workspace.
|
||||
|
||||
This publishes a task to Redis that will be consumed by the enterprise backend
|
||||
worker to sync credentials with the plugin-manager.
|
||||
|
||||
Args:
|
||||
workspace_id: The workspace/tenant ID to sync credentials for
|
||||
source: Source of the sync request (for debugging/tracking)
|
||||
|
||||
Returns:
|
||||
bool: True if task was queued successfully, False otherwise
|
||||
"""
|
||||
try:
|
||||
task = {
|
||||
"task_id": str(uuid.uuid4()),
|
||||
"workspace_id": workspace_id,
|
||||
"retry_count": 0,
|
||||
"created_at": datetime.now(UTC).isoformat(),
|
||||
"source": source,
|
||||
}
|
||||
|
||||
# Push to Redis list (queue) - LPUSH adds to the head, worker consumes from tail with RPOP
|
||||
redis_client.lpush(WORKSPACE_SYNC_QUEUE, json.dumps(task))
|
||||
|
||||
logger.info(
|
||||
"Queued credential sync task for workspace %s, task_id: %s, source: %s",
|
||||
workspace_id,
|
||||
task["task_id"],
|
||||
source,
|
||||
)
|
||||
return True
|
||||
|
||||
except (RedisError, TypeError) as e:
|
||||
logger.error("Failed to queue credential sync for workspace %s: %s", workspace_id, str(e), exc_info=True)
|
||||
# Don't raise - we don't want to fail workspace creation if queueing fails
|
||||
# The scheduled task will catch it later
|
||||
return False
|
||||
216
api/services/retention/conversation/messages_clean_policy.py
Normal file
216
api/services/retention/conversation/messages_clean_policy.py
Normal file
@ -0,0 +1,216 @@
|
||||
import datetime
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from collections.abc import Callable, Sequence
|
||||
from dataclasses import dataclass
|
||||
|
||||
from configs import dify_config
|
||||
from enums.cloud_plan import CloudPlan
|
||||
from services.billing_service import BillingService, SubscriptionPlan
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class SimpleMessage:
|
||||
id: str
|
||||
app_id: str
|
||||
created_at: datetime.datetime
|
||||
|
||||
|
||||
class MessagesCleanPolicy(ABC):
|
||||
"""
|
||||
Abstract base class for message cleanup policies.
|
||||
|
||||
A policy determines which messages from a batch should be deleted.
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def filter_message_ids(
|
||||
self,
|
||||
messages: Sequence[SimpleMessage],
|
||||
app_to_tenant: dict[str, str],
|
||||
) -> Sequence[str]:
|
||||
"""
|
||||
Filter messages and return IDs of messages that should be deleted.
|
||||
|
||||
Args:
|
||||
messages: Batch of messages to evaluate
|
||||
app_to_tenant: Mapping from app_id to tenant_id
|
||||
|
||||
Returns:
|
||||
List of message IDs that should be deleted
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
class BillingDisabledPolicy(MessagesCleanPolicy):
|
||||
"""
|
||||
Policy for community or enterpriseedition (billing disabled).
|
||||
|
||||
No special filter logic, just return all message ids.
|
||||
"""
|
||||
|
||||
def filter_message_ids(
|
||||
self,
|
||||
messages: Sequence[SimpleMessage],
|
||||
app_to_tenant: dict[str, str],
|
||||
) -> Sequence[str]:
|
||||
return [msg.id for msg in messages]
|
||||
|
||||
|
||||
class BillingSandboxPolicy(MessagesCleanPolicy):
|
||||
"""
|
||||
Policy for sandbox plan tenants in cloud edition (billing enabled).
|
||||
|
||||
Filters messages based on sandbox plan expiration rules:
|
||||
- Skip tenants in the whitelist
|
||||
- Only delete messages from sandbox plan tenants
|
||||
- Respect grace period after subscription expiration
|
||||
- Safe default: if tenant mapping or plan is missing, do NOT delete
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
plan_provider: Callable[[Sequence[str]], dict[str, SubscriptionPlan]],
|
||||
graceful_period_days: int = 21,
|
||||
tenant_whitelist: Sequence[str] | None = None,
|
||||
current_timestamp: int | None = None,
|
||||
) -> None:
|
||||
self._graceful_period_days = graceful_period_days
|
||||
self._tenant_whitelist: Sequence[str] = tenant_whitelist or []
|
||||
self._plan_provider = plan_provider
|
||||
self._current_timestamp = current_timestamp
|
||||
|
||||
def filter_message_ids(
|
||||
self,
|
||||
messages: Sequence[SimpleMessage],
|
||||
app_to_tenant: dict[str, str],
|
||||
) -> Sequence[str]:
|
||||
"""
|
||||
Filter messages based on sandbox plan expiration rules.
|
||||
|
||||
Args:
|
||||
messages: Batch of messages to evaluate
|
||||
app_to_tenant: Mapping from app_id to tenant_id
|
||||
|
||||
Returns:
|
||||
List of message IDs that should be deleted
|
||||
"""
|
||||
if not messages or not app_to_tenant:
|
||||
return []
|
||||
|
||||
# Get unique tenant_ids and fetch subscription plans
|
||||
tenant_ids = list(set(app_to_tenant.values()))
|
||||
tenant_plans = self._plan_provider(tenant_ids)
|
||||
|
||||
if not tenant_plans:
|
||||
return []
|
||||
|
||||
# Apply sandbox deletion rules
|
||||
return self._filter_expired_sandbox_messages(
|
||||
messages=messages,
|
||||
app_to_tenant=app_to_tenant,
|
||||
tenant_plans=tenant_plans,
|
||||
)
|
||||
|
||||
def _filter_expired_sandbox_messages(
|
||||
self,
|
||||
messages: Sequence[SimpleMessage],
|
||||
app_to_tenant: dict[str, str],
|
||||
tenant_plans: dict[str, SubscriptionPlan],
|
||||
) -> list[str]:
|
||||
"""
|
||||
Filter messages that should be deleted based on sandbox plan expiration.
|
||||
|
||||
A message should be deleted if:
|
||||
1. It belongs to a sandbox tenant AND
|
||||
2. Either:
|
||||
a) The tenant has no previous subscription (expiration_date == -1), OR
|
||||
b) The subscription expired more than graceful_period_days ago
|
||||
|
||||
Args:
|
||||
messages: List of message objects with id and app_id attributes
|
||||
app_to_tenant: Mapping from app_id to tenant_id
|
||||
tenant_plans: Mapping from tenant_id to subscription plan info
|
||||
|
||||
Returns:
|
||||
List of message IDs that should be deleted
|
||||
"""
|
||||
current_timestamp = self._current_timestamp
|
||||
if current_timestamp is None:
|
||||
current_timestamp = int(datetime.datetime.now(datetime.UTC).timestamp())
|
||||
|
||||
sandbox_message_ids: list[str] = []
|
||||
graceful_period_seconds = self._graceful_period_days * 24 * 60 * 60
|
||||
|
||||
for msg in messages:
|
||||
# Get tenant_id for this message's app
|
||||
tenant_id = app_to_tenant.get(msg.app_id)
|
||||
if not tenant_id:
|
||||
continue
|
||||
|
||||
# Skip tenant messages in whitelist
|
||||
if tenant_id in self._tenant_whitelist:
|
||||
continue
|
||||
|
||||
# Get subscription plan for this tenant
|
||||
tenant_plan = tenant_plans.get(tenant_id)
|
||||
if not tenant_plan:
|
||||
continue
|
||||
|
||||
plan = str(tenant_plan["plan"])
|
||||
expiration_date = int(tenant_plan["expiration_date"])
|
||||
|
||||
# Only process sandbox plans
|
||||
if plan != CloudPlan.SANDBOX:
|
||||
continue
|
||||
|
||||
# Case 1: No previous subscription (-1 means never had a paid subscription)
|
||||
if expiration_date == -1:
|
||||
sandbox_message_ids.append(msg.id)
|
||||
continue
|
||||
|
||||
# Case 2: Subscription expired beyond grace period
|
||||
if current_timestamp - expiration_date > graceful_period_seconds:
|
||||
sandbox_message_ids.append(msg.id)
|
||||
|
||||
return sandbox_message_ids
|
||||
|
||||
|
||||
def create_message_clean_policy(
|
||||
graceful_period_days: int = 21,
|
||||
current_timestamp: int | None = None,
|
||||
) -> MessagesCleanPolicy:
|
||||
"""
|
||||
Factory function to create the appropriate message clean policy.
|
||||
|
||||
Determines which policy to use based on BILLING_ENABLED configuration:
|
||||
- If BILLING_ENABLED is True: returns BillingSandboxPolicy
|
||||
- If BILLING_ENABLED is False: returns BillingDisabledPolicy
|
||||
|
||||
Args:
|
||||
graceful_period_days: Grace period in days after subscription expiration (default: 21)
|
||||
current_timestamp: Current Unix timestamp for testing (default: None, uses current time)
|
||||
"""
|
||||
if not dify_config.BILLING_ENABLED:
|
||||
logger.info("create_message_clean_policy: billing disabled, using BillingDisabledPolicy")
|
||||
return BillingDisabledPolicy()
|
||||
|
||||
# Billing enabled - fetch whitelist from BillingService
|
||||
tenant_whitelist = BillingService.get_expired_subscription_cleanup_whitelist()
|
||||
plan_provider = BillingService.get_plan_bulk_with_cache
|
||||
|
||||
logger.info(
|
||||
"create_message_clean_policy: billing enabled, using BillingSandboxPolicy "
|
||||
"(graceful_period_days=%s, whitelist=%s)",
|
||||
graceful_period_days,
|
||||
tenant_whitelist,
|
||||
)
|
||||
|
||||
return BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=graceful_period_days,
|
||||
tenant_whitelist=tenant_whitelist,
|
||||
current_timestamp=current_timestamp,
|
||||
)
|
||||
334
api/services/retention/conversation/messages_clean_service.py
Normal file
334
api/services/retention/conversation/messages_clean_service.py
Normal file
@ -0,0 +1,334 @@
|
||||
import datetime
|
||||
import logging
|
||||
import random
|
||||
from collections.abc import Sequence
|
||||
from typing import cast
|
||||
|
||||
from sqlalchemy import delete, select
|
||||
from sqlalchemy.engine import CursorResult
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from extensions.ext_database import db
|
||||
from models.model import (
|
||||
App,
|
||||
AppAnnotationHitHistory,
|
||||
DatasetRetrieverResource,
|
||||
Message,
|
||||
MessageAgentThought,
|
||||
MessageAnnotation,
|
||||
MessageChain,
|
||||
MessageFeedback,
|
||||
MessageFile,
|
||||
)
|
||||
from models.web import SavedMessage
|
||||
from services.retention.conversation.messages_clean_policy import (
|
||||
MessagesCleanPolicy,
|
||||
SimpleMessage,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MessagesCleanService:
|
||||
"""
|
||||
Service for cleaning expired messages based on retention policies.
|
||||
|
||||
Compatible with non cloud edition (billing disabled): all messages in the time range will be deleted.
|
||||
If billing is enabled: only sandbox plan tenant messages are deleted (with whitelist and grace period support).
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
policy: MessagesCleanPolicy,
|
||||
end_before: datetime.datetime,
|
||||
start_from: datetime.datetime | None = None,
|
||||
batch_size: int = 1000,
|
||||
dry_run: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Initialize the service with cleanup parameters.
|
||||
|
||||
Args:
|
||||
policy: The policy that determines which messages to delete
|
||||
end_before: End time (exclusive) of the range
|
||||
start_from: Optional start time (inclusive) of the range
|
||||
batch_size: Number of messages to process per batch
|
||||
dry_run: Whether to perform a dry run (no actual deletion)
|
||||
"""
|
||||
self._policy = policy
|
||||
self._end_before = end_before
|
||||
self._start_from = start_from
|
||||
self._batch_size = batch_size
|
||||
self._dry_run = dry_run
|
||||
|
||||
@classmethod
|
||||
def from_time_range(
|
||||
cls,
|
||||
policy: MessagesCleanPolicy,
|
||||
start_from: datetime.datetime,
|
||||
end_before: datetime.datetime,
|
||||
batch_size: int = 1000,
|
||||
dry_run: bool = False,
|
||||
) -> "MessagesCleanService":
|
||||
"""
|
||||
Create a service instance for cleaning messages within a specific time range.
|
||||
|
||||
Time range is [start_from, end_before).
|
||||
|
||||
Args:
|
||||
policy: The policy that determines which messages to delete
|
||||
start_from: Start time (inclusive) of the range
|
||||
end_before: End time (exclusive) of the range
|
||||
batch_size: Number of messages to process per batch
|
||||
dry_run: Whether to perform a dry run (no actual deletion)
|
||||
|
||||
Returns:
|
||||
MessagesCleanService instance
|
||||
|
||||
Raises:
|
||||
ValueError: If start_from >= end_before or invalid parameters
|
||||
"""
|
||||
if start_from >= end_before:
|
||||
raise ValueError(f"start_from ({start_from}) must be less than end_before ({end_before})")
|
||||
|
||||
if batch_size <= 0:
|
||||
raise ValueError(f"batch_size ({batch_size}) must be greater than 0")
|
||||
|
||||
logger.info(
|
||||
"clean_messages: start_from=%s, end_before=%s, batch_size=%s, policy=%s",
|
||||
start_from,
|
||||
end_before,
|
||||
batch_size,
|
||||
policy.__class__.__name__,
|
||||
)
|
||||
|
||||
return cls(
|
||||
policy=policy,
|
||||
end_before=end_before,
|
||||
start_from=start_from,
|
||||
batch_size=batch_size,
|
||||
dry_run=dry_run,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_days(
|
||||
cls,
|
||||
policy: MessagesCleanPolicy,
|
||||
days: int = 30,
|
||||
batch_size: int = 1000,
|
||||
dry_run: bool = False,
|
||||
) -> "MessagesCleanService":
|
||||
"""
|
||||
Create a service instance for cleaning messages older than specified days.
|
||||
|
||||
Args:
|
||||
policy: The policy that determines which messages to delete
|
||||
days: Number of days to look back from now
|
||||
batch_size: Number of messages to process per batch
|
||||
dry_run: Whether to perform a dry run (no actual deletion)
|
||||
|
||||
Returns:
|
||||
MessagesCleanService instance
|
||||
|
||||
Raises:
|
||||
ValueError: If invalid parameters
|
||||
"""
|
||||
if days < 0:
|
||||
raise ValueError(f"days ({days}) must be greater than or equal to 0")
|
||||
|
||||
if batch_size <= 0:
|
||||
raise ValueError(f"batch_size ({batch_size}) must be greater than 0")
|
||||
|
||||
end_before = datetime.datetime.now() - datetime.timedelta(days=days)
|
||||
|
||||
logger.info(
|
||||
"clean_messages: days=%s, end_before=%s, batch_size=%s, policy=%s",
|
||||
days,
|
||||
end_before,
|
||||
batch_size,
|
||||
policy.__class__.__name__,
|
||||
)
|
||||
|
||||
return cls(policy=policy, end_before=end_before, start_from=None, batch_size=batch_size, dry_run=dry_run)
|
||||
|
||||
def run(self) -> dict[str, int]:
|
||||
"""
|
||||
Execute the message cleanup operation.
|
||||
|
||||
Returns:
|
||||
Dict with statistics: batches, filtered_messages, total_deleted
|
||||
"""
|
||||
return self._clean_messages_by_time_range()
|
||||
|
||||
def _clean_messages_by_time_range(self) -> dict[str, int]:
|
||||
"""
|
||||
Clean messages within a time range using cursor-based pagination.
|
||||
|
||||
Time range is [start_from, end_before)
|
||||
|
||||
Steps:
|
||||
1. Iterate messages using cursor pagination (by created_at, id)
|
||||
2. Query app_id -> tenant_id mapping
|
||||
3. Delegate to policy to determine which messages to delete
|
||||
4. Batch delete messages and their relations
|
||||
|
||||
Returns:
|
||||
Dict with statistics: batches, filtered_messages, total_deleted
|
||||
"""
|
||||
stats = {
|
||||
"batches": 0,
|
||||
"total_messages": 0,
|
||||
"filtered_messages": 0,
|
||||
"total_deleted": 0,
|
||||
}
|
||||
|
||||
# Cursor-based pagination using (created_at, id) to avoid infinite loops
|
||||
# and ensure proper ordering with time-based filtering
|
||||
_cursor: tuple[datetime.datetime, str] | None = None
|
||||
|
||||
logger.info(
|
||||
"clean_messages: start cleaning messages (dry_run=%s), start_from=%s, end_before=%s",
|
||||
self._dry_run,
|
||||
self._start_from,
|
||||
self._end_before,
|
||||
)
|
||||
|
||||
while True:
|
||||
stats["batches"] += 1
|
||||
|
||||
# Step 1: Fetch a batch of messages using cursor
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
msg_stmt = (
|
||||
select(Message.id, Message.app_id, Message.created_at)
|
||||
.where(Message.created_at < self._end_before)
|
||||
.order_by(Message.created_at, Message.id)
|
||||
.limit(self._batch_size)
|
||||
)
|
||||
|
||||
if self._start_from:
|
||||
msg_stmt = msg_stmt.where(Message.created_at >= self._start_from)
|
||||
|
||||
# Apply cursor condition: (created_at, id) > (last_created_at, last_message_id)
|
||||
# This translates to:
|
||||
# created_at > last_created_at OR (created_at = last_created_at AND id > last_message_id)
|
||||
if _cursor:
|
||||
# Continuing from previous batch
|
||||
msg_stmt = msg_stmt.where(
|
||||
(Message.created_at > _cursor[0])
|
||||
| ((Message.created_at == _cursor[0]) & (Message.id > _cursor[1]))
|
||||
)
|
||||
|
||||
raw_messages = list(session.execute(msg_stmt).all())
|
||||
messages = [
|
||||
SimpleMessage(id=msg_id, app_id=app_id, created_at=msg_created_at)
|
||||
for msg_id, app_id, msg_created_at in raw_messages
|
||||
]
|
||||
|
||||
# Track total messages fetched across all batches
|
||||
stats["total_messages"] += len(messages)
|
||||
|
||||
if not messages:
|
||||
logger.info("clean_messages (batch %s): no more messages to process", stats["batches"])
|
||||
break
|
||||
|
||||
# Update cursor to the last message's (created_at, id)
|
||||
_cursor = (messages[-1].created_at, messages[-1].id)
|
||||
|
||||
# Step 2: Extract app_ids and query tenant_ids
|
||||
app_ids = list({msg.app_id for msg in messages})
|
||||
|
||||
if not app_ids:
|
||||
logger.info("clean_messages (batch %s): no app_ids found, skip", stats["batches"])
|
||||
continue
|
||||
|
||||
app_stmt = select(App.id, App.tenant_id).where(App.id.in_(app_ids))
|
||||
apps = list(session.execute(app_stmt).all())
|
||||
|
||||
if not apps:
|
||||
logger.info("clean_messages (batch %s): no apps found, skip", stats["batches"])
|
||||
continue
|
||||
|
||||
# Build app_id -> tenant_id mapping
|
||||
app_to_tenant: dict[str, str] = {app.id: app.tenant_id for app in apps}
|
||||
|
||||
# Step 3: Delegate to policy to determine which messages to delete
|
||||
message_ids_to_delete = self._policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
if not message_ids_to_delete:
|
||||
logger.info("clean_messages (batch %s): no messages to delete, skip", stats["batches"])
|
||||
continue
|
||||
|
||||
stats["filtered_messages"] += len(message_ids_to_delete)
|
||||
|
||||
# Step 4: Batch delete messages and their relations
|
||||
if not self._dry_run:
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
# Delete related records first
|
||||
self._batch_delete_message_relations(session, message_ids_to_delete)
|
||||
|
||||
# Delete messages
|
||||
delete_stmt = delete(Message).where(Message.id.in_(message_ids_to_delete))
|
||||
delete_result = cast(CursorResult, session.execute(delete_stmt))
|
||||
messages_deleted = delete_result.rowcount
|
||||
session.commit()
|
||||
|
||||
stats["total_deleted"] += messages_deleted
|
||||
|
||||
logger.info(
|
||||
"clean_messages (batch %s): processed %s messages, deleted %s messages",
|
||||
stats["batches"],
|
||||
len(messages),
|
||||
messages_deleted,
|
||||
)
|
||||
else:
|
||||
# Log random sample of message IDs that would be deleted (up to 10)
|
||||
sample_size = min(10, len(message_ids_to_delete))
|
||||
sampled_ids = random.sample(list(message_ids_to_delete), sample_size)
|
||||
|
||||
logger.info(
|
||||
"clean_messages (batch %s, dry_run): would delete %s messages, sampling %s ids:",
|
||||
stats["batches"],
|
||||
len(message_ids_to_delete),
|
||||
sample_size,
|
||||
)
|
||||
for msg_id in sampled_ids:
|
||||
logger.info("clean_messages (batch %s, dry_run) sample: message_id=%s", stats["batches"], msg_id)
|
||||
|
||||
logger.info(
|
||||
"clean_messages completed: total batches: %s, total messages: %s, filtered messages: %s, total deleted: %s",
|
||||
stats["batches"],
|
||||
stats["total_messages"],
|
||||
stats["filtered_messages"],
|
||||
stats["total_deleted"],
|
||||
)
|
||||
|
||||
return stats
|
||||
|
||||
@staticmethod
|
||||
def _batch_delete_message_relations(session: Session, message_ids: Sequence[str]) -> None:
|
||||
"""
|
||||
Batch delete all related records for given message IDs.
|
||||
|
||||
Args:
|
||||
session: Database session
|
||||
message_ids: List of message IDs to delete relations for
|
||||
"""
|
||||
if not message_ids:
|
||||
return
|
||||
|
||||
# Delete all related records in batch
|
||||
session.execute(delete(MessageFeedback).where(MessageFeedback.message_id.in_(message_ids)))
|
||||
|
||||
session.execute(delete(MessageAnnotation).where(MessageAnnotation.message_id.in_(message_ids)))
|
||||
|
||||
session.execute(delete(MessageChain).where(MessageChain.message_id.in_(message_ids)))
|
||||
|
||||
session.execute(delete(MessageAgentThought).where(MessageAgentThought.message_id.in_(message_ids)))
|
||||
|
||||
session.execute(delete(MessageFile).where(MessageFile.message_id.in_(message_ids)))
|
||||
|
||||
session.execute(delete(SavedMessage).where(SavedMessage.message_id.in_(message_ids)))
|
||||
|
||||
session.execute(delete(AppAnnotationHitHistory).where(AppAnnotationHitHistory.message_id.in_(message_ids)))
|
||||
|
||||
session.execute(delete(DatasetRetrieverResource).where(DatasetRetrieverResource.message_id.in_(message_ids)))
|
||||
File diff suppressed because it is too large
Load Diff
627
api/tests/unit_tests/services/test_messages_clean_service.py
Normal file
627
api/tests/unit_tests/services/test_messages_clean_service.py
Normal file
@ -0,0 +1,627 @@
|
||||
import datetime
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from enums.cloud_plan import CloudPlan
|
||||
from services.retention.conversation.messages_clean_policy import (
|
||||
BillingDisabledPolicy,
|
||||
BillingSandboxPolicy,
|
||||
SimpleMessage,
|
||||
create_message_clean_policy,
|
||||
)
|
||||
from services.retention.conversation.messages_clean_service import MessagesCleanService
|
||||
|
||||
|
||||
def make_simple_message(msg_id: str, app_id: str) -> SimpleMessage:
|
||||
"""Helper to create a SimpleMessage with a fixed created_at timestamp."""
|
||||
return SimpleMessage(id=msg_id, app_id=app_id, created_at=datetime.datetime(2024, 1, 1))
|
||||
|
||||
|
||||
def make_plan_provider(tenant_plans: dict) -> MagicMock:
|
||||
"""Helper to create a mock plan_provider that returns the given tenant_plans."""
|
||||
provider = MagicMock()
|
||||
provider.return_value = tenant_plans
|
||||
return provider
|
||||
|
||||
|
||||
class TestBillingSandboxPolicyFilterMessageIds:
|
||||
"""Unit tests for BillingSandboxPolicy.filter_message_ids method."""
|
||||
|
||||
# Fixed timestamp for deterministic tests
|
||||
CURRENT_TIMESTAMP = 1000000
|
||||
GRACEFUL_PERIOD_DAYS = 8
|
||||
GRACEFUL_PERIOD_SECONDS = GRACEFUL_PERIOD_DAYS * 24 * 60 * 60
|
||||
|
||||
def test_missing_tenant_mapping_excluded(self):
|
||||
"""Test that messages with missing app-to-tenant mapping are excluded."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
]
|
||||
app_to_tenant = {} # No mapping
|
||||
tenant_plans = {"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=self.CURRENT_TIMESTAMP,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert
|
||||
assert list(result) == []
|
||||
|
||||
def test_missing_tenant_plan_excluded(self):
|
||||
"""Test that messages with missing tenant plan are excluded (safe default)."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2"}
|
||||
tenant_plans = {} # No plans
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=self.CURRENT_TIMESTAMP,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert
|
||||
assert list(result) == []
|
||||
|
||||
def test_non_sandbox_plan_excluded(self):
|
||||
"""Test that messages from non-sandbox plans (PROFESSIONAL/TEAM) are excluded."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
make_simple_message("msg3", "app3"),
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2", "app3": "tenant3"}
|
||||
tenant_plans = {
|
||||
"tenant1": {"plan": CloudPlan.PROFESSIONAL, "expiration_date": -1},
|
||||
"tenant2": {"plan": CloudPlan.TEAM, "expiration_date": -1},
|
||||
"tenant3": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}, # Only this one
|
||||
}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=self.CURRENT_TIMESTAMP,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - only msg3 (sandbox tenant) should be included
|
||||
assert set(result) == {"msg3"}
|
||||
|
||||
def test_whitelist_skip(self):
|
||||
"""Test that whitelisted tenants are excluded even if sandbox + expired."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"), # Whitelisted - excluded
|
||||
make_simple_message("msg2", "app2"), # Not whitelisted - included
|
||||
make_simple_message("msg3", "app3"), # Whitelisted - excluded
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2", "app3": "tenant3"}
|
||||
tenant_plans = {
|
||||
"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1},
|
||||
"tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": -1},
|
||||
"tenant3": {"plan": CloudPlan.SANDBOX, "expiration_date": -1},
|
||||
}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
tenant_whitelist = ["tenant1", "tenant3"]
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
tenant_whitelist=tenant_whitelist,
|
||||
current_timestamp=self.CURRENT_TIMESTAMP,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - only msg2 should be included
|
||||
assert set(result) == {"msg2"}
|
||||
|
||||
def test_no_previous_subscription_included(self):
|
||||
"""Test that messages with expiration_date=-1 (no previous subscription) are included."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2"}
|
||||
tenant_plans = {
|
||||
"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1},
|
||||
"tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": -1},
|
||||
}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=self.CURRENT_TIMESTAMP,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - all messages should be included
|
||||
assert set(result) == {"msg1", "msg2"}
|
||||
|
||||
def test_within_grace_period_excluded(self):
|
||||
"""Test that messages within grace period are excluded."""
|
||||
# Arrange
|
||||
now = self.CURRENT_TIMESTAMP
|
||||
expired_1_day_ago = now - (1 * 24 * 60 * 60)
|
||||
expired_5_days_ago = now - (5 * 24 * 60 * 60)
|
||||
expired_7_days_ago = now - (7 * 24 * 60 * 60)
|
||||
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
make_simple_message("msg3", "app3"),
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2", "app3": "tenant3"}
|
||||
tenant_plans = {
|
||||
"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_1_day_ago},
|
||||
"tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_5_days_ago},
|
||||
"tenant3": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_7_days_ago},
|
||||
}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS, # 8 days
|
||||
current_timestamp=now,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - all within 8-day grace period, none should be included
|
||||
assert list(result) == []
|
||||
|
||||
def test_exactly_at_boundary_excluded(self):
|
||||
"""Test that messages exactly at grace period boundary are excluded (code uses >)."""
|
||||
# Arrange
|
||||
now = self.CURRENT_TIMESTAMP
|
||||
expired_exactly_8_days_ago = now - self.GRACEFUL_PERIOD_SECONDS # Exactly at boundary
|
||||
|
||||
messages = [make_simple_message("msg1", "app1")]
|
||||
app_to_tenant = {"app1": "tenant1"}
|
||||
tenant_plans = {
|
||||
"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_exactly_8_days_ago},
|
||||
}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=now,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - exactly at boundary (==) should be excluded (code uses >)
|
||||
assert list(result) == []
|
||||
|
||||
def test_beyond_grace_period_included(self):
|
||||
"""Test that messages beyond grace period are included."""
|
||||
# Arrange
|
||||
now = self.CURRENT_TIMESTAMP
|
||||
expired_9_days_ago = now - (9 * 24 * 60 * 60) # Just beyond 8-day grace
|
||||
expired_30_days_ago = now - (30 * 24 * 60 * 60) # Well beyond
|
||||
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2"}
|
||||
tenant_plans = {
|
||||
"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_9_days_ago},
|
||||
"tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_30_days_ago},
|
||||
}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=now,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - both beyond grace period, should be included
|
||||
assert set(result) == {"msg1", "msg2"}
|
||||
|
||||
def test_empty_messages_returns_empty(self):
|
||||
"""Test that empty messages returns empty list."""
|
||||
# Arrange
|
||||
messages: list[SimpleMessage] = []
|
||||
app_to_tenant = {"app1": "tenant1"}
|
||||
plan_provider = make_plan_provider({"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}})
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=self.CURRENT_TIMESTAMP,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert
|
||||
assert list(result) == []
|
||||
|
||||
def test_plan_provider_called_with_correct_tenant_ids(self):
|
||||
"""Test that plan_provider is called with correct tenant_ids."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
make_simple_message("msg3", "app3"),
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2", "app3": "tenant1"} # tenant1 appears twice
|
||||
plan_provider = make_plan_provider({})
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=self.CURRENT_TIMESTAMP,
|
||||
)
|
||||
|
||||
# Act
|
||||
policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - plan_provider should be called once with unique tenant_ids
|
||||
plan_provider.assert_called_once()
|
||||
called_tenant_ids = set(plan_provider.call_args[0][0])
|
||||
assert called_tenant_ids == {"tenant1", "tenant2"}
|
||||
|
||||
def test_complex_mixed_scenario(self):
|
||||
"""Test complex scenario with mixed plans, expirations, whitelist, and missing mappings."""
|
||||
# Arrange
|
||||
now = self.CURRENT_TIMESTAMP
|
||||
sandbox_expired_old = now - (15 * 24 * 60 * 60) # Beyond grace
|
||||
sandbox_expired_recent = now - (3 * 24 * 60 * 60) # Within grace
|
||||
future_expiration = now + (30 * 24 * 60 * 60)
|
||||
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"), # Sandbox, no subscription - included
|
||||
make_simple_message("msg2", "app2"), # Sandbox, expired old - included
|
||||
make_simple_message("msg3", "app3"), # Sandbox, within grace - excluded
|
||||
make_simple_message("msg4", "app4"), # Team plan, active - excluded
|
||||
make_simple_message("msg5", "app5"), # No tenant mapping - excluded
|
||||
make_simple_message("msg6", "app6"), # No plan info - excluded
|
||||
make_simple_message("msg7", "app7"), # Sandbox, expired old, whitelisted - excluded
|
||||
]
|
||||
app_to_tenant = {
|
||||
"app1": "tenant1",
|
||||
"app2": "tenant2",
|
||||
"app3": "tenant3",
|
||||
"app4": "tenant4",
|
||||
"app6": "tenant6", # Has mapping but no plan
|
||||
"app7": "tenant7",
|
||||
# app5 has no mapping
|
||||
}
|
||||
tenant_plans = {
|
||||
"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1},
|
||||
"tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": sandbox_expired_old},
|
||||
"tenant3": {"plan": CloudPlan.SANDBOX, "expiration_date": sandbox_expired_recent},
|
||||
"tenant4": {"plan": CloudPlan.TEAM, "expiration_date": future_expiration},
|
||||
"tenant7": {"plan": CloudPlan.SANDBOX, "expiration_date": sandbox_expired_old},
|
||||
# tenant6 has no plan
|
||||
}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
tenant_whitelist = ["tenant7"]
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
tenant_whitelist=tenant_whitelist,
|
||||
current_timestamp=now,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - only msg1 and msg2 should be included
|
||||
assert set(result) == {"msg1", "msg2"}
|
||||
|
||||
|
||||
class TestBillingDisabledPolicyFilterMessageIds:
|
||||
"""Unit tests for BillingDisabledPolicy.filter_message_ids method."""
|
||||
|
||||
def test_returns_all_message_ids(self):
|
||||
"""Test that all message IDs are returned (order-preserving)."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
make_simple_message("msg3", "app3"),
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2"}
|
||||
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - all message IDs returned in order
|
||||
assert list(result) == ["msg1", "msg2", "msg3"]
|
||||
|
||||
def test_ignores_app_to_tenant(self):
|
||||
"""Test that app_to_tenant mapping is ignored."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
]
|
||||
app_to_tenant: dict[str, str] = {} # Empty - should be ignored
|
||||
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - all message IDs still returned
|
||||
assert list(result) == ["msg1", "msg2"]
|
||||
|
||||
def test_empty_messages_returns_empty(self):
|
||||
"""Test that empty messages returns empty list."""
|
||||
# Arrange
|
||||
messages: list[SimpleMessage] = []
|
||||
app_to_tenant = {"app1": "tenant1"}
|
||||
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert
|
||||
assert list(result) == []
|
||||
|
||||
|
||||
class TestCreateMessageCleanPolicy:
|
||||
"""Unit tests for create_message_clean_policy factory function."""
|
||||
|
||||
@patch("services.retention.conversation.messages_clean_policy.dify_config")
|
||||
def test_billing_disabled_returns_billing_disabled_policy(self, mock_config):
|
||||
"""Test that BILLING_ENABLED=False returns BillingDisabledPolicy."""
|
||||
# Arrange
|
||||
mock_config.BILLING_ENABLED = False
|
||||
|
||||
# Act
|
||||
policy = create_message_clean_policy(graceful_period_days=21)
|
||||
|
||||
# Assert
|
||||
assert isinstance(policy, BillingDisabledPolicy)
|
||||
|
||||
@patch("services.retention.conversation.messages_clean_policy.BillingService")
|
||||
@patch("services.retention.conversation.messages_clean_policy.dify_config")
|
||||
def test_billing_enabled_policy_has_correct_internals(self, mock_config, mock_billing_service):
|
||||
"""Test that BillingSandboxPolicy is created with correct internal values."""
|
||||
# Arrange
|
||||
mock_config.BILLING_ENABLED = True
|
||||
whitelist = ["tenant1", "tenant2"]
|
||||
mock_billing_service.get_expired_subscription_cleanup_whitelist.return_value = whitelist
|
||||
mock_plan_provider = MagicMock()
|
||||
mock_billing_service.get_plan_bulk_with_cache = mock_plan_provider
|
||||
|
||||
# Act
|
||||
policy = create_message_clean_policy(graceful_period_days=14, current_timestamp=1234567)
|
||||
|
||||
# Assert
|
||||
mock_billing_service.get_expired_subscription_cleanup_whitelist.assert_called_once()
|
||||
assert isinstance(policy, BillingSandboxPolicy)
|
||||
assert policy._graceful_period_days == 14
|
||||
assert list(policy._tenant_whitelist) == whitelist
|
||||
assert policy._plan_provider == mock_plan_provider
|
||||
assert policy._current_timestamp == 1234567
|
||||
|
||||
|
||||
class TestMessagesCleanServiceFromTimeRange:
|
||||
"""Unit tests for MessagesCleanService.from_time_range factory method."""
|
||||
|
||||
def test_start_from_end_before_raises_value_error(self):
|
||||
"""Test that start_from == end_before raises ValueError."""
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Arrange
|
||||
same_time = datetime.datetime(2024, 1, 1, 12, 0, 0)
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="start_from .* must be less than end_before"):
|
||||
MessagesCleanService.from_time_range(
|
||||
policy=policy,
|
||||
start_from=same_time,
|
||||
end_before=same_time,
|
||||
)
|
||||
|
||||
# Arrange
|
||||
start_from = datetime.datetime(2024, 12, 31)
|
||||
end_before = datetime.datetime(2024, 1, 1)
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="start_from .* must be less than end_before"):
|
||||
MessagesCleanService.from_time_range(
|
||||
policy=policy,
|
||||
start_from=start_from,
|
||||
end_before=end_before,
|
||||
)
|
||||
|
||||
def test_batch_size_raises_value_error(self):
|
||||
"""Test that batch_size=0 raises ValueError."""
|
||||
# Arrange
|
||||
start_from = datetime.datetime(2024, 1, 1)
|
||||
end_before = datetime.datetime(2024, 2, 1)
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="batch_size .* must be greater than 0"):
|
||||
MessagesCleanService.from_time_range(
|
||||
policy=policy,
|
||||
start_from=start_from,
|
||||
end_before=end_before,
|
||||
batch_size=0,
|
||||
)
|
||||
|
||||
start_from = datetime.datetime(2024, 1, 1)
|
||||
end_before = datetime.datetime(2024, 2, 1)
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="batch_size .* must be greater than 0"):
|
||||
MessagesCleanService.from_time_range(
|
||||
policy=policy,
|
||||
start_from=start_from,
|
||||
end_before=end_before,
|
||||
batch_size=-100,
|
||||
)
|
||||
|
||||
def test_valid_params_creates_instance(self):
|
||||
"""Test that valid parameters create a correctly configured instance."""
|
||||
# Arrange
|
||||
start_from = datetime.datetime(2024, 1, 1, 0, 0, 0)
|
||||
end_before = datetime.datetime(2024, 12, 31, 23, 59, 59)
|
||||
policy = BillingDisabledPolicy()
|
||||
batch_size = 500
|
||||
dry_run = True
|
||||
|
||||
# Act
|
||||
service = MessagesCleanService.from_time_range(
|
||||
policy=policy,
|
||||
start_from=start_from,
|
||||
end_before=end_before,
|
||||
batch_size=batch_size,
|
||||
dry_run=dry_run,
|
||||
)
|
||||
|
||||
# Assert
|
||||
assert isinstance(service, MessagesCleanService)
|
||||
assert service._policy is policy
|
||||
assert service._start_from == start_from
|
||||
assert service._end_before == end_before
|
||||
assert service._batch_size == batch_size
|
||||
assert service._dry_run == dry_run
|
||||
|
||||
def test_default_params(self):
|
||||
"""Test that default parameters are applied correctly."""
|
||||
# Arrange
|
||||
start_from = datetime.datetime(2024, 1, 1)
|
||||
end_before = datetime.datetime(2024, 2, 1)
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act
|
||||
service = MessagesCleanService.from_time_range(
|
||||
policy=policy,
|
||||
start_from=start_from,
|
||||
end_before=end_before,
|
||||
)
|
||||
|
||||
# Assert
|
||||
assert service._batch_size == 1000 # default
|
||||
assert service._dry_run is False # default
|
||||
|
||||
|
||||
class TestMessagesCleanServiceFromDays:
|
||||
"""Unit tests for MessagesCleanService.from_days factory method."""
|
||||
|
||||
def test_days_raises_value_error(self):
|
||||
"""Test that days < 0 raises ValueError."""
|
||||
# Arrange
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="days .* must be greater than or equal to 0"):
|
||||
MessagesCleanService.from_days(policy=policy, days=-1)
|
||||
|
||||
# Act
|
||||
with patch("services.retention.conversation.messages_clean_service.datetime") as mock_datetime:
|
||||
fixed_now = datetime.datetime(2024, 6, 15, 14, 0, 0)
|
||||
mock_datetime.datetime.now.return_value = fixed_now
|
||||
mock_datetime.timedelta = datetime.timedelta
|
||||
|
||||
service = MessagesCleanService.from_days(policy=policy, days=0)
|
||||
|
||||
# Assert
|
||||
assert service._end_before == fixed_now
|
||||
|
||||
def test_batch_size_raises_value_error(self):
|
||||
"""Test that batch_size=0 raises ValueError."""
|
||||
# Arrange
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="batch_size .* must be greater than 0"):
|
||||
MessagesCleanService.from_days(policy=policy, days=30, batch_size=0)
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="batch_size .* must be greater than 0"):
|
||||
MessagesCleanService.from_days(policy=policy, days=30, batch_size=-500)
|
||||
|
||||
def test_valid_params_creates_instance(self):
|
||||
"""Test that valid parameters create a correctly configured instance."""
|
||||
# Arrange
|
||||
policy = BillingDisabledPolicy()
|
||||
days = 90
|
||||
batch_size = 500
|
||||
dry_run = True
|
||||
|
||||
# Act
|
||||
with patch("services.retention.conversation.messages_clean_service.datetime") as mock_datetime:
|
||||
fixed_now = datetime.datetime(2024, 6, 15, 10, 30, 0)
|
||||
mock_datetime.datetime.now.return_value = fixed_now
|
||||
mock_datetime.timedelta = datetime.timedelta
|
||||
|
||||
service = MessagesCleanService.from_days(
|
||||
policy=policy,
|
||||
days=days,
|
||||
batch_size=batch_size,
|
||||
dry_run=dry_run,
|
||||
)
|
||||
|
||||
# Assert
|
||||
expected_end_before = fixed_now - datetime.timedelta(days=days)
|
||||
assert isinstance(service, MessagesCleanService)
|
||||
assert service._policy is policy
|
||||
assert service._start_from is None
|
||||
assert service._end_before == expected_end_before
|
||||
assert service._batch_size == batch_size
|
||||
assert service._dry_run == dry_run
|
||||
|
||||
def test_default_params(self):
|
||||
"""Test that default parameters are applied correctly."""
|
||||
# Arrange
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act
|
||||
with patch("services.retention.conversation.messages_clean_service.datetime") as mock_datetime:
|
||||
fixed_now = datetime.datetime(2024, 6, 15, 10, 30, 0)
|
||||
mock_datetime.datetime.now.return_value = fixed_now
|
||||
mock_datetime.timedelta = datetime.timedelta
|
||||
|
||||
service = MessagesCleanService.from_days(policy=policy)
|
||||
|
||||
# Assert
|
||||
expected_end_before = fixed_now - datetime.timedelta(days=30) # default days=30
|
||||
assert service._end_before == expected_end_before
|
||||
assert service._batch_size == 1000 # default
|
||||
assert service._dry_run is False # default
|
||||
2
api/uv.lock
generated
2
api/uv.lock
generated
@ -1368,7 +1368,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "dify-api"
|
||||
version = "1.11.3"
|
||||
version = "1.11.4"
|
||||
source = { virtual = "." }
|
||||
dependencies = [
|
||||
{ name = "aliyun-log-python-sdk" },
|
||||
|
||||
@ -21,7 +21,7 @@ services:
|
||||
|
||||
# API service
|
||||
api:
|
||||
image: langgenius/dify-api:1.11.3
|
||||
image: langgenius/dify-api:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@ -63,7 +63,7 @@ services:
|
||||
# worker service
|
||||
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
|
||||
worker:
|
||||
image: langgenius/dify-api:1.11.3
|
||||
image: langgenius/dify-api:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@ -102,7 +102,7 @@ services:
|
||||
# worker_beat service
|
||||
# Celery beat for scheduling periodic tasks.
|
||||
worker_beat:
|
||||
image: langgenius/dify-api:1.11.3
|
||||
image: langgenius/dify-api:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@ -132,7 +132,7 @@ services:
|
||||
|
||||
# Frontend web application.
|
||||
web:
|
||||
image: langgenius/dify-web:1.11.3
|
||||
image: langgenius/dify-web:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
|
||||
|
||||
@ -705,7 +705,7 @@ services:
|
||||
|
||||
# API service
|
||||
api:
|
||||
image: langgenius/dify-api:1.11.3
|
||||
image: langgenius/dify-api:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@ -747,7 +747,7 @@ services:
|
||||
# worker service
|
||||
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
|
||||
worker:
|
||||
image: langgenius/dify-api:1.11.3
|
||||
image: langgenius/dify-api:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@ -786,7 +786,7 @@ services:
|
||||
# worker_beat service
|
||||
# Celery beat for scheduling periodic tasks.
|
||||
worker_beat:
|
||||
image: langgenius/dify-api:1.11.3
|
||||
image: langgenius/dify-api:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@ -816,7 +816,7 @@ services:
|
||||
|
||||
# Frontend web application.
|
||||
web:
|
||||
image: langgenius/dify-web:1.11.3
|
||||
image: langgenius/dify-web:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
|
||||
|
||||
@ -1 +1 @@
|
||||
22.21.1
|
||||
24
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
# base image
|
||||
FROM node:22.21.1-alpine3.23 AS base
|
||||
FROM node:24-alpine AS base
|
||||
LABEL maintainer="takatost@gmail.com"
|
||||
|
||||
# if you located in China, you can use aliyun mirror to speed up
|
||||
|
||||
@ -8,8 +8,8 @@ This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next
|
||||
|
||||
Before starting the web frontend service, please make sure the following environment is ready.
|
||||
|
||||
- [Node.js](https://nodejs.org) >= v22.11.x
|
||||
- [pnpm](https://pnpm.io) v10.x
|
||||
- [Node.js](https://nodejs.org)
|
||||
- [pnpm](https://pnpm.io)
|
||||
|
||||
> [!TIP]
|
||||
> It is recommended to install and enable Corepack to manage package manager versions automatically:
|
||||
|
||||
56
web/__mocks__/zustand.ts
Normal file
56
web/__mocks__/zustand.ts
Normal file
@ -0,0 +1,56 @@
|
||||
import type * as ZustandExportedTypes from 'zustand'
|
||||
import { act } from '@testing-library/react'
|
||||
|
||||
export * from 'zustand'
|
||||
|
||||
const { create: actualCreate, createStore: actualCreateStore }
|
||||
// eslint-disable-next-line antfu/no-top-level-await
|
||||
= await vi.importActual<typeof ZustandExportedTypes>('zustand')
|
||||
|
||||
export const storeResetFns = new Set<() => void>()
|
||||
|
||||
const createUncurried = <T>(
|
||||
stateCreator: ZustandExportedTypes.StateCreator<T>,
|
||||
) => {
|
||||
const store = actualCreate(stateCreator)
|
||||
const initialState = store.getInitialState()
|
||||
storeResetFns.add(() => {
|
||||
store.setState(initialState, true)
|
||||
})
|
||||
return store
|
||||
}
|
||||
|
||||
export const create = (<T>(
|
||||
stateCreator: ZustandExportedTypes.StateCreator<T>,
|
||||
) => {
|
||||
return typeof stateCreator === 'function'
|
||||
? createUncurried(stateCreator)
|
||||
: createUncurried
|
||||
}) as typeof ZustandExportedTypes.create
|
||||
|
||||
const createStoreUncurried = <T>(
|
||||
stateCreator: ZustandExportedTypes.StateCreator<T>,
|
||||
) => {
|
||||
const store = actualCreateStore(stateCreator)
|
||||
const initialState = store.getInitialState()
|
||||
storeResetFns.add(() => {
|
||||
store.setState(initialState, true)
|
||||
})
|
||||
return store
|
||||
}
|
||||
|
||||
export const createStore = (<T>(
|
||||
stateCreator: ZustandExportedTypes.StateCreator<T>,
|
||||
) => {
|
||||
return typeof stateCreator === 'function'
|
||||
? createStoreUncurried(stateCreator)
|
||||
: createStoreUncurried
|
||||
}) as typeof ZustandExportedTypes.createStore
|
||||
|
||||
afterEach(() => {
|
||||
act(() => {
|
||||
storeResetFns.forEach((resetFn) => {
|
||||
resetFn()
|
||||
})
|
||||
})
|
||||
})
|
||||
@ -65,15 +65,17 @@ const CardView: FC<ICardViewProps> = ({ appId, isInPanel, className }) => {
|
||||
<div className="text-xs text-text-secondary">
|
||||
{t('overview.disableTooltip.triggerMode', { ns: 'appOverview', feature: featureName })}
|
||||
</div>
|
||||
<div
|
||||
className="cursor-pointer text-xs font-medium text-text-accent hover:underline"
|
||||
<a
|
||||
href={triggerDocUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="block cursor-pointer text-xs font-medium text-text-accent hover:underline"
|
||||
onClick={(event) => {
|
||||
event.stopPropagation()
|
||||
window.open(triggerDocUrl, '_blank')
|
||||
}}
|
||||
>
|
||||
{t('overview.appInfo.enableTooltip.learnMore', { ns: 'appOverview' })}
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
), [t, triggerDocUrl])
|
||||
|
||||
|
||||
@ -3,9 +3,7 @@ import type { App } from '@/types/app'
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import userEvent from '@testing-library/user-event'
|
||||
import useAccessControlStore from '@/context/access-control-store'
|
||||
import { useGlobalPublicStore } from '@/context/global-public-context'
|
||||
import { AccessMode, SubjectType } from '@/models/access-control'
|
||||
import { defaultSystemFeatures } from '@/types/feature'
|
||||
import Toast from '../../base/toast'
|
||||
import AccessControlDialog from './access-control-dialog'
|
||||
import AccessControlItem from './access-control-item'
|
||||
@ -105,22 +103,6 @@ const memberSubject: Subject = {
|
||||
accountData: baseMember,
|
||||
} as Subject
|
||||
|
||||
const resetAccessControlStore = () => {
|
||||
useAccessControlStore.setState({
|
||||
appId: '',
|
||||
specificGroups: [],
|
||||
specificMembers: [],
|
||||
currentMenu: AccessMode.SPECIFIC_GROUPS_MEMBERS,
|
||||
selectedGroupsForBreadcrumb: [],
|
||||
})
|
||||
}
|
||||
|
||||
const resetGlobalStore = () => {
|
||||
useGlobalPublicStore.setState({
|
||||
systemFeatures: defaultSystemFeatures,
|
||||
})
|
||||
}
|
||||
|
||||
beforeAll(() => {
|
||||
class MockIntersectionObserver {
|
||||
observe = vi.fn(() => undefined)
|
||||
@ -132,9 +114,6 @@ beforeAll(() => {
|
||||
})
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
resetAccessControlStore()
|
||||
resetGlobalStore()
|
||||
mockMutateAsync.mockResolvedValue(undefined)
|
||||
mockUseUpdateAccessMode.mockReturnValue({
|
||||
isPending: false,
|
||||
|
||||
@ -21,7 +21,6 @@ import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/
|
||||
import FileUploadSetting from '@/app/components/workflow/nodes/_base/components/file-upload-setting'
|
||||
import { CodeLanguage } from '@/app/components/workflow/nodes/code/types'
|
||||
import { ChangeType, InputVarType, SupportUploadFileTypes } from '@/app/components/workflow/types'
|
||||
import { DEFAULT_VALUE_MAX_LEN } from '@/config'
|
||||
import ConfigContext from '@/context/debug-configuration'
|
||||
import { AppModeEnum, TransferMethod } from '@/types/app'
|
||||
import { checkKeys, getNewVarInWorkflow, replaceSpaceWithUnderscoreInVarNameInput } from '@/utils/var'
|
||||
@ -198,8 +197,6 @@ const ConfigModal: FC<IConfigModalProps> = ({
|
||||
if (type === InputVarType.multiFiles)
|
||||
draft.max_length = DEFAULT_FILE_UPLOAD_SETTING.max_length
|
||||
}
|
||||
if (type === InputVarType.paragraph)
|
||||
draft.max_length = DEFAULT_VALUE_MAX_LEN
|
||||
})
|
||||
setTempPayload(newPayload)
|
||||
}, [tempPayload])
|
||||
|
||||
@ -15,7 +15,6 @@ import Confirm from '@/app/components/base/confirm'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import { InputVarType } from '@/app/components/workflow/types'
|
||||
import { DEFAULT_VALUE_MAX_LEN } from '@/config'
|
||||
import ConfigContext from '@/context/debug-configuration'
|
||||
import { useEventEmitterContextContext } from '@/context/event-emitter'
|
||||
import { useModalContext } from '@/context/modal-context'
|
||||
@ -58,8 +57,6 @@ const buildPromptVariableFromInput = (payload: InputVar): PromptVariable => {
|
||||
key: variable,
|
||||
name: label as string,
|
||||
}
|
||||
if (payload.type === InputVarType.textInput)
|
||||
nextItem.max_length = nextItem.max_length || DEFAULT_VALUE_MAX_LEN
|
||||
|
||||
if (payload.type !== InputVarType.select)
|
||||
delete nextItem.options
|
||||
|
||||
@ -189,6 +189,7 @@ const SelectDataSet: FC<ISelectDataSetProps> = ({
|
||||
}
|
||||
</div>
|
||||
))}
|
||||
{isFetchingNextPage && <Loading />}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
@ -7,7 +7,6 @@ import Input from '@/app/components/base/input'
|
||||
import Select from '@/app/components/base/select'
|
||||
import Textarea from '@/app/components/base/textarea'
|
||||
import BoolInput from '@/app/components/workflow/nodes/_base/components/before-run-form/bool-input'
|
||||
import { DEFAULT_VALUE_MAX_LEN } from '@/config'
|
||||
import ConfigContext from '@/context/debug-configuration'
|
||||
import { cn } from '@/utils/classnames'
|
||||
|
||||
@ -88,7 +87,7 @@ const ChatUserInput = ({
|
||||
onChange={(e) => { handleInputValueChange(key, e.target.value) }}
|
||||
placeholder={name}
|
||||
autoFocus={index === 0}
|
||||
maxLength={max_length || DEFAULT_VALUE_MAX_LEN}
|
||||
maxLength={max_length}
|
||||
/>
|
||||
)}
|
||||
{type === 'paragraph' && (
|
||||
@ -115,7 +114,7 @@ const ChatUserInput = ({
|
||||
onChange={(e) => { handleInputValueChange(key, e.target.value) }}
|
||||
placeholder={name}
|
||||
autoFocus={index === 0}
|
||||
maxLength={max_length || DEFAULT_VALUE_MAX_LEN}
|
||||
maxLength={max_length}
|
||||
/>
|
||||
)}
|
||||
{type === 'checkbox' && (
|
||||
|
||||
@ -20,7 +20,6 @@ import Select from '@/app/components/base/select'
|
||||
import Textarea from '@/app/components/base/textarea'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import BoolInput from '@/app/components/workflow/nodes/_base/components/before-run-form/bool-input'
|
||||
import { DEFAULT_VALUE_MAX_LEN } from '@/config'
|
||||
import ConfigContext from '@/context/debug-configuration'
|
||||
import { AppModeEnum, ModelModeType } from '@/types/app'
|
||||
import { cn } from '@/utils/classnames'
|
||||
@ -142,7 +141,7 @@ const PromptValuePanel: FC<IPromptValuePanelProps> = ({
|
||||
onChange={(e) => { handleInputValueChange(key, e.target.value) }}
|
||||
placeholder={name}
|
||||
autoFocus={index === 0}
|
||||
maxLength={max_length || DEFAULT_VALUE_MAX_LEN}
|
||||
maxLength={max_length}
|
||||
/>
|
||||
)}
|
||||
{type === 'paragraph' && (
|
||||
@ -170,7 +169,7 @@ const PromptValuePanel: FC<IPromptValuePanelProps> = ({
|
||||
onChange={(e) => { handleInputValueChange(key, e.target.value) }}
|
||||
placeholder={name}
|
||||
autoFocus={index === 0}
|
||||
maxLength={max_length || DEFAULT_VALUE_MAX_LEN}
|
||||
maxLength={max_length}
|
||||
/>
|
||||
)}
|
||||
{type === 'checkbox' && (
|
||||
|
||||
@ -21,15 +21,15 @@ export const AppCardSkeleton = React.memo(({ count = 6 }: AppCardSkeletonProps)
|
||||
>
|
||||
<SkeletonContainer className="h-full">
|
||||
<SkeletonRow>
|
||||
<SkeletonRectangle className="h-10 w-10 rounded-lg" />
|
||||
<SkeletonRectangle className="h-10 w-10 animate-pulse rounded-lg" />
|
||||
<div className="flex flex-1 flex-col gap-1">
|
||||
<SkeletonRectangle className="h-4 w-2/3" />
|
||||
<SkeletonRectangle className="h-3 w-1/3" />
|
||||
<SkeletonRectangle className="h-4 w-2/3 animate-pulse" />
|
||||
<SkeletonRectangle className="h-3 w-1/3 animate-pulse" />
|
||||
</div>
|
||||
</SkeletonRow>
|
||||
<div className="mt-4 flex flex-col gap-2">
|
||||
<SkeletonRectangle className="h-3 w-full" />
|
||||
<SkeletonRectangle className="h-3 w-4/5" />
|
||||
<SkeletonRectangle className="h-3 w-full animate-pulse" />
|
||||
<SkeletonRectangle className="h-3 w-4/5 animate-pulse" />
|
||||
</div>
|
||||
</SkeletonContainer>
|
||||
</div>
|
||||
|
||||
@ -12,7 +12,6 @@ import { useDebounceFn } from 'ahooks'
|
||||
import dynamic from 'next/dynamic'
|
||||
import {
|
||||
useRouter,
|
||||
useSearchParams,
|
||||
} from 'next/navigation'
|
||||
import { parseAsString, useQueryState } from 'nuqs'
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
@ -29,7 +28,6 @@ import { CheckModal } from '@/hooks/use-pay'
|
||||
import { useInfiniteAppList } from '@/service/use-apps'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { isServer } from '@/utils/client'
|
||||
import AppCard from './app-card'
|
||||
import { AppCardSkeleton } from './app-card-skeleton'
|
||||
import Empty from './empty'
|
||||
@ -59,7 +57,6 @@ const List = () => {
|
||||
const { t } = useTranslation()
|
||||
const { systemFeatures } = useGlobalPublicStore()
|
||||
const router = useRouter()
|
||||
const searchParams = useSearchParams()
|
||||
const { isCurrentWorkspaceEditor, isCurrentWorkspaceDatasetOperator, isLoadingCurrentWorkspace } = useAppContext()
|
||||
const showTagManagementModal = useTagStore(s => s.showTagManagementModal)
|
||||
const [activeTab, setActiveTab] = useQueryState(
|
||||
@ -67,33 +64,6 @@ const List = () => {
|
||||
parseAsString.withDefault('all').withOptions({ history: 'push' }),
|
||||
)
|
||||
|
||||
// valid tabs for apps list; anything else should fallback to 'all'
|
||||
|
||||
// 1) Normalize legacy/incorrect query params like ?mode=discover -> ?category=all
|
||||
useEffect(() => {
|
||||
// avoid running on server
|
||||
if (isServer)
|
||||
return
|
||||
const mode = searchParams.get('mode')
|
||||
if (!mode)
|
||||
return
|
||||
const url = new URL(window.location.href)
|
||||
url.searchParams.delete('mode')
|
||||
if (validTabs.has(mode)) {
|
||||
// migrate to category key
|
||||
url.searchParams.set('category', mode)
|
||||
}
|
||||
else {
|
||||
url.searchParams.set('category', 'all')
|
||||
}
|
||||
router.replace(url.pathname + url.search)
|
||||
}, [router, searchParams])
|
||||
|
||||
// 2) If category has an invalid value (e.g., 'discover'), reset to 'all'
|
||||
useEffect(() => {
|
||||
if (!validTabs.has(activeTab))
|
||||
setActiveTab('all')
|
||||
}, [activeTab, setActiveTab])
|
||||
const { query: { tagIDs = [], keywords = '', isCreatedByMe: queryIsCreatedByMe = false }, setQuery } = useAppsQueryState()
|
||||
const [isCreatedByMe, setIsCreatedByMe] = useState(queryIsCreatedByMe)
|
||||
const [tagFilterValue, setTagFilterValue] = useState<string[]>(tagIDs)
|
||||
@ -278,6 +248,9 @@ const List = () => {
|
||||
// No apps - show empty state
|
||||
return <Empty />
|
||||
})()}
|
||||
{isFetchingNextPage && (
|
||||
<AppCardSkeleton count={3} />
|
||||
)}
|
||||
</div>
|
||||
|
||||
{isCurrentWorkspaceEditor && (
|
||||
|
||||
@ -0,0 +1,6 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M12 7.33337V2.66671H4.00002V13.3334H8.00002C8.36821 13.3334 8.66669 13.6319 8.66669 14C8.66669 14.3682 8.36821 14.6667 8.00002 14.6667H3.33335C2.96516 14.6667 2.66669 14.3682 2.66669 14V2.00004C2.66669 1.63185 2.96516 1.33337 3.33335 1.33337H12.6667C13.0349 1.33337 13.3334 1.63185 13.3334 2.00004V7.33337C13.3334 7.70156 13.0349 8.00004 12.6667 8.00004C12.2985 8.00004 12 7.70156 12 7.33337Z" fill="#354052"/>
|
||||
<path d="M10 4.00004C10.3682 4.00004 10.6667 4.29852 10.6667 4.66671C10.6667 5.0349 10.3682 5.33337 10 5.33337H6.00002C5.63183 5.33337 5.33335 5.0349 5.33335 4.66671C5.33335 4.29852 5.63183 4.00004 6.00002 4.00004H10Z" fill="#354052"/>
|
||||
<path d="M8.00002 6.66671C8.36821 6.66671 8.66669 6.96518 8.66669 7.33337C8.66669 7.70156 8.36821 8.00004 8.00002 8.00004H6.00002C5.63183 8.00004 5.33335 7.70156 5.33335 7.33337C5.33335 6.96518 5.63183 6.66671 6.00002 6.66671H8.00002Z" fill="#354052"/>
|
||||
<path d="M12.827 10.7902L12.3624 9.58224C12.3048 9.43231 12.1607 9.33337 12 9.33337C11.8394 9.33337 11.6953 9.43231 11.6376 9.58224L11.173 10.7902C11.1054 10.9662 10.9662 11.1054 10.7902 11.173L9.58222 11.6376C9.43229 11.6953 9.33335 11.8394 9.33335 12C9.33335 12.1607 9.43229 12.3048 9.58222 12.3624L10.7902 12.827C10.9662 12.8947 11.1054 13.0338 11.173 13.2099L11.6376 14.4178C11.6953 14.5678 11.8394 14.6667 12 14.6667C12.1607 14.6667 12.3048 14.5678 12.3624 14.4178L12.827 13.2099C12.8947 13.0338 13.0338 12.8947 13.2099 12.827L14.4178 12.3624C14.5678 12.3048 14.6667 12.1607 14.6667 12C14.6667 11.8394 14.5678 11.6953 14.4178 11.6376L13.2099 11.173C13.0338 11.1054 12.8947 10.9662 12.827 10.7902Z" fill="#354052"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.7 KiB |
@ -0,0 +1,53 @@
|
||||
{
|
||||
"icon": {
|
||||
"type": "element",
|
||||
"isRootNode": true,
|
||||
"name": "svg",
|
||||
"attributes": {
|
||||
"width": "16",
|
||||
"height": "16",
|
||||
"viewBox": "0 0 16 16",
|
||||
"fill": "none",
|
||||
"xmlns": "http://www.w3.org/2000/svg"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M12 7.33337V2.66671H4.00002V13.3334H8.00002C8.36821 13.3334 8.66669 13.6319 8.66669 14C8.66669 14.3682 8.36821 14.6667 8.00002 14.6667H3.33335C2.96516 14.6667 2.66669 14.3682 2.66669 14V2.00004C2.66669 1.63185 2.96516 1.33337 3.33335 1.33337H12.6667C13.0349 1.33337 13.3334 1.63185 13.3334 2.00004V7.33337C13.3334 7.70156 13.0349 8.00004 12.6667 8.00004C12.2985 8.00004 12 7.70156 12 7.33337Z",
|
||||
"fill": "currentColor"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M10 4.00004C10.3682 4.00004 10.6667 4.29852 10.6667 4.66671C10.6667 5.0349 10.3682 5.33337 10 5.33337H6.00002C5.63183 5.33337 5.33335 5.0349 5.33335 4.66671C5.33335 4.29852 5.63183 4.00004 6.00002 4.00004H10Z",
|
||||
"fill": "currentColor"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M8.00002 6.66671C8.36821 6.66671 8.66669 6.96518 8.66669 7.33337C8.66669 7.70156 8.36821 8.00004 8.00002 8.00004H6.00002C5.63183 8.00004 5.33335 7.70156 5.33335 7.33337C5.33335 6.96518 5.63183 6.66671 6.00002 6.66671H8.00002Z",
|
||||
"fill": "currentColor"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M12.827 10.7902L12.3624 9.58224C12.3048 9.43231 12.1607 9.33337 12 9.33337C11.8394 9.33337 11.6953 9.43231 11.6376 9.58224L11.173 10.7902C11.1054 10.9662 10.9662 11.1054 10.7902 11.173L9.58222 11.6376C9.43229 11.6953 9.33335 11.8394 9.33335 12C9.33335 12.1607 9.43229 12.3048 9.58222 12.3624L10.7902 12.827C10.9662 12.8947 11.1054 13.0338 11.173 13.2099L11.6376 14.4178C11.6953 14.5678 11.8394 14.6667 12 14.6667C12.1607 14.6667 12.3048 14.5678 12.3624 14.4178L12.827 13.2099C12.8947 13.0338 13.0338 12.8947 13.2099 12.827L14.4178 12.3624C14.5678 12.3048 14.6667 12.1607 14.6667 12C14.6667 11.8394 14.5678 11.6953 14.4178 11.6376L13.2099 11.173C13.0338 11.1054 12.8947 10.9662 12.827 10.7902Z",
|
||||
"fill": "currentColor"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"name": "SearchLinesSparkle"
|
||||
}
|
||||
@ -0,0 +1,20 @@
|
||||
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
|
||||
|
||||
import type { IconData } from '@/app/components/base/icons/IconBase'
|
||||
import * as React from 'react'
|
||||
import IconBase from '@/app/components/base/icons/IconBase'
|
||||
import data from './SearchLinesSparkle.json'
|
||||
|
||||
const Icon = (
|
||||
{
|
||||
ref,
|
||||
...props
|
||||
}: React.SVGProps<SVGSVGElement> & {
|
||||
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
|
||||
},
|
||||
) => <IconBase {...props} ref={ref} data={data as IconData} />
|
||||
|
||||
Icon.displayName = 'SearchLinesSparkle'
|
||||
|
||||
export default Icon
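The generated wrapper is consumed like any other SVG icon component; a minimal usage sketch mirroring how SearchLinesSparkle is used later in this diff (the surrounding component and class names are illustrative):

import { SearchLinesSparkle } from '@/app/components/base/icons/src/vender/knowledge'

// Sketch: size and color are driven purely by CSS classes on the generated component.
const SummaryHint = () => (
  <span className="flex items-center gap-x-0.5">
    <SearchLinesSparkle className="h-4 w-4 text-text-tertiary" />
    <span>Summary</span>
  </span>
)

export default SummaryHint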
|
||||
@ -11,5 +11,6 @@ export { default as HighQuality } from './HighQuality'
|
||||
export { default as HybridSearch } from './HybridSearch'
|
||||
export { default as ParentChildChunk } from './ParentChildChunk'
|
||||
export { default as QuestionAndAnswer } from './QuestionAndAnswer'
|
||||
export { default as SearchLinesSparkle } from './SearchLinesSparkle'
|
||||
export { default as SearchMenu } from './SearchMenu'
|
||||
export { default as VectorSearch } from './VectorSearch'
|
||||
|
||||
@ -1,21 +1,25 @@
|
||||
'use client'
|
||||
|
||||
import * as React from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
|
||||
import { cn } from '@/utils/classnames'
|
||||
import './style.css'
|
||||
|
||||
type ILoadingProps = {
|
||||
type?: 'area' | 'app'
|
||||
className?: string
|
||||
}
|
||||
const Loading = (
|
||||
{ type = 'area' }: ILoadingProps = { type: 'area' },
|
||||
) => {
|
||||
|
||||
const Loading = (props?: ILoadingProps) => {
|
||||
const { type = 'area', className } = props || {}
|
||||
const { t } = useTranslation()
|
||||
|
||||
return (
|
||||
<div
|
||||
className={`flex w-full items-center justify-center ${type === 'app' ? 'h-full' : ''}`}
|
||||
className={cn(
|
||||
'flex w-full items-center justify-center',
|
||||
type === 'app' && 'h-full',
|
||||
className,
|
||||
)}
|
||||
role="status"
|
||||
aria-live="polite"
|
||||
aria-label={t('loading', { ns: 'appApi' })}
|
||||
@ -37,4 +41,5 @@ const Loading = (
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default Loading
|
||||
|
||||
@ -20,6 +20,7 @@ const SearchInput: FC<SearchInputProps> = ({
|
||||
white,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
const inputRef = useRef<HTMLInputElement>(null)
|
||||
const [focus, setFocus] = useState<boolean>(false)
|
||||
const isComposing = useRef<boolean>(false)
|
||||
const [compositionValue, setCompositionValue] = useState<string>('')
|
||||
@ -36,6 +37,7 @@ const SearchInput: FC<SearchInputProps> = ({
|
||||
<RiSearchLine className="h-4 w-4 text-components-input-text-placeholder" aria-hidden="true" />
|
||||
</div>
|
||||
<input
|
||||
ref={inputRef}
|
||||
type="text"
|
||||
name="query"
|
||||
className={cn(
|
||||
@ -65,14 +67,17 @@ const SearchInput: FC<SearchInputProps> = ({
|
||||
autoComplete="off"
|
||||
/>
|
||||
{value && (
|
||||
<div
|
||||
className="group/clear flex h-4 w-4 shrink-0 cursor-pointer items-center justify-center"
|
||||
<button
|
||||
type="button"
|
||||
aria-label={t('operation.clear', { ns: 'common' })}
|
||||
className="group/clear flex h-4 w-4 shrink-0 cursor-pointer items-center justify-center border-none bg-transparent p-0"
|
||||
onClick={() => {
|
||||
onChange('')
|
||||
inputRef.current?.focus()
|
||||
}}
|
||||
>
|
||||
<RiCloseCircleFill className="h-4 w-4 text-text-quaternary group-hover/clear:text-text-tertiary" />
|
||||
</div>
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
|
||||
@ -27,7 +27,9 @@ vi.mock('@/service/billing', () => ({
|
||||
|
||||
vi.mock('@/service/client', () => ({
|
||||
consoleClient: {
|
||||
billingUrl: vi.fn(),
|
||||
billing: {
|
||||
invoices: vi.fn(),
|
||||
},
|
||||
},
|
||||
}))
|
||||
|
||||
@ -43,7 +45,7 @@ vi.mock('../../assets', () => ({
|
||||
|
||||
const mockUseAppContext = useAppContext as Mock
|
||||
const mockUseAsyncWindowOpen = useAsyncWindowOpen as Mock
|
||||
const mockBillingUrl = consoleClient.billingUrl as Mock
|
||||
const mockBillingInvoices = consoleClient.billing.invoices as Mock
|
||||
const mockFetchSubscriptionUrls = fetchSubscriptionUrls as Mock
|
||||
const mockToastNotify = Toast.notify as Mock
|
||||
|
||||
@ -75,7 +77,7 @@ beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
mockUseAppContext.mockReturnValue({ isCurrentWorkspaceManager: true })
|
||||
mockUseAsyncWindowOpen.mockReturnValue(vi.fn(async open => await open()))
|
||||
mockBillingUrl.mockResolvedValue({ url: 'https://billing.example' })
|
||||
mockBillingInvoices.mockResolvedValue({ url: 'https://billing.example' })
|
||||
mockFetchSubscriptionUrls.mockResolvedValue({ url: 'https://subscription.example' })
|
||||
assignedHref = ''
|
||||
})
|
||||
@ -149,7 +151,7 @@ describe('CloudPlanItem', () => {
|
||||
type: 'error',
|
||||
message: 'billing.buyPermissionDeniedTip',
|
||||
}))
|
||||
expect(mockBillingUrl).not.toHaveBeenCalled()
|
||||
expect(mockBillingInvoices).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should open billing portal when upgrading current paid plan', async () => {
|
||||
@ -168,7 +170,7 @@ describe('CloudPlanItem', () => {
|
||||
fireEvent.click(screen.getByRole('button', { name: 'billing.plansCommon.currentPlan' }))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockBillingUrl).toHaveBeenCalledTimes(1)
|
||||
expect(mockBillingInvoices).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
expect(openWindow).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
@ -77,7 +77,7 @@ const CloudPlanItem: FC<CloudPlanItemProps> = ({
|
||||
try {
|
||||
if (isCurrentPaidPlan) {
|
||||
await openAsyncWindow(async () => {
|
||||
const res = await consoleClient.billingUrl()
|
||||
const res = await consoleClient.billing.invoices()
|
||||
if (res.url)
|
||||
return res.url
|
||||
throw new Error('Failed to open billing page')
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
'use client'
|
||||
|
||||
import type { FC } from 'react'
|
||||
import type { PreProcessingRule } from '@/models/datasets'
|
||||
import type { PreProcessingRule, SummaryIndexSetting as SummaryIndexSettingType } from '@/models/datasets'
|
||||
import {
|
||||
RiAlertFill,
|
||||
RiSearchEyeLine,
|
||||
@ -12,6 +12,7 @@ import Button from '@/app/components/base/button'
|
||||
import Checkbox from '@/app/components/base/checkbox'
|
||||
import Divider from '@/app/components/base/divider'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import SummaryIndexSetting from '@/app/components/datasets/settings/summary-index-setting'
|
||||
import { IS_CE_EDITION } from '@/config'
|
||||
import { ChunkingMode } from '@/models/datasets'
|
||||
import SettingCog from '../../assets/setting-gear-mod.svg'
|
||||
@ -52,6 +53,8 @@ type GeneralChunkingOptionsProps = {
|
||||
onReset: () => void
|
||||
// Locale
|
||||
locale: string
|
||||
summaryIndexSetting?: SummaryIndexSettingType
|
||||
onSummaryIndexSettingChange?: (payload: SummaryIndexSettingType) => void
|
||||
}
|
||||
|
||||
export const GeneralChunkingOptions: FC<GeneralChunkingOptionsProps> = ({
|
||||
@ -74,6 +77,8 @@ export const GeneralChunkingOptions: FC<GeneralChunkingOptionsProps> = ({
|
||||
onPreview,
|
||||
onReset,
|
||||
locale,
|
||||
summaryIndexSetting,
|
||||
onSummaryIndexSettingChange,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
|
||||
@ -146,6 +151,13 @@ export const GeneralChunkingOptions: FC<GeneralChunkingOptionsProps> = ({
|
||||
</label>
|
||||
</div>
|
||||
))}
|
||||
<div className="mt-3">
|
||||
<SummaryIndexSetting
|
||||
entry="create-document"
|
||||
summaryIndexSetting={summaryIndexSetting}
|
||||
onSummaryIndexSettingChange={onSummaryIndexSettingChange}
|
||||
/>
|
||||
</div>
|
||||
{IS_CE_EDITION && (
|
||||
<>
|
||||
<Divider type="horizontal" className="my-4 bg-divider-subtle" />
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
|
||||
import type { FC } from 'react'
|
||||
import type { ParentChildConfig } from '../hooks'
|
||||
import type { ParentMode, PreProcessingRule } from '@/models/datasets'
|
||||
import type { ParentMode, PreProcessingRule, SummaryIndexSetting as SummaryIndexSettingType } from '@/models/datasets'
|
||||
import { RiSearchEyeLine } from '@remixicon/react'
|
||||
import Image from 'next/image'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
@ -11,6 +11,7 @@ import Checkbox from '@/app/components/base/checkbox'
|
||||
import Divider from '@/app/components/base/divider'
|
||||
import { ParentChildChunk } from '@/app/components/base/icons/src/vender/knowledge'
|
||||
import RadioCard from '@/app/components/base/radio-card'
|
||||
import SummaryIndexSetting from '@/app/components/datasets/settings/summary-index-setting'
|
||||
import { ChunkingMode } from '@/models/datasets'
|
||||
import FileList from '../../assets/file-list-3-fill.svg'
|
||||
import Note from '../../assets/note-mod.svg'
|
||||
@ -31,6 +32,8 @@ type ParentChildOptionsProps = {
|
||||
// State
|
||||
parentChildConfig: ParentChildConfig
|
||||
rules: PreProcessingRule[]
|
||||
summaryIndexSetting?: SummaryIndexSettingType
|
||||
onSummaryIndexSettingChange?: (payload: SummaryIndexSettingType) => void
|
||||
currentDocForm: ChunkingMode
|
||||
// Flags
|
||||
isActive: boolean
|
||||
@ -51,6 +54,7 @@ type ParentChildOptionsProps = {
|
||||
export const ParentChildOptions: FC<ParentChildOptionsProps> = ({
|
||||
parentChildConfig,
|
||||
rules,
|
||||
summaryIndexSetting,
|
||||
currentDocForm: _currentDocForm,
|
||||
isActive,
|
||||
isInUpload,
|
||||
@ -62,6 +66,7 @@ export const ParentChildOptions: FC<ParentChildOptionsProps> = ({
|
||||
onChildDelimiterChange,
|
||||
onChildMaxLengthChange,
|
||||
onRuleToggle,
|
||||
onSummaryIndexSettingChange,
|
||||
onPreview,
|
||||
onReset,
|
||||
}) => {
|
||||
@ -183,6 +188,13 @@ export const ParentChildOptions: FC<ParentChildOptionsProps> = ({
|
||||
</label>
|
||||
</div>
|
||||
))}
|
||||
<div className="mt-3">
|
||||
<SummaryIndexSetting
|
||||
entry="create-document"
|
||||
summaryIndexSetting={summaryIndexSetting}
|
||||
onSummaryIndexSettingChange={onSummaryIndexSettingChange}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@ -9,6 +9,7 @@ import type {
|
||||
CustomFile,
|
||||
FullDocumentDetail,
|
||||
ProcessRule,
|
||||
SummaryIndexSetting as SummaryIndexSettingType,
|
||||
} from '@/models/datasets'
|
||||
import type { RetrievalConfig, RETRIEVE_METHOD } from '@/types/app'
|
||||
import { useCallback } from 'react'
|
||||
@ -141,6 +142,7 @@ export const useDocumentCreation = (options: UseDocumentCreationOptions) => {
|
||||
retrievalConfig: RetrievalConfig,
|
||||
embeddingModel: DefaultModel,
|
||||
indexingTechnique: string,
|
||||
summaryIndexSetting?: SummaryIndexSettingType,
|
||||
): CreateDocumentReq | null => {
|
||||
if (isSetting) {
|
||||
return {
|
||||
@ -148,6 +150,7 @@ export const useDocumentCreation = (options: UseDocumentCreationOptions) => {
|
||||
doc_form: currentDocForm,
|
||||
doc_language: docLanguage,
|
||||
process_rule: processRule,
|
||||
summary_index_setting: summaryIndexSetting,
|
||||
retrieval_model: retrievalConfig,
|
||||
embedding_model: embeddingModel.model,
|
||||
embedding_model_provider: embeddingModel.provider,
|
||||
@ -164,6 +167,7 @@ export const useDocumentCreation = (options: UseDocumentCreationOptions) => {
|
||||
},
|
||||
indexing_technique: indexingTechnique,
|
||||
process_rule: processRule,
|
||||
summary_index_setting: summaryIndexSetting,
|
||||
doc_form: currentDocForm,
|
||||
doc_language: docLanguage,
|
||||
retrieval_model: retrievalConfig,
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
import type { DefaultModel } from '@/app/components/header/account-setting/model-provider-page/declarations'
|
||||
import type { SummaryIndexSetting as SummaryIndexSettingType } from '@/models/datasets'
|
||||
import type { RetrievalConfig } from '@/types/app'
|
||||
import { useEffect, useMemo, useState } from 'react'
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { checkShowMultiModalTip } from '@/app/components/datasets/settings/utils'
|
||||
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
|
||||
import { useDefaultModel, useModelList, useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
|
||||
@ -29,6 +30,7 @@ export type UseIndexingConfigOptions = {
|
||||
initialRetrievalConfig?: RetrievalConfig
|
||||
isAPIKeySet: boolean
|
||||
hasSetIndexType: boolean
|
||||
initialSummaryIndexSetting?: SummaryIndexSettingType
|
||||
}
|
||||
|
||||
export const useIndexingConfig = (options: UseIndexingConfigOptions) => {
|
||||
@ -38,6 +40,7 @@ export const useIndexingConfig = (options: UseIndexingConfigOptions) => {
|
||||
initialRetrievalConfig,
|
||||
isAPIKeySet,
|
||||
hasSetIndexType,
|
||||
initialSummaryIndexSetting,
|
||||
} = options
|
||||
|
||||
// Rerank model
|
||||
@ -115,6 +118,17 @@ export const useIndexingConfig = (options: UseIndexingConfigOptions) => {
|
||||
// Get effective indexing technique
|
||||
const getIndexingTechnique = () => initialIndexType || indexType
|
||||
|
||||
// Summary index setting
|
||||
const [summaryIndexSetting, setSummaryIndexSetting] = useState<SummaryIndexSettingType | undefined>(
|
||||
initialSummaryIndexSetting ?? undefined,
|
||||
)
|
||||
const summaryIndexSettingRef = useRef<SummaryIndexSettingType | undefined>(initialSummaryIndexSetting ?? undefined)
|
||||
|
||||
const handleSummaryIndexSettingChange = useCallback((payload: SummaryIndexSettingType) => {
|
||||
setSummaryIndexSetting({ ...summaryIndexSettingRef.current, ...payload })
|
||||
summaryIndexSettingRef.current = { ...summaryIndexSettingRef.current, ...payload }
|
||||
}, [])
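handleSummaryIndexSettingChange mirrors each update into a ref so the memoized callback can merge partial payloads without re-reading (and depending on) the latest state. A generic sketch of that pattern using only React (the hook name is illustrative):

import { useCallback, useRef, useState } from 'react'

// Sketch: keep a ref in sync with state so a stable, dependency-free callback
// can merge partial updates without seeing a stale value.
function usePartialState<T extends object>(initial?: T) {
  const [value, setValue] = useState<T | undefined>(initial)
  const valueRef = useRef<T | undefined>(initial)

  const update = useCallback((payload: Partial<T>) => {
    const next = { ...valueRef.current, ...payload } as T
    valueRef.current = next
    setValue(next)
  }, [])

  return [value, update] as const
}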
|
||||
|
||||
return {
|
||||
// Index type
|
||||
indexType,
|
||||
@ -137,6 +151,10 @@ export const useIndexingConfig = (options: UseIndexingConfigOptions) => {
|
||||
|
||||
// Computed
|
||||
showMultiModalTip,
|
||||
|
||||
// Summary index setting
|
||||
summaryIndexSetting,
|
||||
handleSummaryIndexSettingChange,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -72,6 +72,7 @@ const StepTwo: FC<StepTwoProps> = ({
|
||||
initialRetrievalConfig: currentDataset?.retrieval_model_dict,
|
||||
isAPIKeySet,
|
||||
hasSetIndexType,
|
||||
initialSummaryIndexSetting: currentDataset?.summary_index_setting,
|
||||
})
|
||||
const preview = usePreviewState({ dataSourceType, files, notionPages, websitePages, documentDetail, datasetId })
|
||||
const creation = useDocumentCreation({
|
||||
@ -156,7 +157,7 @@ const StepTwo: FC<StepTwoProps> = ({
|
||||
})
|
||||
if (!isValid)
|
||||
return
|
||||
const params = creation.buildCreationParams(currentDocForm, docLanguage, segmentation.getProcessRule(currentDocForm), indexing.retrievalConfig, indexing.embeddingModel, indexing.getIndexingTechnique())
|
||||
const params = creation.buildCreationParams(currentDocForm, docLanguage, segmentation.getProcessRule(currentDocForm), indexing.retrievalConfig, indexing.embeddingModel, indexing.getIndexingTechnique(), indexing.summaryIndexSetting)
|
||||
if (!params)
|
||||
return
|
||||
await creation.executeCreation(params, indexing.indexType, indexing.retrievalConfig)
|
||||
@ -217,6 +218,8 @@ const StepTwo: FC<StepTwoProps> = ({
|
||||
onPreview={updatePreview}
|
||||
onReset={segmentation.resetToDefaults}
|
||||
locale={locale}
|
||||
summaryIndexSetting={indexing.summaryIndexSetting}
|
||||
onSummaryIndexSettingChange={indexing.handleSummaryIndexSettingChange}
|
||||
/>
|
||||
)}
|
||||
{showParentChildOption && (
|
||||
@ -236,6 +239,8 @@ const StepTwo: FC<StepTwoProps> = ({
|
||||
onRuleToggle={segmentation.toggleRule}
|
||||
onPreview={updatePreview}
|
||||
onReset={segmentation.resetToDefaults}
|
||||
summaryIndexSetting={indexing.summaryIndexSetting}
|
||||
onSummaryIndexSettingChange={indexing.handleSummaryIndexSettingChange}
|
||||
/>
|
||||
)}
|
||||
<Divider className="my-5" />
|
||||
|
||||
@ -30,11 +30,12 @@ import { useDatasetDetailContextWithSelector as useDatasetDetailContext } from '
|
||||
import useTimestamp from '@/hooks/use-timestamp'
|
||||
import { ChunkingMode, DataSourceType, DocumentActionType } from '@/models/datasets'
|
||||
import { DatasourceType } from '@/models/pipeline'
|
||||
import { useDocumentArchive, useDocumentBatchRetryIndex, useDocumentDelete, useDocumentDisable, useDocumentEnable } from '@/service/knowledge/use-document'
|
||||
import { useDocumentArchive, useDocumentBatchRetryIndex, useDocumentDelete, useDocumentDisable, useDocumentEnable, useDocumentSummary } from '@/service/knowledge/use-document'
|
||||
import { asyncRunSafe } from '@/utils'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { formatNumber } from '@/utils/format'
|
||||
import BatchAction from '../detail/completed/common/batch-action'
|
||||
import SummaryStatus from '../detail/completed/common/summary-status'
|
||||
import StatusItem from '../status-item'
|
||||
import s from '../style.module.css'
|
||||
import Operations from './operations'
|
||||
@ -218,6 +219,7 @@ const DocumentList: FC<IDocumentListProps> = ({
|
||||
onSelectedIdChange(uniq([...selectedIds, ...localDocs.map(doc => doc.id)]))
|
||||
}, [isAllSelected, localDocs, onSelectedIdChange, selectedIds])
|
||||
const { mutateAsync: archiveDocument } = useDocumentArchive()
|
||||
const { mutateAsync: generateSummary } = useDocumentSummary()
|
||||
const { mutateAsync: enableDocument } = useDocumentEnable()
|
||||
const { mutateAsync: disableDocument } = useDocumentDisable()
|
||||
const { mutateAsync: deleteDocument } = useDocumentDelete()
|
||||
@ -230,6 +232,9 @@ const DocumentList: FC<IDocumentListProps> = ({
|
||||
case DocumentActionType.archive:
|
||||
opApi = archiveDocument
|
||||
break
|
||||
case DocumentActionType.summary:
|
||||
opApi = generateSummary
|
||||
break
|
||||
case DocumentActionType.enable:
|
||||
opApi = enableDocument
|
||||
break
|
||||
@ -409,6 +414,13 @@ const DocumentList: FC<IDocumentListProps> = ({
|
||||
>
|
||||
<span className="grow-1 truncate text-sm">{doc.name}</span>
|
||||
</Tooltip>
|
||||
{
|
||||
doc.summary_index_status && (
|
||||
<div className="ml-1 hidden shrink-0 group-hover:flex">
|
||||
<SummaryStatus status={doc.summary_index_status} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
<div className="hidden shrink-0 group-hover:ml-auto group-hover:flex">
|
||||
<Tooltip
|
||||
popupContent={t('list.table.rename', { ns: 'datasetDocuments' })}
|
||||
@ -461,6 +473,7 @@ const DocumentList: FC<IDocumentListProps> = ({
|
||||
className="absolute bottom-16 left-0 z-20"
|
||||
selectedIds={selectedIds}
|
||||
onArchive={handleAction(DocumentActionType.archive)}
|
||||
onBatchSummary={handleAction(DocumentActionType.summary)}
|
||||
onBatchEnable={handleAction(DocumentActionType.enable)}
|
||||
onBatchDisable={handleAction(DocumentActionType.disable)}
|
||||
onBatchDelete={handleAction(DocumentActionType.delete)}
|
||||
|
||||
@ -19,6 +19,7 @@ import { useTranslation } from 'react-i18next'
|
||||
import { useContext } from 'use-context-selector'
|
||||
import Confirm from '@/app/components/base/confirm'
|
||||
import Divider from '@/app/components/base/divider'
|
||||
import { SearchLinesSparkle } from '@/app/components/base/icons/src/vender/knowledge'
|
||||
import CustomPopover from '@/app/components/base/popover'
|
||||
import Switch from '@/app/components/base/switch'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
@ -31,6 +32,7 @@ import {
|
||||
useDocumentEnable,
|
||||
useDocumentPause,
|
||||
useDocumentResume,
|
||||
useDocumentSummary,
|
||||
useDocumentUnArchive,
|
||||
useSyncDocument,
|
||||
useSyncWebsite,
|
||||
@ -82,6 +84,7 @@ const Operations = ({
|
||||
const { mutateAsync: deleteDocument } = useDocumentDelete()
|
||||
const { mutateAsync: syncDocument } = useSyncDocument()
|
||||
const { mutateAsync: syncWebsite } = useSyncWebsite()
|
||||
const { mutateAsync: generateSummary } = useDocumentSummary()
|
||||
const { mutateAsync: pauseDocument } = useDocumentPause()
|
||||
const { mutateAsync: resumeDocument } = useDocumentResume()
|
||||
const isListScene = scene === 'list'
|
||||
@ -107,6 +110,9 @@ const Operations = ({
|
||||
else
|
||||
opApi = syncWebsite
|
||||
break
|
||||
case 'summary':
|
||||
opApi = generateSummary
|
||||
break
|
||||
case 'pause':
|
||||
opApi = pauseDocument
|
||||
break
|
||||
@ -220,6 +226,10 @@ const Operations = ({
|
||||
<span className={s.actionName}>{t('list.action.sync', { ns: 'datasetDocuments' })}</span>
|
||||
</div>
|
||||
)}
|
||||
<div className={s.actionItem} onClick={() => onOperate('summary')}>
|
||||
<SearchLinesSparkle className="h-4 w-4 text-text-tertiary" />
|
||||
<span className={s.actionName}>{t('list.action.summary', { ns: 'datasetDocuments' })}</span>
|
||||
</div>
|
||||
<Divider className="my-1" />
|
||||
</>
|
||||
)}
|
||||
|
||||
@ -6,6 +6,7 @@ import { useTranslation } from 'react-i18next'
|
||||
import Button from '@/app/components/base/button'
|
||||
import Confirm from '@/app/components/base/confirm'
|
||||
import Divider from '@/app/components/base/divider'
|
||||
import { SearchLinesSparkle } from '@/app/components/base/icons/src/vender/knowledge'
|
||||
import { cn } from '@/utils/classnames'
|
||||
|
||||
const i18nPrefix = 'batchAction'
|
||||
@ -15,6 +16,7 @@ type IBatchActionProps = {
|
||||
onBatchEnable: () => void
|
||||
onBatchDisable: () => void
|
||||
onBatchDelete: () => Promise<void>
|
||||
onBatchSummary?: () => void
|
||||
onArchive?: () => void
|
||||
onEditMetadata?: () => void
|
||||
onBatchReIndex?: () => void
|
||||
@ -26,6 +28,7 @@ const BatchAction: FC<IBatchActionProps> = ({
|
||||
selectedIds,
|
||||
onBatchEnable,
|
||||
onBatchDisable,
|
||||
onBatchSummary,
|
||||
onArchive,
|
||||
onBatchDelete,
|
||||
onEditMetadata,
|
||||
@ -82,7 +85,16 @@ const BatchAction: FC<IBatchActionProps> = ({
|
||||
<span className="px-0.5">{t('metadata.metadata', { ns: 'dataset' })}</span>
|
||||
</Button>
|
||||
)}
|
||||
|
||||
{onBatchSummary && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
className="gap-x-0.5 px-3"
|
||||
onClick={onBatchSummary}
|
||||
>
|
||||
<SearchLinesSparkle className="size-4" />
|
||||
<span className="px-0.5">{t('list.action.summary', { ns: 'datasetDocuments' })}</span>
|
||||
</Button>
|
||||
)}
|
||||
{onArchive && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
|
||||
@ -0,0 +1,26 @@
|
||||
import { memo } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { cn } from '@/utils/classnames'
|
||||
|
||||
type SummaryLabelProps = {
|
||||
summary?: string
|
||||
className?: string
|
||||
}
|
||||
const SummaryLabel = ({
|
||||
summary,
|
||||
className,
|
||||
}: SummaryLabelProps) => {
|
||||
const { t } = useTranslation()
|
||||
|
||||
return (
|
||||
<div className={cn('space-y-1', className)}>
|
||||
<div className="system-xs-medium-uppercase mt-2 flex items-center justify-between text-text-tertiary">
|
||||
{t('segment.summary', { ns: 'datasetDocuments' })}
|
||||
<div className="ml-2 h-px grow bg-divider-regular"></div>
|
||||
</div>
|
||||
<div className="body-xs-regular text-text-tertiary">{summary}</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default memo(SummaryLabel)
|
||||
@ -0,0 +1,47 @@
|
||||
import { memo, useMemo } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import Badge from '@/app/components/base/badge'
|
||||
import { SearchLinesSparkle } from '@/app/components/base/icons/src/vender/knowledge'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
|
||||
type SummaryStatusProps = {
|
||||
status: string
|
||||
}
|
||||
|
||||
const SummaryStatus = ({ status }: SummaryStatusProps) => {
|
||||
const { t } = useTranslation()
|
||||
|
||||
const tip = useMemo(() => {
|
||||
if (status === 'COMPLETED') {
|
||||
return t('list.summary.ready', { ns: 'datasetDocuments' })
|
||||
}
|
||||
if (status === 'GENERATING') {
|
||||
return t('list.summary.generatingSummary', { ns: 'datasetDocuments' })
|
||||
}
|
||||
return ''
|
||||
}, [status, t])
|
||||
|
||||
return (
|
||||
<Tooltip
|
||||
popupContent={tip}
|
||||
>
|
||||
{
|
||||
status === 'COMPLETED' && (
|
||||
<Badge>
|
||||
<SearchLinesSparkle className="h-3 w-3" />
|
||||
</Badge>
|
||||
)
|
||||
}
|
||||
{
|
||||
status === 'GENERATING' && (
|
||||
<Badge className="border-text-accent-secondary text-text-accent-secondary">
|
||||
<SearchLinesSparkle className="mr-0.5 h-3 w-3" />
|
||||
<span>{t('list.summary.generating', { ns: 'datasetDocuments' })}</span>
|
||||
</Badge>
|
||||
)
|
||||
}
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
export default memo(SummaryStatus)
|
||||
@ -0,0 +1,35 @@
|
||||
import { memo } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import Textarea from 'react-textarea-autosize'
|
||||
import { cn } from '@/utils/classnames'
|
||||
|
||||
type SummaryTextProps = {
|
||||
value?: string
|
||||
onChange?: (value: string) => void
|
||||
disabled?: boolean
|
||||
}
|
||||
const SummaryText = ({
|
||||
value,
|
||||
onChange,
|
||||
disabled,
|
||||
}: SummaryTextProps) => {
|
||||
const { t } = useTranslation()
|
||||
|
||||
return (
|
||||
<div className="space-y-1">
|
||||
<div className="system-xs-medium-uppercase text-text-tertiary">{t('segment.summary', { ns: 'datasetDocuments' })}</div>
|
||||
<Textarea
|
||||
className={cn(
|
||||
'body-sm-regular w-full resize-none bg-transparent leading-6 text-text-secondary outline-none',
|
||||
)}
|
||||
placeholder={t('segment.summaryPlaceholder', { ns: 'datasetDocuments' })}
|
||||
minRows={1}
|
||||
value={value ?? ''}
|
||||
onChange={e => onChange?.(e.target.value)}
|
||||
disabled={disabled}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default memo(SummaryText)
|
||||
@ -322,6 +322,7 @@ const Completed: FC<ICompletedProps> = ({
|
||||
answer: string,
|
||||
keywords: string[],
|
||||
attachments: FileEntity[],
|
||||
summary?: string,
|
||||
needRegenerate = false,
|
||||
) => {
|
||||
const params: SegmentUpdater = { content: '', attachment_ids: [] }
|
||||
@ -351,6 +352,9 @@ const Completed: FC<ICompletedProps> = ({
|
||||
params.attachment_ids = attachments.map(item => item.uploadedId!)
|
||||
}
|
||||
|
||||
if (summary)
|
||||
params.summary = summary
|
||||
|
||||
if (needRegenerate)
|
||||
params.regenerate_child_chunks = needRegenerate
|
||||
|
||||
@ -364,6 +368,7 @@ const Completed: FC<ICompletedProps> = ({
|
||||
if (seg.id === segmentId) {
|
||||
seg.answer = res.data.answer
|
||||
seg.content = res.data.content
|
||||
seg.summary = res.data.summary
|
||||
seg.sign_content = res.data.sign_content
|
||||
seg.keywords = res.data.keywords
|
||||
seg.attachments = res.data.attachments
|
||||
@ -442,6 +447,10 @@ const Completed: FC<ICompletedProps> = ({
|
||||
setFullScreen(!fullScreen)
|
||||
}, [fullScreen])
|
||||
|
||||
const toggleCollapsed = useCallback(() => {
|
||||
setIsCollapsed(prev => !prev)
|
||||
}, [])
|
||||
|
||||
const viewNewlyAddedChunk = useCallback(async () => {
|
||||
const totalPages = segmentListData?.total_pages || 0
|
||||
const total = segmentListData?.total || 0
|
||||
@ -578,15 +587,16 @@ const Completed: FC<ICompletedProps> = ({
|
||||
return selectedStatus ? 1 : 0
|
||||
}, [selectedStatus])
|
||||
|
||||
const contextValue = useMemo<SegmentListContextValue>(() => ({
|
||||
isCollapsed,
|
||||
fullScreen,
|
||||
toggleFullScreen,
|
||||
currSegment,
|
||||
currChildChunk,
|
||||
}), [isCollapsed, fullScreen, toggleFullScreen, currSegment, currChildChunk])
|
||||
|
||||
return (
|
||||
<SegmentListContext.Provider value={{
|
||||
isCollapsed,
|
||||
fullScreen,
|
||||
toggleFullScreen,
|
||||
currSegment,
|
||||
currChildChunk,
|
||||
}}
|
||||
>
|
||||
<SegmentListContext.Provider value={contextValue}>
|
||||
{/* Menu Bar */}
|
||||
{!isFullDocMode && (
|
||||
<div className={s.docSearchWrapper}>
|
||||
@ -618,7 +628,7 @@ const Completed: FC<ICompletedProps> = ({
|
||||
onClear={() => handleInputChange('')}
|
||||
/>
|
||||
<Divider type="vertical" className="mx-3 h-3.5" />
|
||||
<DisplayToggle isCollapsed={isCollapsed} toggleCollapsed={() => setIsCollapsed(!isCollapsed)} />
|
||||
<DisplayToggle isCollapsed={isCollapsed} toggleCollapsed={toggleCollapsed} />
|
||||
</div>
|
||||
)}
|
||||
{/* Segment list */}
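The hunk above swaps an inline object literal on the provider for a memoized contextValue plus a stable toggleCollapsed callback, so context consumers only re-render when the underlying values actually change. A generic sketch of the pattern (all names here are illustrative):

import type { ReactNode } from 'react'
import { createContext, useCallback, useMemo, useState } from 'react'

type PanelContextValue = {
  isCollapsed: boolean
  toggleCollapsed: () => void
}

const PanelContext = createContext<PanelContextValue | null>(null)

// Sketch: the provider value keeps a stable identity until its inputs change,
// so consumers are not re-rendered by unrelated parent renders.
const PanelProvider = ({ children }: { children: ReactNode }) => {
  const [isCollapsed, setIsCollapsed] = useState(false)
  const toggleCollapsed = useCallback(() => setIsCollapsed(prev => !prev), [])

  const contextValue = useMemo<PanelContextValue>(() => ({
    isCollapsed,
    toggleCollapsed,
  }), [isCollapsed, toggleCollapsed])

  return <PanelContext.Provider value={contextValue}>{children}</PanelContext.Provider>
}

export default PanelProvider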
|
||||
|
||||
@ -1,4 +1,5 @@
|
||||
import type { FC } from 'react'
|
||||
import type { SegmentListContextValue } from '..'
|
||||
import * as React from 'react'
|
||||
import { Markdown } from '@/app/components/base/markdown'
|
||||
import { cn } from '@/utils/classnames'
|
||||
@ -14,13 +15,15 @@ type ChunkContentProps = {
|
||||
className?: string
|
||||
}
|
||||
|
||||
const selectIsCollapsed = (s: SegmentListContextValue) => s.isCollapsed
|
||||
|
||||
const ChunkContent: FC<ChunkContentProps> = ({
|
||||
detail,
|
||||
isFullDocMode,
|
||||
className,
|
||||
}) => {
|
||||
const { answer, content, sign_content } = detail
|
||||
const isCollapsed = useSegmentListContext(s => s.isCollapsed)
|
||||
const isCollapsed = useSegmentListContext(selectIsCollapsed)
|
||||
|
||||
if (answer) {
|
||||
return (
|
||||
|
||||
@ -19,13 +19,14 @@ import { useDocumentContext } from '../../context'
|
||||
import ChildSegmentList from '../child-segment-list'
|
||||
import Dot from '../common/dot'
|
||||
import { SegmentIndexTag } from '../common/segment-index-tag'
|
||||
import SummaryLabel from '../common/summary-label'
|
||||
import Tag from '../common/tag'
|
||||
import ParentChunkCardSkeleton from '../skeleton/parent-chunk-card-skeleton'
|
||||
import ChunkContent from './chunk-content'
|
||||
|
||||
type ISegmentCardProps = {
|
||||
loading: boolean
|
||||
detail?: SegmentDetailModel & { document?: { name: string } }
|
||||
detail?: SegmentDetailModel & { document?: { name: string }, status?: string }
|
||||
onClick?: () => void
|
||||
onChangeSwitch?: (enabled: boolean, segId?: string) => Promise<void>
|
||||
onDelete?: (segId: string) => Promise<void>
|
||||
@ -43,7 +44,7 @@ type ISegmentCardProps = {
|
||||
}
|
||||
|
||||
const SegmentCard: FC<ISegmentCardProps> = ({
|
||||
detail = {},
|
||||
detail = { status: '' },
|
||||
onClick,
|
||||
onChangeSwitch,
|
||||
onDelete,
|
||||
@ -67,6 +68,7 @@ const SegmentCard: FC<ISegmentCardProps> = ({
|
||||
word_count,
|
||||
hit_count,
|
||||
answer,
|
||||
summary,
|
||||
keywords,
|
||||
child_chunks = [],
|
||||
created_at,
|
||||
@ -237,6 +239,11 @@ const SegmentCard: FC<ISegmentCardProps> = ({
|
||||
className={contentOpacity}
|
||||
/>
|
||||
{images.length > 0 && <ImageList images={images} size="md" className="py-1" />}
|
||||
{
|
||||
summary && (
|
||||
<SummaryLabel summary={summary} className="mt-2" />
|
||||
)
|
||||
}
|
||||
{isGeneralMode && (
|
||||
<div className={cn('flex flex-wrap items-center gap-2 py-1.5', contentOpacity)}>
|
||||
{keywords?.map(keyword => <Tag key={keyword} text={keyword} />)}
|
||||
|
||||
@ -25,6 +25,7 @@ import Dot from './common/dot'
|
||||
import Keywords from './common/keywords'
|
||||
import RegenerationModal from './common/regeneration-modal'
|
||||
import { SegmentIndexTag } from './common/segment-index-tag'
|
||||
import SummaryText from './common/summary-text'
|
||||
import { useSegmentListContext } from './index'
|
||||
|
||||
type ISegmentDetailProps = {
|
||||
@ -35,6 +36,7 @@ type ISegmentDetailProps = {
|
||||
a: string,
|
||||
k: string[],
|
||||
attachments: FileEntity[],
|
||||
summary?: string,
|
||||
needRegenerate?: boolean,
|
||||
) => void
|
||||
onCancel: () => void
|
||||
@ -57,6 +59,7 @@ const SegmentDetail: FC<ISegmentDetailProps> = ({
|
||||
const { t } = useTranslation()
|
||||
const [question, setQuestion] = useState(isEditMode ? segInfo?.content || '' : segInfo?.sign_content || '')
|
||||
const [answer, setAnswer] = useState(segInfo?.answer || '')
|
||||
const [summary, setSummary] = useState(segInfo?.summary || '')
|
||||
const [attachments, setAttachments] = useState<FileEntity[]>(() => {
|
||||
return segInfo?.attachments?.map(item => ({
|
||||
id: uuid4(),
|
||||
@ -91,8 +94,8 @@ const SegmentDetail: FC<ISegmentDetailProps> = ({
|
||||
}, [onCancel])
|
||||
|
||||
const handleSave = useCallback(() => {
|
||||
onUpdate(segInfo?.id || '', question, answer, keywords, attachments)
|
||||
}, [onUpdate, segInfo?.id, question, answer, keywords, attachments])
|
||||
onUpdate(segInfo?.id || '', question, answer, keywords, attachments, summary, false)
|
||||
}, [onUpdate, segInfo?.id, question, answer, keywords, attachments, summary])
|
||||
|
||||
const handleRegeneration = useCallback(() => {
|
||||
setShowRegenerationModal(true)
|
||||
@ -111,8 +114,8 @@ const SegmentDetail: FC<ISegmentDetailProps> = ({
|
||||
}, [onCancel, onModalStateChange])
|
||||
|
||||
const onConfirmRegeneration = useCallback(() => {
|
||||
onUpdate(segInfo?.id || '', question, answer, keywords, attachments, true)
|
||||
}, [onUpdate, segInfo?.id, question, answer, keywords, attachments])
|
||||
onUpdate(segInfo?.id || '', question, answer, keywords, attachments, summary, true)
|
||||
}, [onUpdate, segInfo?.id, question, answer, keywords, attachments, summary])
|
||||
|
||||
const onAttachmentsChange = useCallback((attachments: FileEntity[]) => {
|
||||
setAttachments(attachments)
|
||||
@ -197,6 +200,11 @@ const SegmentDetail: FC<ISegmentDetailProps> = ({
|
||||
value={attachments}
|
||||
onChange={onAttachmentsChange}
|
||||
/>
|
||||
<SummaryText
|
||||
value={summary}
|
||||
onChange={summary => setSummary(summary)}
|
||||
disabled={!isEditMode}
|
||||
/>
|
||||
{isECOIndexing && (
|
||||
<Keywords
|
||||
className="w-full"
|
||||
|
||||
@ -1 +1 @@
|
||||
export type OperationName = 'delete' | 'archive' | 'enable' | 'disable' | 'sync' | 'un_archive' | 'pause' | 'resume'
|
||||
export type OperationName = 'delete' | 'archive' | 'enable' | 'disable' | 'sync' | 'un_archive' | 'pause' | 'resume' | 'summary'
|
||||
|
||||
@ -7,6 +7,7 @@ import * as React from 'react'
|
||||
import { useMemo } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { Markdown } from '@/app/components/base/markdown'
|
||||
import SummaryLabel from '@/app/components/datasets/documents/detail/completed/common/summary-label'
|
||||
import Tag from '@/app/components/datasets/documents/detail/completed/common/tag'
|
||||
import { extensionToFileType } from '@/app/components/datasets/hit-testing/utils/extension-to-file-type'
|
||||
import { cn } from '@/utils/classnames'
|
||||
@ -25,7 +26,7 @@ const ResultItem = ({
|
||||
payload,
|
||||
}: ResultItemProps) => {
|
||||
const { t } = useTranslation()
|
||||
const { segment, score, child_chunks, files } = payload
|
||||
const { segment, score, child_chunks, files, summary } = payload
|
||||
const data = segment
|
||||
const { position, word_count, content, sign_content, keywords, document } = data
|
||||
const isParentChildRetrieval = !!(child_chunks && child_chunks.length > 0)
|
||||
@ -98,6 +99,9 @@ const ResultItem = ({
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
{summary && (
|
||||
<SummaryLabel summary={summary} className="mt-2" />
|
||||
)}
|
||||
</div>
|
||||
{/* Foot */}
|
||||
<ResultItemFooter docType={fileType} docTitle={document.name} showDetailModal={showDetailModal} />
|
||||
|
||||
@ -2,6 +2,7 @@
|
||||
|
||||
import { useEffect, useRef } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import Loading from '@/app/components/base/loading'
|
||||
import { useSelector as useAppContextWithSelector } from '@/context/app-context'
|
||||
import { useDatasetList, useInvalidDatasetList } from '@/service/knowledge/use-dataset'
|
||||
import DatasetCard from './dataset-card'
|
||||
@ -25,6 +26,7 @@ const Datasets = ({
|
||||
fetchNextPage,
|
||||
hasNextPage,
|
||||
isFetching,
|
||||
isFetchingNextPage,
|
||||
} = useDatasetList({
|
||||
initialPage: 1,
|
||||
tag_ids: tags,
|
||||
@ -60,6 +62,7 @@ const Datasets = ({
|
||||
{datasetList?.pages.map(({ data: datasets }) => datasets.map(dataset => (
|
||||
<DatasetCard key={dataset.id} dataset={dataset} onSuccess={invalidDatasetList} />),
|
||||
))}
|
||||
{isFetchingNextPage && <Loading />}
|
||||
<div ref={anchorRef} className="h-0" />
|
||||
</nav>
|
||||
</>
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
import type { AppIconSelection } from '@/app/components/base/app-icon-picker'
|
||||
import type { DefaultModel } from '@/app/components/header/account-setting/model-provider-page/declarations'
|
||||
import type { Member } from '@/models/common'
|
||||
import type { IconInfo } from '@/models/datasets'
|
||||
import type { IconInfo, SummaryIndexSetting as SummaryIndexSettingType } from '@/models/datasets'
|
||||
import type { AppIconType, RetrievalConfig } from '@/types/app'
|
||||
import { RiAlertFill } from '@remixicon/react'
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
@ -33,6 +33,7 @@ import RetrievalSettings from '../../external-knowledge-base/create/RetrievalSet
|
||||
import ChunkStructure from '../chunk-structure'
|
||||
import IndexMethod from '../index-method'
|
||||
import PermissionSelector from '../permission-selector'
|
||||
import SummaryIndexSetting from '../summary-index-setting'
|
||||
import { checkShowMultiModalTip } from '../utils'
|
||||
|
||||
const rowClass = 'flex gap-x-1'
|
||||
@ -76,6 +77,12 @@ const Form = () => {
|
||||
model: '',
|
||||
},
|
||||
)
|
||||
const [summaryIndexSetting, setSummaryIndexSetting] = useState(currentDataset?.summary_index_setting)
|
||||
const summaryIndexSettingRef = useRef(currentDataset?.summary_index_setting)
|
||||
const handleSummaryIndexSettingChange = useCallback((payload: SummaryIndexSettingType) => {
|
||||
setSummaryIndexSetting({ ...summaryIndexSettingRef.current, ...payload })
|
||||
summaryIndexSettingRef.current = { ...summaryIndexSettingRef.current, ...payload }
|
||||
}, [])
|
||||
const { data: rerankModelList } = useModelList(ModelTypeEnum.rerank)
|
||||
const { data: embeddingModelList } = useModelList(ModelTypeEnum.textEmbedding)
|
||||
const { data: membersData } = useMembers()
|
||||
@ -167,6 +174,7 @@ const Form = () => {
|
||||
},
|
||||
}),
|
||||
keyword_number: keywordNumber,
|
||||
summary_index_setting: summaryIndexSetting,
|
||||
},
|
||||
} as any
|
||||
if (permission === DatasetPermission.partialMembers) {
|
||||
@ -348,6 +356,23 @@ const Form = () => {
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
{
|
||||
indexMethod === IndexingType.QUALIFIED
|
||||
&& [ChunkingMode.text, ChunkingMode.parentChild].includes(currentDataset?.doc_form as ChunkingMode)
|
||||
&& (
|
||||
<>
|
||||
<Divider
|
||||
type="horizontal"
|
||||
className="my-1 h-px bg-divider-subtle"
|
||||
/>
|
||||
<SummaryIndexSetting
|
||||
entry="dataset-settings"
|
||||
summaryIndexSetting={summaryIndexSetting}
|
||||
onSummaryIndexSettingChange={handleSummaryIndexSettingChange}
|
||||
/>
|
||||
</>
|
||||
)
|
||||
}
|
||||
{/* Retrieval Method Config */}
|
||||
{currentDataset?.provider === 'external'
|
||||
? (
|
||||
|
||||
228 web/app/components/datasets/settings/summary-index-setting.tsx Normal file
@ -0,0 +1,228 @@
|
||||
import type { ChangeEvent } from 'react'
|
||||
import type { DefaultModel } from '@/app/components/header/account-setting/model-provider-page/declarations'
|
||||
import type { SummaryIndexSetting as SummaryIndexSettingType } from '@/models/datasets'
|
||||
import {
|
||||
memo,
|
||||
useCallback,
|
||||
useMemo,
|
||||
} from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import Switch from '@/app/components/base/switch'
|
||||
import Textarea from '@/app/components/base/textarea'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
|
||||
import { useModelList } from '@/app/components/header/account-setting/model-provider-page/hooks'
|
||||
import ModelSelector from '@/app/components/header/account-setting/model-provider-page/model-selector'
|
||||
|
||||
type SummaryIndexSettingProps = {
|
||||
entry?: 'knowledge-base' | 'dataset-settings' | 'create-document'
|
||||
summaryIndexSetting?: SummaryIndexSettingType
|
||||
onSummaryIndexSettingChange?: (payload: SummaryIndexSettingType) => void
|
||||
readonly?: boolean
|
||||
}
|
||||
const SummaryIndexSetting = ({
|
||||
entry = 'knowledge-base',
|
||||
summaryIndexSetting,
|
||||
onSummaryIndexSettingChange,
|
||||
readonly = false,
|
||||
}: SummaryIndexSettingProps) => {
|
||||
const { t } = useTranslation()
|
||||
const {
|
||||
data: textGenerationModelList,
|
||||
} = useModelList(ModelTypeEnum.textGeneration)
|
||||
const summaryIndexModelConfig = useMemo(() => {
|
||||
if (!summaryIndexSetting?.model_name || !summaryIndexSetting?.model_provider_name)
|
||||
return undefined
|
||||
|
||||
return {
|
||||
providerName: summaryIndexSetting?.model_provider_name,
|
||||
modelName: summaryIndexSetting?.model_name,
|
||||
}
|
||||
}, [summaryIndexSetting?.model_name, summaryIndexSetting?.model_provider_name])
|
||||
|
||||
const handleSummaryIndexEnableChange = useCallback((value: boolean) => {
|
||||
onSummaryIndexSettingChange?.({
|
||||
enable: value,
|
||||
})
|
||||
}, [onSummaryIndexSettingChange])
|
||||
|
||||
const handleSummaryIndexModelChange = useCallback((model: DefaultModel) => {
|
||||
onSummaryIndexSettingChange?.({
|
||||
model_provider_name: model.provider,
|
||||
model_name: model.model,
|
||||
})
|
||||
}, [onSummaryIndexSettingChange])
|
||||
|
||||
const handleSummaryIndexPromptChange = useCallback((e: ChangeEvent<HTMLTextAreaElement>) => {
|
||||
onSummaryIndexSettingChange?.({
|
||||
summary_prompt: e.target.value,
|
||||
})
|
||||
}, [onSummaryIndexSettingChange])
|
||||
|
||||
if (entry === 'knowledge-base') {
|
||||
return (
|
||||
<div>
|
||||
<div className="flex h-6 items-center justify-between">
|
||||
<div className="system-sm-semibold-uppercase flex items-center text-text-secondary">
|
||||
{t('form.summaryAutoGen', { ns: 'datasetSettings' })}
|
||||
<Tooltip
|
||||
triggerClassName="ml-1 h-4 w-4 shrink-0"
|
||||
popupContent={t('form.summaryAutoGenTip', { ns: 'datasetSettings' })}
|
||||
>
|
||||
</Tooltip>
|
||||
</div>
|
||||
<Switch
|
||||
defaultValue={summaryIndexSetting?.enable ?? false}
|
||||
onChange={handleSummaryIndexEnableChange}
|
||||
size="md"
|
||||
/>
|
||||
</div>
|
||||
{
|
||||
summaryIndexSetting?.enable && (
|
||||
<div>
|
||||
<div className="system-xs-medium-uppercase mb-1.5 mt-2 flex h-6 items-center text-text-tertiary">
|
||||
{t('form.summaryModel', { ns: 'datasetSettings' })}
|
||||
</div>
|
||||
<ModelSelector
|
||||
defaultModel={summaryIndexModelConfig && { provider: summaryIndexModelConfig.providerName, model: summaryIndexModelConfig.modelName }}
|
||||
modelList={textGenerationModelList}
|
||||
onSelect={handleSummaryIndexModelChange}
|
||||
readonly={readonly}
|
||||
showDeprecatedWarnIcon
|
||||
/>
|
||||
<div className="system-xs-medium-uppercase mt-3 flex h-6 items-center text-text-tertiary">
|
||||
{t('form.summaryInstructions', { ns: 'datasetSettings' })}
|
||||
</div>
|
||||
<Textarea
|
||||
value={summaryIndexSetting?.summary_prompt ?? ''}
|
||||
onChange={handleSummaryIndexPromptChange}
|
||||
disabled={readonly}
|
||||
placeholder={t('form.summaryInstructionsPlaceholder', { ns: 'datasetSettings' })}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (entry === 'dataset-settings') {
|
||||
return (
|
||||
<div className="space-y-4">
|
||||
<div className="flex gap-x-1">
|
||||
<div className="flex h-7 w-[180px] shrink-0 items-center pt-1">
|
||||
<div className="system-sm-semibold text-text-secondary">
|
||||
{t('form.summaryAutoGen', { ns: 'datasetSettings' })}
|
||||
</div>
|
||||
</div>
|
||||
<div className="py-1.5">
|
||||
<div className="system-sm-semibold flex items-center text-text-secondary">
|
||||
<Switch
|
||||
className="mr-2"
|
||||
defaultValue={summaryIndexSetting?.enable ?? false}
|
||||
onChange={handleSummaryIndexEnableChange}
|
||||
size="md"
|
||||
/>
|
||||
{
|
||||
summaryIndexSetting?.enable ? t('list.status.enabled', { ns: 'datasetDocuments' }) : t('list.status.disabled', { ns: 'datasetDocuments' })
|
||||
}
|
||||
</div>
|
||||
<div className="system-sm-regular mt-2 text-text-tertiary">
|
||||
{
|
||||
summaryIndexSetting?.enable && t('form.summaryAutoGenTip', { ns: 'datasetSettings' })
|
||||
}
|
||||
{
|
||||
!summaryIndexSetting?.enable && t('form.summaryAutoGenEnableTip', { ns: 'datasetSettings' })
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{
|
||||
summaryIndexSetting?.enable && (
|
||||
<>
|
||||
<div className="flex gap-x-1">
|
||||
<div className="flex h-7 w-[180px] shrink-0 items-center pt-1">
|
||||
<div className="system-sm-medium text-text-tertiary">
|
||||
{t('form.summaryModel', { ns: 'datasetSettings' })}
|
||||
</div>
|
||||
</div>
|
||||
<div className="grow">
|
||||
<ModelSelector
|
||||
defaultModel={summaryIndexModelConfig && { provider: summaryIndexModelConfig.providerName, model: summaryIndexModelConfig.modelName }}
|
||||
modelList={textGenerationModelList}
|
||||
onSelect={handleSummaryIndexModelChange}
|
||||
readonly={readonly}
|
||||
showDeprecatedWarnIcon
|
||||
triggerClassName="h-8"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex">
|
||||
<div className="flex h-7 w-[180px] shrink-0 items-center pt-1">
|
||||
<div className="system-sm-medium text-text-tertiary">
|
||||
{t('form.summaryInstructions', { ns: 'datasetSettings' })}
|
||||
</div>
|
||||
</div>
|
||||
<div className="grow">
|
||||
<Textarea
|
||||
value={summaryIndexSetting?.summary_prompt ?? ''}
|
||||
onChange={handleSummaryIndexPromptChange}
|
||||
disabled={readonly}
|
||||
placeholder={t('form.summaryInstructionsPlaceholder', { ns: 'datasetSettings' })}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
)
|
||||
}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="space-y-3">
|
||||
<div className="flex h-6 items-center">
|
||||
<Switch
|
||||
className="mr-2"
|
||||
defaultValue={summaryIndexSetting?.enable ?? false}
|
||||
onChange={handleSummaryIndexEnableChange}
|
||||
size="md"
|
||||
/>
|
||||
<div className="system-sm-semibold text-text-secondary">
|
||||
{t('form.summaryAutoGen', { ns: 'datasetSettings' })}
|
||||
</div>
|
||||
</div>
|
||||
{
|
||||
summaryIndexSetting?.enable && (
|
||||
<>
|
||||
<div>
|
||||
<div className="system-sm-medium mb-1.5 flex h-6 items-center text-text-secondary">
|
||||
{t('form.summaryModel', { ns: 'datasetSettings' })}
|
||||
</div>
|
||||
<ModelSelector
|
||||
defaultModel={summaryIndexModelConfig && { provider: summaryIndexModelConfig.providerName, model: summaryIndexModelConfig.modelName }}
|
||||
modelList={textGenerationModelList}
|
||||
onSelect={handleSummaryIndexModelChange}
|
||||
readonly={readonly}
|
||||
showDeprecatedWarnIcon
|
||||
triggerClassName="h-8"
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
<div className="system-sm-medium mb-1.5 flex h-6 items-center text-text-secondary">
|
||||
{t('form.summaryInstructions', { ns: 'datasetSettings' })}
|
||||
</div>
|
||||
<Textarea
|
||||
value={summaryIndexSetting?.summary_prompt ?? ''}
|
||||
onChange={handleSummaryIndexPromptChange}
|
||||
disabled={readonly}
|
||||
placeholder={t('form.summaryInstructionsPlaceholder', { ns: 'datasetSettings' })}
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
)
|
||||
}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
export default memo(SummaryIndexSetting)
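A minimal sketch of how the callers in this diff wire the new component up, combining an entry variant with the partial-update handler (the surrounding panel component is illustrative):

import { useCallback, useRef, useState } from 'react'
import type { SummaryIndexSetting as SummaryIndexSettingType } from '@/models/datasets'
import SummaryIndexSetting from '@/app/components/datasets/settings/summary-index-setting'

// Sketch: hold the setting locally and merge partial payloads, as the callers above do.
const SummarySettingsPanel = () => {
  const [setting, setSetting] = useState<SummaryIndexSettingType | undefined>(undefined)
  const settingRef = useRef<SummaryIndexSettingType | undefined>(undefined)

  const handleChange = useCallback((payload: SummaryIndexSettingType) => {
    const next = { ...settingRef.current, ...payload }
    settingRef.current = next
    setSetting(next)
  }, [])

  return (
    <SummaryIndexSetting
      entry="dataset-settings"
      summaryIndexSetting={setting}
      onSummaryIndexSettingChange={handleChange}
    />
  )
}

export default SummarySettingsPanel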
|
||||
@ -33,6 +33,7 @@ const AppNav = () => {
|
||||
data: appsData,
|
||||
fetchNextPage,
|
||||
hasNextPage,
|
||||
isFetchingNextPage,
|
||||
refetch,
|
||||
} = useInfiniteAppList({
|
||||
page: 1,
|
||||
@ -111,6 +112,7 @@ const AppNav = () => {
|
||||
createText={t('menus.newApp', { ns: 'common' })}
|
||||
onCreate={openModal}
|
||||
onLoadMore={handleLoadMore}
|
||||
isLoadingMore={isFetchingNextPage}
|
||||
/>
|
||||
<CreateAppModal
|
||||
show={showNewAppDialog}
|
||||
|
||||
@ -23,6 +23,7 @@ const DatasetNav = () => {
|
||||
data: datasetList,
|
||||
fetchNextPage,
|
||||
hasNextPage,
|
||||
isFetchingNextPage,
|
||||
} = useDatasetList({
|
||||
initialPage: 1,
|
||||
limit: 30,
|
||||
@ -93,6 +94,7 @@ const DatasetNav = () => {
|
||||
createText={t('menus.newDataset', { ns: 'common' })}
|
||||
onCreate={() => router.push(createRoute)}
|
||||
onLoadMore={handleLoadMore}
|
||||
isLoadingMore={isFetchingNextPage}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
@ -2,9 +2,9 @@
|
||||
|
||||
import type { INavSelectorProps } from './nav-selector'
|
||||
import Link from 'next/link'
|
||||
import { usePathname, useSearchParams, useSelectedLayoutSegment } from 'next/navigation'
|
||||
import { useSelectedLayoutSegment } from 'next/navigation'
|
||||
import * as React from 'react'
|
||||
import { useEffect, useState } from 'react'
|
||||
import { useState } from 'react'
|
||||
import { useStore as useAppStore } from '@/app/components/app/store'
|
||||
import { ArrowNarrowLeft } from '@/app/components/base/icons/src/vender/line/arrows'
|
||||
import { cn } from '@/utils/classnames'
|
||||
@ -30,20 +30,13 @@ const Nav = ({
|
||||
createText,
|
||||
onCreate,
|
||||
onLoadMore,
|
||||
isLoadingMore,
|
||||
isApp,
|
||||
}: INavProps) => {
|
||||
const setAppDetail = useAppStore(state => state.setAppDetail)
|
||||
const [hovered, setHovered] = useState(false)
|
||||
const segment = useSelectedLayoutSegment()
|
||||
const isActivated = Array.isArray(activeSegment) ? activeSegment.includes(segment!) : segment === activeSegment
|
||||
const pathname = usePathname()
|
||||
const searchParams = useSearchParams()
|
||||
const [linkLastSearchParams, setLinkLastSearchParams] = useState('')
|
||||
|
||||
useEffect(() => {
|
||||
if (pathname === link)
|
||||
setLinkLastSearchParams(searchParams.toString())
|
||||
}, [pathname, searchParams])
|
||||
|
||||
return (
|
||||
<div className={`
|
||||
@ -52,7 +45,7 @@ const Nav = ({
|
||||
${!curNav && !isActivated && 'hover:bg-components-main-nav-nav-button-bg-hover'}
|
||||
`}
|
||||
>
|
||||
<Link href={link + (linkLastSearchParams && `?${linkLastSearchParams}`)}>
|
||||
<Link href={link}>
|
||||
<div
|
||||
onClick={(e) => {
|
||||
// Don't clear state if opening in new tab/window
|
||||
@ -89,6 +82,7 @@ const Nav = ({
|
||||
createText={createText}
|
||||
onCreate={onCreate}
|
||||
onLoadMore={onLoadMore}
|
||||
isLoadingMore={isLoadingMore}
|
||||
/>
|
||||
</>
|
||||
)
|
||||
|
||||
@ -14,6 +14,7 @@ import { useStore as useAppStore } from '@/app/components/app/store'
|
||||
import { AppTypeIcon } from '@/app/components/app/type-selector'
|
||||
import AppIcon from '@/app/components/base/app-icon'
|
||||
import { FileArrow01, FilePlus01, FilePlus02 } from '@/app/components/base/icons/src/vender/line/files'
|
||||
import Loading from '@/app/components/base/loading'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import { cn } from '@/utils/classnames'
|
||||
|
||||
@ -34,9 +35,10 @@ export type INavSelectorProps = {
|
||||
isApp?: boolean
|
||||
onCreate: (state: string) => void
|
||||
onLoadMore?: () => void
|
||||
isLoadingMore?: boolean
|
||||
}
|
||||
|
||||
const NavSelector = ({ curNav, navigationItems, createText, isApp, onCreate, onLoadMore }: INavSelectorProps) => {
|
||||
const NavSelector = ({ curNav, navigationItems, createText, isApp, onCreate, onLoadMore, isLoadingMore }: INavSelectorProps) => {
|
||||
const { t } = useTranslation()
|
||||
const router = useRouter()
|
||||
const { isCurrentWorkspaceEditor } = useAppContext()
|
||||
@ -106,6 +108,11 @@ const NavSelector = ({ curNav, navigationItems, createText, isApp, onCreate, onL
|
||||
</MenuItem>
|
||||
))
|
||||
}
|
||||
{isLoadingMore && (
|
||||
<div className="flex justify-center py-2">
|
||||
<Loading />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
{!isApp && isCurrentWorkspaceEditor && (
|
||||
<MenuItem as="div" className="w-full p-1">
|
||||
|
||||
@ -897,6 +897,58 @@ describe('Icon', () => {
const iconDiv = container.firstChild as HTMLElement
expect(iconDiv).toHaveStyle({ backgroundImage: 'url(/icon?name=test&size=large)' })
})

it('should not render status indicators when src is object with installed=true', () => {
render(<Icon src={{ content: '🎉', background: '#fff' }} installed={true} />)

// Status indicators should not render for object src
expect(screen.queryByTestId('ri-check-line')).not.toBeInTheDocument()
})

it('should not render status indicators when src is object with installFailed=true', () => {
render(<Icon src={{ content: '🎉', background: '#fff' }} installFailed={true} />)

// Status indicators should not render for object src
expect(screen.queryByTestId('ri-close-line')).not.toBeInTheDocument()
})

it('should render object src with all size variants', () => {
const sizes: Array<'xs' | 'tiny' | 'small' | 'medium' | 'large'> = ['xs', 'tiny', 'small', 'medium', 'large']

sizes.forEach((size) => {
const { unmount } = render(<Icon src={{ content: '🔗', background: '#fff' }} size={size} />)
expect(screen.getByTestId('app-icon')).toHaveAttribute('data-size', size)
unmount()
})
})

it('should render object src with custom className', () => {
const { container } = render(
<Icon src={{ content: '🎉', background: '#fff' }} className="custom-object-icon" />,
)

expect(container.querySelector('.custom-object-icon')).toBeInTheDocument()
})

it('should pass correct props to AppIcon for object src', () => {
render(<Icon src={{ content: '😀', background: '#123456' }} />)

const appIcon = screen.getByTestId('app-icon')
expect(appIcon).toHaveAttribute('data-icon', '😀')
expect(appIcon).toHaveAttribute('data-background', '#123456')
expect(appIcon).toHaveAttribute('data-icon-type', 'emoji')
})

it('should render inner icon only when shouldUseMcpIcon returns true', () => {
// Test with MCP icon content
const { unmount } = render(<Icon src={{ content: '🔗', background: '#fff' }} />)
expect(screen.getByTestId('inner-icon')).toBeInTheDocument()
unmount()

// Test without MCP icon content
render(<Icon src={{ content: '🎉', background: '#fff' }} />)
expect(screen.queryByTestId('inner-icon')).not.toBeInTheDocument()
})
})
})

@ -1,4 +1,3 @@
import type { ActivePluginType } from './constants'
import type { PluginsSort, SearchParamsFromCollection } from './types'
import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai'
import { useQueryState } from 'nuqs'
@ -17,32 +16,14 @@ export function useSetMarketplaceSort() {
return useSetAtom(marketplaceSortAtom)
}

/**
* Preserve the state for marketplace
*/
export const preserveSearchStateInQueryAtom = atom<boolean>(false)

const searchPluginTextAtom = atom<string>('')
const activePluginTypeAtom = atom<ActivePluginType>('all')
const filterPluginTagsAtom = atom<string[]>([])

export function useSearchPluginText() {
const preserveSearchStateInQuery = useAtomValue(preserveSearchStateInQueryAtom)
const queryState = useQueryState('q', marketplaceSearchParamsParsers.q)
const atomState = useAtom(searchPluginTextAtom)
return preserveSearchStateInQuery ? queryState : atomState
return useQueryState('q', marketplaceSearchParamsParsers.q)
}
export function useActivePluginType() {
const preserveSearchStateInQuery = useAtomValue(preserveSearchStateInQueryAtom)
const queryState = useQueryState('category', marketplaceSearchParamsParsers.category)
const atomState = useAtom(activePluginTypeAtom)
return preserveSearchStateInQuery ? queryState : atomState
return useQueryState('category', marketplaceSearchParamsParsers.category)
}
export function useFilterPluginTags() {
const preserveSearchStateInQuery = useAtomValue(preserveSearchStateInQueryAtom)
const queryState = useQueryState('tags', marketplaceSearchParamsParsers.tags)
const atomState = useAtom(filterPluginTagsAtom)
return preserveSearchStateInQuery ? queryState : atomState
return useQueryState('tags', marketplaceSearchParamsParsers.tags)
}

/**

@ -1,15 +0,0 @@
'use client'

import { useHydrateAtoms } from 'jotai/utils'
import { preserveSearchStateInQueryAtom } from './atoms'

export function HydrateMarketplaceAtoms({
preserveSearchStateInQuery,
children,
}: {
preserveSearchStateInQuery: boolean
children: React.ReactNode
}) {
useHydrateAtoms([[preserveSearchStateInQueryAtom, preserveSearchStateInQuery]])
return <>{children}</>
}
@ -1,7 +1,6 @@
import type { SearchParams } from 'nuqs'
import { TanstackQueryInitializer } from '@/context/query-client'
import Description from './description'
import { HydrateMarketplaceAtoms } from './hydration-client'
import { HydrateQueryClient } from './hydration-server'
import ListWrapper from './list/list-wrapper'
import StickySearchAndSwitchWrapper from './sticky-search-and-switch-wrapper'
@ -10,8 +9,7 @@ type MarketplaceProps = {
showInstallButton?: boolean
pluginTypeSwitchClassName?: string
/**
* Pass the search params from the request to prefetch data on the server
* and preserve the search params in the URL.
* Pass the search params from the request to prefetch data on the server.
*/
searchParams?: Promise<SearchParams>
}
@ -24,15 +22,13 @@ const Marketplace = async ({
return (
<TanstackQueryInitializer>
<HydrateQueryClient searchParams={searchParams}>
<HydrateMarketplaceAtoms preserveSearchStateInQuery={!!searchParams}>
<Description />
<StickySearchAndSwitchWrapper
pluginTypeSwitchClassName={pluginTypeSwitchClassName}
/>
<ListWrapper
showInstallButton={showInstallButton}
/>
</HydrateMarketplaceAtoms>
<Description />
<StickySearchAndSwitchWrapper
pluginTypeSwitchClassName={pluginTypeSwitchClassName}
/>
<ListWrapper
showInstallButton={showInstallButton}
/>
</HydrateQueryClient>
</TanstackQueryInitializer>
)

@ -19,6 +19,7 @@ const ListWrapper = ({
marketplaceCollections,
marketplaceCollectionPluginsMap,
isLoading,
isFetchingNextPage,
page,
} = useMarketplaceData()

@ -53,6 +54,11 @@
/>
)
}
{
isFetchingNextPage && (
<Loading className="my-3" />
)
}
</div>
)
}

@ -33,7 +33,7 @@ export function useMarketplaceData() {
}, [isSearchMode, searchPluginText, activePluginType, filterPluginTags, sort])

const pluginsQuery = useMarketplacePlugins(queryParams)
const { hasNextPage, fetchNextPage, isFetching } = pluginsQuery
const { hasNextPage, fetchNextPage, isFetching, isFetchingNextPage } = pluginsQuery

const handlePageChange = useCallback(() => {
if (hasNextPage && !isFetching)
@ -50,5 +50,6 @@ export function useMarketplaceData() {
pluginsTotal: pluginsQuery.data?.pages[0]?.total,
page: pluginsQuery.data?.pages.length || 1,
isLoading: collectionsQuery.isLoading || pluginsQuery.isLoading,
isFetchingNextPage,
}
}

123
web/app/components/plugins/plugin-page/context.spec.tsx
Normal file
@ -0,0 +1,123 @@
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
// Import mocks
|
||||
import { useGlobalPublicStore } from '@/context/global-public-context'
|
||||
|
||||
import { PluginPageContext, PluginPageContextProvider, usePluginPageContext } from './context'
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('nuqs', () => ({
|
||||
useQueryState: vi.fn(() => ['plugins', vi.fn()]),
|
||||
}))
|
||||
|
||||
vi.mock('@/context/global-public-context', () => ({
|
||||
useGlobalPublicStore: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('../hooks', () => ({
|
||||
PLUGIN_PAGE_TABS_MAP: {
|
||||
plugins: 'plugins',
|
||||
marketplace: 'discover',
|
||||
},
|
||||
usePluginPageTabs: () => [
|
||||
{ value: 'plugins', text: 'Plugins' },
|
||||
{ value: 'discover', text: 'Explore Marketplace' },
|
||||
],
|
||||
}))
|
||||
|
||||
// Helper function to mock useGlobalPublicStore with marketplace setting
|
||||
const mockGlobalPublicStore = (enableMarketplace: boolean) => {
|
||||
vi.mocked(useGlobalPublicStore).mockImplementation((selector) => {
|
||||
const state = { systemFeatures: { enable_marketplace: enableMarketplace } }
|
||||
return selector(state as Parameters<typeof selector>[0])
|
||||
})
|
||||
}
|
||||
|
||||
// Test component that uses the context
|
||||
const TestConsumer = () => {
|
||||
const containerRef = usePluginPageContext(v => v.containerRef)
|
||||
const options = usePluginPageContext(v => v.options)
|
||||
const activeTab = usePluginPageContext(v => v.activeTab)
|
||||
|
||||
return (
|
||||
<div>
|
||||
<span data-testid="has-container-ref">{containerRef ? 'true' : 'false'}</span>
|
||||
<span data-testid="options-count">{options.length}</span>
|
||||
<span data-testid="active-tab">{activeTab}</span>
|
||||
{options.map((opt: { value: string, text: string }) => (
|
||||
<span key={opt.value} data-testid={`option-${opt.value}`}>{opt.text}</span>
|
||||
))}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
describe('PluginPageContext', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('PluginPageContextProvider', () => {
|
||||
it('should provide context values to children', () => {
|
||||
mockGlobalPublicStore(true)
|
||||
|
||||
render(
|
||||
<PluginPageContextProvider>
|
||||
<TestConsumer />
|
||||
</PluginPageContextProvider>,
|
||||
)
|
||||
|
||||
expect(screen.getByTestId('has-container-ref')).toHaveTextContent('true')
|
||||
expect(screen.getByTestId('options-count')).toHaveTextContent('2')
|
||||
})
|
||||
|
||||
it('should include marketplace tab when enable_marketplace is true', () => {
|
||||
mockGlobalPublicStore(true)
|
||||
|
||||
render(
|
||||
<PluginPageContextProvider>
|
||||
<TestConsumer />
|
||||
</PluginPageContextProvider>,
|
||||
)
|
||||
|
||||
expect(screen.getByTestId('option-plugins')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('option-discover')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should filter out marketplace tab when enable_marketplace is false', () => {
|
||||
mockGlobalPublicStore(false)
|
||||
|
||||
render(
|
||||
<PluginPageContextProvider>
|
||||
<TestConsumer />
|
||||
</PluginPageContextProvider>,
|
||||
)
|
||||
|
||||
expect(screen.getByTestId('option-plugins')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('option-discover')).not.toBeInTheDocument()
|
||||
expect(screen.getByTestId('options-count')).toHaveTextContent('1')
|
||||
})
|
||||
})
|
||||
|
||||
describe('usePluginPageContext', () => {
|
||||
it('should select specific context values', () => {
|
||||
mockGlobalPublicStore(true)
|
||||
|
||||
render(
|
||||
<PluginPageContextProvider>
|
||||
<TestConsumer />
|
||||
</PluginPageContextProvider>,
|
||||
)
|
||||
|
||||
// activeTab should be 'plugins' from the mock
|
||||
expect(screen.getByTestId('active-tab')).toHaveTextContent('plugins')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Default Context Values', () => {
|
||||
it('should have empty options by default from context', () => {
|
||||
// Test that the context has proper default values by checking the exported constant
|
||||
// The PluginPageContext is created with default values including empty options array
|
||||
expect(PluginPageContext).toBeDefined()
|
||||
})
|
||||
})
|
||||
})
|
||||
@ -68,7 +68,7 @@ export const PluginPageContextProvider = ({
const options = useMemo(() => {
return enable_marketplace ? tabs : tabs.filter(tab => tab.value !== PLUGIN_PAGE_TABS_MAP.marketplace)
}, [tabs, enable_marketplace])
const [activeTab, setActiveTab] = useQueryState('category', {
const [activeTab, setActiveTab] = useQueryState('tab', {
defaultValue: options[0].value,
})

@ -144,17 +144,6 @@ describe('constant.ts - Type Definitions', () => {

// ==================== store.ts Tests ====================
describe('store.ts - Zustand Store', () => {
beforeEach(() => {
// Reset store to initial state
const { setState } = useStore
setState({
tagList: [],
categoryList: [],
showTagManagementModal: false,
showCategoryManagementModal: false,
})
})

describe('Initial State', () => {
it('should have empty tagList initially', () => {
const { result } = renderHook(() => useStore(state => state.tagList))

1041
web/app/components/plugins/plugin-page/index.spec.tsx
Normal file
File diff suppressed because it is too large
@ -207,6 +207,7 @@ const PluginPage = ({
popupContent={t('privilege.title', { ns: 'plugin' })}
>
<Button
data-testid="plugin-settings-button"
className="group h-full w-full p-2 text-components-button-secondary-text"
onClick={setShowPluginSettingModal}
>

@ -0,0 +1,219 @@
|
||||
import type { FC, ReactNode } from 'react'
|
||||
import type { PluginStatus } from '@/app/components/plugins/types'
|
||||
import type { Locale } from '@/i18n-config'
|
||||
import {
|
||||
RiCheckboxCircleFill,
|
||||
RiErrorWarningFill,
|
||||
RiLoaderLine,
|
||||
} from '@remixicon/react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import Button from '@/app/components/base/button'
|
||||
import CardIcon from '@/app/components/plugins/card/base/card-icon'
|
||||
import { useGetLanguage } from '@/context/i18n'
|
||||
|
||||
// Types
|
||||
type PluginItemProps = {
|
||||
plugin: PluginStatus
|
||||
getIconUrl: (icon: string) => string
|
||||
language: Locale
|
||||
statusIcon: ReactNode
|
||||
statusText: string
|
||||
statusClassName?: string
|
||||
action?: ReactNode
|
||||
}
|
||||
|
||||
type PluginSectionProps = {
|
||||
title: string
|
||||
count: number
|
||||
plugins: PluginStatus[]
|
||||
getIconUrl: (icon: string) => string
|
||||
language: Locale
|
||||
statusIcon: ReactNode
|
||||
defaultStatusText: string
|
||||
statusClassName?: string
|
||||
headerAction?: ReactNode
|
||||
renderItemAction?: (plugin: PluginStatus) => ReactNode
|
||||
}
|
||||
|
||||
type PluginTaskListProps = {
|
||||
runningPlugins: PluginStatus[]
|
||||
successPlugins: PluginStatus[]
|
||||
errorPlugins: PluginStatus[]
|
||||
getIconUrl: (icon: string) => string
|
||||
onClearAll: () => void
|
||||
onClearErrors: () => void
|
||||
onClearSingle: (taskId: string, pluginId: string) => void
|
||||
}
|
||||
|
||||
// Plugin Item Component
|
||||
const PluginItem: FC<PluginItemProps> = ({
|
||||
plugin,
|
||||
getIconUrl,
|
||||
language,
|
||||
statusIcon,
|
||||
statusText,
|
||||
statusClassName,
|
||||
action,
|
||||
}) => {
|
||||
return (
|
||||
<div className="flex items-center rounded-lg p-2 hover:bg-state-base-hover">
|
||||
<div className="relative mr-2 flex h-6 w-6 items-center justify-center rounded-md border-[0.5px] border-components-panel-border-subtle bg-background-default-dodge">
|
||||
{statusIcon}
|
||||
<CardIcon
|
||||
size="tiny"
|
||||
src={getIconUrl(plugin.icon)}
|
||||
/>
|
||||
</div>
|
||||
<div className="grow">
|
||||
<div className="system-md-regular truncate text-text-secondary">
|
||||
{plugin.labels[language]}
|
||||
</div>
|
||||
<div className={`system-xs-regular ${statusClassName || 'text-text-tertiary'}`}>
|
||||
{statusText}
|
||||
</div>
|
||||
</div>
|
||||
{action}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Plugin Section Component
|
||||
const PluginSection: FC<PluginSectionProps> = ({
|
||||
title,
|
||||
count,
|
||||
plugins,
|
||||
getIconUrl,
|
||||
language,
|
||||
statusIcon,
|
||||
defaultStatusText,
|
||||
statusClassName,
|
||||
headerAction,
|
||||
renderItemAction,
|
||||
}) => {
|
||||
if (plugins.length === 0)
|
||||
return null
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="system-sm-semibold-uppercase sticky top-0 flex h-7 items-center justify-between px-2 pt-1 text-text-secondary">
|
||||
{title}
|
||||
{' '}
|
||||
(
|
||||
{count}
|
||||
)
|
||||
{headerAction}
|
||||
</div>
|
||||
<div className="max-h-[200px] overflow-y-auto">
|
||||
{plugins.map(plugin => (
|
||||
<PluginItem
|
||||
key={plugin.plugin_unique_identifier}
|
||||
plugin={plugin}
|
||||
getIconUrl={getIconUrl}
|
||||
language={language}
|
||||
statusIcon={statusIcon}
|
||||
statusText={plugin.message || defaultStatusText}
|
||||
statusClassName={statusClassName}
|
||||
action={renderItemAction?.(plugin)}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
// Main Plugin Task List Component
|
||||
const PluginTaskList: FC<PluginTaskListProps> = ({
|
||||
runningPlugins,
|
||||
successPlugins,
|
||||
errorPlugins,
|
||||
getIconUrl,
|
||||
onClearAll,
|
||||
onClearErrors,
|
||||
onClearSingle,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
const language = useGetLanguage()
|
||||
|
||||
return (
|
||||
<div className="w-[360px] rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur p-1 shadow-lg">
|
||||
{/* Running Plugins Section */}
|
||||
{runningPlugins.length > 0 && (
|
||||
<PluginSection
|
||||
title={t('task.installing', { ns: 'plugin' })}
|
||||
count={runningPlugins.length}
|
||||
plugins={runningPlugins}
|
||||
getIconUrl={getIconUrl}
|
||||
language={language}
|
||||
statusIcon={
|
||||
<RiLoaderLine className="absolute -bottom-0.5 -right-0.5 z-10 h-3 w-3 animate-spin text-text-accent" />
|
||||
}
|
||||
defaultStatusText={t('task.installing', { ns: 'plugin' })}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Success Plugins Section */}
|
||||
{successPlugins.length > 0 && (
|
||||
<PluginSection
|
||||
title={t('task.installed', { ns: 'plugin' })}
|
||||
count={successPlugins.length}
|
||||
plugins={successPlugins}
|
||||
getIconUrl={getIconUrl}
|
||||
language={language}
|
||||
statusIcon={
|
||||
<RiCheckboxCircleFill className="absolute -bottom-0.5 -right-0.5 z-10 h-3 w-3 text-text-success" />
|
||||
}
|
||||
defaultStatusText={t('task.installed', { ns: 'plugin' })}
|
||||
statusClassName="text-text-success"
|
||||
headerAction={(
|
||||
<Button
|
||||
className="shrink-0"
|
||||
size="small"
|
||||
variant="ghost"
|
||||
onClick={onClearAll}
|
||||
>
|
||||
{t('task.clearAll', { ns: 'plugin' })}
|
||||
</Button>
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Error Plugins Section */}
|
||||
{errorPlugins.length > 0 && (
|
||||
<PluginSection
|
||||
title={t('task.installError', { ns: 'plugin', errorLength: errorPlugins.length })}
|
||||
count={errorPlugins.length}
|
||||
plugins={errorPlugins}
|
||||
getIconUrl={getIconUrl}
|
||||
language={language}
|
||||
statusIcon={
|
||||
<RiErrorWarningFill className="absolute -bottom-0.5 -right-0.5 z-10 h-3 w-3 text-text-destructive" />
|
||||
}
|
||||
defaultStatusText={t('task.installError', { ns: 'plugin', errorLength: errorPlugins.length })}
|
||||
statusClassName="text-text-destructive break-all"
|
||||
headerAction={(
|
||||
<Button
|
||||
className="shrink-0"
|
||||
size="small"
|
||||
variant="ghost"
|
||||
onClick={onClearErrors}
|
||||
>
|
||||
{t('task.clearAll', { ns: 'plugin' })}
|
||||
</Button>
|
||||
)}
|
||||
renderItemAction={plugin => (
|
||||
<Button
|
||||
className="shrink-0"
|
||||
size="small"
|
||||
variant="ghost"
|
||||
onClick={() => onClearSingle(plugin.taskId, plugin.plugin_unique_identifier)}
|
||||
>
|
||||
{t('operation.clear', { ns: 'common' })}
|
||||
</Button>
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default PluginTaskList
|
||||
@ -0,0 +1,96 @@
|
||||
import type { FC } from 'react'
|
||||
import {
|
||||
RiCheckboxCircleFill,
|
||||
RiErrorWarningFill,
|
||||
RiInstallLine,
|
||||
} from '@remixicon/react'
|
||||
import ProgressCircle from '@/app/components/base/progress-bar/progress-circle'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import DownloadingIcon from '@/app/components/header/plugins-nav/downloading-icon'
|
||||
import { cn } from '@/utils/classnames'
|
||||
|
||||
export type TaskStatusIndicatorProps = {
|
||||
tip: string
|
||||
isInstalling: boolean
|
||||
isInstallingWithSuccess: boolean
|
||||
isInstallingWithError: boolean
|
||||
isSuccess: boolean
|
||||
isFailed: boolean
|
||||
successPluginsLength: number
|
||||
runningPluginsLength: number
|
||||
totalPluginsLength: number
|
||||
onClick: () => void
|
||||
}
|
||||
|
||||
const TaskStatusIndicator: FC<TaskStatusIndicatorProps> = ({
|
||||
tip,
|
||||
isInstalling,
|
||||
isInstallingWithSuccess,
|
||||
isInstallingWithError,
|
||||
isSuccess,
|
||||
isFailed,
|
||||
successPluginsLength,
|
||||
runningPluginsLength,
|
||||
totalPluginsLength,
|
||||
onClick,
|
||||
}) => {
|
||||
const showDownloadingIcon = isInstalling || isInstallingWithError
|
||||
const showErrorStyle = isInstallingWithError || isFailed
|
||||
const showSuccessIcon = isSuccess || (successPluginsLength > 0 && runningPluginsLength === 0)
|
||||
|
||||
return (
|
||||
<Tooltip
|
||||
popupContent={tip}
|
||||
asChild
|
||||
offset={8}
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
'relative flex h-8 w-8 items-center justify-center rounded-lg border-[0.5px] border-components-button-secondary-border bg-components-button-secondary-bg shadow-xs hover:bg-components-button-secondary-bg-hover',
|
||||
showErrorStyle && 'cursor-pointer border-components-button-destructive-secondary-border-hover bg-state-destructive-hover hover:bg-state-destructive-hover-alt',
|
||||
(isInstalling || isInstallingWithSuccess || isSuccess) && 'cursor-pointer hover:bg-components-button-secondary-bg-hover',
|
||||
)}
|
||||
id="plugin-task-trigger"
|
||||
onClick={onClick}
|
||||
>
|
||||
{/* Main Icon */}
|
||||
{showDownloadingIcon
|
||||
? <DownloadingIcon />
|
||||
: (
|
||||
<RiInstallLine
|
||||
className={cn(
|
||||
'h-4 w-4 text-components-button-secondary-text',
|
||||
showErrorStyle && 'text-components-button-destructive-secondary-text',
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Status Indicator Badge */}
|
||||
<div className="absolute -right-1 -top-1">
|
||||
{(isInstalling || isInstallingWithSuccess) && (
|
||||
<ProgressCircle
|
||||
percentage={(totalPluginsLength > 0 ? successPluginsLength / totalPluginsLength : 0) * 100}
|
||||
circleFillColor="fill-components-progress-brand-bg"
|
||||
/>
|
||||
)}
|
||||
{isInstallingWithError && (
|
||||
<ProgressCircle
|
||||
percentage={(totalPluginsLength > 0 ? runningPluginsLength / totalPluginsLength : 0) * 100}
|
||||
circleFillColor="fill-components-progress-brand-bg"
|
||||
sectorFillColor="fill-components-progress-error-border"
|
||||
circleStrokeColor="stroke-components-progress-error-border"
|
||||
/>
|
||||
)}
|
||||
{showSuccessIcon && !isInstalling && !isInstallingWithSuccess && !isInstallingWithError && (
|
||||
<RiCheckboxCircleFill className="h-3.5 w-3.5 text-text-success" />
|
||||
)}
|
||||
{isFailed && (
|
||||
<RiErrorWarningFill className="h-3.5 w-3.5 text-text-destructive" />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
export default TaskStatusIndicator
|
||||
@ -0,0 +1,856 @@
|
||||
import type { PluginStatus } from '@/app/components/plugins/types'
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { TaskStatus } from '@/app/components/plugins/types'
|
||||
// Import mocked modules
|
||||
import { useMutationClearTaskPlugin, usePluginTaskList } from '@/service/use-plugins'
|
||||
import PluginTaskList from './components/plugin-task-list'
|
||||
import TaskStatusIndicator from './components/task-status-indicator'
|
||||
import { usePluginTaskStatus } from './hooks'
|
||||
|
||||
import PluginTasks from './index'
|
||||
|
||||
// Mock external dependencies
|
||||
vi.mock('@/service/use-plugins', () => ({
|
||||
usePluginTaskList: vi.fn(),
|
||||
useMutationClearTaskPlugin: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/install-plugin/base/use-get-icon', () => ({
|
||||
default: () => ({
|
||||
getIconUrl: (icon: string) => `https://example.com/${icon}`,
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/context/i18n', () => ({
|
||||
useGetLanguage: () => 'en_US',
|
||||
}))
|
||||
|
||||
// Helper to create mock plugin
|
||||
const createMockPlugin = (overrides: Partial<PluginStatus> = {}): PluginStatus => ({
|
||||
plugin_unique_identifier: `plugin-${Math.random().toString(36).substr(2, 9)}`,
|
||||
plugin_id: 'test-plugin',
|
||||
status: TaskStatus.running,
|
||||
message: '',
|
||||
icon: 'test-icon.png',
|
||||
labels: {
|
||||
en_US: 'Test Plugin',
|
||||
zh_Hans: '测试插件',
|
||||
} as Record<string, string>,
|
||||
taskId: 'task-1',
|
||||
...overrides,
|
||||
})
|
||||
|
||||
// Helper to setup mock hook returns
|
||||
const setupMocks = (plugins: PluginStatus[] = []) => {
|
||||
const mockMutateAsync = vi.fn().mockResolvedValue({})
|
||||
const mockHandleRefetch = vi.fn()
|
||||
|
||||
vi.mocked(usePluginTaskList).mockReturnValue({
|
||||
pluginTasks: plugins.length > 0
|
||||
? [{ id: 'task-1', plugins, created_at: '', updated_at: '', status: 'running', total_plugins: plugins.length, completed_plugins: 0 }]
|
||||
: [],
|
||||
handleRefetch: mockHandleRefetch,
|
||||
} as any)
|
||||
|
||||
vi.mocked(useMutationClearTaskPlugin).mockReturnValue({
|
||||
mutateAsync: mockMutateAsync,
|
||||
} as any)
|
||||
|
||||
return { mockMutateAsync, mockHandleRefetch }
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// usePluginTaskStatus Hook Tests
|
||||
// ============================================================================
|
||||
describe('usePluginTaskStatus Hook', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Plugin categorization', () => {
|
||||
it('should categorize running plugins correctly', () => {
|
||||
const runningPlugin = createMockPlugin({ status: TaskStatus.running })
|
||||
setupMocks([runningPlugin])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { runningPlugins, runningPluginsLength } = usePluginTaskStatus()
|
||||
return (
|
||||
<div>
|
||||
<span data-testid="running-count">{runningPluginsLength}</span>
|
||||
<span data-testid="running-id">{runningPlugins[0]?.plugin_unique_identifier}</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
|
||||
expect(screen.getByTestId('running-count')).toHaveTextContent('1')
|
||||
expect(screen.getByTestId('running-id')).toHaveTextContent(runningPlugin.plugin_unique_identifier)
|
||||
})
|
||||
|
||||
it('should categorize success plugins correctly', () => {
|
||||
const successPlugin = createMockPlugin({ status: TaskStatus.success })
|
||||
setupMocks([successPlugin])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { successPlugins, successPluginsLength } = usePluginTaskStatus()
|
||||
return (
|
||||
<div>
|
||||
<span data-testid="success-count">{successPluginsLength}</span>
|
||||
<span data-testid="success-id">{successPlugins[0]?.plugin_unique_identifier}</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
|
||||
expect(screen.getByTestId('success-count')).toHaveTextContent('1')
|
||||
expect(screen.getByTestId('success-id')).toHaveTextContent(successPlugin.plugin_unique_identifier)
|
||||
})
|
||||
|
||||
it('should categorize error plugins correctly', () => {
|
||||
const errorPlugin = createMockPlugin({ status: TaskStatus.failed, message: 'Install failed' })
|
||||
setupMocks([errorPlugin])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { errorPlugins, errorPluginsLength } = usePluginTaskStatus()
|
||||
return (
|
||||
<div>
|
||||
<span data-testid="error-count">{errorPluginsLength}</span>
|
||||
<span data-testid="error-id">{errorPlugins[0]?.plugin_unique_identifier}</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
|
||||
expect(screen.getByTestId('error-count')).toHaveTextContent('1')
|
||||
expect(screen.getByTestId('error-id')).toHaveTextContent(errorPlugin.plugin_unique_identifier)
|
||||
})
|
||||
|
||||
it('should categorize mixed plugins correctly', () => {
|
||||
const plugins = [
|
||||
createMockPlugin({ status: TaskStatus.running, plugin_unique_identifier: 'running-1' }),
|
||||
createMockPlugin({ status: TaskStatus.success, plugin_unique_identifier: 'success-1' }),
|
||||
createMockPlugin({ status: TaskStatus.failed, plugin_unique_identifier: 'error-1' }),
|
||||
]
|
||||
setupMocks(plugins)
|
||||
|
||||
const TestComponent = () => {
|
||||
const { runningPluginsLength, successPluginsLength, errorPluginsLength, totalPluginsLength } = usePluginTaskStatus()
|
||||
return (
|
||||
<div>
|
||||
<span data-testid="running">{runningPluginsLength}</span>
|
||||
<span data-testid="success">{successPluginsLength}</span>
|
||||
<span data-testid="error">{errorPluginsLength}</span>
|
||||
<span data-testid="total">{totalPluginsLength}</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
|
||||
expect(screen.getByTestId('running')).toHaveTextContent('1')
|
||||
expect(screen.getByTestId('success')).toHaveTextContent('1')
|
||||
expect(screen.getByTestId('error')).toHaveTextContent('1')
|
||||
expect(screen.getByTestId('total')).toHaveTextContent('3')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Status flags', () => {
|
||||
it('should set isInstalling when only running plugins exist', () => {
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.running })])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { isInstalling, isInstallingWithSuccess, isInstallingWithError, isSuccess, isFailed } = usePluginTaskStatus()
|
||||
return (
|
||||
<div>
|
||||
<span data-testid="isInstalling">{String(isInstalling)}</span>
|
||||
<span data-testid="isInstallingWithSuccess">{String(isInstallingWithSuccess)}</span>
|
||||
<span data-testid="isInstallingWithError">{String(isInstallingWithError)}</span>
|
||||
<span data-testid="isSuccess">{String(isSuccess)}</span>
|
||||
<span data-testid="isFailed">{String(isFailed)}</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
|
||||
expect(screen.getByTestId('isInstalling')).toHaveTextContent('true')
|
||||
expect(screen.getByTestId('isInstallingWithSuccess')).toHaveTextContent('false')
|
||||
expect(screen.getByTestId('isInstallingWithError')).toHaveTextContent('false')
|
||||
expect(screen.getByTestId('isSuccess')).toHaveTextContent('false')
|
||||
expect(screen.getByTestId('isFailed')).toHaveTextContent('false')
|
||||
})
|
||||
|
||||
it('should set isInstallingWithSuccess when running and success plugins exist', () => {
|
||||
setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.running }),
|
||||
createMockPlugin({ status: TaskStatus.success }),
|
||||
])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { isInstallingWithSuccess } = usePluginTaskStatus()
|
||||
return <span data-testid="flag">{String(isInstallingWithSuccess)}</span>
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
expect(screen.getByTestId('flag')).toHaveTextContent('true')
|
||||
})
|
||||
|
||||
it('should set isInstallingWithError when running and error plugins exist', () => {
|
||||
setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.running }),
|
||||
createMockPlugin({ status: TaskStatus.failed }),
|
||||
])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { isInstallingWithError } = usePluginTaskStatus()
|
||||
return <span data-testid="flag">{String(isInstallingWithError)}</span>
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
expect(screen.getByTestId('flag')).toHaveTextContent('true')
|
||||
})
|
||||
|
||||
it('should set isSuccess when all plugins succeeded', () => {
|
||||
setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.success }),
|
||||
createMockPlugin({ status: TaskStatus.success }),
|
||||
])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { isSuccess } = usePluginTaskStatus()
|
||||
return <span data-testid="flag">{String(isSuccess)}</span>
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
expect(screen.getByTestId('flag')).toHaveTextContent('true')
|
||||
})
|
||||
|
||||
it('should set isFailed when no running plugins and some failed', () => {
|
||||
setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.success }),
|
||||
createMockPlugin({ status: TaskStatus.failed }),
|
||||
])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { isFailed } = usePluginTaskStatus()
|
||||
return <span data-testid="flag">{String(isFailed)}</span>
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
expect(screen.getByTestId('flag')).toHaveTextContent('true')
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleClearErrorPlugin', () => {
|
||||
it('should call mutateAsync and handleRefetch', async () => {
|
||||
const { mockMutateAsync, mockHandleRefetch } = setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.failed }),
|
||||
])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { handleClearErrorPlugin } = usePluginTaskStatus()
|
||||
return (
|
||||
<button onClick={() => handleClearErrorPlugin('task-1', 'plugin-1')}>
|
||||
Clear
|
||||
</button>
|
||||
)
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
fireEvent.click(screen.getByRole('button'))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockMutateAsync).toHaveBeenCalledWith({
|
||||
taskId: 'task-1',
|
||||
pluginId: 'plugin-1',
|
||||
})
|
||||
expect(mockHandleRefetch).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ============================================================================
|
||||
// TaskStatusIndicator Component Tests
|
||||
// ============================================================================
|
||||
describe('TaskStatusIndicator Component', () => {
|
||||
const defaultProps = {
|
||||
tip: 'Test tooltip',
|
||||
isInstalling: false,
|
||||
isInstallingWithSuccess: false,
|
||||
isInstallingWithError: false,
|
||||
isSuccess: false,
|
||||
isFailed: false,
|
||||
successPluginsLength: 0,
|
||||
runningPluginsLength: 0,
|
||||
totalPluginsLength: 1,
|
||||
onClick: vi.fn(),
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Rendering', () => {
|
||||
it('should render without crashing', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} />)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render with correct id', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} />)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Icon display', () => {
|
||||
it('should show downloading icon when installing', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} isInstalling />)
|
||||
// DownloadingIcon is rendered when isInstalling is true
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show downloading icon when installing with error', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} isInstallingWithError />)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show install icon when not installing', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} isSuccess />)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Status badge', () => {
|
||||
it('should show progress circle when installing', () => {
|
||||
render(
|
||||
<TaskStatusIndicator
|
||||
{...defaultProps}
|
||||
isInstalling
|
||||
successPluginsLength={1}
|
||||
totalPluginsLength={3}
|
||||
/>,
|
||||
)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show progress circle when installing with success', () => {
|
||||
render(
|
||||
<TaskStatusIndicator
|
||||
{...defaultProps}
|
||||
isInstallingWithSuccess
|
||||
successPluginsLength={2}
|
||||
totalPluginsLength={3}
|
||||
/>,
|
||||
)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show error progress circle when installing with error', () => {
|
||||
render(
|
||||
<TaskStatusIndicator
|
||||
{...defaultProps}
|
||||
isInstallingWithError
|
||||
runningPluginsLength={1}
|
||||
totalPluginsLength={3}
|
||||
/>,
|
||||
)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show success icon when all completed successfully', () => {
|
||||
render(
|
||||
<TaskStatusIndicator
|
||||
{...defaultProps}
|
||||
isSuccess
|
||||
successPluginsLength={3}
|
||||
runningPluginsLength={0}
|
||||
totalPluginsLength={3}
|
||||
/>,
|
||||
)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show error icon when failed', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} isFailed />)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Styling', () => {
|
||||
it('should apply error styles when installing with error', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} isInstallingWithError />)
|
||||
const trigger = document.getElementById('plugin-task-trigger')
|
||||
expect(trigger).toHaveClass('bg-state-destructive-hover')
|
||||
})
|
||||
|
||||
it('should apply error styles when failed', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} isFailed />)
|
||||
const trigger = document.getElementById('plugin-task-trigger')
|
||||
expect(trigger).toHaveClass('bg-state-destructive-hover')
|
||||
})
|
||||
|
||||
it('should apply cursor-pointer when clickable', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} isInstalling />)
|
||||
const trigger = document.getElementById('plugin-task-trigger')
|
||||
expect(trigger).toHaveClass('cursor-pointer')
|
||||
})
|
||||
})
|
||||
|
||||
describe('User interactions', () => {
|
||||
it('should call onClick when clicked', () => {
|
||||
const handleClick = vi.fn()
|
||||
render(<TaskStatusIndicator {...defaultProps} onClick={handleClick} />)
|
||||
|
||||
fireEvent.click(document.getElementById('plugin-task-trigger')!)
|
||||
|
||||
expect(handleClick).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ============================================================================
|
||||
// PluginTaskList Component Tests
|
||||
// ============================================================================
|
||||
describe('PluginTaskList Component', () => {
|
||||
const defaultProps = {
|
||||
runningPlugins: [] as PluginStatus[],
|
||||
successPlugins: [] as PluginStatus[],
|
||||
errorPlugins: [] as PluginStatus[],
|
||||
getIconUrl: (icon: string) => `https://example.com/${icon}`,
|
||||
onClearAll: vi.fn(),
|
||||
onClearErrors: vi.fn(),
|
||||
onClearSingle: vi.fn(),
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Rendering', () => {
|
||||
it('should render without crashing with empty lists', () => {
|
||||
render(<PluginTaskList {...defaultProps} />)
|
||||
expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render running plugins section when plugins exist', () => {
|
||||
const runningPlugins = [createMockPlugin({ status: TaskStatus.running })]
|
||||
render(<PluginTaskList {...defaultProps} runningPlugins={runningPlugins} />)
|
||||
|
||||
// Translation key is returned as text in tests, multiple matches expected (title + status)
|
||||
expect(screen.getAllByText(/task\.installing/i).length).toBeGreaterThan(0)
|
||||
// Verify section container is rendered
|
||||
expect(document.querySelector('.max-h-\\[200px\\]')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render success plugins section when plugins exist', () => {
|
||||
const successPlugins = [createMockPlugin({ status: TaskStatus.success })]
|
||||
render(<PluginTaskList {...defaultProps} successPlugins={successPlugins} />)
|
||||
|
||||
// Translation key is returned as text in tests, multiple matches expected
|
||||
expect(screen.getAllByText(/task\.installed/i).length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
it('should render error plugins section when plugins exist', () => {
|
||||
const errorPlugins = [createMockPlugin({ status: TaskStatus.failed, message: 'Error occurred' })]
|
||||
render(<PluginTaskList {...defaultProps} errorPlugins={errorPlugins} />)
|
||||
|
||||
expect(screen.getByText('Error occurred')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render all sections when all types exist', () => {
|
||||
render(
|
||||
<PluginTaskList
|
||||
{...defaultProps}
|
||||
runningPlugins={[createMockPlugin({ status: TaskStatus.running })]}
|
||||
successPlugins={[createMockPlugin({ status: TaskStatus.success })]}
|
||||
errorPlugins={[createMockPlugin({ status: TaskStatus.failed })]}
|
||||
/>,
|
||||
)
|
||||
|
||||
// All sections should be present
|
||||
expect(document.querySelectorAll('.max-h-\\[200px\\]').length).toBe(3)
|
||||
})
|
||||
})
|
||||
|
||||
describe('User interactions', () => {
|
||||
it('should call onClearAll when clear all button is clicked in success section', () => {
|
||||
const handleClearAll = vi.fn()
|
||||
const successPlugins = [createMockPlugin({ status: TaskStatus.success })]
|
||||
|
||||
render(
|
||||
<PluginTaskList
|
||||
{...defaultProps}
|
||||
successPlugins={successPlugins}
|
||||
onClearAll={handleClearAll}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /task\.clearAll/i }))
|
||||
|
||||
expect(handleClearAll).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should call onClearErrors when clear all button is clicked in error section', () => {
|
||||
const handleClearErrors = vi.fn()
|
||||
const errorPlugins = [createMockPlugin({ status: TaskStatus.failed })]
|
||||
|
||||
render(
|
||||
<PluginTaskList
|
||||
{...defaultProps}
|
||||
errorPlugins={errorPlugins}
|
||||
onClearErrors={handleClearErrors}
|
||||
/>,
|
||||
)
|
||||
|
||||
const clearButtons = screen.getAllByRole('button')
|
||||
fireEvent.click(clearButtons.find(btn => btn.textContent?.includes('task.clearAll'))!)
|
||||
|
||||
expect(handleClearErrors).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should call onClearSingle with correct args when individual clear is clicked', () => {
|
||||
const handleClearSingle = vi.fn()
|
||||
const errorPlugin = createMockPlugin({
|
||||
status: TaskStatus.failed,
|
||||
plugin_unique_identifier: 'error-plugin-1',
|
||||
taskId: 'task-123',
|
||||
})
|
||||
|
||||
render(
|
||||
<PluginTaskList
|
||||
{...defaultProps}
|
||||
errorPlugins={[errorPlugin]}
|
||||
onClearSingle={handleClearSingle}
|
||||
/>,
|
||||
)
|
||||
|
||||
// The individual clear button has the text 'operation.clear'
|
||||
fireEvent.click(screen.getByRole('button', { name: /operation\.clear/i }))
|
||||
|
||||
expect(handleClearSingle).toHaveBeenCalledWith('task-123', 'error-plugin-1')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Plugin display', () => {
|
||||
it('should display plugin name from labels', () => {
|
||||
const plugin = createMockPlugin({
|
||||
status: TaskStatus.running,
|
||||
labels: { en_US: 'My Test Plugin' } as Record<string, string>,
|
||||
})
|
||||
|
||||
render(<PluginTaskList {...defaultProps} runningPlugins={[plugin]} />)
|
||||
|
||||
expect(screen.getByText('My Test Plugin')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display plugin message when available', () => {
|
||||
const plugin = createMockPlugin({
|
||||
status: TaskStatus.success,
|
||||
message: 'Successfully installed!',
|
||||
})
|
||||
|
||||
render(<PluginTaskList {...defaultProps} successPlugins={[plugin]} />)
|
||||
|
||||
expect(screen.getByText('Successfully installed!')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display multiple plugins in each section', () => {
|
||||
const runningPlugins = [
|
||||
createMockPlugin({ status: TaskStatus.running, labels: { en_US: 'Plugin A' } as Record<string, string> }),
|
||||
createMockPlugin({ status: TaskStatus.running, labels: { en_US: 'Plugin B' } as Record<string, string> }),
|
||||
]
|
||||
|
||||
render(<PluginTaskList {...defaultProps} runningPlugins={runningPlugins} />)
|
||||
|
||||
expect(screen.getByText('Plugin A')).toBeInTheDocument()
|
||||
expect(screen.getByText('Plugin B')).toBeInTheDocument()
|
||||
// Count is rendered, verify multiple items are in list
|
||||
expect(document.querySelectorAll('.hover\\:bg-state-base-hover').length).toBe(2)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ============================================================================
|
||||
// PluginTasks Main Component Tests
|
||||
// ============================================================================
|
||||
describe('PluginTasks Component', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Rendering', () => {
|
||||
it('should return null when no plugins exist', () => {
|
||||
setupMocks([])
|
||||
|
||||
const { container } = render(<PluginTasks />)
|
||||
|
||||
expect(container.firstChild).toBeNull()
|
||||
})
|
||||
|
||||
it('should render when plugins exist', () => {
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.running })])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Tooltip text (tip memoization)', () => {
|
||||
it('should show installing tip when isInstalling', () => {
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.running })])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
// The component renders with a tooltip, we verify it exists
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show success tip when all succeeded', () => {
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.success })])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show error tip when some failed', () => {
|
||||
setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.success }),
|
||||
createMockPlugin({ status: TaskStatus.failed }),
|
||||
])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Popover interaction', () => {
|
||||
it('should toggle popover when trigger is clicked and status allows', () => {
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.running })])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
// Click to open
|
||||
fireEvent.click(document.getElementById('plugin-task-trigger')!)
|
||||
|
||||
// The popover content should be visible (PluginTaskList)
|
||||
expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not toggle when status does not allow', () => {
|
||||
// Setup with no actionable status (edge case - should not happen in practice)
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.running })])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
// Component should still render
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Clear handlers', () => {
|
||||
it('should clear all completed plugins when onClearAll is called', async () => {
|
||||
const { mockMutateAsync } = setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.success, plugin_unique_identifier: 'success-1' }),
|
||||
createMockPlugin({ status: TaskStatus.failed, plugin_unique_identifier: 'error-1' }),
|
||||
])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
// Open popover
|
||||
fireEvent.click(document.getElementById('plugin-task-trigger')!)
|
||||
|
||||
// Wait for popover content to render
|
||||
await waitFor(() => {
|
||||
expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
// Find and click clear all button
|
||||
const clearButtons = screen.getAllByRole('button')
|
||||
const clearAllButton = clearButtons.find(btn => btn.textContent?.includes('clearAll'))
|
||||
if (clearAllButton)
|
||||
fireEvent.click(clearAllButton)
|
||||
|
||||
// Verify mutateAsync was called for each completed plugin
|
||||
await waitFor(() => {
|
||||
expect(mockMutateAsync).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
it('should clear only error plugins when onClearErrors is called', async () => {
|
||||
const { mockMutateAsync } = setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.failed, plugin_unique_identifier: 'error-1' }),
|
||||
])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
// Open popover
|
||||
fireEvent.click(document.getElementById('plugin-task-trigger')!)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
// Find and click the clear all button in error section
|
||||
const clearButtons = screen.getAllByRole('button')
|
||||
if (clearButtons.length > 0)
|
||||
fireEvent.click(clearButtons[0])
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockMutateAsync).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
it('should clear single plugin when onClearSingle is called', async () => {
|
||||
const { mockMutateAsync } = setupMocks([
|
||||
createMockPlugin({
|
||||
status: TaskStatus.failed,
|
||||
plugin_unique_identifier: 'error-plugin',
|
||||
taskId: 'task-1',
|
||||
}),
|
||||
])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
// Open popover
|
||||
fireEvent.click(document.getElementById('plugin-task-trigger')!)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
// Find and click individual clear button (usually the last one)
|
||||
const clearButtons = screen.getAllByRole('button')
|
||||
const individualClearButton = clearButtons[clearButtons.length - 1]
|
||||
fireEvent.click(individualClearButton)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockMutateAsync).toHaveBeenCalledWith({
|
||||
taskId: 'task-1',
|
||||
pluginId: 'error-plugin',
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Edge cases', () => {
|
||||
it('should handle empty plugin tasks array', () => {
|
||||
setupMocks([])
|
||||
|
||||
const { container } = render(<PluginTasks />)
|
||||
|
||||
expect(container.firstChild).toBeNull()
|
||||
})
|
||||
|
||||
it('should handle single running plugin', () => {
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.running })])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle many plugins', () => {
|
||||
const manyPlugins = Array.from({ length: 10 }, (_, i) =>
|
||||
createMockPlugin({
|
||||
status: i % 3 === 0 ? TaskStatus.running : i % 3 === 1 ? TaskStatus.success : TaskStatus.failed,
|
||||
plugin_unique_identifier: `plugin-${i}`,
|
||||
}))
|
||||
setupMocks(manyPlugins)
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle plugins with empty labels', () => {
|
||||
const plugin = createMockPlugin({
|
||||
status: TaskStatus.running,
|
||||
labels: {} as Record<string, string>,
|
||||
})
|
||||
setupMocks([plugin])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle plugins with long messages', () => {
|
||||
const plugin = createMockPlugin({
|
||||
status: TaskStatus.failed,
|
||||
message: 'A'.repeat(500),
|
||||
})
|
||||
setupMocks([plugin])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
// Open popover
|
||||
fireEvent.click(document.getElementById('plugin-task-trigger')!)
|
||||
|
||||
expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ============================================================================
|
||||
// Integration Tests
|
||||
// ============================================================================
|
||||
describe('PluginTasks Integration', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
it('should show correct UI flow from installing to success', async () => {
|
||||
// Start with installing state
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.running })])
|
||||
|
||||
const { rerender } = render(<PluginTasks />)
|
||||
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
|
||||
// Simulate completion by re-rendering with success
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.success })])
|
||||
rerender(<PluginTasks />)
|
||||
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show correct UI flow from installing to failure', async () => {
|
||||
// Start with installing state
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.running })])
|
||||
|
||||
const { rerender } = render(<PluginTasks />)
|
||||
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
|
||||
// Simulate failure by re-rendering with failed
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.failed, message: 'Network error' })])
|
||||
rerender(<PluginTasks />)
|
||||
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle mixed status during installation', () => {
|
||||
setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.running, plugin_unique_identifier: 'p1' }),
|
||||
createMockPlugin({ status: TaskStatus.success, plugin_unique_identifier: 'p2' }),
|
||||
createMockPlugin({ status: TaskStatus.failed, plugin_unique_identifier: 'p3' }),
|
||||
])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
// Open popover
|
||||
fireEvent.click(document.getElementById('plugin-task-trigger')!)
|
||||
|
||||
// All sections should be visible
|
||||
const sections = document.querySelectorAll('.max-h-\\[200px\\]')
|
||||
expect(sections.length).toBe(3)
|
||||
})
|
||||
})
|
||||
@ -1,33 +1,21 @@
|
||||
import {
|
||||
RiCheckboxCircleFill,
|
||||
RiErrorWarningFill,
|
||||
RiInstallLine,
|
||||
RiLoaderLine,
|
||||
} from '@remixicon/react'
|
||||
import {
|
||||
useCallback,
|
||||
useMemo,
|
||||
useState,
|
||||
} from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import Button from '@/app/components/base/button'
|
||||
import {
|
||||
PortalToFollowElem,
|
||||
PortalToFollowElemContent,
|
||||
PortalToFollowElemTrigger,
|
||||
} from '@/app/components/base/portal-to-follow-elem'
|
||||
import ProgressCircle from '@/app/components/base/progress-bar/progress-circle'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import DownloadingIcon from '@/app/components/header/plugins-nav/downloading-icon'
|
||||
import CardIcon from '@/app/components/plugins/card/base/card-icon'
|
||||
import useGetIcon from '@/app/components/plugins/install-plugin/base/use-get-icon'
|
||||
import { useGetLanguage } from '@/context/i18n'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import PluginTaskList from './components/plugin-task-list'
|
||||
import TaskStatusIndicator from './components/task-status-indicator'
|
||||
import { usePluginTaskStatus } from './hooks'
|
||||
|
||||
const PluginTasks = () => {
  const { t } = useTranslation()
  const language = useGetLanguage()
  const [open, setOpen] = useState(false)
  const {
    errorPlugins,
@@ -46,35 +34,7 @@ const PluginTasks = () => {
  } = usePluginTaskStatus()
  const { getIconUrl } = useGetIcon()

  const handleClearAllWithModal = useCallback(async () => {
    // Clear all completed plugins (success and error) but keep running ones
    const completedPlugins = [...successPlugins, ...errorPlugins]

    // Clear all completed plugins individually
    for (const plugin of completedPlugins)
      await handleClearErrorPlugin(plugin.taskId, plugin.plugin_unique_identifier)

    // Only close modal if no plugins are still installing
    if (runningPluginsLength === 0)
      setOpen(false)
  }, [successPlugins, errorPlugins, handleClearErrorPlugin, runningPluginsLength])

  const handleClearErrorsWithModal = useCallback(async () => {
    // Clear only error plugins, not all plugins
    for (const plugin of errorPlugins)
      await handleClearErrorPlugin(plugin.taskId, plugin.plugin_unique_identifier)
    // Only close modal if no plugins are still installing
    if (runningPluginsLength === 0)
      setOpen(false)
  }, [errorPlugins, handleClearErrorPlugin, runningPluginsLength])

  const handleClearSingleWithModal = useCallback(async (taskId: string, pluginId: string) => {
    await handleClearErrorPlugin(taskId, pluginId)
    // Only close modal if no plugins are still installing
    if (runningPluginsLength === 0)
      setOpen(false)
  }, [handleClearErrorPlugin, runningPluginsLength])

  // Generate tooltip text based on status
  const tip = useMemo(() => {
    if (isInstallingWithError)
      return t('task.installingWithError', { ns: 'plugin', installingLength: runningPluginsLength, successLength: successPluginsLength, errorLength: errorPluginsLength })
@@ -99,8 +59,38 @@ const PluginTasks = () => {
    t,
  ])

  // Show icon if there are any plugin tasks (completed, running, or failed)
  // Only hide when there are absolutely no plugin tasks
  // Generic clear function that handles clearing and modal closing
  const clearPluginsAndClose = useCallback(async (
    plugins: Array<{ taskId: string, plugin_unique_identifier: string }>,
  ) => {
    for (const plugin of plugins)
      await handleClearErrorPlugin(plugin.taskId, plugin.plugin_unique_identifier)
    if (runningPluginsLength === 0)
      setOpen(false)
  }, [handleClearErrorPlugin, runningPluginsLength])

  // Clear handlers using the generic function
  const handleClearAll = useCallback(
    () => clearPluginsAndClose([...successPlugins, ...errorPlugins]),
    [clearPluginsAndClose, successPlugins, errorPlugins],
  )

  const handleClearErrors = useCallback(
    () => clearPluginsAndClose(errorPlugins),
    [clearPluginsAndClose, errorPlugins],
  )

  const handleClearSingle = useCallback(
    (taskId: string, pluginId: string) => clearPluginsAndClose([{ taskId, plugin_unique_identifier: pluginId }]),
    [clearPluginsAndClose],
  )

  const handleTriggerClick = useCallback(() => {
    if (isFailed || isInstalling || isInstallingWithSuccess || isInstallingWithError || isSuccess)
      setOpen(v => !v)
  }, [isFailed, isInstalling, isInstallingWithSuccess, isInstallingWithError, isSuccess])

  // Hide when no plugin tasks
  if (totalPluginsLength === 0)
    return null

@@ -115,206 +105,30 @@ const PluginTasks = () => {
        crossAxis: 79,
      }}
    >
      <PortalToFollowElemTrigger
        onClick={() => {
          if (isFailed || isInstalling || isInstallingWithSuccess || isInstallingWithError || isSuccess)
            setOpen(v => !v)
        }}
      >
        <Tooltip
          popupContent={tip}
          asChild
          offset={8}
        >
          <div
            className={cn(
              'relative flex h-8 w-8 items-center justify-center rounded-lg border-[0.5px] border-components-button-secondary-border bg-components-button-secondary-bg shadow-xs hover:bg-components-button-secondary-bg-hover',
              (isInstallingWithError || isFailed) && 'cursor-pointer border-components-button-destructive-secondary-border-hover bg-state-destructive-hover hover:bg-state-destructive-hover-alt',
              (isInstalling || isInstallingWithSuccess || isSuccess) && 'cursor-pointer hover:bg-components-button-secondary-bg-hover',
            )}
            id="plugin-task-trigger"
          >
            {
              (isInstalling || isInstallingWithError) && (
                <DownloadingIcon />
              )
            }
            {
              !(isInstalling || isInstallingWithError) && (
                <RiInstallLine
                  className={cn(
                    'h-4 w-4 text-components-button-secondary-text',
                    (isInstallingWithError || isFailed) && 'text-components-button-destructive-secondary-text',
                  )}
                />
              )
            }
            <div className="absolute -right-1 -top-1">
              {
                (isInstalling || isInstallingWithSuccess) && (
                  <ProgressCircle
                    percentage={successPluginsLength / totalPluginsLength * 100}
                    circleFillColor="fill-components-progress-brand-bg"
                  />
                )
              }
              {
                isInstallingWithError && (
                  <ProgressCircle
                    percentage={runningPluginsLength / totalPluginsLength * 100}
                    circleFillColor="fill-components-progress-brand-bg"
                    sectorFillColor="fill-components-progress-error-border"
                    circleStrokeColor="stroke-components-progress-error-border"
                  />
                )
              }
              {
                (isSuccess || (successPluginsLength > 0 && runningPluginsLength === 0 && errorPluginsLength === 0)) && (
                  <RiCheckboxCircleFill className="h-3.5 w-3.5 text-text-success" />
                )
              }
              {
                isFailed && (
                  <RiErrorWarningFill className="h-3.5 w-3.5 text-text-destructive" />
                )
              }
            </div>
          </div>
        </Tooltip>
      <PortalToFollowElemTrigger onClick={handleTriggerClick}>
        <TaskStatusIndicator
          tip={tip}
          isInstalling={isInstalling}
          isInstallingWithSuccess={isInstallingWithSuccess}
          isInstallingWithError={isInstallingWithError}
          isSuccess={isSuccess}
          isFailed={isFailed}
          successPluginsLength={successPluginsLength}
          runningPluginsLength={runningPluginsLength}
          totalPluginsLength={totalPluginsLength}
          onClick={() => {}}
        />
      </PortalToFollowElemTrigger>
      <PortalToFollowElemContent className="z-[11]">
        <div className="w-[360px] rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur p-1 shadow-lg">
          {/* Running Plugins */}
          {runningPlugins.length > 0 && (
            <>
              <div className="system-sm-semibold-uppercase sticky top-0 flex h-7 items-center justify-between px-2 pt-1 text-text-secondary">
                {t('task.installing', { ns: 'plugin' })}
                {' '}
                (
                {runningPlugins.length}
                )
              </div>
              <div className="max-h-[200px] overflow-y-auto">
                {runningPlugins.map(runningPlugin => (
                  <div
                    key={runningPlugin.plugin_unique_identifier}
                    className="flex items-center rounded-lg p-2 hover:bg-state-base-hover"
                  >
                    <div className="relative mr-2 flex h-6 w-6 items-center justify-center rounded-md border-[0.5px] border-components-panel-border-subtle bg-background-default-dodge">
                      <RiLoaderLine className="absolute -bottom-0.5 -right-0.5 z-10 h-3 w-3 animate-spin text-text-accent" />
                      <CardIcon
                        size="tiny"
                        src={getIconUrl(runningPlugin.icon)}
                      />
                    </div>
                    <div className="grow">
                      <div className="system-md-regular truncate text-text-secondary">
                        {runningPlugin.labels[language]}
                      </div>
                      <div className="system-xs-regular text-text-tertiary">
                        {t('task.installing', { ns: 'plugin' })}
                      </div>
                    </div>
                  </div>
                ))}
              </div>
            </>
          )}

          {/* Success Plugins */}
          {successPlugins.length > 0 && (
            <>
              <div className="system-sm-semibold-uppercase sticky top-0 flex h-7 items-center justify-between px-2 pt-1 text-text-secondary">
                {t('task.installed', { ns: 'plugin' })}
                {' '}
                (
                {successPlugins.length}
                )
                <Button
                  className="shrink-0"
                  size="small"
                  variant="ghost"
                  onClick={() => handleClearAllWithModal()}
                >
                  {t('task.clearAll', { ns: 'plugin' })}
                </Button>
              </div>
              <div className="max-h-[200px] overflow-y-auto">
                {successPlugins.map(successPlugin => (
                  <div
                    key={successPlugin.plugin_unique_identifier}
                    className="flex items-center rounded-lg p-2 hover:bg-state-base-hover"
                  >
                    <div className="relative mr-2 flex h-6 w-6 items-center justify-center rounded-md border-[0.5px] border-components-panel-border-subtle bg-background-default-dodge">
                      <RiCheckboxCircleFill className="absolute -bottom-0.5 -right-0.5 z-10 h-3 w-3 text-text-success" />
                      <CardIcon
                        size="tiny"
                        src={getIconUrl(successPlugin.icon)}
                      />
                    </div>
                    <div className="grow">
                      <div className="system-md-regular truncate text-text-secondary">
                        {successPlugin.labels[language]}
                      </div>
                      <div className="system-xs-regular text-text-success">
                        {successPlugin.message || t('task.installed', { ns: 'plugin' })}
                      </div>
                    </div>
                  </div>
                ))}
              </div>
            </>
          )}

          {/* Error Plugins */}
          {errorPlugins.length > 0 && (
            <>
              <div className="system-sm-semibold-uppercase sticky top-0 flex h-7 items-center justify-between px-2 pt-1 text-text-secondary">
                {t('task.installError', { ns: 'plugin', errorLength: errorPlugins.length })}
                <Button
                  className="shrink-0"
                  size="small"
                  variant="ghost"
                  onClick={() => handleClearErrorsWithModal()}
                >
                  {t('task.clearAll', { ns: 'plugin' })}
                </Button>
              </div>
              <div className="max-h-[200px] overflow-y-auto">
                {errorPlugins.map(errorPlugin => (
                  <div
                    key={errorPlugin.plugin_unique_identifier}
                    className="flex items-center rounded-lg p-2 hover:bg-state-base-hover"
                  >
                    <div className="relative mr-2 flex h-6 w-6 items-center justify-center rounded-md border-[0.5px] border-components-panel-border-subtle bg-background-default-dodge">
                      <RiErrorWarningFill className="absolute -bottom-0.5 -right-0.5 z-10 h-3 w-3 text-text-destructive" />
                      <CardIcon
                        size="tiny"
                        src={getIconUrl(errorPlugin.icon)}
                      />
                    </div>
                    <div className="grow">
                      <div className="system-md-regular truncate text-text-secondary">
                        {errorPlugin.labels[language]}
                      </div>
                      <div className="system-xs-regular break-all text-text-destructive">
                        {errorPlugin.message}
                      </div>
                    </div>
                    <Button
                      className="shrink-0"
                      size="small"
                      variant="ghost"
                      onClick={() => handleClearSingleWithModal(errorPlugin.taskId, errorPlugin.plugin_unique_identifier)}
                    >
                      {t('operation.clear', { ns: 'common' })}
                    </Button>
                  </div>
                ))}
              </div>
            </>
          )}
        </div>
        <PluginTaskList
          runningPlugins={runningPlugins}
          successPlugins={successPlugins}
          errorPlugins={errorPlugins}
          getIconUrl={getIconUrl}
          onClearAll={handleClearAll}
          onClearErrors={handleClearErrors}
          onClearSingle={handleClearSingle}
        />
      </PortalToFollowElemContent>
    </PortalToFollowElem>
  </div>

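Note: `TaskStatusIndicator` is the new presentational trigger extracted by this change; its own file is not part of this hunk. Inferred purely from the JSX usage above, its props are roughly as follows (a sketch, not the actual type definition):

// Hypothetical props shape, reconstructed from the usage in the diff above.
type TaskStatusIndicatorProps = {
  tip: string
  isInstalling: boolean
  isInstallingWithSuccess: boolean
  isInstallingWithError: boolean
  isSuccess: boolean
  isFailed: boolean
  successPluginsLength: number
  runningPluginsLength: number
  totalPluginsLength: number
  onClick: () => void
}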
@@ -5,11 +5,11 @@ import { useDebounceFn } from 'ahooks'
import { useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import Loading from '@/app/components/base/loading'
import PluginDetailPanel from '@/app/components/plugins/plugin-detail-panel'
import { useGetLanguage } from '@/context/i18n'
import { renderI18nObject } from '@/i18n-config'
import { useInstalledLatestVersion, useInstalledPluginList, useInvalidateInstalledPluginList } from '@/service/use-plugins'
import Loading from '../../base/loading'
import { PluginSource } from '../types'
import { usePluginPageContext } from './context'
import Empty from './empty'
@@ -107,12 +107,17 @@ const PluginsPanel = () => {
        <div className="w-full">
          <List pluginList={filteredList || []} />
        </div>
        {!isLastPage && !isFetching && (
          <Button onClick={loadNextPage}>
            {t('common.loadMore', { ns: 'workflow' })}
          </Button>
        {!isLastPage && (
          <div className="flex justify-center py-4">
            {isFetching
              ? <Loading className="size-8" />
              : (
                <Button onClick={loadNextPage}>
                  {t('common.loadMore', { ns: 'workflow' })}
                </Button>
              )}
          </div>
        )}
        {isFetching && <div className="system-md-semibold text-text-secondary">{t('detail.loading', { ns: 'appLog' })}</div>}
      </div>
    )
    : (

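Note: the load-more change above swaps a bare Button for a centered area that shows a Loading spinner while a page is being fetched. It assumes the installed-plugin list query exposes pagination state roughly like this (names taken from the usage above; the exact hook shape is an assumption):

// Assumed pagination fields consumed by PluginsPanel.
type InstalledPluginListPagination = {
  isLastPage: boolean      // no further pages to request
  isFetching: boolean      // a page request is currently in flight
  loadNextPage: () => void // triggers the next page fetch
}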
@@ -0,0 +1,388 @@
import { renderHook, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
// Import mocks for assertions
import { useAppContext } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'

import { useInvalidateReferenceSettings, useMutationReferenceSettings, useReferenceSettings } from '@/service/use-plugins'
import Toast from '../../base/toast'
import { PermissionType } from '../types'
import useReferenceSetting, { useCanInstallPluginFromMarketplace } from './use-reference-setting'

// Mock dependencies
vi.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string) => key,
  }),
}))

vi.mock('@/context/app-context', () => ({
  useAppContext: vi.fn(),
}))

vi.mock('@/context/global-public-context', () => ({
  useGlobalPublicStore: vi.fn(),
}))

vi.mock('@/service/use-plugins', () => ({
  useReferenceSettings: vi.fn(),
  useMutationReferenceSettings: vi.fn(),
  useInvalidateReferenceSettings: vi.fn(),
}))

vi.mock('../../base/toast', () => ({
  default: {
    notify: vi.fn(),
  },
}))

describe('useReferenceSetting Hook', () => {
  beforeEach(() => {
    vi.clearAllMocks()

    // Default mocks
    vi.mocked(useAppContext).mockReturnValue({
      isCurrentWorkspaceManager: false,
      isCurrentWorkspaceOwner: false,
    } as ReturnType<typeof useAppContext>)

    vi.mocked(useReferenceSettings).mockReturnValue({
      data: {
        permission: {
          install_permission: PermissionType.everyone,
          debug_permission: PermissionType.everyone,
        },
      },
    } as ReturnType<typeof useReferenceSettings>)

    vi.mocked(useMutationReferenceSettings).mockReturnValue({
      mutate: vi.fn(),
      isPending: false,
    } as unknown as ReturnType<typeof useMutationReferenceSettings>)

    vi.mocked(useInvalidateReferenceSettings).mockReturnValue(vi.fn())
  })

  describe('hasPermission logic', () => {
    it('should return false when permission is undefined', () => {
      vi.mocked(useReferenceSettings).mockReturnValue({
        data: {
          permission: {
            install_permission: undefined,
            debug_permission: undefined,
          },
        },
      } as unknown as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canManagement).toBe(false)
      expect(result.current.canDebugger).toBe(false)
    })

    it('should return false when permission is noOne', () => {
      vi.mocked(useReferenceSettings).mockReturnValue({
        data: {
          permission: {
            install_permission: PermissionType.noOne,
            debug_permission: PermissionType.noOne,
          },
        },
      } as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canManagement).toBe(false)
      expect(result.current.canDebugger).toBe(false)
    })

    it('should return true when permission is everyone', () => {
      vi.mocked(useReferenceSettings).mockReturnValue({
        data: {
          permission: {
            install_permission: PermissionType.everyone,
            debug_permission: PermissionType.everyone,
          },
        },
      } as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canManagement).toBe(true)
      expect(result.current.canDebugger).toBe(true)
    })

    it('should return isAdmin when permission is admin and user is manager', () => {
      vi.mocked(useAppContext).mockReturnValue({
        isCurrentWorkspaceManager: true,
        isCurrentWorkspaceOwner: false,
      } as ReturnType<typeof useAppContext>)

      vi.mocked(useReferenceSettings).mockReturnValue({
        data: {
          permission: {
            install_permission: PermissionType.admin,
            debug_permission: PermissionType.admin,
          },
        },
      } as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canManagement).toBe(true)
      expect(result.current.canDebugger).toBe(true)
    })

    it('should return isAdmin when permission is admin and user is owner', () => {
      vi.mocked(useAppContext).mockReturnValue({
        isCurrentWorkspaceManager: false,
        isCurrentWorkspaceOwner: true,
      } as ReturnType<typeof useAppContext>)

      vi.mocked(useReferenceSettings).mockReturnValue({
        data: {
          permission: {
            install_permission: PermissionType.admin,
            debug_permission: PermissionType.admin,
          },
        },
      } as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canManagement).toBe(true)
      expect(result.current.canDebugger).toBe(true)
    })

    it('should return false when permission is admin and user is not admin', () => {
      vi.mocked(useAppContext).mockReturnValue({
        isCurrentWorkspaceManager: false,
        isCurrentWorkspaceOwner: false,
      } as ReturnType<typeof useAppContext>)

      vi.mocked(useReferenceSettings).mockReturnValue({
        data: {
          permission: {
            install_permission: PermissionType.admin,
            debug_permission: PermissionType.admin,
          },
        },
      } as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canManagement).toBe(false)
      expect(result.current.canDebugger).toBe(false)
    })
  })

  describe('canSetPermissions', () => {
    it('should be true when user is workspace manager', () => {
      vi.mocked(useAppContext).mockReturnValue({
        isCurrentWorkspaceManager: true,
        isCurrentWorkspaceOwner: false,
      } as ReturnType<typeof useAppContext>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canSetPermissions).toBe(true)
    })

    it('should be true when user is workspace owner', () => {
      vi.mocked(useAppContext).mockReturnValue({
        isCurrentWorkspaceManager: false,
        isCurrentWorkspaceOwner: true,
      } as ReturnType<typeof useAppContext>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canSetPermissions).toBe(true)
    })

    it('should be false when user is neither manager nor owner', () => {
      vi.mocked(useAppContext).mockReturnValue({
        isCurrentWorkspaceManager: false,
        isCurrentWorkspaceOwner: false,
      } as ReturnType<typeof useAppContext>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canSetPermissions).toBe(false)
    })
  })

  describe('setReferenceSettings callback', () => {
    it('should call invalidateReferenceSettings and show toast on success', async () => {
      const mockInvalidate = vi.fn()
      vi.mocked(useInvalidateReferenceSettings).mockReturnValue(mockInvalidate)

      let onSuccessCallback: (() => void) | undefined
      vi.mocked(useMutationReferenceSettings).mockImplementation((options) => {
        onSuccessCallback = options?.onSuccess as () => void
        return {
          mutate: vi.fn(),
          isPending: false,
        } as unknown as ReturnType<typeof useMutationReferenceSettings>
      })

      renderHook(() => useReferenceSetting())

      // Trigger the onSuccess callback
      if (onSuccessCallback)
        onSuccessCallback()

      await waitFor(() => {
        expect(mockInvalidate).toHaveBeenCalled()
        expect(Toast.notify).toHaveBeenCalledWith({
          type: 'success',
          message: 'api.actionSuccess',
        })
      })
    })
  })

  describe('returned values', () => {
    it('should return referenceSetting data', () => {
      const mockData = {
        permission: {
          install_permission: PermissionType.everyone,
          debug_permission: PermissionType.everyone,
        },
      }
      vi.mocked(useReferenceSettings).mockReturnValue({
        data: mockData,
      } as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.referenceSetting).toEqual(mockData)
    })

    it('should return isUpdatePending from mutation', () => {
      vi.mocked(useMutationReferenceSettings).mockReturnValue({
        mutate: vi.fn(),
        isPending: true,
      } as unknown as ReturnType<typeof useMutationReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.isUpdatePending).toBe(true)
    })

    it('should handle null data', () => {
      vi.mocked(useReferenceSettings).mockReturnValue({
        data: null,
      } as unknown as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canManagement).toBe(false)
      expect(result.current.canDebugger).toBe(false)
    })
  })
})

describe('useCanInstallPluginFromMarketplace Hook', () => {
  beforeEach(() => {
    vi.clearAllMocks()

    vi.mocked(useAppContext).mockReturnValue({
      isCurrentWorkspaceManager: true,
      isCurrentWorkspaceOwner: false,
    } as ReturnType<typeof useAppContext>)

    vi.mocked(useReferenceSettings).mockReturnValue({
      data: {
        permission: {
          install_permission: PermissionType.everyone,
          debug_permission: PermissionType.everyone,
        },
      },
    } as ReturnType<typeof useReferenceSettings>)

    vi.mocked(useMutationReferenceSettings).mockReturnValue({
      mutate: vi.fn(),
      isPending: false,
    } as unknown as ReturnType<typeof useMutationReferenceSettings>)

    vi.mocked(useInvalidateReferenceSettings).mockReturnValue(vi.fn())
  })

  it('should return true when marketplace is enabled and canManagement is true', () => {
    vi.mocked(useGlobalPublicStore).mockImplementation((selector) => {
      const state = {
        systemFeatures: {
          enable_marketplace: true,
        },
      }
      return selector(state as Parameters<typeof selector>[0])
    })

    const { result } = renderHook(() => useCanInstallPluginFromMarketplace())

    expect(result.current.canInstallPluginFromMarketplace).toBe(true)
  })

  it('should return false when marketplace is disabled', () => {
    vi.mocked(useGlobalPublicStore).mockImplementation((selector) => {
      const state = {
        systemFeatures: {
          enable_marketplace: false,
        },
      }
      return selector(state as Parameters<typeof selector>[0])
    })

    const { result } = renderHook(() => useCanInstallPluginFromMarketplace())

    expect(result.current.canInstallPluginFromMarketplace).toBe(false)
  })

  it('should return false when canManagement is false', () => {
    vi.mocked(useGlobalPublicStore).mockImplementation((selector) => {
      const state = {
        systemFeatures: {
          enable_marketplace: true,
        },
      }
      return selector(state as Parameters<typeof selector>[0])
    })

    vi.mocked(useReferenceSettings).mockReturnValue({
      data: {
        permission: {
          install_permission: PermissionType.noOne,
          debug_permission: PermissionType.noOne,
        },
      },
    } as ReturnType<typeof useReferenceSettings>)

    const { result } = renderHook(() => useCanInstallPluginFromMarketplace())

    expect(result.current.canInstallPluginFromMarketplace).toBe(false)
  })

  it('should return false when both marketplace is disabled and canManagement is false', () => {
    vi.mocked(useGlobalPublicStore).mockImplementation((selector) => {
      const state = {
        systemFeatures: {
          enable_marketplace: false,
        },
      }
      return selector(state as Parameters<typeof selector>[0])
    })

    vi.mocked(useReferenceSettings).mockReturnValue({
      data: {
        permission: {
          install_permission: PermissionType.noOne,
          debug_permission: PermissionType.noOne,
        },
      },
    } as ReturnType<typeof useReferenceSettings>)

    const { result } = renderHook(() => useCanInstallPluginFromMarketplace())

    expect(result.current.canInstallPluginFromMarketplace).toBe(false)
  })
})
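Note: for readers of the spec above, the surface of `useReferenceSetting` that these tests exercise is roughly the following (reconstructed from the assertions, not copied from the hook source):

// Hypothetical return shape implied by the tests above.
type UseReferenceSettingResult = {
  referenceSetting?: {
    permission: {
      install_permission?: PermissionType
      debug_permission?: PermissionType
    }
  }
  canManagement: boolean     // everyone, or admin while the current user is workspace manager/owner
  canDebugger: boolean       // same rule, evaluated against debug_permission
  canSetPermissions: boolean // workspace manager or owner
  isUpdatePending: boolean   // mirrors the mutation's isPending flag
}
// useCanInstallPluginFromMarketplace() additionally requires systemFeatures.enable_marketplace to be true.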
487 web/app/components/plugins/plugin-page/use-uploader.spec.ts Normal file

@@ -0,0 +1,487 @@
import type { RefObject } from 'react'
import { act, renderHook } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { useUploader } from './use-uploader'

describe('useUploader Hook', () => {
  let mockContainerRef: RefObject<HTMLDivElement | null>
  let mockOnFileChange: (file: File | null) => void
  let mockContainer: HTMLDivElement

  beforeEach(() => {
    vi.clearAllMocks()

    mockContainer = document.createElement('div')
    document.body.appendChild(mockContainer)

    mockContainerRef = { current: mockContainer }
    mockOnFileChange = vi.fn()
  })

  afterEach(() => {
    if (mockContainer.parentNode)
      document.body.removeChild(mockContainer)
  })

  describe('Initial State', () => {
    it('should return initial state with dragging false', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      expect(result.current.dragging).toBe(false)
      expect(result.current.fileUploader.current).toBeNull()
      expect(result.current.fileChangeHandle).not.toBeNull()
      expect(result.current.removeFile).not.toBeNull()
    })

    it('should return null handlers when disabled', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
          enabled: false,
        }),
      )

      expect(result.current.dragging).toBe(false)
      expect(result.current.fileChangeHandle).toBeNull()
      expect(result.current.removeFile).toBeNull()
    })
  })

  describe('Drag Events', () => {
    it('should handle dragenter and set dragging to true', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
      Object.defineProperty(dragEnterEvent, 'dataTransfer', {
        value: { types: ['Files'] },
      })

      act(() => {
        mockContainer.dispatchEvent(dragEnterEvent)
      })

      expect(result.current.dragging).toBe(true)
    })

    it('should not set dragging when dragenter without Files type', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
      Object.defineProperty(dragEnterEvent, 'dataTransfer', {
        value: { types: ['text/plain'] },
      })

      act(() => {
        mockContainer.dispatchEvent(dragEnterEvent)
      })

      expect(result.current.dragging).toBe(false)
    })

    it('should handle dragover event', () => {
      renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      const dragOverEvent = new Event('dragover', { bubbles: true, cancelable: true })

      act(() => {
        mockContainer.dispatchEvent(dragOverEvent)
      })

      // dragover should prevent default and stop propagation
      expect(mockContainer).toBeInTheDocument()
    })

    it('should handle dragleave when relatedTarget is null', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      // First set dragging to true
      const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
      Object.defineProperty(dragEnterEvent, 'dataTransfer', {
        value: { types: ['Files'] },
      })
      act(() => {
        mockContainer.dispatchEvent(dragEnterEvent)
      })
      expect(result.current.dragging).toBe(true)

      // Then trigger dragleave with null relatedTarget
      const dragLeaveEvent = new Event('dragleave', { bubbles: true, cancelable: true })
      Object.defineProperty(dragLeaveEvent, 'relatedTarget', {
        value: null,
      })

      act(() => {
        mockContainer.dispatchEvent(dragLeaveEvent)
      })

      expect(result.current.dragging).toBe(false)
    })

    it('should handle dragleave when relatedTarget is outside container', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      // First set dragging to true
      const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
      Object.defineProperty(dragEnterEvent, 'dataTransfer', {
        value: { types: ['Files'] },
      })
      act(() => {
        mockContainer.dispatchEvent(dragEnterEvent)
      })
      expect(result.current.dragging).toBe(true)

      // Create element outside container
      const outsideElement = document.createElement('div')
      document.body.appendChild(outsideElement)

      // Trigger dragleave with relatedTarget outside container
      const dragLeaveEvent = new Event('dragleave', { bubbles: true, cancelable: true })
      Object.defineProperty(dragLeaveEvent, 'relatedTarget', {
        value: outsideElement,
      })

      act(() => {
        mockContainer.dispatchEvent(dragLeaveEvent)
      })

      expect(result.current.dragging).toBe(false)
      document.body.removeChild(outsideElement)
    })

    it('should not set dragging to false when relatedTarget is inside container', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      // First set dragging to true
      const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
      Object.defineProperty(dragEnterEvent, 'dataTransfer', {
        value: { types: ['Files'] },
      })
      act(() => {
        mockContainer.dispatchEvent(dragEnterEvent)
      })
      expect(result.current.dragging).toBe(true)

      // Create element inside container
      const insideElement = document.createElement('div')
      mockContainer.appendChild(insideElement)

      // Trigger dragleave with relatedTarget inside container
      const dragLeaveEvent = new Event('dragleave', { bubbles: true, cancelable: true })
      Object.defineProperty(dragLeaveEvent, 'relatedTarget', {
        value: insideElement,
      })

      act(() => {
        mockContainer.dispatchEvent(dragLeaveEvent)
      })

      // Should still be dragging since relatedTarget is inside container
      expect(result.current.dragging).toBe(true)
    })
  })

  describe('Drop Events', () => {
    it('should handle drop event with files', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      // First set dragging to true
      const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
      Object.defineProperty(dragEnterEvent, 'dataTransfer', {
        value: { types: ['Files'] },
      })
      act(() => {
        mockContainer.dispatchEvent(dragEnterEvent)
      })

      // Create mock file
      const file = new File(['content'], 'test.difypkg', { type: 'application/octet-stream' })

      // Trigger drop event
      const dropEvent = new Event('drop', { bubbles: true, cancelable: true })
      Object.defineProperty(dropEvent, 'dataTransfer', {
        value: { files: [file] },
      })

      act(() => {
        mockContainer.dispatchEvent(dropEvent)
      })

      expect(result.current.dragging).toBe(false)
      expect(mockOnFileChange).toHaveBeenCalledWith(file)
    })

    it('should not call onFileChange when drop has no dataTransfer', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      // Set dragging first
      const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
      Object.defineProperty(dragEnterEvent, 'dataTransfer', {
        value: { types: ['Files'] },
      })
      act(() => {
        mockContainer.dispatchEvent(dragEnterEvent)
      })

      // Drop without dataTransfer
      const dropEvent = new Event('drop', { bubbles: true, cancelable: true })
      // No dataTransfer property

      act(() => {
        mockContainer.dispatchEvent(dropEvent)
      })

      expect(result.current.dragging).toBe(false)
      expect(mockOnFileChange).not.toHaveBeenCalled()
    })

    it('should not call onFileChange when drop has empty files array', () => {
      renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      const dropEvent = new Event('drop', { bubbles: true, cancelable: true })
      Object.defineProperty(dropEvent, 'dataTransfer', {
        value: { files: [] },
      })

      act(() => {
        mockContainer.dispatchEvent(dropEvent)
      })

      expect(mockOnFileChange).not.toHaveBeenCalled()
    })
  })

  describe('File Change Handler', () => {
    it('should call onFileChange with file from input', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      const file = new File(['content'], 'test.difypkg', { type: 'application/octet-stream' })
      const mockEvent = {
        target: {
          files: [file],
        },
      } as unknown as React.ChangeEvent<HTMLInputElement>

      act(() => {
        result.current.fileChangeHandle?.(mockEvent)
      })

      expect(mockOnFileChange).toHaveBeenCalledWith(file)
    })

    it('should call onFileChange with null when no files', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      const mockEvent = {
        target: {
          files: null,
        },
      } as unknown as React.ChangeEvent<HTMLInputElement>

      act(() => {
        result.current.fileChangeHandle?.(mockEvent)
      })

      expect(mockOnFileChange).toHaveBeenCalledWith(null)
    })
  })

  describe('Remove File', () => {
    it('should call onFileChange with null', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      act(() => {
        result.current.removeFile?.()
      })

      expect(mockOnFileChange).toHaveBeenCalledWith(null)
    })

    it('should handle removeFile when fileUploader has a value', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      // Create a mock input element with value property
      const mockInput = {
        value: 'test.difypkg',
      }

      // Override the fileUploader ref
      Object.defineProperty(result.current.fileUploader, 'current', {
        value: mockInput,
        writable: true,
      })

      act(() => {
        result.current.removeFile?.()
      })

      expect(mockOnFileChange).toHaveBeenCalledWith(null)
      expect(mockInput.value).toBe('')
    })

    it('should handle removeFile when fileUploader is null', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      // fileUploader.current is null by default
      act(() => {
        result.current.removeFile?.()
      })

      expect(mockOnFileChange).toHaveBeenCalledWith(null)
    })
  })

  describe('Enabled/Disabled State', () => {
    it('should not add event listeners when disabled', () => {
      const addEventListenerSpy = vi.spyOn(mockContainer, 'addEventListener')

      renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
          enabled: false,
        }),
      )

      expect(addEventListenerSpy).not.toHaveBeenCalled()
    })

    it('should add event listeners when enabled', () => {
      const addEventListenerSpy = vi.spyOn(mockContainer, 'addEventListener')

      renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
          enabled: true,
        }),
      )

      expect(addEventListenerSpy).toHaveBeenCalledWith('dragenter', expect.any(Function))
      expect(addEventListenerSpy).toHaveBeenCalledWith('dragover', expect.any(Function))
      expect(addEventListenerSpy).toHaveBeenCalledWith('dragleave', expect.any(Function))
      expect(addEventListenerSpy).toHaveBeenCalledWith('drop', expect.any(Function))
    })

    it('should remove event listeners on cleanup', () => {
      const removeEventListenerSpy = vi.spyOn(mockContainer, 'removeEventListener')

      const { unmount } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
          enabled: true,
        }),
      )

      unmount()

      expect(removeEventListenerSpy).toHaveBeenCalledWith('dragenter', expect.any(Function))
      expect(removeEventListenerSpy).toHaveBeenCalledWith('dragover', expect.any(Function))
      expect(removeEventListenerSpy).toHaveBeenCalledWith('dragleave', expect.any(Function))
      expect(removeEventListenerSpy).toHaveBeenCalledWith('drop', expect.any(Function))
    })

    it('should return false for dragging when disabled', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
          enabled: false,
        }),
      )

      expect(result.current.dragging).toBe(false)
    })
  })

  describe('Container Ref Edge Cases', () => {
    it('should handle null containerRef.current', () => {
      const nullRef: RefObject<HTMLDivElement | null> = { current: null }

      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: nullRef,
        }),
      )

      expect(result.current.dragging).toBe(false)
    })
  })
})
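Note: a compact sketch of the `useUploader` contract that the spec above pins down, derived from its assertions (the real implementation lives in ./use-uploader and may differ in detail):

// Hypothetical signature implied by the tests above.
type UseUploaderProps = {
  onFileChange: (file: File | null) => void
  containerRef: RefObject<HTMLDivElement | null>
  enabled?: boolean // when false, no drag/drop listeners are attached and the handlers below are null
}

type UseUploaderReturn = {
  dragging: boolean
  fileUploader: RefObject<HTMLInputElement | null>
  fileChangeHandle: ((e: React.ChangeEvent<HTMLInputElement>) => void) | null
  removeFile: (() => void) | null
}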
@@ -134,13 +134,6 @@ describe('BUILTIN_TOOLS_ARRAY', () => {
// Store Tests
// ================================
describe('useReadmePanelStore', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    // Reset store state before each test
    const { setCurrentPluginDetail } = useReadmePanelStore.getState()
    setCurrentPluginDetail()
  })

  describe('Initial State', () => {
    it('should have undefined currentPluginDetail initially', () => {
      const { currentPluginDetail } = useReadmePanelStore.getState()
@@ -228,13 +221,6 @@ describe('useReadmePanelStore', () => {
// ReadmeEntrance Component Tests
// ================================
describe('ReadmeEntrance', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    // Reset store state
    const { setCurrentPluginDetail } = useReadmePanelStore.getState()
    setCurrentPluginDetail()
  })

  // ================================
  // Rendering Tests
  // ================================
@@ -417,11 +403,6 @@ describe('ReadmeEntrance', () => {
// ================================
describe('ReadmePanel', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    // Reset store state
    const { setCurrentPluginDetail } = useReadmePanelStore.getState()
    setCurrentPluginDetail()
    // Reset mock
    mockUsePluginReadme.mockReturnValue({
      data: null,
      isLoading: false,

Some files were not shown because too many files have changed in this diff.