Mirror of https://github.com/langgenius/dify.git (synced 2026-02-07 03:56:06 +08:00)

Compare commits: main...2-6-type-s (21 commits)
Commits: ff83828263, 82f472d0d0, 149a5231f0, 6e4d082cbf, 32f265b9f0, c183d653a1, c71e89f83a, d07da73fab, b58ed66d1c, dfa0062e97, feb4ab8eb3, 72ad187af2, a4321e24a1, 23b6f33bd3, f95322ef9c, cef8058a8f, 5a1a3bb859, 3371fa7861, 97ecde5389, 5b22d5026b, 7d34faaf74
.github/workflows/deploy-hitl.yml (vendored, 4 changed lines): the deploy trigger now follows the `feat/hitl` branch.

```diff
@@ -4,7 +4,7 @@ on:
   workflow_run:
     workflows: ["Build and Push API & Web"]
     branches:
-      - "build/feat/hitl"
+      - "feat/hitl"
     types:
       - completed
@@ -13,7 +13,7 @@ jobs:
     runs-on: ubuntu-latest
    if: |
      github.event.workflow_run.conclusion == 'success' &&
-      github.event.workflow_run.head_branch == 'build/feat/hitl'
+      github.event.workflow_run.head_branch == 'feat/hitl'
    steps:
      - name: Deploy to server
        uses: appleboy/ssh-action@v1
```
Developer documentation: the `api_token` queue is dropped from the example Celery worker command.

````diff
@@ -122,7 +122,7 @@ These commands assume you start from the repository root.

 ```bash
 cd api
-uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention
+uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention
 ```

 1. Optional: start Celery Beat (scheduled tasks, in a new terminal).
````
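For reference, Beat is typically started alongside the worker; a minimal invocation, assuming the same `app.celery` entry point used by the worker command above, would be:

```bash
cd api
uv run celery -A app.celery beat --loglevel INFO
```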
Configuration: the API-token batch-update settings are removed from `CeleryScheduleTasksConfig`.

```diff
@@ -1155,16 +1155,6 @@ class CeleryScheduleTasksConfig(BaseSettings):
         default=0,
     )

-    # API token last_used_at batch update
-    ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK: bool = Field(
-        description="Enable periodic batch update of API token last_used_at timestamps",
-        default=True,
-    )
-    API_TOKEN_LAST_USED_UPDATE_INTERVAL: int = Field(
-        description="Interval in minutes for batch updating API token last_used_at (default 30)",
-        default=30,
-    )
-
     # Trigger provider refresh (simple version)
     ENABLE_TRIGGER_PROVIDER_REFRESH_TASK: bool = Field(
         description="Enable trigger provider refresh poller",
```
Console API key resource: cache invalidation is removed from key deletion.

```diff
@@ -10,7 +10,6 @@ from libs.helper import TimestampField
 from libs.login import current_account_with_tenant, login_required
 from models.dataset import Dataset
 from models.model import ApiToken, App
-from services.api_token_service import ApiTokenCache

 from . import console_ns
 from .wraps import account_initialization_required, edit_permission_required, setup_required
@@ -132,11 +131,6 @@ class BaseApiKeyResource(Resource):
         if key is None:
             flask_restx.abort(HTTPStatus.NOT_FOUND, message="API key not found")

-        # Invalidate cache before deleting from database
-        # Type assertion: key is guaranteed to be non-None here because abort() raises
-        assert key is not None  # nosec - for type checker only
-        ApiTokenCache.delete(key.token, key.type)
-
         db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
         db.session.commit()
```
Console datasets resource: the same cache invalidation is removed from `DatasetApiDeleteApi`.

```diff
@@ -55,7 +55,6 @@ from libs.login import current_account_with_tenant, login_required
 from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile
 from models.dataset import DatasetPermissionEnum
 from models.provider_ids import ModelProviderID
-from services.api_token_service import ApiTokenCache
 from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService

 # Register models for flask_restx to avoid dict type issues in Swagger
@@ -821,11 +820,6 @@ class DatasetApiDeleteApi(Resource):
         if key is None:
             console_ns.abort(404, message="API key not found")

-        # Invalidate cache before deleting from database
-        # Type assertion: key is guaranteed to be non-None here because abort() raises
-        assert key is not None  # nosec - for type checker only
-        ApiTokenCache.delete(key.token, key.type)
-
         db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
         db.session.commit()
```
Tag, dataset, document, metadata, segment, and child-chunk endpoints: every `return "", 204` is changed to `return 204`.

```diff
@@ -120,7 +120,7 @@ class TagUpdateDeleteApi(Resource):

         TagService.delete_tag(tag_id)

-        return "", 204
+        return 204


 @console_ns.route("/tag-bindings/create")
@@ -396,7 +396,7 @@ class DatasetApi(DatasetApiResource):
         try:
             if DatasetService.delete_dataset(dataset_id_str, current_user):
                 DatasetPermissionService.clear_partial_member_list(dataset_id_str)
-                return "", 204
+                return 204
             else:
                 raise NotFound("Dataset not found.")
         except services.errors.dataset.DatasetInUseError:
@@ -557,7 +557,7 @@ class DatasetTagsApi(DatasetApiResource):
         payload = TagDeletePayload.model_validate(service_api_ns.payload or {})
         TagService.delete_tag(payload.tag_id)

-        return "", 204
+        return 204


 @service_api_ns.route("/datasets/tags/binding")
@@ -581,7 +581,7 @@ class DatasetTagBindingApi(DatasetApiResource):
         payload = TagBindingPayload.model_validate(service_api_ns.payload or {})
         TagService.save_tag_binding({"tag_ids": payload.tag_ids, "target_id": payload.target_id, "type": "knowledge"})

-        return "", 204
+        return 204


 @service_api_ns.route("/datasets/tags/unbinding")
@@ -605,7 +605,7 @@ class DatasetTagUnbindingApi(DatasetApiResource):
         payload = TagUnbindingPayload.model_validate(service_api_ns.payload or {})
         TagService.delete_tag_binding({"tag_id": payload.tag_id, "target_id": payload.target_id, "type": "knowledge"})

-        return "", 204
+        return 204


 @service_api_ns.route("/datasets/<uuid:dataset_id>/tags")
@@ -746,4 +746,4 @@ class DocumentApi(DatasetApiResource):
         except services.errors.document.DocumentIndexingError:
             raise DocumentIndexingError("Cannot delete document during indexing.")

-        return "", 204
+        return 204
@@ -128,7 +128,7 @@ class DatasetMetadataServiceApi(DatasetApiResource):
         DatasetService.check_dataset_permission(dataset, current_user)

         MetadataService.delete_metadata(dataset_id_str, metadata_id_str)
-        return "", 204
+        return 204


 @service_api_ns.route("/datasets/<uuid:dataset_id>/metadata/built-in")
@@ -233,7 +233,7 @@ class DatasetSegmentApi(DatasetApiResource):
         if not segment:
             raise NotFound("Segment not found.")
         SegmentService.delete_segment(segment, document, dataset)
-        return "", 204
+        return 204

     @service_api_ns.expect(service_api_ns.models[SegmentUpdatePayload.__name__])
     @service_api_ns.doc("update_segment")
@@ -499,7 +499,7 @@ class DatasetChildChunkApi(DatasetApiResource):
         except ChildChunkDeleteIndexServiceError as e:
             raise ChildChunkDeleteIndexError(str(e))

-        return "", 204
+        return 204

     @service_api_ns.expect(service_api_ns.models[ChildChunkUpdatePayload.__name__])
     @service_api_ns.doc("update_child_chunk")
```
Service API wraps: cached token validation is replaced with a direct database lookup that throttles `last_used_at` writes to at most once per minute.

```diff
@@ -1,24 +1,27 @@
 import logging
 import time
 from collections.abc import Callable
 from datetime import timedelta
 from enum import StrEnum, auto
 from functools import wraps
-from typing import Concatenate, ParamSpec, TypeVar, cast
+from typing import Concatenate, ParamSpec, TypeVar

 from flask import current_app, request
 from flask_login import user_logged_in
 from flask_restx import Resource
 from pydantic import BaseModel
 from sqlalchemy import select, update
 from sqlalchemy.orm import Session
 from werkzeug.exceptions import Forbidden, NotFound, Unauthorized

 from enums.cloud_plan import CloudPlan
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
 from libs.datetime_utils import naive_utc_now
 from libs.login import current_user
 from models import Account, Tenant, TenantAccountJoin, TenantStatus
 from models.dataset import Dataset, RateLimitLog
 from models.model import ApiToken, App
-from services.api_token_service import ApiTokenCache, fetch_token_with_single_flight, record_token_usage
 from services.end_user_service import EndUserService
 from services.feature_service import FeatureService
@@ -293,14 +296,7 @@ def validate_dataset_token(view: Callable[Concatenate[T, P], R] | None = None):

 def validate_and_get_api_token(scope: str | None = None):
     """
-    Validate and get API token with Redis caching.
-
-    This function uses a two-tier approach:
-    1. First checks Redis cache for the token
-    2. If not cached, queries database and caches the result
-
-    The last_used_at field is updated asynchronously via Celery task
-    to avoid blocking the request.
+    Validate and get API token.
     """
     auth_header = request.headers.get("Authorization")
     if auth_header is None or " " not in auth_header:
@@ -312,18 +308,29 @@ def validate_and_get_api_token(scope: str | None = None):
     if auth_scheme != "bearer":
         raise Unauthorized("Authorization scheme must be 'Bearer'")

-    # Try to get token from cache first
-    # Returns a CachedApiToken (plain Python object), not a SQLAlchemy model
-    cached_token = ApiTokenCache.get(auth_token, scope)
-    if cached_token is not None:
-        logger.debug("Token validation served from cache for scope: %s", scope)
-        # Record usage in Redis for later batch update (no Celery task per request)
-        record_token_usage(auth_token, scope)
-        return cast(ApiToken, cached_token)
-
-    # Cache miss - use Redis lock for single-flight mode
-    # This ensures only one request queries DB for the same token concurrently
-    return fetch_token_with_single_flight(auth_token, scope)
+    current_time = naive_utc_now()
+    cutoff_time = current_time - timedelta(minutes=1)
+    with Session(db.engine, expire_on_commit=False) as session:
+        update_stmt = (
+            update(ApiToken)
+            .where(
+                ApiToken.token == auth_token,
+                (ApiToken.last_used_at.is_(None) | (ApiToken.last_used_at < cutoff_time)),
+                ApiToken.type == scope,
+            )
+            .values(last_used_at=current_time)
+        )
+        stmt = select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
+        result = session.execute(update_stmt)
+        api_token = session.scalar(stmt)
+
+        if hasattr(result, "rowcount") and result.rowcount > 0:
+            session.commit()
+
+        if not api_token:
+            raise Unauthorized("Access token is invalid")
+
+        return api_token


 class DatasetApiResource(Resource):
```
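As context for the validation code above: service API calls carry the token in an `Authorization: Bearer` header, which is exactly what `validate_and_get_api_token` parses. A hypothetical request (the host and path here are illustrative, not from the diff):

```bash
curl -H "Authorization: Bearer <api-token>" https://dify.example.com/v1/datasets
```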
Docker entrypoint: the `api_token` queue is removed from both default queue lists.

```diff
@@ -35,10 +35,10 @@ if [[ "${MODE}" == "worker" ]]; then
   if [[ -z "${CELERY_QUEUES}" ]]; then
     if [[ "${EDITION}" == "CLOUD" ]]; then
       # Cloud edition: separate queues for dataset and trigger tasks
-      DEFAULT_QUEUES="api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
+      DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
     else
       # Community edition (SELF_HOSTED): dataset, pipeline and workflow have separate queues
-      DEFAULT_QUEUES="api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
+      DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
     fi
   else
     DEFAULT_QUEUES="${CELERY_QUEUES}"
```
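Note that the defaults only apply when `CELERY_QUEUES` is unset, so a deployment can still pin its own queue list. A hypothetical override (the entrypoint path is illustrative):

```bash
# Illustrative: run a community-edition worker that only consumes two queues.
CELERY_QUEUES="mail,dataset" EDITION="SELF_HOSTED" MODE="worker" ./entrypoint.sh
```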
Celery extension: the beat-schedule entry for the batch update task is removed.

```diff
@@ -184,14 +184,6 @@ def init_app(app: DifyApp) -> Celery:
             "task": "schedule.trigger_provider_refresh_task.trigger_provider_refresh",
             "schedule": timedelta(minutes=dify_config.TRIGGER_PROVIDER_REFRESH_INTERVAL),
         }

-    if dify_config.ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK:
-        imports.append("schedule.update_api_token_last_used_task")
-        beat_schedule["batch_update_api_token_last_used"] = {
-            "task": "schedule.update_api_token_last_used_task.batch_update_api_token_last_used",
-            "schedule": timedelta(minutes=dify_config.API_TOKEN_LAST_USED_UPDATE_INTERVAL),
-        }
-
     celery_app.conf.update(beat_schedule=beat_schedule, imports=imports)

     return celery_app
```
Deleted file (114 lines): `schedule/update_api_token_last_used_task.py`, the Celery Beat task that flushed recorded token usage back to the database.

```python
"""
Scheduled task to batch-update API token last_used_at timestamps.

Instead of updating the database on every request, token usage is recorded
in Redis as lightweight SET keys (api_token_active:{scope}:{token}).
This task runs periodically (default every 30 minutes) to flush those
records into the database in a single batch operation.
"""

import logging
import time
from datetime import datetime

import click
from sqlalchemy import update
from sqlalchemy.orm import Session

import app
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.model import ApiToken
from services.api_token_service import ACTIVE_TOKEN_KEY_PREFIX

logger = logging.getLogger(__name__)


@app.celery.task(queue="api_token")
def batch_update_api_token_last_used():
    """
    Batch update last_used_at for all recently active API tokens.

    Scans Redis for api_token_active:* keys, parses the token and scope
    from each key, and performs a batch database update.
    """
    click.echo(click.style("batch_update_api_token_last_used: start.", fg="green"))
    start_at = time.perf_counter()

    updated_count = 0
    scanned_count = 0

    try:
        # Collect all active token keys and their values (the actual usage timestamps)
        token_entries: list[tuple[str, str | None, datetime]] = []  # (token, scope, usage_time)
        keys_to_delete: list[str | bytes] = []

        for key in redis_client.scan_iter(match=f"{ACTIVE_TOKEN_KEY_PREFIX}*", count=200):
            if isinstance(key, bytes):
                key = key.decode("utf-8")
            scanned_count += 1

            # Read the value (ISO timestamp recorded at actual request time)
            value = redis_client.get(key)
            if not value:
                keys_to_delete.append(key)
                continue

            if isinstance(value, bytes):
                value = value.decode("utf-8")

            try:
                usage_time = datetime.fromisoformat(value)
            except (ValueError, TypeError):
                logger.warning("Invalid timestamp in key %s: %s", key, value)
                keys_to_delete.append(key)
                continue

            # Parse token info from key: api_token_active:{scope}:{token}
            suffix = key[len(ACTIVE_TOKEN_KEY_PREFIX):]
            parts = suffix.split(":", 1)
            if len(parts) == 2:
                scope_str, token = parts
                scope = None if scope_str == "None" else scope_str
                token_entries.append((token, scope, usage_time))
            keys_to_delete.append(key)

        if not token_entries:
            click.echo(click.style("batch_update_api_token_last_used: no active tokens found.", fg="yellow"))
            # Still clean up any invalid keys
            if keys_to_delete:
                redis_client.delete(*keys_to_delete)
            return

        # Update each token in its own short transaction to avoid long transactions
        for token, scope, usage_time in token_entries:
            with Session(db.engine, expire_on_commit=False) as session, session.begin():
                stmt = (
                    update(ApiToken)
                    .where(
                        ApiToken.token == token,
                        ApiToken.type == scope,
                        (ApiToken.last_used_at.is_(None) | (ApiToken.last_used_at < usage_time)),
                    )
                    .values(last_used_at=usage_time)
                )
                result = session.execute(stmt)
                rowcount = getattr(result, "rowcount", 0)
                if rowcount > 0:
                    updated_count += 1

        # Delete processed keys from Redis
        if keys_to_delete:
            redis_client.delete(*keys_to_delete)

    except Exception:
        logger.exception("batch_update_api_token_last_used failed")

    elapsed = time.perf_counter() - start_at
    click.echo(
        click.style(
            f"batch_update_api_token_last_used: done. "
            f"scanned={scanned_count}, updated={updated_count}, elapsed={elapsed:.2f}s",
            fg="green",
        )
    )
```
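The key format this task parses is worth pinning down: because `split(":", 1)` consumes only the first separator, tokens that themselves contain colons survive the round trip. A self-contained sketch of that round trip (standalone, not part of the diff):

```python
ACTIVE_TOKEN_KEY_PREFIX = "api_token_active:"

def make_active_key(token: str, scope: str | None) -> str:
    # None renders as the literal string "None", matching the deleted code.
    return f"{ACTIVE_TOKEN_KEY_PREFIX}{scope}:{token}"

def parse_active_key(key: str) -> tuple[str, str | None]:
    suffix = key[len(ACTIVE_TOKEN_KEY_PREFIX):]
    scope_str, token = suffix.split(":", 1)  # only the first ":" separates scope
    return token, None if scope_str == "None" else scope_str

assert parse_active_key(make_active_key("abc:def", "app")) == ("abc:def", "app")
assert parse_active_key(make_active_key("xyz", None)) == ("xyz", None)
```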
Deleted file (330 lines): `services/api_token_service.py`, the token cache service itself.

```python
"""
API Token Service

Handles all API token caching, validation, and usage recording.
Includes Redis cache operations, database queries, and single-flight concurrency control.
"""

import logging
from datetime import datetime
from typing import Any

from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import Unauthorized

from extensions.ext_database import db
from extensions.ext_redis import redis_client, redis_fallback
from libs.datetime_utils import naive_utc_now
from models.model import ApiToken

logger = logging.getLogger(__name__)


# ---------------------------------------------------------------------
# Pydantic DTO
# ---------------------------------------------------------------------


class CachedApiToken(BaseModel):
    """
    Pydantic model for cached API token data.

    This is NOT a SQLAlchemy model instance, but a plain Pydantic model
    that mimics the ApiToken model interface for read-only access.
    """

    id: str
    app_id: str | None
    tenant_id: str | None
    type: str
    token: str
    last_used_at: datetime | None
    created_at: datetime | None

    def __repr__(self) -> str:
        return f"<CachedApiToken id={self.id} type={self.type}>"


# ---------------------------------------------------------------------
# Cache configuration
# ---------------------------------------------------------------------

CACHE_KEY_PREFIX = "api_token"
CACHE_TTL_SECONDS = 600  # 10 minutes
CACHE_NULL_TTL_SECONDS = 60  # 1 minute for non-existent tokens
ACTIVE_TOKEN_KEY_PREFIX = "api_token_active:"


# ---------------------------------------------------------------------
# Cache class
# ---------------------------------------------------------------------


class ApiTokenCache:
    """
    Redis cache wrapper for API tokens.
    Handles serialization, deserialization, and cache invalidation.
    """

    @staticmethod
    def make_active_key(token: str, scope: str | None = None) -> str:
        """Generate Redis key for recording token usage."""
        return f"{ACTIVE_TOKEN_KEY_PREFIX}{scope}:{token}"

    @staticmethod
    def _make_tenant_index_key(tenant_id: str) -> str:
        """Generate Redis key for tenant token index."""
        return f"tenant_tokens:{tenant_id}"

    @staticmethod
    def _make_cache_key(token: str, scope: str | None = None) -> str:
        """Generate cache key for the given token and scope."""
        scope_str = scope or "any"
        return f"{CACHE_KEY_PREFIX}:{scope_str}:{token}"

    @staticmethod
    def _serialize_token(api_token: Any) -> bytes:
        """Serialize ApiToken object to JSON bytes."""
        if isinstance(api_token, CachedApiToken):
            return api_token.model_dump_json().encode("utf-8")

        cached = CachedApiToken(
            id=str(api_token.id),
            app_id=str(api_token.app_id) if api_token.app_id else None,
            tenant_id=str(api_token.tenant_id) if api_token.tenant_id else None,
            type=api_token.type,
            token=api_token.token,
            last_used_at=api_token.last_used_at,
            created_at=api_token.created_at,
        )
        return cached.model_dump_json().encode("utf-8")

    @staticmethod
    def _deserialize_token(cached_data: bytes | str) -> Any:
        """Deserialize JSON bytes/string back to a CachedApiToken Pydantic model."""
        if cached_data in {b"null", "null"}:
            return None

        try:
            if isinstance(cached_data, bytes):
                cached_data = cached_data.decode("utf-8")
            return CachedApiToken.model_validate_json(cached_data)
        except Exception as e:
            logger.warning("Failed to deserialize token from cache: %s", e)
            return None

    @staticmethod
    @redis_fallback(default_return=None)
    def get(token: str, scope: str | None) -> Any | None:
        """Get API token from cache."""
        cache_key = ApiTokenCache._make_cache_key(token, scope)
        cached_data = redis_client.get(cache_key)

        if cached_data is None:
            logger.debug("Cache miss for token key: %s", cache_key)
            return None

        logger.debug("Cache hit for token key: %s", cache_key)
        return ApiTokenCache._deserialize_token(cached_data)

    @staticmethod
    def _add_to_tenant_index(tenant_id: str | None, cache_key: str) -> None:
        """Add cache key to tenant index for efficient invalidation."""
        if not tenant_id:
            return

        try:
            index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
            redis_client.sadd(index_key, cache_key)
            redis_client.expire(index_key, CACHE_TTL_SECONDS + 60)
        except Exception as e:
            logger.warning("Failed to update tenant index: %s", e)

    @staticmethod
    def _remove_from_tenant_index(tenant_id: str | None, cache_key: str) -> None:
        """Remove cache key from tenant index."""
        if not tenant_id:
            return

        try:
            index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
            redis_client.srem(index_key, cache_key)
        except Exception as e:
            logger.warning("Failed to remove from tenant index: %s", e)

    @staticmethod
    @redis_fallback(default_return=False)
    def set(token: str, scope: str | None, api_token: Any | None, ttl: int = CACHE_TTL_SECONDS) -> bool:
        """Set API token in cache."""
        cache_key = ApiTokenCache._make_cache_key(token, scope)

        if api_token is None:
            cached_value = b"null"
            ttl = CACHE_NULL_TTL_SECONDS
        else:
            cached_value = ApiTokenCache._serialize_token(api_token)

        try:
            redis_client.setex(cache_key, ttl, cached_value)

            if api_token is not None and hasattr(api_token, "tenant_id"):
                ApiTokenCache._add_to_tenant_index(api_token.tenant_id, cache_key)

            logger.debug("Cached token with key: %s, ttl: %ss", cache_key, ttl)
            return True
        except Exception as e:
            logger.warning("Failed to cache token: %s", e)
            return False

    @staticmethod
    @redis_fallback(default_return=False)
    def delete(token: str, scope: str | None = None) -> bool:
        """Delete API token from cache."""
        if scope is None:
            pattern = f"{CACHE_KEY_PREFIX}:*:{token}"
            try:
                keys_to_delete = list(redis_client.scan_iter(match=pattern))
                if keys_to_delete:
                    redis_client.delete(*keys_to_delete)
                    logger.info("Deleted %d cache entries for token", len(keys_to_delete))
                return True
            except Exception as e:
                logger.warning("Failed to delete token cache with pattern: %s", e)
                return False
        else:
            cache_key = ApiTokenCache._make_cache_key(token, scope)
            try:
                tenant_id = None
                try:
                    cached_data = redis_client.get(cache_key)
                    if cached_data and cached_data != b"null":
                        cached_token = ApiTokenCache._deserialize_token(cached_data)
                        if cached_token:
                            tenant_id = cached_token.tenant_id
                except Exception as e:
                    logger.debug("Failed to get tenant_id for cache cleanup: %s", e)

                redis_client.delete(cache_key)

                if tenant_id:
                    ApiTokenCache._remove_from_tenant_index(tenant_id, cache_key)

                logger.info("Deleted cache for key: %s", cache_key)
                return True
            except Exception as e:
                logger.warning("Failed to delete token cache: %s", e)
                return False

    @staticmethod
    @redis_fallback(default_return=False)
    def invalidate_by_tenant(tenant_id: str) -> bool:
        """Invalidate all API token caches for a specific tenant via tenant index."""
        try:
            index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
            cache_keys = redis_client.smembers(index_key)

            if cache_keys:
                deleted_count = 0
                for cache_key in cache_keys:
                    if isinstance(cache_key, bytes):
                        cache_key = cache_key.decode("utf-8")
                    redis_client.delete(cache_key)
                    deleted_count += 1

                redis_client.delete(index_key)

                logger.info(
                    "Invalidated %d token cache entries for tenant: %s",
                    deleted_count,
                    tenant_id,
                )
            else:
                logger.info(
                    "No tenant index found for %s, relying on TTL expiration",
                    tenant_id,
                )

            return True

        except Exception as e:
            logger.warning("Failed to invalidate tenant token cache: %s", e)
            return False


# ---------------------------------------------------------------------
# Token usage recording (for batch update)
# ---------------------------------------------------------------------


def record_token_usage(auth_token: str, scope: str | None) -> None:
    """
    Record token usage in Redis for later batch update by a scheduled job.

    Instead of dispatching a Celery task per request, we simply SET a key in Redis.
    A Celery Beat scheduled task will periodically scan these keys and batch-update
    last_used_at in the database.
    """
    try:
        key = ApiTokenCache.make_active_key(auth_token, scope)
        redis_client.set(key, naive_utc_now().isoformat(), ex=3600)
    except Exception as e:
        logger.warning("Failed to record token usage: %s", e)


# ---------------------------------------------------------------------
# Database query + single-flight
# ---------------------------------------------------------------------


def query_token_from_db(auth_token: str, scope: str | None) -> ApiToken:
    """
    Query API token from database and cache the result.

    Raises Unauthorized if token is invalid.
    """
    with Session(db.engine, expire_on_commit=False) as session:
        stmt = select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
        api_token = session.scalar(stmt)

        if not api_token:
            ApiTokenCache.set(auth_token, scope, None)
            raise Unauthorized("Access token is invalid")

        ApiTokenCache.set(auth_token, scope, api_token)
        record_token_usage(auth_token, scope)
        return api_token


def fetch_token_with_single_flight(auth_token: str, scope: str | None) -> ApiToken | Any:
    """
    Fetch token from DB with single-flight pattern using Redis lock.

    Ensures only one concurrent request queries the database for the same token.
    Falls back to direct query if lock acquisition fails.
    """
    logger.debug("Token cache miss, attempting to acquire query lock for scope: %s", scope)

    lock_key = f"api_token_query_lock:{scope}:{auth_token}"
    lock = redis_client.lock(lock_key, timeout=10, blocking_timeout=5)

    try:
        if lock.acquire(blocking=True):
            try:
                cached_token = ApiTokenCache.get(auth_token, scope)
                if cached_token is not None:
                    logger.debug("Token cached by concurrent request, using cached version")
                    return cached_token

                return query_token_from_db(auth_token, scope)
            finally:
                lock.release()
        else:
            logger.warning("Lock timeout for token: %s, proceeding with direct query", auth_token[:10])
            return query_token_from_db(auth_token, scope)
    except Unauthorized:
        raise
    except Exception as e:
        logger.warning("Redis lock failed for token query: %s, proceeding anyway", e)
        return query_token_from_db(auth_token, scope)
```
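The single-flight helper above is an instance of a general pattern: collapse concurrent cache misses for one key into a single backend query behind a short Redis lock, re-checking the cache after the lock is won. A stripped-down sketch with illustrative names, assuming redis-py and a running Redis server:

```python
from collections.abc import Callable

import redis

r = redis.Redis()

def single_flight(key: str, load: Callable[[], bytes]) -> bytes:
    """Serve `key` from cache, letting only one caller run `load` on a miss."""
    lock = r.lock(f"lock:{key}", timeout=10, blocking_timeout=5)
    if lock.acquire(blocking=True):
        try:
            cached = r.get(key)  # re-check: a concurrent caller may have filled it
            if cached is not None:
                return cached
            value = load()
            r.setex(key, 600, value)  # cache with a 10-minute TTL
            return value
        finally:
            lock.release()
    return load()  # lock wait timed out: degrade to a direct query
```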
Dataset service: the cleanup task is dispatched before the delete-and-commit again, dropping the commit-first ordering.

```diff
@@ -1696,18 +1696,13 @@ class DocumentService:
             for document in documents
             if document.data_source_type == "upload_file" and document.data_source_info_dict
         ]
+        if dataset.doc_form is not None:
+            batch_clean_document_task.delay(document_ids, dataset.id, dataset.doc_form, file_ids)

-        # Delete documents first, then dispatch cleanup task after commit
-        # to avoid deadlock between main transaction and async task
         for document in documents:
             db.session.delete(document)
         db.session.commit()

-        # Dispatch cleanup task after commit to avoid lock contention
-        # Task cleans up segments, files, and vector indexes
-        if dataset.doc_form is not None:
-            batch_clean_document_task.delay(document_ids, dataset.id, dataset.doc_form, file_ids)

     @staticmethod
     def rename_document(dataset_id: str, document_id: str, name: str) -> Document:
         assert isinstance(current_user, Account)
```
Batch document cleanup task: the batching constant is removed along with the step-based implementation below.

```diff
@@ -14,9 +14,6 @@ from models.model import UploadFile

 logger = logging.getLogger(__name__)

-# Batch size for database operations to keep transactions short
-BATCH_SIZE = 1000
-

 @shared_task(queue="dataset")
 def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form: str | None, file_ids: list[str]):
```
Hunk `@@ -34,179 +31,63 @@` replaces the step-based, short-transaction implementation with a single-session implementation. Both versions begin after the shared guard:

```python
    if not doc_form:
        raise ValueError("doc_form is required")
```

Removed (step-based implementation):

```python
    storage_keys_to_delete: list[str] = []
    index_node_ids: list[str] = []
    segment_ids: list[str] = []
    total_image_upload_file_ids: list[str] = []

    try:
        # ============ Step 1: Query segment and file data (short read-only transaction) ============
        with session_factory.create_session() as session:
            # Get segments info
            segments = session.scalars(
                select(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
            ).all()

            if segments:
                index_node_ids = [segment.index_node_id for segment in segments]
                segment_ids = [segment.id for segment in segments]

                # Collect image file IDs from segment content
                for segment in segments:
                    image_upload_file_ids = get_image_upload_file_ids(segment.content)
                    total_image_upload_file_ids.extend(image_upload_file_ids)

                # Query storage keys for image files
                if total_image_upload_file_ids:
                    image_files = session.scalars(
                        select(UploadFile).where(UploadFile.id.in_(total_image_upload_file_ids))
                    ).all()
                    storage_keys_to_delete.extend([f.key for f in image_files if f and f.key])

            # Query storage keys for document files
            if file_ids:
                files = session.scalars(select(UploadFile).where(UploadFile.id.in_(file_ids))).all()
                storage_keys_to_delete.extend([f.key for f in files if f and f.key])

        # ============ Step 2: Clean vector index (external service, fresh session for dataset) ============
        if index_node_ids:
            try:
                # Fetch dataset in a fresh session to avoid DetachedInstanceError
                with session_factory.create_session() as session:
                    dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
                    if not dataset:
                        logger.warning("Dataset not found for vector index cleanup, dataset_id: %s", dataset_id)
                    else:
                        index_processor = IndexProcessorFactory(doc_form).init_index_processor()
                        index_processor.clean(
                            dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
                        )
            except Exception:
                logger.exception(
                    "Failed to clean vector index for dataset_id: %s, document_ids: %s, index_node_ids count: %d",
                    dataset_id,
                    document_ids,
                    len(index_node_ids),
                )

        # ============ Step 3: Delete metadata binding (separate short transaction) ============
        try:
            with session_factory.create_session() as session:
                deleted_count = (
                    session.query(DatasetMetadataBinding)
                    .where(
                        DatasetMetadataBinding.dataset_id == dataset_id,
                        DatasetMetadataBinding.document_id.in_(document_ids),
                    )
                    .delete(synchronize_session=False)
                )
                session.commit()
                logger.debug("Deleted %d metadata bindings for dataset_id: %s", deleted_count, dataset_id)
        except Exception:
            logger.exception(
                "Failed to delete metadata bindings for dataset_id: %s, document_ids: %s",
                dataset_id,
                document_ids,
            )

        # ============ Step 4: Batch delete UploadFile records (multiple short transactions) ============
        if total_image_upload_file_ids:
            failed_batches = 0
            total_batches = (len(total_image_upload_file_ids) + BATCH_SIZE - 1) // BATCH_SIZE
            for i in range(0, len(total_image_upload_file_ids), BATCH_SIZE):
                batch = total_image_upload_file_ids[i : i + BATCH_SIZE]
                try:
                    with session_factory.create_session() as session:
                        stmt = delete(UploadFile).where(UploadFile.id.in_(batch))
                        session.execute(stmt)
                        session.commit()
                except Exception:
                    failed_batches += 1
                    logger.exception(
                        "Failed to delete image UploadFile batch %d-%d for dataset_id: %s",
                        i,
                        i + len(batch),
                        dataset_id,
                    )
            if failed_batches > 0:
                logger.warning(
                    "Image UploadFile deletion: %d/%d batches failed for dataset_id: %s",
                    failed_batches,
                    total_batches,
                    dataset_id,
                )

        # ============ Step 5: Batch delete DocumentSegment records (multiple short transactions) ============
        if segment_ids:
            failed_batches = 0
            total_batches = (len(segment_ids) + BATCH_SIZE - 1) // BATCH_SIZE
            for i in range(0, len(segment_ids), BATCH_SIZE):
                batch = segment_ids[i : i + BATCH_SIZE]
                try:
                    with session_factory.create_session() as session:
                        segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(batch))
                        session.execute(segment_delete_stmt)
                        session.commit()
                except Exception:
                    failed_batches += 1
                    logger.exception(
                        "Failed to delete DocumentSegment batch %d-%d for dataset_id: %s, document_ids: %s",
                        i,
                        i + len(batch),
                        dataset_id,
                        document_ids,
                    )
            if failed_batches > 0:
                logger.warning(
                    "DocumentSegment deletion: %d/%d batches failed, document_ids: %s",
                    failed_batches,
                    total_batches,
                    document_ids,
                )

        # ============ Step 6: Delete document-associated files (separate short transaction) ============
        if file_ids:
            try:
                with session_factory.create_session() as session:
                    stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
                    session.execute(stmt)
                    session.commit()
            except Exception:
                logger.exception(
                    "Failed to delete document UploadFile records for dataset_id: %s, file_ids: %s",
                    dataset_id,
                    file_ids,
                )

        # ============ Step 7: Delete storage files (I/O operations, no DB transaction) ============
        storage_delete_failures = 0
        for storage_key in storage_keys_to_delete:
            try:
                storage.delete(storage_key)
            except Exception:
                storage_delete_failures += 1
                logger.exception("Failed to delete file from storage, key: %s", storage_key)
        if storage_delete_failures > 0:
            logger.warning(
                "Storage file deletion completed with %d failures out of %d total files for dataset_id: %s",
                storage_delete_failures,
                len(storage_keys_to_delete),
                dataset_id,
            )

        end_at = time.perf_counter()
        logger.info(
            click.style(
                f"Cleaned documents when documents deleted latency: {end_at - start_at:.2f}s, "
                f"dataset_id: {dataset_id}, document_ids: {document_ids}, "
                f"segments: {len(segment_ids)}, image_files: {len(total_image_upload_file_ids)}, "
                f"storage_files: {len(storage_keys_to_delete)}",
                fg="green",
            )
        )
    except Exception:
        logger.exception(
            "Batch clean documents failed for dataset_id: %s, document_ids: %s",
            dataset_id,
            document_ids,
        )
```

Added (single-session implementation):

```python
    with session_factory.create_session() as session:
        try:
            dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()

            if not dataset:
                raise Exception("Document has no dataset")

            session.query(DatasetMetadataBinding).where(
                DatasetMetadataBinding.dataset_id == dataset_id,
                DatasetMetadataBinding.document_id.in_(document_ids),
            ).delete(synchronize_session=False)

            segments = session.scalars(
                select(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
            ).all()

            # check segment is exist
            if segments:
                index_node_ids = [segment.index_node_id for segment in segments]
                index_processor = IndexProcessorFactory(doc_form).init_index_processor()
                index_processor.clean(
                    dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
                )

                for segment in segments:
                    image_upload_file_ids = get_image_upload_file_ids(segment.content)
                    image_files = session.query(UploadFile).where(UploadFile.id.in_(image_upload_file_ids)).all()
                    for image_file in image_files:
                        try:
                            if image_file and image_file.key:
                                storage.delete(image_file.key)
                        except Exception:
                            logger.exception(
                                "Delete image_files failed when storage deleted, \
                                image_upload_file_is: %s",
                                image_file.id,
                            )
                    stmt = delete(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
                    session.execute(stmt)
                    session.delete(segment)

            if file_ids:
                files = session.scalars(select(UploadFile).where(UploadFile.id.in_(file_ids))).all()
                for file in files:
                    try:
                        storage.delete(file.key)
                    except Exception:
                        logger.exception("Delete file failed when document deleted, file_id: %s", file.id)
                stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
                session.execute(stmt)

            session.commit()

            end_at = time.perf_counter()
            logger.info(
                click.style(
                    f"Cleaned documents when documents deleted latency: {end_at - start_at}",
                    fg="green",
                )
            )
        except Exception:
            logger.exception("Cleaned documents when documents deleted failed")
```
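The removed Step 4 and Step 5 repeat the same slice-and-delete loop. A generic helper sketch of that batching idea, hypothetical and not part of the codebase:

```python
from collections.abc import Iterator, Sequence

BATCH_SIZE = 1000  # same size as the removed constant

def batches(ids: Sequence[str], size: int = BATCH_SIZE) -> Iterator[Sequence[str]]:
    """Yield fixed-size slices so each DELETE runs in its own short transaction."""
    for i in range(0, len(ids), size):
        yield ids[i : i + size]

# Usage: for batch in batches(segment_ids): issue one DELETE per batch.
```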
Segment index deletion task: the batched `delete()` statements are replaced by per-object deletes.

```diff
@@ -3,7 +3,6 @@ import time

 import click
 from celery import shared_task
-from sqlalchemy import delete

 from core.db.session_factory import session_factory
 from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
@@ -68,14 +67,8 @@ def delete_segment_from_index_task(
         if segment_attachment_bindings:
             attachment_ids = [binding.attachment_id for binding in segment_attachment_bindings]
             index_processor.clean(dataset=dataset, node_ids=attachment_ids, with_keywords=False)
-            segment_attachment_bind_ids = [i.id for i in segment_attachment_bindings]
-
-            for i in range(0, len(segment_attachment_bind_ids), 1000):
-                segment_attachment_bind_delete_stmt = delete(SegmentAttachmentBinding).where(
-                    SegmentAttachmentBinding.id.in_(segment_attachment_bind_ids[i : i + 1000])
-                )
-                session.execute(segment_attachment_bind_delete_stmt)
-
+            for binding in segment_attachment_bindings:
+                session.delete(binding)
             # delete upload file
             session.query(UploadFile).where(UploadFile.id.in_(attachment_ids)).delete(synchronize_session=False)
     session.commit()
```
Document indexing sync task: the auto-committing `session.begin()` block is dropped, so explicit commits are added on the early-return and re-parse paths.

```diff
@@ -28,7 +28,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
     logger.info(click.style(f"Start sync document: {document_id}", fg="green"))
     start_at = time.perf_counter()

-    with session_factory.create_session() as session, session.begin():
+    with session_factory.create_session() as session:
         document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()

         if not document:
@@ -68,6 +68,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
             document.indexing_status = "error"
             document.error = "Datasource credential not found. Please reconnect your Notion workspace."
             document.stopped_at = naive_utc_now()
+            session.commit()
             return

         loader = NotionExtractor(
@@ -84,6 +85,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
         if last_edited_time != page_edited_time:
             document.indexing_status = "parsing"
             document.processing_started_at = naive_utc_now()
+            session.commit()

             # delete all document segment and index
             try:
```
App deletion task: cache invalidation is removed from API token cleanup.

```diff
@@ -48,7 +48,6 @@ from models.workflow import (
     WorkflowArchiveLog,
 )
 from repositories.factory import DifyAPIRepositoryFactory
-from services.api_token_service import ApiTokenCache

 logger = logging.getLogger(__name__)

@@ -135,12 +134,6 @@ def _delete_app_mcp_servers(tenant_id: str, app_id: str):

 def _delete_app_api_tokens(tenant_id: str, app_id: str):
     def del_api_token(session, api_token_id: str):
-        # Fetch token details for cache invalidation
-        token_obj = session.query(ApiToken).where(ApiToken.id == api_token_id).first()
-        if token_obj:
-            # Invalidate cache before deletion
-            ApiTokenCache.delete(token_obj.token, token_obj.type)
-
         session.query(ApiToken).where(ApiToken.id == api_token_id).delete(synchronize_session=False)

     _delete_records(
```
@ -1,375 +0,0 @@
|
||||
"""
|
||||
Integration tests for API Token Cache with Redis.
|
||||
|
||||
These tests require:
|
||||
- Redis server running
|
||||
- Test database configured
|
||||
"""
|
||||
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from extensions.ext_redis import redis_client
|
||||
from models.model import ApiToken
|
||||
from services.api_token_service import ApiTokenCache, CachedApiToken
|
||||
|
||||
|
||||
class TestApiTokenCacheRedisIntegration:
|
||||
"""Integration tests with real Redis."""
|
||||
|
||||
def setup_method(self):
|
||||
"""Setup test fixtures and clean Redis."""
|
||||
self.test_token = "test-integration-token-123"
|
||||
self.test_scope = "app"
|
||||
self.cache_key = f"api_token:{self.test_scope}:{self.test_token}"
|
||||
|
||||
# Clean up any existing test data
|
||||
self._cleanup()
|
||||
|
||||
def teardown_method(self):
|
||||
"""Cleanup test data from Redis."""
|
||||
self._cleanup()
|
||||
|
||||
def _cleanup(self):
|
||||
"""Remove test data from Redis."""
|
||||
try:
|
||||
redis_client.delete(self.cache_key)
|
||||
redis_client.delete(ApiTokenCache._make_tenant_index_key("test-tenant-id"))
|
||||
redis_client.delete(ApiTokenCache.make_active_key(self.test_token, self.test_scope))
|
||||
except Exception:
|
||||
pass # Ignore cleanup errors
|
||||
|
||||
def test_cache_set_and_get_with_real_redis(self):
|
||||
"""Test cache set and get operations with real Redis."""
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
mock_token = MagicMock()
|
||||
mock_token.id = "test-id-123"
|
||||
mock_token.app_id = "test-app-456"
|
||||
mock_token.tenant_id = "test-tenant-789"
|
||||
mock_token.type = "app"
|
||||
mock_token.token = self.test_token
|
||||
mock_token.last_used_at = datetime.now()
|
||||
mock_token.created_at = datetime.now() - timedelta(days=30)
|
||||
|
||||
# Set in cache
|
||||
result = ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
|
||||
assert result is True
|
||||
|
||||
# Verify in Redis
|
||||
cached_data = redis_client.get(self.cache_key)
|
||||
assert cached_data is not None
|
||||
|
||||
# Get from cache
|
||||
cached_token = ApiTokenCache.get(self.test_token, self.test_scope)
|
||||
assert cached_token is not None
|
||||
assert isinstance(cached_token, CachedApiToken)
|
||||
assert cached_token.id == "test-id-123"
|
||||
assert cached_token.app_id == "test-app-456"
|
||||
assert cached_token.tenant_id == "test-tenant-789"
|
||||
assert cached_token.type == "app"
|
||||
assert cached_token.token == self.test_token
|
||||
|
||||
def test_cache_ttl_with_real_redis(self):
|
||||
"""Test cache TTL is set correctly."""
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
mock_token = MagicMock()
|
||||
mock_token.id = "test-id"
|
||||
mock_token.app_id = "test-app"
|
||||
mock_token.tenant_id = "test-tenant"
|
||||
mock_token.type = "app"
|
||||
mock_token.token = self.test_token
|
||||
mock_token.last_used_at = None
|
||||
mock_token.created_at = datetime.now()
|
||||
|
||||
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
|
||||
|
||||
ttl = redis_client.ttl(self.cache_key)
|
||||
assert 595 <= ttl <= 600 # Should be around 600 seconds (10 minutes)
|
||||
|
||||
def test_cache_null_value_for_invalid_token(self):
|
||||
"""Test caching null value for invalid tokens."""
|
||||
result = ApiTokenCache.set(self.test_token, self.test_scope, None)
|
||||
assert result is True
|
||||
|
||||
cached_data = redis_client.get(self.cache_key)
|
||||
assert cached_data == b"null"
|
||||
|
||||
cached_token = ApiTokenCache.get(self.test_token, self.test_scope)
|
||||
assert cached_token is None
|
||||
|
||||
ttl = redis_client.ttl(self.cache_key)
|
||||
assert 55 <= ttl <= 60
|
||||
|
||||
def test_cache_delete_with_real_redis(self):
|
||||
"""Test cache deletion with real Redis."""
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
mock_token = MagicMock()
|
||||
mock_token.id = "test-id"
|
||||
mock_token.app_id = "test-app"
|
||||
mock_token.tenant_id = "test-tenant"
|
||||
mock_token.type = "app"
|
||||
mock_token.token = self.test_token
|
||||
mock_token.last_used_at = None
|
||||
mock_token.created_at = datetime.now()
|
||||
|
||||
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
|
||||
assert redis_client.exists(self.cache_key) == 1
|
||||
|
||||
result = ApiTokenCache.delete(self.test_token, self.test_scope)
|
||||
assert result is True
|
||||
assert redis_client.exists(self.cache_key) == 0
|
||||
|
||||
def test_tenant_index_creation(self):
|
||||
"""Test tenant index is created when caching token."""
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
tenant_id = "test-tenant-id"
|
||||
mock_token = MagicMock()
|
||||
mock_token.id = "test-id"
|
||||
mock_token.app_id = "test-app"
|
||||
mock_token.tenant_id = tenant_id
|
||||
mock_token.type = "app"
|
||||
mock_token.token = self.test_token
|
||||
mock_token.last_used_at = None
|
||||
mock_token.created_at = datetime.now()
|
||||
|
||||
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
|
||||
|
||||
index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
|
||||
assert redis_client.exists(index_key) == 1
|
||||
|
||||
members = redis_client.smembers(index_key)
|
||||
cache_keys = [m.decode("utf-8") if isinstance(m, bytes) else m for m in members]
|
||||
assert self.cache_key in cache_keys
|
||||
|
||||
def test_invalidate_by_tenant_via_index(self):
|
||||
"""Test tenant-wide cache invalidation using index (fast path)."""
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
tenant_id = "test-tenant-id"
|
||||
|
||||
for i in range(3):
|
||||
token_value = f"test-token-{i}"
|
||||
mock_token = MagicMock()
|
||||
mock_token.id = f"test-id-{i}"
|
||||
mock_token.app_id = "test-app"
|
||||
mock_token.tenant_id = tenant_id
|
||||
mock_token.type = "app"
|
||||
mock_token.token = token_value
|
||||
mock_token.last_used_at = None
|
||||
mock_token.created_at = datetime.now()
|
||||
|
||||
ApiTokenCache.set(token_value, "app", mock_token)
|
||||
|
||||
for i in range(3):
|
||||
key = f"api_token:app:test-token-{i}"
|
||||
assert redis_client.exists(key) == 1
|
||||
|
||||
result = ApiTokenCache.invalidate_by_tenant(tenant_id)
|
||||
assert result is True
|
||||
|
||||
for i in range(3):
|
||||
key = f"api_token:app:test-token-{i}"
|
||||
assert redis_client.exists(key) == 0
|
||||
|
||||
assert redis_client.exists(ApiTokenCache._make_tenant_index_key(tenant_id)) == 0
|
||||
|
||||
def test_concurrent_cache_access(self):
|
||||
"""Test concurrent cache access doesn't cause issues."""
|
||||
import concurrent.futures
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
mock_token = MagicMock()
|
||||
mock_token.id = "test-id"
|
||||
mock_token.app_id = "test-app"
|
||||
mock_token.tenant_id = "test-tenant"
|
||||
mock_token.type = "app"
|
||||
mock_token.token = self.test_token
|
||||
mock_token.last_used_at = None
|
||||
mock_token.created_at = datetime.now()
|
||||
|
||||
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
|
||||
|
||||
def get_from_cache():
|
||||
return ApiTokenCache.get(self.test_token, self.test_scope)
|
||||
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
|
||||
futures = [executor.submit(get_from_cache) for _ in range(50)]
|
||||
results = [f.result() for f in concurrent.futures.as_completed(futures)]
|
||||
|
||||
assert len(results) == 50
|
||||
assert all(r is not None for r in results)
|
||||
assert all(isinstance(r, CachedApiToken) for r in results)
|
||||
|
||||
|
||||
class TestTokenUsageRecording:
|
||||
"""Tests for recording token usage in Redis (batch update approach)."""
|
||||
|
||||
def setup_method(self):
|
||||
self.test_token = "test-usage-token"
|
||||
self.test_scope = "app"
|
||||
self.active_key = ApiTokenCache.make_active_key(self.test_token, self.test_scope)
|
||||
|
||||
def teardown_method(self):
|
||||
try:
|
||||
redis_client.delete(self.active_key)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def test_record_token_usage_sets_redis_key(self):
|
||||
"""Test that record_token_usage writes an active key to Redis."""
|
||||
from services.api_token_service import record_token_usage
|
||||
|
||||
record_token_usage(self.test_token, self.test_scope)
|
||||
|
||||
# Key should exist
|
||||
assert redis_client.exists(self.active_key) == 1
|
||||
|
||||
# Value should be an ISO timestamp
|
||||
value = redis_client.get(self.active_key)
|
||||
if isinstance(value, bytes):
|
||||
value = value.decode("utf-8")
|
||||
datetime.fromisoformat(value) # Should not raise
|
||||
|
||||
def test_record_token_usage_has_ttl(self):
|
||||
"""Test that active keys have a TTL as safety net."""
|
||||
from services.api_token_service import record_token_usage
|
||||
|
||||
record_token_usage(self.test_token, self.test_scope)
|
||||
|
||||
ttl = redis_client.ttl(self.active_key)
|
||||
assert 3595 <= ttl <= 3600 # ~1 hour
|
||||
|
||||
def test_record_token_usage_overwrites(self):
|
||||
"""Test that repeated calls overwrite the same key (no accumulation)."""
|
||||
from services.api_token_service import record_token_usage
|
||||
|
||||
record_token_usage(self.test_token, self.test_scope)
|
||||
first_value = redis_client.get(self.active_key)
|
||||
|
||||
time.sleep(0.01) # Tiny delay so timestamp differs
|
||||
|
||||
record_token_usage(self.test_token, self.test_scope)
|
||||
second_value = redis_client.get(self.active_key)
|
||||
|
||||
# Key count should still be 1 (overwritten, not accumulated)
|
||||
assert redis_client.exists(self.active_key) == 1
|
||||
|
||||
class TestEndToEndCacheFlow:
    """End-to-end integration test for complete cache flow."""

    @pytest.mark.usefixtures("db_session")
    def test_complete_flow_cache_miss_then_hit(self, db_session):
        """
        Test complete flow:
        1. First request (cache miss) -> query DB -> cache result
        2. Second request (cache hit) -> return from cache
        3. Verify Redis state
        """
        test_token_value = "test-e2e-token"
        test_scope = "app"

        test_token = ApiToken()
        test_token.id = "test-e2e-id"
        test_token.token = test_token_value
        test_token.type = test_scope
        test_token.app_id = "test-app"
        test_token.tenant_id = "test-tenant"
        test_token.last_used_at = None
        test_token.created_at = datetime.now()

        db_session.add(test_token)
        db_session.commit()

        try:
            # Step 1: Cache miss - set token in cache
            ApiTokenCache.set(test_token_value, test_scope, test_token)

            cache_key = f"api_token:{test_scope}:{test_token_value}"
            assert redis_client.exists(cache_key) == 1

            # Step 2: Cache hit - get from cache
            cached_token = ApiTokenCache.get(test_token_value, test_scope)
            assert cached_token is not None
            assert cached_token.id == test_token.id
            assert cached_token.token == test_token_value

            # Step 3: Verify tenant index
            index_key = ApiTokenCache._make_tenant_index_key(test_token.tenant_id)
            assert redis_client.exists(index_key) == 1
            assert cache_key.encode() in redis_client.smembers(index_key)

            # Step 4: Delete and verify cleanup
            ApiTokenCache.delete(test_token_value, test_scope)
            assert redis_client.exists(cache_key) == 0
            assert cache_key.encode() not in redis_client.smembers(index_key)

        finally:
            db_session.delete(test_token)
            db_session.commit()
            redis_client.delete(f"api_token:{test_scope}:{test_token_value}")
            redis_client.delete(ApiTokenCache._make_tenant_index_key(test_token.tenant_id))

    def test_high_concurrency_simulation(self):
        """Simulate high concurrency access to cache."""
        import concurrent.futures
        from unittest.mock import MagicMock

        test_token_value = "test-concurrent-token"
        test_scope = "app"

        mock_token = MagicMock()
        mock_token.id = "concurrent-id"
        mock_token.app_id = "test-app"
        mock_token.tenant_id = "test-tenant"
        mock_token.type = test_scope
        mock_token.token = test_token_value
        mock_token.last_used_at = datetime.now()
        mock_token.created_at = datetime.now()

        ApiTokenCache.set(test_token_value, test_scope, mock_token)

        try:

            def read_cache():
                return ApiTokenCache.get(test_token_value, test_scope)

            start_time = time.time()
            with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor:
                futures = [executor.submit(read_cache) for _ in range(100)]
                results = [f.result() for f in concurrent.futures.as_completed(futures)]
            elapsed = time.time() - start_time

            assert len(results) == 100
            assert all(r is not None for r in results)

            assert elapsed < 1.0, f"Too slow: {elapsed}s for 100 cache reads"

        finally:
            ApiTokenCache.delete(test_token_value, test_scope)
            redis_client.delete(ApiTokenCache._make_tenant_index_key(mock_token.tenant_id))


class TestRedisFailover:
    """Test behavior when Redis is unavailable."""

    @patch("services.api_token_service.redis_client")
    def test_graceful_degradation_when_redis_fails(self, mock_redis):
        """Test system degrades gracefully when Redis is unavailable."""
        from redis import RedisError

        mock_redis.get.side_effect = RedisError("Connection failed")
        mock_redis.setex.side_effect = RedisError("Connection failed")

        result_get = ApiTokenCache.get("test-token", "app")
        assert result_get is None

        result_set = ApiTokenCache.set("test-token", "app", None)
        assert result_set is False
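What the failover assertions above pin down is a cache-aside lookup that treats Redis as optional; a minimal sketch of the read path these tests imply (query_token_from_db is an illustrative stand-in, not a function from this diff):

def fetch_api_token(token: str, scope: str):
    # ApiTokenCache.get returns None on a miss and on RedisError alike.
    cached = ApiTokenCache.get(token, scope)
    if cached is not None:
        return cached
    # Redis unavailable or key absent: fall back to the database.
    record = query_token_from_db(token, scope)  # illustrative helper
    # Best-effort repopulation; set() reports False instead of raising.
    ApiTokenCache.set(token, scope, record)
    return record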
@@ -132,8 +132,6 @@ class TestCelerySSLConfiguration:
        mock_config.WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK = 0
        mock_config.ENABLE_TRIGGER_PROVIDER_REFRESH_TASK = False
        mock_config.TRIGGER_PROVIDER_REFRESH_INTERVAL = 15
-       mock_config.ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK = False
-       mock_config.API_TOKEN_LAST_USED_UPDATE_INTERVAL = 30

        with patch("extensions.ext_celery.dify_config", mock_config):
            from dify_app import DifyApp
@@ -1,250 +0,0 @@
"""
Unit tests for API Token Cache module.
"""

import json
from datetime import datetime
from unittest.mock import MagicMock, patch

from services.api_token_service import (
    CACHE_KEY_PREFIX,
    CACHE_NULL_TTL_SECONDS,
    CACHE_TTL_SECONDS,
    ApiTokenCache,
    CachedApiToken,
)


class TestApiTokenCache:
    """Test cases for ApiTokenCache class."""

    def setup_method(self):
        """Setup test fixtures."""
        self.mock_token = MagicMock()
        self.mock_token.id = "test-token-id-123"
        self.mock_token.app_id = "test-app-id-456"
        self.mock_token.tenant_id = "test-tenant-id-789"
        self.mock_token.type = "app"
        self.mock_token.token = "test-token-value-abc"
        self.mock_token.last_used_at = datetime(2026, 2, 3, 10, 0, 0)
        self.mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)

    def test_make_cache_key(self):
        """Test cache key generation."""
        # Test with scope
        key = ApiTokenCache._make_cache_key("my-token", "app")
        assert key == f"{CACHE_KEY_PREFIX}:app:my-token"

        # Test without scope
        key = ApiTokenCache._make_cache_key("my-token", None)
        assert key == f"{CACHE_KEY_PREFIX}:any:my-token"

    def test_serialize_token(self):
        """Test token serialization."""
        serialized = ApiTokenCache._serialize_token(self.mock_token)
        data = json.loads(serialized)

        assert data["id"] == "test-token-id-123"
        assert data["app_id"] == "test-app-id-456"
        assert data["tenant_id"] == "test-tenant-id-789"
        assert data["type"] == "app"
        assert data["token"] == "test-token-value-abc"
        assert data["last_used_at"] == "2026-02-03T10:00:00"
        assert data["created_at"] == "2026-01-01T00:00:00"

    def test_serialize_token_with_nulls(self):
        """Test token serialization with None values."""
        mock_token = MagicMock()
        mock_token.id = "test-id"
        mock_token.app_id = None
        mock_token.tenant_id = None
        mock_token.type = "dataset"
        mock_token.token = "test-token"
        mock_token.last_used_at = None
        mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)

        serialized = ApiTokenCache._serialize_token(mock_token)
        data = json.loads(serialized)

        assert data["app_id"] is None
        assert data["tenant_id"] is None
        assert data["last_used_at"] is None

    def test_deserialize_token(self):
        """Test token deserialization."""
        cached_data = json.dumps(
            {
                "id": "test-id",
                "app_id": "test-app",
                "tenant_id": "test-tenant",
                "type": "app",
                "token": "test-token",
                "last_used_at": "2026-02-03T10:00:00",
                "created_at": "2026-01-01T00:00:00",
            }
        )

        result = ApiTokenCache._deserialize_token(cached_data)

        assert isinstance(result, CachedApiToken)
        assert result.id == "test-id"
        assert result.app_id == "test-app"
        assert result.tenant_id == "test-tenant"
        assert result.type == "app"
        assert result.token == "test-token"
        assert result.last_used_at == datetime(2026, 2, 3, 10, 0, 0)
        assert result.created_at == datetime(2026, 1, 1, 0, 0, 0)

    def test_deserialize_null_token(self):
        """Test deserialization of null token (cached miss)."""
        result = ApiTokenCache._deserialize_token("null")
        assert result is None

    def test_deserialize_invalid_json(self):
        """Test deserialization with invalid JSON."""
        result = ApiTokenCache._deserialize_token("invalid-json{")
        assert result is None

    @patch("services.api_token_service.redis_client")
    def test_get_cache_hit(self, mock_redis):
        """Test cache hit scenario."""
        cached_data = json.dumps(
            {
                "id": "test-id",
                "app_id": "test-app",
                "tenant_id": "test-tenant",
                "type": "app",
                "token": "test-token",
                "last_used_at": "2026-02-03T10:00:00",
                "created_at": "2026-01-01T00:00:00",
            }
        ).encode("utf-8")
        mock_redis.get.return_value = cached_data

        result = ApiTokenCache.get("test-token", "app")

        assert result is not None
        assert isinstance(result, CachedApiToken)
        assert result.app_id == "test-app"
        mock_redis.get.assert_called_once_with(f"{CACHE_KEY_PREFIX}:app:test-token")

    @patch("services.api_token_service.redis_client")
    def test_get_cache_miss(self, mock_redis):
        """Test cache miss scenario."""
        mock_redis.get.return_value = None

        result = ApiTokenCache.get("test-token", "app")

        assert result is None
        mock_redis.get.assert_called_once()

    @patch("services.api_token_service.redis_client")
    def test_set_valid_token(self, mock_redis):
        """Test setting a valid token in cache."""
        result = ApiTokenCache.set("test-token", "app", self.mock_token)

        assert result is True
        mock_redis.setex.assert_called_once()
        args = mock_redis.setex.call_args[0]
        assert args[0] == f"{CACHE_KEY_PREFIX}:app:test-token"
        assert args[1] == CACHE_TTL_SECONDS

    @patch("services.api_token_service.redis_client")
    def test_set_null_token(self, mock_redis):
        """Test setting a null token (cache penetration prevention)."""
        result = ApiTokenCache.set("invalid-token", "app", None)

        assert result is True
        mock_redis.setex.assert_called_once()
        args = mock_redis.setex.call_args[0]
        assert args[0] == f"{CACHE_KEY_PREFIX}:app:invalid-token"
        assert args[1] == CACHE_NULL_TTL_SECONDS
        assert args[2] == b"null"

    @patch("services.api_token_service.redis_client")
    def test_delete_with_scope(self, mock_redis):
        """Test deleting token cache with specific scope."""
        result = ApiTokenCache.delete("test-token", "app")

        assert result is True
        mock_redis.delete.assert_called_once_with(f"{CACHE_KEY_PREFIX}:app:test-token")

    @patch("services.api_token_service.redis_client")
    def test_delete_without_scope(self, mock_redis):
        """Test deleting token cache without scope (delete all)."""
        # Mock scan_iter to return an iterator of keys
        mock_redis.scan_iter.return_value = iter(
            [
                b"api_token:app:test-token",
                b"api_token:dataset:test-token",
            ]
        )

        result = ApiTokenCache.delete("test-token", None)

        assert result is True
        # Verify scan_iter was called with the correct pattern
        mock_redis.scan_iter.assert_called_once()
        call_args = mock_redis.scan_iter.call_args
        assert call_args[1]["match"] == f"{CACHE_KEY_PREFIX}:*:test-token"

        # Verify delete was called with all matched keys
        mock_redis.delete.assert_called_once_with(
            b"api_token:app:test-token",
            b"api_token:dataset:test-token",
        )

    @patch("services.api_token_service.redis_client")
    def test_redis_fallback_on_exception(self, mock_redis):
        """Test Redis fallback when Redis is unavailable."""
        from redis import RedisError

        mock_redis.get.side_effect = RedisError("Connection failed")

        result = ApiTokenCache.get("test-token", "app")

        # Should return None (fallback) instead of raising exception
        assert result is None


class TestApiTokenCacheIntegration:
    """Integration test scenarios."""

    @patch("services.api_token_service.redis_client")
    def test_full_cache_lifecycle(self, mock_redis):
        """Test complete cache lifecycle: set -> get -> delete."""
        # Setup mock token
        mock_token = MagicMock()
        mock_token.id = "id-123"
        mock_token.app_id = "app-456"
        mock_token.tenant_id = "tenant-789"
        mock_token.type = "app"
        mock_token.token = "token-abc"
        mock_token.last_used_at = datetime(2026, 2, 3, 10, 0, 0)
        mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)

        # 1. Set token in cache
        ApiTokenCache.set("token-abc", "app", mock_token)
        assert mock_redis.setex.called

        # 2. Simulate cache hit
        cached_data = ApiTokenCache._serialize_token(mock_token)
        mock_redis.get.return_value = cached_data  # bytes from model_dump_json().encode()

        retrieved = ApiTokenCache.get("token-abc", "app")
        assert retrieved is not None
        assert isinstance(retrieved, CachedApiToken)

        # 3. Delete from cache
        ApiTokenCache.delete("token-abc", "app")
        assert mock_redis.delete.called

    @patch("services.api_token_service.redis_client")
    def test_cache_penetration_prevention(self, mock_redis):
        """Test that non-existent tokens are cached as null."""
        # Set null token (cache miss)
        ApiTokenCache.set("non-existent-token", "app", None)

        args = mock_redis.setex.call_args[0]
        assert args[2] == b"null"
        assert args[1] == CACHE_NULL_TTL_SECONDS  # Shorter TTL for null values
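The penetration-prevention tests above encode the negative-caching convention: a lookup that finds nothing stores the literal bytes b"null" under a shorter TTL, so repeated probes with invalid tokens stop hitting the database. A sketch of the write path consistent with those assertions (reconstructed from the tests, not copied from the service):

def cache_token(token: str, scope: str, record) -> bool:
    key = f"{CACHE_KEY_PREFIX}:{scope or 'any'}:{token}"
    if record is None:
        # Negative cache: short TTL so a token created later is seen quickly.
        redis_client.setex(key, CACHE_NULL_TTL_SECONDS, b"null")
    else:
        redis_client.setex(key, CACHE_TTL_SECONDS, serialize(record))  # serialize() is illustrative
    return True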
@@ -114,21 +114,6 @@ def mock_db_session():
    session = MagicMock()
    # Ensure tests can observe session.close() via context manager teardown
    session.close = MagicMock()
    session.commit = MagicMock()

-   # Mock session.begin() context manager to auto-commit on exit
-   begin_cm = MagicMock()
-   begin_cm.__enter__.return_value = session
-
-   def _begin_exit_side_effect(*args, **kwargs):
-       # session.begin().__exit__() should commit if no exception
-       if args[0] is None:  # No exception
-           session.commit()
-
-   begin_cm.__exit__.side_effect = _begin_exit_side_effect
-   session.begin.return_value = begin_cm

    # Mock create_session() context manager
    cm = MagicMock()
    cm.__enter__.return_value = session
@@ -1,261 +0,0 @@
/**
 * MAX_PARALLEL_LIMIT Configuration Bug Test
 *
 * This test reproduces and verifies the fix for issue #23083:
 * MAX_PARALLEL_LIMIT environment variable does not take effect in iteration panel
 */

import { render, screen } from '@testing-library/react'
import * as React from 'react'

// Mock environment variables before importing constants
const originalEnv = process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT

// Test with different environment values
function setupEnvironment(value?: string) {
  if (value)
    process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = value
  else
    delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT

  // Clear module cache to force re-evaluation
  vi.resetModules()
}

function restoreEnvironment() {
  if (originalEnv)
    process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = originalEnv
  else
    delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT

  vi.resetModules()
}

// Mock i18next with proper implementation
vi.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string) => {
      if (key.includes('MaxParallelismTitle'))
        return 'Max Parallelism'
      if (key.includes('MaxParallelismDesc'))
        return 'Maximum number of parallel executions'
      if (key.includes('parallelMode'))
        return 'Parallel Mode'
      if (key.includes('parallelPanelDesc'))
        return 'Enable parallel execution'
      if (key.includes('errorResponseMethod'))
        return 'Error Response Method'
      return key
    },
  }),
  initReactI18next: {
    type: '3rdParty',
    init: vi.fn(),
  },
}))

// Mock i18next module completely to prevent initialization issues
vi.mock('i18next', () => ({
  use: vi.fn().mockReturnThis(),
  init: vi.fn().mockReturnThis(),
  t: vi.fn(key => key),
  isInitialized: true,
}))

// Mock the useConfig hook
vi.mock('@/app/components/workflow/nodes/iteration/use-config', () => ({
  default: () => ({
    inputs: {
      is_parallel: true,
      parallel_nums: 5,
      error_handle_mode: 'terminated',
    },
    changeParallel: vi.fn(),
    changeParallelNums: vi.fn(),
    changeErrorHandleMode: vi.fn(),
  }),
}))

// Mock other components
vi.mock('@/app/components/workflow/nodes/_base/components/variable/var-reference-picker', () => ({
  default: function MockVarReferencePicker() {
    return <div data-testid="var-reference-picker">VarReferencePicker</div>
  },
}))

vi.mock('@/app/components/workflow/nodes/_base/components/split', () => ({
  default: function MockSplit() {
    return <div data-testid="split">Split</div>
  },
}))

vi.mock('@/app/components/workflow/nodes/_base/components/field', () => ({
  default: function MockField({ title, children }: { title: string, children: React.ReactNode }) {
    return (
      <div data-testid="field">
        <label>{title}</label>
        {children}
      </div>
    )
  },
}))

const getParallelControls = () => ({
  numberInput: screen.getByRole('spinbutton'),
  slider: screen.getByRole('slider'),
})

describe('MAX_PARALLEL_LIMIT Configuration Bug', () => {
  const mockNodeData = {
    id: 'test-iteration-node',
    type: 'iteration' as const,
    data: {
      title: 'Test Iteration',
      desc: 'Test iteration node',
      iterator_selector: ['test'],
      output_selector: ['output'],
      is_parallel: true,
      parallel_nums: 5,
      error_handle_mode: 'terminated' as const,
    },
  }

  beforeEach(() => {
    vi.clearAllMocks()
  })

  afterEach(() => {
    restoreEnvironment()
  })

  afterAll(() => {
    restoreEnvironment()
  })

  describe('Environment Variable Parsing', () => {
    it('should parse MAX_PARALLEL_LIMIT from NEXT_PUBLIC_MAX_PARALLEL_LIMIT environment variable', async () => {
      setupEnvironment('25')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      expect(MAX_PARALLEL_LIMIT).toBe(25)
    })

    it('should fallback to default when environment variable is not set', async () => {
      setupEnvironment() // No environment variable
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      expect(MAX_PARALLEL_LIMIT).toBe(10)
    })

    it('should handle invalid environment variable values', async () => {
      setupEnvironment('invalid')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')

      // Should fall back to default when parsing fails
      expect(MAX_PARALLEL_LIMIT).toBe(10)
    })

    it('should handle empty environment variable', async () => {
      setupEnvironment('')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')

      // Should fall back to default when empty
      expect(MAX_PARALLEL_LIMIT).toBe(10)
    })

    // Edge cases for boundary values
    it('should clamp MAX_PARALLEL_LIMIT to MIN when env is 0 or negative', async () => {
      setupEnvironment('0')
      let { MAX_PARALLEL_LIMIT } = await import('@/config')
      expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default

      setupEnvironment('-5')
      ;({ MAX_PARALLEL_LIMIT } = await import('@/config'))
      expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default
    })

    it('should handle float numbers by parseInt behavior', async () => {
      setupEnvironment('12.7')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      // parseInt truncates to integer
      expect(MAX_PARALLEL_LIMIT).toBe(12)
    })
  })

  describe('UI Component Integration (Main Fix Verification)', () => {
    it('should render iteration panel with environment-configured max value', async () => {
      // Set environment variable to a different value
      setupEnvironment('30')

      // Import Panel after setting environment
      const Panel = await import('@/app/components/workflow/nodes/iteration/panel').then(mod => mod.default)
      const { MAX_PARALLEL_LIMIT } = await import('@/config')

      render(
        <Panel
          id="test-node"
          // @ts-expect-error key type mismatch
          data={mockNodeData.data}
        />,
      )

      // Behavior-focused assertion: UI max should equal MAX_PARALLEL_LIMIT
      const { numberInput, slider } = getParallelControls()
      expect(numberInput).toHaveAttribute('max', String(MAX_PARALLEL_LIMIT))
      expect(slider).toHaveAttribute('aria-valuemax', String(MAX_PARALLEL_LIMIT))

      // Verify the actual values
      expect(MAX_PARALLEL_LIMIT).toBe(30)
      expect(numberInput.getAttribute('max')).toBe('30')
      expect(slider.getAttribute('aria-valuemax')).toBe('30')
    })

    it('should maintain UI consistency with different environment values', async () => {
      setupEnvironment('15')
      const Panel = await import('@/app/components/workflow/nodes/iteration/panel').then(mod => mod.default)
      const { MAX_PARALLEL_LIMIT } = await import('@/config')

      render(
        <Panel
          id="test-node"
          // @ts-expect-error key type mismatch
          data={mockNodeData.data}
        />,
      )

      // Both input and slider should use the same max value from MAX_PARALLEL_LIMIT
      const { numberInput, slider } = getParallelControls()

      expect(numberInput.getAttribute('max')).toBe(slider.getAttribute('aria-valuemax'))
      expect(numberInput.getAttribute('max')).toBe(String(MAX_PARALLEL_LIMIT))
    })
  })

  describe('Legacy Constant Verification (For Transition Period)', () => {
    // Marked as transition/deprecation tests
    it('should maintain MAX_ITERATION_PARALLEL_NUM for backward compatibility', async () => {
      const { MAX_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
      expect(typeof MAX_ITERATION_PARALLEL_NUM).toBe('number')
      expect(MAX_ITERATION_PARALLEL_NUM).toBe(10) // Hardcoded legacy value
    })

    it('should demonstrate MAX_PARALLEL_LIMIT vs legacy constant difference', async () => {
      setupEnvironment('50')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      const { MAX_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')

      // MAX_PARALLEL_LIMIT is configurable, MAX_ITERATION_PARALLEL_NUM is not
      expect(MAX_PARALLEL_LIMIT).toBe(50)
      expect(MAX_ITERATION_PARALLEL_NUM).toBe(10)
      expect(MAX_PARALLEL_LIMIT).not.toBe(MAX_ITERATION_PARALLEL_NUM)
    })
  })

  describe('Constants Validation', () => {
    it('should validate that required constants exist and have correct types', async () => {
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      const { MIN_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
      expect(typeof MAX_PARALLEL_LIMIT).toBe('number')
      expect(typeof MIN_ITERATION_PARALLEL_NUM).toBe('number')
      expect(MAX_PARALLEL_LIMIT).toBeGreaterThanOrEqual(MIN_ITERATION_PARALLEL_NUM)
    })
  })
})
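The core trick in the deleted test, re-reading a module-level constant after mutating process.env, works by combining vi.resetModules() with a dynamic import; a minimal standalone sketch (the '@/config' path follows this repo's alias, the rest is illustrative):

it('re-evaluates module constants after env changes', async () => {
  process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = '25'
  vi.resetModules() // drop the module cache so top-level code runs again
  const { MAX_PARALLEL_LIMIT } = await import('@/config')
  expect(MAX_PARALLEL_LIMIT).toBe(25)
})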
@@ -1,5 +1,5 @@
import type { RemixiconComponentType } from '@remixicon/react'
-import { z } from 'zod'
+import * as z from 'zod'

export const InputTypeEnum = z.enum([
  'text-input',
@@ -1,6 +1,6 @@
import type { ZodNumber, ZodSchema, ZodString } from 'zod'
import type { BaseConfiguration } from './types'
-import { z } from 'zod'
+import * as z from 'zod'
import { BaseFieldType } from './types'

export const generateZodSchema = (fields: BaseConfiguration[]) => {
@@ -1,4 +1,4 @@
-import { z } from 'zod'
+import * as z from 'zod'

const ContactMethod = z.union([
  z.literal('email'),
@@ -22,10 +22,10 @@ export const UserSchema = z.object({
    .min(3, 'Surname must be at least 3 characters long')
    .regex(/^[A-Z]/, 'Surname must start with a capital letter'),
  isAcceptingTerms: z.boolean().refine(val => val, {
-    message: 'You must accept the terms and conditions',
+    error: 'You must accept the terms and conditions',
  }),
  contact: z.object({
-    email: z.string().email('Invalid email address'),
+    email: z.email('Invalid email address'),
    phone: z.string().optional(),
    preferredContactMethod: ContactMethod,
  }),
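The hunk above follows the zod v4 API: string-format checks moved to top-level functions (z.email, z.url) and the per-check message option was renamed to error. A small before/after sketch (illustrative, not taken from this diff):

// zod v3 style
const before = z.object({
  email: z.string().email('Invalid email address'),
  isAcceptingTerms: z.boolean().refine(val => val, { message: 'You must accept' }),
})
// zod v4 style
const after = z.object({
  email: z.email('Invalid email address'),
  isAcceptingTerms: z.boolean().refine(val => val, { error: 'You must accept' }),
})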
@@ -1,6 +1,6 @@
import type { ZodSchema, ZodString } from 'zod'
import type { InputFieldConfiguration } from './types'
-import { z } from 'zod'
+import * as z from 'zod'
import { SupportedFileTypes, TransferMethod } from '@/app/components/rag-pipeline/components/panel/input-field/editor/form/schema'
import { InputFieldType } from './types'
@@ -2,6 +2,7 @@
import type { FC } from 'react'
import * as React from 'react'
import { useTranslation } from 'react-i18next'
+import { env } from '@/env'
import ParamItem from '.'

type Props = {
@@ -11,12 +12,7 @@ type Props = {
  enable: boolean
}

-const maxTopK = (() => {
-  const configValue = Number.parseInt(globalThis.document?.body?.getAttribute('data-public-top-k-max-value') || '', 10)
-  if (configValue && !isNaN(configValue))
-    return configValue
-  return 10
-})()
+const maxTopK = env.NEXT_PUBLIC_TOP_K_MAX_VALUE
const VALUE_LIMIT = {
  default: 2,
  step: 1,
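This replaces per-component parsing of a data attribute stamped onto <body> with a single typed read: the default and the integer parsing now live in the zod schema in web/env.ts (shown later in this diff) instead of being repeated at each call site. Roughly:

// before: every consumer re-parses the attribute and re-states the default
const legacyMaxTopK = Number.parseInt(
  globalThis.document?.body?.getAttribute('data-public-top-k-max-value') || '', 10) || 10
// after: validated once by NEXT_PUBLIC_TOP_K_MAX_VALUE: coercedNumber.default(10)
const maxTopK = env.NEXT_PUBLIC_TOP_K_MAX_VALUE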
@@ -1,6 +1,6 @@
import { render, screen } from '@testing-library/react'
import { noop } from 'es-toolkit/function'
-import { z } from 'zod'
+import * as z from 'zod'
import withValidation from '.'

describe('withValidation HOC', () => {
@@ -1,5 +1,5 @@
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
-import { z } from 'zod'
+import * as z from 'zod'
import withValidation from '.'

// Sample components to wrap with validation
@@ -65,7 +65,7 @@ const ProductCard = ({ name, price, category, inStock }: ProductCardProps) => {
// Create validated versions
const userSchema = z.object({
  name: z.string().min(1, 'Name is required'),
-  email: z.string().email('Invalid email'),
+  email: z.email('Invalid email'),
  age: z.number().min(0).max(150),
})

@@ -371,7 +371,7 @@ export const ConfigurationValidation: Story = {
)

const configSchema = z.object({
-  apiUrl: z.string().url('Must be valid URL'),
+  apiUrl: z.url('Must be valid URL'),
  timeout: z.number().min(0).max(30000),
  retries: z.number().min(0).max(5),
  debug: z.boolean(),
@@ -430,7 +430,7 @@ export const UsageDocumentation: Story = {
      <div>
        <h4 className="mb-2 text-sm font-semibold text-gray-900">Usage Example</h4>
        <pre className="overflow-x-auto rounded-lg bg-gray-900 p-4 text-xs text-gray-100">
-          {`import { z } from 'zod'
+          {`import * as z from 'zod'
import withValidation from './withValidation'

// Define your component
@@ -5,6 +5,7 @@ import { useTranslation } from 'react-i18next'
import Input from '@/app/components/base/input'
import { InputNumber } from '@/app/components/base/input-number'
import Tooltip from '@/app/components/base/tooltip'
+import { env } from '@/env'

const TextLabel: FC<PropsWithChildren> = (props) => {
  return <label className="text-xs font-semibold leading-none text-text-secondary">{props.children}</label>
@@ -46,7 +47,7 @@ export const DelimiterInput: FC<InputProps & { tooltip?: string }> = (props) =>
}

export const MaxLengthInput: FC<InputNumberProps> = (props) => {
-  const maxValue = Number.parseInt(globalThis.document?.body?.getAttribute('data-public-indexing-max-segmentation-tokens-length') || '4000', 10)
+  const maxValue = env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH

  const { t } = useTranslation()
  return (
@@ -1,5 +1,6 @@
import type { ParentMode, PreProcessingRule, ProcessRule, Rules, SummaryIndexSetting as SummaryIndexSettingType } from '@/models/datasets'
import { useCallback, useRef, useState } from 'react'
+import { env } from '@/env'
import { ChunkingMode, ProcessMode } from '@/models/datasets'
import escape from './escape'
import unescape from './unescape'
@@ -8,10 +9,7 @@ import unescape from './unescape'

export const DEFAULT_SEGMENT_IDENTIFIER = '\\n\\n'
export const DEFAULT_MAXIMUM_CHUNK_LENGTH = 1024
export const DEFAULT_OVERLAP = 50
-export const MAXIMUM_CHUNK_TOKEN_LENGTH = Number.parseInt(
-  globalThis.document?.body?.getAttribute('data-public-indexing-max-segmentation-tokens-length') || '4000',
-  10,
-)
+export const MAXIMUM_CHUNK_TOKEN_LENGTH = env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH

export type ParentChildConfig = {
  chunkForContext: ParentMode
@@ -1,7 +1,7 @@
import type { BaseConfiguration } from '@/app/components/base/form/form-scenarios/base/types'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import * as React from 'react'
-import { z } from 'zod'
+import * as z from 'zod'
import { BaseFieldType } from '@/app/components/base/form/form-scenarios/base/types'
import Toast from '@/app/components/base/toast'
import Actions from './actions'
@@ -53,7 +53,7 @@ const createFailingSchema = () => {
        issues: [{ path: ['field1'], message: 'is required' }],
      },
    }),
-  } as unknown as z.ZodSchema
+  } as unknown as z.ZodType
}

// ==========================================
@@ -28,6 +28,7 @@ import { useGlobalPublicStore } from '@/context/global-public-context'
import { useDocLink } from '@/context/i18n'
import { useModalContext } from '@/context/modal-context'
import { useProviderContext } from '@/context/provider-context'
+import { env } from '@/env'
import { useLogout } from '@/service/use-common'
import { cn } from '@/utils/classnames'
import AccountAbout from '../account-about'
@@ -178,7 +179,7 @@ export default function AppSelector() {
        </Link>
      </MenuItem>
      {
-        document?.body?.getAttribute('data-public-site-about') !== 'hide' && (
+        env.NEXT_PUBLIC_SITE_ABOUT !== 'hide' && (
          <MenuItem>
            <div
              className={cn(itemClassName, 'justify-between', 'data-[active]:bg-state-base-hover')}
@@ -3,6 +3,7 @@
import { SerwistProvider } from '@serwist/turbopack/react'
import { useEffect } from 'react'
import { IS_DEV } from '@/config'
+import { env } from '@/env'
import { isClient } from '@/utils/client'

export function PWAProvider({ children }: { children: React.ReactNode }) {
@@ -10,7 +11,7 @@ export function PWAProvider({ children }: { children: React.ReactNode }) {
    return <DisabledPWAProvider>{children}</DisabledPWAProvider>
  }

-  const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
+  const basePath = env.NEXT_PUBLIC_BASE_PATH
  const swUrl = `${basePath}/serwist/sw.js`

  return (
@@ -1,6 +1,6 @@
import type { TFunction } from 'i18next'
import type { SchemaOptions } from './types'
-import { z } from 'zod'
+import * as z from 'zod'
import { InputTypeEnum } from '@/app/components/base/form/components/field/input-type-select/types'
import { MAX_VAR_KEY_LENGTH } from '@/config'
import { PipelineInputVarType } from '@/models/pipeline'
@@ -41,49 +41,47 @@ export const createInputFieldSchema = (type: PipelineInputVarType, t: TFunction,
    tooltips: z.string().optional(),
  })
  if (type === PipelineInputVarType.textInput || type === PipelineInputVarType.paragraph) {
-    return z.object({
+    return z.looseObject({
      maxLength: z.number().min(1).max(TEXT_MAX_LENGTH),
      default: z.string().optional(),
-    }).merge(commonSchema).passthrough()
+    }).extend(commonSchema.shape)
  }
  if (type === PipelineInputVarType.number) {
-    return z.object({
+    return z.looseObject({
      default: z.number().optional(),
      unit: z.string().optional(),
      placeholder: z.string().optional(),
-    }).merge(commonSchema).passthrough()
+    }).extend(commonSchema.shape)
  }
  if (type === PipelineInputVarType.select) {
-    return z.object({
-      options: z.array(z.string()).nonempty({
-        message: t('variableConfig.errorMsg.atLeastOneOption', { ns: 'appDebug' }),
-      }).refine(
+    return z.looseObject({
+      options: z.tuple([z.string()], z.string()).refine(
        arr => new Set(arr).size === arr.length,
        {
          message: t('variableConfig.errorMsg.optionRepeat', { ns: 'appDebug' }),
        },
      ),
      default: z.string().optional(),
-    }).merge(commonSchema).passthrough()
+    }).extend(commonSchema.shape)
  }
  if (type === PipelineInputVarType.singleFile) {
-    return z.object({
+    return z.looseObject({
      allowedFileUploadMethods: z.array(TransferMethod),
-      allowedTypesAndExtensions: z.object({
+      allowedTypesAndExtensions: z.looseObject({
        allowedFileExtensions: z.array(z.string()).optional(),
        allowedFileTypes: z.array(SupportedFileTypes),
      }),
-    }).merge(commonSchema).passthrough()
+    }).extend(commonSchema.shape)
  }
  if (type === PipelineInputVarType.multiFiles) {
-    return z.object({
+    return z.looseObject({
      allowedFileUploadMethods: z.array(TransferMethod),
-      allowedTypesAndExtensions: z.object({
+      allowedTypesAndExtensions: z.looseObject({
        allowedFileExtensions: z.array(z.string()).optional(),
        allowedFileTypes: z.array(SupportedFileTypes),
      }),
      maxLength: z.number().min(1).max(maxFileUploadLimit),
-    }).merge(commonSchema).passthrough()
+    }).extend(commonSchema.shape)
  }
-  return commonSchema.passthrough()
+  return z.looseObject(commonSchema.shape)
}
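Two zod v4 idioms drive this hunk: z.looseObject(shape) declares up front what .passthrough() used to bolt on afterwards, and .extend(other.shape) replaces the deprecated .merge(other). The nonempty options array likewise becomes a variadic tuple, which encodes "at least one element" directly. An equivalence sketch (illustrative):

// zod v3: strict object made permissive after the fact
const v3 = z.object({ maxLength: z.number() }).merge(commonSchema).passthrough()
// zod v4: permissiveness declared up front, shapes combined via extend
const v4 = z.looseObject({ maxLength: z.number() }).extend(commonSchema.shape)
// "non-empty array of strings" as a tuple with a rest element
const atLeastOne = z.tuple([z.string()], z.string())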
@@ -4,15 +4,16 @@ import * as Sentry from '@sentry/react'
import { useEffect } from 'react'

import { IS_DEV } from '@/config'
+import { env } from '@/env'

const SentryInitializer = ({
  children,
}: { children: React.ReactElement }) => {
  useEffect(() => {
-    const SENTRY_DSN = document?.body?.getAttribute('data-public-sentry-dsn')
-    if (!IS_DEV && SENTRY_DSN) {
+    const sentryDsn = env.NEXT_PUBLIC_SENTRY_DSN
+    if (!IS_DEV && sentryDsn) {
      Sentry.init({
-        dsn: SENTRY_DSN,
+        dsn: sentryDsn,
        integrations: [
          Sentry.browserTracingIntegration(),
          Sentry.replayIntegration(),
@@ -3,6 +3,7 @@ import { useTranslation } from 'react-i18next'
import { InputNumber } from '@/app/components/base/input-number'
import Switch from '@/app/components/base/switch'
import Tooltip from '@/app/components/base/tooltip'
+import { env } from '@/env'

export type TopKAndScoreThresholdProps = {
  topK: number
@@ -15,12 +16,7 @@ export type TopKAndScoreThresholdProps = {
  hiddenScoreThreshold?: boolean
}

-const maxTopK = (() => {
-  const configValue = Number.parseInt(globalThis.document?.body?.getAttribute('data-public-top-k-max-value') || '', 10)
-  if (configValue && !isNaN(configValue))
-    return configValue
-  return 10
-})()
+const maxTopK = env.NEXT_PUBLIC_TOP_K_MAX_VALUE
const TOP_K_VALUE_LIMIT = {
  amount: 1,
  min: 1,
@@ -1,6 +1,6 @@
import type { ValidationError } from 'jsonschema'
import type { ArrayItems, Field, LLMNodeType } from './types'
-import { z } from 'zod'
+import * as z from 'zod'
import { draft07Validator, forbidBooleanProperties } from '@/utils/validators'
import { ArrayType, Type } from './types'
@@ -1,4 +1,4 @@
-import { z } from 'zod'
+import * as z from 'zod'

const arrayStringSchemaParttern = z.array(z.string())
const arrayNumberSchemaParttern = z.array(z.number())
@@ -7,7 +7,7 @@ const arrayNumberSchemaParttern = z.array(z.number())
const literalSchema = z.union([z.string(), z.number(), z.boolean(), z.null()])
type Literal = z.infer<typeof literalSchema>
type Json = Literal | { [key: string]: Json } | Json[]
-const jsonSchema: z.ZodType<Json> = z.lazy(() => z.union([literalSchema, z.array(jsonSchema), z.record(jsonSchema)]))
+const jsonSchema: z.ZodType<Json> = z.lazy(() => z.union([literalSchema, z.array(jsonSchema), z.record(z.string(), jsonSchema)]))
const arrayJsonSchema: z.ZodType<Json[]> = z.lazy(() => z.array(jsonSchema))

export const validateJSONSchema = (schema: any, type: string) => {
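zod v4 also drops the single-argument z.record overload; the key schema must now be spelled out, which is the whole change here:

// v3: z.record(jsonSchema) implied string keys
// v4: the key schema is required
const record = z.record(z.string(), jsonSchema)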
@@ -7,7 +7,7 @@ import { useRouter } from 'next/navigation'
import * as React from 'react'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
-import { z } from 'zod'
+import * as z from 'zod'
import Button from '@/app/components/base/button'
import { formContext, useAppForm } from '@/app/components/base/form'
import { zodSubmitValidator } from '@/app/components/base/form/utils/zod-submit-validator'
@@ -22,10 +22,10 @@ import Input from '../components/base/input'
import Loading from '../components/base/loading'

const accountFormSchema = z.object({
-  email: z
-    .string()
-    .min(1, { message: 'error.emailInValid' })
-    .email('error.emailInValid'),
+  email: z.email('error.emailInValid')
+    .min(1, {
+      error: 'error.emailInValid',
+    }),
})

const ForgotPasswordForm = () => {
@@ -7,7 +7,7 @@ import { useRouter } from 'next/navigation'
import * as React from 'react'
import { useEffect } from 'react'
import { useTranslation } from 'react-i18next'
-import { z } from 'zod'
+import * as z from 'zod'
import Button from '@/app/components/base/button'
import { formContext, useAppForm } from '@/app/components/base/form'
import { zodSubmitValidator } from '@/app/components/base/form/utils/zod-submit-validator'
@@ -22,13 +22,15 @@ import { encryptPassword as encodePassword } from '@/utils/encryption'
import Loading from '../components/base/loading'

const accountFormSchema = z.object({
-  email: z
-    .string()
-    .min(1, { message: 'error.emailInValid' })
-    .email('error.emailInValid'),
-  name: z.string().min(1, { message: 'error.nameEmpty' }),
+  email: z.email('error.emailInValid')
+    .min(1, {
+      error: 'error.emailInValid',
+    }),
+  name: z.string().min(1, {
+    error: 'error.nameEmpty',
+  }),
  password: z.string().min(8, {
-    message: 'error.passwordLengthInValid',
+    error: 'error.passwordLengthInValid',
  }).regex(validPassword, 'error.passwordInvalid'),
})

@@ -197,7 +199,7 @@ const InstallForm = () => {
          </div>

          <div className={cn('mt-1 text-xs text-text-secondary', {
-            'text-red-400 !text-sm': passwordErrors && passwordErrors.length > 0,
+            '!text-sm text-red-400': passwordErrors && passwordErrors.length > 0,
          })}
          >
            {t('error.passwordInvalid', { ns: 'login' })}
@@ -5,8 +5,8 @@ import { Instrument_Serif } from 'next/font/google'
import { NuqsAdapter } from 'nuqs/adapters/next/app'
import GlobalPublicStoreProvider from '@/context/global-public-context'
import { TanstackQueryInitializer } from '@/context/query-client'
+import { getDatasetMap } from '@/env'
import { getLocaleOnServer } from '@/i18n-config/server'
-import { DatasetAttr } from '@/types/feature'
import { cn } from '@/utils/classnames'
import { ToastProvider } from './components/base/toast'
import BrowserInitializer from './components/browser-initializer'
@@ -39,40 +39,7 @@ const LocaleLayout = async ({
  children: React.ReactNode
}) => {
  const locale = await getLocaleOnServer()

-  const datasetMap: Record<DatasetAttr, string | undefined> = {
-    [DatasetAttr.DATA_API_PREFIX]: process.env.NEXT_PUBLIC_API_PREFIX,
-    [DatasetAttr.DATA_PUBLIC_API_PREFIX]: process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX,
-    [DatasetAttr.DATA_MARKETPLACE_API_PREFIX]: process.env.NEXT_PUBLIC_MARKETPLACE_API_PREFIX,
-    [DatasetAttr.DATA_MARKETPLACE_URL_PREFIX]: process.env.NEXT_PUBLIC_MARKETPLACE_URL_PREFIX,
-    [DatasetAttr.DATA_PUBLIC_EDITION]: process.env.NEXT_PUBLIC_EDITION,
-    [DatasetAttr.DATA_PUBLIC_AMPLITUDE_API_KEY]: process.env.NEXT_PUBLIC_AMPLITUDE_API_KEY,
-    [DatasetAttr.DATA_PUBLIC_COOKIE_DOMAIN]: process.env.NEXT_PUBLIC_COOKIE_DOMAIN,
-    [DatasetAttr.DATA_PUBLIC_SUPPORT_MAIL_LOGIN]: process.env.NEXT_PUBLIC_SUPPORT_MAIL_LOGIN,
-    [DatasetAttr.DATA_PUBLIC_SENTRY_DSN]: process.env.NEXT_PUBLIC_SENTRY_DSN,
-    [DatasetAttr.DATA_PUBLIC_MAINTENANCE_NOTICE]: process.env.NEXT_PUBLIC_MAINTENANCE_NOTICE,
-    [DatasetAttr.DATA_PUBLIC_SITE_ABOUT]: process.env.NEXT_PUBLIC_SITE_ABOUT,
-    [DatasetAttr.DATA_PUBLIC_TEXT_GENERATION_TIMEOUT_MS]: process.env.NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS,
-    [DatasetAttr.DATA_PUBLIC_MAX_TOOLS_NUM]: process.env.NEXT_PUBLIC_MAX_TOOLS_NUM,
-    [DatasetAttr.DATA_PUBLIC_MAX_PARALLEL_LIMIT]: process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT,
-    [DatasetAttr.DATA_PUBLIC_TOP_K_MAX_VALUE]: process.env.NEXT_PUBLIC_TOP_K_MAX_VALUE,
-    [DatasetAttr.DATA_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH]: process.env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH,
-    [DatasetAttr.DATA_PUBLIC_LOOP_NODE_MAX_COUNT]: process.env.NEXT_PUBLIC_LOOP_NODE_MAX_COUNT,
-    [DatasetAttr.DATA_PUBLIC_MAX_ITERATIONS_NUM]: process.env.NEXT_PUBLIC_MAX_ITERATIONS_NUM,
-    [DatasetAttr.DATA_PUBLIC_MAX_TREE_DEPTH]: process.env.NEXT_PUBLIC_MAX_TREE_DEPTH,
-    [DatasetAttr.DATA_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME]: process.env.NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME,
-    [DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_JINAREADER]: process.env.NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER,
-    [DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_FIRECRAWL]: process.env.NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL,
-    [DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_WATERCRAWL]: process.env.NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL,
-    [DatasetAttr.DATA_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX]: process.env.NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX,
-    [DatasetAttr.NEXT_PUBLIC_ZENDESK_WIDGET_KEY]: process.env.NEXT_PUBLIC_ZENDESK_WIDGET_KEY,
-    [DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT]: process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT,
-    [DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION]: process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION,
-    [DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL]: process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL,
-    [DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID]: process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID,
-    [DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN]: process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN,
-    [DatasetAttr.DATA_PUBLIC_BATCH_CONCURRENCY]: process.env.NEXT_PUBLIC_BATCH_CONCURRENCY,
-  }
+  const datasetMap = getDatasetMap()

  return (
    <html lang={locale ?? 'en'} className={cn('h-full', instrumentSerif.variable)} suppressHydrationWarning>
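getDatasetMap() lives in the new web/env.ts; only part of that file is shown below, so the helper's body is not visible in this diff. Judging by the literal it replaces, it presumably rebuilds the same DatasetAttr-keyed record from process.env on the server, along the lines of this hedged sketch (toDatasetAttr is a hypothetical name):

// assumed shape only; the real implementation is in web/env.ts
const getDatasetMap = (): Record<DatasetAttr, string | undefined> =>
  ObjectFromEntries(
    ObjectKeys(clientSchema).map(key => [toDatasetAttr(key), process.env[key]]),
  )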
@@ -1,6 +1,7 @@
import { createSerwistRoute } from '@serwist/turbopack'
+import { env } from '@/env'

-const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
+const basePath = env.NEXT_PUBLIC_BASE_PATH

export const { dynamic, dynamicParams, revalidate, generateStaticParams, GET } = createSerwistRoute({
  swSrc: 'app/sw.ts',
@@ -1,101 +1,51 @@
import type { ModelParameterRule } from '@/app/components/header/account-setting/model-provider-page/declarations'
import { InputVarType } from '@/app/components/workflow/types'
+import { env } from '@/env'
import { PromptRole } from '@/models/debug'
import { PipelineInputVarType } from '@/models/pipeline'
import { AgentStrategy } from '@/types/app'
-import { DatasetAttr } from '@/types/feature'
import pkg from '../package.json'

-const getBooleanConfig = (
-  envVar: string | undefined,
-  dataAttrKey: DatasetAttr,
-  defaultValue: boolean = true,
-) => {
-  if (envVar !== undefined && envVar !== '')
-    return envVar === 'true'
-  const attrValue = globalThis.document?.body?.getAttribute(dataAttrKey)
-  if (attrValue !== undefined && attrValue !== '')
-    return attrValue === 'true'
-  return defaultValue
-}
-
-const getNumberConfig = (
-  envVar: string | undefined,
-  dataAttrKey: DatasetAttr,
-  defaultValue: number,
-) => {
-  if (envVar) {
-    const parsed = Number.parseInt(envVar)
-    if (!Number.isNaN(parsed) && parsed > 0)
-      return parsed
-  }
-
-  const attrValue = globalThis.document?.body?.getAttribute(dataAttrKey)
-  if (attrValue) {
-    const parsed = Number.parseInt(attrValue)
-    if (!Number.isNaN(parsed) && parsed > 0)
-      return parsed
-  }
-  return defaultValue
-}
-
const getStringConfig = (
  envVar: string | undefined,
-  dataAttrKey: DatasetAttr,
  defaultValue: string,
) => {
  if (envVar)
    return envVar
-
-  const attrValue = globalThis.document?.body?.getAttribute(dataAttrKey)
-  if (attrValue)
-    return attrValue
  return defaultValue
}

export const API_PREFIX = getStringConfig(
-  process.env.NEXT_PUBLIC_API_PREFIX,
-  DatasetAttr.DATA_API_PREFIX,
+  env.NEXT_PUBLIC_API_PREFIX,
  'http://localhost:5001/console/api',
)
export const PUBLIC_API_PREFIX = getStringConfig(
-  process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX,
-  DatasetAttr.DATA_PUBLIC_API_PREFIX,
+  env.NEXT_PUBLIC_PUBLIC_API_PREFIX,
  'http://localhost:5001/api',
)
export const MARKETPLACE_API_PREFIX = getStringConfig(
-  process.env.NEXT_PUBLIC_MARKETPLACE_API_PREFIX,
-  DatasetAttr.DATA_MARKETPLACE_API_PREFIX,
+  env.NEXT_PUBLIC_MARKETPLACE_API_PREFIX,
  'http://localhost:5002/api',
)
export const MARKETPLACE_URL_PREFIX = getStringConfig(
-  process.env.NEXT_PUBLIC_MARKETPLACE_URL_PREFIX,
-  DatasetAttr.DATA_MARKETPLACE_URL_PREFIX,
+  env.NEXT_PUBLIC_MARKETPLACE_URL_PREFIX,
  '',
)

-const EDITION = getStringConfig(
-  process.env.NEXT_PUBLIC_EDITION,
-  DatasetAttr.DATA_PUBLIC_EDITION,
-  'SELF_HOSTED',
-)
+const EDITION = env.NEXT_PUBLIC_EDITION

export const IS_CE_EDITION = EDITION === 'SELF_HOSTED'
export const IS_CLOUD_EDITION = EDITION === 'CLOUD'

export const AMPLITUDE_API_KEY = getStringConfig(
-  process.env.NEXT_PUBLIC_AMPLITUDE_API_KEY,
-  DatasetAttr.DATA_PUBLIC_AMPLITUDE_API_KEY,
+  env.NEXT_PUBLIC_AMPLITUDE_API_KEY,
  '',
)

-export const IS_DEV = process.env.NODE_ENV === 'development'
-export const IS_PROD = process.env.NODE_ENV === 'production'
+export const IS_DEV = env.NODE_ENV === 'development'
+export const IS_PROD = env.NODE_ENV === 'production'

-export const SUPPORT_MAIL_LOGIN = !!(
-  process.env.NEXT_PUBLIC_SUPPORT_MAIL_LOGIN
-  || globalThis.document?.body?.getAttribute('data-public-support-mail-login')
-)
+export const SUPPORT_MAIL_LOGIN = env.NEXT_PUBLIC_SUPPORT_MAIL_LOGIN

export const TONE_LIST = [
  {
@@ -161,16 +111,11 @@ export const getMaxToken = (modelId: string) => {
export const LOCALE_COOKIE_NAME = 'locale'

const COOKIE_DOMAIN = getStringConfig(
-  process.env.NEXT_PUBLIC_COOKIE_DOMAIN,
-  DatasetAttr.DATA_PUBLIC_COOKIE_DOMAIN,
+  env.NEXT_PUBLIC_COOKIE_DOMAIN,
  '',
).trim()

-export const BATCH_CONCURRENCY = getNumberConfig(
-  process.env.NEXT_PUBLIC_BATCH_CONCURRENCY,
-  DatasetAttr.DATA_PUBLIC_BATCH_CONCURRENCY,
-  5, // default
-)
+export const BATCH_CONCURRENCY = env.NEXT_PUBLIC_BATCH_CONCURRENCY

export const CSRF_COOKIE_NAME = () => {
  if (COOKIE_DOMAIN)
@@ -342,112 +287,62 @@ export const VAR_REGEX
export const resetReg = () => (VAR_REGEX.lastIndex = 0)

export const DISABLE_UPLOAD_IMAGE_AS_ICON
-  = process.env.NEXT_PUBLIC_DISABLE_UPLOAD_IMAGE_AS_ICON === 'true'
+  = env.NEXT_PUBLIC_DISABLE_UPLOAD_IMAGE_AS_ICON

export const GITHUB_ACCESS_TOKEN
-  = process.env.NEXT_PUBLIC_GITHUB_ACCESS_TOKEN || ''
+  = env.NEXT_PUBLIC_GITHUB_ACCESS_TOKEN

export const SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS = '.difypkg,.difybndl'
export const FULL_DOC_PREVIEW_LENGTH = 50

export const JSON_SCHEMA_MAX_DEPTH = 10

-export const MAX_TOOLS_NUM = getNumberConfig(
-  process.env.NEXT_PUBLIC_MAX_TOOLS_NUM,
-  DatasetAttr.DATA_PUBLIC_MAX_TOOLS_NUM,
-  10,
-)
-export const MAX_PARALLEL_LIMIT = getNumberConfig(
-  process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT,
-  DatasetAttr.DATA_PUBLIC_MAX_PARALLEL_LIMIT,
-  10,
-)
-export const TEXT_GENERATION_TIMEOUT_MS = getNumberConfig(
-  process.env.NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS,
-  DatasetAttr.DATA_PUBLIC_TEXT_GENERATION_TIMEOUT_MS,
-  60000,
-)
-export const LOOP_NODE_MAX_COUNT = getNumberConfig(
-  process.env.NEXT_PUBLIC_LOOP_NODE_MAX_COUNT,
-  DatasetAttr.DATA_PUBLIC_LOOP_NODE_MAX_COUNT,
-  100,
-)
-export const MAX_ITERATIONS_NUM = getNumberConfig(
-  process.env.NEXT_PUBLIC_MAX_ITERATIONS_NUM,
-  DatasetAttr.DATA_PUBLIC_MAX_ITERATIONS_NUM,
-  99,
-)
-export const MAX_TREE_DEPTH = getNumberConfig(
-  process.env.NEXT_PUBLIC_MAX_TREE_DEPTH,
-  DatasetAttr.DATA_PUBLIC_MAX_TREE_DEPTH,
-  50,
-)
+export const MAX_TOOLS_NUM = env.NEXT_PUBLIC_MAX_TOOLS_NUM
+export const MAX_PARALLEL_LIMIT = env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
+export const TEXT_GENERATION_TIMEOUT_MS = env.NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS
+export const LOOP_NODE_MAX_COUNT = env.NEXT_PUBLIC_LOOP_NODE_MAX_COUNT
+export const MAX_ITERATIONS_NUM = env.NEXT_PUBLIC_MAX_ITERATIONS_NUM
+export const MAX_TREE_DEPTH = env.NEXT_PUBLIC_MAX_TREE_DEPTH

-export const ALLOW_UNSAFE_DATA_SCHEME = getBooleanConfig(
-  process.env.NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME,
-  DatasetAttr.DATA_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME,
-  false,
-)
-export const ENABLE_WEBSITE_JINAREADER = getBooleanConfig(
-  process.env.NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER,
-  DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_JINAREADER,
-  true,
-)
-export const ENABLE_WEBSITE_FIRECRAWL = getBooleanConfig(
-  process.env.NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL,
-  DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_FIRECRAWL,
-  true,
-)
-export const ENABLE_WEBSITE_WATERCRAWL = getBooleanConfig(
-  process.env.NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL,
-  DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_WATERCRAWL,
-  false,
-)
-export const ENABLE_SINGLE_DOLLAR_LATEX = getBooleanConfig(
-  process.env.NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX,
-  DatasetAttr.DATA_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX,
-  false,
-)
+export const ALLOW_UNSAFE_DATA_SCHEME = env.NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME
+export const ENABLE_WEBSITE_JINAREADER = env.NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER
+export const ENABLE_WEBSITE_FIRECRAWL = env.NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL
+export const ENABLE_WEBSITE_WATERCRAWL = env.NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL
+export const ENABLE_SINGLE_DOLLAR_LATEX = env.NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX

export const VALUE_SELECTOR_DELIMITER = '@@@'

export const validPassword = /^(?=.*[a-z])(?=.*\d)\S{8,}$/i

export const ZENDESK_WIDGET_KEY = getStringConfig(
-  process.env.NEXT_PUBLIC_ZENDESK_WIDGET_KEY,
-  DatasetAttr.NEXT_PUBLIC_ZENDESK_WIDGET_KEY,
+  env.NEXT_PUBLIC_ZENDESK_WIDGET_KEY,
  '',
)
export const ZENDESK_FIELD_IDS = {
  ENVIRONMENT: getStringConfig(
-    process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT,
-    DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT,
+    env.NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT,
    '',
  ),
  VERSION: getStringConfig(
-    process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION,
-    DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION,
+    env.NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION,
    '',
  ),
  EMAIL: getStringConfig(
-    process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL,
-    DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL,
+    env.NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL,
    '',
  ),
  WORKSPACE_ID: getStringConfig(
-    process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID,
-    DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID,
+    env.NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID,
    '',
  ),
  PLAN: getStringConfig(
-    process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN,
-    DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN,
+    env.NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN,
    '',
  ),
}
export const APP_VERSION = pkg.version

-export const IS_MARKETPLACE = globalThis.document?.body?.getAttribute('data-is-marketplace') === 'true'
+export const IS_MARKETPLACE = env.NEXT_PUBLIC_IS_MARKETPLACE

export const RAG_PIPELINE_PREVIEW_CHUNK_NUM = 20
@@ -10,6 +10,7 @@ import { setUserId, setUserProperties } from '@/app/components/base/amplitude'
import { setZendeskConversationFields } from '@/app/components/base/zendesk/utils'
import MaintenanceNotice from '@/app/components/header/maintenance-notice'
import { ZENDESK_FIELD_IDS } from '@/config'
+import { env } from '@/env'
import {
  useCurrentWorkspace,
  useLangGeniusVersion,
@@ -204,7 +205,7 @@ export const AppContextProvider: FC<AppContextProviderProps> = ({ children }) =>
      }}
    >
      <div className="flex h-full flex-col overflow-y-auto">
-        {globalThis.document?.body?.getAttribute('data-public-maintenance-notice') && <MaintenanceNotice />}
+        {env.NEXT_PUBLIC_MAINTENANCE_NOTICE && <MaintenanceNotice />}
        <div className="relative flex grow flex-col overflow-y-auto overflow-x-hidden bg-background-body">
          {children}
        </div>
233 web/env.ts Normal file
@@ -0,0 +1,233 @@
import type { CamelCase, Replace } from 'string-ts'
import { createEnv } from '@t3-oss/env-nextjs'
import { concat, kebabCase, length, slice } from 'string-ts'
import * as z from 'zod'
import { isClient, isServer } from './utils/client'
import { ObjectFromEntries, ObjectKeys } from './utils/object'

const CLIENT_ENV_PREFIX = 'NEXT_PUBLIC_'
type ClientSchema = Record<`${typeof CLIENT_ENV_PREFIX}${string}`, z.ZodType>

const coercedBoolean = z.string().transform(s => s !== 'false' && s !== '0')
const coercedNumber = z.coerce.number().int().positive()

/// keep-sorted
const clientSchema = {
  /**
   * Default is not allow to embed into iframe to prevent Clickjacking: https://owasp.org/www-community/attacks/Clickjacking
   */
  NEXT_PUBLIC_ALLOW_EMBED: coercedBoolean.default(false),
  /**
   * Allow rendering unsafe URLs which have "data:" scheme.
   */
  NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME: coercedBoolean.default(false),
  /**
   * The API key of amplitude
   */
  NEXT_PUBLIC_AMPLITUDE_API_KEY: z.string().optional(),
  /**
   * The base URL of console application, refers to the Console base URL of WEB service if console domain is
   * different from api or web app domain.
   * example: http://cloud.dify.ai/console/api
   */
  NEXT_PUBLIC_API_PREFIX: z.url().optional(),
  /**
   * The base path for the application
   */
  NEXT_PUBLIC_BASE_PATH: z.string().regex(/^\/.*[^/]$/).or(z.literal('')).default(''),
  /**
   * number of concurrency
   */
  NEXT_PUBLIC_BATCH_CONCURRENCY: coercedNumber.default(5),
  /**
   * When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
   */
  NEXT_PUBLIC_COOKIE_DOMAIN: z.string().optional(),
  /**
   * CSP https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP
   */
  NEXT_PUBLIC_CSP_WHITELIST: z.string().optional(),
  /**
   * For production release, change this to PRODUCTION
   */
  NEXT_PUBLIC_DEPLOY_ENV: z.enum(['DEVELOPMENT', 'PRODUCTION', 'TESTING']).optional(),
  NEXT_PUBLIC_DISABLE_UPLOAD_IMAGE_AS_ICON: coercedBoolean.default(false),
  /**
   * The deployment edition, SELF_HOSTED
   */
  NEXT_PUBLIC_EDITION: z.enum(['SELF_HOSTED', 'CLOUD']).default('SELF_HOSTED'),
  /**
   * Enable inline LaTeX rendering with single dollar signs ($...$)
   * Default is false for security reasons to prevent conflicts with regular text
   */
  NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX: coercedBoolean.default(false),
  NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL: coercedBoolean.default(true),
  NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER: coercedBoolean.default(true),
  NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL: coercedBoolean.default(false),
  /**
   * Github Access Token, used for invoking Github API
   */
  NEXT_PUBLIC_GITHUB_ACCESS_TOKEN: z.string().default(''),
  /**
   * The maximum number of tokens for segmentation
   */
  NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: coercedNumber.default(4000),
  NEXT_PUBLIC_IS_MARKETPLACE: coercedBoolean.default(false),
  /**
   * Maximum loop count in the workflow
   */
  NEXT_PUBLIC_LOOP_NODE_MAX_COUNT: coercedNumber.default(100),
  NEXT_PUBLIC_MAINTENANCE_NOTICE: z.string().optional(),
  /**
   * The API PREFIX for MARKETPLACE
   */
  NEXT_PUBLIC_MARKETPLACE_API_PREFIX: z.url().optional(),
  /**
   * The URL for MARKETPLACE
   */
  NEXT_PUBLIC_MARKETPLACE_URL_PREFIX: z.url().optional(),
  /**
   * The maximum number of iterations for agent setting
   */
  NEXT_PUBLIC_MAX_ITERATIONS_NUM: coercedNumber.default(99),
  /**
   * Maximum number of Parallelism branches in the workflow
   */
  NEXT_PUBLIC_MAX_PARALLEL_LIMIT: coercedNumber.default(10),
  /**
   * Maximum number of tools in the agent/workflow
   */
  NEXT_PUBLIC_MAX_TOOLS_NUM: coercedNumber.default(10),
  /**
   * The maximum number of tree node depth for workflow
   */
  NEXT_PUBLIC_MAX_TREE_DEPTH: coercedNumber.default(50),
  /**
   * The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from
   * console or api domain.
   * example: http://udify.app/api
   */
  NEXT_PUBLIC_PUBLIC_API_PREFIX: z.url().optional(),
  /**
   * SENTRY
   */
  NEXT_PUBLIC_SENTRY_DSN: z.string().optional(),
  NEXT_PUBLIC_SITE_ABOUT: z.string().optional(),
  NEXT_PUBLIC_SUPPORT_MAIL_LOGIN: coercedBoolean.default(false),
  /**
   * The timeout for the text generation in millisecond
   */
  NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS: coercedNumber.default(60000),
  /**
   * The maximum number of top-k value for RAG.
   */
  NEXT_PUBLIC_TOP_K_MAX_VALUE: coercedNumber.default(10),
  /**
   * Disable Upload Image as WebApp icon default is false
   */
  NEXT_PUBLIC_UPLOAD_IMAGE_AS_ICON: coercedBoolean.default(false),
  NEXT_PUBLIC_WEB_PREFIX: z.url().optional(),
  NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL: z.string().optional(),
  NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT: z.string().optional(),
  NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN: z.string().optional(),
  NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION: z.string().optional(),
  NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID: z.string().optional(),
  NEXT_PUBLIC_ZENDESK_WIDGET_KEY: z.string().optional(),
} satisfies ClientSchema
export const env = createEnv({
|
||||
server: {
|
||||
/**
|
||||
* Maximum length of segmentation tokens for indexing
|
||||
*/
|
||||
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: coercedNumber.default(4000),
|
||||
/**
|
||||
* Disable Next.js Telemetry (https://nextjs.org/telemetry)
|
||||
*/
|
||||
NEXT_TELEMETRY_DISABLED: coercedBoolean.optional(),
|
||||
PORT: coercedNumber.default(3000),
|
||||
/**
|
||||
* The timeout for the text generation in millisecond
|
||||
*/
|
||||
TEXT_GENERATION_TIMEOUT_MS: coercedNumber.default(60000),
|
||||
},
|
||||
shared: {
|
||||
NODE_ENV: z.enum(['development', 'test', 'production']).default('development'),
|
||||
},
|
||||
client: clientSchema,
|
||||
experimental__runtimeEnv: {
|
||||
NODE_ENV: process.env.NODE_ENV,
|
||||
NEXT_PUBLIC_ALLOW_EMBED: isServer ? process.env.NEXT_PUBLIC_ALLOW_EMBED : getRuntimeEnvFromBody('allowEmbed'),
|
||||
NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME: isServer ? process.env.NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME : getRuntimeEnvFromBody('allowUnsafeDataScheme'),
|
||||
NEXT_PUBLIC_AMPLITUDE_API_KEY: isServer ? process.env.NEXT_PUBLIC_AMPLITUDE_API_KEY : getRuntimeEnvFromBody('amplitudeApiKey'),
|
||||
NEXT_PUBLIC_API_PREFIX: isServer ? process.env.NEXT_PUBLIC_API_PREFIX : getRuntimeEnvFromBody('apiPrefix'),
|
||||
NEXT_PUBLIC_BASE_PATH: isServer ? process.env.NEXT_PUBLIC_BASE_PATH : getRuntimeEnvFromBody('basePath'),
|
||||
NEXT_PUBLIC_BATCH_CONCURRENCY: isServer ? process.env.NEXT_PUBLIC_BATCH_CONCURRENCY : getRuntimeEnvFromBody('batchConcurrency'),
|
||||
NEXT_PUBLIC_COOKIE_DOMAIN: isServer ? process.env.NEXT_PUBLIC_COOKIE_DOMAIN : getRuntimeEnvFromBody('cookieDomain'),
|
||||
NEXT_PUBLIC_CSP_WHITELIST: isServer ? process.env.NEXT_PUBLIC_CSP_WHITELIST : getRuntimeEnvFromBody('cspWhitelist'),
|
||||
NEXT_PUBLIC_DEPLOY_ENV: isServer ? process.env.NEXT_PUBLIC_DEPLOY_ENV : getRuntimeEnvFromBody('deployEnv'),
|
||||
NEXT_PUBLIC_DISABLE_UPLOAD_IMAGE_AS_ICON: isServer ? process.env.NEXT_PUBLIC_DISABLE_UPLOAD_IMAGE_AS_ICON : getRuntimeEnvFromBody('disableUploadImageAsIcon'),
|
||||
NEXT_PUBLIC_EDITION: isServer ? process.env.NEXT_PUBLIC_EDITION : getRuntimeEnvFromBody('edition'),
|
||||
NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX: isServer ? process.env.NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX : getRuntimeEnvFromBody('enableSingleDollarLatex'),
|
||||
NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL: isServer ? process.env.NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL : getRuntimeEnvFromBody('enableWebsiteFirecrawl'),
|
||||
NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER: isServer ? process.env.NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER : getRuntimeEnvFromBody('enableWebsiteJinareader'),
|
||||
NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL: isServer ? process.env.NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL : getRuntimeEnvFromBody('enableWebsiteWatercrawl'),
|
||||
NEXT_PUBLIC_GITHUB_ACCESS_TOKEN: isServer ? process.env.NEXT_PUBLIC_GITHUB_ACCESS_TOKEN : getRuntimeEnvFromBody('githubAccessToken'),
|
||||
NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: isServer ? process.env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH : getRuntimeEnvFromBody('indexingMaxSegmentationTokensLength'),
|
||||
NEXT_PUBLIC_IS_MARKETPLACE: isServer ? process.env.NEXT_PUBLIC_IS_MARKETPLACE : getRuntimeEnvFromBody('isMarketplace'),
|
||||
NEXT_PUBLIC_LOOP_NODE_MAX_COUNT: isServer ? process.env.NEXT_PUBLIC_LOOP_NODE_MAX_COUNT : getRuntimeEnvFromBody('loopNodeMaxCount'),
|
||||
NEXT_PUBLIC_MAINTENANCE_NOTICE: isServer ? process.env.NEXT_PUBLIC_MAINTENANCE_NOTICE : getRuntimeEnvFromBody('maintenanceNotice'),
|
||||
NEXT_PUBLIC_MARKETPLACE_API_PREFIX: isServer ? process.env.NEXT_PUBLIC_MARKETPLACE_API_PREFIX : getRuntimeEnvFromBody('marketplaceApiPrefix'),
|
||||
NEXT_PUBLIC_MARKETPLACE_URL_PREFIX: isServer ? process.env.NEXT_PUBLIC_MARKETPLACE_URL_PREFIX : getRuntimeEnvFromBody('marketplaceUrlPrefix'),
|
||||
NEXT_PUBLIC_MAX_ITERATIONS_NUM: isServer ? process.env.NEXT_PUBLIC_MAX_ITERATIONS_NUM : getRuntimeEnvFromBody('maxIterationsNum'),
|
||||
NEXT_PUBLIC_MAX_PARALLEL_LIMIT: isServer ? process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT : getRuntimeEnvFromBody('maxParallelLimit'),
|
||||
NEXT_PUBLIC_MAX_TOOLS_NUM: isServer ? process.env.NEXT_PUBLIC_MAX_TOOLS_NUM : getRuntimeEnvFromBody('maxToolsNum'),
|
||||
NEXT_PUBLIC_MAX_TREE_DEPTH: isServer ? process.env.NEXT_PUBLIC_MAX_TREE_DEPTH : getRuntimeEnvFromBody('maxTreeDepth'),
|
||||
NEXT_PUBLIC_PUBLIC_API_PREFIX: isServer ? process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX : getRuntimeEnvFromBody('publicApiPrefix'),
|
||||
NEXT_PUBLIC_SENTRY_DSN: isServer ? process.env.NEXT_PUBLIC_SENTRY_DSN : getRuntimeEnvFromBody('sentryDsn'),
|
||||
NEXT_PUBLIC_SITE_ABOUT: isServer ? process.env.NEXT_PUBLIC_SITE_ABOUT : getRuntimeEnvFromBody('siteAbout'),
|
||||
NEXT_PUBLIC_SUPPORT_MAIL_LOGIN: isServer ? process.env.NEXT_PUBLIC_SUPPORT_MAIL_LOGIN : getRuntimeEnvFromBody('supportMailLogin'),
|
||||
NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS: isServer ? process.env.NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS : getRuntimeEnvFromBody('textGenerationTimeoutMs'),
|
||||
NEXT_PUBLIC_TOP_K_MAX_VALUE: isServer ? process.env.NEXT_PUBLIC_TOP_K_MAX_VALUE : getRuntimeEnvFromBody('topKMaxValue'),
|
||||
NEXT_PUBLIC_UPLOAD_IMAGE_AS_ICON: isServer ? process.env.NEXT_PUBLIC_UPLOAD_IMAGE_AS_ICON : getRuntimeEnvFromBody('uploadImageAsIcon'),
|
||||
NEXT_PUBLIC_WEB_PREFIX: isServer ? process.env.NEXT_PUBLIC_WEB_PREFIX : getRuntimeEnvFromBody('webPrefix'),
|
||||
NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL: isServer ? process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL : getRuntimeEnvFromBody('zendeskFieldIdEmail'),
|
||||
NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT: isServer ? process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT : getRuntimeEnvFromBody('zendeskFieldIdEnvironment'),
|
||||
NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN: isServer ? process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN : getRuntimeEnvFromBody('zendeskFieldIdPlan'),
|
||||
NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION: isServer ? process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION : getRuntimeEnvFromBody('zendeskFieldIdVersion'),
|
||||
NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID: isServer ? process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID : getRuntimeEnvFromBody('zendeskFieldIdWorkspaceId'),
|
||||
NEXT_PUBLIC_ZENDESK_WIDGET_KEY: isServer ? process.env.NEXT_PUBLIC_ZENDESK_WIDGET_KEY : getRuntimeEnvFromBody('zendeskWidgetKey'),
|
||||
},
|
||||
emptyStringAsUndefined: true,
|
||||
})
|
||||
|
||||
type ClientEnvKey = keyof typeof clientSchema
|
||||
type DatasetKey = CamelCase<Replace<ClientEnvKey, typeof CLIENT_ENV_PREFIX>>
|
||||
|
||||
/**
|
||||
* Browser-only function to get runtime env value from HTML body dataset.
|
||||
*/
|
||||
function getRuntimeEnvFromBody(key: DatasetKey) {
|
||||
if (typeof window === 'undefined') {
|
||||
throw new TypeError('getRuntimeEnvFromBody can only be called in the browser')
|
||||
}
|
||||
|
||||
const value = document.body.dataset[key]
|
||||
return value || undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Server-only function to get dataset map for embedding into the HTML body.
|
||||
*/
|
||||
export function getDatasetMap() {
|
||||
if (isClient) {
|
||||
throw new TypeError('getDatasetMap can only be called on the server')
|
||||
}
|
||||
return ObjectFromEntries(
|
||||
ObjectKeys(clientSchema)
|
||||
.map(envKey => [
|
||||
concat('data-', kebabCase(slice(envKey, length(CLIENT_ENV_PREFIX)))),
|
||||
env[envKey],
|
||||
]),
|
||||
)
|
||||
}
|
||||
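
Editor's note: a minimal sketch (not part of the diff) of how the coercion helpers and the body-dataset round trip in web/env.ts behave. It assumes zod 4.x and string-ts as pinned in web/package.json; the literal values are illustrative.

```ts
import { kebabCase } from 'string-ts'
import * as z from 'zod'

// Same shape as the helpers in web/env.ts: env vars always arrive as strings.
const coercedBoolean = z.string().transform(s => s !== 'false' && s !== '0')
const coercedNumber = z.coerce.number().int().positive()

console.log(coercedBoolean.parse('false')) // false
console.log(coercedBoolean.parse('0'))     // false
console.log(coercedBoolean.parse('yes'))   // true (any string other than 'false'/'0')
console.log(coercedNumber.parse('3000'))   // 3000

// Round trip of one client key: getDatasetMap strips NEXT_PUBLIC_, kebab-cases
// the rest and prefixes 'data-'; the DOM camel-cases it back for dataset access.
const attr = `data-${kebabCase('ALLOW_EMBED')}` // 'data-allow-embed'
console.log(attr)
// <body data-allow-embed="true"> is then read as document.body.dataset.allowEmbed
```

The upshot is that every NEXT_PUBLIC_* value can be resolved at request time on the server and recovered in the browser without rebuilding the client bundle.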
@@ -1614,11 +1614,6 @@
       "count": 1
     }
   },
-  "app/components/base/param-item/top-k-item.tsx": {
-    "unicorn/prefer-number-properties": {
-      "count": 1
-    }
-  },
   "app/components/base/portal-to-follow-elem/index.tsx": {
     "react-refresh/only-export-components": {
       "count": 2
@@ -4055,11 +4050,6 @@
       "count": 4
     }
   },
-  "app/components/workflow/nodes/knowledge-base/components/retrieval-setting/top-k-and-score-threshold.tsx": {
-    "unicorn/prefer-number-properties": {
-      "count": 1
-    }
-  },
   "app/components/workflow/nodes/knowledge-base/components/retrieval-setting/type.ts": {
     "ts/no-explicit-any": {
       "count": 2
@@ -4912,11 +4902,6 @@
       "count": 7
     }
   },
-  "app/install/installForm.tsx": {
-    "tailwindcss/enforce-consistent-class-order": {
-      "count": 1
-    }
-  },
   "app/reset-password/layout.tsx": {
     "ts/no-explicit-any": {
       "count": 1

@@ -1,10 +1,9 @@
 import type { NextConfig } from 'next'
-import process from 'node:process'
-import withBundleAnalyzerInit from '@next/bundle-analyzer'
 import createMDX from '@next/mdx'
 import { codeInspectorPlugin } from 'code-inspector-plugin'
+import { env } from './env'

-const isDev = process.env.NODE_ENV === 'development'
+const isDev = env.NODE_ENV === 'development'
 const withMDX = createMDX({
   extension: /\.mdx?$/,
   options: {
@@ -17,20 +16,17 @@ const withMDX = createMDX({
     // providerImportSource: "@mdx-js/react",
   },
 })
-const withBundleAnalyzer = withBundleAnalyzerInit({
-  enabled: process.env.ANALYZE === 'true',
-})

 // the default url to prevent parse url error when running jest
-const hasSetWebPrefix = process.env.NEXT_PUBLIC_WEB_PREFIX
-const port = process.env.PORT || 3000
+const hasSetWebPrefix = env.NEXT_PUBLIC_WEB_PREFIX
+const port = env.PORT
 const locImageURLs = !hasSetWebPrefix ? [new URL(`http://localhost:${port}/**`), new URL(`http://127.0.0.1:${port}/**`)] : []
-const remoteImageURLs = ([hasSetWebPrefix ? new URL(`${process.env.NEXT_PUBLIC_WEB_PREFIX}/**`) : '', ...locImageURLs].filter(item => !!item)) as URL[]
+const remoteImageURLs = ([hasSetWebPrefix ? new URL(`${env.NEXT_PUBLIC_WEB_PREFIX}/**`) : '', ...locImageURLs].filter(item => !!item)) as URL[]

 const nextConfig: NextConfig = {
-  basePath: process.env.NEXT_PUBLIC_BASE_PATH || '',
+  basePath: env.NEXT_PUBLIC_BASE_PATH,
   serverExternalPackages: ['esbuild'],
-  transpilePackages: ['echarts', 'zrender'],
+  transpilePackages: ['@t3-oss/env-core', '@t3-oss/env-nextjs', 'echarts', 'zrender'],
   turbopack: {
     rules: codeInspectorPlugin({
       bundler: 'turbopack',
@@ -72,4 +68,4 @@ const nextConfig: NextConfig = {
   },
 }

-export default withBundleAnalyzer(withMDX(nextConfig))
+export default withMDX(nextConfig)

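Editor's note: a hedged sketch of what the switch from process.env to the validated env module buys in this config. It assumes the web/env.ts module shown earlier; nothing here is part of the diff.

```ts
import { env } from './env'

// Defaults and coercion are applied once at parse time, so call sites no
// longer need ad-hoc fallbacks such as `process.env.PORT || 3000`.
const port = env.PORT                      // number, defaults to 3000
const basePath = env.NEXT_PUBLIC_BASE_PATH // '' or a path like '/dify' (regex-checked)

// A malformed value (e.g. PORT='abc') now fails fast at startup instead of
// silently producing NaN somewhere downstream.
console.log({ port, basePath })
```
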
@@ -47,14 +47,14 @@
     "i18n:check": "tsx ./scripts/check-i18n.js",
     "test": "vitest run",
     "test:coverage": "vitest run --coverage",
-    "test:ci": "vitest run --coverage --silent=passed-only",
+    "test:ci": "vitest run --coverage --reporter vitest-tiny-reporter --silent=passed-only",
     "test:watch": "vitest --watch",
     "analyze-component": "node ./scripts/analyze-component.js",
     "refactor-component": "node ./scripts/refactor-component.js",
     "storybook": "storybook dev -p 6006",
     "storybook:build": "storybook build",
     "preinstall": "npx only-allow pnpm",
-    "analyze": "ANALYZE=true pnpm build",
+    "analyze": "next experimental-analyze",
     "knip": "knip"
   },
   "dependencies": {
@@ -82,6 +82,7 @@
     "@remixicon/react": "4.7.0",
     "@sentry/react": "8.55.0",
     "@svgdotjs/svg.js": "3.2.5",
+    "@t3-oss/env-nextjs": "0.13.10",
     "@tailwindcss/typography": "0.5.19",
     "@tanstack/react-form": "1.23.7",
     "@tanstack/react-query": "5.90.5",
@@ -159,7 +160,7 @@
     "ufo": "1.6.3",
     "use-context-selector": "2.0.0",
     "uuid": "10.0.0",
-    "zod": "3.25.76",
+    "zod": "4.3.6",
     "zundo": "2.3.0",
     "zustand": "5.0.9"
   },
@@ -169,7 +170,6 @@
     "@eslint-react/eslint-plugin": "2.9.4",
     "@mdx-js/loader": "3.1.1",
     "@mdx-js/react": "3.1.1",
-    "@next/bundle-analyzer": "16.1.5",
     "@next/eslint-plugin-next": "16.1.6",
     "@next/mdx": "16.1.5",
     "@rgrove/parse-xml": "4.2.0",
@@ -236,7 +236,8 @@
     "vite": "7.3.1",
     "vite-tsconfig-paths": "6.0.4",
     "vitest": "4.0.17",
-    "vitest-canvas-mock": "1.1.3"
+    "vitest-canvas-mock": "1.1.3",
+    "vitest-tiny-reporter": "1.3.1"
   },
   "pnpm": {
     "overrides": {

199 web/pnpm-lock.yaml generated
@@ -125,6 +125,9 @@ importers:
       '@svgdotjs/svg.js':
         specifier: 3.2.5
         version: 3.2.5
+      '@t3-oss/env-nextjs':
+        specifier: 0.13.10
+        version: 0.13.10(typescript@5.9.3)(valibot@1.2.0(typescript@5.9.3))(zod@4.3.6)
       '@tailwindcss/typography':
         specifier: 0.5.19
         version: 0.5.19(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.2))
@@ -357,8 +360,8 @@ importers:
         specifier: 10.0.0
         version: 10.0.0
       zod:
-        specifier: 3.25.76
-        version: 3.25.76
+        specifier: 4.3.6
+        version: 4.3.6
       zundo:
         specifier: 2.3.0
         version: 2.3.0(zustand@5.0.9(@types/react@19.2.9)(immer@11.1.0)(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)))
@@ -381,9 +384,6 @@ importers:
       '@mdx-js/react':
        specifier: 3.1.1
        version: 3.1.1(@types/react@19.2.9)(react@19.2.4)
-      '@next/bundle-analyzer':
-        specifier: 16.1.5
-        version: 16.1.5
       '@next/eslint-plugin-next':
         specifier: 16.1.6
         version: 16.1.6
@@ -585,6 +585,9 @@ importers:
       vitest-canvas-mock:
         specifier: 1.1.3
         version: 1.1.3(vitest@4.0.17)
+      vitest-tiny-reporter:
+        specifier: 1.3.1
+        version: 1.3.1(@vitest/runner@4.0.17)(vitest@4.0.17)

 packages:

@@ -918,10 +921,6 @@ packages:
     resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==}
     engines: {node: '>=18'}

-  '@discoveryjs/json-ext@0.5.7':
-    resolution: {integrity: sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==}
-    engines: {node: '>=10.0.0'}
-
   '@emnapi/core@1.8.1':
     resolution: {integrity: sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==}

@@ -1722,9 +1721,6 @@ packages:
   '@neoconfetti/react@1.0.0':
     resolution: {integrity: sha512-klcSooChXXOzIm+SE5IISIAn3bYzYfPjbX7D7HoqZL84oAfgREeSg5vSIaSFH+DaGzzvImTyWe1OyrJ67vik4A==}

-  '@next/bundle-analyzer@16.1.5':
-    resolution: {integrity: sha512-/iPMrxbvgMZQX1huKZu+rnh7bxo2m5/o0PpOWLMRcAlQ2METpZ7/a3SP/aXFePZAyrQpgpndTldXW3LxPXM/KA==}
-
   '@next/env@16.0.0':
     resolution: {integrity: sha512-s5j2iFGp38QsG1LWRQaE2iUY3h1jc014/melHFfLdrsMJPqxqDQwWNwyQTcNoUSGZlCVZuM7t7JDMmSyRilsnA==}

@@ -2814,6 +2810,40 @@ packages:
   '@swc/types@0.1.25':
     resolution: {integrity: sha512-iAoY/qRhNH8a/hBvm3zKj9qQ4oc2+3w1unPJa2XvTK3XjeLXtzcCingVPw/9e5mn1+0yPqxcBGp9Jf0pkfMb1g==}

+  '@t3-oss/env-core@0.13.10':
+    resolution: {integrity: sha512-NNFfdlJ+HmPHkLi2HKy7nwuat9SIYOxei9K10lO2YlcSObDILY7mHZNSHsieIM3A0/5OOzw/P/b+yLvPdaG52g==}
+    peerDependencies:
+      arktype: ^2.1.0
+      typescript: '>=5.0.0'
+      valibot: ^1.0.0-beta.7 || ^1.0.0
+      zod: ^3.24.0 || ^4.0.0
+    peerDependenciesMeta:
+      arktype:
+        optional: true
+      typescript:
+        optional: true
+      valibot:
+        optional: true
+      zod:
+        optional: true
+
+  '@t3-oss/env-nextjs@0.13.10':
+    resolution: {integrity: sha512-JfSA2WXOnvcc/uMdp31paMsfbYhhdvLLRxlwvrnlPE9bwM/n0Z+Qb9xRv48nPpvfMhOrkrTYw1I5Yc06WIKBJQ==}
+    peerDependencies:
+      arktype: ^2.1.0
+      typescript: '>=5.0.0'
+      valibot: ^1.0.0-beta.7 || ^1.0.0
+      zod: ^3.24.0 || ^4.0.0
+    peerDependenciesMeta:
+      arktype:
+        optional: true
+      typescript:
+        optional: true
+      valibot:
+        optional: true
+      zod:
+        optional: true
+
   '@tailwindcss/typography@0.5.19':
     resolution: {integrity: sha512-w31dd8HOx3k9vPtcQh5QHP9GwKcgbMp87j58qi6xgiBnFFtKEAgCWnDw4qUT8aHwkCp8bKvb/KGKWWHedP0AAg==}
     peerDependencies:
@@ -3574,10 +3604,6 @@ packages:
     peerDependencies:
       acorn: ^6.0.0 || ^7.0.0 || ^8.0.0

-  acorn-walk@8.3.4:
-    resolution: {integrity: sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==}
-    engines: {node: '>=0.4.0'}
-
   acorn@8.15.0:
     resolution: {integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==}
     engines: {node: '>=0.4.0'}
@@ -4204,9 +4230,6 @@ packages:
   dayjs@1.11.19:
     resolution: {integrity: sha512-t5EcLVS6QPBNqM2z8fakk/NKel+Xzshgt8FFKAn+qwlD1pzZWxh0nVCrvFK7ZDb6XucZeF9z8C7CBWTRIVApAw==}

-  debounce@1.2.1:
-    resolution: {integrity: sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug==}
-
   debug@4.4.3:
     resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==}
     engines: {node: '>=6.0'}
@@ -4306,9 +4329,6 @@ packages:
     resolution: {integrity: sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==}
     engines: {node: '>=12'}

-  duplexer@0.1.2:
-    resolution: {integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==}
-
   echarts-for-react@3.0.5:
     resolution: {integrity: sha512-YpEI5Ty7O/2nvCfQ7ybNa+S90DwE8KYZWacGvJW4luUqywP7qStQ+pxDlYOmr4jGDu10mhEkiAuMKcUlT4W5vg==}
     peerDependencies:
@@ -4953,10 +4973,6 @@ packages:
   graphemer@1.4.0:
     resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==}

-  gzip-size@6.0.0:
-    resolution: {integrity: sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==}
-    engines: {node: '>=10'}
-
   hachure-fill@0.5.2:
     resolution: {integrity: sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==}
@@ -5227,10 +5243,6 @@ packages:
     resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==}
     engines: {node: '>=12'}

-  is-plain-object@5.0.0:
-    resolution: {integrity: sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==}
-    engines: {node: '>=0.10.0'}
-
   is-potential-custom-element-name@1.0.1:
     resolution: {integrity: sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==}
@@ -5953,10 +5965,6 @@ packages:
   openapi-types@12.1.3:
     resolution: {integrity: sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==}

-  opener@1.5.2:
-    resolution: {integrity: sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==}
-    hasBin: true
-
   optionator@0.9.4:
     resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==}
     engines: {node: '>= 0.8.0'}
@@ -6683,10 +6691,6 @@ packages:
   simple-swizzle@0.2.4:
     resolution: {integrity: sha512-nAu1WFPQSMNr2Zn9PGSZK9AGn4t/y97lEm+MXTtUDwfP0ksAIX4nO+6ruD9Jwut4C49SB1Ws+fbXsm/yScWOHw==}

-  sirv@2.0.4:
-    resolution: {integrity: sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==}
-    engines: {node: '>= 10'}
-
   sirv@3.0.2:
     resolution: {integrity: sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==}
     engines: {node: '>=18'}
@@ -7291,6 +7295,12 @@ packages:
     peerDependencies:
       vitest: ^3.0.0 || ^4.0.0

+  vitest-tiny-reporter@1.3.1:
+    resolution: {integrity: sha512-9WfLruQBbxm4EqMIS0jDZmQjvMgsWgHUso9mHQWgjA6hM3tEVhjdG8wYo7ePFh1XbwEFzEo3XUQqkGoKZ/Td2Q==}
+    peerDependencies:
+      '@vitest/runner': ^2.0.0 || ^3.0.2 || ^4.0.0
+      vitest: ^2.0.0 || ^3.0.2 || ^4.0.0
+
   vitest@4.0.17:
     resolution: {integrity: sha512-FQMeF0DJdWY0iOnbv466n/0BudNdKj1l5jYgl5JVTwjSsZSlqyXFt/9+1sEyhR6CLowbZpV7O1sCHrzBhucKKg==}
     engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0}
@@ -7383,11 +7393,6 @@ packages:
     resolution: {integrity: sha512-BMhLD/Sw+GbJC21C/UgyaZX41nPt8bUTg+jWyDeg7e7YN4xOM05YPSIXceACnXVtqyEw/LMClUQMtMZ+PGGpqQ==}
     engines: {node: '>=20'}

-  webpack-bundle-analyzer@4.10.1:
-    resolution: {integrity: sha512-s3P7pgexgT/HTUSYgxJyn28A+99mmLq4HsJepMPzu0R8ImJc52QNqaFYW1Z2z2uIb1/J3eYgaAWVpaC+v/1aAQ==}
-    engines: {node: '>= 10.13.0'}
-    hasBin: true
-
   webpack-sources@3.3.3:
     resolution: {integrity: sha512-yd1RBzSGanHkitROoPFd6qsrxt+oFhg/129YzheDGqeustzX0vTZJZsSsQjVQC4yzBQ56K55XU8gaNCtIzOnTg==}
     engines: {node: '>=10.13.0'}
@@ -7454,18 +7459,6 @@ packages:
   wrappy@1.0.2:
     resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}

-  ws@7.5.10:
-    resolution: {integrity: sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==}
-    engines: {node: '>=8.3.0'}
-    peerDependencies:
-      bufferutil: ^4.0.1
-      utf-8-validate: ^5.0.2
-    peerDependenciesMeta:
-      bufferutil:
-        optional: true
-      utf-8-validate:
-        optional: true
-
   ws@8.19.0:
     resolution: {integrity: sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==}
     engines: {node: '>=10.0.0'}
@@ -7529,9 +7522,6 @@ packages:
     peerDependencies:
       zod: ^3.25.0 || ^4.0.0

-  zod@3.25.76:
-    resolution: {integrity: sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==}
-
   zod@4.3.6:
     resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==}

@@ -8038,8 +8028,6 @@ snapshots:
   '@csstools/css-tokenizer@3.0.4': {}

-  '@discoveryjs/json-ext@0.5.7': {}
-
   '@emnapi/core@1.8.1':
     dependencies:
       '@emnapi/wasi-threads': 1.1.0
@@ -8230,7 +8218,7 @@
     eslint: 9.39.2(jiti@1.21.7)
     ts-pattern: 5.9.0
     typescript: 5.9.3
-    zod: 3.25.76
+    zod: 4.3.6
   transitivePeerDependencies:
     - supports-color

@@ -8923,13 +8911,6 @@ snapshots:
   '@neoconfetti/react@1.0.0': {}

-  '@next/bundle-analyzer@16.1.5':
-    dependencies:
-      webpack-bundle-analyzer: 4.10.1
-    transitivePeerDependencies:
-      - bufferutil
-      - utf-8-validate
-
   '@next/env@16.0.0': {}

   '@next/env@16.1.5': {}
@@ -9230,7 +9211,8 @@ snapshots:
   '@pkgr/core@0.2.9': {}

-  '@polka/url@1.0.0-next.29': {}
+  '@polka/url@1.0.0-next.29':
+    optional: true

   '@preact/signals-core@1.12.2': {}

@@ -9945,6 +9927,20 @@ snapshots:
     dependencies:
       '@swc/counter': 0.1.3

+  '@t3-oss/env-core@0.13.10(typescript@5.9.3)(valibot@1.2.0(typescript@5.9.3))(zod@4.3.6)':
+    optionalDependencies:
+      typescript: 5.9.3
+      valibot: 1.2.0(typescript@5.9.3)
+      zod: 4.3.6
+
+  '@t3-oss/env-nextjs@0.13.10(typescript@5.9.3)(valibot@1.2.0(typescript@5.9.3))(zod@4.3.6)':
+    dependencies:
+      '@t3-oss/env-core': 0.13.10(typescript@5.9.3)(valibot@1.2.0(typescript@5.9.3))(zod@4.3.6)
+    optionalDependencies:
+      typescript: 5.9.3
+      valibot: 1.2.0(typescript@5.9.3)
+      zod: 4.3.6
+
   '@tailwindcss/typography@0.5.19(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.2))':
     dependencies:
       postcss-selector-parser: 6.0.10
@@ -10937,10 +10933,6 @@ snapshots:
     dependencies:
       acorn: 8.15.0

-  acorn-walk@8.3.4:
-    dependencies:
-      acorn: 8.15.0
-
   acorn@8.15.0: {}

   agent-base@7.1.4: {}
@@ -11567,8 +11559,6 @@ snapshots:
   dayjs@1.11.19: {}

-  debounce@1.2.1: {}
-
   debug@4.4.3:
     dependencies:
       ms: 2.1.3
@@ -11644,8 +11634,6 @@ snapshots:
   dotenv@16.6.1: {}

-  duplexer@0.1.2: {}
-
   echarts-for-react@3.0.5(echarts@5.6.0)(react@19.2.4):
     dependencies:
       echarts: 5.6.0
@@ -11939,8 +11927,8 @@ snapshots:
     '@babel/parser': 7.28.6
     eslint: 9.39.2(jiti@1.21.7)
     hermes-parser: 0.25.1
-    zod: 3.25.76
-    zod-validation-error: 4.0.2(zod@3.25.76)
+    zod: 4.3.6
+    zod-validation-error: 4.0.2(zod@4.3.6)
   transitivePeerDependencies:
     - supports-color

@@ -12486,10 +12474,6 @@ snapshots:
   graphemer@1.4.0: {}

-  gzip-size@6.0.0:
-    dependencies:
-      duplexer: 0.1.2
-
   hachure-fill@0.5.2: {}

   has-flag@4.0.0: {}
@@ -12807,8 +12791,6 @@ snapshots:
   is-plain-obj@4.1.0: {}

-  is-plain-object@5.0.0: {}
-
   is-potential-custom-element-name@1.0.1: {}

   is-stream@3.0.0: {}
@@ -13694,7 +13676,8 @@ snapshots:
   mri@1.2.0: {}

-  mrmime@2.0.1: {}
+  mrmime@2.0.1:
+    optional: true

   ms@2.1.3: {}

@@ -13815,8 +13798,6 @@ snapshots:
   openapi-types@12.1.3: {}

-  opener@1.5.2: {}
-
   optionator@0.9.4:
     dependencies:
       deep-is: 0.1.4
@@ -14744,12 +14725,6 @@ snapshots:
     dependencies:
       is-arrayish: 0.3.4

-  sirv@2.0.4:
-    dependencies:
-      '@polka/url': 1.0.0-next.29
-      mrmime: 2.0.1
-      totalist: 3.0.1
-
   sirv@3.0.2:
     dependencies:
       '@polka/url': 1.0.0-next.29
@@ -15052,7 +15027,8 @@ snapshots:
     dependencies:
       eslint-visitor-keys: 5.0.0

-  totalist@3.0.1: {}
+  totalist@3.0.1:
+    optional: true

   tough-cookie@6.0.0:
     dependencies:
@@ -15342,6 +15318,12 @@ snapshots:
       moo-color: 1.0.3
       vitest: 4.0.17(@types/node@18.15.0)(@vitest/browser-playwright@4.0.17)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.1))(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)

+  vitest-tiny-reporter@1.3.1(@vitest/runner@4.0.17)(vitest@4.0.17):
+    dependencies:
+      '@vitest/runner': 4.0.17
+      tinyrainbow: 3.0.3
+      vitest: 4.0.17(@types/node@18.15.0)(@vitest/browser-playwright@4.0.17)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.1))(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)
+
   vitest@4.0.17(@types/node@18.15.0)(@vitest/browser-playwright@4.0.17)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.1))(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2):
     dependencies:
       '@vitest/expect': 4.0.17
@@ -15434,25 +15416,6 @@ snapshots:
   webidl-conversions@8.0.1: {}

-  webpack-bundle-analyzer@4.10.1:
-    dependencies:
-      '@discoveryjs/json-ext': 0.5.7
-      acorn: 8.15.0
-      acorn-walk: 8.3.4
-      commander: 7.2.0
-      debounce: 1.2.1
-      escape-string-regexp: 4.0.0
-      gzip-size: 6.0.0
-      html-escaper: 2.0.2
-      is-plain-object: 5.0.0
-      opener: 1.5.2
-      picocolors: 1.1.1
-      sirv: 2.0.4
-      ws: 7.5.10
-    transitivePeerDependencies:
-      - bufferutil
-      - utf-8-validate
-
   webpack-sources@3.3.3:
     optional: true

@@ -15542,8 +15505,6 @@ snapshots:
   wrappy@1.0.2:
     optional: true

-  ws@7.5.10: {}
-
   ws@8.19.0: {}

   wsl-utils@0.1.0:
@@ -15580,11 +15541,9 @@ snapshots:
   zen-observable@0.8.15: {}

-  zod-validation-error@4.0.2(zod@3.25.76):
+  zod-validation-error@4.0.2(zod@4.3.6):
     dependencies:
-      zod: 3.25.76
-
-  zod@3.25.76: {}
+      zod: 4.3.6

   zod@4.3.6: {}

@@ -1,13 +1,14 @@
 import type { NextRequest } from 'next/server'
 import { Buffer } from 'node:buffer'
 import { NextResponse } from 'next/server'
+import { env } from '@/env'

 const NECESSARY_DOMAIN = '*.sentry.io http://localhost:* http://127.0.0.1:* https://analytics.google.com googletagmanager.com *.googletagmanager.com https://www.google-analytics.com https://api.github.com https://api2.amplitude.com *.amplitude.com'

 const wrapResponseWithXFrameOptions = (response: NextResponse, pathname: string) => {
   // prevent clickjacking: https://owasp.org/www-community/attacks/Clickjacking
   // Chatbot page should be allowed to be embedded in iframe. It's a feature
-  if (process.env.NEXT_PUBLIC_ALLOW_EMBED !== 'true' && !pathname.startsWith('/chat') && !pathname.startsWith('/workflow') && !pathname.startsWith('/completion') && !pathname.startsWith('/webapp-signin'))
+  if (env.NEXT_PUBLIC_ALLOW_EMBED !== true && !pathname.startsWith('/chat') && !pathname.startsWith('/workflow') && !pathname.startsWith('/completion') && !pathname.startsWith('/webapp-signin'))
     response.headers.set('X-Frame-Options', 'DENY')

   return response
@@ -21,11 +22,11 @@ export function proxy(request: NextRequest) {
     },
   })

-  const isWhiteListEnabled = !!process.env.NEXT_PUBLIC_CSP_WHITELIST && process.env.NODE_ENV === 'production'
+  const isWhiteListEnabled = !!env.NEXT_PUBLIC_CSP_WHITELIST && env.NODE_ENV === 'production'
   if (!isWhiteListEnabled)
     return wrapResponseWithXFrameOptions(response, pathname)

-  const whiteList = `${process.env.NEXT_PUBLIC_CSP_WHITELIST} ${NECESSARY_DOMAIN}`
+  const whiteList = `${env.NEXT_PUBLIC_CSP_WHITELIST} ${NECESSARY_DOMAIN}`
   const nonce = Buffer.from(crypto.randomUUID()).toString('base64')
   const csp = `'nonce-${nonce}'`

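Editor's note: a minimal sketch of the nonce-based CSP pattern the proxy above relies on, under the assumption that the nonce is forwarded to the rendered page; the header and directive names follow the standard CSP spec, not code from this diff.

```ts
import { Buffer } from 'node:buffer'

// Derive a per-request nonce the same way the proxy does.
const nonce = Buffer.from(crypto.randomUUID()).toString('base64')

// A whitelist plus a 'nonce-...' source lets inline scripts run only when
// they carry the matching nonce attribute.
const scriptSrc = `script-src 'self' 'nonce-${nonce}' *.sentry.io`
console.log(`Content-Security-Policy: ${scriptSrc}`)
// The page must then render <script nonce="..."> for its inline scripts.
```
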
@@ -2,7 +2,7 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

 const loadGetBaseURL = async (isClientValue: boolean) => {
   vi.resetModules()
-  vi.doMock('@/utils/client', () => ({ isClient: isClientValue }))
+  vi.doMock('@/utils/client', () => ({ isClient: isClientValue, isServer: !isClientValue }))
   const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {})
   // eslint-disable-next-line next/no-assign-module-variable
   const module = await import('./client')

@@ -107,37 +107,3 @@ export const defaultSystemFeatures: SystemFeatures = {
   enable_trial_app: false,
   enable_explore_banner: false,
 }
-
-export enum DatasetAttr {
-  DATA_API_PREFIX = 'data-api-prefix',
-  DATA_PUBLIC_API_PREFIX = 'data-public-api-prefix',
-  DATA_MARKETPLACE_API_PREFIX = 'data-marketplace-api-prefix',
-  DATA_MARKETPLACE_URL_PREFIX = 'data-marketplace-url-prefix',
-  DATA_PUBLIC_EDITION = 'data-public-edition',
-  DATA_PUBLIC_AMPLITUDE_API_KEY = 'data-public-amplitude-api-key',
-  DATA_PUBLIC_COOKIE_DOMAIN = 'data-public-cookie-domain',
-  DATA_PUBLIC_SUPPORT_MAIL_LOGIN = 'data-public-support-mail-login',
-  DATA_PUBLIC_SENTRY_DSN = 'data-public-sentry-dsn',
-  DATA_PUBLIC_MAINTENANCE_NOTICE = 'data-public-maintenance-notice',
-  DATA_PUBLIC_SITE_ABOUT = 'data-public-site-about',
-  DATA_PUBLIC_TEXT_GENERATION_TIMEOUT_MS = 'data-public-text-generation-timeout-ms',
-  DATA_PUBLIC_MAX_TOOLS_NUM = 'data-public-max-tools-num',
-  DATA_PUBLIC_MAX_PARALLEL_LIMIT = 'data-public-max-parallel-limit',
-  DATA_PUBLIC_TOP_K_MAX_VALUE = 'data-public-top-k-max-value',
-  DATA_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH = 'data-public-indexing-max-segmentation-tokens-length',
-  DATA_PUBLIC_LOOP_NODE_MAX_COUNT = 'data-public-loop-node-max-count',
-  DATA_PUBLIC_MAX_ITERATIONS_NUM = 'data-public-max-iterations-num',
-  DATA_PUBLIC_MAX_TREE_DEPTH = 'data-public-max-tree-depth',
-  DATA_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME = 'data-public-allow-unsafe-data-scheme',
-  DATA_PUBLIC_ENABLE_WEBSITE_JINAREADER = 'data-public-enable-website-jinareader',
-  DATA_PUBLIC_ENABLE_WEBSITE_FIRECRAWL = 'data-public-enable-website-firecrawl',
-  DATA_PUBLIC_ENABLE_WEBSITE_WATERCRAWL = 'data-public-enable-website-watercrawl',
-  DATA_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX = 'data-public-enable-single-dollar-latex',
-  NEXT_PUBLIC_ZENDESK_WIDGET_KEY = 'next-public-zendesk-widget-key',
-  NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT = 'next-public-zendesk-field-id-environment',
-  NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION = 'next-public-zendesk-field-id-version',
-  NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL = 'next-public-zendesk-field-id-email',
-  NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID = 'next-public-zendesk-field-id-workspace-id',
-  NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN = 'next-public-zendesk-field-id-plan',
-  DATA_PUBLIC_BATCH_CONCURRENCY = 'data-public-batch-concurrency',
-}

7 web/utils/object.ts Normal file
@@ -0,0 +1,7 @@
export function ObjectFromEntries<const T extends ReadonlyArray<readonly [PropertyKey, unknown]>>(entries: T): { [K in T[number] as K[0]]: K[1] } {
  return Object.fromEntries(entries) as { [K in T[number] as K[0]]: K[1] }
}

export function ObjectKeys<const T extends Record<string, unknown>>(obj: T): (keyof T)[] {
  return Object.keys(obj) as (keyof T)[]
}
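
Editor's note: a short sketch of why these typed wrappers exist. With the built-ins, Object.keys returns string[] and Object.fromEntries widens to a plain Record, which would break the key-level typing that getDatasetMap in web/env.ts depends on. The values below are illustrative.

```ts
const entries = [['data-allow-embed', true], ['data-base-path', '/dify']] as const
const map = ObjectFromEntries(entries)
// map: { 'data-allow-embed': true; 'data-base-path': '/dify' }
// Object.fromEntries(entries) would instead lose the per-key value types.

const schema = { NEXT_PUBLIC_ALLOW_EMBED: 1, NEXT_PUBLIC_BASE_PATH: 2 }
const keys = ObjectKeys(schema)
// keys: ('NEXT_PUBLIC_ALLOW_EMBED' | 'NEXT_PUBLIC_BASE_PATH')[] rather than string[]
```
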
@@ -8,6 +8,7 @@ import {
 } from '@/app/components/base/prompt-editor/constants'
 import { InputVarType } from '@/app/components/workflow/types'
 import { getMaxVarNameLength, MARKETPLACE_URL_PREFIX, MAX_VAR_KEY_LENGTH, VAR_ITEM_TEMPLATE, VAR_ITEM_TEMPLATE_IN_WORKFLOW } from '@/config'
+import { env } from '@/env'

 const otherAllowedRegex = /^\w+$/
@@ -129,7 +130,7 @@ export const getVars = (value: string) => {

 // Set the value of basePath
 // example: /dify
-export const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
+export const basePath = env.NEXT_PUBLIC_BASE_PATH

 export function getMarketplaceUrl(path: string, params?: Record<string, string | undefined>) {
   const searchParams = new URLSearchParams({ source: encodeURIComponent(window.location.origin) })

@@ -1,173 +0,0 @@
-import { z, ZodError } from 'zod'
-
-describe('Zod Features', () => {
-  it('should support string', () => {
-    const stringSchema = z.string()
-    const numberLikeStringSchema = z.coerce.string() // 12 would be converted to '12'
-    const stringSchemaWithError = z.string({
-      required_error: 'Name is required',
-      invalid_type_error: 'Invalid name type, expected string',
-    })
-
-    const urlSchema = z.string().url()
-    const uuidSchema = z.string().uuid()
-
-    expect(stringSchema.parse('hello')).toBe('hello')
-    expect(() => stringSchema.parse(12)).toThrow()
-    expect(numberLikeStringSchema.parse('12')).toBe('12')
-    expect(numberLikeStringSchema.parse(12)).toBe('12')
-    expect(() => stringSchemaWithError.parse(undefined)).toThrow('Name is required')
-    expect(() => stringSchemaWithError.parse(12)).toThrow('Invalid name type, expected string')
-
-    expect(urlSchema.parse('https://dify.ai')).toBe('https://dify.ai')
-    expect(uuidSchema.parse('123e4567-e89b-12d3-a456-426614174000')).toBe('123e4567-e89b-12d3-a456-426614174000')
-  })
-
-  it('should support enum', () => {
-    enum JobStatus {
-      waiting = 'waiting',
-      processing = 'processing',
-      completed = 'completed',
-    }
-    expect(z.nativeEnum(JobStatus).parse(JobStatus.waiting)).toBe(JobStatus.waiting)
-    expect(z.nativeEnum(JobStatus).parse('completed')).toBe('completed')
-    expect(() => z.nativeEnum(JobStatus).parse('invalid')).toThrow()
-  })
-
-  it('should support number', () => {
-    const numberSchema = z.number()
-    const numberWithMin = z.number().gt(0) // alias min
-    const numberWithMinEqual = z.number().gte(0)
-    const numberWithMax = z.number().lt(100) // alias max
-
-    expect(numberSchema.parse(123)).toBe(123)
-    expect(numberWithMin.parse(50)).toBe(50)
-    expect(numberWithMinEqual.parse(0)).toBe(0)
-    expect(() => numberWithMin.parse(-1)).toThrow()
-    expect(numberWithMax.parse(50)).toBe(50)
-    expect(() => numberWithMax.parse(101)).toThrow()
-  })
-
-  it('should support boolean', () => {
-    const booleanSchema = z.boolean()
-    expect(booleanSchema.parse(true)).toBe(true)
-    expect(booleanSchema.parse(false)).toBe(false)
-    expect(() => booleanSchema.parse('true')).toThrow()
-  })
-
-  it('should support date', () => {
-    const dateSchema = z.date()
-    expect(dateSchema.parse(new Date('2023-01-01'))).toEqual(new Date('2023-01-01'))
-  })
-
-  it('should support object', () => {
-    const userSchema = z.object({
-      id: z.union([z.string(), z.number()]),
-      name: z.string(),
-      email: z.string().email(),
-      age: z.number().min(0).max(120).optional(),
-    })
-
-    type User = z.infer<typeof userSchema>
-
-    const validUser: User = {
-      id: 1,
-      name: 'John',
-      email: 'john@example.com',
-      age: 30,
-    }
-
-    expect(userSchema.parse(validUser)).toEqual(validUser)
-  })
-
-  it('should support object optional field', () => {
-    const userSchema = z.object({
-      name: z.string(),
-      optionalField: z.optional(z.string()),
-    })
-    type User = z.infer<typeof userSchema>
-
-    const user: User = {
-      name: 'John',
-    }
-    const userWithOptionalField: User = {
-      name: 'John',
-      optionalField: 'optional',
-    }
-    expect(userSchema.safeParse(user).success).toEqual(true)
-    expect(userSchema.safeParse(userWithOptionalField).success).toEqual(true)
-  })
-
-  it('should support object intersection', () => {
-    const Person = z.object({
-      name: z.string(),
-    })
-
-    const Employee = z.object({
-      role: z.string(),
-    })
-
-    const EmployedPerson = z.intersection(Person, Employee)
-    const validEmployedPerson = {
-      name: 'John',
-      role: 'Developer',
-    }
-    expect(EmployedPerson.parse(validEmployedPerson)).toEqual(validEmployedPerson)
-  })
-
-  it('should support record', () => {
-    const recordSchema = z.record(z.string(), z.number())
-    const validRecord = {
-      a: 1,
-      b: 2,
-    }
-    expect(recordSchema.parse(validRecord)).toEqual(validRecord)
-  })
-
-  it('should support array', () => {
-    const numbersSchema = z.array(z.number())
-    const stringArraySchema = z.string().array()
-
-    expect(numbersSchema.parse([1, 2, 3])).toEqual([1, 2, 3])
-    expect(stringArraySchema.parse(['a', 'b', 'c'])).toEqual(['a', 'b', 'c'])
-  })
-
-  it('should support promise', async () => {
-    const promiseSchema = z.promise(z.string())
-    const validPromise = Promise.resolve('success')
-
-    await expect(promiseSchema.parse(validPromise)).resolves.toBe('success')
-  })
-
-  it('should support unions', () => {
-    const unionSchema = z.union([z.string(), z.number()])
-
-    expect(unionSchema.parse('success')).toBe('success')
-    expect(unionSchema.parse(404)).toBe(404)
-  })
-
-  it('should support functions', () => {
-    const functionSchema = z.function().args(z.string(), z.number(), z.optional(z.string())).returns(z.number())
-    const validFunction = (name: string, age: number, _optional?: string): number => {
-      return age
-    }
-    expect(functionSchema.safeParse(validFunction).success).toEqual(true)
-  })
-
-  it('should support undefined, null, any, and void', () => {
-    const undefinedSchema = z.undefined()
-    const nullSchema = z.null()
-    const anySchema = z.any()
-
-    expect(undefinedSchema.parse(undefined)).toBeUndefined()
-    expect(nullSchema.parse(null)).toBeNull()
-    expect(anySchema.parse('anything')).toBe('anything')
-    expect(anySchema.parse(3)).toBe(3)
-  })
-
-  it('should safeParse would not throw', () => {
-    expect(z.string().safeParse('abc').success).toBe(true)
-    expect(z.string().safeParse(123).success).toBe(false)
-    expect(z.string().safeParse(123).error).toBeInstanceOf(ZodError)
-  })
-})
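
Editor's note: the deleted spec above exercises zod v3 idioms that changed in v4, which is presumably why it was dropped alongside the zod 3.25.76 → 4.3.6 bump in package.json. A hedged sketch of the closest v4 equivalents follows; API names are per the zod v4 docs and should be treated as assumptions, not code from this diff.

```ts
import * as z from 'zod'

// v3's required_error / invalid_type_error are folded into a single `error` param.
const name = z.string({ error: 'Name is required' })

// String-format checks moved to top-level factories.
const url = z.url()   // was z.string().url()
const uuid = z.uuid() // was z.string().uuid()

// z.nativeEnum is deprecated; z.enum now accepts TS enums directly.
enum JobStatus { waiting = 'waiting', completed = 'completed' }
const status = z.enum(JobStatus)

console.log(name.parse('John'), url.parse('https://dify.ai'), status.parse('completed'))
```
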
@@ -1,8 +1,6 @@
 import { defineConfig, mergeConfig } from 'vitest/config'
 import viteConfig from './vite.config'

-const isCI = !!process.env.CI
-
 export default mergeConfig(viteConfig, defineConfig({
   test: {
     environment: 'jsdom',
@@ -10,7 +8,7 @@ export default mergeConfig(viteConfig, defineConfig({
     setupFiles: ['./vitest.setup.ts'],
     coverage: {
       provider: 'v8',
-      reporter: isCI ? ['json', 'json-summary'] : ['text', 'json', 'json-summary'],
+      reporter: ['json', 'json-summary'],
     },
   },
 }))