mirror of https://github.com/langgenius/dify.git (synced 2026-01-24 13:56:03 +08:00)

Compare commits: refactor/a...refactor/l (8 commits)

| SHA1 |
|---|
| 3ca098eaa5 |
| 03faf8e91b |
| 1cf3e599df |
| a85946d3e7 |
| 9e4d3c75ae |
| 70bea85624 |
| b75b7d6c61 |
| e819b804ba |
23 .github/workflows/autofix.yml (vendored)
@@ -79,29 +79,6 @@ jobs:
find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
find . -name "*.py.bak" -type f -delete

- name: Install pnpm
uses: pnpm/action-setup@v4
with:
package_json_file: web/package.json
run_install: false

- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: 24
cache: pnpm
cache-dependency-path: ./web/pnpm-lock.yaml

- name: Install web dependencies
run: |
cd web
pnpm install --frozen-lockfile

- name: ESLint autofix
run: |
cd web
pnpm lint:fix || true

# mdformat breaks YAML front matter in markdown files. Add --exclude for directories containing YAML front matter.
- name: mdformat
run: |
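The sed step removed above rewrote PEP 604 unions on quoted (forward-reference) annotations into `typing.Optional[...]`. A minimal before/after sketch of that rewrite; the `load` function and the `AppConfig` name are hypothetical and only illustrate the pattern the regex matches:

```python
from __future__ import annotations  # needed for the "before" form to be valid at runtime

# Before the autofix step: a quoted forward reference combined with `| None`.
def load(config: "AppConfig" | None) -> "AppConfig" | None:
    ...


# After the sed rewrite: the quoted union becomes typing.Optional, which is
# valid even without postponed evaluation of annotations.
from typing import Optional


def load_rewritten(config: Optional["AppConfig"]) -> Optional["AppConfig"]:
    ...
```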
340 api/commands.py
@ -950,346 +950,6 @@ def clean_workflow_runs(
|
||||
)
|
||||
|
||||
|
||||
@click.command(
|
||||
"archive-workflow-runs",
|
||||
help="Archive workflow runs for paid plan tenants to S3-compatible storage.",
|
||||
)
|
||||
@click.option("--tenant-ids", default=None, help="Optional comma-separated tenant IDs for grayscale rollout.")
|
||||
@click.option("--before-days", default=90, show_default=True, help="Archive runs older than N days.")
|
||||
@click.option(
|
||||
"--from-days-ago",
|
||||
default=None,
|
||||
type=click.IntRange(min=0),
|
||||
help="Lower bound in days ago (older). Must be paired with --to-days-ago.",
|
||||
)
|
||||
@click.option(
|
||||
"--to-days-ago",
|
||||
default=None,
|
||||
type=click.IntRange(min=0),
|
||||
help="Upper bound in days ago (newer). Must be paired with --from-days-ago.",
|
||||
)
|
||||
@click.option(
|
||||
"--start-from",
|
||||
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
|
||||
default=None,
|
||||
help="Archive runs created at or after this timestamp (UTC if no timezone).",
|
||||
)
|
||||
@click.option(
|
||||
"--end-before",
|
||||
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
|
||||
default=None,
|
||||
help="Archive runs created before this timestamp (UTC if no timezone).",
|
||||
)
|
||||
@click.option("--batch-size", default=100, show_default=True, help="Batch size for processing.")
|
||||
@click.option("--workers", default=1, show_default=True, type=int, help="Concurrent workflow runs to archive.")
|
||||
@click.option("--limit", default=None, type=int, help="Maximum number of runs to archive.")
|
||||
@click.option("--dry-run", is_flag=True, help="Preview without archiving.")
|
||||
@click.option("--delete-after-archive", is_flag=True, help="Delete runs and related data after archiving.")
|
||||
def archive_workflow_runs(
|
||||
tenant_ids: str | None,
|
||||
before_days: int,
|
||||
from_days_ago: int | None,
|
||||
to_days_ago: int | None,
|
||||
start_from: datetime.datetime | None,
|
||||
end_before: datetime.datetime | None,
|
||||
batch_size: int,
|
||||
workers: int,
|
||||
limit: int | None,
|
||||
dry_run: bool,
|
||||
delete_after_archive: bool,
|
||||
):
|
||||
"""
|
||||
Archive workflow runs for paid plan tenants older than the specified days.
|
||||
|
||||
This command archives the following tables to storage:
|
||||
- workflow_node_executions
|
||||
- workflow_node_execution_offload
|
||||
- workflow_pauses
|
||||
- workflow_pause_reasons
|
||||
- workflow_trigger_logs
|
||||
|
||||
The workflow_runs and workflow_app_logs tables are preserved for UI listing.
|
||||
"""
|
||||
from services.retention.workflow_run.archive_paid_plan_workflow_run import WorkflowRunArchiver
|
||||
|
||||
run_started_at = datetime.datetime.now(datetime.UTC)
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Starting workflow run archiving at {run_started_at.isoformat()}.",
|
||||
fg="white",
|
||||
)
|
||||
)
|
||||
|
||||
if (start_from is None) ^ (end_before is None):
|
||||
click.echo(click.style("start-from and end-before must be provided together.", fg="red"))
|
||||
return
|
||||
|
||||
if (from_days_ago is None) ^ (to_days_ago is None):
|
||||
click.echo(click.style("from-days-ago and to-days-ago must be provided together.", fg="red"))
|
||||
return
|
||||
|
||||
if from_days_ago is not None and to_days_ago is not None:
|
||||
if start_from or end_before:
|
||||
click.echo(click.style("Choose either day offsets or explicit dates, not both.", fg="red"))
|
||||
return
|
||||
if from_days_ago <= to_days_ago:
|
||||
click.echo(click.style("from-days-ago must be greater than to-days-ago.", fg="red"))
|
||||
return
|
||||
now = datetime.datetime.now()
|
||||
start_from = now - datetime.timedelta(days=from_days_ago)
|
||||
end_before = now - datetime.timedelta(days=to_days_ago)
|
||||
before_days = 0
|
||||
|
||||
if start_from and end_before and start_from >= end_before:
|
||||
click.echo(click.style("start-from must be earlier than end-before.", fg="red"))
|
||||
return
|
||||
if workers < 1:
|
||||
click.echo(click.style("workers must be at least 1.", fg="red"))
|
||||
return
|
||||
|
||||
archiver = WorkflowRunArchiver(
|
||||
days=before_days,
|
||||
batch_size=batch_size,
|
||||
start_from=start_from,
|
||||
end_before=end_before,
|
||||
workers=workers,
|
||||
tenant_ids=[tid.strip() for tid in tenant_ids.split(",")] if tenant_ids else None,
|
||||
limit=limit,
|
||||
dry_run=dry_run,
|
||||
delete_after_archive=delete_after_archive,
|
||||
)
|
||||
summary = archiver.run()
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Summary: processed={summary.total_runs_processed}, archived={summary.runs_archived}, "
|
||||
f"skipped={summary.runs_skipped}, failed={summary.runs_failed}, "
|
||||
f"time={summary.total_elapsed_time:.2f}s",
|
||||
fg="cyan",
|
||||
)
|
||||
)
|
||||
|
||||
run_finished_at = datetime.datetime.now(datetime.UTC)
|
||||
elapsed = run_finished_at - run_started_at
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Workflow run archiving completed. start={run_started_at.isoformat()} "
|
||||
f"end={run_finished_at.isoformat()} duration={elapsed}",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@click.command(
|
||||
"restore-workflow-runs",
|
||||
help="Restore archived workflow runs from S3-compatible storage.",
|
||||
)
|
||||
@click.option(
|
||||
"--tenant-ids",
|
||||
required=False,
|
||||
help="Tenant IDs (comma-separated).",
|
||||
)
|
||||
@click.option("--run-id", required=False, help="Workflow run ID to restore.")
|
||||
@click.option(
|
||||
"--start-from",
|
||||
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
|
||||
default=None,
|
||||
help="Optional lower bound (inclusive) for created_at; must be paired with --end-before.",
|
||||
)
|
||||
@click.option(
|
||||
"--end-before",
|
||||
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
|
||||
default=None,
|
||||
help="Optional upper bound (exclusive) for created_at; must be paired with --start-from.",
|
||||
)
|
||||
@click.option("--workers", default=1, show_default=True, type=int, help="Concurrent workflow runs to restore.")
|
||||
@click.option("--limit", type=int, default=100, show_default=True, help="Maximum number of runs to restore.")
|
||||
@click.option("--dry-run", is_flag=True, help="Preview without restoring.")
|
||||
def restore_workflow_runs(
|
||||
tenant_ids: str | None,
|
||||
run_id: str | None,
|
||||
start_from: datetime.datetime | None,
|
||||
end_before: datetime.datetime | None,
|
||||
workers: int,
|
||||
limit: int,
|
||||
dry_run: bool,
|
||||
):
|
||||
"""
|
||||
Restore an archived workflow run from storage to the database.
|
||||
|
||||
This restores the following tables:
|
||||
- workflow_node_executions
|
||||
- workflow_node_execution_offload
|
||||
- workflow_pauses
|
||||
- workflow_pause_reasons
|
||||
- workflow_trigger_logs
|
||||
"""
|
||||
from services.retention.workflow_run.restore_archived_workflow_run import WorkflowRunRestore
|
||||
|
||||
parsed_tenant_ids = None
|
||||
if tenant_ids:
|
||||
parsed_tenant_ids = [tid.strip() for tid in tenant_ids.split(",") if tid.strip()]
|
||||
if not parsed_tenant_ids:
|
||||
raise click.BadParameter("tenant-ids must not be empty")
|
||||
|
||||
if (start_from is None) ^ (end_before is None):
|
||||
raise click.UsageError("--start-from and --end-before must be provided together.")
|
||||
if run_id is None and (start_from is None or end_before is None):
|
||||
raise click.UsageError("--start-from and --end-before are required for batch restore.")
|
||||
if workers < 1:
|
||||
raise click.BadParameter("workers must be at least 1")
|
||||
|
||||
start_time = datetime.datetime.now(datetime.UTC)
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Starting restore of workflow run {run_id} at {start_time.isoformat()}.",
|
||||
fg="white",
|
||||
)
|
||||
)
|
||||
|
||||
restorer = WorkflowRunRestore(dry_run=dry_run, workers=workers)
|
||||
if run_id:
|
||||
results = [restorer.restore_by_run_id(run_id)]
|
||||
else:
|
||||
assert start_from is not None
|
||||
assert end_before is not None
|
||||
results = restorer.restore_batch(
|
||||
parsed_tenant_ids,
|
||||
start_date=start_from,
|
||||
end_date=end_before,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
end_time = datetime.datetime.now(datetime.UTC)
|
||||
elapsed = end_time - start_time
|
||||
|
||||
successes = sum(1 for result in results if result.success)
|
||||
failures = len(results) - successes
|
||||
|
||||
if failures == 0:
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Restore completed successfully. success={successes} duration={elapsed}",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
else:
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Restore completed with failures. success={successes} failed={failures} duration={elapsed}",
|
||||
fg="red",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@click.command(
|
||||
"delete-archived-workflow-runs",
|
||||
help="Delete archived workflow runs from the database.",
|
||||
)
|
||||
@click.option(
|
||||
"--tenant-ids",
|
||||
required=False,
|
||||
help="Tenant IDs (comma-separated).",
|
||||
)
|
||||
@click.option("--run-id", required=False, help="Workflow run ID to delete.")
|
||||
@click.option(
|
||||
"--start-from",
|
||||
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
|
||||
default=None,
|
||||
help="Optional lower bound (inclusive) for created_at; must be paired with --end-before.",
|
||||
)
|
||||
@click.option(
|
||||
"--end-before",
|
||||
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
|
||||
default=None,
|
||||
help="Optional upper bound (exclusive) for created_at; must be paired with --start-from.",
|
||||
)
|
||||
@click.option("--limit", type=int, default=100, show_default=True, help="Maximum number of runs to delete.")
|
||||
@click.option("--dry-run", is_flag=True, help="Preview without deleting.")
|
||||
def delete_archived_workflow_runs(
|
||||
tenant_ids: str | None,
|
||||
run_id: str | None,
|
||||
start_from: datetime.datetime | None,
|
||||
end_before: datetime.datetime | None,
|
||||
limit: int,
|
||||
dry_run: bool,
|
||||
):
|
||||
"""
|
||||
Delete archived workflow runs from the database.
|
||||
"""
|
||||
from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion
|
||||
|
||||
parsed_tenant_ids = None
|
||||
if tenant_ids:
|
||||
parsed_tenant_ids = [tid.strip() for tid in tenant_ids.split(",") if tid.strip()]
|
||||
if not parsed_tenant_ids:
|
||||
raise click.BadParameter("tenant-ids must not be empty")
|
||||
|
||||
if (start_from is None) ^ (end_before is None):
|
||||
raise click.UsageError("--start-from and --end-before must be provided together.")
|
||||
if run_id is None and (start_from is None or end_before is None):
|
||||
raise click.UsageError("--start-from and --end-before are required for batch delete.")
|
||||
|
||||
start_time = datetime.datetime.now(datetime.UTC)
|
||||
target_desc = f"workflow run {run_id}" if run_id else "workflow runs"
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Starting delete of {target_desc} at {start_time.isoformat()}.",
|
||||
fg="white",
|
||||
)
|
||||
)
|
||||
|
||||
deleter = ArchivedWorkflowRunDeletion(dry_run=dry_run)
|
||||
if run_id:
|
||||
results = [deleter.delete_by_run_id(run_id)]
|
||||
else:
|
||||
assert start_from is not None
|
||||
assert end_before is not None
|
||||
results = deleter.delete_batch(
|
||||
parsed_tenant_ids,
|
||||
start_date=start_from,
|
||||
end_date=end_before,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
for result in results:
|
||||
if result.success:
|
||||
click.echo(
|
||||
click.style(
|
||||
f"{'[DRY RUN] Would delete' if dry_run else 'Deleted'} "
|
||||
f"workflow run {result.run_id} (tenant={result.tenant_id})",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
else:
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Failed to delete workflow run {result.run_id}: {result.error}",
|
||||
fg="red",
|
||||
)
|
||||
)
|
||||
|
||||
end_time = datetime.datetime.now(datetime.UTC)
|
||||
elapsed = end_time - start_time
|
||||
|
||||
successes = sum(1 for result in results if result.success)
|
||||
failures = len(results) - successes
|
||||
|
||||
if failures == 0:
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Delete completed successfully. success={successes} duration={elapsed}",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
else:
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Delete completed with failures. success={successes} failed={failures} duration={elapsed}",
|
||||
fg="red",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@click.option("-f", "--force", is_flag=True, help="Skip user confirmation and force the command to execute.")
|
||||
@click.command("clear-orphaned-file-records", help="Clear orphaned file records.")
|
||||
def clear_orphaned_file_records(force: bool):
|
||||
|
||||
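The archiving, restore, and delete commands removed above are plain click commands registered on the Flask CLI (see the `extensions/ext_commands.py` change later in this diff). A minimal sketch of how the archive command's option pairing could be exercised with click's test runner; it assumes the command is still importable from `commands` (this diff deletes it) and that a Flask application context and database are available, so it is illustrative only:

```python
from click.testing import CliRunner

# Removed by this change; imported here purely for illustration.
from commands import archive_workflow_runs

runner = CliRunner()

# Day-offset form: --from-days-ago and --to-days-ago must be given together,
# and the "from" offset (older) must be greater than the "to" offset (newer).
result = runner.invoke(
    archive_workflow_runs,
    ["--from-days-ago", "120", "--to-days-ago", "90", "--dry-run"],
)
print(result.output)

# Explicit-date form: --start-from and --end-before must be given together;
# mixing them with the day-offset options is rejected.
result = runner.invoke(
    archive_workflow_runs,
    ["--start-from", "2025-09-01", "--end-before", "2025-10-01", "--dry-run"],
)
print(result.output)
```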
@ -11,10 +11,7 @@ from controllers.console.app.wraps import get_app_model
|
||||
from controllers.console.wraps import account_initialization_required, setup_required
|
||||
from core.workflow.enums import WorkflowExecutionStatus
|
||||
from extensions.ext_database import db
|
||||
from fields.workflow_app_log_fields import (
|
||||
build_workflow_app_log_pagination_model,
|
||||
build_workflow_archived_log_pagination_model,
|
||||
)
|
||||
from fields.workflow_app_log_fields import build_workflow_app_log_pagination_model
|
||||
from libs.login import login_required
|
||||
from models import App
|
||||
from models.model import AppMode
|
||||
@ -64,7 +61,6 @@ console_ns.schema_model(
|
||||
|
||||
# Register model for flask_restx to avoid dict type issues in Swagger
|
||||
workflow_app_log_pagination_model = build_workflow_app_log_pagination_model(console_ns)
|
||||
workflow_archived_log_pagination_model = build_workflow_archived_log_pagination_model(console_ns)
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow-app-logs")
|
||||
@ -103,33 +99,3 @@ class WorkflowAppLogApi(Resource):
|
||||
)
|
||||
|
||||
return workflow_app_log_pagination
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow-archived-logs")
|
||||
class WorkflowArchivedLogApi(Resource):
|
||||
@console_ns.doc("get_workflow_archived_logs")
|
||||
@console_ns.doc(description="Get workflow archived execution logs")
|
||||
@console_ns.doc(params={"app_id": "Application ID"})
|
||||
@console_ns.expect(console_ns.models[WorkflowAppLogQuery.__name__])
|
||||
@console_ns.response(200, "Workflow archived logs retrieved successfully", workflow_archived_log_pagination_model)
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model(mode=[AppMode.WORKFLOW])
|
||||
@marshal_with(workflow_archived_log_pagination_model)
|
||||
def get(self, app_model: App):
|
||||
"""
|
||||
Get workflow archived logs
|
||||
"""
|
||||
args = WorkflowAppLogQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
||||
|
||||
workflow_app_service = WorkflowAppService()
|
||||
with Session(db.engine) as session:
|
||||
workflow_app_log_pagination = workflow_app_service.get_paginate_workflow_archive_logs(
|
||||
session=session,
|
||||
app_model=app_model,
|
||||
page=args.page,
|
||||
limit=args.limit,
|
||||
)
|
||||
|
||||
return workflow_app_log_pagination
|
||||
|
||||
@ -1,15 +1,12 @@
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from typing import Literal, cast
|
||||
|
||||
from flask import request
|
||||
from flask_restx import Resource, fields, marshal_with
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
from sqlalchemy import select
|
||||
|
||||
from controllers.console import console_ns
|
||||
from controllers.console.app.wraps import get_app_model
|
||||
from controllers.console.wraps import account_initialization_required, setup_required
|
||||
from extensions.ext_database import db
|
||||
from fields.end_user_fields import simple_end_user_fields
|
||||
from fields.member_fields import simple_account_fields
|
||||
from fields.workflow_run_fields import (
|
||||
@ -22,17 +19,14 @@ from fields.workflow_run_fields import (
|
||||
workflow_run_node_execution_list_fields,
|
||||
workflow_run_pagination_fields,
|
||||
)
|
||||
from libs.archive_storage import ArchiveStorageNotConfiguredError, get_archive_storage
|
||||
from libs.custom_inputs import time_duration
|
||||
from libs.helper import uuid_value
|
||||
from libs.login import current_user, login_required
|
||||
from models import Account, App, AppMode, EndUser, WorkflowArchiveLog, WorkflowRunTriggeredFrom
|
||||
from services.retention.workflow_run.constants import ARCHIVE_BUNDLE_NAME
|
||||
from models import Account, App, AppMode, EndUser, WorkflowRunTriggeredFrom
|
||||
from services.workflow_run_service import WorkflowRunService
|
||||
|
||||
# Workflow run status choices for filtering
|
||||
WORKFLOW_RUN_STATUS_CHOICES = ["running", "succeeded", "failed", "stopped", "partial-succeeded"]
|
||||
EXPORT_SIGNED_URL_EXPIRE_SECONDS = 3600
|
||||
|
||||
# Register models for flask_restx to avoid dict type issues in Swagger
|
||||
# Register in dependency order: base models first, then dependent models
|
||||
@ -99,15 +93,6 @@ workflow_run_node_execution_list_model = console_ns.model(
|
||||
"WorkflowRunNodeExecutionList", workflow_run_node_execution_list_fields_copy
|
||||
)
|
||||
|
||||
workflow_run_export_fields = console_ns.model(
|
||||
"WorkflowRunExport",
|
||||
{
|
||||
"status": fields.String(description="Export status: success/failed"),
|
||||
"presigned_url": fields.String(description="Pre-signed URL for download", required=False),
|
||||
"presigned_url_expires_at": fields.String(description="Pre-signed URL expiration time", required=False),
|
||||
},
|
||||
)
|
||||
|
||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
||||
|
||||
|
||||
@ -196,56 +181,6 @@ class AdvancedChatAppWorkflowRunListApi(Resource):
|
||||
return result
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/export")
|
||||
class WorkflowRunExportApi(Resource):
|
||||
@console_ns.doc("get_workflow_run_export_url")
|
||||
@console_ns.doc(description="Generate a download URL for an archived workflow run.")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
|
||||
@console_ns.response(200, "Export URL generated", workflow_run_export_fields)
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
def get(self, app_model: App, run_id: str):
|
||||
tenant_id = str(app_model.tenant_id)
|
||||
app_id = str(app_model.id)
|
||||
run_id_str = str(run_id)
|
||||
|
||||
run_created_at = db.session.scalar(
|
||||
select(WorkflowArchiveLog.run_created_at)
|
||||
.where(
|
||||
WorkflowArchiveLog.tenant_id == tenant_id,
|
||||
WorkflowArchiveLog.app_id == app_id,
|
||||
WorkflowArchiveLog.workflow_run_id == run_id_str,
|
||||
)
|
||||
.limit(1)
|
||||
)
|
||||
if not run_created_at:
|
||||
return {"code": "archive_log_not_found", "message": "workflow run archive not found"}, 404
|
||||
|
||||
prefix = (
|
||||
f"{tenant_id}/app_id={app_id}/year={run_created_at.strftime('%Y')}/"
|
||||
f"month={run_created_at.strftime('%m')}/workflow_run_id={run_id_str}"
|
||||
)
|
||||
archive_key = f"{prefix}/{ARCHIVE_BUNDLE_NAME}"
|
||||
|
||||
try:
|
||||
archive_storage = get_archive_storage()
|
||||
except ArchiveStorageNotConfiguredError as e:
|
||||
return {"code": "archive_storage_not_configured", "message": str(e)}, 500
|
||||
|
||||
presigned_url = archive_storage.generate_presigned_url(
|
||||
archive_key,
|
||||
expires_in=EXPORT_SIGNED_URL_EXPIRE_SECONDS,
|
||||
)
|
||||
expires_at = datetime.now(UTC) + timedelta(seconds=EXPORT_SIGNED_URL_EXPIRE_SECONDS)
|
||||
return {
|
||||
"status": "success",
|
||||
"presigned_url": presigned_url,
|
||||
"presigned_url_expires_at": expires_at.isoformat(),
|
||||
}, 200
|
||||
|
||||
|
||||
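The export endpoint above rebuilds the object key from the archive log's `run_created_at` partitioning before signing a download URL. A small sketch of the resulting key layout; the UUIDs are made up and the bundle filename is a placeholder for the real `ARCHIVE_BUNDLE_NAME` constant from `services.retention.workflow_run.constants`:

```python
from datetime import datetime

ARCHIVE_BUNDLE_NAME = "archive_bundle"  # placeholder; the real value lives in the constants module

tenant_id = "11111111-1111-1111-1111-111111111111"  # made-up identifiers
app_id = "22222222-2222-2222-2222-222222222222"
run_id_str = "33333333-3333-3333-3333-333333333333"
run_created_at = datetime(2025, 7, 14)

prefix = (
    f"{tenant_id}/app_id={app_id}/year={run_created_at.strftime('%Y')}/"
    f"month={run_created_at.strftime('%m')}/workflow_run_id={run_id_str}"
)
archive_key = f"{prefix}/{ARCHIVE_BUNDLE_NAME}"
print(archive_key)
# 11111111-.../app_id=22222222-.../year=2025/month=07/workflow_run_id=33333333-.../archive_bundle
```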
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs/count")
|
||||
class AdvancedChatAppWorkflowRunCountApi(Resource):
|
||||
@console_ns.doc("get_advanced_chat_workflow_runs_count")
|
||||
|
||||
@@ -1,7 +1,6 @@
from flask_restx import Resource, fields
from werkzeug.exceptions import Unauthorized

from libs.login import current_account_with_tenant, current_user, login_required
from libs.login import current_account_with_tenant, login_required
from services.feature_service import FeatureService

from . import console_ns
@@ -40,21 +39,5 @@ class SystemFeatureApi(Resource):
),
)
def get(self):
"""Get system-wide feature configuration

NOTE: This endpoint is unauthenticated by design, as it provides system features
data required for dashboard initialization.

Authentication would create circular dependency (can't login without dashboard loading).

Only non-sensitive configuration data should be returned by this endpoint.
"""
# NOTE(QuantumGhost): ideally we should access `current_user.is_authenticated`
# without a try-catch. However, due to the implementation of user loader (the `load_user_from_request`
# in api/extensions/ext_login.py), accessing `current_user.is_authenticated` will
# raise `Unauthorized` exception if authentication token is not provided.
try:
is_authenticated = current_user.is_authenticated
except Unauthorized:
is_authenticated = False
return FeatureService.get_system_features(is_authenticated=is_authenticated).model_dump()
"""Get system-wide feature configuration"""
return FeatureService.get_system_features().model_dump()
@ -261,6 +261,17 @@ class DocumentAddByFileApi(DatasetApiResource):
|
||||
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
|
||||
def post(self, tenant_id, dataset_id):
|
||||
"""Create document by upload file."""
|
||||
args = {}
|
||||
if "data" in request.form:
|
||||
args = json.loads(request.form["data"])
|
||||
if "doc_form" not in args:
|
||||
args["doc_form"] = "text_model"
|
||||
if "doc_language" not in args:
|
||||
args["doc_language"] = "English"
|
||||
|
||||
# get dataset info
|
||||
dataset_id = str(dataset_id)
|
||||
tenant_id = str(tenant_id)
|
||||
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
|
||||
|
||||
if not dataset:
|
||||
@ -269,18 +280,6 @@ class DocumentAddByFileApi(DatasetApiResource):
|
||||
if dataset.provider == "external":
|
||||
raise ValueError("External datasets are not supported.")
|
||||
|
||||
args = {}
|
||||
if "data" in request.form:
|
||||
args = json.loads(request.form["data"])
|
||||
if "doc_form" not in args:
|
||||
args["doc_form"] = dataset.chunk_structure or "text_model"
|
||||
if "doc_language" not in args:
|
||||
args["doc_language"] = "English"
|
||||
|
||||
# get dataset info
|
||||
dataset_id = str(dataset_id)
|
||||
tenant_id = str(tenant_id)
|
||||
|
||||
indexing_technique = args.get("indexing_technique") or dataset.indexing_technique
|
||||
if not indexing_technique:
|
||||
raise ValueError("indexing_technique is required.")
|
||||
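With the reordering above, the `doc_form` default now comes from the dataset's chunk structure instead of a hard-coded value. A tiny sketch of the resulting default resolution; the chunk structure value here is hypothetical:

```python
# Request "data" payload that omits doc_form.
args = {"doc_language": "English"}

dataset_chunk_structure = "hierarchical_model"  # hypothetical dataset.chunk_structure value

if "doc_form" not in args:
    args["doc_form"] = dataset_chunk_structure or "text_model"

# Falls back to "text_model" only when the dataset has no chunk structure.
print(args["doc_form"])
```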
@ -371,6 +370,17 @@ class DocumentUpdateByFileApi(DatasetApiResource):
|
||||
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
|
||||
def post(self, tenant_id, dataset_id, document_id):
|
||||
"""Update document by upload file."""
|
||||
args = {}
|
||||
if "data" in request.form:
|
||||
args = json.loads(request.form["data"])
|
||||
if "doc_form" not in args:
|
||||
args["doc_form"] = "text_model"
|
||||
if "doc_language" not in args:
|
||||
args["doc_language"] = "English"
|
||||
|
||||
# get dataset info
|
||||
dataset_id = str(dataset_id)
|
||||
tenant_id = str(tenant_id)
|
||||
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
|
||||
|
||||
if not dataset:
|
||||
@ -379,18 +389,6 @@ class DocumentUpdateByFileApi(DatasetApiResource):
|
||||
if dataset.provider == "external":
|
||||
raise ValueError("External datasets are not supported.")
|
||||
|
||||
args = {}
|
||||
if "data" in request.form:
|
||||
args = json.loads(request.form["data"])
|
||||
if "doc_form" not in args:
|
||||
args["doc_form"] = dataset.chunk_structure or "text_model"
|
||||
if "doc_language" not in args:
|
||||
args["doc_language"] = "English"
|
||||
|
||||
# get dataset info
|
||||
dataset_id = str(dataset_id)
|
||||
tenant_id = str(tenant_id)
|
||||
|
||||
# indexing_technique is already set in dataset since this is an update
|
||||
args["indexing_technique"] = dataset.indexing_technique
|
||||
|
||||
|
||||
@@ -17,15 +17,5 @@ class SystemFeatureApi(Resource):

Returns:
dict: System feature configuration object

This endpoint is akin to the `SystemFeatureApi` endpoint in api/controllers/console/feature.py,
except it is intended for use by the web app, instead of the console dashboard.

NOTE: This endpoint is unauthenticated by design, as it provides system features
data required for webapp initialization.

Authentication would create circular dependency (can't authenticate without webapp loading).

Only non-sensitive configuration data should be returned by this endpoint.
"""
return FeatureService.get_system_features().model_dump()
@@ -4,7 +4,6 @@ from dify_app import DifyApp
def init_app(app: DifyApp):
from commands import (
add_qdrant_index,
archive_workflow_runs,
clean_expired_messages,
clean_workflow_runs,
cleanup_orphaned_draft_variables,
@@ -12,7 +11,6 @@ def init_app(app: DifyApp):
clear_orphaned_file_records,
convert_to_agent_apps,
create_tenant,
delete_archived_workflow_runs,
extract_plugins,
extract_unique_plugins,
file_usage,
@@ -26,7 +24,6 @@ def init_app(app: DifyApp):
reset_email,
reset_encrypt_key_pair,
reset_password,
restore_workflow_runs,
setup_datasource_oauth_client,
setup_system_tool_oauth_client,
setup_system_trigger_oauth_client,
@@ -61,9 +58,6 @@ def init_app(app: DifyApp):
setup_datasource_oauth_client,
transform_datasource_credentials,
install_rag_pipeline_plugins,
archive_workflow_runs,
delete_archived_workflow_runs,
restore_workflow_runs,
clean_workflow_runs,
clean_expired_messages,
]
@ -2,12 +2,7 @@ from flask_restx import Namespace, fields
|
||||
|
||||
from fields.end_user_fields import build_simple_end_user_model, simple_end_user_fields
|
||||
from fields.member_fields import build_simple_account_model, simple_account_fields
|
||||
from fields.workflow_run_fields import (
|
||||
build_workflow_run_for_archived_log_model,
|
||||
build_workflow_run_for_log_model,
|
||||
workflow_run_for_archived_log_fields,
|
||||
workflow_run_for_log_fields,
|
||||
)
|
||||
from fields.workflow_run_fields import build_workflow_run_for_log_model, workflow_run_for_log_fields
|
||||
from libs.helper import TimestampField
|
||||
|
||||
workflow_app_log_partial_fields = {
|
||||
@ -39,33 +34,6 @@ def build_workflow_app_log_partial_model(api_or_ns: Namespace):
|
||||
return api_or_ns.model("WorkflowAppLogPartial", copied_fields)
|
||||
|
||||
|
||||
workflow_archived_log_partial_fields = {
|
||||
"id": fields.String,
|
||||
"workflow_run": fields.Nested(workflow_run_for_archived_log_fields, allow_null=True),
|
||||
"trigger_metadata": fields.Raw,
|
||||
"created_by_account": fields.Nested(simple_account_fields, attribute="created_by_account", allow_null=True),
|
||||
"created_by_end_user": fields.Nested(simple_end_user_fields, attribute="created_by_end_user", allow_null=True),
|
||||
"created_at": TimestampField,
|
||||
}
|
||||
|
||||
|
||||
def build_workflow_archived_log_partial_model(api_or_ns: Namespace):
|
||||
"""Build the workflow archived log partial model for the API or Namespace."""
|
||||
workflow_run_model = build_workflow_run_for_archived_log_model(api_or_ns)
|
||||
simple_account_model = build_simple_account_model(api_or_ns)
|
||||
simple_end_user_model = build_simple_end_user_model(api_or_ns)
|
||||
|
||||
copied_fields = workflow_archived_log_partial_fields.copy()
|
||||
copied_fields["workflow_run"] = fields.Nested(workflow_run_model, allow_null=True)
|
||||
copied_fields["created_by_account"] = fields.Nested(
|
||||
simple_account_model, attribute="created_by_account", allow_null=True
|
||||
)
|
||||
copied_fields["created_by_end_user"] = fields.Nested(
|
||||
simple_end_user_model, attribute="created_by_end_user", allow_null=True
|
||||
)
|
||||
return api_or_ns.model("WorkflowArchivedLogPartial", copied_fields)
|
||||
|
||||
|
||||
workflow_app_log_pagination_fields = {
|
||||
"page": fields.Integer,
|
||||
"limit": fields.Integer,
|
||||
@ -83,21 +51,3 @@ def build_workflow_app_log_pagination_model(api_or_ns: Namespace):
|
||||
copied_fields = workflow_app_log_pagination_fields.copy()
|
||||
copied_fields["data"] = fields.List(fields.Nested(workflow_app_log_partial_model))
|
||||
return api_or_ns.model("WorkflowAppLogPagination", copied_fields)
|
||||
|
||||
|
||||
workflow_archived_log_pagination_fields = {
|
||||
"page": fields.Integer,
|
||||
"limit": fields.Integer,
|
||||
"total": fields.Integer,
|
||||
"has_more": fields.Boolean,
|
||||
"data": fields.List(fields.Nested(workflow_archived_log_partial_fields)),
|
||||
}
|
||||
|
||||
|
||||
def build_workflow_archived_log_pagination_model(api_or_ns: Namespace):
|
||||
"""Build the workflow archived log pagination model for the API or Namespace."""
|
||||
workflow_archived_log_partial_model = build_workflow_archived_log_partial_model(api_or_ns)
|
||||
|
||||
copied_fields = workflow_archived_log_pagination_fields.copy()
|
||||
copied_fields["data"] = fields.List(fields.Nested(workflow_archived_log_partial_model))
|
||||
return api_or_ns.model("WorkflowArchivedLogPagination", copied_fields)
|
||||
|
||||
@@ -23,19 +23,6 @@ def build_workflow_run_for_log_model(api_or_ns: Namespace):
return api_or_ns.model("WorkflowRunForLog", workflow_run_for_log_fields)


workflow_run_for_archived_log_fields = {
"id": fields.String,
"status": fields.String,
"triggered_from": fields.String,
"elapsed_time": fields.Float,
"total_tokens": fields.Integer,
}


def build_workflow_run_for_archived_log_model(api_or_ns: Namespace):
return api_or_ns.model("WorkflowRunForArchivedLog", workflow_run_for_archived_log_fields)


workflow_run_for_list_fields = {
"id": fields.String,
"version": fields.String,
@@ -7,6 +7,7 @@ to S3-compatible object storage.

import base64
import datetime
import gzip
import hashlib
import logging
from collections.abc import Generator
@@ -38,7 +39,7 @@ class ArchiveStorage:
"""
S3-compatible storage client for archiving or exporting.

This client provides methods for storing and retrieving archived data in JSONL format.
This client provides methods for storing and retrieving archived data in JSONL+gzip format.
"""

def __init__(self, bucket: str):
@@ -68,10 +69,7 @@ class ArchiveStorage:
aws_access_key_id=dify_config.ARCHIVE_STORAGE_ACCESS_KEY,
aws_secret_access_key=dify_config.ARCHIVE_STORAGE_SECRET_KEY,
region_name=dify_config.ARCHIVE_STORAGE_REGION,
config=Config(
s3={"addressing_style": "path"},
max_pool_connections=64,
),
config=Config(s3={"addressing_style": "path"}),
)

# Verify bucket accessibility
@@ -102,18 +100,12 @@ class ArchiveStorage:
"""
checksum = hashlib.md5(data).hexdigest()
try:
response = self.client.put_object(
self.client.put_object(
Bucket=self.bucket,
Key=key,
Body=data,
ContentMD5=self._content_md5(data),
)
etag = response.get("ETag")
if not etag:
raise ArchiveStorageError(f"Missing ETag for '{key}'")
normalized_etag = etag.strip('"')
if normalized_etag != checksum:
raise ArchiveStorageError(f"ETag mismatch for '{key}': expected={checksum}, actual={normalized_etag}")
logger.debug("Uploaded object: %s (size=%d, checksum=%s)", key, len(data), checksum)
return checksum
except ClientError as e:
@@ -248,18 +240,19 @@ class ArchiveStorage:
return base64.b64encode(hashlib.md5(data).digest()).decode()

@staticmethod
def serialize_to_jsonl(records: list[dict[str, Any]]) -> bytes:
def serialize_to_jsonl_gz(records: list[dict[str, Any]]) -> bytes:
"""
Serialize records to JSONL format.
Serialize records to gzipped JSONL format.

Args:
records: List of dictionaries to serialize

Returns:
JSONL bytes
Gzipped JSONL bytes
"""
lines = []
for record in records:
# Convert datetime objects to ISO format strings
serialized = ArchiveStorage._serialize_record(record)
lines.append(orjson.dumps(serialized))

@@ -267,22 +260,23 @@ class ArchiveStorage:
if jsonl_content:
jsonl_content += b"\n"

return jsonl_content
return gzip.compress(jsonl_content)

@staticmethod
def deserialize_from_jsonl(data: bytes) -> list[dict[str, Any]]:
def deserialize_from_jsonl_gz(data: bytes) -> list[dict[str, Any]]:
"""
Deserialize JSONL data to records.
Deserialize gzipped JSONL data to records.

Args:
data: JSONL bytes
data: Gzipped JSONL bytes

Returns:
List of dictionaries
"""
jsonl_content = gzip.decompress(data)
records = []

for line in data.splitlines():
for line in jsonl_content.splitlines():
if line:
records.append(orjson.loads(line))
||||
|
||||
@ -1,95 +0,0 @@
|
||||
"""create workflow_archive_logs
|
||||
|
||||
Revision ID: 9d77545f524e
|
||||
Revises: f9f6d18a37f9
|
||||
Create Date: 2026-01-06 17:18:56.292479
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import models as models
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '9d77545f524e'
|
||||
down_revision = 'f9f6d18a37f9'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
if _is_pg(conn):
|
||||
op.create_table('workflow_archive_logs',
|
||||
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
|
||||
sa.Column('log_id', models.types.StringUUID(), nullable=True),
|
||||
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('app_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('workflow_run_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('created_by_role', sa.String(length=255), nullable=False),
|
||||
sa.Column('created_by', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('log_created_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('log_created_from', sa.String(length=255), nullable=True),
|
||||
sa.Column('run_version', sa.String(length=255), nullable=False),
|
||||
sa.Column('run_status', sa.String(length=255), nullable=False),
|
||||
sa.Column('run_triggered_from', sa.String(length=255), nullable=False),
|
||||
sa.Column('run_error', models.types.LongText(), nullable=True),
|
||||
sa.Column('run_elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False),
|
||||
sa.Column('run_total_tokens', sa.BigInteger(), server_default=sa.text('0'), nullable=False),
|
||||
sa.Column('run_total_steps', sa.Integer(), server_default=sa.text('0'), nullable=True),
|
||||
sa.Column('run_created_at', sa.DateTime(), nullable=False),
|
||||
sa.Column('run_finished_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('run_exceptions_count', sa.Integer(), server_default=sa.text('0'), nullable=True),
|
||||
sa.Column('trigger_metadata', models.types.LongText(), nullable=True),
|
||||
sa.Column('archived_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name='workflow_archive_log_pkey')
|
||||
)
|
||||
else:
|
||||
op.create_table('workflow_archive_logs',
|
||||
sa.Column('id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('log_id', models.types.StringUUID(), nullable=True),
|
||||
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('app_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('workflow_run_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('created_by_role', sa.String(length=255), nullable=False),
|
||||
sa.Column('created_by', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('log_created_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('log_created_from', sa.String(length=255), nullable=True),
|
||||
sa.Column('run_version', sa.String(length=255), nullable=False),
|
||||
sa.Column('run_status', sa.String(length=255), nullable=False),
|
||||
sa.Column('run_triggered_from', sa.String(length=255), nullable=False),
|
||||
sa.Column('run_error', models.types.LongText(), nullable=True),
|
||||
sa.Column('run_elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False),
|
||||
sa.Column('run_total_tokens', sa.BigInteger(), server_default=sa.text('0'), nullable=False),
|
||||
sa.Column('run_total_steps', sa.Integer(), server_default=sa.text('0'), nullable=True),
|
||||
sa.Column('run_created_at', sa.DateTime(), nullable=False),
|
||||
sa.Column('run_finished_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('run_exceptions_count', sa.Integer(), server_default=sa.text('0'), nullable=True),
|
||||
sa.Column('trigger_metadata', models.types.LongText(), nullable=True),
|
||||
sa.Column('archived_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name='workflow_archive_log_pkey')
|
||||
)
|
||||
with op.batch_alter_table('workflow_archive_logs', schema=None) as batch_op:
|
||||
batch_op.create_index('workflow_archive_log_app_idx', ['tenant_id', 'app_id'], unique=False)
|
||||
batch_op.create_index('workflow_archive_log_run_created_at_idx', ['run_created_at'], unique=False)
|
||||
batch_op.create_index('workflow_archive_log_workflow_run_id_idx', ['workflow_run_id'], unique=False)
|
||||
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('workflow_archive_logs', schema=None) as batch_op:
|
||||
batch_op.drop_index('workflow_archive_log_workflow_run_id_idx')
|
||||
batch_op.drop_index('workflow_archive_log_run_created_at_idx')
|
||||
batch_op.drop_index('workflow_archive_log_app_idx')
|
||||
|
||||
op.drop_table('workflow_archive_logs')
|
||||
# ### end Alembic commands ###
|
||||
@@ -103,7 +103,6 @@ from .workflow import (
Workflow,
WorkflowAppLog,
WorkflowAppLogCreatedFrom,
WorkflowArchiveLog,
WorkflowNodeExecutionModel,
WorkflowNodeExecutionOffload,
WorkflowNodeExecutionTriggeredFrom,
@@ -204,7 +203,6 @@ __all__ = [
"Workflow",
"WorkflowAppLog",
"WorkflowAppLogCreatedFrom",
"WorkflowArchiveLog",
"WorkflowNodeExecutionModel",
"WorkflowNodeExecutionOffload",
"WorkflowNodeExecutionTriggeredFrom",
@ -1163,69 +1163,6 @@ class WorkflowAppLog(TypeBase):
|
||||
}
|
||||
|
||||
|
||||
class WorkflowArchiveLog(TypeBase):
|
||||
"""
|
||||
Workflow archive log.
|
||||
|
||||
Stores essential workflow run snapshot data for archived app logs.
|
||||
|
||||
Field sources:
|
||||
- Shared fields (tenant/app/workflow/run ids, created_by*): from WorkflowRun for consistency.
|
||||
- log_* fields: from WorkflowAppLog when present; null if the run has no app log.
|
||||
- run_* fields: workflow run snapshot fields from WorkflowRun.
|
||||
- trigger_metadata: snapshot from WorkflowTriggerLog when present.
|
||||
"""
|
||||
|
||||
__tablename__ = "workflow_archive_logs"
|
||||
__table_args__ = (
|
||||
sa.PrimaryKeyConstraint("id", name="workflow_archive_log_pkey"),
|
||||
sa.Index("workflow_archive_log_app_idx", "tenant_id", "app_id"),
|
||||
sa.Index("workflow_archive_log_workflow_run_id_idx", "workflow_run_id"),
|
||||
sa.Index("workflow_archive_log_run_created_at_idx", "run_created_at"),
|
||||
)
|
||||
|
||||
id: Mapped[str] = mapped_column(
|
||||
StringUUID, insert_default=lambda: str(uuidv7()), default_factory=lambda: str(uuidv7()), init=False
|
||||
)
|
||||
|
||||
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
|
||||
app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
|
||||
workflow_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
|
||||
workflow_run_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
|
||||
created_by_role: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||
created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
|
||||
|
||||
log_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True)
|
||||
log_created_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
|
||||
log_created_from: Mapped[str | None] = mapped_column(String(255), nullable=True)
|
||||
|
||||
run_version: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||
run_status: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||
run_triggered_from: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||
run_error: Mapped[str | None] = mapped_column(LongText, nullable=True)
|
||||
run_elapsed_time: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("0"))
|
||||
run_total_tokens: Mapped[int] = mapped_column(sa.BigInteger, server_default=sa.text("0"))
|
||||
run_total_steps: Mapped[int] = mapped_column(sa.Integer, server_default=sa.text("0"), nullable=True)
|
||||
run_created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False)
|
||||
run_finished_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
|
||||
run_exceptions_count: Mapped[int] = mapped_column(sa.Integer, server_default=sa.text("0"), nullable=True)
|
||||
|
||||
trigger_metadata: Mapped[str | None] = mapped_column(LongText, nullable=True)
|
||||
archived_at: Mapped[datetime] = mapped_column(
|
||||
DateTime, nullable=False, server_default=func.current_timestamp(), init=False
|
||||
)
|
||||
|
||||
@property
|
||||
def workflow_run_summary(self) -> dict[str, Any]:
|
||||
return {
|
||||
"id": self.workflow_run_id,
|
||||
"status": self.run_status,
|
||||
"triggered_from": self.run_triggered_from,
|
||||
"elapsed_time": self.run_elapsed_time,
|
||||
"total_tokens": self.run_total_tokens,
|
||||
}
|
||||
|
||||
|
||||
class ConversationVariable(TypeBase):
|
||||
__tablename__ = "workflow_conversation_variables"
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ from typing import Protocol
from sqlalchemy.orm import Session

from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from models.workflow import WorkflowNodeExecutionModel, WorkflowNodeExecutionOffload
from models.workflow import WorkflowNodeExecutionModel


class DifyAPIWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository, Protocol):
@@ -209,23 +209,3 @@ class DifyAPIWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository, Pr
The number of executions deleted
"""
...

def get_offloads_by_execution_ids(
self,
session: Session,
node_execution_ids: Sequence[str],
) -> Sequence[WorkflowNodeExecutionOffload]:
"""
Get offload records by node execution IDs.

This method retrieves workflow node execution offload records
that belong to the given node execution IDs.

Args:
session: The database session to use
node_execution_ids: List of node execution IDs to filter by

Returns:
A sequence of WorkflowNodeExecutionOffload instances
"""
...
@ -45,7 +45,7 @@ from core.workflow.enums import WorkflowType
|
||||
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
|
||||
from libs.infinite_scroll_pagination import InfiniteScrollPagination
|
||||
from models.enums import WorkflowRunTriggeredFrom
|
||||
from models.workflow import WorkflowAppLog, WorkflowArchiveLog, WorkflowPause, WorkflowPauseReason, WorkflowRun
|
||||
from models.workflow import WorkflowRun
|
||||
from repositories.entities.workflow_pause import WorkflowPauseEntity
|
||||
from repositories.types import (
|
||||
AverageInteractionStats,
|
||||
@ -270,58 +270,6 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol):
|
||||
"""
|
||||
...
|
||||
|
||||
def get_archived_run_ids(
|
||||
self,
|
||||
session: Session,
|
||||
run_ids: Sequence[str],
|
||||
) -> set[str]:
|
||||
"""
|
||||
Fetch workflow run IDs that already have archive log records.
|
||||
"""
|
||||
...
|
||||
|
||||
def get_archived_logs_by_time_range(
|
||||
self,
|
||||
session: Session,
|
||||
tenant_ids: Sequence[str] | None,
|
||||
start_date: datetime,
|
||||
end_date: datetime,
|
||||
limit: int,
|
||||
) -> Sequence[WorkflowArchiveLog]:
|
||||
"""
|
||||
Fetch archived workflow logs by time range for restore.
|
||||
"""
|
||||
...
|
||||
|
||||
def get_archived_log_by_run_id(
|
||||
self,
|
||||
run_id: str,
|
||||
) -> WorkflowArchiveLog | None:
|
||||
"""
|
||||
Fetch a workflow archive log by workflow run ID.
|
||||
"""
|
||||
...
|
||||
|
||||
def delete_archive_log_by_run_id(
|
||||
self,
|
||||
session: Session,
|
||||
run_id: str,
|
||||
) -> int:
|
||||
"""
|
||||
Delete archive log by workflow run ID.
|
||||
|
||||
Used after restoring a workflow run to remove the archive log record,
|
||||
allowing the run to be archived again if needed.
|
||||
|
||||
Args:
|
||||
session: Database session
|
||||
run_id: Workflow run ID
|
||||
|
||||
Returns:
|
||||
Number of records deleted (0 or 1)
|
||||
"""
|
||||
...
|
||||
|
||||
def delete_runs_with_related(
|
||||
self,
|
||||
runs: Sequence[WorkflowRun],
|
||||
@ -334,61 +282,6 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol):
|
||||
"""
|
||||
...
|
||||
|
||||
def get_pause_records_by_run_id(
|
||||
self,
|
||||
session: Session,
|
||||
run_id: str,
|
||||
) -> Sequence[WorkflowPause]:
|
||||
"""
|
||||
Fetch workflow pause records by workflow run ID.
|
||||
"""
|
||||
...
|
||||
|
||||
def get_pause_reason_records_by_run_id(
|
||||
self,
|
||||
session: Session,
|
||||
pause_ids: Sequence[str],
|
||||
) -> Sequence[WorkflowPauseReason]:
|
||||
"""
|
||||
Fetch workflow pause reason records by pause IDs.
|
||||
"""
|
||||
...
|
||||
|
||||
def get_app_logs_by_run_id(
|
||||
self,
|
||||
session: Session,
|
||||
run_id: str,
|
||||
) -> Sequence[WorkflowAppLog]:
|
||||
"""
|
||||
Fetch workflow app logs by workflow run ID.
|
||||
"""
|
||||
...
|
||||
|
||||
def create_archive_logs(
|
||||
self,
|
||||
session: Session,
|
||||
run: WorkflowRun,
|
||||
app_logs: Sequence[WorkflowAppLog],
|
||||
trigger_metadata: str | None,
|
||||
) -> int:
|
||||
"""
|
||||
Create archive log records for a workflow run.
|
||||
"""
|
||||
...
|
||||
|
||||
def get_archived_runs_by_time_range(
|
||||
self,
|
||||
session: Session,
|
||||
tenant_ids: Sequence[str] | None,
|
||||
start_date: datetime,
|
||||
end_date: datetime,
|
||||
limit: int,
|
||||
) -> Sequence[WorkflowRun]:
|
||||
"""
|
||||
Return workflow runs that already have archive logs, for cleanup of `workflow_runs`.
|
||||
"""
|
||||
...
|
||||
|
||||
def count_runs_with_related(
|
||||
self,
|
||||
runs: Sequence[WorkflowRun],
|
||||
|
||||
@ -351,27 +351,3 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut
|
||||
)
|
||||
|
||||
return int(node_executions_count), int(offloads_count)
|
||||
|
||||
@staticmethod
|
||||
def get_by_run(
|
||||
session: Session,
|
||||
run_id: str,
|
||||
) -> Sequence[WorkflowNodeExecutionModel]:
|
||||
"""
|
||||
Fetch node executions for a run using workflow_run_id.
|
||||
"""
|
||||
stmt = select(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.workflow_run_id == run_id)
|
||||
return list(session.scalars(stmt))
|
||||
|
||||
def get_offloads_by_execution_ids(
|
||||
self,
|
||||
session: Session,
|
||||
node_execution_ids: Sequence[str],
|
||||
) -> Sequence[WorkflowNodeExecutionOffload]:
|
||||
if not node_execution_ids:
|
||||
return []
|
||||
|
||||
stmt = select(WorkflowNodeExecutionOffload).where(
|
||||
WorkflowNodeExecutionOffload.node_execution_id.in_(node_execution_ids)
|
||||
)
|
||||
return list(session.scalars(stmt))
|
||||
|
||||
@ -40,7 +40,14 @@ from libs.infinite_scroll_pagination import InfiniteScrollPagination
|
||||
from libs.time_parser import get_time_threshold
|
||||
from libs.uuid_utils import uuidv7
|
||||
from models.enums import WorkflowRunTriggeredFrom
|
||||
from models.workflow import WorkflowAppLog, WorkflowArchiveLog, WorkflowPause, WorkflowPauseReason, WorkflowRun
|
||||
from models.workflow import (
|
||||
WorkflowAppLog,
|
||||
WorkflowPauseReason,
|
||||
WorkflowRun,
|
||||
)
|
||||
from models.workflow import (
|
||||
WorkflowPause as WorkflowPauseModel,
|
||||
)
|
||||
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
|
||||
from repositories.entities.workflow_pause import WorkflowPauseEntity
|
||||
from repositories.types import (
|
||||
@ -362,53 +369,6 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
|
||||
return session.scalars(stmt).all()
|
||||
|
||||
def get_archived_run_ids(
|
||||
self,
|
||||
session: Session,
|
||||
run_ids: Sequence[str],
|
||||
) -> set[str]:
|
||||
if not run_ids:
|
||||
return set()
|
||||
|
||||
stmt = select(WorkflowArchiveLog.workflow_run_id).where(WorkflowArchiveLog.workflow_run_id.in_(run_ids))
|
||||
return set(session.scalars(stmt).all())
|
||||
|
||||
def get_archived_log_by_run_id(
|
||||
self,
|
||||
run_id: str,
|
||||
) -> WorkflowArchiveLog | None:
|
||||
with self._session_maker() as session:
|
||||
stmt = select(WorkflowArchiveLog).where(WorkflowArchiveLog.workflow_run_id == run_id).limit(1)
|
||||
return session.scalar(stmt)
|
||||
|
||||
def delete_archive_log_by_run_id(
|
||||
self,
|
||||
session: Session,
|
||||
run_id: str,
|
||||
) -> int:
|
||||
stmt = delete(WorkflowArchiveLog).where(WorkflowArchiveLog.workflow_run_id == run_id)
|
||||
result = session.execute(stmt)
|
||||
return cast(CursorResult, result).rowcount or 0
|
||||
|
||||
def get_pause_records_by_run_id(
|
||||
self,
|
||||
session: Session,
|
||||
run_id: str,
|
||||
) -> Sequence[WorkflowPause]:
|
||||
stmt = select(WorkflowPause).where(WorkflowPause.workflow_run_id == run_id)
|
||||
return list(session.scalars(stmt))
|
||||
|
||||
def get_pause_reason_records_by_run_id(
|
||||
self,
|
||||
session: Session,
|
||||
pause_ids: Sequence[str],
|
||||
) -> Sequence[WorkflowPauseReason]:
|
||||
if not pause_ids:
|
||||
return []
|
||||
|
||||
stmt = select(WorkflowPauseReason).where(WorkflowPauseReason.pause_id.in_(pause_ids))
|
||||
return list(session.scalars(stmt))
|
||||
|
||||
def delete_runs_with_related(
|
||||
self,
|
||||
runs: Sequence[WorkflowRun],
|
||||
@ -436,8 +396,9 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
app_logs_result = session.execute(delete(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(run_ids)))
|
||||
app_logs_deleted = cast(CursorResult, app_logs_result).rowcount or 0
|
||||
|
||||
pause_stmt = select(WorkflowPause.id).where(WorkflowPause.workflow_run_id.in_(run_ids))
|
||||
pause_ids = session.scalars(pause_stmt).all()
|
||||
pause_ids = session.scalars(
|
||||
select(WorkflowPauseModel.id).where(WorkflowPauseModel.workflow_run_id.in_(run_ids))
|
||||
).all()
|
||||
pause_reasons_deleted = 0
|
||||
pauses_deleted = 0
|
||||
|
||||
@ -446,7 +407,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
delete(WorkflowPauseReason).where(WorkflowPauseReason.pause_id.in_(pause_ids))
|
||||
)
|
||||
pause_reasons_deleted = cast(CursorResult, pause_reasons_result).rowcount or 0
|
||||
pauses_result = session.execute(delete(WorkflowPause).where(WorkflowPause.id.in_(pause_ids)))
|
||||
pauses_result = session.execute(delete(WorkflowPauseModel).where(WorkflowPauseModel.id.in_(pause_ids)))
|
||||
pauses_deleted = cast(CursorResult, pauses_result).rowcount or 0
|
||||
|
||||
trigger_logs_deleted = delete_trigger_logs(session, run_ids) if delete_trigger_logs else 0
|
||||
@ -466,124 +427,6 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
"pause_reasons": pause_reasons_deleted,
|
||||
}
|
||||
|
||||
def get_app_logs_by_run_id(
|
||||
self,
|
||||
session: Session,
|
||||
run_id: str,
|
||||
) -> Sequence[WorkflowAppLog]:
|
||||
stmt = select(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id == run_id)
|
||||
return list(session.scalars(stmt))
|
||||
|
||||
def create_archive_logs(
|
||||
self,
|
||||
session: Session,
|
||||
run: WorkflowRun,
|
||||
app_logs: Sequence[WorkflowAppLog],
|
||||
trigger_metadata: str | None,
|
||||
) -> int:
|
||||
if not app_logs:
|
||||
archive_log = WorkflowArchiveLog(
|
||||
log_id=None,
|
||||
log_created_at=None,
|
||||
log_created_from=None,
|
||||
tenant_id=run.tenant_id,
|
||||
app_id=run.app_id,
|
||||
workflow_id=run.workflow_id,
|
||||
workflow_run_id=run.id,
|
||||
created_by_role=run.created_by_role,
|
||||
created_by=run.created_by,
|
||||
run_version=run.version,
|
||||
run_status=run.status,
|
||||
run_triggered_from=run.triggered_from,
|
||||
run_error=run.error,
|
||||
run_elapsed_time=run.elapsed_time,
|
||||
run_total_tokens=run.total_tokens,
|
||||
run_total_steps=run.total_steps,
|
||||
run_created_at=run.created_at,
|
||||
run_finished_at=run.finished_at,
|
||||
run_exceptions_count=run.exceptions_count,
|
||||
trigger_metadata=trigger_metadata,
|
||||
)
|
||||
session.add(archive_log)
|
||||
return 1
|
||||
|
||||
archive_logs = [
|
||||
WorkflowArchiveLog(
|
||||
log_id=app_log.id,
|
||||
log_created_at=app_log.created_at,
|
||||
log_created_from=app_log.created_from,
|
||||
tenant_id=run.tenant_id,
|
||||
app_id=run.app_id,
|
||||
workflow_id=run.workflow_id,
|
||||
workflow_run_id=run.id,
|
||||
created_by_role=run.created_by_role,
|
||||
created_by=run.created_by,
|
||||
run_version=run.version,
|
||||
run_status=run.status,
|
||||
run_triggered_from=run.triggered_from,
|
||||
run_error=run.error,
|
||||
run_elapsed_time=run.elapsed_time,
|
||||
run_total_tokens=run.total_tokens,
|
||||
run_total_steps=run.total_steps,
|
||||
run_created_at=run.created_at,
|
||||
run_finished_at=run.finished_at,
|
||||
run_exceptions_count=run.exceptions_count,
|
||||
trigger_metadata=trigger_metadata,
|
||||
)
|
||||
for app_log in app_logs
|
||||
]
|
||||
session.add_all(archive_logs)
|
||||
return len(archive_logs)
|
||||
|
||||
def get_archived_runs_by_time_range(
|
||||
self,
|
||||
session: Session,
|
||||
tenant_ids: Sequence[str] | None,
|
||||
start_date: datetime,
|
||||
end_date: datetime,
|
||||
limit: int,
|
||||
) -> Sequence[WorkflowRun]:
|
||||
"""
|
||||
Retrieves WorkflowRun records by joining workflow_archive_logs.
|
||||
|
||||
Used to identify runs that are already archived and ready for deletion.
|
||||
"""
|
||||
stmt = (
|
||||
select(WorkflowRun)
|
||||
.join(WorkflowArchiveLog, WorkflowArchiveLog.workflow_run_id == WorkflowRun.id)
|
||||
.where(
|
||||
WorkflowArchiveLog.run_created_at >= start_date,
|
||||
WorkflowArchiveLog.run_created_at < end_date,
|
||||
)
|
||||
.order_by(WorkflowArchiveLog.run_created_at.asc(), WorkflowArchiveLog.workflow_run_id.asc())
|
||||
.limit(limit)
|
||||
)
|
||||
if tenant_ids:
|
||||
stmt = stmt.where(WorkflowArchiveLog.tenant_id.in_(tenant_ids))
|
||||
return list(session.scalars(stmt))
|
||||
|
||||
def get_archived_logs_by_time_range(
|
||||
self,
|
||||
session: Session,
|
||||
tenant_ids: Sequence[str] | None,
|
||||
start_date: datetime,
|
||||
end_date: datetime,
|
||||
limit: int,
|
||||
) -> Sequence[WorkflowArchiveLog]:
|
||||
# Returns WorkflowArchiveLog rows directly; use this when workflow_runs may be deleted.
|
||||
stmt = (
|
||||
select(WorkflowArchiveLog)
|
||||
.where(
|
||||
WorkflowArchiveLog.run_created_at >= start_date,
|
||||
WorkflowArchiveLog.run_created_at < end_date,
|
||||
)
|
||||
.order_by(WorkflowArchiveLog.run_created_at.asc(), WorkflowArchiveLog.workflow_run_id.asc())
|
||||
.limit(limit)
|
||||
)
|
||||
if tenant_ids:
|
||||
stmt = stmt.where(WorkflowArchiveLog.tenant_id.in_(tenant_ids))
|
||||
return list(session.scalars(stmt))
|
||||
|
||||
def count_runs_with_related(
|
||||
self,
|
||||
runs: Sequence[WorkflowRun],
|
||||
@ -616,7 +459,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
)
|
||||
|
||||
pause_ids = session.scalars(
|
||||
select(WorkflowPause.id).where(WorkflowPause.workflow_run_id.in_(run_ids))
|
||||
select(WorkflowPauseModel.id).where(WorkflowPauseModel.workflow_run_id.in_(run_ids))
|
||||
).all()
|
||||
pauses_count = len(pause_ids)
|
||||
pause_reasons_count = 0
|
||||
@ -668,7 +511,9 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
ValueError: If workflow_run_id is invalid or workflow run doesn't exist
|
||||
RuntimeError: If workflow is already paused or in invalid state
|
||||
"""
|
||||
previous_pause_model_query = select(WorkflowPause).where(WorkflowPause.workflow_run_id == workflow_run_id)
|
||||
previous_pause_model_query = select(WorkflowPauseModel).where(
|
||||
WorkflowPauseModel.workflow_run_id == workflow_run_id
|
||||
)
|
||||
with self._session_maker() as session, session.begin():
|
||||
# Get the workflow run
|
||||
workflow_run = session.get(WorkflowRun, workflow_run_id)
|
||||
@ -693,7 +538,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
# Upload the state file
|
||||
|
||||
# Create the pause record
|
||||
pause_model = WorkflowPause()
|
||||
pause_model = WorkflowPauseModel()
|
||||
pause_model.id = str(uuidv7())
|
||||
pause_model.workflow_id = workflow_run.workflow_id
|
||||
pause_model.workflow_run_id = workflow_run.id
|
||||
@ -865,13 +710,13 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
"""
|
||||
with self._session_maker() as session, session.begin():
|
||||
# Get the pause model by ID
|
||||
pause_model = session.get(WorkflowPause, pause_entity.id)
|
||||
pause_model = session.get(WorkflowPauseModel, pause_entity.id)
|
||||
if pause_model is None:
|
||||
raise _WorkflowRunError(f"WorkflowPause not found: {pause_entity.id}")
|
||||
self._delete_pause_model(session, pause_model)
|
||||
|
||||
@staticmethod
|
||||
def _delete_pause_model(session: Session, pause_model: WorkflowPause):
|
||||
def _delete_pause_model(session: Session, pause_model: WorkflowPauseModel):
|
||||
storage.delete(pause_model.state_object_key)
|
||||
|
||||
# Delete the pause record
|
||||
@ -906,15 +751,15 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
_limit: int = limit or 1000
|
||||
pruned_record_ids: list[str] = []
|
||||
cond = or_(
|
||||
WorkflowPause.created_at < expiration,
|
||||
WorkflowPauseModel.created_at < expiration,
|
||||
and_(
|
||||
WorkflowPause.resumed_at.is_not(null()),
|
||||
WorkflowPause.resumed_at < resumption_expiration,
|
||||
WorkflowPauseModel.resumed_at.is_not(null()),
|
||||
WorkflowPauseModel.resumed_at < resumption_expiration,
|
||||
),
|
||||
)
|
||||
# First, collect pause records to delete with their state files
|
||||
# Expired pauses (created before expiration time)
|
||||
stmt = select(WorkflowPause).where(cond).limit(_limit)
|
||||
stmt = select(WorkflowPauseModel).where(cond).limit(_limit)
|
||||
|
||||
with self._session_maker(expire_on_commit=False) as session:
|
||||
# Old resumed pauses (resumed more than resumption_duration ago)
|
||||
@ -925,7 +770,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
# Delete state files from storage
|
||||
for pause in pauses_to_delete:
|
||||
with self._session_maker(expire_on_commit=False) as session, session.begin():
|
||||
# todo: this issues a separate query for each WorkflowPause record.
|
||||
# todo: this issues a separate query for each WorkflowPauseModel record.
|
||||
# consider batching this lookup.
|
||||
try:
|
||||
storage.delete(pause.state_object_key)
|
||||
@ -1177,7 +1022,7 @@ class _PrivateWorkflowPauseEntity(WorkflowPauseEntity):
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
pause_model: WorkflowPause,
|
||||
pause_model: WorkflowPauseModel,
|
||||
reason_models: Sequence[WorkflowPauseReason],
|
||||
human_input_form: Sequence = (),
|
||||
) -> None:
|
||||
|
||||
@ -46,11 +46,6 @@ class SQLAlchemyWorkflowTriggerLogRepository(WorkflowTriggerLogRepository):

return self.session.scalar(query)

def list_by_run_id(self, run_id: str) -> Sequence[WorkflowTriggerLog]:
"""List trigger logs for a workflow run."""
query = select(WorkflowTriggerLog).where(WorkflowTriggerLog.workflow_run_id == run_id)
return list(self.session.scalars(query).all())

def get_failed_for_retry(
self, tenant_id: str, max_retry_count: int = 3, limit: int = 100
) -> Sequence[WorkflowTriggerLog]:

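The `list_by_run_id` helper removed in this hunk was a thin query wrapper. A hedged usage sketch, assuming the repository is constructed with an open SQLAlchemy session as it is elsewhere in this diff; the printed attributes are illustrative.

```python
# Hypothetical caller of the removed helper; session setup and printed fields are assumptions.
from sqlalchemy.orm import Session

from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository


def print_trigger_logs(session: Session, run_id: str) -> None:
    repo = SQLAlchemyWorkflowTriggerLogRepository(session)
    for log in repo.list_by_run_id(run_id):  # returns Sequence[WorkflowTriggerLog]
        print(log.id, log.workflow_run_id)
```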
@ -202,7 +202,7 @@ class FeatureService:
return knowledge_rate_limit

@classmethod
def get_system_features(cls, is_authenticated: bool = False) -> SystemFeatureModel:
def get_system_features(cls) -> SystemFeatureModel:
system_features = SystemFeatureModel()

cls._fulfill_system_params_from_env(system_features)
@ -212,7 +212,7 @@ class FeatureService:
system_features.webapp_auth.enabled = True
system_features.enable_change_email = False
system_features.plugin_manager.enabled = True
cls._fulfill_params_from_enterprise(system_features, is_authenticated)
cls._fulfill_params_from_enterprise(system_features)

if dify_config.MARKETPLACE_ENABLED:
system_features.enable_marketplace = True
@ -310,7 +310,7 @@ class FeatureService:
features.next_credit_reset_date = billing_info["next_credit_reset_date"]

@classmethod
def _fulfill_params_from_enterprise(cls, features: SystemFeatureModel, is_authenticated: bool = False):
def _fulfill_params_from_enterprise(cls, features: SystemFeatureModel):
enterprise_info = EnterpriseService.get_info()

if "SSOEnforcedForSignin" in enterprise_info:
@ -347,14 +347,19 @@ class FeatureService:
)
features.webapp_auth.sso_config.protocol = enterprise_info.get("SSOEnforcedForWebProtocol", "")

if is_authenticated and (license_info := enterprise_info.get("License")):
features.license.status = LicenseStatus(license_info.get("status", LicenseStatus.INACTIVE))
features.license.expired_at = license_info.get("expiredAt", "")
if "License" in enterprise_info:
license_info = enterprise_info["License"]

if workspaces_info := license_info.get("workspaces"):
features.license.workspaces.enabled = workspaces_info.get("enabled", False)
features.license.workspaces.limit = workspaces_info.get("limit", 0)
features.license.workspaces.size = workspaces_info.get("used", 0)
if "status" in license_info:
features.license.status = LicenseStatus(license_info.get("status", LicenseStatus.INACTIVE))

if "expiredAt" in license_info:
features.license.expired_at = license_info["expiredAt"]

if "workspaces" in license_info:
features.license.workspaces.enabled = license_info["workspaces"]["enabled"]
features.license.workspaces.limit = license_info["workspaces"]["limit"]
features.license.workspaces.size = license_info["workspaces"]["used"]

if "PluginInstallationPermission" in enterprise_info:
plugin_installation_info = enterprise_info["PluginInstallationPermission"]

@ -1 +0,0 @@
"""Workflow run retention services."""

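The `get_system_features` change above drops the `is_authenticated` flag, so license details from the enterprise payload are filled in whenever a `License` key is present rather than only for authenticated callers. A hedged caller-side sketch of the change; the surrounding controller wiring is an assumption.

```python
# Before this refactor (as in the removed lines):
#   features = FeatureService.get_system_features(is_authenticated=True)
# After this refactor the flag is gone and license info is applied whenever
# the enterprise payload contains a "License" section.
from services.feature_service import FeatureService

features = FeatureService.get_system_features()
print(features.license.status, features.license.expired_at)
```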
@ -1,531 +0,0 @@
"""
Archive Paid Plan Workflow Run Logs Service.

This service archives workflow run logs for paid plan users older than the configured
retention period (default: 90 days) to S3-compatible storage.

Archived tables:
- workflow_runs
- workflow_app_logs
- workflow_node_executions
- workflow_node_execution_offload
- workflow_pauses
- workflow_pause_reasons
- workflow_trigger_logs

"""

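Each run is written as a single zip bundle containing `manifest.json` plus one JSONL file per table listed above. A minimal sketch of inspecting such a bundle offline, assuming it has already been downloaded from the archive bucket; the local file name is hypothetical.

```python
# Sketch only: inspects a downloaded archive bundle; the path is a placeholder.
import json
import zipfile

with zipfile.ZipFile("archive.v1.0.zip") as bundle:
    manifest = json.loads(bundle.read("manifest.json"))
    for table_name, info in manifest.get("tables", {}).items():
        print(table_name, info.get("row_count"), info.get("checksum"))
    # Each table's rows are stored as JSON Lines, one record per line.
    runs = [json.loads(line) for line in bundle.read("workflow_runs.jsonl").splitlines() if line]
```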
import datetime
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
import zipfile
|
||||
from collections.abc import Sequence
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any
|
||||
|
||||
import click
|
||||
from sqlalchemy import inspect
|
||||
from sqlalchemy.orm import Session, sessionmaker
|
||||
|
||||
from configs import dify_config
|
||||
from core.workflow.enums import WorkflowType
|
||||
from enums.cloud_plan import CloudPlan
|
||||
from extensions.ext_database import db
|
||||
from libs.archive_storage import (
|
||||
ArchiveStorage,
|
||||
ArchiveStorageNotConfiguredError,
|
||||
get_archive_storage,
|
||||
)
|
||||
from models.workflow import WorkflowAppLog, WorkflowRun
|
||||
from repositories.api_workflow_node_execution_repository import DifyAPIWorkflowNodeExecutionRepository
|
||||
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
|
||||
from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository
|
||||
from services.billing_service import BillingService
|
||||
from services.retention.workflow_run.constants import ARCHIVE_BUNDLE_NAME, ARCHIVE_SCHEMA_VERSION
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class TableStats:
|
||||
"""Statistics for a single archived table."""
|
||||
|
||||
table_name: str
|
||||
row_count: int
|
||||
checksum: str
|
||||
size_bytes: int
|
||||
|
||||
|
||||
@dataclass
|
||||
class ArchiveResult:
|
||||
"""Result of archiving a single workflow run."""
|
||||
|
||||
run_id: str
|
||||
tenant_id: str
|
||||
success: bool
|
||||
tables: list[TableStats] = field(default_factory=list)
|
||||
error: str | None = None
|
||||
elapsed_time: float = 0.0
|
||||
|
||||
|
||||
@dataclass
|
||||
class ArchiveSummary:
|
||||
"""Summary of the entire archive operation."""
|
||||
|
||||
total_runs_processed: int = 0
|
||||
runs_archived: int = 0
|
||||
runs_skipped: int = 0
|
||||
runs_failed: int = 0
|
||||
total_elapsed_time: float = 0.0
|
||||
|
||||
|
||||
class WorkflowRunArchiver:
"""
Archive workflow run logs for paid plan users.

Storage Layout:
{tenant_id}/app_id={app_id}/year={YYYY}/month={MM}/workflow_run_id={run_id}/
└── archive.v1.0.zip
├── manifest.json
├── workflow_runs.jsonl
├── workflow_app_logs.jsonl
├── workflow_node_executions.jsonl
├── workflow_node_execution_offload.jsonl
├── workflow_pauses.jsonl
├── workflow_pause_reasons.jsonl
└── workflow_trigger_logs.jsonl
"""

ARCHIVED_TYPE = [
|
||||
WorkflowType.WORKFLOW,
|
||||
WorkflowType.RAG_PIPELINE,
|
||||
]
|
||||
ARCHIVED_TABLES = [
|
||||
"workflow_runs",
|
||||
"workflow_app_logs",
|
||||
"workflow_node_executions",
|
||||
"workflow_node_execution_offload",
|
||||
"workflow_pauses",
|
||||
"workflow_pause_reasons",
|
||||
"workflow_trigger_logs",
|
||||
]
|
||||
|
||||
start_from: datetime.datetime | None
|
||||
end_before: datetime.datetime
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
days: int = 90,
|
||||
batch_size: int = 100,
|
||||
start_from: datetime.datetime | None = None,
|
||||
end_before: datetime.datetime | None = None,
|
||||
workers: int = 1,
|
||||
tenant_ids: Sequence[str] | None = None,
|
||||
limit: int | None = None,
|
||||
dry_run: bool = False,
|
||||
delete_after_archive: bool = False,
|
||||
workflow_run_repo: APIWorkflowRunRepository | None = None,
|
||||
):
|
||||
"""
|
||||
Initialize the archiver.
|
||||
|
||||
Args:
|
||||
days: Archive runs older than this many days
|
||||
batch_size: Number of runs to process per batch
|
||||
start_from: Optional start time (inclusive) for archiving
|
||||
end_before: Optional end time (exclusive) for archiving
|
||||
workers: Number of concurrent workflow runs to archive
|
||||
tenant_ids: Optional tenant IDs for grayscale rollout
|
||||
limit: Maximum number of runs to archive (None for unlimited)
|
||||
dry_run: If True, only preview without making changes
|
||||
delete_after_archive: If True, delete runs and related data after archiving
|
||||
"""
|
||||
self.days = days
|
||||
self.batch_size = batch_size
|
||||
if start_from or end_before:
|
||||
if start_from is None or end_before is None:
|
||||
raise ValueError("start_from and end_before must be provided together")
|
||||
if start_from >= end_before:
|
||||
raise ValueError("start_from must be earlier than end_before")
|
||||
self.start_from = start_from.replace(tzinfo=datetime.UTC)
|
||||
self.end_before = end_before.replace(tzinfo=datetime.UTC)
|
||||
else:
|
||||
self.start_from = None
|
||||
self.end_before = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=days)
|
||||
if workers < 1:
|
||||
raise ValueError("workers must be at least 1")
|
||||
self.workers = workers
|
||||
self.tenant_ids = sorted(set(tenant_ids)) if tenant_ids else []
|
||||
self.limit = limit
|
||||
self.dry_run = dry_run
|
||||
self.delete_after_archive = delete_after_archive
|
||||
self.workflow_run_repo = workflow_run_repo
|
||||
|
||||
def run(self) -> ArchiveSummary:
|
||||
"""
|
||||
Main archiving loop.
|
||||
|
||||
Returns:
|
||||
ArchiveSummary with statistics about the operation
|
||||
"""
|
||||
summary = ArchiveSummary()
|
||||
start_time = time.time()
|
||||
|
||||
click.echo(
|
||||
click.style(
|
||||
self._build_start_message(),
|
||||
fg="white",
|
||||
)
|
||||
)
|
||||
|
||||
# Initialize archive storage (will raise if not configured)
|
||||
try:
|
||||
if not self.dry_run:
|
||||
storage = get_archive_storage()
|
||||
else:
|
||||
storage = None
|
||||
except ArchiveStorageNotConfiguredError as e:
|
||||
click.echo(click.style(f"Archive storage not configured: {e}", fg="red"))
|
||||
return summary
|
||||
|
||||
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
|
||||
repo = self._get_workflow_run_repo()
|
||||
|
||||
def _archive_with_session(run: WorkflowRun) -> ArchiveResult:
|
||||
with session_maker() as session:
|
||||
return self._archive_run(session, storage, run)
|
||||
|
||||
last_seen: tuple[datetime.datetime, str] | None = None
|
||||
archived_count = 0
|
||||
|
||||
with ThreadPoolExecutor(max_workers=self.workers) as executor:
|
||||
while True:
|
||||
# Check limit
|
||||
if self.limit and archived_count >= self.limit:
|
||||
click.echo(click.style(f"Reached limit of {self.limit} runs", fg="yellow"))
|
||||
break
|
||||
|
||||
# Fetch batch of runs
|
||||
runs = self._get_runs_batch(last_seen)
|
||||
|
||||
if not runs:
|
||||
break
|
||||
|
||||
run_ids = [run.id for run in runs]
|
||||
with session_maker() as session:
|
||||
archived_run_ids = repo.get_archived_run_ids(session, run_ids)
|
||||
|
||||
last_seen = (runs[-1].created_at, runs[-1].id)
|
||||
|
||||
# Filter to paid tenants only
|
||||
tenant_ids = {run.tenant_id for run in runs}
|
||||
paid_tenants = self._filter_paid_tenants(tenant_ids)
|
||||
|
||||
runs_to_process: list[WorkflowRun] = []
|
||||
for run in runs:
|
||||
summary.total_runs_processed += 1
|
||||
|
||||
# Skip non-paid tenants
|
||||
if run.tenant_id not in paid_tenants:
|
||||
summary.runs_skipped += 1
|
||||
continue
|
||||
|
||||
# Skip already archived runs
|
||||
if run.id in archived_run_ids:
|
||||
summary.runs_skipped += 1
|
||||
continue
|
||||
|
||||
# Check limit
|
||||
if self.limit and archived_count + len(runs_to_process) >= self.limit:
|
||||
break
|
||||
|
||||
runs_to_process.append(run)
|
||||
|
||||
if not runs_to_process:
|
||||
continue
|
||||
|
||||
results = list(executor.map(_archive_with_session, runs_to_process))
|
||||
|
||||
for run, result in zip(runs_to_process, results):
|
||||
if result.success:
|
||||
summary.runs_archived += 1
|
||||
archived_count += 1
|
||||
click.echo(
|
||||
click.style(
|
||||
f"{'[DRY RUN] Would archive' if self.dry_run else 'Archived'} "
|
||||
f"run {run.id} (tenant={run.tenant_id}, "
|
||||
f"tables={len(result.tables)}, time={result.elapsed_time:.2f}s)",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
else:
|
||||
summary.runs_failed += 1
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Failed to archive run {run.id}: {result.error}",
|
||||
fg="red",
|
||||
)
|
||||
)
|
||||
|
||||
summary.total_elapsed_time = time.time() - start_time
|
||||
click.echo(
|
||||
click.style(
|
||||
f"{'[DRY RUN] ' if self.dry_run else ''}Archive complete: "
|
||||
f"processed={summary.total_runs_processed}, archived={summary.runs_archived}, "
|
||||
f"skipped={summary.runs_skipped}, failed={summary.runs_failed}, "
|
||||
f"time={summary.total_elapsed_time:.2f}s",
|
||||
fg="white",
|
||||
)
|
||||
)
|
||||
|
||||
return summary
|
||||
|
||||
def _get_runs_batch(
|
||||
self,
|
||||
last_seen: tuple[datetime.datetime, str] | None,
|
||||
) -> Sequence[WorkflowRun]:
|
||||
"""Fetch a batch of workflow runs to archive."""
|
||||
repo = self._get_workflow_run_repo()
|
||||
return repo.get_runs_batch_by_time_range(
|
||||
start_from=self.start_from,
|
||||
end_before=self.end_before,
|
||||
last_seen=last_seen,
|
||||
batch_size=self.batch_size,
|
||||
run_types=self.ARCHIVED_TYPE,
|
||||
tenant_ids=self.tenant_ids or None,
|
||||
)
|
||||
|
||||
def _build_start_message(self) -> str:
|
||||
range_desc = f"before {self.end_before.isoformat()}"
|
||||
if self.start_from:
|
||||
range_desc = f"between {self.start_from.isoformat()} and {self.end_before.isoformat()}"
|
||||
return (
|
||||
f"{'[DRY RUN] ' if self.dry_run else ''}Starting workflow run archiving "
|
||||
f"for runs {range_desc} "
|
||||
f"(batch_size={self.batch_size}, tenant_ids={','.join(self.tenant_ids) or 'all'})"
|
||||
)
|
||||
|
||||
def _filter_paid_tenants(self, tenant_ids: set[str]) -> set[str]:
|
||||
"""Filter tenant IDs to only include paid tenants."""
|
||||
if not dify_config.BILLING_ENABLED:
|
||||
# If billing is not enabled, treat all tenants as paid
|
||||
return tenant_ids
|
||||
|
||||
if not tenant_ids:
|
||||
return set()
|
||||
|
||||
try:
|
||||
bulk_info = BillingService.get_plan_bulk_with_cache(list(tenant_ids))
|
||||
except Exception:
|
||||
logger.exception("Failed to fetch billing plans for tenants")
|
||||
# On error, skip all tenants in this batch
|
||||
return set()
|
||||
|
||||
# Filter to paid tenants (any plan except SANDBOX)
|
||||
paid = set()
|
||||
for tid, info in bulk_info.items():
|
||||
if info and info.get("plan") in (CloudPlan.PROFESSIONAL, CloudPlan.TEAM):
|
||||
paid.add(tid)
|
||||
|
||||
return paid
|
||||
|
||||
def _archive_run(
|
||||
self,
|
||||
session: Session,
|
||||
storage: ArchiveStorage | None,
|
||||
run: WorkflowRun,
|
||||
) -> ArchiveResult:
|
||||
"""Archive a single workflow run."""
|
||||
start_time = time.time()
|
||||
result = ArchiveResult(run_id=run.id, tenant_id=run.tenant_id, success=False)
|
||||
|
||||
try:
|
||||
# Extract data from all tables
|
||||
table_data, app_logs, trigger_metadata = self._extract_data(session, run)
|
||||
|
||||
if self.dry_run:
|
||||
# In dry run, just report what would be archived
|
||||
for table_name in self.ARCHIVED_TABLES:
|
||||
records = table_data.get(table_name, [])
|
||||
result.tables.append(
|
||||
TableStats(
|
||||
table_name=table_name,
|
||||
row_count=len(records),
|
||||
checksum="",
|
||||
size_bytes=0,
|
||||
)
|
||||
)
|
||||
result.success = True
|
||||
else:
|
||||
if storage is None:
|
||||
raise ArchiveStorageNotConfiguredError("Archive storage not configured")
|
||||
archive_key = self._get_archive_key(run)
|
||||
|
||||
# Serialize tables for the archive bundle
|
||||
table_stats: list[TableStats] = []
|
||||
table_payloads: dict[str, bytes] = {}
|
||||
for table_name in self.ARCHIVED_TABLES:
|
||||
records = table_data.get(table_name, [])
|
||||
data = ArchiveStorage.serialize_to_jsonl(records)
|
||||
table_payloads[table_name] = data
|
||||
checksum = ArchiveStorage.compute_checksum(data)
|
||||
|
||||
table_stats.append(
|
||||
TableStats(
|
||||
table_name=table_name,
|
||||
row_count=len(records),
|
||||
checksum=checksum,
|
||||
size_bytes=len(data),
|
||||
)
|
||||
)
|
||||
|
||||
# Generate and upload archive bundle
|
||||
manifest = self._generate_manifest(run, table_stats)
|
||||
manifest_data = json.dumps(manifest, indent=2, default=str).encode("utf-8")
|
||||
archive_data = self._build_archive_bundle(manifest_data, table_payloads)
|
||||
storage.put_object(archive_key, archive_data)
|
||||
|
||||
repo = self._get_workflow_run_repo()
|
||||
archived_log_count = repo.create_archive_logs(session, run, app_logs, trigger_metadata)
|
||||
session.commit()
|
||||
|
||||
deleted_counts = None
|
||||
if self.delete_after_archive:
|
||||
deleted_counts = repo.delete_runs_with_related(
|
||||
[run],
|
||||
delete_node_executions=self._delete_node_executions,
|
||||
delete_trigger_logs=self._delete_trigger_logs,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Archived workflow run %s: tables=%s, archived_logs=%s, deleted=%s",
|
||||
run.id,
|
||||
{s.table_name: s.row_count for s in table_stats},
|
||||
archived_log_count,
|
||||
deleted_counts,
|
||||
)
|
||||
|
||||
result.tables = table_stats
|
||||
result.success = True
|
||||
|
||||
except Exception as e:
|
||||
logger.exception("Failed to archive workflow run %s", run.id)
|
||||
result.error = str(e)
|
||||
session.rollback()
|
||||
|
||||
result.elapsed_time = time.time() - start_time
|
||||
return result
|
||||
|
||||
def _extract_data(
|
||||
self,
|
||||
session: Session,
|
||||
run: WorkflowRun,
|
||||
) -> tuple[dict[str, list[dict[str, Any]]], Sequence[WorkflowAppLog], str | None]:
|
||||
table_data: dict[str, list[dict[str, Any]]] = {}
|
||||
table_data["workflow_runs"] = [self._row_to_dict(run)]
|
||||
repo = self._get_workflow_run_repo()
|
||||
app_logs = repo.get_app_logs_by_run_id(session, run.id)
|
||||
table_data["workflow_app_logs"] = [self._row_to_dict(row) for row in app_logs]
|
||||
node_exec_repo = self._get_workflow_node_execution_repo(session)
|
||||
node_exec_records = node_exec_repo.get_executions_by_workflow_run(
|
||||
tenant_id=run.tenant_id,
|
||||
app_id=run.app_id,
|
||||
workflow_run_id=run.id,
|
||||
)
|
||||
node_exec_ids = [record.id for record in node_exec_records]
|
||||
offload_records = node_exec_repo.get_offloads_by_execution_ids(session, node_exec_ids)
|
||||
table_data["workflow_node_executions"] = [self._row_to_dict(row) for row in node_exec_records]
|
||||
table_data["workflow_node_execution_offload"] = [self._row_to_dict(row) for row in offload_records]
|
||||
repo = self._get_workflow_run_repo()
|
||||
pause_records = repo.get_pause_records_by_run_id(session, run.id)
|
||||
pause_ids = [pause.id for pause in pause_records]
|
||||
pause_reason_records = repo.get_pause_reason_records_by_run_id(
|
||||
session,
|
||||
pause_ids,
|
||||
)
|
||||
table_data["workflow_pauses"] = [self._row_to_dict(row) for row in pause_records]
|
||||
table_data["workflow_pause_reasons"] = [self._row_to_dict(row) for row in pause_reason_records]
|
||||
trigger_repo = SQLAlchemyWorkflowTriggerLogRepository(session)
|
||||
trigger_records = trigger_repo.list_by_run_id(run.id)
|
||||
table_data["workflow_trigger_logs"] = [self._row_to_dict(row) for row in trigger_records]
|
||||
trigger_metadata = trigger_records[0].trigger_metadata if trigger_records else None
|
||||
return table_data, app_logs, trigger_metadata
|
||||
|
||||
@staticmethod
|
||||
def _row_to_dict(row: Any) -> dict[str, Any]:
|
||||
mapper = inspect(row).mapper
|
||||
return {str(column.name): getattr(row, mapper.get_property_by_column(column).key) for column in mapper.columns}
|
||||
|
||||
def _get_archive_key(self, run: WorkflowRun) -> str:
|
||||
"""Get the storage key for the archive bundle."""
|
||||
created_at = run.created_at
|
||||
prefix = (
|
||||
f"{run.tenant_id}/app_id={run.app_id}/year={created_at.strftime('%Y')}/"
|
||||
f"month={created_at.strftime('%m')}/workflow_run_id={run.id}"
|
||||
)
|
||||
return f"{prefix}/{ARCHIVE_BUNDLE_NAME}"
|
||||
|
||||
def _generate_manifest(
|
||||
self,
|
||||
run: WorkflowRun,
|
||||
table_stats: list[TableStats],
|
||||
) -> dict[str, Any]:
|
||||
"""Generate a manifest for the archived workflow run."""
|
||||
return {
|
||||
"schema_version": ARCHIVE_SCHEMA_VERSION,
|
||||
"workflow_run_id": run.id,
|
||||
"tenant_id": run.tenant_id,
|
||||
"app_id": run.app_id,
|
||||
"workflow_id": run.workflow_id,
|
||||
"created_at": run.created_at.isoformat(),
|
||||
"archived_at": datetime.datetime.now(datetime.UTC).isoformat(),
|
||||
"tables": {
|
||||
stat.table_name: {
|
||||
"row_count": stat.row_count,
|
||||
"checksum": stat.checksum,
|
||||
"size_bytes": stat.size_bytes,
|
||||
}
|
||||
for stat in table_stats
|
||||
},
|
||||
}
|
||||
|
||||
def _build_archive_bundle(self, manifest_data: bytes, table_payloads: dict[str, bytes]) -> bytes:
|
||||
buffer = io.BytesIO()
|
||||
with zipfile.ZipFile(buffer, mode="w", compression=zipfile.ZIP_DEFLATED) as archive:
|
||||
archive.writestr("manifest.json", manifest_data)
|
||||
for table_name in self.ARCHIVED_TABLES:
|
||||
data = table_payloads.get(table_name)
|
||||
if data is None:
|
||||
raise ValueError(f"Missing archive payload for {table_name}")
|
||||
archive.writestr(f"{table_name}.jsonl", data)
|
||||
return buffer.getvalue()
|
||||
|
||||
def _delete_trigger_logs(self, session: Session, run_ids: Sequence[str]) -> int:
|
||||
trigger_repo = SQLAlchemyWorkflowTriggerLogRepository(session)
|
||||
return trigger_repo.delete_by_run_ids(run_ids)
|
||||
|
||||
def _delete_node_executions(self, session: Session, runs: Sequence[WorkflowRun]) -> tuple[int, int]:
|
||||
run_ids = [run.id for run in runs]
|
||||
return self._get_workflow_node_execution_repo(session).delete_by_runs(session, run_ids)
|
||||
|
||||
def _get_workflow_node_execution_repo(
|
||||
self,
|
||||
session: Session,
|
||||
) -> DifyAPIWorkflowNodeExecutionRepository:
|
||||
from repositories.factory import DifyAPIRepositoryFactory
|
||||
|
||||
session_maker = sessionmaker(bind=session.get_bind(), expire_on_commit=False)
|
||||
return DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository(session_maker)
|
||||
|
||||
def _get_workflow_run_repo(self) -> APIWorkflowRunRepository:
|
||||
if self.workflow_run_repo is not None:
|
||||
return self.workflow_run_repo
|
||||
|
||||
from repositories.factory import DifyAPIRepositoryFactory
|
||||
|
||||
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
|
||||
self.workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker)
|
||||
return self.workflow_run_repo
|
||||
@ -1,2 +0,0 @@
ARCHIVE_SCHEMA_VERSION = "1.0"
ARCHIVE_BUNDLE_NAME = f"archive.v{ARCHIVE_SCHEMA_VERSION}.zip"
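These constants combine with the per-run prefix built in `_get_archive_key` above to form the full object key. A small illustration with placeholder identifiers; it mirrors the key format shown earlier rather than adding any new API.

```python
# Placeholder IDs; demonstrates the object key layout described in the archiver docstring.
ARCHIVE_SCHEMA_VERSION = "1.0"
ARCHIVE_BUNDLE_NAME = f"archive.v{ARCHIVE_SCHEMA_VERSION}.zip"

tenant_id, app_id, run_id = "tenant-1", "app-1", "run-1"
key = (
    f"{tenant_id}/app_id={app_id}/year=2025/month=01/"
    f"workflow_run_id={run_id}/{ARCHIVE_BUNDLE_NAME}"
)
assert key.endswith("archive.v1.0.zip")
```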
@ -1,134 +0,0 @@
"""
Delete Archived Workflow Run Service.

This service deletes archived workflow run data from the database while keeping
archive logs intact.
"""

import time
|
||||
from collections.abc import Sequence
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy.orm import Session, sessionmaker
|
||||
|
||||
from extensions.ext_database import db
|
||||
from models.workflow import WorkflowRun
|
||||
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
|
||||
from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository
|
||||
|
||||
|
||||
@dataclass
|
||||
class DeleteResult:
|
||||
run_id: str
|
||||
tenant_id: str
|
||||
success: bool
|
||||
deleted_counts: dict[str, int] = field(default_factory=dict)
|
||||
error: str | None = None
|
||||
elapsed_time: float = 0.0
|
||||
|
||||
|
||||
class ArchivedWorkflowRunDeletion:
|
||||
def __init__(self, dry_run: bool = False):
|
||||
self.dry_run = dry_run
|
||||
self.workflow_run_repo: APIWorkflowRunRepository | None = None
|
||||
|
||||
def delete_by_run_id(self, run_id: str) -> DeleteResult:
|
||||
start_time = time.time()
|
||||
result = DeleteResult(run_id=run_id, tenant_id="", success=False)
|
||||
|
||||
repo = self._get_workflow_run_repo()
|
||||
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
|
||||
with session_maker() as session:
|
||||
run = session.get(WorkflowRun, run_id)
|
||||
if not run:
|
||||
result.error = f"Workflow run {run_id} not found"
|
||||
result.elapsed_time = time.time() - start_time
|
||||
return result
|
||||
|
||||
result.tenant_id = run.tenant_id
|
||||
if not repo.get_archived_run_ids(session, [run.id]):
|
||||
result.error = f"Workflow run {run_id} is not archived"
|
||||
result.elapsed_time = time.time() - start_time
|
||||
return result
|
||||
|
||||
result = self._delete_run(run)
|
||||
result.elapsed_time = time.time() - start_time
|
||||
return result
|
||||
|
||||
def delete_batch(
|
||||
self,
|
||||
tenant_ids: list[str] | None,
|
||||
start_date: datetime,
|
||||
end_date: datetime,
|
||||
limit: int = 100,
|
||||
) -> list[DeleteResult]:
|
||||
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
|
||||
results: list[DeleteResult] = []
|
||||
|
||||
repo = self._get_workflow_run_repo()
|
||||
with session_maker() as session:
|
||||
runs = list(
|
||||
repo.get_archived_runs_by_time_range(
|
||||
session=session,
|
||||
tenant_ids=tenant_ids,
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
limit=limit,
|
||||
)
|
||||
)
|
||||
for run in runs:
|
||||
results.append(self._delete_run(run))
|
||||
|
||||
return results
|
||||
|
||||
def _delete_run(self, run: WorkflowRun) -> DeleteResult:
|
||||
start_time = time.time()
|
||||
result = DeleteResult(run_id=run.id, tenant_id=run.tenant_id, success=False)
|
||||
if self.dry_run:
|
||||
result.success = True
|
||||
result.elapsed_time = time.time() - start_time
|
||||
return result
|
||||
|
||||
repo = self._get_workflow_run_repo()
|
||||
try:
|
||||
deleted_counts = repo.delete_runs_with_related(
|
||||
[run],
|
||||
delete_node_executions=self._delete_node_executions,
|
||||
delete_trigger_logs=self._delete_trigger_logs,
|
||||
)
|
||||
result.deleted_counts = deleted_counts
|
||||
result.success = True
|
||||
except Exception as e:
|
||||
result.error = str(e)
|
||||
result.elapsed_time = time.time() - start_time
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def _delete_trigger_logs(session: Session, run_ids: Sequence[str]) -> int:
|
||||
trigger_repo = SQLAlchemyWorkflowTriggerLogRepository(session)
|
||||
return trigger_repo.delete_by_run_ids(run_ids)
|
||||
|
||||
@staticmethod
|
||||
def _delete_node_executions(
|
||||
session: Session,
|
||||
runs: Sequence[WorkflowRun],
|
||||
) -> tuple[int, int]:
|
||||
from repositories.factory import DifyAPIRepositoryFactory
|
||||
|
||||
run_ids = [run.id for run in runs]
|
||||
repo = DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository(
|
||||
session_maker=sessionmaker(bind=session.get_bind(), expire_on_commit=False)
|
||||
)
|
||||
return repo.delete_by_runs(session, run_ids)
|
||||
|
||||
def _get_workflow_run_repo(self) -> APIWorkflowRunRepository:
|
||||
if self.workflow_run_repo is not None:
|
||||
return self.workflow_run_repo
|
||||
|
||||
from repositories.factory import DifyAPIRepositoryFactory
|
||||
|
||||
self.workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(
|
||||
sessionmaker(bind=db.engine, expire_on_commit=False)
|
||||
)
|
||||
return self.workflow_run_repo
|
||||
@ -1,481 +0,0 @@
"""
Restore Archived Workflow Run Service.

This service restores archived workflow run data from S3-compatible storage
back to the database.
"""

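Restores are made idempotent with PostgreSQL's `INSERT ... ON CONFLICT DO NOTHING`, keyed on `id`, so re-running a restore skips rows that already exist. A minimal sketch of that statement shape, assuming a mapped model with an `id` primary key; the model and session here are placeholders, not the service's actual wiring.

```python
# Sketch of the idempotent insert used during restore.
from sqlalchemy.dialects.postgresql import insert as pg_insert
from sqlalchemy.orm import Session


def insert_ignoring_duplicates(session: Session, model, records: list[dict]) -> int:
    if not records:
        return 0
    stmt = pg_insert(model).values(records).on_conflict_do_nothing(index_elements=["id"])
    result = session.execute(stmt)
    # rowcount reflects only the rows actually inserted.
    return result.rowcount or 0
```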
import io
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
import zipfile
|
||||
from collections.abc import Callable
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import Any, cast
|
||||
|
||||
import click
|
||||
from sqlalchemy.dialects.postgresql import insert as pg_insert
|
||||
from sqlalchemy.engine import CursorResult
|
||||
from sqlalchemy.orm import DeclarativeBase, Session, sessionmaker
|
||||
|
||||
from extensions.ext_database import db
|
||||
from libs.archive_storage import (
|
||||
ArchiveStorage,
|
||||
ArchiveStorageNotConfiguredError,
|
||||
get_archive_storage,
|
||||
)
|
||||
from models.trigger import WorkflowTriggerLog
|
||||
from models.workflow import (
|
||||
WorkflowAppLog,
|
||||
WorkflowArchiveLog,
|
||||
WorkflowNodeExecutionModel,
|
||||
WorkflowNodeExecutionOffload,
|
||||
WorkflowPause,
|
||||
WorkflowPauseReason,
|
||||
WorkflowRun,
|
||||
)
|
||||
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
|
||||
from repositories.factory import DifyAPIRepositoryFactory
|
||||
from services.retention.workflow_run.constants import ARCHIVE_BUNDLE_NAME
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Mapping of table names to SQLAlchemy models
|
||||
TABLE_MODELS = {
|
||||
"workflow_runs": WorkflowRun,
|
||||
"workflow_app_logs": WorkflowAppLog,
|
||||
"workflow_node_executions": WorkflowNodeExecutionModel,
|
||||
"workflow_node_execution_offload": WorkflowNodeExecutionOffload,
|
||||
"workflow_pauses": WorkflowPause,
|
||||
"workflow_pause_reasons": WorkflowPauseReason,
|
||||
"workflow_trigger_logs": WorkflowTriggerLog,
|
||||
}
|
||||
|
||||
SchemaMapper = Callable[[dict[str, Any]], dict[str, Any]]
|
||||
|
||||
SCHEMA_MAPPERS: dict[str, dict[str, SchemaMapper]] = {
|
||||
"1.0": {},
|
||||
}
|
||||
|
||||
|
||||
@dataclass
|
||||
class RestoreResult:
|
||||
"""Result of restoring a single workflow run."""
|
||||
|
||||
run_id: str
|
||||
tenant_id: str
|
||||
success: bool
|
||||
restored_counts: dict[str, int]
|
||||
error: str | None = None
|
||||
elapsed_time: float = 0.0
|
||||
|
||||
|
||||
class WorkflowRunRestore:
|
||||
"""
|
||||
Restore archived workflow run data from storage to database.
|
||||
|
||||
This service reads archived data from storage and restores it to the
|
||||
database tables. It handles idempotency by skipping records that already
|
||||
exist in the database.
|
||||
"""
|
||||
|
||||
def __init__(self, dry_run: bool = False, workers: int = 1):
|
||||
"""
|
||||
Initialize the restore service.
|
||||
|
||||
Args:
|
||||
dry_run: If True, only preview without making changes
|
||||
workers: Number of concurrent workflow runs to restore
|
||||
"""
|
||||
self.dry_run = dry_run
|
||||
if workers < 1:
|
||||
raise ValueError("workers must be at least 1")
|
||||
self.workers = workers
|
||||
self.workflow_run_repo: APIWorkflowRunRepository | None = None
|
||||
|
||||
def _restore_from_run(
|
||||
self,
|
||||
run: WorkflowRun | WorkflowArchiveLog,
|
||||
*,
|
||||
session_maker: sessionmaker,
|
||||
) -> RestoreResult:
|
||||
start_time = time.time()
|
||||
run_id = run.workflow_run_id if isinstance(run, WorkflowArchiveLog) else run.id
|
||||
created_at = run.run_created_at if isinstance(run, WorkflowArchiveLog) else run.created_at
|
||||
result = RestoreResult(
|
||||
run_id=run_id,
|
||||
tenant_id=run.tenant_id,
|
||||
success=False,
|
||||
restored_counts={},
|
||||
)
|
||||
|
||||
if not self.dry_run:
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Starting restore for workflow run {run_id} (tenant={run.tenant_id})",
|
||||
fg="white",
|
||||
)
|
||||
)
|
||||
|
||||
try:
|
||||
storage = get_archive_storage()
|
||||
except ArchiveStorageNotConfiguredError as e:
|
||||
result.error = str(e)
|
||||
click.echo(click.style(f"Archive storage not configured: {e}", fg="red"))
|
||||
result.elapsed_time = time.time() - start_time
|
||||
return result
|
||||
|
||||
prefix = (
|
||||
f"{run.tenant_id}/app_id={run.app_id}/year={created_at.strftime('%Y')}/"
|
||||
f"month={created_at.strftime('%m')}/workflow_run_id={run_id}"
|
||||
)
|
||||
archive_key = f"{prefix}/{ARCHIVE_BUNDLE_NAME}"
|
||||
try:
|
||||
archive_data = storage.get_object(archive_key)
|
||||
except FileNotFoundError:
|
||||
result.error = f"Archive bundle not found: {archive_key}"
|
||||
click.echo(click.style(result.error, fg="red"))
|
||||
result.elapsed_time = time.time() - start_time
|
||||
return result
|
||||
|
||||
with session_maker() as session:
|
||||
try:
|
||||
with zipfile.ZipFile(io.BytesIO(archive_data), mode="r") as archive:
|
||||
try:
|
||||
manifest = self._load_manifest_from_zip(archive)
|
||||
except ValueError as e:
|
||||
result.error = f"Archive bundle invalid: {e}"
|
||||
click.echo(click.style(result.error, fg="red"))
|
||||
return result
|
||||
|
||||
tables = manifest.get("tables", {})
|
||||
schema_version = self._get_schema_version(manifest)
|
||||
for table_name, info in tables.items():
|
||||
row_count = info.get("row_count", 0)
|
||||
if row_count == 0:
|
||||
result.restored_counts[table_name] = 0
|
||||
continue
|
||||
|
||||
if self.dry_run:
|
||||
result.restored_counts[table_name] = row_count
|
||||
continue
|
||||
|
||||
member_path = f"{table_name}.jsonl"
|
||||
try:
|
||||
data = archive.read(member_path)
|
||||
except KeyError:
|
||||
click.echo(
|
||||
click.style(
|
||||
f" Warning: Table data not found in archive: {member_path}",
|
||||
fg="yellow",
|
||||
)
|
||||
)
|
||||
result.restored_counts[table_name] = 0
|
||||
continue
|
||||
|
||||
records = ArchiveStorage.deserialize_from_jsonl(data)
|
||||
restored = self._restore_table_records(
|
||||
session,
|
||||
table_name,
|
||||
records,
|
||||
schema_version=schema_version,
|
||||
)
|
||||
result.restored_counts[table_name] = restored
|
||||
if not self.dry_run:
|
||||
click.echo(
|
||||
click.style(
|
||||
f" Restored {restored}/{len(records)} records to {table_name}",
|
||||
fg="white",
|
||||
)
|
||||
)
|
||||
|
||||
# Verify row counts match manifest
|
||||
manifest_total = sum(info.get("row_count", 0) for info in tables.values())
|
||||
restored_total = sum(result.restored_counts.values())
|
||||
|
||||
if not self.dry_run:
|
||||
# Note: restored count might be less than manifest count if records already exist
|
||||
logger.info(
|
||||
"Restore verification: manifest_total=%d, restored_total=%d",
|
||||
manifest_total,
|
||||
restored_total,
|
||||
)
|
||||
|
||||
# Delete the archive log record after successful restore
|
||||
repo = self._get_workflow_run_repo()
|
||||
repo.delete_archive_log_by_run_id(session, run_id)
|
||||
|
||||
session.commit()
|
||||
|
||||
result.success = True
|
||||
if not self.dry_run:
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Completed restore for workflow run {run_id}: restored={result.restored_counts}",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.exception("Failed to restore workflow run %s", run_id)
|
||||
result.error = str(e)
|
||||
session.rollback()
|
||||
click.echo(click.style(f"Restore failed: {e}", fg="red"))
|
||||
|
||||
result.elapsed_time = time.time() - start_time
|
||||
return result
|
||||
|
||||
def _get_workflow_run_repo(self) -> APIWorkflowRunRepository:
|
||||
if self.workflow_run_repo is not None:
|
||||
return self.workflow_run_repo
|
||||
|
||||
self.workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(
|
||||
sessionmaker(bind=db.engine, expire_on_commit=False)
|
||||
)
|
||||
return self.workflow_run_repo
|
||||
|
||||
@staticmethod
|
||||
def _load_manifest_from_zip(archive: zipfile.ZipFile) -> dict[str, Any]:
|
||||
try:
|
||||
data = archive.read("manifest.json")
|
||||
except KeyError as e:
|
||||
raise ValueError("manifest.json missing from archive bundle") from e
|
||||
return json.loads(data.decode("utf-8"))
|
||||
|
||||
def _restore_table_records(
|
||||
self,
|
||||
session: Session,
|
||||
table_name: str,
|
||||
records: list[dict[str, Any]],
|
||||
*,
|
||||
schema_version: str,
|
||||
) -> int:
|
||||
"""
|
||||
Restore records to a table.
|
||||
|
||||
Uses INSERT ... ON CONFLICT DO NOTHING for idempotency.
|
||||
|
||||
Args:
|
||||
session: Database session
|
||||
table_name: Name of the table
|
||||
records: List of record dictionaries
|
||||
schema_version: Archived schema version from manifest
|
||||
|
||||
Returns:
|
||||
Number of records actually inserted
|
||||
"""
|
||||
if not records:
|
||||
return 0
|
||||
|
||||
model = TABLE_MODELS.get(table_name)
|
||||
if not model:
|
||||
logger.warning("Unknown table: %s", table_name)
|
||||
return 0
|
||||
|
||||
column_names, required_columns, non_nullable_with_default = self._get_model_column_info(model)
|
||||
unknown_fields: set[str] = set()
|
||||
|
||||
# Apply schema mapping, filter to current columns, then convert datetimes
|
||||
converted_records = []
|
||||
for record in records:
|
||||
mapped = self._apply_schema_mapping(table_name, schema_version, record)
|
||||
unknown_fields.update(set(mapped.keys()) - column_names)
|
||||
filtered = {key: value for key, value in mapped.items() if key in column_names}
|
||||
for key in non_nullable_with_default:
|
||||
if key in filtered and filtered[key] is None:
|
||||
filtered.pop(key)
|
||||
missing_required = [key for key in required_columns if key not in filtered or filtered.get(key) is None]
|
||||
if missing_required:
|
||||
missing_cols = ", ".join(sorted(missing_required))
|
||||
raise ValueError(
|
||||
f"Missing required columns for {table_name} (schema_version={schema_version}): {missing_cols}"
|
||||
)
|
||||
converted = self._convert_datetime_fields(filtered, model)
|
||||
converted_records.append(converted)
|
||||
if unknown_fields:
|
||||
logger.warning(
|
||||
"Dropped unknown columns for %s (schema_version=%s): %s",
|
||||
table_name,
|
||||
schema_version,
|
||||
", ".join(sorted(unknown_fields)),
|
||||
)
|
||||
|
||||
# Use INSERT ... ON CONFLICT DO NOTHING for idempotency
|
||||
stmt = pg_insert(model).values(converted_records)
|
||||
stmt = stmt.on_conflict_do_nothing(index_elements=["id"])
|
||||
|
||||
result = session.execute(stmt)
|
||||
return cast(CursorResult, result).rowcount or 0
|
||||
|
||||
def _convert_datetime_fields(
|
||||
self,
|
||||
record: dict[str, Any],
|
||||
model: type[DeclarativeBase] | Any,
|
||||
) -> dict[str, Any]:
|
||||
"""Convert ISO datetime strings to datetime objects."""
|
||||
from sqlalchemy import DateTime
|
||||
|
||||
result = dict(record)
|
||||
|
||||
for column in model.__table__.columns:
|
||||
if isinstance(column.type, DateTime):
|
||||
value = result.get(column.key)
|
||||
if isinstance(value, str):
|
||||
try:
|
||||
result[column.key] = datetime.fromisoformat(value)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
return result
|
||||
|
||||
def _get_schema_version(self, manifest: dict[str, Any]) -> str:
|
||||
schema_version = manifest.get("schema_version")
|
||||
if not schema_version:
|
||||
logger.warning("Manifest missing schema_version; defaulting to 1.0")
|
||||
schema_version = "1.0"
|
||||
schema_version = str(schema_version)
|
||||
if schema_version not in SCHEMA_MAPPERS:
|
||||
raise ValueError(f"Unsupported schema_version {schema_version}. Add a mapping before restoring.")
|
||||
return schema_version
|
||||
|
||||
def _apply_schema_mapping(
|
||||
self,
|
||||
table_name: str,
|
||||
schema_version: str,
|
||||
record: dict[str, Any],
|
||||
) -> dict[str, Any]:
|
||||
# Keep hook for forward/backward compatibility when schema evolves.
|
||||
mapper = SCHEMA_MAPPERS.get(schema_version, {}).get(table_name)
|
||||
if mapper is None:
|
||||
return dict(record)
|
||||
return mapper(record)
|
||||
|
||||
def _get_model_column_info(
|
||||
self,
|
||||
model: type[DeclarativeBase] | Any,
|
||||
) -> tuple[set[str], set[str], set[str]]:
|
||||
columns = list(model.__table__.columns)
|
||||
column_names = {column.key for column in columns}
|
||||
required_columns = {
|
||||
column.key
|
||||
for column in columns
|
||||
if not column.nullable
|
||||
and column.default is None
|
||||
and column.server_default is None
|
||||
and not column.autoincrement
|
||||
}
|
||||
non_nullable_with_default = {
|
||||
column.key
|
||||
for column in columns
|
||||
if not column.nullable
|
||||
and (column.default is not None or column.server_default is not None or column.autoincrement)
|
||||
}
|
||||
return column_names, required_columns, non_nullable_with_default
|
||||
|
||||
def restore_batch(
|
||||
self,
|
||||
tenant_ids: list[str] | None,
|
||||
start_date: datetime,
|
||||
end_date: datetime,
|
||||
limit: int = 100,
|
||||
) -> list[RestoreResult]:
|
||||
"""
|
||||
Restore multiple workflow runs by time range.
|
||||
|
||||
Args:
|
||||
tenant_ids: Optional tenant IDs
|
||||
start_date: Start date filter
|
||||
end_date: End date filter
|
||||
limit: Maximum number of runs to restore (default: 100)
|
||||
|
||||
Returns:
|
||||
List of RestoreResult objects
|
||||
"""
|
||||
results: list[RestoreResult] = []
|
||||
if tenant_ids is not None and not tenant_ids:
|
||||
return results
|
||||
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
|
||||
repo = self._get_workflow_run_repo()
|
||||
|
||||
with session_maker() as session:
|
||||
archive_logs = repo.get_archived_logs_by_time_range(
|
||||
session=session,
|
||||
tenant_ids=tenant_ids,
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Found {len(archive_logs)} archived workflow runs to restore",
|
||||
fg="white",
|
||||
)
|
||||
)
|
||||
|
||||
def _restore_with_session(archive_log: WorkflowArchiveLog) -> RestoreResult:
|
||||
return self._restore_from_run(
|
||||
archive_log,
|
||||
session_maker=session_maker,
|
||||
)
|
||||
|
||||
with ThreadPoolExecutor(max_workers=self.workers) as executor:
|
||||
results = list(executor.map(_restore_with_session, archive_logs))
|
||||
|
||||
total_counts: dict[str, int] = {}
|
||||
for result in results:
|
||||
for table_name, count in result.restored_counts.items():
|
||||
total_counts[table_name] = total_counts.get(table_name, 0) + count
|
||||
success_count = sum(1 for result in results if result.success)
|
||||
|
||||
if self.dry_run:
|
||||
click.echo(
|
||||
click.style(
|
||||
f"[DRY RUN] Would restore {len(results)} workflow runs: totals={total_counts}",
|
||||
fg="yellow",
|
||||
)
|
||||
)
|
||||
else:
|
||||
click.echo(
|
||||
click.style(
|
||||
f"Restored {success_count}/{len(results)} workflow runs: totals={total_counts}",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
|
||||
return results
|
||||
|
||||
def restore_by_run_id(
|
||||
self,
|
||||
run_id: str,
|
||||
) -> RestoreResult:
|
||||
"""
|
||||
Restore a single workflow run by run ID.
|
||||
"""
|
||||
repo = self._get_workflow_run_repo()
|
||||
archive_log = repo.get_archived_log_by_run_id(run_id)
|
||||
|
||||
if not archive_log:
|
||||
click.echo(click.style(f"Workflow run archive {run_id} not found", fg="red"))
|
||||
return RestoreResult(
|
||||
run_id=run_id,
|
||||
tenant_id="",
|
||||
success=False,
|
||||
restored_counts={},
|
||||
error=f"Workflow run archive {run_id} not found",
|
||||
)
|
||||
|
||||
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
|
||||
result = self._restore_from_run(archive_log, session_maker=session_maker)
|
||||
if self.dry_run and result.success:
|
||||
click.echo(
|
||||
click.style(
|
||||
f"[DRY RUN] Would restore workflow run {run_id}: totals={result.restored_counts}",
|
||||
fg="yellow",
|
||||
)
|
||||
)
|
||||
return result
|
||||
@ -7,7 +7,7 @@ from sqlalchemy import and_, func, or_, select
from sqlalchemy.orm import Session

from core.workflow.enums import WorkflowExecutionStatus
from models import Account, App, EndUser, WorkflowAppLog, WorkflowArchiveLog, WorkflowRun
from models import Account, App, EndUser, WorkflowAppLog, WorkflowRun
from models.enums import AppTriggerType, CreatorUserRole
from models.trigger import WorkflowTriggerLog
from services.plugin.plugin_service import PluginService
@ -173,80 +173,7 @@ class WorkflowAppService:
|
||||
"data": items,
|
||||
}
|
||||
|
||||
def get_paginate_workflow_archive_logs(
|
||||
self,
|
||||
*,
|
||||
session: Session,
|
||||
app_model: App,
|
||||
page: int = 1,
|
||||
limit: int = 20,
|
||||
):
|
||||
"""
|
||||
Get paginate workflow archive logs using SQLAlchemy 2.0 style.
|
||||
"""
|
||||
stmt = select(WorkflowArchiveLog).where(
|
||||
WorkflowArchiveLog.tenant_id == app_model.tenant_id,
|
||||
WorkflowArchiveLog.app_id == app_model.id,
|
||||
WorkflowArchiveLog.log_id.isnot(None),
|
||||
)
|
||||
|
||||
stmt = stmt.order_by(WorkflowArchiveLog.run_created_at.desc())
|
||||
|
||||
count_stmt = select(func.count()).select_from(stmt.subquery())
|
||||
total = session.scalar(count_stmt) or 0
|
||||
|
||||
offset_stmt = stmt.offset((page - 1) * limit).limit(limit)
|
||||
|
||||
logs = list(session.scalars(offset_stmt).all())
|
||||
account_ids = {log.created_by for log in logs if log.created_by_role == CreatorUserRole.ACCOUNT}
|
||||
end_user_ids = {log.created_by for log in logs if log.created_by_role == CreatorUserRole.END_USER}
|
||||
|
||||
accounts_by_id = {}
|
||||
if account_ids:
|
||||
accounts_by_id = {
|
||||
account.id: account
|
||||
for account in session.scalars(select(Account).where(Account.id.in_(account_ids))).all()
|
||||
}
|
||||
|
||||
end_users_by_id = {}
|
||||
if end_user_ids:
|
||||
end_users_by_id = {
|
||||
end_user.id: end_user
|
||||
for end_user in session.scalars(select(EndUser).where(EndUser.id.in_(end_user_ids))).all()
|
||||
}
|
||||
|
||||
items = []
|
||||
for log in logs:
|
||||
if log.created_by_role == CreatorUserRole.ACCOUNT:
|
||||
created_by_account = accounts_by_id.get(log.created_by)
|
||||
created_by_end_user = None
|
||||
elif log.created_by_role == CreatorUserRole.END_USER:
|
||||
created_by_account = None
|
||||
created_by_end_user = end_users_by_id.get(log.created_by)
|
||||
else:
|
||||
created_by_account = None
|
||||
created_by_end_user = None
|
||||
|
||||
items.append(
|
||||
{
|
||||
"id": log.id,
|
||||
"workflow_run": log.workflow_run_summary,
|
||||
"trigger_metadata": self.handle_trigger_metadata(app_model.tenant_id, log.trigger_metadata),
|
||||
"created_by_account": created_by_account,
|
||||
"created_by_end_user": created_by_end_user,
|
||||
"created_at": log.log_created_at,
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
"page": page,
|
||||
"limit": limit,
|
||||
"total": total,
|
||||
"has_more": total > page * limit,
|
||||
"data": items,
|
||||
}
|
||||
|
||||
def handle_trigger_metadata(self, tenant_id: str, meta_val: str | None) -> dict[str, Any]:
def handle_trigger_metadata(self, tenant_id: str, meta_val: str) -> dict[str, Any]:
metadata: dict[str, Any] | None = self._safe_json_loads(meta_val)
if not metadata:
return {}

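The signature change above narrows `meta_val` to `str` now that the archive-log path, which could pass a missing value, is removed. A hedged sketch of the parsing behaviour the method relies on, with a stand-in for the private `_safe_json_loads` helper whose implementation is not shown in this diff.

```python
# Stand-in helper for illustration; the real _safe_json_loads is not shown here.
import json
from typing import Any


def _safe_json_loads(meta_val: str | None) -> dict[str, Any] | None:
    if not meta_val:
        return None
    try:
        parsed = json.loads(meta_val)
    except (TypeError, ValueError):
        return None
    return parsed if isinstance(parsed, dict) else None


assert _safe_json_loads("") is None
assert _safe_json_loads('{"trigger": "webhook"}') == {"trigger": "webhook"}
```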
@ -11,10 +11,8 @@ from sqlalchemy.engine import CursorResult
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import sessionmaker

from configs import dify_config
from core.db.session_factory import session_factory
from extensions.ext_database import db
from libs.archive_storage import ArchiveStorageNotConfiguredError, get_archive_storage
from models import (
ApiToken,
AppAnnotationHitHistory,
@ -45,7 +43,6 @@ from models.workflow import (
ConversationVariable,
Workflow,
WorkflowAppLog,
WorkflowArchiveLog,
)
from repositories.factory import DifyAPIRepositoryFactory

@ -70,9 +67,6 @@ def remove_app_and_related_data_task(self, tenant_id: str, app_id: str):
_delete_app_workflow_runs(tenant_id, app_id)
_delete_app_workflow_node_executions(tenant_id, app_id)
_delete_app_workflow_app_logs(tenant_id, app_id)
if dify_config.BILLING_ENABLED and dify_config.ARCHIVE_STORAGE_ENABLED:
_delete_app_workflow_archive_logs(tenant_id, app_id)
_delete_archived_workflow_run_files(tenant_id, app_id)
_delete_app_conversations(tenant_id, app_id)
_delete_app_messages(tenant_id, app_id)
_delete_workflow_tool_providers(tenant_id, app_id)
@ -258,45 +252,6 @@ def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):
)


def _delete_app_workflow_archive_logs(tenant_id: str, app_id: str):
def del_workflow_archive_log(workflow_archive_log_id: str):
db.session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
synchronize_session=False
)

_delete_records(
"""select id from workflow_archive_logs where tenant_id=:tenant_id and app_id=:app_id limit 1000""",
{"tenant_id": tenant_id, "app_id": app_id},
del_workflow_archive_log,
"workflow archive log",
)


def _delete_archived_workflow_run_files(tenant_id: str, app_id: str):
prefix = f"{tenant_id}/app_id={app_id}/"
try:
archive_storage = get_archive_storage()
except ArchiveStorageNotConfiguredError as e:
logger.info("Archive storage not configured, skipping archive file cleanup: %s", e)
return

try:
keys = archive_storage.list_objects(prefix)
except Exception:
logger.exception("Failed to list archive files for app %s", app_id)
return

deleted = 0
for key in keys:
try:
archive_storage.delete_object(key)
deleted += 1
except Exception:
logger.exception("Failed to delete archive object %s", key)

logger.info("Deleted %s archive objects for app %s", deleted, app_id)


def _delete_app_conversations(tenant_id: str, app_id: str):
def del_conversation(session, conversation_id: str):
session.query(PinnedConversation).where(PinnedConversation.conversation_id == conversation_id).delete(

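The removed `_delete_app_workflow_archive_logs` above leans on the task module's `_delete_records` helper, which, judging from its usage here, selects ids in batches of 1000 and hands each id to a per-record delete callback. A hedged sketch of that pattern, assuming a loop-until-empty behaviour; it is not the module's actual helper.

```python
# Illustrative batch-delete loop; assumes each pass selects at most 1000 ids
# and stops when no rows remain. Not the actual _delete_records helper.
from collections.abc import Callable

from sqlalchemy import text
from sqlalchemy.orm import Session


def delete_in_batches(session: Session, select_sql: str, params: dict, on_delete: Callable[[str], None]) -> int:
    deleted = 0
    while True:
        ids = [row[0] for row in session.execute(text(select_sql), params)]
        if not ids:
            break
        for record_id in ids:
            on_delete(record_id)  # e.g. del_workflow_archive_log(record_id)
            deleted += 1
        session.commit()
    return deleted
```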
@ -4,13 +4,7 @@ import pytest
from faker import Faker

from enums.cloud_plan import CloudPlan
from services.feature_service import (
FeatureModel,
FeatureService,
KnowledgeRateLimitModel,
LicenseStatus,
SystemFeatureModel,
)
from services.feature_service import FeatureModel, FeatureService, KnowledgeRateLimitModel, SystemFeatureModel


class TestFeatureService:
@ -280,7 +274,7 @@ class TestFeatureService:
mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100

# Act: Execute the method under test
result = FeatureService.get_system_features(is_authenticated=True)
result = FeatureService.get_system_features()

# Assert: Verify the expected outcomes
assert result is not None
@ -330,61 +324,6 @@ class TestFeatureService:
|
||||
# Verify mock interactions
|
||||
mock_external_service_dependencies["enterprise_service"].get_info.assert_called_once()
|
||||
|
||||
def test_get_system_features_unauthenticated(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test system features retrieval for an unauthenticated user.
|
||||
|
||||
This test verifies that:
|
||||
- The response payload is minimized (e.g., verbose license details are excluded).
|
||||
- Essential UI configuration (Branding, SSO, Marketplace) remains available.
|
||||
- The response structure adheres to the public schema for unauthenticated clients.
|
||||
"""
|
||||
# Arrange: Setup test data with exact same config as success test
|
||||
with patch("services.feature_service.dify_config") as mock_config:
|
||||
mock_config.ENTERPRISE_ENABLED = True
|
||||
mock_config.MARKETPLACE_ENABLED = True
|
||||
mock_config.ENABLE_EMAIL_CODE_LOGIN = True
|
||||
mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True
|
||||
mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False
|
||||
mock_config.ALLOW_REGISTER = False
|
||||
mock_config.ALLOW_CREATE_WORKSPACE = False
|
||||
mock_config.MAIL_TYPE = "smtp"
|
||||
mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100
|
||||
|
||||
# Act: Execute with is_authenticated=False
|
||||
result = FeatureService.get_system_features(is_authenticated=False)
|
||||
|
||||
# Assert: Basic structure
|
||||
assert result is not None
|
||||
assert isinstance(result, SystemFeatureModel)
|
||||
|
||||
# --- 1. Verify Response Payload Optimization (Data Minimization) ---
|
||||
# Ensure only essential UI flags are returned to unauthenticated clients
|
||||
# to keep the payload lightweight and adhere to architectural boundaries.
|
||||
assert result.license.status == LicenseStatus.NONE
|
||||
assert result.license.expired_at == ""
|
||||
assert result.license.workspaces.enabled is False
|
||||
assert result.license.workspaces.limit == 0
|
||||
assert result.license.workspaces.size == 0
|
||||
|
||||
# --- 2. Verify Public UI Configuration Availability ---
|
||||
# Ensure that data required for frontend rendering remains accessible.
|
||||
|
||||
# Branding should match the mock data
|
||||
assert result.branding.enabled is True
|
||||
assert result.branding.application_title == "Test Enterprise"
|
||||
assert result.branding.login_page_logo == "https://example.com/logo.png"
|
||||
|
||||
# SSO settings should be visible for login page rendering
|
||||
assert result.sso_enforced_for_signin is True
|
||||
assert result.sso_enforced_for_signin_protocol == "saml"
|
||||
|
||||
# General auth settings should be visible
|
||||
assert result.enable_email_code_login is True
|
||||
|
||||
# Marketplace should be visible
|
||||
assert result.enable_marketplace is True
|
||||
|
||||
def test_get_system_features_basic_config(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test system features retrieval with basic configuration (no enterprise).
|
||||
@ -1092,7 +1031,7 @@ class TestFeatureService:
|
||||
}
|
||||
|
||||
# Act: Execute the method under test
|
||||
result = FeatureService.get_system_features(is_authenticated=True)
|
||||
result = FeatureService.get_system_features()
|
||||
|
||||
# Assert: Verify the expected outcomes
|
||||
assert result is not None
|
||||
@ -1461,7 +1400,7 @@ class TestFeatureService:
|
||||
}
|
||||
|
||||
# Act: Execute the method under test
|
||||
result = FeatureService.get_system_features(is_authenticated=True)
|
||||
result = FeatureService.get_system_features()
|
||||
|
||||
# Assert: Verify the expected outcomes
|
||||
assert result is not None
|
||||
|
||||
@ -30,12 +30,3 @@ class TestWorkflowExecutionStatus:
|
||||
|
||||
for status in non_ended_statuses:
|
||||
assert not status.is_ended(), f"{status} should not be considered ended"
|
||||
|
||||
def test_ended_values(self):
|
||||
"""Test ended_values returns the expected status values."""
|
||||
assert set(WorkflowExecutionStatus.ended_values()) == {
|
||||
WorkflowExecutionStatus.SUCCEEDED.value,
|
||||
WorkflowExecutionStatus.FAILED.value,
|
||||
WorkflowExecutionStatus.PARTIAL_SUCCEEDED.value,
|
||||
WorkflowExecutionStatus.STOPPED.value,
|
||||
}
|
||||
|
||||
@ -37,20 +37,6 @@ def _client_error(code: str) -> ClientError:
|
||||
def _mock_client(monkeypatch):
|
||||
client = MagicMock()
|
||||
client.head_bucket.return_value = None
|
||||
# Configure put_object to return a proper ETag that matches the MD5 hash
|
||||
# The ETag format is typically the MD5 hash wrapped in quotes
|
||||
|
||||
def mock_put_object(**kwargs):
|
||||
md5_hash = kwargs.get("Body", b"")
|
||||
if isinstance(md5_hash, bytes):
|
||||
md5_hash = hashlib.md5(md5_hash).hexdigest()
|
||||
else:
|
||||
md5_hash = hashlib.md5(md5_hash.encode()).hexdigest()
|
||||
response = MagicMock()
|
||||
response.get.return_value = f'"{md5_hash}"'
|
||||
return response
|
||||
|
||||
client.put_object.side_effect = mock_put_object
|
||||
boto_client = MagicMock(return_value=client)
|
||||
monkeypatch.setattr(storage_module.boto3, "client", boto_client)
|
||||
return client, boto_client
|
||||
@ -268,8 +254,8 @@ def test_serialization_roundtrip():
|
||||
{"id": "2", "value": 123},
|
||||
]
|
||||
|
||||
data = ArchiveStorage.serialize_to_jsonl(records)
|
||||
decoded = ArchiveStorage.deserialize_from_jsonl(data)
|
||||
data = ArchiveStorage.serialize_to_jsonl_gz(records)
|
||||
decoded = ArchiveStorage.deserialize_from_jsonl_gz(data)
|
||||
|
||||
assert decoded[0]["id"] == "1"
|
||||
assert decoded[0]["payload"]["nested"] == "value"
|
||||
|
||||
@ -1,54 +0,0 @@
|
||||
"""
|
||||
Unit tests for workflow run archiving functionality.
|
||||
|
||||
This module contains tests for:
|
||||
- Archive service
|
||||
- Rollback service
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from services.retention.workflow_run.constants import ARCHIVE_BUNDLE_NAME
|
||||
|
||||
|
||||
class TestWorkflowRunArchiver:
|
||||
"""Tests for the WorkflowRunArchiver class."""
|
||||
|
||||
@patch("services.retention.workflow_run.archive_paid_plan_workflow_run.dify_config")
|
||||
@patch("services.retention.workflow_run.archive_paid_plan_workflow_run.get_archive_storage")
|
||||
def test_archiver_initialization(self, mock_get_storage, mock_config):
|
||||
"""Test archiver can be initialized with various options."""
|
||||
from services.retention.workflow_run.archive_paid_plan_workflow_run import WorkflowRunArchiver
|
||||
|
||||
mock_config.BILLING_ENABLED = False
|
||||
|
||||
archiver = WorkflowRunArchiver(
|
||||
days=90,
|
||||
batch_size=100,
|
||||
tenant_ids=["test-tenant"],
|
||||
limit=50,
|
||||
dry_run=True,
|
||||
)
|
||||
|
||||
assert archiver.days == 90
|
||||
assert archiver.batch_size == 100
|
||||
assert archiver.tenant_ids == ["test-tenant"]
|
||||
assert archiver.limit == 50
|
||||
assert archiver.dry_run is True
|
||||
|
||||
def test_get_archive_key(self):
|
||||
"""Test archive key generation."""
|
||||
from services.retention.workflow_run.archive_paid_plan_workflow_run import WorkflowRunArchiver
|
||||
|
||||
archiver = WorkflowRunArchiver.__new__(WorkflowRunArchiver)
|
||||
|
||||
mock_run = MagicMock()
|
||||
mock_run.tenant_id = "tenant-123"
|
||||
mock_run.app_id = "app-999"
|
||||
mock_run.id = "run-456"
|
||||
mock_run.created_at = datetime(2024, 1, 15, 12, 0, 0)
|
||||
|
||||
key = archiver._get_archive_key(mock_run)
|
||||
|
||||
assert key == f"tenant-123/app_id=app-999/year=2024/month=01/workflow_run_id=run-456/{ARCHIVE_BUNDLE_NAME}"
|
||||
@ -1,180 +0,0 @@
|
||||
"""
|
||||
Unit tests for archived workflow run deletion service.
|
||||
"""
|
||||
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
|
||||
class TestArchivedWorkflowRunDeletion:
|
||||
def test_delete_by_run_id_returns_error_when_run_missing(self):
|
||||
from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion
|
||||
|
||||
deleter = ArchivedWorkflowRunDeletion()
|
||||
repo = MagicMock()
|
||||
session = MagicMock()
|
||||
session.get.return_value = None
|
||||
|
||||
session_maker = MagicMock()
|
||||
session_maker.return_value.__enter__.return_value = session
|
||||
session_maker.return_value.__exit__.return_value = None
|
||||
mock_db = MagicMock()
|
||||
mock_db.engine = MagicMock()
|
||||
|
||||
with (
|
||||
patch("services.retention.workflow_run.delete_archived_workflow_run.db", mock_db),
|
||||
patch(
|
||||
"services.retention.workflow_run.delete_archived_workflow_run.sessionmaker", return_value=session_maker
|
||||
),
|
||||
patch.object(deleter, "_get_workflow_run_repo", return_value=repo),
|
||||
):
|
||||
result = deleter.delete_by_run_id("run-1")
|
||||
|
||||
assert result.success is False
|
||||
assert result.error == "Workflow run run-1 not found"
|
||||
repo.get_archived_run_ids.assert_not_called()
|
||||
|
||||
def test_delete_by_run_id_returns_error_when_not_archived(self):
|
||||
from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion
|
||||
|
||||
deleter = ArchivedWorkflowRunDeletion()
|
||||
repo = MagicMock()
|
||||
repo.get_archived_run_ids.return_value = set()
|
||||
run = MagicMock()
|
||||
run.id = "run-1"
|
||||
run.tenant_id = "tenant-1"
|
||||
|
||||
session = MagicMock()
|
||||
session.get.return_value = run
|
||||
|
||||
session_maker = MagicMock()
|
||||
session_maker.return_value.__enter__.return_value = session
|
||||
session_maker.return_value.__exit__.return_value = None
|
||||
mock_db = MagicMock()
|
||||
mock_db.engine = MagicMock()
|
||||
|
||||
with (
|
||||
patch("services.retention.workflow_run.delete_archived_workflow_run.db", mock_db),
|
||||
patch(
|
||||
"services.retention.workflow_run.delete_archived_workflow_run.sessionmaker", return_value=session_maker
|
||||
),
|
||||
patch.object(deleter, "_get_workflow_run_repo", return_value=repo),
|
||||
patch.object(deleter, "_delete_run") as mock_delete_run,
|
||||
):
|
||||
result = deleter.delete_by_run_id("run-1")
|
||||
|
||||
assert result.success is False
|
||||
assert result.error == "Workflow run run-1 is not archived"
|
||||
mock_delete_run.assert_not_called()
|
||||
|
||||
def test_delete_by_run_id_calls_delete_run(self):
|
||||
from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion
|
||||
|
||||
deleter = ArchivedWorkflowRunDeletion()
|
||||
repo = MagicMock()
|
||||
repo.get_archived_run_ids.return_value = {"run-1"}
|
||||
run = MagicMock()
|
||||
run.id = "run-1"
|
||||
run.tenant_id = "tenant-1"
|
||||
|
||||
session = MagicMock()
|
||||
session.get.return_value = run
|
||||
|
||||
session_maker = MagicMock()
|
||||
session_maker.return_value.__enter__.return_value = session
|
||||
session_maker.return_value.__exit__.return_value = None
|
||||
mock_db = MagicMock()
|
||||
mock_db.engine = MagicMock()
|
||||
|
||||
with (
|
||||
patch("services.retention.workflow_run.delete_archived_workflow_run.db", mock_db),
|
||||
patch(
|
||||
"services.retention.workflow_run.delete_archived_workflow_run.sessionmaker", return_value=session_maker
|
||||
),
|
||||
patch.object(deleter, "_get_workflow_run_repo", return_value=repo),
|
||||
patch.object(deleter, "_delete_run", return_value=MagicMock(success=True)) as mock_delete_run,
|
||||
):
|
||||
result = deleter.delete_by_run_id("run-1")
|
||||
|
||||
assert result.success is True
|
||||
mock_delete_run.assert_called_once_with(run)
|
||||
|
||||
def test_delete_batch_uses_repo(self):
|
||||
from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion
|
||||
|
||||
deleter = ArchivedWorkflowRunDeletion()
|
||||
repo = MagicMock()
|
||||
run1 = MagicMock()
|
||||
run1.id = "run-1"
|
||||
run1.tenant_id = "tenant-1"
|
||||
run2 = MagicMock()
|
||||
run2.id = "run-2"
|
||||
run2.tenant_id = "tenant-1"
|
||||
repo.get_archived_runs_by_time_range.return_value = [run1, run2]
|
||||
|
||||
session = MagicMock()
|
||||
session_maker = MagicMock()
|
||||
session_maker.return_value.__enter__.return_value = session
|
||||
session_maker.return_value.__exit__.return_value = None
|
||||
start_date = MagicMock()
|
||||
end_date = MagicMock()
|
||||
mock_db = MagicMock()
|
||||
mock_db.engine = MagicMock()
|
||||
|
||||
with (
|
||||
patch("services.retention.workflow_run.delete_archived_workflow_run.db", mock_db),
|
||||
patch(
|
||||
"services.retention.workflow_run.delete_archived_workflow_run.sessionmaker", return_value=session_maker
|
||||
),
|
||||
patch.object(deleter, "_get_workflow_run_repo", return_value=repo),
|
||||
patch.object(
|
||||
deleter, "_delete_run", side_effect=[MagicMock(success=True), MagicMock(success=True)]
|
||||
) as mock_delete_run,
|
||||
):
|
||||
results = deleter.delete_batch(
|
||||
tenant_ids=["tenant-1"],
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
limit=2,
|
||||
)
|
||||
|
||||
assert len(results) == 2
|
||||
repo.get_archived_runs_by_time_range.assert_called_once_with(
|
||||
session=session,
|
||||
tenant_ids=["tenant-1"],
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
limit=2,
|
||||
)
|
||||
assert mock_delete_run.call_count == 2
|
||||
|
||||
def test_delete_run_dry_run(self):
|
||||
from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion
|
||||
|
||||
deleter = ArchivedWorkflowRunDeletion(dry_run=True)
|
||||
run = MagicMock()
|
||||
run.id = "run-1"
|
||||
run.tenant_id = "tenant-1"
|
||||
|
||||
with patch.object(deleter, "_get_workflow_run_repo") as mock_get_repo:
|
||||
result = deleter._delete_run(run)
|
||||
|
||||
assert result.success is True
|
||||
mock_get_repo.assert_not_called()
|
||||
|
||||
def test_delete_run_calls_repo(self):
|
||||
from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion
|
||||
|
||||
deleter = ArchivedWorkflowRunDeletion()
|
||||
run = MagicMock()
|
||||
run.id = "run-1"
|
||||
run.tenant_id = "tenant-1"
|
||||
|
||||
repo = MagicMock()
|
||||
repo.delete_runs_with_related.return_value = {"runs": 1}
|
||||
|
||||
with patch.object(deleter, "_get_workflow_run_repo", return_value=repo):
|
||||
result = deleter._delete_run(run)
|
||||
|
||||
assert result.success is True
|
||||
assert result.deleted_counts == {"runs": 1}
|
||||
repo.delete_runs_with_related.assert_called_once()
|
||||
@ -1,65 +0,0 @@
|
||||
"""
|
||||
Unit tests for workflow run restore functionality.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
|
||||
class TestWorkflowRunRestore:
|
||||
"""Tests for the WorkflowRunRestore class."""
|
||||
|
||||
def test_restore_initialization(self):
|
||||
"""Restore service should respect dry_run flag."""
|
||||
from services.retention.workflow_run.restore_archived_workflow_run import WorkflowRunRestore
|
||||
|
||||
restore = WorkflowRunRestore(dry_run=True)
|
||||
|
||||
assert restore.dry_run is True
|
||||
|
||||
def test_convert_datetime_fields(self):
|
||||
"""ISO datetime strings should be converted to datetime objects."""
|
||||
from models.workflow import WorkflowRun
|
||||
from services.retention.workflow_run.restore_archived_workflow_run import WorkflowRunRestore
|
||||
|
||||
record = {
|
||||
"id": "test-id",
|
||||
"created_at": "2024-01-01T12:00:00",
|
||||
"finished_at": "2024-01-01T12:05:00",
|
||||
"name": "test",
|
||||
}
|
||||
|
||||
restore = WorkflowRunRestore()
|
||||
result = restore._convert_datetime_fields(record, WorkflowRun)
|
||||
|
||||
assert isinstance(result["created_at"], datetime)
|
||||
assert result["created_at"].year == 2024
|
||||
assert result["created_at"].month == 1
|
||||
assert result["name"] == "test"
|
||||
|
||||
def test_restore_table_records_returns_rowcount(self):
|
||||
"""Restore should return inserted rowcount."""
|
||||
from services.retention.workflow_run.restore_archived_workflow_run import WorkflowRunRestore
|
||||
|
||||
session = MagicMock()
|
||||
session.execute.return_value = MagicMock(rowcount=2)
|
||||
|
||||
restore = WorkflowRunRestore()
|
||||
records = [{"id": "p1", "workflow_run_id": "r1", "created_at": "2024-01-01T00:00:00"}]
|
||||
|
||||
restored = restore._restore_table_records(session, "workflow_pauses", records, schema_version="1.0")
|
||||
|
||||
assert restored == 2
|
||||
session.execute.assert_called_once()
|
||||
|
||||
def test_restore_table_records_unknown_table(self):
|
||||
"""Unknown table names should be ignored gracefully."""
|
||||
from services.retention.workflow_run.restore_archived_workflow_run import WorkflowRunRestore
|
||||
|
||||
session = MagicMock()
|
||||
|
||||
restore = WorkflowRunRestore()
|
||||
restored = restore._restore_table_records(session, "unknown_table", [{"id": "x1"}], schema_version="1.0")
|
||||
|
||||
assert restored == 0
|
||||
session.execute.assert_not_called()
|
||||
@ -2,11 +2,7 @@ from unittest.mock import ANY, MagicMock, call, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from libs.archive_storage import ArchiveStorageNotConfiguredError
|
||||
from models.workflow import WorkflowArchiveLog
|
||||
from tasks.remove_app_and_related_data_task import (
|
||||
_delete_app_workflow_archive_logs,
|
||||
_delete_archived_workflow_run_files,
|
||||
_delete_draft_variable_offload_data,
|
||||
_delete_draft_variables,
|
||||
delete_draft_variables_batch,
|
||||
@ -328,68 +324,3 @@ class TestDeleteDraftVariableOffloadData:
|
||||
|
||||
# Verify error was logged
|
||||
mock_logging.exception.assert_called_once_with("Error deleting draft variable offload data:")
|
||||
|
||||
|
||||
class TestDeleteWorkflowArchiveLogs:
|
||||
@patch("tasks.remove_app_and_related_data_task._delete_records")
|
||||
@patch("tasks.remove_app_and_related_data_task.db")
|
||||
def test_delete_app_workflow_archive_logs_calls_delete_records(self, mock_db, mock_delete_records):
|
||||
tenant_id = "tenant-1"
|
||||
app_id = "app-1"
|
||||
|
||||
_delete_app_workflow_archive_logs(tenant_id, app_id)
|
||||
|
||||
mock_delete_records.assert_called_once()
|
||||
query_sql, params, delete_func, name = mock_delete_records.call_args[0]
|
||||
assert "workflow_archive_logs" in query_sql
|
||||
assert params == {"tenant_id": tenant_id, "app_id": app_id}
|
||||
assert name == "workflow archive log"
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_delete_query = MagicMock()
|
||||
mock_query.where.return_value = mock_delete_query
|
||||
mock_db.session.query.return_value = mock_query
|
||||
|
||||
delete_func("log-1")
|
||||
|
||||
mock_db.session.query.assert_called_once_with(WorkflowArchiveLog)
|
||||
mock_query.where.assert_called_once()
|
||||
mock_delete_query.delete.assert_called_once_with(synchronize_session=False)
|
||||
|
||||
|
||||
class TestDeleteArchivedWorkflowRunFiles:
|
||||
@patch("tasks.remove_app_and_related_data_task.get_archive_storage")
|
||||
@patch("tasks.remove_app_and_related_data_task.logger")
|
||||
def test_delete_archived_workflow_run_files_not_configured(self, mock_logger, mock_get_storage):
|
||||
mock_get_storage.side_effect = ArchiveStorageNotConfiguredError("missing config")
|
||||
|
||||
_delete_archived_workflow_run_files("tenant-1", "app-1")
|
||||
|
||||
assert mock_logger.info.call_count == 1
|
||||
assert "Archive storage not configured" in mock_logger.info.call_args[0][0]
|
||||
|
||||
@patch("tasks.remove_app_and_related_data_task.get_archive_storage")
|
||||
@patch("tasks.remove_app_and_related_data_task.logger")
|
||||
def test_delete_archived_workflow_run_files_list_failure(self, mock_logger, mock_get_storage):
|
||||
storage = MagicMock()
|
||||
storage.list_objects.side_effect = Exception("list failed")
|
||||
mock_get_storage.return_value = storage
|
||||
|
||||
_delete_archived_workflow_run_files("tenant-1", "app-1")
|
||||
|
||||
storage.list_objects.assert_called_once_with("tenant-1/app_id=app-1/")
|
||||
storage.delete_object.assert_not_called()
|
||||
mock_logger.exception.assert_called_once_with("Failed to list archive files for app %s", "app-1")
|
||||
|
||||
@patch("tasks.remove_app_and_related_data_task.get_archive_storage")
|
||||
@patch("tasks.remove_app_and_related_data_task.logger")
|
||||
def test_delete_archived_workflow_run_files_success(self, mock_logger, mock_get_storage):
|
||||
storage = MagicMock()
|
||||
storage.list_objects.return_value = ["key-1", "key-2"]
|
||||
mock_get_storage.return_value = storage
|
||||
|
||||
_delete_archived_workflow_run_files("tenant-1", "app-1")
|
||||
|
||||
storage.list_objects.assert_called_once_with("tenant-1/app_id=app-1/")
|
||||
storage.delete_object.assert_has_calls([call("key-1"), call("key-2")], any_order=False)
|
||||
mock_logger.info.assert_called_with("Deleted %s archive objects for app %s", 2, "app-1")
|
||||
|
||||
@ -23,12 +23,14 @@ import AppSideBar from '@/app/components/app-sidebar'
|
||||
import { useStore } from '@/app/components/app/store'
|
||||
import Loading from '@/app/components/base/loading'
|
||||
import { useStore as useTagStore } from '@/app/components/base/tag-management/store'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
|
||||
import useDocumentTitle from '@/hooks/use-document-title'
|
||||
import { fetchAppDetailDirect } from '@/service/apps'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { storage } from '@/utils/storage'
|
||||
import s from './style.module.css'
|
||||
|
||||
const TagManagementModal = dynamic(() => import('@/app/components/base/tag-management'), {
|
||||
@ -108,7 +110,7 @@ const AppDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
|
||||
|
||||
useEffect(() => {
|
||||
if (appDetail) {
|
||||
const localeMode = localStorage.getItem('app-detail-collapse-or-expand') || 'expand'
|
||||
const localeMode = storage.get<string>(STORAGE_KEYS.APP.DETAIL_COLLAPSE) || 'expand'
|
||||
const mode = isMobile ? 'collapse' : 'expand'
|
||||
setAppSidebarExpand(isMobile ? mode : localeMode)
|
||||
// TODO: consider screen size and mode
|
||||
|
||||
@ -15,7 +15,7 @@ import Loading from '@/app/components/base/loading'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import MCPServiceCard from '@/app/components/tools/mcp/mcp-service-card'
|
||||
import { isTriggerNode } from '@/app/components/workflow/types'
|
||||
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import {
|
||||
fetchAppDetail,
|
||||
updateAppSiteAccessToken,
|
||||
@ -25,6 +25,7 @@ import {
|
||||
import { useAppWorkflow } from '@/service/use-workflow'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import { asyncRunSafe } from '@/utils'
|
||||
import { storage } from '@/utils/storage'
|
||||
|
||||
export type ICardViewProps = {
|
||||
appId: string
|
||||
@ -126,7 +127,7 @@ const CardView: FC<ICardViewProps> = ({ appId, isInPanel, className }) => {
|
||||
}) as Promise<App>,
|
||||
)
|
||||
if (!err)
|
||||
localStorage.setItem(NEED_REFRESH_APP_LIST_KEY, '1')
|
||||
storage.set(STORAGE_KEYS.APP.NEED_REFRESH_LIST, '1')
|
||||
|
||||
handleCallbackResult(err)
|
||||
}
|
||||
|
||||
@ -18,6 +18,7 @@ import { useStore } from '@/app/components/app/store'
|
||||
import { PipelineFill, PipelineLine } from '@/app/components/base/icons/src/vender/pipeline'
|
||||
import Loading from '@/app/components/base/loading'
|
||||
import ExtraInfo from '@/app/components/datasets/extra-info'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import DatasetDetailContext from '@/context/dataset-detail'
|
||||
import { useEventEmitterContextContext } from '@/context/event-emitter'
|
||||
@ -25,6 +26,7 @@ import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
|
||||
import useDocumentTitle from '@/hooks/use-document-title'
|
||||
import { useDatasetDetail, useDatasetRelatedApps } from '@/service/knowledge/use-dataset'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { storage } from '@/utils/storage'
|
||||
|
||||
export type IAppDetailLayoutProps = {
|
||||
children: React.ReactNode
|
||||
@ -40,7 +42,7 @@ const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
|
||||
const pathname = usePathname()
|
||||
const hideSideBar = pathname.endsWith('documents/create') || pathname.endsWith('documents/create-from-pipeline')
|
||||
const isPipelineCanvas = pathname.endsWith('/pipeline')
|
||||
const workflowCanvasMaximize = localStorage.getItem('workflow-canvas-maximize') === 'true'
|
||||
const workflowCanvasMaximize = storage.getBoolean(STORAGE_KEYS.WORKFLOW.CANVAS_MAXIMIZE, false)
|
||||
const [hideHeader, setHideHeader] = useState(workflowCanvasMaximize)
|
||||
const { eventEmitter } = useEventEmitterContextContext()
|
||||
|
||||
@ -110,7 +112,7 @@ const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
|
||||
const setAppSidebarExpand = useStore(state => state.setAppSidebarExpand)
|
||||
|
||||
useEffect(() => {
|
||||
const localeMode = localStorage.getItem('app-detail-collapse-or-expand') || 'expand'
|
||||
const localeMode = storage.get<string>(STORAGE_KEYS.APP.DETAIL_COLLAPSE) || 'expand'
|
||||
const mode = isMobile ? 'collapse' : 'expand'
|
||||
setAppSidebarExpand(isMobile ? mode : localeMode)
|
||||
}, [isMobile, setAppSidebarExpand])
|
||||
|
||||
@ -8,12 +8,14 @@ import { useTranslation } from 'react-i18next'
|
||||
import Button from '@/app/components/base/button'
|
||||
import Input from '@/app/components/base/input'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
import { COUNT_DOWN_KEY, COUNT_DOWN_TIME_MS } from '@/app/components/signin/countdown'
|
||||
import { COUNT_DOWN_TIME_MS } from '@/app/components/signin/countdown'
|
||||
import { emailRegex } from '@/config'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
|
||||
import { useLocale } from '@/context/i18n'
|
||||
import useDocumentTitle from '@/hooks/use-document-title'
|
||||
import { sendResetPasswordCode } from '@/service/common'
|
||||
import { storage } from '@/utils/storage'
|
||||
|
||||
export default function CheckCode() {
|
||||
const { t } = useTranslation()
|
||||
@ -41,7 +43,7 @@ export default function CheckCode() {
|
||||
setIsLoading(true)
|
||||
const res = await sendResetPasswordCode(email, locale)
|
||||
if (res.result === 'success') {
|
||||
localStorage.setItem(COUNT_DOWN_KEY, `${COUNT_DOWN_TIME_MS}`)
|
||||
storage.set(STORAGE_KEYS.UI.COUNTDOWN_LEFT_TIME, COUNT_DOWN_TIME_MS)
|
||||
const params = new URLSearchParams(searchParams)
|
||||
params.set('token', encodeURIComponent(res.data))
|
||||
params.set('email', encodeURIComponent(email))
|
||||
|
||||
@ -5,10 +5,12 @@ import { useTranslation } from 'react-i18next'
|
||||
import Button from '@/app/components/base/button'
|
||||
import Input from '@/app/components/base/input'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
import { COUNT_DOWN_KEY, COUNT_DOWN_TIME_MS } from '@/app/components/signin/countdown'
|
||||
import { COUNT_DOWN_TIME_MS } from '@/app/components/signin/countdown'
|
||||
import { emailRegex } from '@/config'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useLocale } from '@/context/i18n'
|
||||
import { sendWebAppEMailLoginCode } from '@/service/common'
|
||||
import { storage } from '@/utils/storage'
|
||||
|
||||
export default function MailAndCodeAuth() {
|
||||
const { t } = useTranslation()
|
||||
@ -36,7 +38,7 @@ export default function MailAndCodeAuth() {
|
||||
setIsLoading(true)
|
||||
const ret = await sendWebAppEMailLoginCode(email, locale)
|
||||
if (ret.result === 'success') {
|
||||
localStorage.setItem(COUNT_DOWN_KEY, `${COUNT_DOWN_TIME_MS}`)
|
||||
storage.set(STORAGE_KEYS.UI.COUNTDOWN_LEFT_TIME, COUNT_DOWN_TIME_MS)
|
||||
const params = new URLSearchParams(searchParams)
|
||||
params.set('email', encodeURIComponent(email))
|
||||
params.set('token', encodeURIComponent(ret.data))
|
||||
|
||||
@ -10,6 +10,7 @@ import Button from '@/app/components/base/button'
|
||||
import Input from '@/app/components/base/input'
|
||||
import Modal from '@/app/components/base/modal'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import {
|
||||
checkEmailExisted,
|
||||
resetEmail,
|
||||
@ -18,6 +19,7 @@ import {
|
||||
} from '@/service/common'
|
||||
import { useLogout } from '@/service/use-common'
|
||||
import { asyncRunSafe } from '@/utils'
|
||||
import { storage } from '@/utils/storage'
|
||||
|
||||
type Props = {
|
||||
show: boolean
|
||||
@ -172,7 +174,7 @@ const EmailChangeModal = ({ onClose, email, show }: Props) => {
|
||||
const handleLogout = async () => {
|
||||
await logout()
|
||||
|
||||
localStorage.removeItem('setup_status')
|
||||
storage.remove(STORAGE_KEYS.CONFIG.SETUP_STATUS)
|
||||
// Tokens are now stored in cookies and cleared by backend
|
||||
|
||||
router.push('/signin')
|
||||
|
||||
@ -10,9 +10,11 @@ import { resetUser } from '@/app/components/base/amplitude/utils'
|
||||
import Avatar from '@/app/components/base/avatar'
|
||||
import { LogOut01 } from '@/app/components/base/icons/src/vender/line/general'
|
||||
import PremiumBadge from '@/app/components/base/premium-badge'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import { useProviderContext } from '@/context/provider-context'
|
||||
import { useLogout } from '@/service/use-common'
|
||||
import { storage } from '@/utils/storage'
|
||||
|
||||
export type IAppSelector = {
|
||||
isMobile: boolean
|
||||
@ -28,7 +30,7 @@ export default function AppSelector() {
|
||||
const handleLogout = async () => {
|
||||
await logout()
|
||||
|
||||
localStorage.removeItem('setup_status')
|
||||
storage.remove(STORAGE_KEYS.CONFIG.SETUP_STATUS)
|
||||
resetUser()
|
||||
// Tokens are now stored in cookies and cleared by backend
|
||||
|
||||
|
||||
@ -2,7 +2,9 @@
|
||||
import { useCallback, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import CustomDialog from '@/app/components/base/dialog'
|
||||
import { COUNT_DOWN_KEY, COUNT_DOWN_TIME_MS } from '@/app/components/signin/countdown'
|
||||
import { COUNT_DOWN_TIME_MS } from '@/app/components/signin/countdown'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { storage } from '@/utils/storage'
|
||||
import CheckEmail from './components/check-email'
|
||||
import FeedBack from './components/feed-back'
|
||||
import VerifyEmail from './components/verify-email'
|
||||
@ -21,7 +23,7 @@ export default function DeleteAccount(props: DeleteAccountProps) {
|
||||
const handleEmailCheckSuccess = useCallback(async () => {
|
||||
try {
|
||||
setShowVerifyEmail(true)
|
||||
localStorage.setItem(COUNT_DOWN_KEY, `${COUNT_DOWN_TIME_MS}`)
|
||||
storage.set(STORAGE_KEYS.UI.COUNTDOWN_LEFT_TIME, COUNT_DOWN_TIME_MS)
|
||||
}
|
||||
catch (error) { console.error(error) }
|
||||
}, [])
|
||||
|
||||
@ -17,11 +17,12 @@ import Button from '@/app/components/base/button'
|
||||
import Loading from '@/app/components/base/loading'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
import { useLanguage } from '@/app/components/header/account-setting/model-provider-page/hooks'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import { useIsLogin } from '@/service/use-common'
|
||||
import { useAuthorizeOAuthApp, useOAuthAppInfo } from '@/service/use-oauth'
|
||||
import { storage } from '@/utils/storage'
|
||||
import {
|
||||
OAUTH_AUTHORIZE_PENDING_KEY,
|
||||
OAUTH_AUTHORIZE_PENDING_TTL,
|
||||
REDIRECT_URL_KEY,
|
||||
} from './constants'
|
||||
@ -31,7 +32,7 @@ function setItemWithExpiry(key: string, value: string, ttl: number) {
|
||||
value,
|
||||
expiry: dayjs().add(ttl, 'seconds').unix(),
|
||||
}
|
||||
localStorage.setItem(key, JSON.stringify(item))
|
||||
storage.set(key, JSON.stringify(item))
|
||||
}
|
||||
|
||||
function buildReturnUrl(pathname: string, search: string) {
|
||||
@ -86,7 +87,7 @@ export default function OAuthAuthorize() {
|
||||
const onLoginSwitchClick = () => {
|
||||
try {
|
||||
const returnUrl = buildReturnUrl('/account/oauth/authorize', `?client_id=${encodeURIComponent(client_id)}&redirect_uri=${encodeURIComponent(redirect_uri)}`)
|
||||
setItemWithExpiry(OAUTH_AUTHORIZE_PENDING_KEY, returnUrl, OAUTH_AUTHORIZE_PENDING_TTL)
|
||||
setItemWithExpiry(STORAGE_KEYS.AUTH.OAUTH_AUTHORIZE_PENDING, returnUrl, OAUTH_AUTHORIZE_PENDING_TTL)
|
||||
router.push(`/signin?${REDIRECT_URL_KEY}=${encodeURIComponent(returnUrl)}`)
|
||||
}
|
||||
catch {
|
||||
|
||||
@ -7,13 +7,16 @@ import { parseAsString, useQueryState } from 'nuqs'
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import {
|
||||
EDUCATION_VERIFY_URL_SEARCHPARAMS_ACTION,
|
||||
EDUCATION_VERIFYING_LOCALSTORAGE_ITEM,
|
||||
} from '@/app/education-apply/constants'
|
||||
import { LEGACY_KEY_MIGRATIONS, STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { sendGAEvent } from '@/utils/gtag'
|
||||
import { fetchSetupStatusWithCache } from '@/utils/setup-status'
|
||||
import { storage } from '@/utils/storage'
|
||||
import { resolvePostLoginRedirect } from '../signin/utils/post-login-redirect'
|
||||
import { trackEvent } from './base/amplitude'
|
||||
|
||||
storage.runMigrations(LEGACY_KEY_MIGRATIONS)
|
||||
|
||||
type AppInitializerProps = {
|
||||
children: ReactNode
|
||||
}
|
||||
@ -75,7 +78,7 @@ export const AppInitializer = ({
|
||||
}
|
||||
|
||||
if (action === EDUCATION_VERIFY_URL_SEARCHPARAMS_ACTION)
|
||||
localStorage.setItem(EDUCATION_VERIFYING_LOCALSTORAGE_ITEM, 'yes')
|
||||
storage.set(STORAGE_KEYS.EDUCATION.VERIFYING, 'yes')
|
||||
|
||||
try {
|
||||
const isFinished = await isSetupFinished()
|
||||
|
||||
@ -22,7 +22,7 @@ import { useStore as useAppStore } from '@/app/components/app/store'
|
||||
import Button from '@/app/components/base/button'
|
||||
import ContentDialog from '@/app/components/base/content-dialog'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import { useProviderContext } from '@/context/provider-context'
|
||||
import { copyApp, deleteApp, exportAppConfig, updateAppInfo } from '@/service/apps'
|
||||
@ -31,6 +31,7 @@ import { fetchWorkflowDraft } from '@/service/workflow'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import { getRedirection } from '@/utils/app-redirection'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { storage } from '@/utils/storage'
|
||||
import AppIcon from '../base/app-icon'
|
||||
import AppOperations from './app-operations'
|
||||
|
||||
@ -128,7 +129,7 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
|
||||
type: 'success',
|
||||
message: t('newApp.appCreated', { ns: 'app' }),
|
||||
})
|
||||
localStorage.setItem(NEED_REFRESH_APP_LIST_KEY, '1')
|
||||
storage.set(STORAGE_KEYS.APP.NEED_REFRESH_LIST, '1')
|
||||
onPlanInfoChanged()
|
||||
getRedirection(true, newApp, replace)
|
||||
}
|
||||
|
||||
@ -5,9 +5,11 @@ import * as React from 'react'
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import { useShallow } from 'zustand/react/shallow'
|
||||
import { useStore as useAppStore } from '@/app/components/app/store'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useEventEmitterContextContext } from '@/context/event-emitter'
|
||||
import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { storage } from '@/utils/storage'
|
||||
import Divider from '../base/divider'
|
||||
import { getKeyboardKeyCodeBySystem } from '../workflow/utils'
|
||||
import AppInfo from './app-info'
|
||||
@ -53,7 +55,7 @@ const AppDetailNav = ({
|
||||
const pathname = usePathname()
|
||||
const inWorkflowCanvas = pathname.endsWith('/workflow')
|
||||
const isPipelineCanvas = pathname.endsWith('/pipeline')
|
||||
const workflowCanvasMaximize = localStorage.getItem('workflow-canvas-maximize') === 'true'
|
||||
const workflowCanvasMaximize = storage.getBoolean(STORAGE_KEYS.WORKFLOW.CANVAS_MAXIMIZE, false)
|
||||
const [hideHeader, setHideHeader] = useState(workflowCanvasMaximize)
|
||||
const { eventEmitter } = useEventEmitterContextContext()
|
||||
|
||||
@ -64,7 +66,7 @@ const AppDetailNav = ({
|
||||
|
||||
useEffect(() => {
|
||||
if (appSidebarExpand) {
|
||||
localStorage.setItem('app-detail-collapse-or-expand', appSidebarExpand)
|
||||
storage.set(STORAGE_KEYS.APP.DETAIL_COLLAPSE, appSidebarExpand)
|
||||
setAppSidebarExpand(appSidebarExpand)
|
||||
}
|
||||
}, [appSidebarExpand, setAppSidebarExpand])
|
||||
|
||||
@ -30,8 +30,10 @@ import { ModelTypeEnum } from '@/app/components/header/account-setting/model-pro
|
||||
|
||||
import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
|
||||
import ModelParameterModal from '@/app/components/header/account-setting/model-provider-page/model-parameter-modal'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { generateBasicAppFirstTimeRule, generateRule } from '@/service/debug'
|
||||
import { useGenerateRuleTemplate } from '@/service/use-apps'
|
||||
import { storage } from '@/utils/storage'
|
||||
import IdeaOutput from './idea-output'
|
||||
import InstructionEditorInBasic from './instruction-editor'
|
||||
import InstructionEditorInWorkflow from './instruction-editor-in-workflow'
|
||||
@ -83,9 +85,7 @@ const GetAutomaticRes: FC<IGetAutomaticResProps> = ({
|
||||
onFinished,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
const localModel = localStorage.getItem('auto-gen-model')
|
||||
? JSON.parse(localStorage.getItem('auto-gen-model') as string) as Model
|
||||
: null
|
||||
const localModel = storage.get<Model>(STORAGE_KEYS.CONFIG.AUTO_GEN_MODEL)
|
||||
const [model, setModel] = React.useState<Model>(localModel || {
|
||||
name: '',
|
||||
provider: '',
|
||||
@ -178,9 +178,7 @@ const GetAutomaticRes: FC<IGetAutomaticResProps> = ({
|
||||
|
||||
useEffect(() => {
|
||||
if (defaultModel) {
|
||||
const localModel = localStorage.getItem('auto-gen-model')
|
||||
? JSON.parse(localStorage.getItem('auto-gen-model') || '')
|
||||
: null
|
||||
const localModel = storage.get<Model>(STORAGE_KEYS.CONFIG.AUTO_GEN_MODEL)
|
||||
if (localModel) {
|
||||
setModel(localModel)
|
||||
}
|
||||
@ -209,7 +207,7 @@ const GetAutomaticRes: FC<IGetAutomaticResProps> = ({
|
||||
mode: newValue.mode as ModelModeType,
|
||||
}
|
||||
setModel(newModel)
|
||||
localStorage.setItem('auto-gen-model', JSON.stringify(newModel))
|
||||
storage.set(STORAGE_KEYS.CONFIG.AUTO_GEN_MODEL, newModel)
|
||||
}, [model, setModel])
|
||||
|
||||
const handleCompletionParamsChange = useCallback((newParams: FormValue) => {
|
||||
@ -218,7 +216,7 @@ const GetAutomaticRes: FC<IGetAutomaticResProps> = ({
|
||||
completion_params: newParams as CompletionParams,
|
||||
}
|
||||
setModel(newModel)
|
||||
localStorage.setItem('auto-gen-model', JSON.stringify(newModel))
|
||||
storage.set(STORAGE_KEYS.CONFIG.AUTO_GEN_MODEL, newModel)
|
||||
}, [model, setModel])
|
||||
|
||||
const onGenerate = async () => {
|
||||
|
||||
@ -17,8 +17,10 @@ import Toast from '@/app/components/base/toast'
|
||||
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
|
||||
import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
|
||||
import ModelParameterModal from '@/app/components/header/account-setting/model-provider-page/model-parameter-modal'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { generateRule } from '@/service/debug'
|
||||
import { useGenerateRuleTemplate } from '@/service/use-apps'
|
||||
import { storage } from '@/utils/storage'
|
||||
import { languageMap } from '../../../../workflow/nodes/_base/components/editor/code-editor/index'
|
||||
import IdeaOutput from '../automatic/idea-output'
|
||||
import InstructionEditor from '../automatic/instruction-editor-in-workflow'
|
||||
@ -62,9 +64,7 @@ export const GetCodeGeneratorResModal: FC<IGetCodeGeneratorResProps> = (
|
||||
presence_penalty: 0,
|
||||
frequency_penalty: 0,
|
||||
}
|
||||
const localModel = localStorage.getItem('auto-gen-model')
|
||||
? JSON.parse(localStorage.getItem('auto-gen-model') as string) as Model
|
||||
: null
|
||||
const localModel = storage.get<Model>(STORAGE_KEYS.CONFIG.AUTO_GEN_MODEL)
|
||||
const [model, setModel] = React.useState<Model>(localModel || {
|
||||
name: '',
|
||||
provider: '',
|
||||
@ -115,7 +115,7 @@ export const GetCodeGeneratorResModal: FC<IGetCodeGeneratorResProps> = (
|
||||
mode: newValue.mode as ModelModeType,
|
||||
}
|
||||
setModel(newModel)
|
||||
localStorage.setItem('auto-gen-model', JSON.stringify(newModel))
|
||||
storage.set(STORAGE_KEYS.CONFIG.AUTO_GEN_MODEL, newModel)
|
||||
}, [model, setModel])
|
||||
|
||||
const handleCompletionParamsChange = useCallback((newParams: FormValue) => {
|
||||
@ -124,7 +124,7 @@ export const GetCodeGeneratorResModal: FC<IGetCodeGeneratorResProps> = (
|
||||
completion_params: newParams as CompletionParams,
|
||||
}
|
||||
setModel(newModel)
|
||||
localStorage.setItem('auto-gen-model', JSON.stringify(newModel))
|
||||
storage.set(STORAGE_KEYS.CONFIG.AUTO_GEN_MODEL, newModel)
|
||||
}, [model, setModel])
|
||||
|
||||
const onGenerate = async () => {
|
||||
@ -168,9 +168,7 @@ export const GetCodeGeneratorResModal: FC<IGetCodeGeneratorResProps> = (
|
||||
|
||||
useEffect(() => {
|
||||
if (defaultModel) {
|
||||
const localModel = localStorage.getItem('auto-gen-model')
|
||||
? JSON.parse(localStorage.getItem('auto-gen-model') || '')
|
||||
: null
|
||||
const localModel = storage.get<Model>(STORAGE_KEYS.CONFIG.AUTO_GEN_MODEL)
|
||||
if (localModel) {
|
||||
setModel({
|
||||
...localModel,
|
||||
|
||||
@ -14,27 +14,20 @@ import {
|
||||
} from 'react'
|
||||
import { SupportUploadFileTypes } from '@/app/components/workflow/types'
|
||||
import { DEFAULT_CHAT_PROMPT_CONFIG, DEFAULT_COMPLETION_PROMPT_CONFIG } from '@/config'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useDebugConfigurationContext } from '@/context/debug-configuration'
|
||||
import { useEventEmitterContextContext } from '@/context/event-emitter'
|
||||
import {
|
||||
AgentStrategy,
|
||||
} from '@/types/app'
|
||||
import { promptVariablesToUserInputsForm } from '@/utils/model-config'
|
||||
import { storage } from '@/utils/storage'
|
||||
import { ORCHESTRATE_CHANGED } from './types'
|
||||
|
||||
export const useDebugWithSingleOrMultipleModel = (appId: string) => {
|
||||
const localeDebugWithSingleOrMultipleModelConfigs = localStorage.getItem('app-debug-with-single-or-multiple-models')
|
||||
const localeDebugWithSingleOrMultipleModelConfigs = storage.get<DebugWithSingleOrMultipleModelConfigs>(STORAGE_KEYS.CONFIG.DEBUG_MODELS)
|
||||
|
||||
const debugWithSingleOrMultipleModelConfigs = useRef<DebugWithSingleOrMultipleModelConfigs>({})
|
||||
|
||||
if (localeDebugWithSingleOrMultipleModelConfigs) {
|
||||
try {
|
||||
debugWithSingleOrMultipleModelConfigs.current = JSON.parse(localeDebugWithSingleOrMultipleModelConfigs) || {}
|
||||
}
|
||||
catch (e) {
|
||||
console.error(e)
|
||||
}
|
||||
}
|
||||
const debugWithSingleOrMultipleModelConfigs = useRef<DebugWithSingleOrMultipleModelConfigs>(localeDebugWithSingleOrMultipleModelConfigs || {})
|
||||
|
||||
const [
|
||||
debugWithMultipleModel,
|
||||
@ -55,7 +48,7 @@ export const useDebugWithSingleOrMultipleModel = (appId: string) => {
|
||||
configs: modelConfigs,
|
||||
}
|
||||
debugWithSingleOrMultipleModelConfigs.current[appId] = value
|
||||
localStorage.setItem('app-debug-with-single-or-multiple-models', JSON.stringify(debugWithSingleOrMultipleModelConfigs.current))
|
||||
storage.set(STORAGE_KEYS.CONFIG.DEBUG_MODELS, debugWithSingleOrMultipleModelConfigs.current)
|
||||
setDebugWithMultipleModel(value.multiple)
|
||||
setMultipleModelConfigs(value.configs)
|
||||
}, [appId])
|
||||
|
||||
@ -16,7 +16,7 @@ import Loading from '@/app/components/base/loading'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
import CreateAppModal from '@/app/components/explore/create-app-modal'
|
||||
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
|
||||
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import { DSLImportMode } from '@/models/app'
|
||||
import { importDSL } from '@/service/apps'
|
||||
@ -25,6 +25,7 @@ import { useExploreAppList } from '@/service/use-explore'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import { getRedirection } from '@/utils/app-redirection'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { storage } from '@/utils/storage'
|
||||
import AppCard from '../app-card'
|
||||
import Sidebar, { AppCategories, AppCategoryLabel } from './sidebar'
|
||||
|
||||
@ -145,7 +146,7 @@ const Apps = ({
|
||||
onSuccess()
|
||||
if (app.app_id)
|
||||
await handleCheckPluginDependencies(app.app_id)
|
||||
localStorage.setItem(NEED_REFRESH_APP_LIST_KEY, '1')
|
||||
storage.set(STORAGE_KEYS.APP.NEED_REFRESH_LIST, '1')
|
||||
getRedirection(isCurrentWorkspaceEditor, { id: app.app_id!, mode }, push)
|
||||
}
|
||||
catch {
|
||||
|
||||
@ -4,7 +4,6 @@ import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { trackEvent } from '@/app/components/base/amplitude'
|
||||
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import { useProviderContext } from '@/context/provider-context'
|
||||
import { createApp } from '@/service/apps'
|
||||
@ -12,6 +11,8 @@ import { AppModeEnum } from '@/types/app'
|
||||
import { getRedirection } from '@/utils/app-redirection'
|
||||
import CreateAppModal from './index'
|
||||
|
||||
const NEED_REFRESH_APP_LIST_KEY_PREFIXED = 'v1:needRefreshAppList'
|
||||
|
||||
vi.mock('ahooks', () => ({
|
||||
useDebounceFn: (fn: (...args: any[]) => any) => {
|
||||
const run = (...args: any[]) => fn(...args)
|
||||
@ -142,7 +143,7 @@ describe('CreateAppModal', () => {
|
||||
expect(mockNotify).toHaveBeenCalledWith({ type: 'success', message: 'app.newApp.appCreated' })
|
||||
expect(onSuccess).toHaveBeenCalled()
|
||||
expect(onClose).toHaveBeenCalled()
|
||||
await waitFor(() => expect(mockSetItem).toHaveBeenCalledWith(NEED_REFRESH_APP_LIST_KEY, '1'))
|
||||
await waitFor(() => expect(mockSetItem).toHaveBeenCalledWith(NEED_REFRESH_APP_LIST_KEY_PREFIXED, '1'))
|
||||
await waitFor(() => expect(mockGetRedirection).toHaveBeenCalledWith(true, mockApp, mockPush))
|
||||
})
|
||||
|
||||
|
||||
@ -19,7 +19,7 @@ import Input from '@/app/components/base/input'
|
||||
import Textarea from '@/app/components/base/textarea'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import AppsFull from '@/app/components/billing/apps-full-in-dialog'
|
||||
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import { useProviderContext } from '@/context/provider-context'
|
||||
import useTheme from '@/hooks/use-theme'
|
||||
@ -27,6 +27,7 @@ import { createApp } from '@/service/apps'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import { getRedirection } from '@/utils/app-redirection'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { storage } from '@/utils/storage'
|
||||
import { basePath } from '@/utils/var'
|
||||
import AppIconPicker from '../../base/app-icon-picker'
|
||||
|
||||
@ -91,7 +92,7 @@ function CreateApp({ onClose, onSuccess, onCreateFromTemplate, defaultAppMode }:
|
||||
notify({ type: 'success', message: t('newApp.appCreated', { ns: 'app' }) })
|
||||
onSuccess()
|
||||
onClose()
|
||||
localStorage.setItem(NEED_REFRESH_APP_LIST_KEY, '1')
|
||||
storage.set(STORAGE_KEYS.APP.NEED_REFRESH_LIST, '1')
|
||||
getRedirection(isCurrentWorkspaceEditor, app, push)
|
||||
}
|
||||
catch (e: any) {
|
||||
|
||||
@ -15,7 +15,7 @@ import Modal from '@/app/components/base/modal'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import AppsFull from '@/app/components/billing/apps-full-in-dialog'
|
||||
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
|
||||
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import { useProviderContext } from '@/context/provider-context'
|
||||
import {
|
||||
@ -28,6 +28,7 @@ import {
|
||||
} from '@/service/apps'
|
||||
import { getRedirection } from '@/utils/app-redirection'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { storage } from '@/utils/storage'
|
||||
import Uploader from './uploader'
|
||||
|
||||
type CreateFromDSLModalProps = {
|
||||
@ -130,7 +131,7 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS
|
||||
message: t(status === DSLImportStatus.COMPLETED ? 'newApp.appCreated' : 'newApp.caution', { ns: 'app' }),
|
||||
children: status === DSLImportStatus.COMPLETED_WITH_WARNINGS && t('newApp.appCreateDSLWarning', { ns: 'app' }),
|
||||
})
|
||||
localStorage.setItem(NEED_REFRESH_APP_LIST_KEY, '1')
|
||||
storage.set(STORAGE_KEYS.APP.NEED_REFRESH_LIST, '1')
|
||||
if (app_id)
|
||||
await handleCheckPluginDependencies(app_id)
|
||||
getRedirection(isCurrentWorkspaceEditor, { id: app_id!, mode: app_mode }, push)
|
||||
@ -190,7 +191,7 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS
|
||||
})
|
||||
if (app_id)
|
||||
await handleCheckPluginDependencies(app_id)
|
||||
localStorage.setItem(NEED_REFRESH_APP_LIST_KEY, '1')
|
||||
storage.set(STORAGE_KEYS.APP.NEED_REFRESH_LIST, '1')
|
||||
getRedirection(isCurrentWorkspaceEditor, { id: app_id!, mode: app_mode }, push)
|
||||
}
|
||||
else if (status === DSLImportStatus.FAILED) {
|
||||
|
||||
@ -5,10 +5,11 @@ import * as React from 'react'
|
||||
import { useStore as useAppStore } from '@/app/components/app/store'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import { Plan } from '@/app/components/billing/type'
|
||||
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import SwitchAppModal from './index'
|
||||
|
||||
const NEED_REFRESH_APP_LIST_KEY_PREFIXED = 'v1:needRefreshAppList'
|
||||
|
||||
const mockPush = vi.fn()
|
||||
const mockReplace = vi.fn()
|
||||
vi.mock('next/navigation', () => ({
|
||||
@ -257,7 +258,7 @@ describe('SwitchAppModal', () => {
|
||||
expect(onSuccess).toHaveBeenCalledTimes(1)
|
||||
expect(onClose).toHaveBeenCalledTimes(1)
|
||||
expect(notify).toHaveBeenCalledWith({ type: 'success', message: 'app.newApp.appCreated' })
|
||||
expect(localStorage.setItem).toHaveBeenCalledWith(NEED_REFRESH_APP_LIST_KEY, '1')
|
||||
expect(localStorage.setItem).toHaveBeenCalledWith(NEED_REFRESH_APP_LIST_KEY_PREFIXED, '1')
|
||||
expect(mockPush).toHaveBeenCalledWith('/app/new-app-001/workflow')
|
||||
expect(mockReplace).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
@ -17,13 +17,14 @@ import Input from '@/app/components/base/input'
|
||||
import Modal from '@/app/components/base/modal'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import AppsFull from '@/app/components/billing/apps-full-in-dialog'
|
||||
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import { useProviderContext } from '@/context/provider-context'
|
||||
import { deleteApp, switchApp } from '@/service/apps'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import { getRedirection } from '@/utils/app-redirection'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { storage } from '@/utils/storage'
import AppIconPicker from '../../base/app-icon-picker'

type SwitchAppModalProps = {
@ -73,7 +74,7 @@ const SwitchAppModal = ({ show, appDetail, inAppDetail = false, onSuccess, onClo
setAppDetail()
if (removeOriginal)
await deleteApp(appDetail.id)
localStorage.setItem(NEED_REFRESH_APP_LIST_KEY, '1')
storage.set(STORAGE_KEYS.APP.NEED_REFRESH_LIST, '1')
getRedirection(
isCurrentWorkspaceEditor,
{

@ -20,7 +20,7 @@ import CustomPopover from '@/app/components/base/popover'
import TagSelector from '@/app/components/base/tag-management/selector'
import Toast, { ToastContext } from '@/app/components/base/toast'
import Tooltip from '@/app/components/base/tooltip'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { useAppContext } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { useProviderContext } from '@/context/provider-context'
@ -33,6 +33,7 @@ import { fetchWorkflowDraft } from '@/service/workflow'
import { AppModeEnum } from '@/types/app'
import { getRedirection } from '@/utils/app-redirection'
import { cn } from '@/utils/classnames'
import { storage } from '@/utils/storage'
import { formatTime } from '@/utils/time'
import { basePath } from '@/utils/var'

@ -144,7 +145,7 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
type: 'success',
message: t('newApp.appCreated', { ns: 'app' }),
})
localStorage.setItem(NEED_REFRESH_APP_LIST_KEY, '1')
storage.set(STORAGE_KEYS.APP.NEED_REFRESH_LIST, '1')
if (onRefresh)
onRefresh()
onPlanInfoChanged()

@ -434,13 +434,15 @@ describe('List', () => {
})

describe('Local Storage Refresh', () => {
it('should call refetch when refresh key is set in localStorage', () => {
localStorage.setItem('needRefreshAppList', '1')
it('should call refetch when refresh key is set in localStorage', async () => {
localStorage.setItem('v1:needRefreshAppList', '1')

render(<List />)

expect(mockRefetch).toHaveBeenCalled()
expect(localStorage.getItem('needRefreshAppList')).toBeNull()
await vi.waitFor(() => {
expect(mockRefetch).toHaveBeenCalled()
})
expect(localStorage.getItem('v1:needRefreshAppList')).toBeNull()
})
})

@ -22,13 +22,14 @@ import TabSliderNew from '@/app/components/base/tab-slider-new'
import TagFilter from '@/app/components/base/tag-management/filter'
import { useStore as useTagStore } from '@/app/components/base/tag-management/store'
import CheckboxWithLabel from '@/app/components/datasets/create/website/base/checkbox-with-label'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { useAppContext } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { CheckModal } from '@/hooks/use-pay'
import { useInfiniteAppList } from '@/service/use-apps'
import { AppModeEnum } from '@/types/app'
import { cn } from '@/utils/classnames'
import { storage } from '@/utils/storage'
import AppCard from './app-card'
import { AppCardSkeleton } from './app-card-skeleton'
import Empty from './empty'
@ -134,8 +135,8 @@ const List: FC<Props> = ({
]

useEffect(() => {
if (localStorage.getItem(NEED_REFRESH_APP_LIST_KEY) === '1') {
localStorage.removeItem(NEED_REFRESH_APP_LIST_KEY)
if (storage.get<string>(STORAGE_KEYS.APP.NEED_REFRESH_LIST) === '1') {
storage.remove(STORAGE_KEYS.APP.NEED_REFRESH_LIST)
refetch()
}
}, [refetch])

@ -11,9 +11,10 @@ import {
generationConversationName,
} from '@/service/share'
import { shareQueryKeys } from '@/service/use-share'
import { CONVERSATION_ID_INFO } from '../constants'
import { useChatWithHistory } from './hooks'

const CONVERSATION_ID_INFO_KEY = 'v1:conversationIdInfo'

vi.mock('@/hooks/use-app-favicon', () => ({
useAppFavicon: vi.fn(),
}))
@ -120,14 +121,14 @@ const setConversationIdInfo = (appId: string, conversationId: string) => {
'DEFAULT': conversationId,
},
}
localStorage.setItem(CONVERSATION_ID_INFO, JSON.stringify(value))
localStorage.setItem(CONVERSATION_ID_INFO_KEY, JSON.stringify(value))
}

// Scenario: useChatWithHistory integrates share queries for conversations and chat list.
describe('useChatWithHistory', () => {
beforeEach(() => {
vi.clearAllMocks()
localStorage.removeItem(CONVERSATION_ID_INFO)
localStorage.removeItem(CONVERSATION_ID_INFO_KEY)
mockStoreState.appInfo = {
app_id: 'app-1',
custom_config: null,
@ -144,7 +145,7 @@ describe('useChatWithHistory', () => {
})

afterEach(() => {
localStorage.removeItem(CONVERSATION_ID_INFO)
localStorage.removeItem(CONVERSATION_ID_INFO_KEY)
})

// Scenario: share query results populate conversation lists and trigger chat list fetch.
@ -268,7 +269,7 @@ describe('useChatWithHistory', () => {

// Assert
await waitFor(() => {
const storedValue = localStorage.getItem(CONVERSATION_ID_INFO)
const storedValue = localStorage.getItem(CONVERSATION_ID_INFO_KEY)
const parsed = storedValue ? JSON.parse(storedValue) : {}
const storedUserId = parsed['app-1']?.['user-1']
const storedDefaultId = parsed['app-1']?.DEFAULT

@ -23,6 +23,7 @@ import { useTranslation } from 'react-i18next'
import { getProcessedFilesFromResponse } from '@/app/components/base/file-uploader/utils'
import { useToastContext } from '@/app/components/base/toast'
import { InputVarType } from '@/app/components/workflow/types'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { useWebAppStore } from '@/context/web-app-context'
import { useAppFavicon } from '@/hooks/use-app-favicon'
import { changeLanguage } from '@/i18n-config/client'
@ -41,6 +42,7 @@ import {
useShareConversations,
} from '@/service/use-share'
import { TransferMethod } from '@/types/app'
import { storage } from '@/utils/storage'
import { addFileInfos, sortAgentSorts } from '../../../tools/utils'
import { CONVERSATION_ID_INFO } from '../constants'
import { buildChatItemTree, getProcessedSystemVariablesFromUrlParams, getRawInputsFromUrlParams, getRawUserVariablesFromUrlParams } from '../utils'
@ -128,27 +130,15 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => {

const [sidebarCollapseState, setSidebarCollapseState] = useState<boolean>(() => {
if (typeof window !== 'undefined') {
try {
const localState = localStorage.getItem('webappSidebarCollapse')
return localState === 'collapsed'
}
catch {
// localStorage may be disabled in private browsing mode or by security settings
// fallback to default value
return false
}
const localState = storage.get<string>(STORAGE_KEYS.APP.SIDEBAR_COLLAPSE)
return localState === 'collapsed'
}
return false
})
const handleSidebarCollapse = useCallback((state: boolean) => {
if (appId) {
setSidebarCollapseState(state)
try {
localStorage.setItem('webappSidebarCollapse', state ? 'collapsed' : 'expanded')
}
catch {
// localStorage may be disabled, continue without persisting state
}
storage.set(STORAGE_KEYS.APP.SIDEBAR_COLLAPSE, state ? 'collapsed' : 'expanded')
}
}, [appId, setSidebarCollapseState])
const [conversationIdInfo, setConversationIdInfo] = useLocalStorageState<Record<string, Record<string, string>>>(CONVERSATION_ID_INFO, {

@ -1,2 +1,2 @@
export const CONVERSATION_ID_INFO = 'conversationIdInfo'
export const CONVERSATION_ID_INFO = 'v1:conversationIdInfo'
export const UUID_NIL = '00000000-0000-0000-0000-000000000000'

@ -11,9 +11,10 @@ import {
generationConversationName,
} from '@/service/share'
import { shareQueryKeys } from '@/service/use-share'
import { CONVERSATION_ID_INFO } from '../constants'
import { useEmbeddedChatbot } from './hooks'

const CONVERSATION_ID_INFO_KEY = 'v1:conversationIdInfo'

vi.mock('@/i18n-config/client', () => ({
changeLanguage: vi.fn().mockResolvedValue(undefined),
}))
@ -113,7 +114,7 @@ const createConversationData = (overrides: Partial<AppConversationData> = {}): A
describe('useEmbeddedChatbot', () => {
beforeEach(() => {
vi.clearAllMocks()
localStorage.removeItem(CONVERSATION_ID_INFO)
localStorage.removeItem(CONVERSATION_ID_INFO_KEY)
mockStoreState.appInfo = {
app_id: 'app-1',
custom_config: null,
@ -131,7 +132,7 @@ describe('useEmbeddedChatbot', () => {
})

afterEach(() => {
localStorage.removeItem(CONVERSATION_ID_INFO)
localStorage.removeItem(CONVERSATION_ID_INFO_KEY)
})

// Scenario: share query results populate conversation lists and trigger chat list fetch.
@ -251,7 +252,7 @@ describe('useEmbeddedChatbot', () => {

// Assert
await waitFor(() => {
const storedValue = localStorage.getItem(CONVERSATION_ID_INFO)
const storedValue = localStorage.getItem(CONVERSATION_ID_INFO_KEY)
const parsed = storedValue ? JSON.parse(storedValue) : {}
const storedUserId = parsed['app-1']?.['embedded-user-1']
const storedDefaultId = parsed['app-1']?.DEFAULT
@ -105,7 +105,7 @@ describe('PlanComp', () => {

await waitFor(() => expect(mutateAsyncMock).toHaveBeenCalled())
await waitFor(() => expect(push).toHaveBeenCalledWith('/education-apply?token=token'))
expect(localStorage.removeItem).toHaveBeenCalledWith(EDUCATION_VERIFYING_LOCALSTORAGE_ITEM)
expect(localStorage.removeItem).toHaveBeenCalledWith(`v1:${EDUCATION_VERIFYING_LOCALSTORAGE_ITEM}`)
})

it('shows modal when education verify fails', async () => {

@ -14,12 +14,13 @@ import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import { ApiAggregate, TriggerAll } from '@/app/components/base/icons/src/vender/workflow'
import UsageInfo from '@/app/components/billing/usage-info'
import { EDUCATION_VERIFYING_LOCALSTORAGE_ITEM } from '@/app/education-apply/constants'
import VerifyStateModal from '@/app/education-apply/verify-state-modal'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { useAppContext } from '@/context/app-context'
import { useModalContextSelector } from '@/context/modal-context'
import { useProviderContext } from '@/context/provider-context'
import { useEducationVerify } from '@/service/use-education'
import { storage } from '@/utils/storage'
import { getDaysUntilEndOfMonth } from '@/utils/time'
import { Loading } from '../../base/icons/src/public/thought'
import { NUM_INFINITE } from '../config'
@ -72,7 +73,7 @@ const PlanComp: FC<Props> = ({
if (isPending)
return
mutateAsync().then((res) => {
localStorage.removeItem(EDUCATION_VERIFYING_LOCALSTORAGE_ITEM)
storage.remove(STORAGE_KEYS.EDUCATION.VERIFYING)
if (unmountedRef.current)
return
router.push(`/education-apply?token=${res.token}`)

@ -4,8 +4,9 @@ import { useBoolean } from 'ahooks'
import { useCallback, useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Toast from '@/app/components/base/toast'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { useBuiltInMetaDataFields, useCreateMetaData, useDatasetMetaData, useDeleteMetaData, useRenameMeta, useUpdateBuiltInStatus } from '@/service/knowledge/use-metadata'
import { isShowManageMetadataLocalStorageKey } from '../types'
import { storage } from '@/utils/storage'
import useCheckMetadataName from './use-check-metadata-name'

const useEditDatasetMetadata = ({
@ -24,10 +25,10 @@ const useEditDatasetMetadata = ({
}] = useBoolean(false)

useEffect(() => {
const isShowManageMetadata = localStorage.getItem(isShowManageMetadataLocalStorageKey)
const isShowManageMetadata = storage.get<string>(STORAGE_KEYS.UI.SHOW_MANAGE_METADATA)
if (isShowManageMetadata) {
showEditModal()
localStorage.removeItem(isShowManageMetadataLocalStorageKey)
storage.remove(STORAGE_KEYS.UI.SHOW_MANAGE_METADATA)
}
}, [])

@ -7,12 +7,14 @@ import * as React from 'react'
import { useTranslation } from 'react-i18next'
import Divider from '@/app/components/base/divider'
import Tooltip from '@/app/components/base/tooltip'
import { STORAGE_KEYS } from '@/config/storage-keys'
import useTimestamp from '@/hooks/use-timestamp'
import { cn } from '@/utils/classnames'
import { storage } from '@/utils/storage'
import AddMetadataButton from '../add-metadata-button'
import InputCombined from '../edit-metadata-batch/input-combined'
import SelectMetadataModal from '../metadata-dataset/select-metadata-modal'
import { DataType, isShowManageMetadataLocalStorageKey } from '../types'
import { DataType } from '../types'
import Field from './field'

type Props = {
@ -53,7 +55,7 @@ const InfoGroup: FC<Props> = ({
const { formatTime: formatTimestamp } = useTimestamp()

const handleMangeMetadata = () => {
localStorage.setItem(isShowManageMetadataLocalStorageKey, 'true')
storage.set(STORAGE_KEYS.UI.SHOW_MANAGE_METADATA, 'true')
router.push(`/datasets/${dataSetId}/documents`)
}

@ -23,6 +23,7 @@ import PremiumBadge from '@/app/components/base/premium-badge'
import ThemeSwitcher from '@/app/components/base/theme-switcher'
import { ACCOUNT_SETTING_TAB } from '@/app/components/header/account-setting/constants'
import { IS_CLOUD_EDITION } from '@/config'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { useAppContext } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { useDocLink } from '@/context/i18n'
@ -30,6 +31,7 @@ import { useModalContext } from '@/context/modal-context'
import { useProviderContext } from '@/context/provider-context'
import { useLogout } from '@/service/use-common'
import { cn } from '@/utils/classnames'
import { storage } from '@/utils/storage'
import AccountAbout from '../account-about'
import GithubStar from '../github-star'
import Indicator from '../indicator'
@ -55,13 +57,13 @@ export default function AppSelector() {
const handleLogout = async () => {
await logout()
resetUser()
localStorage.removeItem('setup_status')
storage.remove(STORAGE_KEYS.CONFIG.SETUP_STATUS)
// Tokens are now stored in cookies and cleared by backend

// To avoid use other account's education notice info
localStorage.removeItem('education-reverify-prev-expire-at')
localStorage.removeItem('education-reverify-has-noticed')
localStorage.removeItem('education-expired-has-noticed')
storage.remove(STORAGE_KEYS.EDUCATION.REVERIFY_PREV_EXPIRE_AT)
storage.remove(STORAGE_KEYS.EDUCATION.REVERIFY_HAS_NOTICED)
storage.remove(STORAGE_KEYS.EDUCATION.EXPIRED_HAS_NOTICED)

router.push('/signin')
}

@ -2,8 +2,10 @@
import { usePathname } from 'next/navigation'
import * as React from 'react'
import { useState } from 'react'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import { cn } from '@/utils/classnames'
import { storage } from '@/utils/storage'
import s from './index.module.css'

type HeaderWrapperProps = {
@ -18,7 +20,7 @@ const HeaderWrapper = ({
// Check if the current path is a workflow canvas & fullscreen
const inWorkflowCanvas = pathname.endsWith('/workflow')
const isPipelineCanvas = pathname.endsWith('/pipeline')
const workflowCanvasMaximize = localStorage.getItem('workflow-canvas-maximize') === 'true'
const workflowCanvasMaximize = storage.getBoolean(STORAGE_KEYS.WORKFLOW.CANVAS_MAXIMIZE, false)
const [hideHeader, setHideHeader] = useState(workflowCanvasMaximize)
const { eventEmitter } = useEventEmitterContextContext()

@ -1,18 +1,20 @@
import { useState } from 'react'
import { X } from '@/app/components/base/icons/src/vender/line/general'
import { useLanguage } from '@/app/components/header/account-setting/model-provider-page/hooks'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { NOTICE_I18N } from '@/i18n-config/language'
import { storage } from '@/utils/storage'

const MaintenanceNotice = () => {
const locale = useLanguage()

const [showNotice, setShowNotice] = useState(() => localStorage.getItem('hide-maintenance-notice') !== '1')
const [showNotice, setShowNotice] = useState(() => storage.get<string>(STORAGE_KEYS.UI.HIDE_MAINTENANCE_NOTICE) !== '1')
const handleJumpNotice = () => {
window.open(NOTICE_I18N.href, '_blank')
}

const handleCloseNotice = () => {
localStorage.setItem('hide-maintenance-notice', '1')
storage.set(STORAGE_KEYS.UI.HIDE_MAINTENANCE_NOTICE, '1')
setShowNotice(false)
}

@ -1,42 +1,3 @@
'use client'

import { SerwistProvider } from '@serwist/turbopack/react'
import { useEffect } from 'react'
import { IS_DEV } from '@/config'
import { isClient } from '@/utils/client'

export function PWAProvider({ children }: { children: React.ReactNode }) {
if (IS_DEV) {
return <DisabledPWAProvider>{children}</DisabledPWAProvider>
}

const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
const swUrl = `${basePath}/serwist/sw.js`

return (
<SerwistProvider swUrl={swUrl}>
{children}
</SerwistProvider>
)
}

function DisabledPWAProvider({ children }: { children: React.ReactNode }) {
useEffect(() => {
if (isClient && 'serviceWorker' in navigator) {
navigator.serviceWorker.getRegistrations()
.then((registrations) => {
registrations.forEach((registration) => {
registration.unregister()
.catch((error) => {
console.error('Error unregistering service worker:', error)
})
})
})
.catch((error) => {
console.error('Error unregistering service workers:', error)
})
}
}, [])

return <>{children}</>
}
export { SerwistProvider } from '@serwist/turbopack/react'

@ -2,6 +2,8 @@
import { useCountDown } from 'ahooks'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { storage } from '@/utils/storage'

export const COUNT_DOWN_TIME_MS = 59000
export const COUNT_DOWN_KEY = 'leftTime'
@ -12,23 +14,23 @@ type CountdownProps = {

export default function Countdown({ onResend }: CountdownProps) {
const { t } = useTranslation()
const [leftTime, setLeftTime] = useState(() => Number(localStorage.getItem(COUNT_DOWN_KEY) || COUNT_DOWN_TIME_MS))
const [leftTime, setLeftTime] = useState(() => storage.getNumber(STORAGE_KEYS.UI.COUNTDOWN_LEFT_TIME, COUNT_DOWN_TIME_MS))
const [time] = useCountDown({
leftTime,
onEnd: () => {
setLeftTime(0)
localStorage.removeItem(COUNT_DOWN_KEY)
storage.remove(STORAGE_KEYS.UI.COUNTDOWN_LEFT_TIME)
},
})

const resend = async function () {
setLeftTime(COUNT_DOWN_TIME_MS)
localStorage.setItem(COUNT_DOWN_KEY, `${COUNT_DOWN_TIME_MS}`)
storage.set(STORAGE_KEYS.UI.COUNTDOWN_LEFT_TIME, COUNT_DOWN_TIME_MS)
onResend?.()
}

useEffect(() => {
localStorage.setItem(COUNT_DOWN_KEY, `${time}`)
storage.set(STORAGE_KEYS.UI.COUNTDOWN_LEFT_TIME, time)
}, [time])

return (
@ -12,9 +12,11 @@ import Loading from '@/app/components/base/loading'
import Tooltip from '@/app/components/base/tooltip'
import InstallFromMarketplace from '@/app/components/plugins/install-plugin/install-from-marketplace'
import Action from '@/app/components/workflow/block-selector/market-place-plugin/action'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { useGetLanguage } from '@/context/i18n'
import { isServer } from '@/utils/client'
import { formatNumber } from '@/utils/format'
import { storage } from '@/utils/storage'
import { getMarketplaceUrl } from '@/utils/var'
import BlockIcon from '../block-icon'
import { BlockEnum } from '../types'
@ -34,8 +36,6 @@ type FeaturedToolsProps = {
onInstallSuccess?: () => void
}

const STORAGE_KEY = 'workflow_tools_featured_collapsed'

const FeaturedTools = ({
plugins,
providerMap,
@ -50,14 +50,14 @@ const FeaturedTools = ({
const [isCollapsed, setIsCollapsed] = useState<boolean>(() => {
if (isServer)
return false
const stored = window.localStorage.getItem(STORAGE_KEY)
const stored = storage.get<string>(STORAGE_KEYS.WORKFLOW.TOOLS_FEATURED_COLLAPSED)
return stored === 'true'
})

useEffect(() => {
if (isServer)
return
const stored = window.localStorage.getItem(STORAGE_KEY)
const stored = storage.get<string>(STORAGE_KEYS.WORKFLOW.TOOLS_FEATURED_COLLAPSED)
if (stored !== null)
setIsCollapsed(stored === 'true')
}, [])
@ -65,7 +65,7 @@ const FeaturedTools = ({
useEffect(() => {
if (isServer)
return
window.localStorage.setItem(STORAGE_KEY, String(isCollapsed))
storage.set(STORAGE_KEYS.WORKFLOW.TOOLS_FEATURED_COLLAPSED, String(isCollapsed))
}, [isCollapsed])

useEffect(() => {

@ -11,9 +11,11 @@ import Loading from '@/app/components/base/loading'
import Tooltip from '@/app/components/base/tooltip'
import InstallFromMarketplace from '@/app/components/plugins/install-plugin/install-from-marketplace'
import Action from '@/app/components/workflow/block-selector/market-place-plugin/action'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { useGetLanguage } from '@/context/i18n'
import { isServer } from '@/utils/client'
import { formatNumber } from '@/utils/format'
import { storage } from '@/utils/storage'
import { getMarketplaceUrl } from '@/utils/var'
import BlockIcon from '../block-icon'
import { BlockEnum } from '../types'
@ -30,8 +32,6 @@ type FeaturedTriggersProps = {
onInstallSuccess?: () => void | Promise<void>
}

const STORAGE_KEY = 'workflow_triggers_featured_collapsed'

const FeaturedTriggers = ({
plugins,
providerMap,
@ -45,14 +45,14 @@ const FeaturedTriggers = ({
const [isCollapsed, setIsCollapsed] = useState<boolean>(() => {
if (isServer)
return false
const stored = window.localStorage.getItem(STORAGE_KEY)
const stored = storage.get<string>(STORAGE_KEYS.WORKFLOW.TRIGGERS_FEATURED_COLLAPSED)
return stored === 'true'
})

useEffect(() => {
if (isServer)
return
const stored = window.localStorage.getItem(STORAGE_KEY)
const stored = storage.get<string>(STORAGE_KEYS.WORKFLOW.TRIGGERS_FEATURED_COLLAPSED)
if (stored !== null)
setIsCollapsed(stored === 'true')
}, [])
@ -60,7 +60,7 @@ const FeaturedTriggers = ({
useEffect(() => {
if (isServer)
return
window.localStorage.setItem(STORAGE_KEY, String(isCollapsed))
storage.set(STORAGE_KEYS.WORKFLOW.TRIGGERS_FEATURED_COLLAPSED, String(isCollapsed))
}, [isCollapsed])

useEffect(() => {

@ -10,8 +10,10 @@ import { Trans, useTranslation } from 'react-i18next'
import { ArrowDownRoundFill } from '@/app/components/base/icons/src/vender/solid/arrows'
import Loading from '@/app/components/base/loading'
import { getFormattedPlugin } from '@/app/components/plugins/marketplace/utils'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { useRAGRecommendedPlugins } from '@/service/use-tools'
import { isServer } from '@/utils/client'
import { storage } from '@/utils/storage'
import { getMarketplaceUrl } from '@/utils/var'
import List from './list'

@ -21,8 +23,6 @@ type RAGToolRecommendationsProps = {
onTagsChange: Dispatch<SetStateAction<string[]>>
}

const STORAGE_KEY = 'workflow_rag_recommendations_collapsed'

const RAGToolRecommendations = ({
viewType,
onSelect,
@ -32,14 +32,14 @@ const RAGToolRecommendations = ({
const [isCollapsed, setIsCollapsed] = useState<boolean>(() => {
if (isServer)
return false
const stored = window.localStorage.getItem(STORAGE_KEY)
const stored = storage.get<string>(STORAGE_KEYS.WORKFLOW.RAG_RECOMMENDATIONS_COLLAPSED)
return stored === 'true'
})

useEffect(() => {
if (isServer)
return
const stored = window.localStorage.getItem(STORAGE_KEY)
const stored = storage.get<string>(STORAGE_KEYS.WORKFLOW.RAG_RECOMMENDATIONS_COLLAPSED)
if (stored !== null)
setIsCollapsed(stored === 'true')
}, [])
@ -47,7 +47,7 @@ const RAGToolRecommendations = ({
useEffect(() => {
if (isServer)
return
window.localStorage.setItem(STORAGE_KEY, String(isCollapsed))
storage.set(STORAGE_KEYS.WORKFLOW.RAG_RECOMMENDATIONS_COLLAPSED, String(isCollapsed))
}, [isCollapsed])

const {

@ -5,7 +5,9 @@ import {
useCallback,
} from 'react'
import { useReactFlow, useStoreApi } from 'reactflow'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import { storage } from '@/utils/storage'
import {
CUSTOM_NODE,
NODE_LAYOUT_HORIZONTAL_PADDING,
@ -342,7 +344,7 @@ export const useWorkflowCanvasMaximize = () => {
return

setMaximizeCanvas(!maximizeCanvas)
localStorage.setItem('workflow-canvas-maximize', String(!maximizeCanvas))
storage.set(STORAGE_KEYS.WORKFLOW.CANVAS_MAXIMIZE, !maximizeCanvas)
eventEmitter?.emit({
type: 'workflow-canvas-maximize',
payload: !maximizeCanvas,

@ -26,12 +26,12 @@ const Wrap = ({
isExpand,
children,
}: Props) => {
const panelWidth = useStore(state => state.panelWidth)
const nodePanelWidth = useStore(state => state.nodePanelWidth)
const wrapStyle = (() => {
if (isExpand) {
return {
...style,
width: panelWidth - 1,
width: nodePanelWidth - 1,
}
}
return style

@ -44,19 +44,11 @@ const NextStep = ({
const connectedEdges = getConnectedEdges([selectedNode] as Node[], edges).filter(edge => edge.source === selectedNode!.id)

const list = useMemo(() => {
const resolveNextNodes = (connected: typeof connectedEdges) => {
return connected.reduce<Node[]>((acc, edge) => {
const nextNode = outgoers.find(outgoer => outgoer.id === edge.target)
if (nextNode)
acc.push(nextNode)
return acc
}, [])
}
let items = []
if (branches?.length) {
items = branches.map((branch, index) => {
const connected = connectedEdges.filter(edge => edge.sourceHandle === branch.id)
const nextNodes = resolveNextNodes(connected)
const nextNodes = connected.map(edge => outgoers.find(outgoer => outgoer.id === edge.target)!)

return {
branch: {
@ -69,7 +61,7 @@ const NextStep = ({
}
else {
const connected = connectedEdges.filter(edge => edge.sourceHandle === 'source')
const nextNodes = resolveNextNodes(connected)
const nextNodes = connected.map(edge => outgoers.find(outgoer => outgoer.id === edge.target)!)

items = [{
branch: {
@ -81,7 +73,7 @@ const NextStep = ({

if (data.error_strategy === ErrorHandleTypeEnum.failBranch && hasErrorHandleNode(data.type)) {
const connected = connectedEdges.filter(edge => edge.sourceHandle === ErrorHandleTypeEnum.failBranch)
const nextNodes = resolveNextNodes(connected)
const nextNodes = connected.map(edge => outgoers.find(outgoer => outgoer.id === edge.target)!)

items.push({
branch: {

@ -26,7 +26,7 @@ const createPanelWidthManager = (storageKey: string) => {

describe('Workflow Panel Width Persistence', () => {
describe('Node Panel Width Management', () => {
const storageKey = 'workflow-node-panel-width'
const storageKey = 'v1:workflow-node-panel-width'

it('should save user resize to localStorage', () => {
const manager = createPanelWidthManager(storageKey)
@ -74,7 +74,7 @@ describe('Workflow Panel Width Persistence', () => {

describe('Bug Scenario Reproduction', () => {
it('should reproduce original bug behavior (for comparison)', () => {
const storageKey = 'workflow-node-panel-width'
const storageKey = 'v1:workflow-node-panel-width'

// Original buggy behavior - always saves regardless of source
const buggyUpdate = (width: number) => {
@ -89,7 +89,7 @@ describe('Workflow Panel Width Persistence', () => {
})

it('should verify fix prevents localStorage pollution', () => {
const storageKey = 'workflow-node-panel-width'
const storageKey = 'v1:workflow-node-panel-width'
const manager = createPanelWidthManager(storageKey)

localStorage.setItem(storageKey, '500') // User preference
@ -101,7 +101,7 @@ describe('Workflow Panel Width Persistence', () => {

describe('Edge Cases', () => {
it('should handle multiple rapid operations correctly', () => {
const manager = createPanelWidthManager('workflow-node-panel-width')
const manager = createPanelWidthManager('v1:workflow-node-panel-width')

// Rapid system adjustments
manager.updateWidth(300, 'system')
@ -112,12 +112,12 @@ describe('Workflow Panel Width Persistence', () => {
manager.updateWidth(550, 'user')

expect(localStorage.setItem).toHaveBeenCalledTimes(1)
expect(localStorage.setItem).toHaveBeenCalledWith('workflow-node-panel-width', '550')
expect(localStorage.setItem).toHaveBeenCalledWith('v1:workflow-node-panel-width', '550')
})

it('should handle corrupted localStorage gracefully', () => {
localStorage.setItem('workflow-node-panel-width', '150') // Below minimum
const manager = createPanelWidthManager('workflow-node-panel-width')
localStorage.setItem('v1:workflow-node-panel-width', '150') // Below minimum
const manager = createPanelWidthManager('v1:workflow-node-panel-width')

const storedWidth = manager.getStoredWidth()
expect(storedWidth).toBe(150) // Returns raw value
@ -125,13 +125,13 @@ describe('Workflow Panel Width Persistence', () => {
// User can correct the preference
const correctedWidth = manager.updateWidth(500, 'user')
expect(correctedWidth).toBe(500)
expect(localStorage.getItem('workflow-node-panel-width')).toBe('500')
expect(localStorage.getItem('v1:workflow-node-panel-width')).toBe('500')
})
})

describe('TypeScript Type Safety', () => {
it('should enforce source parameter type', () => {
const manager = createPanelWidthManager('workflow-node-panel-width')
const manager = createPanelWidthManager('v1:workflow-node-panel-width')

// Valid source values
manager.updateWidth(500, 'user')
@ -59,12 +59,14 @@ import {
hasRetryNode,
isSupportCustomRunForm,
} from '@/app/components/workflow/utils'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { useModalContext } from '@/context/modal-context'
import { useAllBuiltInTools } from '@/service/use-tools'
import { useAllTriggerPlugins } from '@/service/use-triggers'
import { FlowType } from '@/types/common'
import { canFindTool } from '@/utils'
import { cn } from '@/utils/classnames'
import { storage } from '@/utils/storage'
import { useResizePanel } from '../../hooks/use-resize-panel'
import BeforeRunForm from '../before-run-form'
import PanelWrap from '../before-run-form/panel-wrap'
@ -137,7 +139,7 @@ const BasePanel: FC<BasePanelProps> = ({
const newValue = Math.max(400, Math.min(width, maxNodePanelWidth))

if (source === 'user')
localStorage.setItem('workflow-node-panel-width', `${newValue}`)
storage.set(STORAGE_KEYS.WORKFLOW.NODE_PANEL_WIDTH, newValue)

setNodePanelWidth(newValue)
}, [maxNodePanelWidth, setNodePanelWidth])

@ -12,10 +12,12 @@ import {
import Toast from '@/app/components/base/toast'
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
import { STORAGE_KEYS } from '@/config/storage-keys'
import useTheme from '@/hooks/use-theme'
import { useGenerateStructuredOutputRules } from '@/service/use-common'
import { ModelModeType, Theme } from '@/types/app'
import { cn } from '@/utils/classnames'
import { storage } from '@/utils/storage'
import { useMittContext } from '../visual-editor/context'
import { useVisualEditorStore } from '../visual-editor/store'
import { SchemaGeneratorDark, SchemaGeneratorLight } from './assets'
@ -36,9 +38,7 @@ const JsonSchemaGenerator: FC<JsonSchemaGeneratorProps> = ({
onApply,
crossAxisOffset,
}) => {
const localModel = localStorage.getItem('auto-gen-model')
? JSON.parse(localStorage.getItem('auto-gen-model') as string) as Model
: null
const localModel = storage.get<Model>(STORAGE_KEYS.CONFIG.AUTO_GEN_MODEL)
const [open, setOpen] = useState(false)
const [view, setView] = useState(GeneratorView.promptEditor)
const [model, setModel] = useState<Model>(localModel || {
@ -60,9 +60,7 @@ const JsonSchemaGenerator: FC<JsonSchemaGeneratorProps> = ({

useEffect(() => {
if (defaultModel) {
const localModel = localStorage.getItem('auto-gen-model')
? JSON.parse(localStorage.getItem('auto-gen-model') || '')
: null
const localModel = storage.get<Model>(STORAGE_KEYS.CONFIG.AUTO_GEN_MODEL)
if (localModel) {
setModel(localModel)
}
@ -95,7 +93,7 @@ const JsonSchemaGenerator: FC<JsonSchemaGeneratorProps> = ({
mode: newValue.mode as ModelModeType,
}
setModel(newModel)
localStorage.setItem('auto-gen-model', JSON.stringify(newModel))
storage.set(STORAGE_KEYS.CONFIG.AUTO_GEN_MODEL, newModel)
}, [model, setModel])

const handleCompletionParamsChange = useCallback((newParams: FormValue) => {
@ -104,7 +102,7 @@ const JsonSchemaGenerator: FC<JsonSchemaGeneratorProps> = ({
completion_params: newParams as CompletionParams,
}
setModel(newModel)
localStorage.setItem('auto-gen-model', JSON.stringify(newModel))
storage.set(STORAGE_KEYS.CONFIG.AUTO_GEN_MODEL, newModel)
}, [model, setModel])

const { mutateAsync: generateStructuredOutputRules, isPending: isGenerating } = useGenerateStructuredOutputRules()

@ -27,7 +27,7 @@ const createMockLocalStorage = () => {

// Preview panel width logic
const createPreviewPanelManager = () => {
const storageKey = 'debug-and-preview-panel-width'
const storageKey = 'v1:debug-and-preview-panel-width'

return {
updateWidth: (width: number, source: PanelWidthSource = 'user') => {
@ -63,7 +63,7 @@ describe('Debug and Preview Panel Width Persistence', () => {
const result = manager.updateWidth(450, 'user')

expect(result).toBe(450)
expect(localStorage.setItem).toHaveBeenCalledWith('debug-and-preview-panel-width', '450')
expect(localStorage.setItem).toHaveBeenCalledWith('v1:debug-and-preview-panel-width', '450')
})

it('should not save system compression to localStorage', () => {
@ -80,17 +80,17 @@ describe('Debug and Preview Panel Width Persistence', () => {

// Both user and system operations should behave consistently
manager.updateWidth(500, 'user')
expect(localStorage.setItem).toHaveBeenCalledWith('debug-and-preview-panel-width', '500')
expect(localStorage.setItem).toHaveBeenCalledWith('v1:debug-and-preview-panel-width', '500')

manager.updateWidth(200, 'system')
expect(localStorage.getItem('debug-and-preview-panel-width')).toBe('500')
expect(localStorage.getItem('v1:debug-and-preview-panel-width')).toBe('500')
})
})

describe('Dual Panel Scenario', () => {
it('should maintain independence from Node Panel', () => {
localStorage.setItem('workflow-node-panel-width', '600')
localStorage.setItem('debug-and-preview-panel-width', '450')
localStorage.setItem('v1:workflow-node-panel-width', '600')
localStorage.setItem('v1:debug-and-preview-panel-width', '450')

const manager = createPreviewPanelManager()

@ -98,8 +98,8 @@ describe('Debug and Preview Panel Width Persistence', () => {
manager.updateWidth(200, 'system')

// Only preview panel storage key should be unaffected
expect(localStorage.getItem('debug-and-preview-panel-width')).toBe('450')
expect(localStorage.getItem('workflow-node-panel-width')).toBe('600')
expect(localStorage.getItem('v1:debug-and-preview-panel-width')).toBe('450')
expect(localStorage.getItem('v1:workflow-node-panel-width')).toBe('600')
})

it('should handle F12 scenario consistently', () => {
@ -107,13 +107,13 @@ describe('Debug and Preview Panel Width Persistence', () => {

// User sets preference
manager.updateWidth(500, 'user')
expect(localStorage.getItem('debug-and-preview-panel-width')).toBe('500')
expect(localStorage.getItem('v1:debug-and-preview-panel-width')).toBe('500')

// F12 opens causing viewport compression
manager.updateWidth(180, 'system')

// User preference preserved
expect(localStorage.getItem('debug-and-preview-panel-width')).toBe('500')
expect(localStorage.getItem('v1:debug-and-preview-panel-width')).toBe('500')
})
})

@ -124,7 +124,7 @@ describe('Debug and Preview Panel Width Persistence', () => {
// Same 400px minimum as Node Panel
const result = manager.updateWidth(300, 'user')
expect(result).toBe(400)
expect(localStorage.setItem).toHaveBeenCalledWith('debug-and-preview-panel-width', '400')
expect(localStorage.setItem).toHaveBeenCalledWith('v1:debug-and-preview-panel-width', '400')
})

it('should use same source parameter pattern', () => {
@ -132,7 +132,7 @@ describe('Debug and Preview Panel Width Persistence', () => {

// Default to 'user' when source not specified
manager.updateWidth(500)
expect(localStorage.setItem).toHaveBeenCalledWith('debug-and-preview-panel-width', '500')
expect(localStorage.setItem).toHaveBeenCalledWith('v1:debug-and-preview-panel-width', '500')

// Explicit 'system' source
manager.updateWidth(300, 'system')

@ -18,7 +18,9 @@ import Tooltip from '@/app/components/base/tooltip'
import { useEdgesInteractionsWithoutSync } from '@/app/components/workflow/hooks/use-edges-interactions-without-sync'
import { useNodesInteractionsWithoutSync } from '@/app/components/workflow/hooks/use-nodes-interactions-without-sync'
import { useStore } from '@/app/components/workflow/store'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { cn } from '@/utils/classnames'
import { storage } from '@/utils/storage'
import {
useWorkflowInteractions,
} from '../../hooks'
@ -56,7 +58,7 @@ const DebugAndPreview = () => {
const setPanelWidth = useStore(s => s.setPreviewPanelWidth)
const handleResize = useCallback((width: number, source: 'user' | 'system' = 'user') => {
if (source === 'user')
localStorage.setItem('debug-and-preview-panel-width', `${width}`)
storage.set(STORAGE_KEYS.WORKFLOW.PREVIEW_PANEL_WIDTH, width)
setPanelWidth(width)
}, [setPanelWidth])
const maxPanelWidth = useMemo(() => {

@ -1,11 +1,12 @@
import type { StateCreator } from 'zustand'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { storage } from '@/utils/storage'

export type LayoutSliceShape = {
workflowCanvasWidth?: number
workflowCanvasHeight?: number
setWorkflowCanvasWidth: (width: number) => void
setWorkflowCanvasHeight: (height: number) => void
// rightPanelWidth - otherPanelWidth = nodePanelWidth
rightPanelWidth?: number
setRightPanelWidth: (width: number) => void
nodePanelWidth: number
@ -14,11 +15,11 @@ export type LayoutSliceShape = {
setPreviewPanelWidth: (width: number) => void
otherPanelWidth: number
setOtherPanelWidth: (width: number) => void
bottomPanelWidth: number // min-width = 400px; default-width = auto || 480px;
bottomPanelWidth: number
setBottomPanelWidth: (width: number) => void
bottomPanelHeight: number
setBottomPanelHeight: (height: number) => void
variableInspectPanelHeight: number // min-height = 120px; default-height = 320px;
variableInspectPanelHeight: number
setVariableInspectPanelHeight: (height: number) => void
maximizeCanvas: boolean
setMaximizeCanvas: (maximize: boolean) => void
@ -31,9 +32,9 @@ export const createLayoutSlice: StateCreator<LayoutSliceShape> = set => ({
setWorkflowCanvasHeight: height => set(() => ({ workflowCanvasHeight: height })),
rightPanelWidth: undefined,
setRightPanelWidth: width => set(() => ({ rightPanelWidth: width })),
nodePanelWidth: localStorage.getItem('workflow-node-panel-width') ? Number.parseFloat(localStorage.getItem('workflow-node-panel-width')!) : 400,
nodePanelWidth: storage.getNumber(STORAGE_KEYS.WORKFLOW.NODE_PANEL_WIDTH, 420),
setNodePanelWidth: width => set(() => ({ nodePanelWidth: width })),
previewPanelWidth: localStorage.getItem('debug-and-preview-panel-width') ? Number.parseFloat(localStorage.getItem('debug-and-preview-panel-width')!) : 400,
previewPanelWidth: storage.getNumber(STORAGE_KEYS.WORKFLOW.PREVIEW_PANEL_WIDTH, 400),
setPreviewPanelWidth: width => set(() => ({ previewPanelWidth: width })),
otherPanelWidth: 400,
setOtherPanelWidth: width => set(() => ({ otherPanelWidth: width })),
@ -41,8 +42,8 @@ export const createLayoutSlice: StateCreator<LayoutSliceShape> = set => ({
setBottomPanelWidth: width => set(() => ({ bottomPanelWidth: width })),
bottomPanelHeight: 324,
setBottomPanelHeight: height => set(() => ({ bottomPanelHeight: height })),
variableInspectPanelHeight: localStorage.getItem('workflow-variable-inpsect-panel-height') ? Number.parseFloat(localStorage.getItem('workflow-variable-inpsect-panel-height')!) : 320,
variableInspectPanelHeight: storage.getNumber(STORAGE_KEYS.WORKFLOW.VARIABLE_INSPECT_PANEL_HEIGHT, 320),
setVariableInspectPanelHeight: height => set(() => ({ variableInspectPanelHeight: height })),
maximizeCanvas: localStorage.getItem('workflow-canvas-maximize') === 'true',
maximizeCanvas: storage.getBoolean(STORAGE_KEYS.WORKFLOW.CANVAS_MAXIMIZE, false),
setMaximizeCanvas: maximize => set(() => ({ maximizeCanvas: maximize })),
})

@ -1,7 +1,6 @@
import type { StateCreator } from 'zustand'

export type PanelSliceShape = {
panelWidth: number
showFeaturesPanel: boolean
setShowFeaturesPanel: (showFeaturesPanel: boolean) => void
showWorkflowVersionHistoryPanel: boolean
@ -27,7 +26,6 @@ export type PanelSliceShape = {
}

export const createPanelSlice: StateCreator<PanelSliceShape> = set => ({
panelWidth: localStorage.getItem('workflow-node-panel-width') ? Number.parseFloat(localStorage.getItem('workflow-node-panel-width')!) : 420,
showFeaturesPanel: false,
setShowFeaturesPanel: showFeaturesPanel => set(() => ({ showFeaturesPanel })),
showWorkflowVersionHistoryPanel: false,

@ -5,6 +5,8 @@ import type {
WorkflowRunningData,
} from '@/app/components/workflow/types'
import type { FileUploadConfigResponse } from '@/models/common'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { storage } from '@/utils/storage'

type PreviewRunningData = WorkflowRunningData & {
resultTabActive?: boolean
@ -63,10 +65,10 @@ export const createWorkflowSlice: StateCreator<WorkflowSliceShape> = set => ({
setSelection: selection => set(() => ({ selection })),
bundleNodeSize: null,
setBundleNodeSize: bundleNodeSize => set(() => ({ bundleNodeSize })),
controlMode: localStorage.getItem('workflow-operation-mode') === 'pointer' ? 'pointer' : 'hand',
controlMode: storage.get<'pointer' | 'hand'>(STORAGE_KEYS.WORKFLOW.OPERATION_MODE) === 'pointer' ? 'pointer' : 'hand',
setControlMode: (controlMode) => {
set(() => ({ controlMode }))
localStorage.setItem('workflow-operation-mode', controlMode)
storage.set(STORAGE_KEYS.WORKFLOW.OPERATION_MODE, controlMode)
},
mousePosition: { pageX: 0, pageY: 0, elementX: 0, elementY: 0 },
setMousePosition: mousePosition => set(() => ({ mousePosition })),

@ -4,7 +4,9 @@ import {
useCallback,
useMemo,
} from 'react'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { cn } from '@/utils/classnames'
import { storage } from '@/utils/storage'
import { useResizePanel } from '../nodes/_base/hooks/use-resize-panel'
import { useStore } from '../store'
import Panel from './panel'
@ -21,8 +23,8 @@ const VariableInspectPanel: FC = () => {
return workflowCanvasHeight - 60
}, [workflowCanvasHeight])

const handleResize = useCallback((width: number, height: number) => {
localStorage.setItem('workflow-variable-inpsect-panel-height', `${height}`)
const handleResize = useCallback((_width: number, height: number) => {
storage.set(STORAGE_KEYS.WORKFLOW.VARIABLE_INSPECT_PANEL_HEIGHT, height)
setVariableInspectPanelHeight(height)
}, [setVariableInspectPanelHeight])
@ -13,13 +13,14 @@ import { useTranslation } from 'react-i18next'
|
||||
import Button from '@/app/components/base/button'
|
||||
import Checkbox from '@/app/components/base/checkbox'
|
||||
import { useToastContext } from '@/app/components/base/toast'
|
||||
import { EDUCATION_VERIFYING_LOCALSTORAGE_ITEM } from '@/app/education-apply/constants'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useDocLink } from '@/context/i18n'
|
||||
import { useProviderContext } from '@/context/provider-context'
|
||||
import {
|
||||
useEducationAdd,
|
||||
useInvalidateEducationStatus,
|
||||
} from '@/service/use-education'
|
||||
import { storage } from '@/utils/storage'
|
||||
import DifyLogo from '../components/base/logo/dify-logo'
|
||||
import RoleSelector from './role-selector'
|
||||
import SearchInput from './search-input'
|
||||
@ -47,7 +48,7 @@ const EducationApplyAge = () => {
|
||||
setShowModal(undefined)
|
||||
onPlanInfoChanged()
|
||||
updateEducationStatus()
|
||||
localStorage.removeItem(EDUCATION_VERIFYING_LOCALSTORAGE_ITEM)
|
||||
storage.remove(STORAGE_KEYS.EDUCATION.VERIFYING)
|
||||
router.replace('/')
|
||||
}
|
||||
|
||||
|
||||
@ -10,14 +10,15 @@ import {
|
||||
useState,
|
||||
} from 'react'
|
||||
import { ACCOUNT_SETTING_TAB } from '@/app/components/header/account-setting/constants'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import { useModalContextSelector } from '@/context/modal-context'
|
||||
import { useProviderContext } from '@/context/provider-context'
|
||||
import { useEducationAutocomplete, useEducationVerify } from '@/service/use-education'
|
||||
import { storage } from '@/utils/storage'
|
||||
import {
|
||||
EDUCATION_RE_VERIFY_ACTION,
|
||||
EDUCATION_VERIFY_URL_SEARCHPARAMS_ACTION,
|
||||
EDUCATION_VERIFYING_LOCALSTORAGE_ITEM,
|
||||
} from './constants'
|
||||
|
||||
dayjs.extend(utc)
|
||||
@ -133,7 +134,7 @@ const useEducationReverifyNotice = ({
|
||||
export const useEducationInit = () => {
|
||||
const setShowAccountSettingModal = useModalContextSelector(s => s.setShowAccountSettingModal)
|
||||
const setShowEducationExpireNoticeModal = useModalContextSelector(s => s.setShowEducationExpireNoticeModal)
|
||||
const educationVerifying = localStorage.getItem(EDUCATION_VERIFYING_LOCALSTORAGE_ITEM)
|
||||
const educationVerifying = storage.get<string>(STORAGE_KEYS.EDUCATION.VERIFYING)
|
||||
const searchParams = useSearchParams()
|
||||
const educationVerifyAction = searchParams.get('action')
|
||||
|
||||
@ -156,7 +157,7 @@ export const useEducationInit = () => {
|
||||
setShowAccountSettingModal({ payload: ACCOUNT_SETTING_TAB.BILLING })
|
||||
|
||||
if (educationVerifyAction === EDUCATION_VERIFY_URL_SEARCHPARAMS_ACTION)
|
||||
localStorage.setItem(EDUCATION_VERIFYING_LOCALSTORAGE_ITEM, 'yes')
|
||||
storage.set(STORAGE_KEYS.EDUCATION.VERIFYING, 'yes')
|
||||
}
|
||||
if (educationVerifyAction === EDUCATION_RE_VERIFY_ACTION)
|
||||
handleVerify()
|
||||
|
||||
@ -3,8 +3,10 @@ import { useTranslation } from 'react-i18next'
|
||||
import Avatar from '@/app/components/base/avatar'
|
||||
import Button from '@/app/components/base/button'
|
||||
import { Triangle } from '@/app/components/base/icons/src/public/education'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import { useLogout } from '@/service/use-common'
|
||||
import { storage } from '@/utils/storage'
|
||||
|
||||
const UserInfo = () => {
|
||||
const router = useRouter()
|
||||
@ -15,7 +17,7 @@ const UserInfo = () => {
|
||||
const handleLogout = async () => {
|
||||
await logout()
|
||||
|
||||
localStorage.removeItem('setup_status')
|
||||
storage.remove(STORAGE_KEYS.CONFIG.SETUP_STATUS)
|
||||
// Tokens are now stored in cookies and cleared by backend
|
||||
|
||||
router.push('/signin')
|
||||
|
||||
@ -158,7 +158,7 @@ describe('InstallForm', () => {
|
||||
render(<InstallForm />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(localStorage.setItem).toHaveBeenCalledWith('setup_status', 'finished')
|
||||
expect(localStorage.setItem).toHaveBeenCalledWith('v1:setup_status', 'finished')
|
||||
expect(mockPush).toHaveBeenCalledWith('/signin')
|
||||
})
|
||||
})
|
||||
|
||||
@ -13,12 +13,14 @@ import { formContext, useAppForm } from '@/app/components/base/form'
|
||||
import { zodSubmitValidator } from '@/app/components/base/form/utils/zod-submit-validator'
|
||||
import Input from '@/app/components/base/input'
|
||||
import { validPassword } from '@/config'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
|
||||
import { LICENSE_LINK } from '@/constants/link'
|
||||
import useDocumentTitle from '@/hooks/use-document-title'
|
||||
import { fetchInitValidateStatus, fetchSetupStatus, login, setup } from '@/service/common'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { encryptPassword as encodePassword } from '@/utils/encryption'
|
||||
import { storage } from '@/utils/storage'
|
||||
import Loading from '../components/base/loading'
|
||||
|
||||
const accountFormSchema = z.object({
|
||||
@ -85,7 +87,7 @@ const InstallForm = () => {
|
||||
useEffect(() => {
|
||||
fetchSetupStatus().then((res: SetupStatusResponse) => {
|
||||
if (res.step === 'finished') {
|
||||
localStorage.setItem('setup_status', 'finished')
|
||||
storage.set(STORAGE_KEYS.CONFIG.SETUP_STATUS, 'finished')
|
||||
router.push('/signin')
|
||||
}
|
||||
else {
|
||||
|
||||
@ -12,7 +12,7 @@ import { ToastProvider } from './components/base/toast'
|
||||
import BrowserInitializer from './components/browser-initializer'
|
||||
import { ReactScanLoader } from './components/devtools/react-scan/loader'
|
||||
import { I18nServerProvider } from './components/provider/i18n-server'
|
||||
import { PWAProvider } from './components/provider/serwist'
|
||||
import { SerwistProvider } from './components/provider/serwist'
|
||||
import SentryInitializer from './components/sentry-initializer'
|
||||
import RoutePrefixHandle from './routePrefixHandle'
|
||||
import './styles/globals.css'
|
||||
@ -40,6 +40,9 @@ const LocaleLayout = async ({
|
||||
}) => {
|
||||
const locale = await getLocaleOnServer()
|
||||
|
||||
const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
|
||||
const swUrl = `${basePath}/serwist/sw.js`
|
||||
|
||||
const datasetMap: Record<DatasetAttr, string | undefined> = {
|
||||
[DatasetAttr.DATA_API_PREFIX]: process.env.NEXT_PUBLIC_API_PREFIX,
|
||||
[DatasetAttr.DATA_PUBLIC_API_PREFIX]: process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX,
|
||||
@ -93,7 +96,7 @@ const LocaleLayout = async ({
|
||||
className="color-scheme h-full select-auto"
|
||||
{...datasetMap}
|
||||
>
|
||||
<PWAProvider>
|
||||
<SerwistProvider swUrl={swUrl}>
|
||||
<ReactScanLoader />
|
||||
<JotaiProvider>
|
||||
<ThemeProvider
|
||||
@ -121,7 +124,7 @@ const LocaleLayout = async ({
|
||||
</ThemeProvider>
|
||||
</JotaiProvider>
|
||||
<RoutePrefixHandle />
|
||||
</PWAProvider>
|
||||
</SerwistProvider>
|
||||
</body>
|
||||
</html>
|
||||
)
|
||||
|
||||
@ -9,10 +9,12 @@ import Button from '@/app/components/base/button'
|
||||
import Input from '@/app/components/base/input'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
import { emailRegex } from '@/config'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useLocale } from '@/context/i18n'
|
||||
import useDocumentTitle from '@/hooks/use-document-title'
|
||||
import { sendResetPasswordCode } from '@/service/common'
|
||||
import { COUNT_DOWN_KEY, COUNT_DOWN_TIME_MS } from '../components/signin/countdown'
|
||||
import { storage } from '@/utils/storage'
|
||||
import { COUNT_DOWN_TIME_MS } from '../components/signin/countdown'
|
||||
|
||||
export default function CheckCode() {
|
||||
const { t } = useTranslation()
|
||||
@ -40,7 +42,7 @@ export default function CheckCode() {
|
||||
setIsLoading(true)
|
||||
const res = await sendResetPasswordCode(email, locale)
|
||||
if (res.result === 'success') {
|
||||
localStorage.setItem(COUNT_DOWN_KEY, `${COUNT_DOWN_TIME_MS}`)
|
||||
storage.set(STORAGE_KEYS.UI.COUNTDOWN_LEFT_TIME, COUNT_DOWN_TIME_MS)
|
||||
const params = new URLSearchParams(searchParams)
|
||||
params.set('token', encodeURIComponent(res.data))
|
||||
params.set('email', encodeURIComponent(email))
|
||||
|
||||
@ -5,10 +5,12 @@ import { useTranslation } from 'react-i18next'
|
||||
import Button from '@/app/components/base/button'
|
||||
import Input from '@/app/components/base/input'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
import { COUNT_DOWN_KEY, COUNT_DOWN_TIME_MS } from '@/app/components/signin/countdown'
|
||||
import { COUNT_DOWN_TIME_MS } from '@/app/components/signin/countdown'
|
||||
import { emailRegex } from '@/config'
|
||||
import { STORAGE_KEYS } from '@/config/storage-keys'
|
||||
import { useLocale } from '@/context/i18n'
|
||||
import { sendEMailLoginCode } from '@/service/common'
|
||||
import { storage } from '@/utils/storage'
|
||||
|
||||
type MailAndCodeAuthProps = {
|
||||
isInvite: boolean
|
||||
@ -40,7 +42,7 @@ export default function MailAndCodeAuth({ isInvite }: MailAndCodeAuthProps) {
|
||||
setIsLoading(true)
|
||||
const ret = await sendEMailLoginCode(email, locale)
|
||||
if (ret.result === 'success') {
|
||||
localStorage.setItem(COUNT_DOWN_KEY, `${COUNT_DOWN_TIME_MS}`)
|
||||
storage.set(STORAGE_KEYS.UI.COUNTDOWN_LEFT_TIME, COUNT_DOWN_TIME_MS)
|
||||
const params = new URLSearchParams(searchParams)
|
||||
params.set('email', encodeURIComponent(email))
|
||||
params.set('token', encodeURIComponent(ret.data))
|
||||
|
||||
@ -1,15 +1,17 @@
import type { ReadonlyURLSearchParams } from 'next/navigation'
import dayjs from 'dayjs'
import { OAUTH_AUTHORIZE_PENDING_KEY, REDIRECT_URL_KEY } from '@/app/account/oauth/authorize/constants'
import { REDIRECT_URL_KEY } from '@/app/account/oauth/authorize/constants'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { storage } from '@/utils/storage'

function getItemWithExpiry(key: string): string | null {
  const itemStr = localStorage.getItem(key)
  const itemStr = storage.get<string>(key)
  if (!itemStr)
    return null

  try {
    const item = JSON.parse(itemStr)
    localStorage.removeItem(key)
    storage.remove(key)
    if (!item?.value)
      return null

@ -24,7 +26,7 @@ export const resolvePostLoginRedirect = (searchParams: ReadonlyURLSearchParams)
  const redirectUrl = searchParams.get(REDIRECT_URL_KEY)
  if (redirectUrl) {
    try {
      localStorage.removeItem(OAUTH_AUTHORIZE_PENDING_KEY)
      storage.remove(STORAGE_KEYS.AUTH.OAUTH_AUTHORIZE_PENDING)
      return decodeURIComponent(redirectUrl)
    }
    catch (e) {
@ -33,5 +35,5 @@ export const resolvePostLoginRedirect = (searchParams: ReadonlyURLSearchParams)
    }
  }

  return getItemWithExpiry(OAUTH_AUTHORIZE_PENDING_KEY)
  return getItemWithExpiry(STORAGE_KEYS.AUTH.OAUTH_AUTHORIZE_PENDING)
}

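The writer side of this expiring-item pattern is outside the hunk, so the exact shape of the stored payload is not visible here. Given that the reader JSON-parses the string returned by storage.get, checks item?.value, and the module imports dayjs, a plausible counterpart would look roughly like the sketch below. This is an assumption for illustration only; setItemWithExpiry and the ttlMinutes parameter are hypothetical names, not code from this change.

// Hypothetical writer, for illustration only — not part of this diff.
// Stores a JSON payload whose `value` field the reader above unwraps.
function setItemWithExpiry(key: string, value: string, ttlMinutes: number): void {
  const item = {
    value,
    expiry: dayjs().add(ttlMinutes, 'minute').toISOString(),
  }
  storage.set(key, JSON.stringify(item))
}
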
@ -1,5 +1,6 @@
import type { ModelParameterRule } from '@/app/components/header/account-setting/model-provider-page/declarations'
import { InputVarType } from '@/app/components/workflow/types'
import { STORAGE_KEYS } from '@/config/storage-keys'
import { PromptRole } from '@/models/debug'
import { PipelineInputVarType } from '@/models/pipeline'
import { AgentStrategy } from '@/types/app'
@ -179,7 +180,7 @@ export const CSRF_COOKIE_NAME = () => {
  return isSecure ? '__Host-csrf_token' : 'csrf_token'
}
export const CSRF_HEADER_NAME = 'X-CSRF-Token'
export const ACCESS_TOKEN_LOCAL_STORAGE_NAME = 'access_token'
export const ACCESS_TOKEN_LOCAL_STORAGE_NAME = STORAGE_KEYS.AUTH.ACCESS_TOKEN
export const PASSPORT_LOCAL_STORAGE_NAME = (appCode: string) => `passport-${appCode}`
export const PASSPORT_HEADER_NAME = 'X-App-Passport'

@ -229,7 +230,7 @@ export const VAR_ITEM_TEMPLATE_IN_PIPELINE = {

export const appDefaultIconBackground = '#D5F5F6'

export const NEED_REFRESH_APP_LIST_KEY = 'needRefreshAppList'
export const NEED_REFRESH_APP_LIST_KEY = STORAGE_KEYS.APP.NEED_REFRESH_LIST

export const DATASET_DEFAULT = {
  top_k: 4,

77
web/config/storage-keys.ts
Normal file
@ -0,0 +1,77 @@
export const STORAGE_KEYS = {
  WORKFLOW: {
    NODE_PANEL_WIDTH: 'workflow-node-panel-width',
    PREVIEW_PANEL_WIDTH: 'debug-and-preview-panel-width',
    VARIABLE_INSPECT_PANEL_HEIGHT: 'workflow-variable-inspect-panel-height',
    CANVAS_MAXIMIZE: 'workflow-canvas-maximize',
    OPERATION_MODE: 'workflow-operation-mode',
    RAG_RECOMMENDATIONS_COLLAPSED: 'workflow_rag_recommendations_collapsed',
    TOOLS_FEATURED_COLLAPSED: 'workflow_tools_featured_collapsed',
    TRIGGERS_FEATURED_COLLAPSED: 'workflow_triggers_featured_collapsed',
  },
  APP: {
    SIDEBAR_COLLAPSE: 'webappSidebarCollapse',
    NEED_REFRESH_LIST: 'needRefreshAppList',
    DETAIL_COLLAPSE: 'app-detail-collapse-or-expand',
  },
  CONVERSATION: {
    ID_INFO: 'conversationIdInfo',
  },
  AUTH: {
    ACCESS_TOKEN: 'access_token',
    REFRESH_LOCK: 'is_other_tab_refreshing',
    LAST_REFRESH_TIME: 'last_refresh_time',
    OAUTH_AUTHORIZE_PENDING: 'oauth_authorize_pending',
  },
  EDUCATION: {
    VERIFYING: 'educationVerifying',
    REVERIFY_PREV_EXPIRE_AT: 'education-reverify-prev-expire-at',
    REVERIFY_HAS_NOTICED: 'education-reverify-has-noticed',
    EXPIRED_HAS_NOTICED: 'education-expired-has-noticed',
  },
  CONFIG: {
    AUTO_GEN_MODEL: 'auto-gen-model',
    DEBUG_MODELS: 'app-debug-with-single-or-multiple-models',
    SETUP_STATUS: 'setup_status',
  },
  UI: {
    THEME: 'theme',
    ANTHROPIC_QUOTA_NOTICE: 'anthropic_quota_notice',
    HIDE_MAINTENANCE_NOTICE: 'hide-maintenance-notice',
    COUNTDOWN_LEFT_TIME: 'leftTime',
    SHOW_MANAGE_METADATA: 'dify-isShowManageMetadata',
  },
} as const

export type StorageKeys = typeof STORAGE_KEYS

export const LEGACY_KEY_MIGRATIONS: Array<{ old: string, new: string }> = [
  { old: 'workflow-node-panel-width', new: 'workflow-node-panel-width' },
  { old: 'debug-and-preview-panel-width', new: 'debug-and-preview-panel-width' },
  { old: 'workflow-variable-inspect-panel-height', new: 'workflow-variable-inspect-panel-height' },
  { old: 'workflow-canvas-maximize', new: 'workflow-canvas-maximize' },
  { old: 'workflow-operation-mode', new: 'workflow-operation-mode' },
  { old: 'workflow_rag_recommendations_collapsed', new: 'workflow_rag_recommendations_collapsed' },
  { old: 'workflow_tools_featured_collapsed', new: 'workflow_tools_featured_collapsed' },
  { old: 'workflow_triggers_featured_collapsed', new: 'workflow_triggers_featured_collapsed' },
  { old: 'webappSidebarCollapse', new: 'webappSidebarCollapse' },
  { old: 'needRefreshAppList', new: 'needRefreshAppList' },
  { old: 'app-detail-collapse-or-expand', new: 'app-detail-collapse-or-expand' },
  { old: 'conversationIdInfo', new: 'conversationIdInfo' },
  { old: 'access_token', new: 'access_token' },
  { old: 'is_other_tab_refreshing', new: 'is_other_tab_refreshing' },
  { old: 'last_refresh_time', new: 'last_refresh_time' },
  { old: 'oauth_authorize_pending', new: 'oauth_authorize_pending' },
  { old: 'educationVerifying', new: 'educationVerifying' },
  { old: 'education-reverify-prev-expire-at', new: 'education-reverify-prev-expire-at' },
  { old: 'education-reverify-has-noticed', new: 'education-reverify-has-noticed' },
  { old: 'education-expired-has-noticed', new: 'education-expired-has-noticed' },
  { old: 'auto-gen-model', new: 'auto-gen-model' },
  { old: 'app-debug-with-single-or-multiple-models', new: 'app-debug-with-single-or-multiple-models' },
  { old: 'setup_status', new: 'setup_status' },
  { old: 'theme', new: 'theme' },
  { old: 'anthropic_quota_notice', new: 'anthropic_quota_notice' },
  { old: 'hide-maintenance-notice', new: 'hide-maintenance-notice' },
  { old: 'leftTime', new: 'leftTime' },
  { old: 'dify-isShowManageMetadata', new: 'dify-isShowManageMetadata' },
]

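The @/utils/storage wrapper itself is not included in this compare view, but its surface can be inferred from the call sites above: storage.get<string>(), storage.set(), storage.remove(), storage.isAvailable(), plus the 'v1:' key prefix the updated modal-context test now expects. Every LEGACY_KEY_MIGRATIONS entry maps a key to itself, which suggests the migration exists to copy existing values into the wrapper's namespace rather than to rename them. A minimal sketch consistent with that usage — an assumption, not the repository's actual implementation — might look like this:

// Sketch of a storage wrapper consistent with the call sites in this diff.
// Assumptions: keys are namespaced with a 'v1:' prefix (inferred from the updated
// test expectation) and values are persisted as raw strings, so a spy on
// localStorage.setItem still sees '1' unchanged.
import { LEGACY_KEY_MIGRATIONS } from '@/config/storage-keys'

const KEY_PREFIX = 'v1:'

function isAvailable(): boolean {
  try {
    const probe = '__storage_probe__'
    localStorage.setItem(probe, probe)
    localStorage.removeItem(probe)
    return true
  }
  catch {
    return false
  }
}

function set(key: string, value: string | number | boolean): void {
  try {
    localStorage.setItem(`${KEY_PREFIX}${key}`, String(value))
  }
  catch {
    // Quota errors and disabled storage are swallowed, matching the callers above.
  }
}

function get<T = string>(key: string): T | null {
  try {
    const raw = localStorage.getItem(`${KEY_PREFIX}${key}`)
    return raw === null ? null : (raw as unknown as T)
  }
  catch {
    return null
  }
}

function remove(key: string): void {
  try {
    localStorage.removeItem(`${KEY_PREFIX}${key}`)
  }
  catch {
    // ignore
  }
}

// Best-effort copy of legacy, un-prefixed keys into the namespaced store.
export function migrateLegacyKeys(): void {
  for (const { old: oldKey, new: newKey } of LEGACY_KEY_MIGRATIONS) {
    try {
      const value = localStorage.getItem(oldKey)
      if (value !== null && localStorage.getItem(`${KEY_PREFIX}${newKey}`) === null) {
        localStorage.setItem(`${KEY_PREFIX}${newKey}`, value)
        localStorage.removeItem(oldKey)
      }
    }
    catch {
      // ignore: migration must never break page load
    }
  }
}

export const storage = { isAvailable, get, set, remove }
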
@ -6,6 +6,7 @@ import { NUM_INFINITE } from '@/app/components/billing/config'
import { Plan } from '@/app/components/billing/type'
import { IS_CLOUD_EDITION } from '@/config'
import { isServer } from '@/utils/client'
import { storage } from '@/utils/storage'

export type TriggerEventsLimitModalPayload = {
  usage: number
@ -80,15 +81,10 @@ export const useTriggerEventsLimitModal = ({
    if (dismissedTriggerEventsLimitStorageKeysRef.current[storageKey])
      return

    let persistDismiss = true
    const persistDismiss = storage.isAvailable()
    let hasDismissed = false
    try {
      if (localStorage.getItem(storageKey) === '1')
        hasDismissed = true
    }
    catch {
      persistDismiss = false
    }
    if (storage.get<string>(storageKey) === '1')
      hasDismissed = true
    if (hasDismissed)
      return

@ -110,16 +106,9 @@ export const useTriggerEventsLimitModal = ({
    const storageKey = showTriggerEventsLimitModal?.payload.storageKey
    if (!storageKey)
      return
    if (showTriggerEventsLimitModal?.payload.persistDismiss) {
      try {
        localStorage.setItem(storageKey, '1')
        return
      }
      catch {
        // ignore error and fall back to in-memory guard
      }
    }
    dismissedTriggerEventsLimitStorageKeysRef.current[storageKey] = true
    if (showTriggerEventsLimitModal?.payload.persistDismiss)
      storage.set(storageKey, '1')
  }, [showTriggerEventsLimitModal])

  return {

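The rewritten hook no longer needs a try/catch dance around localStorage: persistDismiss is derived once from storage.isAvailable(), read and write failures are presumably swallowed inside the wrapper, and the in-memory dismissedTriggerEventsLimitStorageKeysRef guard remains the fallback when persistence is unavailable.
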
@ -130,7 +130,7 @@ describe('ModalContextProvider trigger events limit modal', () => {
      expect(setItemSpy.mock.calls.length).toBeGreaterThan(0)
    })
    const [key, value] = setItemSpy.mock.calls[0]
    expect(key).toContain('trigger-events-limit-dismissed-workspace-1-professional-3000-')
    expect(key).toContain('v1:trigger-events-limit-dismissed-workspace-1-professional-3000-')
    expect(value).toBe('1')
  })


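The only change the test captures is the key namespace: with the wrapper in place, the spy on localStorage.setItem now observes the original dismiss key prefixed with 'v1:', while the persisted value is still the string '1'.
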
@ -30,15 +30,15 @@ import {
  DEFAULT_ACCOUNT_SETTING_TAB,
  isValidAccountSettingTab,
} from '@/app/components/header/account-setting/constants'
import {
  EDUCATION_VERIFYING_LOCALSTORAGE_ITEM,
} from '@/app/education-apply/constants'

import { STORAGE_KEYS } from '@/config/storage-keys'
import { useAppContext } from '@/context/app-context'
import { useProviderContext } from '@/context/provider-context'
import {
  useAccountSettingModal,
  usePricingModal,
} from '@/hooks/use-query-params'
import { storage } from '@/utils/storage'

import {

@ -183,10 +183,10 @@ export const ModalContextProvider = ({

  const [showAnnotationFullModal, setShowAnnotationFullModal] = useState(false)
  const handleCancelAccountSettingModal = () => {
    const educationVerifying = localStorage.getItem(EDUCATION_VERIFYING_LOCALSTORAGE_ITEM)
    const educationVerifying = storage.get<string>(STORAGE_KEYS.EDUCATION.VERIFYING)

    if (educationVerifying === 'yes')
      localStorage.removeItem(EDUCATION_VERIFYING_LOCALSTORAGE_ITEM)
      storage.remove(STORAGE_KEYS.EDUCATION.VERIFYING)

    accountSettingCallbacksRef.current?.onCancelCallback?.()
    accountSettingCallbacksRef.current = null
