Compare commits


18 Commits

Author SHA1 Message Date
e1016eaedb add command sql log args 2026-01-21 20:54:56 +08:00
e5656b2b5c add from_days_ago and to_days_ago 2026-01-21 14:16:45 +08:00
fe9575ada5 Merge branch 'archive-workflow-backend' of github.com:hjlarry/dify into archive-workflow-backend 2026-01-21 10:14:16 +08:00
198c39b97e rm unused property 2026-01-21 10:14:11 +08:00
b4f9f3d224 [autofix.ci] apply automated fixes 2026-01-21 01:41:05 +00:00
c89d8ba740 fix comment 2026-01-21 09:35:24 +08:00
adae82d073 _delete_node_executions use run_ids 2026-01-21 09:35:20 +08:00
6d23f8a071 use single run_id query nodeExecution 2026-01-21 09:30:28 +08:00
4864076b43 update migration file to support uuidv7 2026-01-21 09:19:24 +08:00
d8c6d81810 chore(web): restore eslint suppressions from myori/main 2026-01-20 23:07:28 +08:00
87a8545ca5 chore(web): revert eslint suppressions 2026-01-20 22:55:01 +08:00
8118a2fc38 Merge remote-tracking branch 'myori/main' into archive-workflow-backend 2026-01-20 22:47:39 +08:00
8f949d503a chore(i18n): sync translations with en-US (#31298)
Co-authored-by: claude[bot] <41898282+claude[bot]@users.noreply.github.com>
Co-authored-by: Claude Sonnet 4.5 <noreply@anthropic.com>
Co-authored-by: yyh <92089059+lyzno1@users.noreply.github.com>
2026-01-20 21:40:43 +08:00
c9519d2f0e chore(deps): bump js-yaml from 4.1.0 to 4.1.1 in /web (#31297)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-20 21:27:15 +08:00
92fc50ac57 chore(web): remove version prefixes from package.json (#31286)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: hyoban <38493346+hyoban@users.noreply.github.com>
2026-01-20 21:14:50 +08:00
3bb80a0934 chore: lint for i18n place holder (#31283)
Co-authored-by: yyh <yuanyouhuilyz@gmail.com>
2026-01-20 21:14:20 +08:00
b4d29ad6ee feat: archive workflow runs backend 2026-01-20 20:45:45 +08:00
yyh 54921844bb fix(web): disable HTML escaping for form field validation messages (#31292) 2026-01-20 18:43:01 +08:00
310 changed files with 5900 additions and 1053 deletions

View File

@ -134,6 +134,9 @@ jobs:
with:
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
github_token: ${{ secrets.GITHUB_TOKEN }}
# Allow github-actions bot to trigger this workflow via repository_dispatch
# See: https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
allowed_bots: 'github-actions[bot]'
prompt: |
You are a professional i18n synchronization engineer for the Dify project.
Your task is to keep all language translations in sync with the English source (en-US).
@ -285,6 +288,22 @@ jobs:
- `${variable}` - Template literal
- `<tag>content</tag>` - HTML tags
- `_one`, `_other` - Pluralization suffixes (these are KEY suffixes, not values)
**CRITICAL: Variable names and tag names MUST stay in English - NEVER translate them**
✅ CORRECT examples:
- English: "{{count}} items" → Japanese: "{{count}} 個のアイテム"
- English: "{{name}} updated" → Korean: "{{name}} 업데이트됨"
- English: "<email>{{email}}</email>" → Chinese: "<email>{{email}}</email>"
- English: "<CustomLink>Marketplace</CustomLink>" → Japanese: "<CustomLink>マーケットプレイス</CustomLink>"
❌ WRONG examples (NEVER do this - will break the application):
- "{{count}}" → "{{カウント}}" ❌ (variable name translated to Japanese)
- "{{name}}" → "{{이름}}" ❌ (variable name translated to Korean)
- "{{email}}" → "{{邮箱}}" ❌ (variable name translated to Chinese)
- "<email>" → "<メール>" ❌ (tag name translated)
- "<CustomLink>" → "<自定义链接>" ❌ (component name translated)
- Use appropriate language register (formal/informal) based on existing translations
- Match existing translation style in each language
- Technical terms: check existing conventions per language
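The placeholder rules above boil down to a set-equality check: the `{{variable}}` tokens and tag names extracted from the English source must survive verbatim in every translation. A minimal, self-contained sketch of such a check (illustrative helper, not code from this changeset):

import re

PLACEHOLDER_RE = re.compile(r"\{\{\s*\w+\s*\}\}|</?\w+>")

def placeholders(text: str) -> set[str]:
    # Collect {{variable}} tokens and <tag>/</tag> names from a translation string.
    return set(PLACEHOLDER_RE.findall(text))

def translation_is_safe(source: str, translated: str) -> bool:
    # A translation is safe only if every placeholder survives untranslated.
    return placeholders(source) == placeholders(translated)

assert translation_is_safe("{{count}} items", "{{count}} 個のアイテム")
assert not translation_is_safe("{{count}} items", "{{カウント}} 個のアイテム")
assert translation_is_safe("<CustomLink>Marketplace</CustomLink>", "<CustomLink>マーケットプレイス</CustomLink>")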

View File

@ -4,13 +4,15 @@ import json
import logging
import secrets
import time
from collections.abc import Iterator
from contextlib import contextmanager
from typing import Any
import click
import sqlalchemy as sa
from flask import current_app
from pydantic import TypeAdapter
from sqlalchemy import select
from sqlalchemy import event, select
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import sessionmaker
@ -55,6 +57,28 @@ from tasks.remove_app_and_related_data_task import delete_draft_variables_batch
logger = logging.getLogger(__name__)
@contextmanager
def _log_slow_queries(engine: sa.engine.Engine, threshold_ms: float) -> Iterator[None]:
def _before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
context._query_start_time = time.perf_counter()
def _after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
start_time = getattr(context, "_query_start_time", None)
if start_time is None:
return
elapsed_ms = (time.perf_counter() - start_time) * 1000
if elapsed_ms >= threshold_ms:
logger.warning("Slow SQL query (%.2fms): %s", elapsed_ms, statement)
event.listen(engine, "before_cursor_execute", _before_cursor_execute)
event.listen(engine, "after_cursor_execute", _after_cursor_execute)
try:
yield
finally:
event.remove(engine, "before_cursor_execute", _before_cursor_execute)
event.remove(engine, "after_cursor_execute", _after_cursor_execute)
@click.command("reset-password", help="Reset the account password.")
@click.option("--email", prompt=True, help="Account email to reset password for")
@click.option("--new-password", prompt=True, help="New password")
@ -950,6 +974,356 @@ def clean_workflow_runs(
)
@click.command(
"archive-workflow-runs",
help="Archive workflow runs for paid plan tenants to S3-compatible storage.",
)
@click.option("--tenant-ids", default=None, help="Optional comma-separated tenant IDs for grayscale rollout.")
@click.option("--before-days", default=90, show_default=True, help="Archive runs older than N days.")
@click.option(
"--from-days-ago",
default=None,
type=click.IntRange(min=0),
help="Lower bound in days ago (older). Must be paired with --to-days-ago.",
)
@click.option(
"--to-days-ago",
default=None,
type=click.IntRange(min=0),
help="Upper bound in days ago (newer). Must be paired with --from-days-ago.",
)
@click.option(
"--start-from",
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
default=None,
help="Archive runs created at or after this timestamp (UTC if no timezone).",
)
@click.option(
"--end-before",
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
default=None,
help="Archive runs created before this timestamp (UTC if no timezone).",
)
@click.option("--batch-size", default=100, show_default=True, help="Batch size for processing.")
@click.option("--workers", default=1, show_default=True, type=int, help="Concurrent workflow runs to archive.")
@click.option("--limit", default=None, type=int, help="Maximum number of runs to archive.")
@click.option("--dry-run", is_flag=True, help="Preview without archiving.")
@click.option("--delete-after-archive", is_flag=True, help="Delete runs and related data after archiving.")
@click.option(
"--slow-ms",
default=1000,
show_default=True,
type=int,
help="Log SQL queries slower than this threshold (ms).",
)
def archive_workflow_runs(
tenant_ids: str | None,
before_days: int,
from_days_ago: int | None,
to_days_ago: int | None,
start_from: datetime.datetime | None,
end_before: datetime.datetime | None,
batch_size: int,
workers: int,
limit: int | None,
dry_run: bool,
delete_after_archive: bool,
slow_ms: int,
):
"""
Archive workflow runs for paid plan tenants older than the specified days.
This command archives the following tables to storage:
- workflow_node_executions
- workflow_node_execution_offload
- workflow_pauses
- workflow_pause_reasons
- workflow_trigger_logs
The workflow_runs and workflow_app_logs tables are preserved for UI listing.
"""
from services.retention.workflow_run.archive_paid_plan_workflow_run import WorkflowRunArchiver
run_started_at = datetime.datetime.now(datetime.UTC)
click.echo(
click.style(
f"Starting workflow run archiving at {run_started_at.isoformat()}.",
fg="white",
)
)
if (start_from is None) ^ (end_before is None):
click.echo(click.style("start-from and end-before must be provided together.", fg="red"))
return
if (from_days_ago is None) ^ (to_days_ago is None):
click.echo(click.style("from-days-ago and to-days-ago must be provided together.", fg="red"))
return
if from_days_ago is not None and to_days_ago is not None:
if start_from or end_before:
click.echo(click.style("Choose either day offsets or explicit dates, not both.", fg="red"))
return
if from_days_ago <= to_days_ago:
click.echo(click.style("from-days-ago must be greater than to-days-ago.", fg="red"))
return
now = datetime.datetime.now()
start_from = now - datetime.timedelta(days=from_days_ago)
end_before = now - datetime.timedelta(days=to_days_ago)
before_days = 0
if start_from and end_before and start_from >= end_before:
click.echo(click.style("start-from must be earlier than end-before.", fg="red"))
return
if workers < 1:
click.echo(click.style("workers must be at least 1.", fg="red"))
return
archiver = WorkflowRunArchiver(
days=before_days,
batch_size=batch_size,
start_from=start_from,
end_before=end_before,
workers=workers,
tenant_ids=[tid.strip() for tid in tenant_ids.split(",")] if tenant_ids else None,
limit=limit,
dry_run=dry_run,
delete_after_archive=delete_after_archive,
)
logger.info("Enable slow query logging for archive-workflow-runs (>=%sms).", slow_ms)
with _log_slow_queries(db.engine, slow_ms):
summary = archiver.run()
click.echo(
click.style(
f"Summary: processed={summary.total_runs_processed}, archived={summary.runs_archived}, "
f"skipped={summary.runs_skipped}, failed={summary.runs_failed}, "
f"time={summary.total_elapsed_time:.2f}s",
fg="cyan",
)
)
run_finished_at = datetime.datetime.now(datetime.UTC)
elapsed = run_finished_at - run_started_at
click.echo(
click.style(
f"Workflow run archiving completed. start={run_started_at.isoformat()} "
f"end={run_finished_at.isoformat()} duration={elapsed}",
fg="green",
)
)
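The `--from-days-ago`/`--to-days-ago` pair is just another way to express the same `[start_from, end_before)` window that `--start-from`/`--end-before` give explicitly, with the larger offset being the older bound. A standalone sketch of that mapping (UTC assumed here for clarity; the command itself uses the local clock):

import datetime

def window_from_day_offsets(from_days_ago: int, to_days_ago: int) -> tuple[datetime.datetime, datetime.datetime]:
    # from_days_ago is the older (larger) offset, to_days_ago the newer (smaller) one.
    if from_days_ago <= to_days_ago:
        raise ValueError("from_days_ago must be greater than to_days_ago")
    now = datetime.datetime.now(datetime.UTC)
    return now - datetime.timedelta(days=from_days_ago), now - datetime.timedelta(days=to_days_ago)

# e.g. archive runs created between 120 and 90 days ago
start_from, end_before = window_from_day_offsets(from_days_ago=120, to_days_ago=90)
assert start_from < end_before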
@click.command(
"restore-workflow-runs",
help="Restore archived workflow runs from S3-compatible storage.",
)
@click.option(
"--tenant-ids",
required=False,
help="Tenant IDs (comma-separated).",
)
@click.option("--run-id", required=False, help="Workflow run ID to restore.")
@click.option(
"--start-from",
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
default=None,
help="Optional lower bound (inclusive) for created_at; must be paired with --end-before.",
)
@click.option(
"--end-before",
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
default=None,
help="Optional upper bound (exclusive) for created_at; must be paired with --start-from.",
)
@click.option("--workers", default=1, show_default=True, type=int, help="Concurrent workflow runs to restore.")
@click.option("--limit", type=int, default=100, show_default=True, help="Maximum number of runs to restore.")
@click.option("--dry-run", is_flag=True, help="Preview without restoring.")
def restore_workflow_runs(
tenant_ids: str | None,
run_id: str | None,
start_from: datetime.datetime | None,
end_before: datetime.datetime | None,
workers: int,
limit: int,
dry_run: bool,
):
"""
Restore an archived workflow run from storage to the database.
This restores the following tables:
- workflow_node_executions
- workflow_node_execution_offload
- workflow_pauses
- workflow_pause_reasons
- workflow_trigger_logs
"""
from services.retention.workflow_run.restore_archived_workflow_run import WorkflowRunRestore
parsed_tenant_ids = None
if tenant_ids:
parsed_tenant_ids = [tid.strip() for tid in tenant_ids.split(",") if tid.strip()]
if not parsed_tenant_ids:
raise click.BadParameter("tenant-ids must not be empty")
if (start_from is None) ^ (end_before is None):
raise click.UsageError("--start-from and --end-before must be provided together.")
if run_id is None and (start_from is None or end_before is None):
raise click.UsageError("--start-from and --end-before are required for batch restore.")
if workers < 1:
raise click.BadParameter("workers must be at least 1")
start_time = datetime.datetime.now(datetime.UTC)
click.echo(
click.style(
f"Starting restore of workflow run {run_id} at {start_time.isoformat()}.",
fg="white",
)
)
restorer = WorkflowRunRestore(dry_run=dry_run, workers=workers)
if run_id:
results = [restorer.restore_by_run_id(run_id)]
else:
assert start_from is not None
assert end_before is not None
results = restorer.restore_batch(
parsed_tenant_ids,
start_date=start_from,
end_date=end_before,
limit=limit,
)
end_time = datetime.datetime.now(datetime.UTC)
elapsed = end_time - start_time
successes = sum(1 for result in results if result.success)
failures = len(results) - successes
if failures == 0:
click.echo(
click.style(
f"Restore completed successfully. success={successes} duration={elapsed}",
fg="green",
)
)
else:
click.echo(
click.style(
f"Restore completed with failures. success={successes} failed={failures} duration={elapsed}",
fg="red",
)
)
@click.command(
"delete-archived-workflow-runs",
help="Delete archived workflow runs from the database.",
)
@click.option(
"--tenant-ids",
required=False,
help="Tenant IDs (comma-separated).",
)
@click.option("--run-id", required=False, help="Workflow run ID to delete.")
@click.option(
"--start-from",
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
default=None,
help="Optional lower bound (inclusive) for created_at; must be paired with --end-before.",
)
@click.option(
"--end-before",
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
default=None,
help="Optional upper bound (exclusive) for created_at; must be paired with --start-from.",
)
@click.option("--limit", type=int, default=100, show_default=True, help="Maximum number of runs to delete.")
@click.option("--dry-run", is_flag=True, help="Preview without deleting.")
def delete_archived_workflow_runs(
tenant_ids: str | None,
run_id: str | None,
start_from: datetime.datetime | None,
end_before: datetime.datetime | None,
limit: int,
dry_run: bool,
):
"""
Delete archived workflow runs from the database.
"""
from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion
parsed_tenant_ids = None
if tenant_ids:
parsed_tenant_ids = [tid.strip() for tid in tenant_ids.split(",") if tid.strip()]
if not parsed_tenant_ids:
raise click.BadParameter("tenant-ids must not be empty")
if (start_from is None) ^ (end_before is None):
raise click.UsageError("--start-from and --end-before must be provided together.")
if run_id is None and (start_from is None or end_before is None):
raise click.UsageError("--start-from and --end-before are required for batch delete.")
start_time = datetime.datetime.now(datetime.UTC)
target_desc = f"workflow run {run_id}" if run_id else "workflow runs"
click.echo(
click.style(
f"Starting delete of {target_desc} at {start_time.isoformat()}.",
fg="white",
)
)
deleter = ArchivedWorkflowRunDeletion(dry_run=dry_run)
if run_id:
results = [deleter.delete_by_run_id(run_id)]
else:
assert start_from is not None
assert end_before is not None
results = deleter.delete_batch(
parsed_tenant_ids,
start_date=start_from,
end_date=end_before,
limit=limit,
)
for result in results:
if result.success:
click.echo(
click.style(
f"{'[DRY RUN] Would delete' if dry_run else 'Deleted'} "
f"workflow run {result.run_id} (tenant={result.tenant_id})",
fg="green",
)
)
else:
click.echo(
click.style(
f"Failed to delete workflow run {result.run_id}: {result.error}",
fg="red",
)
)
end_time = datetime.datetime.now(datetime.UTC)
elapsed = end_time - start_time
successes = sum(1 for result in results if result.success)
failures = len(results) - successes
if failures == 0:
click.echo(
click.style(
f"Delete completed successfully. success={successes} duration={elapsed}",
fg="green",
)
)
else:
click.echo(
click.style(
f"Delete completed with failures. success={successes} failed={failures} duration={elapsed}",
fg="red",
)
)
@click.option("-f", "--force", is_flag=True, help="Skip user confirmation and force the command to execute.")
@click.command("clear-orphaned-file-records", help="Clear orphaned file records.")
def clear_orphaned_file_records(force: bool):

View File

@ -11,7 +11,10 @@ from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.workflow.enums import WorkflowExecutionStatus
from extensions.ext_database import db
from fields.workflow_app_log_fields import build_workflow_app_log_pagination_model
from fields.workflow_app_log_fields import (
build_workflow_app_log_pagination_model,
build_workflow_archived_log_pagination_model,
)
from libs.login import login_required
from models import App
from models.model import AppMode
@ -61,6 +64,7 @@ console_ns.schema_model(
# Register model for flask_restx to avoid dict type issues in Swagger
workflow_app_log_pagination_model = build_workflow_app_log_pagination_model(console_ns)
workflow_archived_log_pagination_model = build_workflow_archived_log_pagination_model(console_ns)
@console_ns.route("/apps/<uuid:app_id>/workflow-app-logs")
@ -99,3 +103,33 @@ class WorkflowAppLogApi(Resource):
)
return workflow_app_log_pagination
@console_ns.route("/apps/<uuid:app_id>/workflow-archived-logs")
class WorkflowArchivedLogApi(Resource):
@console_ns.doc("get_workflow_archived_logs")
@console_ns.doc(description="Get workflow archived execution logs")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(console_ns.models[WorkflowAppLogQuery.__name__])
@console_ns.response(200, "Workflow archived logs retrieved successfully", workflow_archived_log_pagination_model)
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.WORKFLOW])
@marshal_with(workflow_archived_log_pagination_model)
def get(self, app_model: App):
"""
Get workflow archived logs
"""
args = WorkflowAppLogQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
workflow_app_service = WorkflowAppService()
with Session(db.engine) as session:
workflow_app_log_pagination = workflow_app_service.get_paginate_workflow_archive_logs(
session=session,
app_model=app_model,
page=args.page,
limit=args.limit,
)
return workflow_app_log_pagination

View File

@ -1,12 +1,15 @@
from datetime import UTC, datetime, timedelta
from typing import Literal, cast
from flask import request
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field, field_validator
from sqlalchemy import select
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from fields.end_user_fields import simple_end_user_fields
from fields.member_fields import simple_account_fields
from fields.workflow_run_fields import (
@ -19,14 +22,17 @@ from fields.workflow_run_fields import (
workflow_run_node_execution_list_fields,
workflow_run_pagination_fields,
)
from libs.archive_storage import ArchiveStorageNotConfiguredError, get_archive_storage
from libs.custom_inputs import time_duration
from libs.helper import uuid_value
from libs.login import current_user, login_required
from models import Account, App, AppMode, EndUser, WorkflowRunTriggeredFrom
from models import Account, App, AppMode, EndUser, WorkflowArchiveLog, WorkflowRunTriggeredFrom
from services.retention.workflow_run.constants import ARCHIVE_BUNDLE_NAME
from services.workflow_run_service import WorkflowRunService
# Workflow run status choices for filtering
WORKFLOW_RUN_STATUS_CHOICES = ["running", "succeeded", "failed", "stopped", "partial-succeeded"]
EXPORT_SIGNED_URL_EXPIRE_SECONDS = 3600
# Register models for flask_restx to avoid dict type issues in Swagger
# Register in dependency order: base models first, then dependent models
@ -93,6 +99,15 @@ workflow_run_node_execution_list_model = console_ns.model(
"WorkflowRunNodeExecutionList", workflow_run_node_execution_list_fields_copy
)
workflow_run_export_fields = console_ns.model(
"WorkflowRunExport",
{
"status": fields.String(description="Export status: success/failed"),
"presigned_url": fields.String(description="Pre-signed URL for download", required=False),
"presigned_url_expires_at": fields.String(description="Pre-signed URL expiration time", required=False),
},
)
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
@ -181,6 +196,56 @@ class AdvancedChatAppWorkflowRunListApi(Resource):
return result
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/export")
class WorkflowRunExportApi(Resource):
@console_ns.doc("get_workflow_run_export_url")
@console_ns.doc(description="Generate a download URL for an archived workflow run.")
@console_ns.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
@console_ns.response(200, "Export URL generated", workflow_run_export_fields)
@setup_required
@login_required
@account_initialization_required
@get_app_model()
def get(self, app_model: App, run_id: str):
tenant_id = str(app_model.tenant_id)
app_id = str(app_model.id)
run_id_str = str(run_id)
run_created_at = db.session.scalar(
select(WorkflowArchiveLog.run_created_at)
.where(
WorkflowArchiveLog.tenant_id == tenant_id,
WorkflowArchiveLog.app_id == app_id,
WorkflowArchiveLog.workflow_run_id == run_id_str,
)
.limit(1)
)
if not run_created_at:
return {"code": "archive_log_not_found", "message": "workflow run archive not found"}, 404
prefix = (
f"{tenant_id}/app_id={app_id}/year={run_created_at.strftime('%Y')}/"
f"month={run_created_at.strftime('%m')}/workflow_run_id={run_id_str}"
)
archive_key = f"{prefix}/{ARCHIVE_BUNDLE_NAME}"
try:
archive_storage = get_archive_storage()
except ArchiveStorageNotConfiguredError as e:
return {"code": "archive_storage_not_configured", "message": str(e)}, 500
presigned_url = archive_storage.generate_presigned_url(
archive_key,
expires_in=EXPORT_SIGNED_URL_EXPIRE_SECONDS,
)
expires_at = datetime.now(UTC) + timedelta(seconds=EXPORT_SIGNED_URL_EXPIRE_SECONDS)
return {
"status": "success",
"presigned_url": presigned_url,
"presigned_url_expires_at": expires_at.isoformat(),
}, 200
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs/count")
class AdvancedChatAppWorkflowRunCountApi(Resource):
@console_ns.doc("get_advanced_chat_workflow_runs_count")

View File

@ -4,6 +4,7 @@ from dify_app import DifyApp
def init_app(app: DifyApp):
from commands import (
add_qdrant_index,
archive_workflow_runs,
clean_expired_messages,
clean_workflow_runs,
cleanup_orphaned_draft_variables,
@ -11,6 +12,7 @@ def init_app(app: DifyApp):
clear_orphaned_file_records,
convert_to_agent_apps,
create_tenant,
delete_archived_workflow_runs,
extract_plugins,
extract_unique_plugins,
file_usage,
@ -24,6 +26,7 @@ def init_app(app: DifyApp):
reset_email,
reset_encrypt_key_pair,
reset_password,
restore_workflow_runs,
setup_datasource_oauth_client,
setup_system_tool_oauth_client,
setup_system_trigger_oauth_client,
@ -58,6 +61,9 @@ def init_app(app: DifyApp):
setup_datasource_oauth_client,
transform_datasource_credentials,
install_rag_pipeline_plugins,
archive_workflow_runs,
delete_archived_workflow_runs,
restore_workflow_runs,
clean_workflow_runs,
clean_expired_messages,
]

View File

@ -2,7 +2,12 @@ from flask_restx import Namespace, fields
from fields.end_user_fields import build_simple_end_user_model, simple_end_user_fields
from fields.member_fields import build_simple_account_model, simple_account_fields
from fields.workflow_run_fields import build_workflow_run_for_log_model, workflow_run_for_log_fields
from fields.workflow_run_fields import (
build_workflow_run_for_archived_log_model,
build_workflow_run_for_log_model,
workflow_run_for_archived_log_fields,
workflow_run_for_log_fields,
)
from libs.helper import TimestampField
workflow_app_log_partial_fields = {
@ -34,6 +39,33 @@ def build_workflow_app_log_partial_model(api_or_ns: Namespace):
return api_or_ns.model("WorkflowAppLogPartial", copied_fields)
workflow_archived_log_partial_fields = {
"id": fields.String,
"workflow_run": fields.Nested(workflow_run_for_archived_log_fields, allow_null=True),
"trigger_metadata": fields.Raw,
"created_by_account": fields.Nested(simple_account_fields, attribute="created_by_account", allow_null=True),
"created_by_end_user": fields.Nested(simple_end_user_fields, attribute="created_by_end_user", allow_null=True),
"created_at": TimestampField,
}
def build_workflow_archived_log_partial_model(api_or_ns: Namespace):
"""Build the workflow archived log partial model for the API or Namespace."""
workflow_run_model = build_workflow_run_for_archived_log_model(api_or_ns)
simple_account_model = build_simple_account_model(api_or_ns)
simple_end_user_model = build_simple_end_user_model(api_or_ns)
copied_fields = workflow_archived_log_partial_fields.copy()
copied_fields["workflow_run"] = fields.Nested(workflow_run_model, allow_null=True)
copied_fields["created_by_account"] = fields.Nested(
simple_account_model, attribute="created_by_account", allow_null=True
)
copied_fields["created_by_end_user"] = fields.Nested(
simple_end_user_model, attribute="created_by_end_user", allow_null=True
)
return api_or_ns.model("WorkflowArchivedLogPartial", copied_fields)
workflow_app_log_pagination_fields = {
"page": fields.Integer,
"limit": fields.Integer,
@ -51,3 +83,21 @@ def build_workflow_app_log_pagination_model(api_or_ns: Namespace):
copied_fields = workflow_app_log_pagination_fields.copy()
copied_fields["data"] = fields.List(fields.Nested(workflow_app_log_partial_model))
return api_or_ns.model("WorkflowAppLogPagination", copied_fields)
workflow_archived_log_pagination_fields = {
"page": fields.Integer,
"limit": fields.Integer,
"total": fields.Integer,
"has_more": fields.Boolean,
"data": fields.List(fields.Nested(workflow_archived_log_partial_fields)),
}
def build_workflow_archived_log_pagination_model(api_or_ns: Namespace):
"""Build the workflow archived log pagination model for the API or Namespace."""
workflow_archived_log_partial_model = build_workflow_archived_log_partial_model(api_or_ns)
copied_fields = workflow_archived_log_pagination_fields.copy()
copied_fields["data"] = fields.List(fields.Nested(workflow_archived_log_partial_model))
return api_or_ns.model("WorkflowArchivedLogPagination", copied_fields)

View File

@ -23,6 +23,19 @@ def build_workflow_run_for_log_model(api_or_ns: Namespace):
return api_or_ns.model("WorkflowRunForLog", workflow_run_for_log_fields)
workflow_run_for_archived_log_fields = {
"id": fields.String,
"status": fields.String,
"triggered_from": fields.String,
"elapsed_time": fields.Float,
"total_tokens": fields.Integer,
}
def build_workflow_run_for_archived_log_model(api_or_ns: Namespace):
return api_or_ns.model("WorkflowRunForArchivedLog", workflow_run_for_archived_log_fields)
workflow_run_for_list_fields = {
"id": fields.String,
"version": fields.String,

View File

@ -7,7 +7,6 @@ to S3-compatible object storage.
import base64
import datetime
import gzip
import hashlib
import logging
from collections.abc import Generator
@ -39,7 +38,7 @@ class ArchiveStorage:
"""
S3-compatible storage client for archiving or exporting.
This client provides methods for storing and retrieving archived data in JSONL+gzip format.
This client provides methods for storing and retrieving archived data in JSONL format.
"""
def __init__(self, bucket: str):
@ -69,7 +68,10 @@ class ArchiveStorage:
aws_access_key_id=dify_config.ARCHIVE_STORAGE_ACCESS_KEY,
aws_secret_access_key=dify_config.ARCHIVE_STORAGE_SECRET_KEY,
region_name=dify_config.ARCHIVE_STORAGE_REGION,
config=Config(s3={"addressing_style": "path"}),
config=Config(
s3={"addressing_style": "path"},
max_pool_connections=64,
),
)
# Verify bucket accessibility
@ -100,12 +102,18 @@ class ArchiveStorage:
"""
checksum = hashlib.md5(data).hexdigest()
try:
self.client.put_object(
response = self.client.put_object(
Bucket=self.bucket,
Key=key,
Body=data,
ContentMD5=self._content_md5(data),
)
etag = response.get("ETag")
if not etag:
raise ArchiveStorageError(f"Missing ETag for '{key}'")
normalized_etag = etag.strip('"')
if normalized_etag != checksum:
raise ArchiveStorageError(f"ETag mismatch for '{key}': expected={checksum}, actual={normalized_etag}")
logger.debug("Uploaded object: %s (size=%d, checksum=%s)", key, len(data), checksum)
return checksum
except ClientError as e:
@ -240,19 +248,18 @@ class ArchiveStorage:
return base64.b64encode(hashlib.md5(data).digest()).decode()
@staticmethod
def serialize_to_jsonl_gz(records: list[dict[str, Any]]) -> bytes:
def serialize_to_jsonl(records: list[dict[str, Any]]) -> bytes:
"""
Serialize records to gzipped JSONL format.
Serialize records to JSONL format.
Args:
records: List of dictionaries to serialize
Returns:
Gzipped JSONL bytes
JSONL bytes
"""
lines = []
for record in records:
# Convert datetime objects to ISO format strings
serialized = ArchiveStorage._serialize_record(record)
lines.append(orjson.dumps(serialized))
@ -260,23 +267,22 @@ class ArchiveStorage:
if jsonl_content:
jsonl_content += b"\n"
return gzip.compress(jsonl_content)
return jsonl_content
@staticmethod
def deserialize_from_jsonl_gz(data: bytes) -> list[dict[str, Any]]:
def deserialize_from_jsonl(data: bytes) -> list[dict[str, Any]]:
"""
Deserialize gzipped JSONL data to records.
Deserialize JSONL data to records.
Args:
data: Gzipped JSONL bytes
data: JSONL bytes
Returns:
List of dictionaries
"""
jsonl_content = gzip.decompress(data)
records = []
for line in jsonl_content.splitlines():
for line in data.splitlines():
if line:
records.append(orjson.loads(line))

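serialize_to_jsonl and deserialize_from_jsonl are now a plain JSONL round trip; the gzip step was removed, and the archiver bundles the .jsonl files into a zip instead. A standalone sketch of the same round trip, minus the datetime coercion done by _serialize_record:

import orjson

def to_jsonl(records: list[dict]) -> bytes:
    # One JSON document per line, trailing newline when non-empty.
    lines = [orjson.dumps(record) for record in records]
    return b"\n".join(lines) + (b"\n" if lines else b"")

def from_jsonl(data: bytes) -> list[dict]:
    # Skip blank lines, parse each remaining line.
    return [orjson.loads(line) for line in data.splitlines() if line]

records = [{"id": "run-1", "status": "succeeded"}, {"id": "run-2", "status": "failed"}]
assert from_jsonl(to_jsonl(records)) == records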
View File

@ -0,0 +1,95 @@
"""create workflow_archive_logs
Revision ID: 9d77545f524e
Revises: 7df29de0f6be
Create Date: 2026-01-06 17:18:56.292479
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '9d77545f524e'
down_revision = '7df29de0f6be'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('workflow_archive_logs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('log_id', models.types.StringUUID(), nullable=True),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_run_id', models.types.StringUUID(), nullable=False),
sa.Column('created_by_role', sa.String(length=255), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('log_created_at', sa.DateTime(), nullable=True),
sa.Column('log_created_from', sa.String(length=255), nullable=True),
sa.Column('run_version', sa.String(length=255), nullable=False),
sa.Column('run_status', sa.String(length=255), nullable=False),
sa.Column('run_triggered_from', sa.String(length=255), nullable=False),
sa.Column('run_error', models.types.LongText(), nullable=True),
sa.Column('run_elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False),
sa.Column('run_total_tokens', sa.BigInteger(), server_default=sa.text('0'), nullable=False),
sa.Column('run_total_steps', sa.Integer(), server_default=sa.text('0'), nullable=True),
sa.Column('run_created_at', sa.DateTime(), nullable=False),
sa.Column('run_finished_at', sa.DateTime(), nullable=True),
sa.Column('run_exceptions_count', sa.Integer(), server_default=sa.text('0'), nullable=True),
sa.Column('trigger_metadata', models.types.LongText(), nullable=True),
sa.Column('archived_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_archive_log_pkey')
)
else:
op.create_table('workflow_archive_logs',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('log_id', models.types.StringUUID(), nullable=True),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_run_id', models.types.StringUUID(), nullable=False),
sa.Column('created_by_role', sa.String(length=255), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('log_created_at', sa.DateTime(), nullable=True),
sa.Column('log_created_from', sa.String(length=255), nullable=True),
sa.Column('run_version', sa.String(length=255), nullable=False),
sa.Column('run_status', sa.String(length=255), nullable=False),
sa.Column('run_triggered_from', sa.String(length=255), nullable=False),
sa.Column('run_error', models.types.LongText(), nullable=True),
sa.Column('run_elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False),
sa.Column('run_total_tokens', sa.BigInteger(), server_default=sa.text('0'), nullable=False),
sa.Column('run_total_steps', sa.Integer(), server_default=sa.text('0'), nullable=True),
sa.Column('run_created_at', sa.DateTime(), nullable=False),
sa.Column('run_finished_at', sa.DateTime(), nullable=True),
sa.Column('run_exceptions_count', sa.Integer(), server_default=sa.text('0'), nullable=True),
sa.Column('trigger_metadata', models.types.LongText(), nullable=True),
sa.Column('archived_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_archive_log_pkey')
)
with op.batch_alter_table('workflow_archive_logs', schema=None) as batch_op:
batch_op.create_index('workflow_archive_log_app_idx', ['tenant_id', 'app_id'], unique=False)
batch_op.create_index('workflow_archive_log_run_created_at_idx', ['run_created_at'], unique=False)
batch_op.create_index('workflow_archive_log_workflow_run_id_idx', ['workflow_run_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_archive_logs', schema=None) as batch_op:
batch_op.drop_index('workflow_archive_log_workflow_run_id_idx')
batch_op.drop_index('workflow_archive_log_run_created_at_idx')
batch_op.drop_index('workflow_archive_log_app_idx')
op.drop_table('workflow_archive_logs')
# ### end Alembic commands ###

View File

@ -100,6 +100,7 @@ from .workflow import (
Workflow,
WorkflowAppLog,
WorkflowAppLogCreatedFrom,
WorkflowArchiveLog,
WorkflowNodeExecutionModel,
WorkflowNodeExecutionOffload,
WorkflowNodeExecutionTriggeredFrom,
@ -197,6 +198,7 @@ __all__ = [
"Workflow",
"WorkflowAppLog",
"WorkflowAppLogCreatedFrom",
"WorkflowArchiveLog",
"WorkflowNodeExecutionModel",
"WorkflowNodeExecutionOffload",
"WorkflowNodeExecutionTriggeredFrom",

View File

@ -1163,6 +1163,69 @@ class WorkflowAppLog(TypeBase):
}
class WorkflowArchiveLog(TypeBase):
"""
Workflow archive log.
Stores essential workflow run snapshot data for archived app logs.
Field sources:
- Shared fields (tenant/app/workflow/run ids, created_by*): from WorkflowRun for consistency.
- log_* fields: from WorkflowAppLog when present; null if the run has no app log.
- run_* fields: workflow run snapshot fields from WorkflowRun.
- trigger_metadata: snapshot from WorkflowTriggerLog when present.
"""
__tablename__ = "workflow_archive_logs"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="workflow_archive_log_pkey"),
sa.Index("workflow_archive_log_app_idx", "tenant_id", "app_id"),
sa.Index("workflow_archive_log_workflow_run_id_idx", "workflow_run_id"),
sa.Index("workflow_archive_log_run_created_at_idx", "run_created_at"),
)
id: Mapped[str] = mapped_column(
StringUUID, insert_default=lambda: str(uuidv7()), default_factory=lambda: str(uuidv7()), init=False
)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
workflow_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
workflow_run_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
created_by_role: Mapped[str] = mapped_column(String(255), nullable=False)
created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
log_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True)
log_created_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
log_created_from: Mapped[str | None] = mapped_column(String(255), nullable=True)
run_version: Mapped[str] = mapped_column(String(255), nullable=False)
run_status: Mapped[str] = mapped_column(String(255), nullable=False)
run_triggered_from: Mapped[str] = mapped_column(String(255), nullable=False)
run_error: Mapped[str | None] = mapped_column(LongText, nullable=True)
run_elapsed_time: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("0"))
run_total_tokens: Mapped[int] = mapped_column(sa.BigInteger, server_default=sa.text("0"))
run_total_steps: Mapped[int] = mapped_column(sa.Integer, server_default=sa.text("0"), nullable=True)
run_created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False)
run_finished_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
run_exceptions_count: Mapped[int] = mapped_column(sa.Integer, server_default=sa.text("0"), nullable=True)
trigger_metadata: Mapped[str | None] = mapped_column(LongText, nullable=True)
archived_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)
@property
def workflow_run_summary(self) -> dict[str, Any]:
return {
"id": self.workflow_run_id,
"status": self.run_status,
"triggered_from": self.run_triggered_from,
"elapsed_time": self.run_elapsed_time,
"total_tokens": self.run_total_tokens,
}
class ConversationVariable(TypeBase):
__tablename__ = "workflow_conversation_variables"

View File

@ -45,7 +45,7 @@ from core.workflow.enums import WorkflowType
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from libs.infinite_scroll_pagination import InfiniteScrollPagination
from models.enums import WorkflowRunTriggeredFrom
from models.workflow import WorkflowRun
from models.workflow import WorkflowAppLog, WorkflowArchiveLog, WorkflowPause, WorkflowPauseReason, WorkflowRun
from repositories.entities.workflow_pause import WorkflowPauseEntity
from repositories.types import (
AverageInteractionStats,
@ -270,6 +270,38 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol):
"""
...
def get_archived_run_ids(
self,
session: Session,
run_ids: Sequence[str],
) -> set[str]:
"""
Fetch workflow run IDs that already have archive log records.
"""
...
def get_archived_logs_by_time_range(
self,
session: Session,
tenant_ids: Sequence[str] | None,
start_date: datetime,
end_date: datetime,
limit: int,
) -> Sequence[WorkflowArchiveLog]:
"""
Fetch archived workflow logs by time range for restore.
"""
...
def get_archived_log_by_run_id(
self,
run_id: str,
) -> WorkflowArchiveLog | None:
"""
Fetch a workflow archive log by workflow run ID.
"""
...
def delete_runs_with_related(
self,
runs: Sequence[WorkflowRun],
@ -282,6 +314,61 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol):
"""
...
def get_pause_records_by_run_id(
self,
session: Session,
run_id: str,
) -> Sequence[WorkflowPause]:
"""
Fetch workflow pause records by workflow run ID.
"""
...
def get_pause_reason_records_by_run_id(
self,
session: Session,
pause_ids: Sequence[str],
) -> Sequence[WorkflowPauseReason]:
"""
Fetch workflow pause reason records by pause IDs.
"""
...
def get_app_logs_by_run_id(
self,
session: Session,
run_id: str,
) -> Sequence[WorkflowAppLog]:
"""
Fetch workflow app logs by workflow run ID.
"""
...
def create_archive_logs(
self,
session: Session,
run: WorkflowRun,
app_logs: Sequence[WorkflowAppLog],
trigger_metadata: str | None,
) -> int:
"""
Create archive log records for a workflow run.
"""
...
def get_archived_runs_by_time_range(
self,
session: Session,
tenant_ids: Sequence[str] | None,
start_date: datetime,
end_date: datetime,
limit: int,
) -> Sequence[WorkflowRun]:
"""
Return workflow runs that already have archive logs, for cleanup of `workflow_runs`.
"""
...
def count_runs_with_related(
self,
runs: Sequence[WorkflowRun],

View File

@ -351,3 +351,27 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut
)
return int(node_executions_count), int(offloads_count)
@staticmethod
def get_by_run(
session: Session,
run_id: str,
) -> Sequence[WorkflowNodeExecutionModel]:
"""
Fetch node executions for a run using workflow_run_id.
"""
stmt = select(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.workflow_run_id == run_id)
return list(session.scalars(stmt))
@staticmethod
def get_offloads_by_execution_ids(
session: Session,
node_execution_ids: Sequence[str],
) -> Sequence[WorkflowNodeExecutionOffload]:
if not node_execution_ids:
return []
stmt = select(WorkflowNodeExecutionOffload).where(
WorkflowNodeExecutionOffload.node_execution_id.in_(node_execution_ids)
)
return list(session.scalars(stmt))

View File

@ -40,14 +40,7 @@ from libs.infinite_scroll_pagination import InfiniteScrollPagination
from libs.time_parser import get_time_threshold
from libs.uuid_utils import uuidv7
from models.enums import WorkflowRunTriggeredFrom
from models.workflow import (
WorkflowAppLog,
WorkflowPauseReason,
WorkflowRun,
)
from models.workflow import (
WorkflowPause as WorkflowPauseModel,
)
from models.workflow import WorkflowAppLog, WorkflowArchiveLog, WorkflowPause, WorkflowPauseReason, WorkflowRun
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
from repositories.entities.workflow_pause import WorkflowPauseEntity
from repositories.types import (
@ -369,6 +362,44 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
return session.scalars(stmt).all()
def get_archived_run_ids(
self,
session: Session,
run_ids: Sequence[str],
) -> set[str]:
if not run_ids:
return set()
stmt = select(WorkflowArchiveLog.workflow_run_id).where(WorkflowArchiveLog.workflow_run_id.in_(run_ids))
return set(session.scalars(stmt).all())
def get_archived_log_by_run_id(
self,
run_id: str,
) -> WorkflowArchiveLog | None:
with self._session_maker() as session:
stmt = select(WorkflowArchiveLog).where(WorkflowArchiveLog.workflow_run_id == run_id).limit(1)
return session.scalar(stmt)
def get_pause_records_by_run_id(
self,
session: Session,
run_id: str,
) -> Sequence[WorkflowPause]:
stmt = select(WorkflowPause).where(WorkflowPause.workflow_run_id == run_id)
return list(session.scalars(stmt))
def get_pause_reason_records_by_run_id(
self,
session: Session,
pause_ids: Sequence[str],
) -> Sequence[WorkflowPauseReason]:
if not pause_ids:
return []
stmt = select(WorkflowPauseReason).where(WorkflowPauseReason.pause_id.in_(pause_ids))
return list(session.scalars(stmt))
def delete_runs_with_related(
self,
runs: Sequence[WorkflowRun],
@ -396,9 +427,8 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
app_logs_result = session.execute(delete(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(run_ids)))
app_logs_deleted = cast(CursorResult, app_logs_result).rowcount or 0
pause_ids = session.scalars(
select(WorkflowPauseModel.id).where(WorkflowPauseModel.workflow_run_id.in_(run_ids))
).all()
pause_stmt = select(WorkflowPause.id).where(WorkflowPause.workflow_run_id.in_(run_ids))
pause_ids = session.scalars(pause_stmt).all()
pause_reasons_deleted = 0
pauses_deleted = 0
@ -407,7 +437,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
delete(WorkflowPauseReason).where(WorkflowPauseReason.pause_id.in_(pause_ids))
)
pause_reasons_deleted = cast(CursorResult, pause_reasons_result).rowcount or 0
pauses_result = session.execute(delete(WorkflowPauseModel).where(WorkflowPauseModel.id.in_(pause_ids)))
pauses_result = session.execute(delete(WorkflowPause).where(WorkflowPause.id.in_(pause_ids)))
pauses_deleted = cast(CursorResult, pauses_result).rowcount or 0
trigger_logs_deleted = delete_trigger_logs(session, run_ids) if delete_trigger_logs else 0
@ -427,6 +457,124 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
"pause_reasons": pause_reasons_deleted,
}
def get_app_logs_by_run_id(
self,
session: Session,
run_id: str,
) -> Sequence[WorkflowAppLog]:
stmt = select(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id == run_id)
return list(session.scalars(stmt))
def create_archive_logs(
self,
session: Session,
run: WorkflowRun,
app_logs: Sequence[WorkflowAppLog],
trigger_metadata: str | None,
) -> int:
if not app_logs:
archive_log = WorkflowArchiveLog(
log_id=None,
log_created_at=None,
log_created_from=None,
tenant_id=run.tenant_id,
app_id=run.app_id,
workflow_id=run.workflow_id,
workflow_run_id=run.id,
created_by_role=run.created_by_role,
created_by=run.created_by,
run_version=run.version,
run_status=run.status,
run_triggered_from=run.triggered_from,
run_error=run.error,
run_elapsed_time=run.elapsed_time,
run_total_tokens=run.total_tokens,
run_total_steps=run.total_steps,
run_created_at=run.created_at,
run_finished_at=run.finished_at,
run_exceptions_count=run.exceptions_count,
trigger_metadata=trigger_metadata,
)
session.add(archive_log)
return 1
archive_logs = [
WorkflowArchiveLog(
log_id=app_log.id,
log_created_at=app_log.created_at,
log_created_from=app_log.created_from,
tenant_id=run.tenant_id,
app_id=run.app_id,
workflow_id=run.workflow_id,
workflow_run_id=run.id,
created_by_role=run.created_by_role,
created_by=run.created_by,
run_version=run.version,
run_status=run.status,
run_triggered_from=run.triggered_from,
run_error=run.error,
run_elapsed_time=run.elapsed_time,
run_total_tokens=run.total_tokens,
run_total_steps=run.total_steps,
run_created_at=run.created_at,
run_finished_at=run.finished_at,
run_exceptions_count=run.exceptions_count,
trigger_metadata=trigger_metadata,
)
for app_log in app_logs
]
session.add_all(archive_logs)
return len(archive_logs)
def get_archived_runs_by_time_range(
self,
session: Session,
tenant_ids: Sequence[str] | None,
start_date: datetime,
end_date: datetime,
limit: int,
) -> Sequence[WorkflowRun]:
"""
Retrieves WorkflowRun records by joining workflow_archive_logs.
Used to identify runs that are already archived and ready for deletion.
"""
stmt = (
select(WorkflowRun)
.join(WorkflowArchiveLog, WorkflowArchiveLog.workflow_run_id == WorkflowRun.id)
.where(
WorkflowArchiveLog.run_created_at >= start_date,
WorkflowArchiveLog.run_created_at < end_date,
)
.order_by(WorkflowArchiveLog.run_created_at.asc(), WorkflowArchiveLog.workflow_run_id.asc())
.limit(limit)
)
if tenant_ids:
stmt = stmt.where(WorkflowArchiveLog.tenant_id.in_(tenant_ids))
return list(session.scalars(stmt))
def get_archived_logs_by_time_range(
self,
session: Session,
tenant_ids: Sequence[str] | None,
start_date: datetime,
end_date: datetime,
limit: int,
) -> Sequence[WorkflowArchiveLog]:
# Returns WorkflowArchiveLog rows directly; use this when workflow_runs may be deleted.
stmt = (
select(WorkflowArchiveLog)
.where(
WorkflowArchiveLog.run_created_at >= start_date,
WorkflowArchiveLog.run_created_at < end_date,
)
.order_by(WorkflowArchiveLog.run_created_at.asc(), WorkflowArchiveLog.workflow_run_id.asc())
.limit(limit)
)
if tenant_ids:
stmt = stmt.where(WorkflowArchiveLog.tenant_id.in_(tenant_ids))
return list(session.scalars(stmt))
def count_runs_with_related(
self,
runs: Sequence[WorkflowRun],
@ -459,7 +607,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
)
pause_ids = session.scalars(
select(WorkflowPauseModel.id).where(WorkflowPauseModel.workflow_run_id.in_(run_ids))
select(WorkflowPause.id).where(WorkflowPause.workflow_run_id.in_(run_ids))
).all()
pauses_count = len(pause_ids)
pause_reasons_count = 0
@ -511,9 +659,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
ValueError: If workflow_run_id is invalid or workflow run doesn't exist
RuntimeError: If workflow is already paused or in invalid state
"""
previous_pause_model_query = select(WorkflowPauseModel).where(
WorkflowPauseModel.workflow_run_id == workflow_run_id
)
previous_pause_model_query = select(WorkflowPause).where(WorkflowPause.workflow_run_id == workflow_run_id)
with self._session_maker() as session, session.begin():
# Get the workflow run
workflow_run = session.get(WorkflowRun, workflow_run_id)
@ -538,7 +684,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
# Upload the state file
# Create the pause record
pause_model = WorkflowPauseModel()
pause_model = WorkflowPause()
pause_model.id = str(uuidv7())
pause_model.workflow_id = workflow_run.workflow_id
pause_model.workflow_run_id = workflow_run.id
@ -710,13 +856,13 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
"""
with self._session_maker() as session, session.begin():
# Get the pause model by ID
pause_model = session.get(WorkflowPauseModel, pause_entity.id)
pause_model = session.get(WorkflowPause, pause_entity.id)
if pause_model is None:
raise _WorkflowRunError(f"WorkflowPause not found: {pause_entity.id}")
self._delete_pause_model(session, pause_model)
@staticmethod
def _delete_pause_model(session: Session, pause_model: WorkflowPauseModel):
def _delete_pause_model(session: Session, pause_model: WorkflowPause):
storage.delete(pause_model.state_object_key)
# Delete the pause record
@ -751,15 +897,15 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
_limit: int = limit or 1000
pruned_record_ids: list[str] = []
cond = or_(
WorkflowPauseModel.created_at < expiration,
WorkflowPause.created_at < expiration,
and_(
WorkflowPauseModel.resumed_at.is_not(null()),
WorkflowPauseModel.resumed_at < resumption_expiration,
WorkflowPause.resumed_at.is_not(null()),
WorkflowPause.resumed_at < resumption_expiration,
),
)
# First, collect pause records to delete with their state files
# Expired pauses (created before expiration time)
stmt = select(WorkflowPauseModel).where(cond).limit(_limit)
stmt = select(WorkflowPause).where(cond).limit(_limit)
with self._session_maker(expire_on_commit=False) as session:
# Old resumed pauses (resumed more than resumption_duration ago)
@ -770,7 +916,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
# Delete state files from storage
for pause in pauses_to_delete:
with self._session_maker(expire_on_commit=False) as session, session.begin():
# todo: this issues a separate query for each WorkflowPauseModel record.
# todo: this issues a separate query for each WorkflowPause record.
# consider batching this lookup.
try:
storage.delete(pause.state_object_key)
@ -1022,7 +1168,7 @@ class _PrivateWorkflowPauseEntity(WorkflowPauseEntity):
def __init__(
self,
*,
pause_model: WorkflowPauseModel,
pause_model: WorkflowPause,
reason_models: Sequence[WorkflowPauseReason],
human_input_form: Sequence = (),
) -> None:

View File

@ -46,6 +46,11 @@ class SQLAlchemyWorkflowTriggerLogRepository(WorkflowTriggerLogRepository):
return self.session.scalar(query)
def list_by_run_id(self, run_id: str) -> Sequence[WorkflowTriggerLog]:
"""List trigger logs for a workflow run."""
query = select(WorkflowTriggerLog).where(WorkflowTriggerLog.workflow_run_id == run_id)
return list(self.session.scalars(query).all())
def get_failed_for_retry(
self, tenant_id: str, max_retry_count: int = 3, limit: int = 100
) -> Sequence[WorkflowTriggerLog]:

View File

@ -0,0 +1 @@
"""Workflow run retention services."""

View File

@ -0,0 +1,519 @@
"""
Archive Paid Plan Workflow Run Logs Service.
This service archives workflow run logs for paid plan users older than the configured
retention period (default: 90 days) to S3-compatible storage.
Archived tables:
- workflow_runs
- workflow_app_logs
- workflow_node_executions
- workflow_node_execution_offload
- workflow_pauses
- workflow_pause_reasons
- workflow_trigger_logs
"""
import datetime
import io
import json
import logging
import time
import zipfile
from collections.abc import Sequence
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass, field
from typing import Any
import click
from sqlalchemy import inspect
from sqlalchemy.orm import Session, sessionmaker
from configs import dify_config
from core.workflow.enums import WorkflowType
from enums.cloud_plan import CloudPlan
from extensions.ext_database import db
from libs.archive_storage import (
ArchiveStorage,
ArchiveStorageNotConfiguredError,
get_archive_storage,
)
from models.workflow import WorkflowAppLog, WorkflowRun
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
from repositories.sqlalchemy_api_workflow_node_execution_repository import (
DifyAPISQLAlchemyWorkflowNodeExecutionRepository,
)
from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository
from services.billing_service import BillingService
from services.retention.workflow_run.constants import ARCHIVE_BUNDLE_NAME, ARCHIVE_SCHEMA_VERSION
logger = logging.getLogger(__name__)
@dataclass
class TableStats:
"""Statistics for a single archived table."""
table_name: str
row_count: int
checksum: str
size_bytes: int
@dataclass
class ArchiveResult:
"""Result of archiving a single workflow run."""
run_id: str
tenant_id: str
success: bool
tables: list[TableStats] = field(default_factory=list)
error: str | None = None
elapsed_time: float = 0.0
@dataclass
class ArchiveSummary:
"""Summary of the entire archive operation."""
total_runs_processed: int = 0
runs_archived: int = 0
runs_skipped: int = 0
runs_failed: int = 0
total_elapsed_time: float = 0.0
class WorkflowRunArchiver:
"""
Archive workflow run logs for paid plan users.
Storage Layout:
{tenant_id}/app_id={app_id}/year={YYYY}/month={MM}/workflow_run_id={run_id}/
└── archive.v1.0.zip
├── manifest.json
├── workflow_runs.jsonl
├── workflow_app_logs.jsonl
├── workflow_node_executions.jsonl
├── workflow_node_execution_offload.jsonl
├── workflow_pauses.jsonl
├── workflow_pause_reasons.jsonl
└── workflow_trigger_logs.jsonl
"""
ARCHIVED_TYPE = [
WorkflowType.WORKFLOW,
WorkflowType.RAG_PIPELINE,
]
ARCHIVED_TABLES = [
"workflow_runs",
"workflow_app_logs",
"workflow_node_executions",
"workflow_node_execution_offload",
"workflow_pauses",
"workflow_pause_reasons",
"workflow_trigger_logs",
]
def __init__(
self,
days: int = 90,
batch_size: int = 100,
start_from: datetime.datetime | None = None,
end_before: datetime.datetime | None = None,
workers: int = 1,
tenant_ids: Sequence[str] | None = None,
limit: int | None = None,
dry_run: bool = False,
delete_after_archive: bool = False,
workflow_run_repo: APIWorkflowRunRepository | None = None,
):
"""
Initialize the archiver.
Args:
days: Archive runs older than this many days
batch_size: Number of runs to process per batch
start_from: Optional start time (inclusive) for archiving
end_before: Optional end time (exclusive) for archiving
workers: Number of concurrent workflow runs to archive
tenant_ids: Optional tenant IDs for grayscale rollout
limit: Maximum number of runs to archive (None for unlimited)
dry_run: If True, only preview without making changes
delete_after_archive: If True, delete runs and related data after archiving
"""
self.days = days
self.batch_size = batch_size
if start_from or end_before:
if start_from is None or end_before is None:
raise ValueError("start_from and end_before must be provided together")
if start_from >= end_before:
raise ValueError("start_from must be earlier than end_before")
self.start_from = start_from.replace(tzinfo=datetime.UTC)
self.end_before = end_before.replace(tzinfo=datetime.UTC)
else:
self.start_from = None
self.end_before = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=days)
if workers < 1:
raise ValueError("workers must be at least 1")
self.workers = workers
self.tenant_ids = sorted(set(tenant_ids)) if tenant_ids else []
self.limit = limit
self.dry_run = dry_run
self.delete_after_archive = delete_after_archive
self.workflow_run_repo = workflow_run_repo
def run(self) -> ArchiveSummary:
"""
Main archiving loop.
Returns:
ArchiveSummary with statistics about the operation
"""
summary = ArchiveSummary()
start_time = time.time()
click.echo(
click.style(
self._build_start_message(),
fg="white",
)
)
# Initialize archive storage (will raise if not configured)
try:
if not self.dry_run:
storage = get_archive_storage()
else:
storage = None
except ArchiveStorageNotConfiguredError as e:
click.echo(click.style(f"Archive storage not configured: {e}", fg="red"))
return summary
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
repo = self._get_workflow_run_repo()
def _archive_with_session(run: WorkflowRun) -> ArchiveResult:
with session_maker() as session:
return self._archive_run(session, storage, run)
last_seen: tuple[datetime.datetime, str] | None = None
archived_count = 0
with ThreadPoolExecutor(max_workers=self.workers) as executor:
while True:
# Check limit
if self.limit and archived_count >= self.limit:
click.echo(click.style(f"Reached limit of {self.limit} runs", fg="yellow"))
break
# Fetch batch of runs
runs = self._get_runs_batch(last_seen)
if not runs:
break
run_ids = [run.id for run in runs]
with session_maker() as session:
archived_run_ids = repo.get_archived_run_ids(session, run_ids)
last_seen = (runs[-1].created_at, runs[-1].id)
# Filter to paid tenants only
tenant_ids = {run.tenant_id for run in runs}
paid_tenants = self._filter_paid_tenants(tenant_ids)
runs_to_process: list[WorkflowRun] = []
for run in runs:
summary.total_runs_processed += 1
# Skip non-paid tenants
if run.tenant_id not in paid_tenants:
summary.runs_skipped += 1
continue
# Skip already archived runs
if run.id in archived_run_ids:
summary.runs_skipped += 1
continue
# Check limit
if self.limit and archived_count + len(runs_to_process) >= self.limit:
break
runs_to_process.append(run)
if not runs_to_process:
continue
results = list(executor.map(_archive_with_session, runs_to_process))
for run, result in zip(runs_to_process, results):
if result.success:
summary.runs_archived += 1
archived_count += 1
click.echo(
click.style(
f"{'[DRY RUN] Would archive' if self.dry_run else 'Archived'} "
f"run {run.id} (tenant={run.tenant_id}, "
f"tables={len(result.tables)}, time={result.elapsed_time:.2f}s)",
fg="green",
)
)
else:
summary.runs_failed += 1
click.echo(
click.style(
f"Failed to archive run {run.id}: {result.error}",
fg="red",
)
)
summary.total_elapsed_time = time.time() - start_time
click.echo(
click.style(
f"{'[DRY RUN] ' if self.dry_run else ''}Archive complete: "
f"processed={summary.total_runs_processed}, archived={summary.runs_archived}, "
f"skipped={summary.runs_skipped}, failed={summary.runs_failed}, "
f"time={summary.total_elapsed_time:.2f}s",
fg="white",
)
)
return summary
def _get_runs_batch(
self,
last_seen: tuple[datetime.datetime, str] | None,
) -> Sequence[WorkflowRun]:
"""Fetch a batch of workflow runs to archive."""
repo = self._get_workflow_run_repo()
return repo.get_runs_batch_by_time_range(
start_from=self.start_from,
end_before=self.end_before,
last_seen=last_seen,
batch_size=self.batch_size,
run_types=self.ARCHIVED_TYPE,
tenant_ids=self.tenant_ids or None,
)
def _build_start_message(self) -> str:
range_desc = f"before {self.end_before.isoformat()}"
if self.start_from:
range_desc = f"between {self.start_from.isoformat()} and {self.end_before.isoformat()}"
return (
f"{'[DRY RUN] ' if self.dry_run else ''}Starting workflow run archiving "
f"for runs {range_desc} "
f"(batch_size={self.batch_size}, tenant_ids={','.join(self.tenant_ids) or 'all'})"
)
def _filter_paid_tenants(self, tenant_ids: set[str]) -> set[str]:
"""Filter tenant IDs to only include paid tenants."""
if not dify_config.BILLING_ENABLED:
# If billing is not enabled, treat all tenants as paid
return tenant_ids
if not tenant_ids:
return set()
try:
bulk_info = BillingService.get_plan_bulk_with_cache(list(tenant_ids))
except Exception:
logger.exception("Failed to fetch billing plans for tenants")
# On error, skip all tenants in this batch
return set()
# Filter to paid tenants (currently the professional and team plans)
paid = set()
for tid, info in bulk_info.items():
if info and info.get("plan") in (CloudPlan.PROFESSIONAL, CloudPlan.TEAM):
paid.add(tid)
return paid
def _archive_run(
self,
session: Session,
storage: ArchiveStorage | None,
run: WorkflowRun,
) -> ArchiveResult:
"""Archive a single workflow run."""
start_time = time.time()
result = ArchiveResult(run_id=run.id, tenant_id=run.tenant_id, success=False)
try:
# Extract data from all tables
table_data, app_logs, trigger_metadata = self._extract_data(session, run)
if self.dry_run:
# In dry run, just report what would be archived
for table_name in self.ARCHIVED_TABLES:
records = table_data.get(table_name, [])
result.tables.append(
TableStats(
table_name=table_name,
row_count=len(records),
checksum="",
size_bytes=0,
)
)
result.success = True
else:
if storage is None:
raise ArchiveStorageNotConfiguredError("Archive storage not configured")
archive_key = self._get_archive_key(run)
# Serialize tables for the archive bundle
table_stats: list[TableStats] = []
table_payloads: dict[str, bytes] = {}
for table_name in self.ARCHIVED_TABLES:
records = table_data.get(table_name, [])
data = ArchiveStorage.serialize_to_jsonl(records)
table_payloads[table_name] = data
checksum = ArchiveStorage.compute_checksum(data)
table_stats.append(
TableStats(
table_name=table_name,
row_count=len(records),
checksum=checksum,
size_bytes=len(data),
)
)
# Generate and upload archive bundle
manifest = self._generate_manifest(run, table_stats)
manifest_data = json.dumps(manifest, indent=2, default=str).encode("utf-8")
archive_data = self._build_archive_bundle(manifest_data, table_payloads)
storage.put_object(archive_key, archive_data)
repo = self._get_workflow_run_repo()
archived_log_count = repo.create_archive_logs(session, run, app_logs, trigger_metadata)
session.commit()
deleted_counts = None
if self.delete_after_archive:
deleted_counts = repo.delete_runs_with_related(
[run],
delete_node_executions=self._delete_node_executions,
delete_trigger_logs=self._delete_trigger_logs,
)
logger.info(
"Archived workflow run %s: tables=%s, archived_logs=%s, deleted=%s",
run.id,
{s.table_name: s.row_count for s in table_stats},
archived_log_count,
deleted_counts,
)
result.tables = table_stats
result.success = True
except Exception as e:
logger.exception("Failed to archive workflow run %s", run.id)
result.error = str(e)
session.rollback()
result.elapsed_time = time.time() - start_time
return result
def _extract_data(
self,
session: Session,
run: WorkflowRun,
) -> tuple[dict[str, list[dict[str, Any]]], Sequence[WorkflowAppLog], str | None]:
table_data: dict[str, list[dict[str, Any]]] = {}
table_data["workflow_runs"] = [self._row_to_dict(run)]
repo = self._get_workflow_run_repo()
app_logs = repo.get_app_logs_by_run_id(session, run.id)
table_data["workflow_app_logs"] = [self._row_to_dict(row) for row in app_logs]
node_exec_records = DifyAPISQLAlchemyWorkflowNodeExecutionRepository.get_by_run(session, run.id)
node_exec_ids = [record.id for record in node_exec_records]
offload_records = DifyAPISQLAlchemyWorkflowNodeExecutionRepository.get_offloads_by_execution_ids(
session,
node_exec_ids,
)
table_data["workflow_node_executions"] = [self._row_to_dict(row) for row in node_exec_records]
table_data["workflow_node_execution_offload"] = [self._row_to_dict(row) for row in offload_records]
repo = self._get_workflow_run_repo()
pause_records = repo.get_pause_records_by_run_id(session, run.id)
pause_ids = [pause.id for pause in pause_records]
pause_reason_records = repo.get_pause_reason_records_by_run_id(
session,
pause_ids,
)
table_data["workflow_pauses"] = [self._row_to_dict(row) for row in pause_records]
table_data["workflow_pause_reasons"] = [self._row_to_dict(row) for row in pause_reason_records]
trigger_repo = SQLAlchemyWorkflowTriggerLogRepository(session)
trigger_records = trigger_repo.list_by_run_id(run.id)
table_data["workflow_trigger_logs"] = [self._row_to_dict(row) for row in trigger_records]
trigger_metadata = trigger_records[0].trigger_metadata if trigger_records else None
return table_data, app_logs, trigger_metadata
@staticmethod
def _row_to_dict(row: Any) -> dict[str, Any]:
mapper = inspect(row).mapper
return {str(column.name): getattr(row, mapper.get_property_by_column(column).key) for column in mapper.columns}
def _get_archive_key(self, run: WorkflowRun) -> str:
"""Get the storage key for the archive bundle."""
created_at = run.created_at
prefix = (
f"{run.tenant_id}/app_id={run.app_id}/year={created_at.strftime('%Y')}/"
f"month={created_at.strftime('%m')}/workflow_run_id={run.id}"
)
return f"{prefix}/{ARCHIVE_BUNDLE_NAME}"
def _generate_manifest(
self,
run: WorkflowRun,
table_stats: list[TableStats],
) -> dict[str, Any]:
"""Generate a manifest for the archived workflow run."""
return {
"schema_version": ARCHIVE_SCHEMA_VERSION,
"workflow_run_id": run.id,
"tenant_id": run.tenant_id,
"app_id": run.app_id,
"workflow_id": run.workflow_id,
"created_at": run.created_at.isoformat(),
"archived_at": datetime.datetime.now(datetime.UTC).isoformat(),
"tables": {
stat.table_name: {
"row_count": stat.row_count,
"checksum": stat.checksum,
"size_bytes": stat.size_bytes,
}
for stat in table_stats
},
}
def _build_archive_bundle(self, manifest_data: bytes, table_payloads: dict[str, bytes]) -> bytes:
buffer = io.BytesIO()
with zipfile.ZipFile(buffer, mode="w", compression=zipfile.ZIP_DEFLATED) as archive:
archive.writestr("manifest.json", manifest_data)
for table_name in self.ARCHIVED_TABLES:
data = table_payloads.get(table_name)
if data is None:
raise ValueError(f"Missing archive payload for {table_name}")
archive.writestr(f"{table_name}.jsonl", data)
return buffer.getvalue()
def _delete_trigger_logs(self, session: Session, run_ids: Sequence[str]) -> int:
trigger_repo = SQLAlchemyWorkflowTriggerLogRepository(session)
return trigger_repo.delete_by_run_ids(run_ids)
def _delete_node_executions(self, session: Session, runs: Sequence[WorkflowRun]) -> tuple[int, int]:
run_ids = [run.id for run in runs]
return DifyAPISQLAlchemyWorkflowNodeExecutionRepository.delete_by_runs(session, run_ids)
def _get_workflow_run_repo(self) -> APIWorkflowRunRepository:
if self.workflow_run_repo is not None:
return self.workflow_run_repo
from repositories.factory import DifyAPIRepositoryFactory
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
self.workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker)
return self.workflow_run_repo
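
A minimal usage sketch of the archiver, assuming it is run inside the Flask application context (for example from flask shell or a console command); the tenant ID and limits below are placeholders, not part of this change.

from services.retention.workflow_run.archive_paid_plan_workflow_run import WorkflowRunArchiver

# Preview which runs would be archived for one (hypothetical) tenant.
preview = WorkflowRunArchiver(
    days=90,
    batch_size=100,
    tenant_ids=["tenant-123"],  # placeholder tenant ID
    limit=50,
    dry_run=True,
).run()
print(preview.total_runs_processed, preview.runs_archived, preview.runs_skipped)

# Archive for real, removing the source rows once each bundle has been uploaded.
summary = WorkflowRunArchiver(
    days=90,
    batch_size=100,
    tenant_ids=["tenant-123"],
    delete_after_archive=True,
).run()
print(f"archived={summary.runs_archived}, failed={summary.runs_failed}")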


@ -0,0 +1,2 @@
ARCHIVE_SCHEMA_VERSION = "1.0"
ARCHIVE_BUNDLE_NAME = f"archive.v{ARCHIVE_SCHEMA_VERSION}.zip"
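
With these constants, a run archived in January 2024 lands under a key of the form shown below; the IDs are hypothetical and match the placeholders used by the unit tests further down.

# tenant-123/app_id=app-999/year=2024/month=01/workflow_run_id=run-456/archive.v1.0.zip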


@ -0,0 +1,132 @@
"""
Delete Archived Workflow Run Service.
This service deletes archived workflow run data from the database while keeping
archive logs intact.
"""
import time
from collections.abc import Sequence
from dataclasses import dataclass, field
from datetime import datetime
from sqlalchemy.orm import Session, sessionmaker
from extensions.ext_database import db
from models.workflow import WorkflowRun
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
from repositories.sqlalchemy_api_workflow_node_execution_repository import (
DifyAPISQLAlchemyWorkflowNodeExecutionRepository,
)
from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository
@dataclass
class DeleteResult:
run_id: str
tenant_id: str
success: bool
deleted_counts: dict[str, int] = field(default_factory=dict)
error: str | None = None
elapsed_time: float = 0.0
class ArchivedWorkflowRunDeletion:
def __init__(self, dry_run: bool = False):
self.dry_run = dry_run
self.workflow_run_repo: APIWorkflowRunRepository | None = None
def delete_by_run_id(self, run_id: str) -> DeleteResult:
start_time = time.time()
result = DeleteResult(run_id=run_id, tenant_id="", success=False)
repo = self._get_workflow_run_repo()
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
with session_maker() as session:
run = session.get(WorkflowRun, run_id)
if not run:
result.error = f"Workflow run {run_id} not found"
result.elapsed_time = time.time() - start_time
return result
result.tenant_id = run.tenant_id
if not repo.get_archived_run_ids(session, [run.id]):
result.error = f"Workflow run {run_id} is not archived"
result.elapsed_time = time.time() - start_time
return result
result = self._delete_run(run)
result.elapsed_time = time.time() - start_time
return result
def delete_batch(
self,
tenant_ids: list[str] | None,
start_date: datetime,
end_date: datetime,
limit: int = 100,
) -> list[DeleteResult]:
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
results: list[DeleteResult] = []
repo = self._get_workflow_run_repo()
with session_maker() as session:
runs = list(
repo.get_archived_runs_by_time_range(
session=session,
tenant_ids=tenant_ids,
start_date=start_date,
end_date=end_date,
limit=limit,
)
)
for run in runs:
results.append(self._delete_run(run))
return results
def _delete_run(self, run: WorkflowRun) -> DeleteResult:
start_time = time.time()
result = DeleteResult(run_id=run.id, tenant_id=run.tenant_id, success=False)
if self.dry_run:
result.success = True
result.elapsed_time = time.time() - start_time
return result
repo = self._get_workflow_run_repo()
try:
deleted_counts = repo.delete_runs_with_related(
[run],
delete_node_executions=self._delete_node_executions,
delete_trigger_logs=self._delete_trigger_logs,
)
result.deleted_counts = deleted_counts
result.success = True
except Exception as e:
result.error = str(e)
result.elapsed_time = time.time() - start_time
return result
@staticmethod
def _delete_trigger_logs(session: Session, run_ids: Sequence[str]) -> int:
trigger_repo = SQLAlchemyWorkflowTriggerLogRepository(session)
return trigger_repo.delete_by_run_ids(run_ids)
@staticmethod
def _delete_node_executions(
session: Session,
runs: Sequence[WorkflowRun],
) -> tuple[int, int]:
run_ids = [run.id for run in runs]
return DifyAPISQLAlchemyWorkflowNodeExecutionRepository.delete_by_runs(session, run_ids)
def _get_workflow_run_repo(self) -> APIWorkflowRunRepository:
if self.workflow_run_repo is not None:
return self.workflow_run_repo
from repositories.factory import DifyAPIRepositoryFactory
self.workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(
sessionmaker(bind=db.engine, expire_on_commit=False)
)
return self.workflow_run_repo
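
A minimal sketch of driving the deletion service, assuming an active application context and that the run has already been archived; the run ID, tenant ID, and dates are placeholders.

from datetime import datetime

from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion

deleter = ArchivedWorkflowRunDeletion(dry_run=True)

# Preview deletion of a single archived run (placeholder ID).
single = deleter.delete_by_run_id("run-456")
print(single.success, single.error, single.deleted_counts)

# Preview deletion of up to 100 archived runs for one tenant in a time window.
results = deleter.delete_batch(
    tenant_ids=["tenant-123"],
    start_date=datetime(2024, 1, 1),
    end_date=datetime(2024, 4, 1),
    limit=100,
)
print(sum(1 for r in results if r.success), "of", len(results), "runs would be deleted")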


@ -0,0 +1,476 @@
"""
Restore Archived Workflow Run Service.
This service restores archived workflow run data from S3-compatible storage
back to the database.
"""
import io
import json
import logging
import time
import zipfile
from collections.abc import Callable
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass
from datetime import datetime
from typing import Any, cast
import click
from sqlalchemy.dialects.postgresql import insert as pg_insert
from sqlalchemy.engine import CursorResult
from sqlalchemy.orm import DeclarativeBase, Session, sessionmaker
from extensions.ext_database import db
from libs.archive_storage import (
ArchiveStorage,
ArchiveStorageNotConfiguredError,
get_archive_storage,
)
from models.trigger import WorkflowTriggerLog
from models.workflow import (
WorkflowAppLog,
WorkflowArchiveLog,
WorkflowNodeExecutionModel,
WorkflowNodeExecutionOffload,
WorkflowPause,
WorkflowPauseReason,
WorkflowRun,
)
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
from repositories.factory import DifyAPIRepositoryFactory
from services.retention.workflow_run.constants import ARCHIVE_BUNDLE_NAME
logger = logging.getLogger(__name__)
# Mapping of table names to SQLAlchemy models
TABLE_MODELS = {
"workflow_runs": WorkflowRun,
"workflow_app_logs": WorkflowAppLog,
"workflow_node_executions": WorkflowNodeExecutionModel,
"workflow_node_execution_offload": WorkflowNodeExecutionOffload,
"workflow_pauses": WorkflowPause,
"workflow_pause_reasons": WorkflowPauseReason,
"workflow_trigger_logs": WorkflowTriggerLog,
}
SchemaMapper = Callable[[dict[str, Any]], dict[str, Any]]
SCHEMA_MAPPERS: dict[str, dict[str, SchemaMapper]] = {
"1.0": {},
}
@dataclass
class RestoreResult:
"""Result of restoring a single workflow run."""
run_id: str
tenant_id: str
success: bool
restored_counts: dict[str, int]
error: str | None = None
elapsed_time: float = 0.0
class WorkflowRunRestore:
"""
Restore archived workflow run data from storage to database.
This service reads archived data from storage and restores it to the
database tables. It handles idempotency by skipping records that already
exist in the database.
"""
def __init__(self, dry_run: bool = False, workers: int = 1):
"""
Initialize the restore service.
Args:
dry_run: If True, only preview without making changes
workers: Number of concurrent workflow runs to restore
"""
self.dry_run = dry_run
if workers < 1:
raise ValueError("workers must be at least 1")
self.workers = workers
self.workflow_run_repo: APIWorkflowRunRepository | None = None
def _restore_from_run(
self,
run: WorkflowRun | WorkflowArchiveLog,
*,
session_maker: sessionmaker,
) -> RestoreResult:
start_time = time.time()
run_id = run.workflow_run_id if isinstance(run, WorkflowArchiveLog) else run.id
created_at = run.run_created_at if isinstance(run, WorkflowArchiveLog) else run.created_at
result = RestoreResult(
run_id=run_id,
tenant_id=run.tenant_id,
success=False,
restored_counts={},
)
if not self.dry_run:
click.echo(
click.style(
f"Starting restore for workflow run {run_id} (tenant={run.tenant_id})",
fg="white",
)
)
try:
storage = get_archive_storage()
except ArchiveStorageNotConfiguredError as e:
result.error = str(e)
click.echo(click.style(f"Archive storage not configured: {e}", fg="red"))
result.elapsed_time = time.time() - start_time
return result
prefix = (
f"{run.tenant_id}/app_id={run.app_id}/year={created_at.strftime('%Y')}/"
f"month={created_at.strftime('%m')}/workflow_run_id={run_id}"
)
archive_key = f"{prefix}/{ARCHIVE_BUNDLE_NAME}"
try:
archive_data = storage.get_object(archive_key)
except FileNotFoundError:
result.error = f"Archive bundle not found: {archive_key}"
click.echo(click.style(result.error, fg="red"))
result.elapsed_time = time.time() - start_time
return result
with session_maker() as session:
try:
with zipfile.ZipFile(io.BytesIO(archive_data), mode="r") as archive:
try:
manifest = self._load_manifest_from_zip(archive)
except ValueError as e:
result.error = f"Archive bundle invalid: {e}"
click.echo(click.style(result.error, fg="red"))
return result
tables = manifest.get("tables", {})
schema_version = self._get_schema_version(manifest)
for table_name, info in tables.items():
row_count = info.get("row_count", 0)
if row_count == 0:
result.restored_counts[table_name] = 0
continue
if self.dry_run:
result.restored_counts[table_name] = row_count
continue
member_path = f"{table_name}.jsonl"
try:
data = archive.read(member_path)
except KeyError:
click.echo(
click.style(
f" Warning: Table data not found in archive: {member_path}",
fg="yellow",
)
)
result.restored_counts[table_name] = 0
continue
records = ArchiveStorage.deserialize_from_jsonl(data)
restored = self._restore_table_records(
session,
table_name,
records,
schema_version=schema_version,
)
result.restored_counts[table_name] = restored
if not self.dry_run:
click.echo(
click.style(
f" Restored {restored}/{len(records)} records to {table_name}",
fg="white",
)
)
# Verify row counts match manifest
manifest_total = sum(info.get("row_count", 0) for info in tables.values())
restored_total = sum(result.restored_counts.values())
if not self.dry_run:
# Note: restored count might be less than manifest count if records already exist
logger.info(
"Restore verification: manifest_total=%d, restored_total=%d",
manifest_total,
restored_total,
)
session.commit()
result.success = True
if not self.dry_run:
click.echo(
click.style(
f"Completed restore for workflow run {run_id}: restored={result.restored_counts}",
fg="green",
)
)
except Exception as e:
logger.exception("Failed to restore workflow run %s", run_id)
result.error = str(e)
session.rollback()
click.echo(click.style(f"Restore failed: {e}", fg="red"))
result.elapsed_time = time.time() - start_time
return result
def _get_workflow_run_repo(self) -> APIWorkflowRunRepository:
if self.workflow_run_repo is not None:
return self.workflow_run_repo
self.workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(
sessionmaker(bind=db.engine, expire_on_commit=False)
)
return self.workflow_run_repo
@staticmethod
def _load_manifest_from_zip(archive: zipfile.ZipFile) -> dict[str, Any]:
try:
data = archive.read("manifest.json")
except KeyError as e:
raise ValueError("manifest.json missing from archive bundle") from e
return json.loads(data.decode("utf-8"))
def _restore_table_records(
self,
session: Session,
table_name: str,
records: list[dict[str, Any]],
*,
schema_version: str,
) -> int:
"""
Restore records to a table.
Uses INSERT ... ON CONFLICT DO NOTHING for idempotency.
Args:
session: Database session
table_name: Name of the table
records: List of record dictionaries
schema_version: Archived schema version from manifest
Returns:
Number of records actually inserted
"""
if not records:
return 0
model = TABLE_MODELS.get(table_name)
if not model:
logger.warning("Unknown table: %s", table_name)
return 0
column_names, required_columns, non_nullable_with_default = self._get_model_column_info(model)
unknown_fields: set[str] = set()
# Apply schema mapping, filter to current columns, then convert datetimes
converted_records = []
for record in records:
mapped = self._apply_schema_mapping(table_name, schema_version, record)
unknown_fields.update(set(mapped.keys()) - column_names)
filtered = {key: value for key, value in mapped.items() if key in column_names}
for key in non_nullable_with_default:
if key in filtered and filtered[key] is None:
filtered.pop(key)
missing_required = [key for key in required_columns if key not in filtered or filtered.get(key) is None]
if missing_required:
missing_cols = ", ".join(sorted(missing_required))
raise ValueError(
f"Missing required columns for {table_name} (schema_version={schema_version}): {missing_cols}"
)
converted = self._convert_datetime_fields(filtered, model)
converted_records.append(converted)
if unknown_fields:
logger.warning(
"Dropped unknown columns for %s (schema_version=%s): %s",
table_name,
schema_version,
", ".join(sorted(unknown_fields)),
)
# Use INSERT ... ON CONFLICT DO NOTHING for idempotency
stmt = pg_insert(model).values(converted_records)
stmt = stmt.on_conflict_do_nothing(index_elements=["id"])
result = session.execute(stmt)
return cast(CursorResult, result).rowcount or 0
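# For reference, the statement built above is roughly equivalent to the following SQL
# (table and columns shown for workflow_pauses, purely illustrative):
#
#   INSERT INTO workflow_pauses (id, workflow_run_id, ...)
#   VALUES (:id, :workflow_run_id, ...)
#   ON CONFLICT (id) DO NOTHING
#
# so re-running a restore never duplicates rows that already exist, and the
# returned rowcount only counts rows that were actually inserted.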
def _convert_datetime_fields(
self,
record: dict[str, Any],
model: type[DeclarativeBase] | Any,
) -> dict[str, Any]:
"""Convert ISO datetime strings to datetime objects."""
from sqlalchemy import DateTime
result = dict(record)
for column in model.__table__.columns:
if isinstance(column.type, DateTime):
value = result.get(column.key)
if isinstance(value, str):
try:
result[column.key] = datetime.fromisoformat(value)
except ValueError:
pass
return result
def _get_schema_version(self, manifest: dict[str, Any]) -> str:
schema_version = manifest.get("schema_version")
if not schema_version:
logger.warning("Manifest missing schema_version; defaulting to 1.0")
schema_version = "1.0"
schema_version = str(schema_version)
if schema_version not in SCHEMA_MAPPERS:
raise ValueError(f"Unsupported schema_version {schema_version}. Add a mapping before restoring.")
return schema_version
def _apply_schema_mapping(
self,
table_name: str,
schema_version: str,
record: dict[str, Any],
) -> dict[str, Any]:
# Keep hook for forward/backward compatibility when schema evolves.
mapper = SCHEMA_MAPPERS.get(schema_version, {}).get(table_name)
if mapper is None:
return dict(record)
return mapper(record)
def _get_model_column_info(
self,
model: type[DeclarativeBase] | Any,
) -> tuple[set[str], set[str], set[str]]:
columns = list(model.__table__.columns)
column_names = {column.key for column in columns}
required_columns = {
column.key
for column in columns
if not column.nullable
and column.default is None
and column.server_default is None
and not column.autoincrement
}
non_nullable_with_default = {
column.key
for column in columns
if not column.nullable
and (column.default is not None or column.server_default is not None or column.autoincrement)
}
return column_names, required_columns, non_nullable_with_default
def restore_batch(
self,
tenant_ids: list[str] | None,
start_date: datetime,
end_date: datetime,
limit: int = 100,
) -> list[RestoreResult]:
"""
Restore multiple workflow runs by time range.
Args:
tenant_ids: Optional tenant IDs
start_date: Start date filter
end_date: End date filter
limit: Maximum number of runs to restore (default: 100)
Returns:
List of RestoreResult objects
"""
results: list[RestoreResult] = []
if tenant_ids is not None and not tenant_ids:
return results
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
repo = self._get_workflow_run_repo()
with session_maker() as session:
archive_logs = repo.get_archived_logs_by_time_range(
session=session,
tenant_ids=tenant_ids,
start_date=start_date,
end_date=end_date,
limit=limit,
)
click.echo(
click.style(
f"Found {len(archive_logs)} archived workflow runs to restore",
fg="white",
)
)
def _restore_with_session(archive_log: WorkflowArchiveLog) -> RestoreResult:
return self._restore_from_run(
archive_log,
session_maker=session_maker,
)
with ThreadPoolExecutor(max_workers=self.workers) as executor:
results = list(executor.map(_restore_with_session, archive_logs))
total_counts: dict[str, int] = {}
for result in results:
for table_name, count in result.restored_counts.items():
total_counts[table_name] = total_counts.get(table_name, 0) + count
success_count = sum(1 for result in results if result.success)
if self.dry_run:
click.echo(
click.style(
f"[DRY RUN] Would restore {len(results)} workflow runs: totals={total_counts}",
fg="yellow",
)
)
else:
click.echo(
click.style(
f"Restored {success_count}/{len(results)} workflow runs: totals={total_counts}",
fg="green",
)
)
return results
def restore_by_run_id(
self,
run_id: str,
) -> RestoreResult:
"""
Restore a single workflow run by run ID.
"""
repo = self._get_workflow_run_repo()
archive_log = repo.get_archived_log_by_run_id(run_id)
if not archive_log:
click.echo(click.style(f"Workflow run archive {run_id} not found", fg="red"))
return RestoreResult(
run_id=run_id,
tenant_id="",
success=False,
restored_counts={},
error=f"Workflow run archive {run_id} not found",
)
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
result = self._restore_from_run(archive_log, session_maker=session_maker)
if self.dry_run and result.success:
click.echo(
click.style(
f"[DRY RUN] Would restore workflow run {run_id}: totals={result.restored_counts}",
fg="yellow",
)
)
return result
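
A minimal sketch of driving the restore service, assuming archive storage is configured and an application context is active; IDs and dates are placeholders.

from datetime import datetime

from services.retention.workflow_run.restore_archived_workflow_run import WorkflowRunRestore

restore = WorkflowRunRestore(dry_run=True, workers=2)

# Preview restoring a single archived run (placeholder ID).
result = restore.restore_by_run_id("run-456")
print(result.success, result.restored_counts)

# Preview restoring up to 100 archived runs for one tenant in a time window.
results = restore.restore_batch(
    tenant_ids=["tenant-123"],
    start_date=datetime(2024, 1, 1),
    end_date=datetime(2024, 4, 1),
    limit=100,
)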


@ -7,7 +7,7 @@ from sqlalchemy import and_, func, or_, select
from sqlalchemy.orm import Session
from core.workflow.enums import WorkflowExecutionStatus
from models import Account, App, EndUser, WorkflowAppLog, WorkflowRun
from models import Account, App, EndUser, WorkflowAppLog, WorkflowArchiveLog, WorkflowRun
from models.enums import AppTriggerType, CreatorUserRole
from models.trigger import WorkflowTriggerLog
from services.plugin.plugin_service import PluginService
@ -173,7 +173,80 @@ class WorkflowAppService:
"data": items,
}
def handle_trigger_metadata(self, tenant_id: str, meta_val: str) -> dict[str, Any]:
def get_paginate_workflow_archive_logs(
self,
*,
session: Session,
app_model: App,
page: int = 1,
limit: int = 20,
):
"""
Get paginated workflow archive logs using SQLAlchemy 2.0 style.
"""
stmt = select(WorkflowArchiveLog).where(
WorkflowArchiveLog.tenant_id == app_model.tenant_id,
WorkflowArchiveLog.app_id == app_model.id,
WorkflowArchiveLog.log_id.isnot(None),
)
stmt = stmt.order_by(WorkflowArchiveLog.run_created_at.desc())
count_stmt = select(func.count()).select_from(stmt.subquery())
total = session.scalar(count_stmt) or 0
offset_stmt = stmt.offset((page - 1) * limit).limit(limit)
logs = list(session.scalars(offset_stmt).all())
account_ids = {log.created_by for log in logs if log.created_by_role == CreatorUserRole.ACCOUNT}
end_user_ids = {log.created_by for log in logs if log.created_by_role == CreatorUserRole.END_USER}
accounts_by_id = {}
if account_ids:
accounts_by_id = {
account.id: account
for account in session.scalars(select(Account).where(Account.id.in_(account_ids))).all()
}
end_users_by_id = {}
if end_user_ids:
end_users_by_id = {
end_user.id: end_user
for end_user in session.scalars(select(EndUser).where(EndUser.id.in_(end_user_ids))).all()
}
items = []
for log in logs:
if log.created_by_role == CreatorUserRole.ACCOUNT:
created_by_account = accounts_by_id.get(log.created_by)
created_by_end_user = None
elif log.created_by_role == CreatorUserRole.END_USER:
created_by_account = None
created_by_end_user = end_users_by_id.get(log.created_by)
else:
created_by_account = None
created_by_end_user = None
items.append(
{
"id": log.id,
"workflow_run": log.workflow_run_summary,
"trigger_metadata": self.handle_trigger_metadata(app_model.tenant_id, log.trigger_metadata),
"created_by_account": created_by_account,
"created_by_end_user": created_by_end_user,
"created_at": log.log_created_at,
}
)
return {
"page": page,
"limit": limit,
"total": total,
"has_more": total > page * limit,
"data": items,
}
def handle_trigger_metadata(self, tenant_id: str, meta_val: str | None) -> dict[str, Any]:
metadata: dict[str, Any] | None = self._safe_json_loads(meta_val)
if not metadata:
return {}


@ -9,7 +9,9 @@ from sqlalchemy import delete
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import sessionmaker
from configs import dify_config
from extensions.ext_database import db
from libs.archive_storage import ArchiveStorageNotConfiguredError, get_archive_storage
from models import (
ApiToken,
AppAnnotationHitHistory,
@ -40,6 +42,7 @@ from models.workflow import (
ConversationVariable,
Workflow,
WorkflowAppLog,
WorkflowArchiveLog,
)
from repositories.factory import DifyAPIRepositoryFactory
@ -64,6 +67,9 @@ def remove_app_and_related_data_task(self, tenant_id: str, app_id: str):
_delete_app_workflow_runs(tenant_id, app_id)
_delete_app_workflow_node_executions(tenant_id, app_id)
_delete_app_workflow_app_logs(tenant_id, app_id)
if dify_config.BILLING_ENABLED and dify_config.ARCHIVE_STORAGE_ENABLED:
_delete_app_workflow_archive_logs(tenant_id, app_id)
_delete_archived_workflow_run_files(tenant_id, app_id)
_delete_app_conversations(tenant_id, app_id)
_delete_app_messages(tenant_id, app_id)
_delete_workflow_tool_providers(tenant_id, app_id)
@ -254,6 +260,45 @@ def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):
)
def _delete_app_workflow_archive_logs(tenant_id: str, app_id: str):
def del_workflow_archive_log(workflow_archive_log_id: str):
db.session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
synchronize_session=False
)
_delete_records(
"""select id from workflow_archive_logs where tenant_id=:tenant_id and app_id=:app_id limit 1000""",
{"tenant_id": tenant_id, "app_id": app_id},
del_workflow_archive_log,
"workflow archive log",
)
def _delete_archived_workflow_run_files(tenant_id: str, app_id: str):
prefix = f"{tenant_id}/app_id={app_id}/"
try:
archive_storage = get_archive_storage()
except ArchiveStorageNotConfiguredError as e:
logger.info("Archive storage not configured, skipping archive file cleanup: %s", e)
return
try:
keys = archive_storage.list_objects(prefix)
except Exception:
logger.exception("Failed to list archive files for app %s", app_id)
return
deleted = 0
for key in keys:
try:
archive_storage.delete_object(key)
deleted += 1
except Exception:
logger.exception("Failed to delete archive object %s", key)
logger.info("Deleted %s archive objects for app %s", deleted, app_id)
def _delete_app_conversations(tenant_id: str, app_id: str):
def del_conversation(conversation_id: str):
db.session.query(PinnedConversation).where(PinnedConversation.conversation_id == conversation_id).delete(


@ -30,3 +30,12 @@ class TestWorkflowExecutionStatus:
for status in non_ended_statuses:
assert not status.is_ended(), f"{status} should not be considered ended"
def test_ended_values(self):
"""Test ended_values returns the expected status values."""
assert set(WorkflowExecutionStatus.ended_values()) == {
WorkflowExecutionStatus.SUCCEEDED.value,
WorkflowExecutionStatus.FAILED.value,
WorkflowExecutionStatus.PARTIAL_SUCCEEDED.value,
WorkflowExecutionStatus.STOPPED.value,
}
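
Illustrative only: a typical consumer of ended_values() is a query filter over finished runs. This sketch assumes WorkflowRun.status stores the same string values; it is not part of this diff.

from sqlalchemy import select

from core.workflow.enums import WorkflowExecutionStatus
from models import WorkflowRun

ended_runs_stmt = select(WorkflowRun).where(
    WorkflowRun.status.in_(WorkflowExecutionStatus.ended_values())
)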


@ -37,6 +37,20 @@ def _client_error(code: str) -> ClientError:
def _mock_client(monkeypatch):
client = MagicMock()
client.head_bucket.return_value = None
# Configure put_object to return a proper ETag that matches the MD5 hash
# The ETag format is typically the MD5 hash wrapped in quotes
def mock_put_object(**kwargs):
body = kwargs.get("Body", b"")
if isinstance(body, bytes):
md5_hash = hashlib.md5(body).hexdigest()
else:
md5_hash = hashlib.md5(body.encode()).hexdigest()
response = MagicMock()
response.get.return_value = f'"{md5_hash}"'
return response
client.put_object.side_effect = mock_put_object
boto_client = MagicMock(return_value=client)
monkeypatch.setattr(storage_module.boto3, "client", boto_client)
return client, boto_client
@ -254,8 +268,8 @@ def test_serialization_roundtrip():
{"id": "2", "value": 123},
]
data = ArchiveStorage.serialize_to_jsonl_gz(records)
decoded = ArchiveStorage.deserialize_from_jsonl_gz(data)
data = ArchiveStorage.serialize_to_jsonl(records)
decoded = ArchiveStorage.deserialize_from_jsonl(data)
assert decoded[0]["id"] == "1"
assert decoded[0]["payload"]["nested"] == "value"


@ -6,6 +6,7 @@ from datetime import datetime
from typing import Any
from unittest.mock import MagicMock, Mock
import pytest
from sqlalchemy.orm import sessionmaker
from core.repositories.sqlalchemy_workflow_node_execution_repository import (
@ -13,7 +14,15 @@ from core.repositories.sqlalchemy_workflow_node_execution_repository import (
)
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution
from core.workflow.enums import NodeType
from models import Account, WorkflowNodeExecutionModel, WorkflowNodeExecutionTriggeredFrom
from models import (
Account,
WorkflowNodeExecutionModel,
WorkflowNodeExecutionTriggeredFrom,
WorkflowRunTriggeredFrom,
)
from repositories.sqlalchemy_api_workflow_node_execution_repository import (
DifyAPISQLAlchemyWorkflowNodeExecutionRepository,
)
class TestSQLAlchemyWorkflowNodeExecutionRepositoryProcessData:
@ -104,3 +113,43 @@ class TestSQLAlchemyWorkflowNodeExecutionRepositoryProcessData:
# Should not be truncated
assert domain_model.process_data_truncated is False
assert domain_model.get_truncated_process_data() is None
class TestDifyAPISQLAlchemyWorkflowNodeExecutionRepositoryTriggeredFromMapping:
@pytest.mark.parametrize(
"run_triggered_from",
[
WorkflowRunTriggeredFrom.APP_RUN.value,
WorkflowRunTriggeredFrom.DEBUGGING.value,
WorkflowRunTriggeredFrom.SCHEDULE.value,
WorkflowRunTriggeredFrom.PLUGIN.value,
WorkflowRunTriggeredFrom.WEBHOOK.value,
],
)
def test_maps_workflow_run_group(self, run_triggered_from: str) -> None:
result = DifyAPISQLAlchemyWorkflowNodeExecutionRepository._map_run_triggered_from_to_node_triggered_from(
run_triggered_from
)
assert result == WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value
@pytest.mark.parametrize(
"run_triggered_from",
[
WorkflowRunTriggeredFrom.RAG_PIPELINE_RUN.value,
WorkflowRunTriggeredFrom.RAG_PIPELINE_DEBUGGING.value,
],
)
def test_maps_rag_pipeline_group(self, run_triggered_from: str) -> None:
result = DifyAPISQLAlchemyWorkflowNodeExecutionRepository._map_run_triggered_from_to_node_triggered_from(
run_triggered_from
)
assert result == WorkflowNodeExecutionTriggeredFrom.RAG_PIPELINE_RUN.value
def test_maps_unknown_value_to_empty_string(self) -> None:
result = DifyAPISQLAlchemyWorkflowNodeExecutionRepository._map_run_triggered_from_to_node_triggered_from(
"unknown-source"
)
assert result == ""
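
The grouping asserted above can be read as the sketch below; it is derived only from the test expectations, not copied from the repository implementation.

from models import WorkflowNodeExecutionTriggeredFrom, WorkflowRunTriggeredFrom

# Illustrative re-statement of the mapping the tests expect.
_WORKFLOW_RUN_SOURCES = {
    WorkflowRunTriggeredFrom.APP_RUN.value,
    WorkflowRunTriggeredFrom.DEBUGGING.value,
    WorkflowRunTriggeredFrom.SCHEDULE.value,
    WorkflowRunTriggeredFrom.PLUGIN.value,
    WorkflowRunTriggeredFrom.WEBHOOK.value,
}
_RAG_PIPELINE_SOURCES = {
    WorkflowRunTriggeredFrom.RAG_PIPELINE_RUN.value,
    WorkflowRunTriggeredFrom.RAG_PIPELINE_DEBUGGING.value,
}

def map_run_triggered_from(value: str) -> str:
    if value in _WORKFLOW_RUN_SOURCES:
        return WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value
    if value in _RAG_PIPELINE_SOURCES:
        return WorkflowNodeExecutionTriggeredFrom.RAG_PIPELINE_RUN.value
    return ""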


@ -0,0 +1,54 @@
"""
Unit tests for workflow run archiving functionality.
This module contains tests for:
- Archive service
- Rollback service
"""
from datetime import datetime
from unittest.mock import MagicMock, patch
from services.retention.workflow_run.constants import ARCHIVE_BUNDLE_NAME
class TestWorkflowRunArchiver:
"""Tests for the WorkflowRunArchiver class."""
@patch("services.retention.workflow_run.archive_paid_plan_workflow_run.dify_config")
@patch("services.retention.workflow_run.archive_paid_plan_workflow_run.get_archive_storage")
def test_archiver_initialization(self, mock_get_storage, mock_config):
"""Test archiver can be initialized with various options."""
from services.retention.workflow_run.archive_paid_plan_workflow_run import WorkflowRunArchiver
mock_config.BILLING_ENABLED = False
archiver = WorkflowRunArchiver(
days=90,
batch_size=100,
tenant_ids=["test-tenant"],
limit=50,
dry_run=True,
)
assert archiver.days == 90
assert archiver.batch_size == 100
assert archiver.tenant_ids == ["test-tenant"]
assert archiver.limit == 50
assert archiver.dry_run is True
def test_get_archive_key(self):
"""Test archive key generation."""
from services.retention.workflow_run.archive_paid_plan_workflow_run import WorkflowRunArchiver
archiver = WorkflowRunArchiver.__new__(WorkflowRunArchiver)
mock_run = MagicMock()
mock_run.tenant_id = "tenant-123"
mock_run.app_id = "app-999"
mock_run.id = "run-456"
mock_run.created_at = datetime(2024, 1, 15, 12, 0, 0)
key = archiver._get_archive_key(mock_run)
assert key == f"tenant-123/app_id=app-999/year=2024/month=01/workflow_run_id=run-456/{ARCHIVE_BUNDLE_NAME}"


@ -0,0 +1,180 @@
"""
Unit tests for archived workflow run deletion service.
"""
from unittest.mock import MagicMock, patch
class TestArchivedWorkflowRunDeletion:
def test_delete_by_run_id_returns_error_when_run_missing(self):
from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion
deleter = ArchivedWorkflowRunDeletion()
repo = MagicMock()
session = MagicMock()
session.get.return_value = None
session_maker = MagicMock()
session_maker.return_value.__enter__.return_value = session
session_maker.return_value.__exit__.return_value = None
mock_db = MagicMock()
mock_db.engine = MagicMock()
with (
patch("services.retention.workflow_run.delete_archived_workflow_run.db", mock_db),
patch(
"services.retention.workflow_run.delete_archived_workflow_run.sessionmaker", return_value=session_maker
),
patch.object(deleter, "_get_workflow_run_repo", return_value=repo),
):
result = deleter.delete_by_run_id("run-1")
assert result.success is False
assert result.error == "Workflow run run-1 not found"
repo.get_archived_run_ids.assert_not_called()
def test_delete_by_run_id_returns_error_when_not_archived(self):
from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion
deleter = ArchivedWorkflowRunDeletion()
repo = MagicMock()
repo.get_archived_run_ids.return_value = set()
run = MagicMock()
run.id = "run-1"
run.tenant_id = "tenant-1"
session = MagicMock()
session.get.return_value = run
session_maker = MagicMock()
session_maker.return_value.__enter__.return_value = session
session_maker.return_value.__exit__.return_value = None
mock_db = MagicMock()
mock_db.engine = MagicMock()
with (
patch("services.retention.workflow_run.delete_archived_workflow_run.db", mock_db),
patch(
"services.retention.workflow_run.delete_archived_workflow_run.sessionmaker", return_value=session_maker
),
patch.object(deleter, "_get_workflow_run_repo", return_value=repo),
patch.object(deleter, "_delete_run") as mock_delete_run,
):
result = deleter.delete_by_run_id("run-1")
assert result.success is False
assert result.error == "Workflow run run-1 is not archived"
mock_delete_run.assert_not_called()
def test_delete_by_run_id_calls_delete_run(self):
from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion
deleter = ArchivedWorkflowRunDeletion()
repo = MagicMock()
repo.get_archived_run_ids.return_value = {"run-1"}
run = MagicMock()
run.id = "run-1"
run.tenant_id = "tenant-1"
session = MagicMock()
session.get.return_value = run
session_maker = MagicMock()
session_maker.return_value.__enter__.return_value = session
session_maker.return_value.__exit__.return_value = None
mock_db = MagicMock()
mock_db.engine = MagicMock()
with (
patch("services.retention.workflow_run.delete_archived_workflow_run.db", mock_db),
patch(
"services.retention.workflow_run.delete_archived_workflow_run.sessionmaker", return_value=session_maker
),
patch.object(deleter, "_get_workflow_run_repo", return_value=repo),
patch.object(deleter, "_delete_run", return_value=MagicMock(success=True)) as mock_delete_run,
):
result = deleter.delete_by_run_id("run-1")
assert result.success is True
mock_delete_run.assert_called_once_with(run)
def test_delete_batch_uses_repo(self):
from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion
deleter = ArchivedWorkflowRunDeletion()
repo = MagicMock()
run1 = MagicMock()
run1.id = "run-1"
run1.tenant_id = "tenant-1"
run2 = MagicMock()
run2.id = "run-2"
run2.tenant_id = "tenant-1"
repo.get_archived_runs_by_time_range.return_value = [run1, run2]
session = MagicMock()
session_maker = MagicMock()
session_maker.return_value.__enter__.return_value = session
session_maker.return_value.__exit__.return_value = None
start_date = MagicMock()
end_date = MagicMock()
mock_db = MagicMock()
mock_db.engine = MagicMock()
with (
patch("services.retention.workflow_run.delete_archived_workflow_run.db", mock_db),
patch(
"services.retention.workflow_run.delete_archived_workflow_run.sessionmaker", return_value=session_maker
),
patch.object(deleter, "_get_workflow_run_repo", return_value=repo),
patch.object(
deleter, "_delete_run", side_effect=[MagicMock(success=True), MagicMock(success=True)]
) as mock_delete_run,
):
results = deleter.delete_batch(
tenant_ids=["tenant-1"],
start_date=start_date,
end_date=end_date,
limit=2,
)
assert len(results) == 2
repo.get_archived_runs_by_time_range.assert_called_once_with(
session=session,
tenant_ids=["tenant-1"],
start_date=start_date,
end_date=end_date,
limit=2,
)
assert mock_delete_run.call_count == 2
def test_delete_run_dry_run(self):
from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion
deleter = ArchivedWorkflowRunDeletion(dry_run=True)
run = MagicMock()
run.id = "run-1"
run.tenant_id = "tenant-1"
with patch.object(deleter, "_get_workflow_run_repo") as mock_get_repo:
result = deleter._delete_run(run)
assert result.success is True
mock_get_repo.assert_not_called()
def test_delete_run_calls_repo(self):
from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion
deleter = ArchivedWorkflowRunDeletion()
run = MagicMock()
run.id = "run-1"
run.tenant_id = "tenant-1"
repo = MagicMock()
repo.delete_runs_with_related.return_value = {"runs": 1}
with patch.object(deleter, "_get_workflow_run_repo", return_value=repo):
result = deleter._delete_run(run)
assert result.success is True
assert result.deleted_counts == {"runs": 1}
repo.delete_runs_with_related.assert_called_once()


@ -0,0 +1,65 @@
"""
Unit tests for workflow run restore functionality.
"""
from datetime import datetime
from unittest.mock import MagicMock
class TestWorkflowRunRestore:
"""Tests for the WorkflowRunRestore class."""
def test_restore_initialization(self):
"""Restore service should respect dry_run flag."""
from services.retention.workflow_run.restore_archived_workflow_run import WorkflowRunRestore
restore = WorkflowRunRestore(dry_run=True)
assert restore.dry_run is True
def test_convert_datetime_fields(self):
"""ISO datetime strings should be converted to datetime objects."""
from models.workflow import WorkflowRun
from services.retention.workflow_run.restore_archived_workflow_run import WorkflowRunRestore
record = {
"id": "test-id",
"created_at": "2024-01-01T12:00:00",
"finished_at": "2024-01-01T12:05:00",
"name": "test",
}
restore = WorkflowRunRestore()
result = restore._convert_datetime_fields(record, WorkflowRun)
assert isinstance(result["created_at"], datetime)
assert result["created_at"].year == 2024
assert result["created_at"].month == 1
assert result["name"] == "test"
def test_restore_table_records_returns_rowcount(self):
"""Restore should return inserted rowcount."""
from services.retention.workflow_run.restore_archived_workflow_run import WorkflowRunRestore
session = MagicMock()
session.execute.return_value = MagicMock(rowcount=2)
restore = WorkflowRunRestore()
records = [{"id": "p1", "workflow_run_id": "r1", "created_at": "2024-01-01T00:00:00"}]
restored = restore._restore_table_records(session, "workflow_pauses", records, schema_version="1.0")
assert restored == 2
session.execute.assert_called_once()
def test_restore_table_records_unknown_table(self):
"""Unknown table names should be ignored gracefully."""
from services.retention.workflow_run.restore_archived_workflow_run import WorkflowRunRestore
session = MagicMock()
restore = WorkflowRunRestore()
restored = restore._restore_table_records(session, "unknown_table", [{"id": "x1"}], schema_version="1.0")
assert restored == 0
session.execute.assert_not_called()


@ -2,7 +2,11 @@ from unittest.mock import ANY, MagicMock, call, patch
import pytest
from libs.archive_storage import ArchiveStorageNotConfiguredError
from models.workflow import WorkflowArchiveLog
from tasks.remove_app_and_related_data_task import (
_delete_app_workflow_archive_logs,
_delete_archived_workflow_run_files,
_delete_draft_variable_offload_data,
_delete_draft_variables,
delete_draft_variables_batch,
@ -335,3 +339,68 @@ class TestDeleteDraftVariableOffloadData:
# Verify error was logged
mock_logging.exception.assert_called_once_with("Error deleting draft variable offload data:")
class TestDeleteWorkflowArchiveLogs:
@patch("tasks.remove_app_and_related_data_task._delete_records")
@patch("tasks.remove_app_and_related_data_task.db")
def test_delete_app_workflow_archive_logs_calls_delete_records(self, mock_db, mock_delete_records):
tenant_id = "tenant-1"
app_id = "app-1"
_delete_app_workflow_archive_logs(tenant_id, app_id)
mock_delete_records.assert_called_once()
query_sql, params, delete_func, name = mock_delete_records.call_args[0]
assert "workflow_archive_logs" in query_sql
assert params == {"tenant_id": tenant_id, "app_id": app_id}
assert name == "workflow archive log"
mock_query = MagicMock()
mock_delete_query = MagicMock()
mock_query.where.return_value = mock_delete_query
mock_db.session.query.return_value = mock_query
delete_func("log-1")
mock_db.session.query.assert_called_once_with(WorkflowArchiveLog)
mock_query.where.assert_called_once()
mock_delete_query.delete.assert_called_once_with(synchronize_session=False)
class TestDeleteArchivedWorkflowRunFiles:
@patch("tasks.remove_app_and_related_data_task.get_archive_storage")
@patch("tasks.remove_app_and_related_data_task.logger")
def test_delete_archived_workflow_run_files_not_configured(self, mock_logger, mock_get_storage):
mock_get_storage.side_effect = ArchiveStorageNotConfiguredError("missing config")
_delete_archived_workflow_run_files("tenant-1", "app-1")
assert mock_logger.info.call_count == 1
assert "Archive storage not configured" in mock_logger.info.call_args[0][0]
@patch("tasks.remove_app_and_related_data_task.get_archive_storage")
@patch("tasks.remove_app_and_related_data_task.logger")
def test_delete_archived_workflow_run_files_list_failure(self, mock_logger, mock_get_storage):
storage = MagicMock()
storage.list_objects.side_effect = Exception("list failed")
mock_get_storage.return_value = storage
_delete_archived_workflow_run_files("tenant-1", "app-1")
storage.list_objects.assert_called_once_with("tenant-1/app_id=app-1/")
storage.delete_object.assert_not_called()
mock_logger.exception.assert_called_once_with("Failed to list archive files for app %s", "app-1")
@patch("tasks.remove_app_and_related_data_task.get_archive_storage")
@patch("tasks.remove_app_and_related_data_task.logger")
def test_delete_archived_workflow_run_files_success(self, mock_logger, mock_get_storage):
storage = MagicMock()
storage.list_objects.return_value = ["key-1", "key-2"]
mock_get_storage.return_value = storage
_delete_archived_workflow_run_files("tenant-1", "app-1")
storage.list_objects.assert_called_once_with("tenant-1/app_id=app-1/")
storage.delete_object.assert_has_calls([call("key-1"), call("key-2")], any_order=False)
mock_logger.info.assert_called_with("Deleted %s archive objects for app %s", 2, "app-1")

web/.npmrc Normal file

@ -0,0 +1 @@
save-exact=true


@ -203,7 +203,7 @@ const Annotation: FC<Props> = (props) => {
</Filter>
{isLoading
? <Loading type="app" />
// eslint-disable-next-line sonarjs/no-nested-conditional
: total > 0
? (
<List


@ -134,7 +134,6 @@ const GetAutomaticRes: FC<IGetAutomaticResProps> = ({
},
] as const
// eslint-disable-next-line sonarjs/no-nested-template-literals, sonarjs/no-nested-conditional
const [instructionFromSessionStorage, setInstruction] = useSessionStorageState<string>(`improve-instruction-${flowId}${isBasicMode ? '' : `-${nodeId}${editorId ? `-${editorId}` : ''}`}`)
const instruction = instructionFromSessionStorage || ''
const [ideaOutput, setIdeaOutput] = useState<string>('')


@ -175,7 +175,7 @@ describe('SettingsModal', () => {
renderSettingsModal()
fireEvent.click(screen.getByText('appOverview.overview.appInfo.settings.more.entry'))
const privacyInput = screen.getByPlaceholderText('appOverview.overview.appInfo.settings.more.privacyPolicyPlaceholder')
// eslint-disable-next-line sonarjs/no-clear-text-protocols
fireEvent.change(privacyInput, { target: { value: 'ftp://invalid-url' } })
fireEvent.click(screen.getByText('common.operation.save'))


@ -0,0 +1,17 @@
<svg width="25" height="25" viewBox="0 0 25 25" fill="none" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g clip-path="url(#clip0_6305_73327)">
<path d="M0.5 12.5C0.5 8.77247 0.5 6.9087 1.10896 5.43853C1.92092 3.47831 3.47831 1.92092 5.43853 1.10896C6.9087 0.5 8.77247 0.5 12.5 0.5C16.2275 0.5 18.0913 0.5 19.5615 1.10896C21.5217 1.92092 23.0791 3.47831 23.891 5.43853C24.5 6.9087 24.5 8.77247 24.5 12.5C24.5 16.2275 24.5 18.0913 23.891 19.5615C23.0791 21.5217 21.5217 23.0791 19.5615 23.891C18.0913 24.5 16.2275 24.5 12.5 24.5C8.77247 24.5 6.9087 24.5 5.43853 23.891C3.47831 23.0791 1.92092 21.5217 1.10896 19.5615C0.5 18.0913 0.5 16.2275 0.5 12.5Z" fill="white"/>
<rect width="24" height="24" transform="translate(0.5 0.5)" fill="url(#pattern0_6305_73327)"/>
<rect width="24" height="24" transform="translate(0.5 0.5)" fill="white" fill-opacity="0.01"/>
</g>
<path d="M12.5 0.25C14.3603 0.25 15.7684 0.250313 16.8945 0.327148C18.0228 0.404144 18.8867 0.558755 19.6572 0.87793C21.6787 1.71525 23.2847 3.32133 24.1221 5.34277C24.4412 6.11333 24.5959 6.97723 24.6729 8.10547C24.7497 9.23161 24.75 10.6397 24.75 12.5C24.75 14.3603 24.7497 15.7684 24.6729 16.8945C24.5959 18.0228 24.4412 18.8867 24.1221 19.6572C23.2847 21.6787 21.6787 23.2847 19.6572 24.1221C18.8867 24.4412 18.0228 24.5959 16.8945 24.6729C15.7684 24.7497 14.3603 24.75 12.5 24.75C10.6397 24.75 9.23161 24.7497 8.10547 24.6729C6.97723 24.5959 6.11333 24.4412 5.34277 24.1221C3.32133 23.2847 1.71525 21.6787 0.87793 19.6572C0.558755 18.8867 0.404144 18.0228 0.327148 16.8945C0.250313 15.7684 0.25 14.3603 0.25 12.5C0.25 10.6397 0.250313 9.23161 0.327148 8.10547C0.404144 6.97723 0.558755 6.11333 0.87793 5.34277C1.71525 3.32133 3.32133 1.71525 5.34277 0.87793C6.11333 0.558755 6.97723 0.404144 8.10547 0.327148C9.23161 0.250313 10.6397 0.25 12.5 0.25Z" stroke="#101828" stroke-opacity="0.08" stroke-width="0.5"/>
<defs>
<pattern id="pattern0_6305_73327" patternContentUnits="objectBoundingBox" width="1" height="1">
<use xlink:href="#image0_6305_73327" transform="scale(0.00625)"/>
</pattern>
<clipPath id="clip0_6305_73327">
<path d="M0.5 12.5C0.5 8.77247 0.5 6.9087 1.10896 5.43853C1.92092 3.47831 3.47831 1.92092 5.43853 1.10896C6.9087 0.5 8.77247 0.5 12.5 0.5C16.2275 0.5 18.0913 0.5 19.5615 1.10896C21.5217 1.92092 23.0791 3.47831 23.891 5.43853C24.5 6.9087 24.5 8.77247 24.5 12.5C24.5 16.2275 24.5 18.0913 23.891 19.5615C23.0791 21.5217 21.5217 23.0791 19.5615 23.891C18.0913 24.5 16.2275 24.5 12.5 24.5C8.77247 24.5 6.9087 24.5 5.43853 23.891C3.47831 23.0791 1.92092 21.5217 1.10896 19.5615C0.5 18.0913 0.5 16.2275 0.5 12.5Z" fill="white"/>
</clipPath>
<image id="image0_6305_73327" width="160" height="160" preserveAspectRatio="none" xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAKAAAACgCAYAAACLz2ctAAAACXBIWXMAACxLAAAsSwGlPZapAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAm6SURBVHgB7Z09jFVFGIZn/Wf9I5FSKIVSKYndErolVJJQQUKliRWFNhY2UGxFItUmUJFARZaOQGekA0poF8slUcFFRVnPu9cDw/HuPWdmvpnvu3PeJ9m4MQH23H3P/DzzzczCVoMjRInXHCGKMIBEFQaQqMIAElUYQKIKA0hUYQCJKgwgUYUBJKowgEQVBpCowgASVRhAogoDSFRhAIkqDCBRhQEkqjCARBUGkKjCABJVGECiCgNIVGEAiSpvuJGyubnlLq0+cQ/u/739fSyLiwvu8JF33PKxXY6EM9oW8Mrl393dO8+Swgfw59euPXU//finI+GMMoCPNp43gflr+3u0YBJcubzZhPG5I2GMMoA/nH/84vvjJxbd/gPpIxG0hDdvsBUMZXQBRFf5cP2f7e8Pff729tfysUUnwa0bf7iNDbaCIYwugNeb8VpLO3FAC4ggpoJW8GoztiTDGVUAEb62hULg9ux5+fjoiiXGg5jYYGZNhjGaAGLicbPpIsFHTfC62gThW2p0igTXr206MozRBHDt2uYL5XK0CZ/f+rXA5037/6GgBaSWGcYoAuhrF7R+O4330Ap+cUJmQkItM4xRBNDXLkd7Viw+O/gWtUxBqg/gNO3SB7VMOaoP4DTt0gdaQIkJCbVMP1UXI8zSLn2gq77dtJ6pa8XQMheaIcCuxfLvOiZVe/e97ixTbQD7tEsfrZbxW9BYEEINMPw4880HImPaXFTbBQ/RLn1IaRlNLq4+cZapMoBDtUsfUpUymuCzWBNoxXNRZQBDtMss/DHkPIPZuFUnWV0AY7TLNCataB0eD0OR60ZbweoCGKNdpoExZE0OD1L84bq9IomqAuh3mUsJEwh/DFkTWB60RjUB7GqXwwki2R9D1gSKJKyVilUTQAntAvwxZI1Y0zJVBFBKuwCrg3UprGmZKgLY3WQUSy3apQ9LWmbuA9jVLiinisEfQ9aOJS1jYpEQA+NHG3HjLkntklp4ME9Ay+CF3btPNwLqAYQakGh5qF3CwWePYgVNVLtgdJ9S3d6Ci2+9atUufVjQMqotoN99Yuz26cE3XQzrzRgQQV46Eq5f0O0eFtoNNy/cu/PM3b0zafG1JyNqAeyWqz+4/8ydPI29ueGN8qHm6+dmmQmnXYV2Kah4kdiUPk/4L772GFClC54240zdxIN9HBZNvzWkliulUPnXd1roT9nEg6pffFkvwNREcrlSiuIBnDXjTN3Ec+r0e+5p83fcGonPC0VquVKS4j9BX0VGytkqeKvRrWCpiZvCX0VyuVKSogGEdmlnX7NIOVul7VZqX9MNRWq5UpqiARwaipSzVTCrxYoIJjTcFD5BarkyB8UCGDrBSDlbBa0gJiSXOCHZRmq5MgdFAhiz0E8tI4M17dKlyE8Tu7+CWiYNi9qlS/YApiz0U8ukYVG7dMn+E6W2QNQycVjVLl2yLwRKjMGgZfZHlg2h20ELiEnN/gNxxQ7ziD/mtqRdumQPIE5nSt3k02qZmLe41TII4Bhr/sC9xr1aUi8+C1sNLiMIzsXV9DPyEKSzKx9GVcuAlXO/zc2MGF3muZXdLhX/ma2ekpV9DIhWy8KRt1KnnpZAqsv0n9nqyf1FpkUWjrxttYx1JFcq/Ge2enJ/kQBauYkIWsb6kWvSKxX+M1u0AcXEEDyU9k1ErZaxSo6VCv+ZJ2LaVitYLICSv/zUahmLrWDOlQr/ma2d3F9UjVu4iajVMtbIuVLhP/NkU7qdCUnRAEr+8iWqZaxQYqXCf2b4UCtKqvjiILXM/ym1UmFRy6isTlPLvKRkgahFLaMSQGqZl5Qej1rTMmr1OdQyOgWi1rSMWgDHrmVStUtKmZslLaNaoThmLZN6jDBmshLPrK1lVAM4Vi0jdYxwyhjOipZRr9Eeo5aROkY4dQxnQcuY2CQwJi2Teoxw94BxqWfW0jImAjgmLZN6jHCX1DGctpYxs01qDFpmOWHiMWt3YcoYTlvLmAlg7VomdeKB8vpZSD1zaS1jaqOoFS2TY202Vbv0hUJKRZXWMqYCaEXLSM3MW0rd3jSPWsbcVvkatQwG+rGE3N40j1rG3lkNzo6WkahSSXmhYu51mzctYzKAVrQMxoKpExJp7dKHpJYpcXWZ2X2KuDNE4g1stUxMK9TOzGNPW82lXfrAn3u6+djtitzEjwAiyCWurTUbwKuCt3tLnC0Teo9cbu3SB168VGIvDgrBZBc8RDuEoKFlcmuXEhw/8a7LjbkAouvJccB4SS1Tw6XZ+PlLFMuaC2Cut7+klimlXXKBF6hUjaSpAOa+Tr6ElimtXXIgtSI1BFMBXMssP0tomdLaRZrSZ0mbCeBkopD/AMmc1TJa2kWSo4W3J5gJYMk7PXJUy2hrFwmgXUqfJW0igKW1A1rA2JPzd9Iy1C5xqAcwl3bpI6VypDvRoHaJRz2AWm+/pJahdolHNYDa2iFVy6A7pnZJQ3UteH1dpugRV0Hs3Rf3KLjCIEY7oOVGK0rtkoZqAGOvXHj171hwX379ftE3uB23Uruko9oFS+zF1TjgBy0XamPmXbvgs9O+wku9HAtT/++/+9XFgO6j9BvctlynTr+rrl1Snh9DFgxdtFEPID6Epf9q7kLR6D7Q+qVol0nFsszEA89v9RLCoZgQ0TGb0j9uglv6w29PpUrRLlL7bjWePwcmAojwhW5K/6qZeJQGLZcV7aLx/DkwdTTH0DGVhrVvx20WtIvWqkUOTD3FyQFdm8aBkhLaBRt8JLSL1XtOYjEVwCFaZl61y4Xzj50ES4qrFjkw9ySzKjI0tYuFaheN58+NuQC2WmYa1C51hQ+YbMunaRkN7YDqaWqXvJgM4DQto6EdsH+E2iUvZk9GQCt42xs7fXvmF6fB3oQja6ld+jH9VCcTuj4pYjcxUbsMY2GrwRkGv8gSpzR1weQBtYIAXfCZwLNl0GJLjP0QvhonHy3mA6jJhfNPmhZwEkJUvwydBEC7XFyN33/cgtn3uZXdrmbqHFgI4W9EH3q2DLVLGAzgDPyN6EM3MVG7hMEA9hByhQG1SzgMYA/+RvS+s2WoXcJhAAfgy+idtAy1SxwM4ED6rjBgtUscDOBA/PMBu2fLsNolHgYwAF/LtGfLULukwQAGME3LULukwZWQQBA8LLPhv+19GhKcbVY8xjT2a2ELGEhXy0gwJu3ShQGMAIFZFrpg+5NmcpPjeth5gV0wUYUtIFGFASSqMIBEFQaQqMIAElUYQKIKA0hUYQCJKgwgUYUBJKowgEQVBpCowgASVRhAogoDSFRhAIkqDCBRhQEkqjCARBUGkKjCABJVGECiCgNIVPkXGPW
KHZj1nMYAAAAASUVORK5CYII="/>
</defs>
</svg>


@ -0,0 +1,4 @@
<svg width="40" height="40" viewBox="0 0 40 40" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect width="40" height="40" fill="white"/>
<path d="M25.7926 10.1311H21.5089L29.3208 29.869H33.6045L25.7926 10.1311ZM13.4164 10.1311L5.60449 29.869H9.97273L11.5703 25.724H19.743L21.3405 29.869H25.7087L17.8969 10.1311H13.4164ZM12.9834 22.0583L15.6566 15.1217L18.3299 22.0583H12.9834Z" fill="black"/>
</svg>


@ -0,0 +1,4 @@
<svg width="40" height="40" viewBox="0 0 40 40" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect width="40" height="40" fill="white"/>
<path d="M36.6676 11.2917C36.3316 11.1277 36.1871 11.4402 35.9906 11.599C35.9242 11.6511 35.8668 11.7188 35.8108 11.7787C35.3199 12.3048 34.747 12.6485 33.9996 12.6068C32.9046 12.5469 31.971 12.8907 31.1455 13.7293C30.9696 12.6954 30.3863 12.0782 29.4996 11.6824C29.0348 11.4766 28.5647 11.2709 28.2406 10.823C28.0127 10.5053 27.9515 10.1511 27.8368 9.80214C27.7652 9.59121 27.6923 9.37506 27.4502 9.33861C27.1871 9.29694 27.0843 9.51829 26.9814 9.70318C26.5674 10.4584 26.4084 11.2917 26.4228 12.1355C26.4592 14.0313 27.26 15.5417 28.8486 16.6173C29.0296 16.7397 29.0764 16.8646 29.0191 17.0443C28.9111 17.4141 28.7822 17.7735 28.6676 18.1433C28.596 18.3803 28.4879 18.4323 28.2354 18.3282C27.363 17.9637 26.609 17.4246 25.9436 16.7709C24.8135 15.6771 23.7914 14.4689 22.5166 13.5235C22.2171 13.3021 21.919 13.0964 21.609 12.9011C20.3082 11.6355 21.7796 10.5964 22.1194 10.474C22.4762 10.3464 22.2431 9.9037 21.092 9.90891C19.9423 9.91413 18.889 10.2995 17.5478 10.8126C17.3512 10.8907 17.1455 10.948 16.9332 10.9922C15.7158 10.7631 14.4515 10.711 13.1298 10.8594C10.6428 11.1381 8.65587 12.3152 7.19493 14.3255C5.44102 16.7397 5.02826 19.4845 5.53347 22.349C6.06473 25.3646 7.60249 27.8646 9.96707 29.8178C12.4176 31.8413 15.2406 32.8334 18.4606 32.6433C20.4163 32.5313 22.5947 32.2683 25.0504 30.1875C25.6702 30.4949 26.3199 30.6173 27.3994 30.711C28.2302 30.7891 29.0296 30.6694 29.6494 30.5417C30.6194 30.3361 30.5518 29.4375 30.2015 29.2709C27.3578 27.9454 27.9814 28.4845 27.4136 28.0495C28.859 26.3361 31.0374 24.5574 31.889 18.797C31.9554 18.3386 31.898 18.0522 31.889 17.6798C31.8838 17.4558 31.9346 17.3673 32.1923 17.3413C32.9046 17.2605 33.596 17.0651 34.2314 16.7137C36.0739 15.7058 36.816 14.0522 36.9918 12.0678C37.0179 11.7657 36.9866 11.4506 36.6676 11.2917ZM20.613 29.1485C17.8564 26.9793 16.5204 26.2657 15.9684 26.297C15.4527 26.3255 15.5452 26.9167 15.6584 27.3022C15.777 27.6823 15.9319 27.9454 16.1494 28.2787C16.2991 28.5001 16.402 28.8307 15.9996 29.0755C15.1116 29.6277 13.5687 28.8907 13.4958 28.8542C11.7001 27.797 10.1988 26.3985 9.14025 24.487C8.11941 22.6459 7.52566 20.6719 7.42801 18.5651C7.40197 18.0547 7.5517 17.875 8.05691 17.7839C8.72227 17.6615 9.40978 17.6355 10.0751 17.7318C12.8876 18.1433 15.2822 19.4037 17.2887 21.3959C18.4346 22.5339 19.3018 23.8907 20.195 25.2162C21.1442 26.6251 22.1663 27.9662 23.4671 29.0651C23.9254 29.4506 24.2926 29.7449 24.6428 29.961C23.5856 30.0782 21.8199 30.1042 20.613 29.1485ZM21.9332 20.6407C21.9332 20.4141 22.1142 20.2345 22.342 20.2345C22.3928 20.2345 22.4398 20.2449 22.4814 20.2605C22.5374 20.2813 22.5895 20.3126 22.6299 20.3594C22.7027 20.4298 22.7444 20.5339 22.7444 20.6407C22.7444 20.8673 22.5635 21.047 22.3368 21.047C22.109 21.047 21.9332 20.8673 21.9332 20.6407ZM26.036 22.7501C25.7731 22.8569 25.51 22.9506 25.2575 22.961C24.8655 22.9793 24.4371 22.8203 24.204 22.6251C23.8434 22.323 23.5856 22.1537 23.4762 21.6225C23.4306 21.3959 23.4567 21.047 23.497 20.8465C23.5908 20.4141 23.4866 20.1381 23.1832 19.8855C22.9346 19.6798 22.6207 19.6251 22.2744 19.6251C22.1455 19.6251 22.027 19.5678 21.9384 19.5209C21.7939 19.4479 21.6754 19.2683 21.7887 19.047C21.8251 18.9766 22.001 18.8022 22.0426 18.7709C22.5114 18.5027 23.053 18.5913 23.5543 18.7918C24.0191 18.9818 24.3694 19.3307 24.8746 19.823C25.3915 20.4194 25.484 20.5861 25.7783 21.0313C26.01 21.3829 26.2223 21.7422 26.3668 22.1537C26.454 22.4089 26.3408 22.6198 26.036 22.7501Z" fill="#4D6BFE"/>
</svg>

View File

@ -0,0 +1,105 @@
<svg width="40" height="40" viewBox="0 0 40 40" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect width="40" height="40" fill="white"/>
<mask id="mask0_3892_95663" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="6" y="6" width="28" height="29">
<path d="M20 6C20.2936 6 20.5488 6.2005 20.6205 6.48556C20.8393 7.3566 21.1277 8.20866 21.4828 9.03356C22.4116 11.191 23.6854 13.0791 25.3032 14.6968C26.9218 16.3146 28.8095 17.5888 30.9664 18.5172C31.7941 18.8735 32.6436 19.16 33.5149 19.3795C33.6533 19.4143 33.7762 19.4942 33.8641 19.6067C33.9519 19.7192 33.9998 19.8578 34 20.0005C34 20.2941 33.7995 20.5492 33.5149 20.621C32.6437 20.8399 31.7915 21.1282 30.9664 21.4833C28.8095 22.4121 26.9209 23.6859 25.3032 25.3036C23.6854 26.9223 22.4116 28.8099 21.4828 30.9669C21.1278 31.7919 20.8394 32.6439 20.6205 33.5149C20.586 33.6534 20.5062 33.7764 20.3937 33.8644C20.2813 33.9524 20.1427 34.0003 20 34.0005C19.8572 34.0003 19.7186 33.9525 19.6062 33.8645C19.4937 33.7765 19.414 33.6535 19.3795 33.5149C19.1605 32.6439 18.872 31.7918 18.5167 30.9669C17.5884 28.8099 16.3151 26.9214 14.6964 25.3036C13.0782 23.6859 11.1906 22.4121 9.03309 21.4833C8.20814 21.1283 7.35608 20.8399 6.48509 20.621C6.34667 20.5864 6.22377 20.5065 6.13589 20.3941C6.04801 20.2817 6.00018 20.1432 6 20.0005C6.00024 19.8578 6.04808 19.7192 6.13594 19.6067C6.2238 19.4942 6.34667 19.4143 6.48509 19.3795C7.35612 19.1607 8.20819 18.8723 9.03309 18.5172C11.1906 17.5888 13.0786 16.3146 14.6964 14.6968C16.3141 13.0791 17.5884 11.191 18.5167 9.03356C18.8719 8.20862 19.1604 7.35656 19.3795 6.48556C19.4508 6.2005 19.7064 6 20 6Z" fill="black"/>
<path d="M20 6C20.2936 6 20.5488 6.2005 20.6205 6.48556C20.8393 7.3566 21.1277 8.20866 21.4828 9.03356C22.4116 11.191 23.6854 13.0791 25.3032 14.6968C26.9218 16.3146 28.8095 17.5888 30.9664 18.5172C31.7941 18.8735 32.6436 19.16 33.5149 19.3795C33.6533 19.4143 33.7762 19.4942 33.8641 19.6067C33.9519 19.7192 33.9998 19.8578 34 20.0005C34 20.2941 33.7995 20.5492 33.5149 20.621C32.6437 20.8399 31.7915 21.1282 30.9664 21.4833C28.8095 22.4121 26.9209 23.6859 25.3032 25.3036C23.6854 26.9223 22.4116 28.8099 21.4828 30.9669C21.1278 31.7919 20.8394 32.6439 20.6205 33.5149C20.586 33.6534 20.5062 33.7764 20.3937 33.8644C20.2813 33.9524 20.1427 34.0003 20 34.0005C19.8572 34.0003 19.7186 33.9525 19.6062 33.8645C19.4937 33.7765 19.414 33.6535 19.3795 33.5149C19.1605 32.6439 18.872 31.7918 18.5167 30.9669C17.5884 28.8099 16.3151 26.9214 14.6964 25.3036C13.0782 23.6859 11.1906 22.4121 9.03309 21.4833C8.20814 21.1283 7.35608 20.8399 6.48509 20.621C6.34667 20.5864 6.22377 20.5065 6.13589 20.3941C6.04801 20.2817 6.00018 20.1432 6 20.0005C6.00024 19.8578 6.04808 19.7192 6.13594 19.6067C6.2238 19.4942 6.34667 19.4143 6.48509 19.3795C7.35612 19.1607 8.20819 18.8723 9.03309 18.5172C11.1906 17.5888 13.0786 16.3146 14.6964 14.6968C16.3141 13.0791 17.5884 11.191 18.5167 9.03356C18.8719 8.20862 19.1604 7.35656 19.3795 6.48556C19.4508 6.2005 19.7064 6 20 6Z" fill="url(#paint0_linear_3892_95663)"/>
</mask>
<g mask="url(#mask0_3892_95663)">
<g filter="url(#filter0_f_3892_95663)">
<path d="M3.47232 27.8921C6.70753 29.0411 10.426 26.8868 11.7778 23.0804C13.1296 19.274 11.6028 15.2569 8.36763 14.108C5.13242 12.959 1.41391 15.1133 0.06211 18.9197C-1.28969 22.7261 0.23711 26.7432 3.47232 27.8921Z" fill="#FFE432"/>
</g>
<g filter="url(#filter1_f_3892_95663)">
<path d="M17.8359 15.341C22.2806 15.341 25.8838 11.6588 25.8838 7.11644C25.8838 2.57412 22.2806 -1.10815 17.8359 -1.10815C13.3912 -1.10815 9.78809 2.57412 9.78809 7.11644C9.78809 11.6588 13.3912 15.341 17.8359 15.341Z" fill="#FC413D"/>
</g>
<g filter="url(#filter2_f_3892_95663)">
<path d="M14.7081 41.6431C19.3478 41.4163 22.8707 36.3599 22.5768 30.3493C22.283 24.3387 18.2836 19.65 13.644 19.8769C9.00433 20.1037 5.48139 25.1601 5.77525 31.1707C6.06911 37.1813 10.0685 41.87 14.7081 41.6431Z" fill="#00B95C"/>
</g>
<g filter="url(#filter3_f_3892_95663)">
<path d="M14.7081 41.6431C19.3478 41.4163 22.8707 36.3599 22.5768 30.3493C22.283 24.3387 18.2836 19.65 13.644 19.8769C9.00433 20.1037 5.48139 25.1601 5.77525 31.1707C6.06911 37.1813 10.0685 41.87 14.7081 41.6431Z" fill="#00B95C"/>
</g>
<g filter="url(#filter4_f_3892_95663)">
<path d="M19.355 38.0071C23.2447 35.6405 24.2857 30.2506 21.6803 25.9684C19.0748 21.6862 13.8095 20.1334 9.91983 22.5C6.03016 24.8666 4.98909 30.2565 7.59454 34.5387C10.2 38.8209 15.4653 40.3738 19.355 38.0071Z" fill="#00B95C"/>
</g>
<g filter="url(#filter5_f_3892_95663)">
<path d="M35.0759 24.5504C39.4477 24.5504 42.9917 21.1377 42.9917 16.9278C42.9917 12.7179 39.4477 9.30518 35.0759 9.30518C30.7042 9.30518 27.1602 12.7179 27.1602 16.9278C27.1602 21.1377 30.7042 24.5504 35.0759 24.5504Z" fill="#3186FF"/>
</g>
<g filter="url(#filter6_f_3892_95663)">
<path d="M0.362818 23.6667C4.3882 26.7279 10.2688 25.7676 13.4976 21.5219C16.7264 17.2762 16.0806 11.3528 12.0552 8.29156C8.02982 5.23037 2.14917 6.19062 -1.07959 10.4364C-4.30835 14.6821 -3.66256 20.6055 0.362818 23.6667Z" fill="#FBBC04"/>
</g>
<g filter="url(#filter7_f_3892_95663)">
<path d="M20.9877 28.1903C25.7924 31.4936 32.1612 30.5732 35.2128 26.1346C38.2644 21.696 36.8432 15.4199 32.0385 12.1166C27.2338 8.81334 20.865 9.73372 17.8134 14.1723C14.7618 18.611 16.183 24.887 20.9877 28.1903Z" fill="#3186FF"/>
</g>
<g filter="url(#filter8_f_3892_95663)">
<path d="M29.7231 4.99175C30.9455 6.65415 29.3748 9.88535 26.2149 12.2096C23.0549 14.5338 19.5026 15.0707 18.2801 13.4088C17.0576 11.7468 18.6284 8.51514 21.7883 6.19092C24.9482 3.86717 28.5006 3.32982 29.7231 4.99175Z" fill="#749BFF"/>
</g>
<g filter="url(#filter9_f_3892_95663)">
<path d="M19.6891 12.9486C24.5759 8.41581 26.2531 2.27858 23.4354 -0.759249C20.6176 -3.79708 14.3718 -2.58516 9.485 1.94765C4.59823 6.48046 2.92099 12.6177 5.73879 15.6555C8.55658 18.6933 14.8024 17.4814 19.6891 12.9486Z" fill="#FC413D"/>
</g>
<g filter="url(#filter10_f_3892_95663)">
<path d="M9.6712 29.23C12.5757 31.3088 15.9102 31.6247 17.1191 29.9356C18.328 28.2465 16.9535 25.1921 14.049 23.1133C11.1446 21.0345 7.81003 20.7186 6.60113 22.4077C5.39223 24.0968 6.76675 27.1512 9.6712 29.23Z" fill="#FFEE48"/>
</g>
</g>
<defs>
<filter id="filter0_f_3892_95663" x="-3.44095" y="10.7885" width="18.7217" height="20.4229" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="1.50514" result="effect1_foregroundBlur_3892_95663"/>
</filter>
<filter id="filter1_f_3892_95663" x="-4.76352" y="-15.6598" width="45.1989" height="45.5524" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="7.2758" result="effect1_foregroundBlur_3892_95663"/>
</filter>
<filter id="filter2_f_3892_95663" x="-6.61209" y="7.49899" width="41.5757" height="46.522" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="6.18495" result="effect1_foregroundBlur_3892_95663"/>
</filter>
<filter id="filter3_f_3892_95663" x="-6.61209" y="7.49899" width="41.5757" height="46.522" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="6.18495" result="effect1_foregroundBlur_3892_95663"/>
</filter>
<filter id="filter4_f_3892_95663" x="-6.21073" y="9.02316" width="41.6959" height="42.4608" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="6.18495" result="effect1_foregroundBlur_3892_95663"/>
</filter>
<filter id="filter5_f_3892_95663" x="15.405" y="-2.44994" width="39.3423" height="38.7556" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="5.87756" result="effect1_foregroundBlur_3892_95663"/>
</filter>
<filter id="filter6_f_3892_95663" x="-13.7886" y="-4.15284" width="39.9951" height="40.2639" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="5.32691" result="effect1_foregroundBlur_3892_95663"/>
</filter>
<filter id="filter7_f_3892_95663" x="6.6925" y="0.620963" width="39.6414" height="39.065" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="4.75678" result="effect1_foregroundBlur_3892_95663"/>
</filter>
<filter id="filter8_f_3892_95663" x="9.35225" y="-4.48661" width="29.2984" height="27.3739" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="4.25649" result="effect1_foregroundBlur_3892_95663"/>
</filter>
<filter id="filter9_f_3892_95663" x="-2.81919" y="-9.62339" width="34.8122" height="34.143" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="3.59514" result="effect1_foregroundBlur_3892_95663"/>
</filter>
<filter id="filter10_f_3892_95663" x="-2.73761" y="12.4221" width="29.1949" height="27.4994" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feGaussianBlur stdDeviation="4.44986" result="effect1_foregroundBlur_3892_95663"/>
</filter>
<linearGradient id="paint0_linear_3892_95663" x1="13.9595" y1="24.7349" x2="28.5025" y2="12.4738" gradientUnits="userSpaceOnUse">
<stop stop-color="#4893FC"/>
<stop offset="0.27" stop-color="#4893FC"/>
<stop offset="0.777" stop-color="#969DFF"/>
<stop offset="1" stop-color="#BD99FE"/>
</linearGradient>
</defs>
</svg>

View File

@ -0,0 +1,11 @@
<svg width="40" height="40" viewBox="0 0 40 40" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect width="40" height="40" fill="white"/>
<g clip-path="url(#clip0_3892_95659)">
<path d="M15.745 24.54L26.715 16.35C27.254 15.95 28.022 16.106 28.279 16.73C29.628 20.018 29.025 23.971 26.341 26.685C23.658 29.399 19.924 29.995 16.511 28.639L12.783 30.384C18.13 34.081 24.623 33.166 28.681 29.06C31.9 25.805 32.897 21.368 31.965 17.367L31.973 17.376C30.622 11.498 32.305 9.149 35.755 4.345L36 4L31.46 8.59V8.576L15.743 24.544M13.48 26.531C9.643 22.824 10.305 17.085 13.58 13.776C16 11.327 19.968 10.328 23.432 11.797L27.152 10.06C26.482 9.57 25.622 9.043 24.637 8.673C20.182 6.819 14.848 7.742 11.227 11.401C7.744 14.924 6.648 20.341 8.53 24.962C9.935 28.416 7.631 30.86 5.31 33.326C4.49 34.2 3.666 35.074 3 36L13.478 26.534" fill="black"/>
</g>
<defs>
<clipPath id="clip0_3892_95659">
<rect width="33" height="32" fill="white" transform="translate(3 4)"/>
</clipPath>
</defs>
</svg>

View File

@ -0,0 +1,17 @@
<svg width="26" height="26" viewBox="0 0 26 26" fill="none" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g clip-path="url(#clip0_3892_83671)">
<path d="M1 13C1 9.27247 1 7.4087 1.60896 5.93853C2.42092 3.97831 3.97831 2.42092 5.93853 1.60896C7.4087 1 9.27247 1 13 1C16.7275 1 18.5913 1 20.0615 1.60896C22.0217 2.42092 23.5791 3.97831 24.391 5.93853C25 7.4087 25 9.27247 25 13C25 16.7275 25 18.5913 24.391 20.0615C23.5791 22.0217 22.0217 23.5791 20.0615 24.391C18.5913 25 16.7275 25 13 25C9.27247 25 7.4087 25 5.93853 24.391C3.97831 23.5791 2.42092 22.0217 1.60896 20.0615C1 18.5913 1 16.7275 1 13Z" fill="white"/>
<rect width="24" height="24" transform="translate(1 1)" fill="url(#pattern0_3892_83671)"/>
<rect width="24" height="24" transform="translate(1 1)" fill="white" fill-opacity="0.01"/>
</g>
<path d="M13 0.75C14.8603 0.75 16.2684 0.750313 17.3945 0.827148C18.5228 0.904144 19.3867 1.05876 20.1572 1.37793C22.1787 2.21525 23.7847 3.82133 24.6221 5.84277C24.9412 6.61333 25.0959 7.47723 25.1729 8.60547C25.2497 9.73161 25.25 11.1397 25.25 13C25.25 14.8603 25.2497 16.2684 25.1729 17.3945C25.0959 18.5228 24.9412 19.3867 24.6221 20.1572C23.7847 22.1787 22.1787 23.7847 20.1572 24.6221C19.3867 24.9412 18.5228 25.0959 17.3945 25.1729C16.2684 25.2497 14.8603 25.25 13 25.25C11.1397 25.25 9.73161 25.2497 8.60547 25.1729C7.47723 25.0959 6.61333 24.9412 5.84277 24.6221C3.82133 23.7847 2.21525 22.1787 1.37793 20.1572C1.05876 19.3867 0.904144 18.5228 0.827148 17.3945C0.750313 16.2684 0.75 14.8603 0.75 13C0.75 11.1397 0.750313 9.73161 0.827148 8.60547C0.904144 7.47723 1.05876 6.61333 1.37793 5.84277C2.21525 3.82133 3.82133 2.21525 5.84277 1.37793C6.61333 1.05876 7.47723 0.904144 8.60547 0.827148C9.73161 0.750313 11.1397 0.75 13 0.75Z" stroke="#101828" stroke-opacity="0.08" stroke-width="0.5"/>
<defs>
<pattern id="pattern0_3892_83671" patternContentUnits="objectBoundingBox" width="1" height="1">
<use xlink:href="#image0_3892_83671" transform="scale(0.00625)"/>
</pattern>
<clipPath id="clip0_3892_83671">
<path d="M1 13C1 9.27247 1 7.4087 1.60896 5.93853C2.42092 3.97831 3.97831 2.42092 5.93853 1.60896C7.4087 1 9.27247 1 13 1C16.7275 1 18.5913 1 20.0615 1.60896C22.0217 2.42092 23.5791 3.97831 24.391 5.93853C25 7.4087 25 9.27247 25 13C25 16.7275 25 18.5913 24.391 20.0615C23.5791 22.0217 22.0217 23.5791 20.0615 24.391C18.5913 25 16.7275 25 13 25C9.27247 25 7.4087 25 5.93853 24.391C3.97831 23.5791 2.42092 22.0217 1.60896 20.0615C1 18.5913 1 16.7275 1 13Z" fill="white"/>
</clipPath>
<image id="image0_3892_83671" width="160" height="160" preserveAspectRatio="none" xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAKAAAACgCAYAAACLz2ctAAAACXBIWXMAACxLAAAsSwGlPZapAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAA0ySURBVHgB7Z3vsdM6E8Z93nm/QwdABUAFgQqACgIVABUAFcCpAKgAOoBUwKECOBUAFfj6yYzumNx4pV2vtKtkfzO+3CHESezHWu0frS7GiSEIjPjfEASGhAADU0KAgSkhwMCUEGBgSggwMCUEGJgSAgxMCQEGpoQAA1NCgIEpIcDAlBBgYEoIMDAlBBiYEgIMTAkBBqaEAANTQoCBKSHAwJQQYGBKCDAwJQQYmPL/4cz5/fv38PXr12G32w0/f/4crq6u9n+P/0/cvHlzuH379v7Pe/fuDZvNZv8n/i5Yx8U5LkyH6D5+/Dh8/vx5Lz4pEOGDBw+G58+fhxiljGfEly9fxkkweODUD5wX5w94nIUAf/z4UU14x4SIzwvKOHkBvn79epzmbk3ENz9evHgx/vr1awxoTnYOCCfi2bNnq+Z4a8G8cDLLMT8kOEkBwpN9+PDh3tngMvd4EzgPBC05H3j37t3eUTkknffw3PjsdMDROWnGE+PDhw8sk4s526tXr/YORM5k4vVPnz6NT58+HSeRskwypgJ4/yRG9vsnEe5NOj771DgpAUJ8JTcUAoXovn37Nq7h/fv3zZyb+XeHgE/F4z4ZAUJMJTdwMoXqzgFGJu6IqHHgM/HQ9cxJCBBhj5wA8PraES8HRtXWIsTRc+jnJHLBcDjmqbNDttvtMImv+oT+4uLiL+elFfD079y5M7x582bojrFzMLkfiNEBo1JtMB+zMMHHDjgsPY2GXYdhMOrhyV9iEt8wCXSo+fnSWCNG41TQcOvWrb9eQ0jm+vp6H07CwQ3/dBV/HDsG3uCwMBI8fvx4rAWcmNzIOyzM1eA5c50gzF3fvn3LGmXhLdee82rQrQBh9gbC4ahlhiAgbmoPD4PW9+EUVPQgwm4FSI1+EIk2kkqamhUy+I0lI2LNh1GDLgWIC9rK9MJcUmJfGnlgMmuD61Dy3eCYeC2M6FKA1PxLc8SRVNLUCHTnKIk/IpXnkS4FiCd6yeRpIAmrWAeDS0ToMX3XnQAp87t27icpXIVQvdzYnAjx4HqjOwFCZEsXWDoCpbAKx9ymggZvcyuYWup7e8sddyNA3GiIb8n8Sp9uSVgFE3+vk3p8L2r6gNc84V6AMG/wbHMi4U6yvYRVMGpri5mKkXqbC7oVIFcgKPQshZvFgPi1Y4v4vvOHStuJoa6dlrOmgTsBSlewlVYLU05Mi3le7sGCedcQYm4U9DKFcCVAbjm9xKyUjn7aIxInoK1VaEoJnWMxauJGgDnvTUuAORHUCKtIl4auTaNREYOaxRoczAWIkQEXY434tASILIYmWnWCUrOMa7t0TjwQHjAVIC7QUlhl6aLVFKDWvKhGJwYIWWI2qe/hoUjBtCQfxZypGxUFGgChwHJK8A81WVtOj8JRlMXfv39ffUE8il+nacq+ABdNlUqhliGUXPvamAkQNyp3ISEIiA7igwg9MzkNe+GhAru0ghm/aZqnsbprQYhPnjzZP7zUOpjE3bt3F1/78+fPYM5oADU5TsextQ3U+zRMsARJQPtYuVZpadXhAQeHglqumntvC5oLEBc65xFut9uj8zFPAsT3k3juubgirg9nXjwMdNiGinuepQBzTznEt4QXAR5mMUoOblyxtOJ5fhzzlikB4t9b01SAOdObKyiwFiA+QzI6SeOKGCkli91THxoQI+CMXJVGboSwEiC+FzdWmdJ4Gkjmh8ksexdgMy8YXiLltcEb9LaOdR5W4YQ+0nvRKUEDXBdcnynfzfKWJ9Huu0ZQ5zVnbEQuAV9CyxFQK4tRo4GQZH645prVpIkAcxUopZPzFgKs1U9au2WGNGwzPzysGW5igi8vLxdfg5nwYnphbpFpKM1ipC6mJSDrgHOXBpBzJLM8CVEUpHfTfXVsAOU5ckMTQ8URkHOksvw1DoImXLPsZYFSdQFS5pdbmetBgEtl+dIVddpmkBO2OYswzOQ9Ll4AbnWHpQBL43laAeQ1cEZlaxFWFyCVruJ6YRYClJblS7pZaYuh1JO3rI6uLkDKLHFpLcC1bTY8zA9LC36tPOLqAtRcx9tKgHhoNG+IRIjaZjk3N4TwLRYqVRfgUtJesjSwtgBrt9mQzA/ned215Kp3LBoYVRfg0o+VLIrxWA8ogSNAbbOc89RbZ0fMKqItusn3SsrrIpC9Noidyye37rRvJkDpvmvnTGrKviabggcfGZQlkAVqucFjdQEuPW0a6ahzBZVFqHLBru8SkLqj0nctR8EYAR0BUcDUljA3y9xSMZAbBVvdn+oCXEp4r9n+9Bi73W7onVRgwKmNnK+S41xPnJ8aBakCEk3MTDDQnGtgOWSXW1UdASMbqlyw0U6pswazDCFywPmXaDUPrC5A6inTHrUgQqlJ8gh+D/a4KzXLXAcC92ZJ4K3McHUBbjabxdfw1HIoqV/jLtz2zrzur8Qsf//+feBAibvFKFhdgHjClkZBPGGcHwkBlhZfQtzd7iB5BIgPIoQYKbPMHbVaWqhjNPGCNV1+3IBSkwSSWZaGLLyB345gshaUhWpiQcYGUG3CcEiS7pK8KtJ/mtU5UgaiAKEE7aWWS9exRUPzJiMgZYYB5mtcJJ4inJOWUf5esEyLNgtE51x+qTC4nmLwXyyzVc0EmEv/cAOpc5KnCCF6W9zeA2cxAgJqFEzhkzXAS06eYgixHMu0aFMBYgREl88lYIZfvnw5rAXmGE0tqc86Rm1vGb8Pn+GNJQE2GRnHxuS2khoG3ZVaksZC2uXwKO8vWbJp5QVrb3/GwaRDKtW1c5iFTDTXKFgsl+Q2sbQS4NK5WuyoZNYlH8sWczekxhoNbr89SXd6ye6bVgKkdlRqsUbEdJuGUjFYLtyeC7FkXcaarloWAsTDtXSu0u3P1mC+UY1lKwnNndElzco9CNB6HxEXW3Vx2t566beXRmXOHnDeBEidp1XzIhcClOyjpr2ZoMQs43tzvzs14rcWIPV7W60RNhdgiUdcMhJpIdkmgfvAeBCgVtPQtZhu1QXWRuHXrhA7BBkUpPS0sik4B4LiODxlZ6gyOCQMWn1XcwFSCe/SSPzaFWLHwPkgRCp9SIHvjvdKO5jWBNeLqkbfbrdDK8wFeH19vfgaCi+lK8Q0KjkgInS656akUqkYKnW8geuS65zftLJoNIbawTzNQ/CnZFusNY19pCGaksD5YDgHzKUlERFoibkAKU/s2LZT3LwuN2wjabULT5hz46wEmHuILbbu6mIEPEQSt8ttk5DSZ5xz4pB0T7UQYC7EhIeolec7x7UAc00icfMl+dbDC91i88E5LQVYOqK3Nr0JcwFSPf9KcpHSvC7MET5b0tl
+bYFEKwHi95U8WFp72kkwF6BWF32MlhrbV1EmSutG1RYgJy8taRSqibkAqYsp7aKqLcS0KY0G1BJVDQFyphKYF1v0hZ7jOheMv5cgMcvHjtKwCgfKE9UQYOmhXfArxYUAqdDKGgFIwjZJ+NqT8pK4YisBYvsJL7gQIFUUqTFH4ZhlSViFghNX1CokaPlgrcWFAHOtO7QEQZVCaZd3ScrySz+fKqNv9fu0cCFAQI0SmrVph/PDGvO80vDH4chbChU5OCa81lsvcHAjwNxT7WFz5RySsnyJQHIZG4gfUxfPwku4ESCg5mle9rc9hqQsf818bEnkuEY4pwfvthRXAqSyItqmWAPJPE+6++acpc/z5mCU4EqAIFcOb7m16BzNFXUcqPmfRycjhzsBlnh42qviOEjKtTQX2C/FNT1PUSjcCRCUFJ+23umb22Zjbm610FiM5A2XAsTNLlmZppmjpZCUa9X4bpzi3V5wKUCAC1oyx6qxUD3RKqxSArV81aKSWQu3AgS46KUjj+aNl+SQa6a5ci3teoj3LeFagIAjwjQiYq7GDVzjJksKVHFo548P0aig8coF/jM4Jy0l5C61xHJKtO3FgeWGN27c+Ot1LAnFOa+urvYHF6z3rd0OGAvIqeWdPwp3UHLL2Am1WmZIjhp9C4+RS7lZltJr0Y0AE9wGk5qHdlhliZKQT4tNZFrQnQCBJBisddSOt2HumhvpIb5ewy6HdCnARI31H6UmWLt7KGddcg+VQaV0LcAE5mPwBiV9BtccGIXXxiAhJk5BQ48FBxQnIcA5GJkwf+J2XYWA0TgdI5GkkTnej8/OhWPwOh4YiI4zjfBYTq9BF2GYNSC8gh6E6UggRIPwBY5j3a+m9Jt405wU/pmDz0bIR9IPEedDf8GSDbt74+QFuAZp/FGTFrFGS8z7A3omdUvlbvmlQWpw6a2zqjpjUETL0I/XFWw1CAEy0dgPhBJez4UFEmIOKATzwsvLy/0OmJI8cgJzvEePHu3b4lru22tFCFCBVNCw2+3+9boPPd40j0uFEZvNZi++cxTdnBBgYEp4wYEpIcDAlBBgYEoIMDAlBBiYEgIMTAkBBqaEAANTQoCBKSHAwJQQYGBKCDAwJQQYmBICDEwJAQamhAADU0KAgSkhwMCUEGBgSggwMCUEGJgSAgxMCQEGpvwDojzI2oXtJzYAAAAASUVORK5CYII="/>
</defs>
</svg>

View File

@ -0,0 +1,36 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "40",
"height": "40",
"viewBox": "0 0 40 40",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "rect",
"attributes": {
"width": "40",
"height": "40",
"fill": "white"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M25.7926 10.1311H21.5089L29.3208 29.869H33.6045L25.7926 10.1311ZM13.4164 10.1311L5.60449 29.869H9.97273L11.5703 25.724H19.743L21.3405 29.869H25.7087L17.8969 10.1311H13.4164ZM12.9834 22.0583L15.6566 15.1217L18.3299 22.0583H12.9834Z",
"fill": "black"
},
"children": []
}
]
},
"name": "AnthropicShortLight"
}
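Each *.json icon file added in this diff follows the same structure: the source SVG is mirrored as a nested tree of element nodes plus a top-level component name. A TypeScript sketch of that shape, inferred only from the files shown here (the real IconData type is imported from '@/app/components/base/icons/IconBase' and may differ in detail):

// Sketch of the node shape used by these generated *.json icon files,
// inferred from the files in this diff; the actual IconData type lives in
// '@/app/components/base/icons/IconBase' and may differ.
type IconNode = {
  type: 'element'
  name: string                        // SVG tag name, e.g. 'svg', 'rect', 'path'
  attributes: Record<string, string>  // raw SVG attributes, kept as strings
  children: IconNode[]
  isRootNode?: boolean                // present only on the root <svg> node
}

type IconFileData = {
  icon: IconNode
  name: string                        // exported component name, e.g. 'AnthropicShortLight'
}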

View File

@ -0,0 +1,20 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './AnthropicShortLight.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'AnthropicShortLight'
export default Icon
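The generated wrapper does nothing beyond spreading its props onto IconBase, so it can be consumed like any React component typed with SVG props. A minimal usage sketch (the relative import path, the ProviderBadge name, and the className value are illustrative assumptions, not part of this diff):

// Hypothetical consumer of the generated icon wrapper above; the relative
// import assumes the consumer sits next to the generated file.
import * as React from 'react'
import AnthropicShortLight from './AnthropicShortLight'

const ProviderBadge = () => (
  // Props are React.SVGProps<SVGSVGElement>, so standard SVG attributes
  // such as className, width, height and aria-label pass straight through.
  <AnthropicShortLight className="h-5 w-5" aria-label="Anthropic" />
)

export default ProviderBadge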

View File

@ -0,0 +1,36 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "40",
"height": "40",
"viewBox": "0 0 40 40",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "rect",
"attributes": {
"width": "40",
"height": "40",
"fill": "white"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M36.6676 11.2917C36.3316 11.1277 36.1871 11.4402 35.9906 11.599C35.9242 11.6511 35.8668 11.7188 35.8108 11.7787C35.3199 12.3048 34.747 12.6485 33.9996 12.6068C32.9046 12.5469 31.971 12.8907 31.1455 13.7293C30.9696 12.6954 30.3863 12.0782 29.4996 11.6824C29.0348 11.4766 28.5647 11.2709 28.2406 10.823C28.0127 10.5053 27.9515 10.1511 27.8368 9.80214C27.7652 9.59121 27.6923 9.37506 27.4502 9.33861C27.1871 9.29694 27.0843 9.51829 26.9814 9.70318C26.5674 10.4584 26.4084 11.2917 26.4228 12.1355C26.4592 14.0313 27.26 15.5417 28.8486 16.6173C29.0296 16.7397 29.0764 16.8646 29.0191 17.0443C28.9111 17.4141 28.7822 17.7735 28.6676 18.1433C28.596 18.3803 28.4879 18.4323 28.2354 18.3282C27.363 17.9637 26.609 17.4246 25.9436 16.7709C24.8135 15.6771 23.7914 14.4689 22.5166 13.5235C22.2171 13.3021 21.919 13.0964 21.609 12.9011C20.3082 11.6355 21.7796 10.5964 22.1194 10.474C22.4762 10.3464 22.2431 9.9037 21.092 9.90891C19.9423 9.91413 18.889 10.2995 17.5478 10.8126C17.3512 10.8907 17.1455 10.948 16.9332 10.9922C15.7158 10.7631 14.4515 10.711 13.1298 10.8594C10.6428 11.1381 8.65587 12.3152 7.19493 14.3255C5.44102 16.7397 5.02826 19.4845 5.53347 22.349C6.06473 25.3646 7.60249 27.8646 9.96707 29.8178C12.4176 31.8413 15.2406 32.8334 18.4606 32.6433C20.4163 32.5313 22.5947 32.2683 25.0504 30.1875C25.6702 30.4949 26.3199 30.6173 27.3994 30.711C28.2302 30.7891 29.0296 30.6694 29.6494 30.5417C30.6194 30.3361 30.5518 29.4375 30.2015 29.2709C27.3578 27.9454 27.9814 28.4845 27.4136 28.0495C28.859 26.3361 31.0374 24.5574 31.889 18.797C31.9554 18.3386 31.898 18.0522 31.889 17.6798C31.8838 17.4558 31.9346 17.3673 32.1923 17.3413C32.9046 17.2605 33.596 17.0651 34.2314 16.7137C36.0739 15.7058 36.816 14.0522 36.9918 12.0678C37.0179 11.7657 36.9866 11.4506 36.6676 11.2917ZM20.613 29.1485C17.8564 26.9793 16.5204 26.2657 15.9684 26.297C15.4527 26.3255 15.5452 26.9167 15.6584 27.3022C15.777 27.6823 15.9319 27.9454 16.1494 28.2787C16.2991 28.5001 16.402 28.8307 15.9996 29.0755C15.1116 29.6277 13.5687 28.8907 13.4958 28.8542C11.7001 27.797 10.1988 26.3985 9.14025 24.487C8.11941 22.6459 7.52566 20.6719 7.42801 18.5651C7.40197 18.0547 7.5517 17.875 8.05691 17.7839C8.72227 17.6615 9.40978 17.6355 10.0751 17.7318C12.8876 18.1433 15.2822 19.4037 17.2887 21.3959C18.4346 22.5339 19.3018 23.8907 20.195 25.2162C21.1442 26.6251 22.1663 27.9662 23.4671 29.0651C23.9254 29.4506 24.2926 29.7449 24.6428 29.961C23.5856 30.0782 21.8199 30.1042 20.613 29.1485ZM21.9332 20.6407C21.9332 20.4141 22.1142 20.2345 22.342 20.2345C22.3928 20.2345 22.4398 20.2449 22.4814 20.2605C22.5374 20.2813 22.5895 20.3126 22.6299 20.3594C22.7027 20.4298 22.7444 20.5339 22.7444 20.6407C22.7444 20.8673 22.5635 21.047 22.3368 21.047C22.109 21.047 21.9332 20.8673 21.9332 20.6407ZM26.036 22.7501C25.7731 22.8569 25.51 22.9506 25.2575 22.961C24.8655 22.9793 24.4371 22.8203 24.204 22.6251C23.8434 22.323 23.5856 22.1537 23.4762 21.6225C23.4306 21.3959 23.4567 21.047 23.497 20.8465C23.5908 20.4141 23.4866 20.1381 23.1832 19.8855C22.9346 19.6798 22.6207 19.6251 22.2744 19.6251C22.1455 19.6251 22.027 19.5678 21.9384 19.5209C21.7939 19.4479 21.6754 19.2683 21.7887 19.047C21.8251 18.9766 22.001 18.8022 22.0426 18.7709C22.5114 18.5027 23.053 18.5913 23.5543 18.7918C24.0191 18.9818 24.3694 19.3307 24.8746 19.823C25.3915 20.4194 25.484 20.5861 25.7783 21.0313C26.01 21.3829 26.2223 21.7422 26.3668 22.1537C26.454 22.4089 26.3408 22.6198 26.036 22.7501Z",
"fill": "#4D6BFE"
},
"children": []
}
]
},
"name": "Deepseek"
}

View File

@ -0,0 +1,20 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './Deepseek.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'Deepseek'
export default Icon

View File

@ -0,0 +1,807 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "40",
"height": "40",
"viewBox": "0 0 40 40",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "rect",
"attributes": {
"width": "40",
"height": "40",
"fill": "white"
},
"children": []
},
{
"type": "element",
"name": "mask",
"attributes": {
"id": "mask0_3892_95663",
"style": "mask-type:alpha",
"maskUnits": "userSpaceOnUse",
"x": "6",
"y": "6",
"width": "28",
"height": "29"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M20 6C20.2936 6 20.5488 6.2005 20.6205 6.48556C20.8393 7.3566 21.1277 8.20866 21.4828 9.03356C22.4116 11.191 23.6854 13.0791 25.3032 14.6968C26.9218 16.3146 28.8095 17.5888 30.9664 18.5172C31.7941 18.8735 32.6436 19.16 33.5149 19.3795C33.6533 19.4143 33.7762 19.4942 33.8641 19.6067C33.9519 19.7192 33.9998 19.8578 34 20.0005C34 20.2941 33.7995 20.5492 33.5149 20.621C32.6437 20.8399 31.7915 21.1282 30.9664 21.4833C28.8095 22.4121 26.9209 23.6859 25.3032 25.3036C23.6854 26.9223 22.4116 28.8099 21.4828 30.9669C21.1278 31.7919 20.8394 32.6439 20.6205 33.5149C20.586 33.6534 20.5062 33.7764 20.3937 33.8644C20.2813 33.9524 20.1427 34.0003 20 34.0005C19.8572 34.0003 19.7186 33.9525 19.6062 33.8645C19.4937 33.7765 19.414 33.6535 19.3795 33.5149C19.1605 32.6439 18.872 31.7918 18.5167 30.9669C17.5884 28.8099 16.3151 26.9214 14.6964 25.3036C13.0782 23.6859 11.1906 22.4121 9.03309 21.4833C8.20814 21.1283 7.35608 20.8399 6.48509 20.621C6.34667 20.5864 6.22377 20.5065 6.13589 20.3941C6.04801 20.2817 6.00018 20.1432 6 20.0005C6.00024 19.8578 6.04808 19.7192 6.13594 19.6067C6.2238 19.4942 6.34667 19.4143 6.48509 19.3795C7.35612 19.1607 8.20819 18.8723 9.03309 18.5172C11.1906 17.5888 13.0786 16.3146 14.6964 14.6968C16.3141 13.0791 17.5884 11.191 18.5167 9.03356C18.8719 8.20862 19.1604 7.35656 19.3795 6.48556C19.4508 6.2005 19.7064 6 20 6Z",
"fill": "black"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M20 6C20.2936 6 20.5488 6.2005 20.6205 6.48556C20.8393 7.3566 21.1277 8.20866 21.4828 9.03356C22.4116 11.191 23.6854 13.0791 25.3032 14.6968C26.9218 16.3146 28.8095 17.5888 30.9664 18.5172C31.7941 18.8735 32.6436 19.16 33.5149 19.3795C33.6533 19.4143 33.7762 19.4942 33.8641 19.6067C33.9519 19.7192 33.9998 19.8578 34 20.0005C34 20.2941 33.7995 20.5492 33.5149 20.621C32.6437 20.8399 31.7915 21.1282 30.9664 21.4833C28.8095 22.4121 26.9209 23.6859 25.3032 25.3036C23.6854 26.9223 22.4116 28.8099 21.4828 30.9669C21.1278 31.7919 20.8394 32.6439 20.6205 33.5149C20.586 33.6534 20.5062 33.7764 20.3937 33.8644C20.2813 33.9524 20.1427 34.0003 20 34.0005C19.8572 34.0003 19.7186 33.9525 19.6062 33.8645C19.4937 33.7765 19.414 33.6535 19.3795 33.5149C19.1605 32.6439 18.872 31.7918 18.5167 30.9669C17.5884 28.8099 16.3151 26.9214 14.6964 25.3036C13.0782 23.6859 11.1906 22.4121 9.03309 21.4833C8.20814 21.1283 7.35608 20.8399 6.48509 20.621C6.34667 20.5864 6.22377 20.5065 6.13589 20.3941C6.04801 20.2817 6.00018 20.1432 6 20.0005C6.00024 19.8578 6.04808 19.7192 6.13594 19.6067C6.2238 19.4942 6.34667 19.4143 6.48509 19.3795C7.35612 19.1607 8.20819 18.8723 9.03309 18.5172C11.1906 17.5888 13.0786 16.3146 14.6964 14.6968C16.3141 13.0791 17.5884 11.191 18.5167 9.03356C18.8719 8.20862 19.1604 7.35656 19.3795 6.48556C19.4508 6.2005 19.7064 6 20 6Z",
"fill": "url(#paint0_linear_3892_95663)"
},
"children": []
}
]
},
{
"type": "element",
"name": "g",
"attributes": {
"mask": "url(#mask0_3892_95663)"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {
"filter": "url(#filter0_f_3892_95663)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M3.47232 27.8921C6.70753 29.0411 10.426 26.8868 11.7778 23.0804C13.1296 19.274 11.6028 15.2569 8.36763 14.108C5.13242 12.959 1.41391 15.1133 0.06211 18.9197C-1.28969 22.7261 0.23711 26.7432 3.47232 27.8921Z",
"fill": "#FFE432"
},
"children": []
}
]
},
{
"type": "element",
"name": "g",
"attributes": {
"filter": "url(#filter1_f_3892_95663)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M17.8359 15.341C22.2806 15.341 25.8838 11.6588 25.8838 7.11644C25.8838 2.57412 22.2806 -1.10815 17.8359 -1.10815C13.3912 -1.10815 9.78809 2.57412 9.78809 7.11644C9.78809 11.6588 13.3912 15.341 17.8359 15.341Z",
"fill": "#FC413D"
},
"children": []
}
]
},
{
"type": "element",
"name": "g",
"attributes": {
"filter": "url(#filter2_f_3892_95663)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M14.7081 41.6431C19.3478 41.4163 22.8707 36.3599 22.5768 30.3493C22.283 24.3387 18.2836 19.65 13.644 19.8769C9.00433 20.1037 5.48139 25.1601 5.77525 31.1707C6.06911 37.1813 10.0685 41.87 14.7081 41.6431Z",
"fill": "#00B95C"
},
"children": []
}
]
},
{
"type": "element",
"name": "g",
"attributes": {
"filter": "url(#filter3_f_3892_95663)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M14.7081 41.6431C19.3478 41.4163 22.8707 36.3599 22.5768 30.3493C22.283 24.3387 18.2836 19.65 13.644 19.8769C9.00433 20.1037 5.48139 25.1601 5.77525 31.1707C6.06911 37.1813 10.0685 41.87 14.7081 41.6431Z",
"fill": "#00B95C"
},
"children": []
}
]
},
{
"type": "element",
"name": "g",
"attributes": {
"filter": "url(#filter4_f_3892_95663)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M19.355 38.0071C23.2447 35.6405 24.2857 30.2506 21.6803 25.9684C19.0748 21.6862 13.8095 20.1334 9.91983 22.5C6.03016 24.8666 4.98909 30.2565 7.59454 34.5387C10.2 38.8209 15.4653 40.3738 19.355 38.0071Z",
"fill": "#00B95C"
},
"children": []
}
]
},
{
"type": "element",
"name": "g",
"attributes": {
"filter": "url(#filter5_f_3892_95663)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M35.0759 24.5504C39.4477 24.5504 42.9917 21.1377 42.9917 16.9278C42.9917 12.7179 39.4477 9.30518 35.0759 9.30518C30.7042 9.30518 27.1602 12.7179 27.1602 16.9278C27.1602 21.1377 30.7042 24.5504 35.0759 24.5504Z",
"fill": "#3186FF"
},
"children": []
}
]
},
{
"type": "element",
"name": "g",
"attributes": {
"filter": "url(#filter6_f_3892_95663)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M0.362818 23.6667C4.3882 26.7279 10.2688 25.7676 13.4976 21.5219C16.7264 17.2762 16.0806 11.3528 12.0552 8.29156C8.02982 5.23037 2.14917 6.19062 -1.07959 10.4364C-4.30835 14.6821 -3.66256 20.6055 0.362818 23.6667Z",
"fill": "#FBBC04"
},
"children": []
}
]
},
{
"type": "element",
"name": "g",
"attributes": {
"filter": "url(#filter7_f_3892_95663)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M20.9877 28.1903C25.7924 31.4936 32.1612 30.5732 35.2128 26.1346C38.2644 21.696 36.8432 15.4199 32.0385 12.1166C27.2338 8.81334 20.865 9.73372 17.8134 14.1723C14.7618 18.611 16.183 24.887 20.9877 28.1903Z",
"fill": "#3186FF"
},
"children": []
}
]
},
{
"type": "element",
"name": "g",
"attributes": {
"filter": "url(#filter8_f_3892_95663)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M29.7231 4.99175C30.9455 6.65415 29.3748 9.88535 26.2149 12.2096C23.0549 14.5338 19.5026 15.0707 18.2801 13.4088C17.0576 11.7468 18.6284 8.51514 21.7883 6.19092C24.9482 3.86717 28.5006 3.32982 29.7231 4.99175Z",
"fill": "#749BFF"
},
"children": []
}
]
},
{
"type": "element",
"name": "g",
"attributes": {
"filter": "url(#filter9_f_3892_95663)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M19.6891 12.9486C24.5759 8.41581 26.2531 2.27858 23.4354 -0.759249C20.6176 -3.79708 14.3718 -2.58516 9.485 1.94765C4.59823 6.48046 2.92099 12.6177 5.73879 15.6555C8.55658 18.6933 14.8024 17.4814 19.6891 12.9486Z",
"fill": "#FC413D"
},
"children": []
}
]
},
{
"type": "element",
"name": "g",
"attributes": {
"filter": "url(#filter10_f_3892_95663)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M9.6712 29.23C12.5757 31.3088 15.9102 31.6247 17.1191 29.9356C18.328 28.2465 16.9535 25.1921 14.049 23.1133C11.1446 21.0345 7.81003 20.7186 6.60113 22.4077C5.39223 24.0968 6.76675 27.1512 9.6712 29.23Z",
"fill": "#FFEE48"
},
"children": []
}
]
}
]
},
{
"type": "element",
"name": "defs",
"attributes": {},
"children": [
{
"type": "element",
"name": "filter",
"attributes": {
"id": "filter0_f_3892_95663",
"x": "-3.44095",
"y": "10.7885",
"width": "18.7217",
"height": "20.4229",
"filterUnits": "userSpaceOnUse",
"color-interpolation-filters": "sRGB"
},
"children": [
{
"type": "element",
"name": "feFlood",
"attributes": {
"flood-opacity": "0",
"result": "BackgroundImageFix"
},
"children": []
},
{
"type": "element",
"name": "feBlend",
"attributes": {
"mode": "normal",
"in": "SourceGraphic",
"in2": "BackgroundImageFix",
"result": "shape"
},
"children": []
},
{
"type": "element",
"name": "feGaussianBlur",
"attributes": {
"stdDeviation": "1.50514",
"result": "effect1_foregroundBlur_3892_95663"
},
"children": []
}
]
},
{
"type": "element",
"name": "filter",
"attributes": {
"id": "filter1_f_3892_95663",
"x": "-4.76352",
"y": "-15.6598",
"width": "45.1989",
"height": "45.5524",
"filterUnits": "userSpaceOnUse",
"color-interpolation-filters": "sRGB"
},
"children": [
{
"type": "element",
"name": "feFlood",
"attributes": {
"flood-opacity": "0",
"result": "BackgroundImageFix"
},
"children": []
},
{
"type": "element",
"name": "feBlend",
"attributes": {
"mode": "normal",
"in": "SourceGraphic",
"in2": "BackgroundImageFix",
"result": "shape"
},
"children": []
},
{
"type": "element",
"name": "feGaussianBlur",
"attributes": {
"stdDeviation": "7.2758",
"result": "effect1_foregroundBlur_3892_95663"
},
"children": []
}
]
},
{
"type": "element",
"name": "filter",
"attributes": {
"id": "filter2_f_3892_95663",
"x": "-6.61209",
"y": "7.49899",
"width": "41.5757",
"height": "46.522",
"filterUnits": "userSpaceOnUse",
"color-interpolation-filters": "sRGB"
},
"children": [
{
"type": "element",
"name": "feFlood",
"attributes": {
"flood-opacity": "0",
"result": "BackgroundImageFix"
},
"children": []
},
{
"type": "element",
"name": "feBlend",
"attributes": {
"mode": "normal",
"in": "SourceGraphic",
"in2": "BackgroundImageFix",
"result": "shape"
},
"children": []
},
{
"type": "element",
"name": "feGaussianBlur",
"attributes": {
"stdDeviation": "6.18495",
"result": "effect1_foregroundBlur_3892_95663"
},
"children": []
}
]
},
{
"type": "element",
"name": "filter",
"attributes": {
"id": "filter3_f_3892_95663",
"x": "-6.61209",
"y": "7.49899",
"width": "41.5757",
"height": "46.522",
"filterUnits": "userSpaceOnUse",
"color-interpolation-filters": "sRGB"
},
"children": [
{
"type": "element",
"name": "feFlood",
"attributes": {
"flood-opacity": "0",
"result": "BackgroundImageFix"
},
"children": []
},
{
"type": "element",
"name": "feBlend",
"attributes": {
"mode": "normal",
"in": "SourceGraphic",
"in2": "BackgroundImageFix",
"result": "shape"
},
"children": []
},
{
"type": "element",
"name": "feGaussianBlur",
"attributes": {
"stdDeviation": "6.18495",
"result": "effect1_foregroundBlur_3892_95663"
},
"children": []
}
]
},
{
"type": "element",
"name": "filter",
"attributes": {
"id": "filter4_f_3892_95663",
"x": "-6.21073",
"y": "9.02316",
"width": "41.6959",
"height": "42.4608",
"filterUnits": "userSpaceOnUse",
"color-interpolation-filters": "sRGB"
},
"children": [
{
"type": "element",
"name": "feFlood",
"attributes": {
"flood-opacity": "0",
"result": "BackgroundImageFix"
},
"children": []
},
{
"type": "element",
"name": "feBlend",
"attributes": {
"mode": "normal",
"in": "SourceGraphic",
"in2": "BackgroundImageFix",
"result": "shape"
},
"children": []
},
{
"type": "element",
"name": "feGaussianBlur",
"attributes": {
"stdDeviation": "6.18495",
"result": "effect1_foregroundBlur_3892_95663"
},
"children": []
}
]
},
{
"type": "element",
"name": "filter",
"attributes": {
"id": "filter5_f_3892_95663",
"x": "15.405",
"y": "-2.44994",
"width": "39.3423",
"height": "38.7556",
"filterUnits": "userSpaceOnUse",
"color-interpolation-filters": "sRGB"
},
"children": [
{
"type": "element",
"name": "feFlood",
"attributes": {
"flood-opacity": "0",
"result": "BackgroundImageFix"
},
"children": []
},
{
"type": "element",
"name": "feBlend",
"attributes": {
"mode": "normal",
"in": "SourceGraphic",
"in2": "BackgroundImageFix",
"result": "shape"
},
"children": []
},
{
"type": "element",
"name": "feGaussianBlur",
"attributes": {
"stdDeviation": "5.87756",
"result": "effect1_foregroundBlur_3892_95663"
},
"children": []
}
]
},
{
"type": "element",
"name": "filter",
"attributes": {
"id": "filter6_f_3892_95663",
"x": "-13.7886",
"y": "-4.15284",
"width": "39.9951",
"height": "40.2639",
"filterUnits": "userSpaceOnUse",
"color-interpolation-filters": "sRGB"
},
"children": [
{
"type": "element",
"name": "feFlood",
"attributes": {
"flood-opacity": "0",
"result": "BackgroundImageFix"
},
"children": []
},
{
"type": "element",
"name": "feBlend",
"attributes": {
"mode": "normal",
"in": "SourceGraphic",
"in2": "BackgroundImageFix",
"result": "shape"
},
"children": []
},
{
"type": "element",
"name": "feGaussianBlur",
"attributes": {
"stdDeviation": "5.32691",
"result": "effect1_foregroundBlur_3892_95663"
},
"children": []
}
]
},
{
"type": "element",
"name": "filter",
"attributes": {
"id": "filter7_f_3892_95663",
"x": "6.6925",
"y": "0.620963",
"width": "39.6414",
"height": "39.065",
"filterUnits": "userSpaceOnUse",
"color-interpolation-filters": "sRGB"
},
"children": [
{
"type": "element",
"name": "feFlood",
"attributes": {
"flood-opacity": "0",
"result": "BackgroundImageFix"
},
"children": []
},
{
"type": "element",
"name": "feBlend",
"attributes": {
"mode": "normal",
"in": "SourceGraphic",
"in2": "BackgroundImageFix",
"result": "shape"
},
"children": []
},
{
"type": "element",
"name": "feGaussianBlur",
"attributes": {
"stdDeviation": "4.75678",
"result": "effect1_foregroundBlur_3892_95663"
},
"children": []
}
]
},
{
"type": "element",
"name": "filter",
"attributes": {
"id": "filter8_f_3892_95663",
"x": "9.35225",
"y": "-4.48661",
"width": "29.2984",
"height": "27.3739",
"filterUnits": "userSpaceOnUse",
"color-interpolation-filters": "sRGB"
},
"children": [
{
"type": "element",
"name": "feFlood",
"attributes": {
"flood-opacity": "0",
"result": "BackgroundImageFix"
},
"children": []
},
{
"type": "element",
"name": "feBlend",
"attributes": {
"mode": "normal",
"in": "SourceGraphic",
"in2": "BackgroundImageFix",
"result": "shape"
},
"children": []
},
{
"type": "element",
"name": "feGaussianBlur",
"attributes": {
"stdDeviation": "4.25649",
"result": "effect1_foregroundBlur_3892_95663"
},
"children": []
}
]
},
{
"type": "element",
"name": "filter",
"attributes": {
"id": "filter9_f_3892_95663",
"x": "-2.81919",
"y": "-9.62339",
"width": "34.8122",
"height": "34.143",
"filterUnits": "userSpaceOnUse",
"color-interpolation-filters": "sRGB"
},
"children": [
{
"type": "element",
"name": "feFlood",
"attributes": {
"flood-opacity": "0",
"result": "BackgroundImageFix"
},
"children": []
},
{
"type": "element",
"name": "feBlend",
"attributes": {
"mode": "normal",
"in": "SourceGraphic",
"in2": "BackgroundImageFix",
"result": "shape"
},
"children": []
},
{
"type": "element",
"name": "feGaussianBlur",
"attributes": {
"stdDeviation": "3.59514",
"result": "effect1_foregroundBlur_3892_95663"
},
"children": []
}
]
},
{
"type": "element",
"name": "filter",
"attributes": {
"id": "filter10_f_3892_95663",
"x": "-2.73761",
"y": "12.4221",
"width": "29.1949",
"height": "27.4994",
"filterUnits": "userSpaceOnUse",
"color-interpolation-filters": "sRGB"
},
"children": [
{
"type": "element",
"name": "feFlood",
"attributes": {
"flood-opacity": "0",
"result": "BackgroundImageFix"
},
"children": []
},
{
"type": "element",
"name": "feBlend",
"attributes": {
"mode": "normal",
"in": "SourceGraphic",
"in2": "BackgroundImageFix",
"result": "shape"
},
"children": []
},
{
"type": "element",
"name": "feGaussianBlur",
"attributes": {
"stdDeviation": "4.44986",
"result": "effect1_foregroundBlur_3892_95663"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "paint0_linear_3892_95663",
"x1": "13.9595",
"y1": "24.7349",
"x2": "28.5025",
"y2": "12.4738",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"stop-color": "#4893FC"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0.27",
"stop-color": "#4893FC"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "0.777",
"stop-color": "#969DFF"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#BD99FE"
},
"children": []
}
]
}
]
}
]
},
"name": "Gemini"
}

View File

@ -0,0 +1,20 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './Gemini.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'Gemini'
export default Icon

View File

@ -0,0 +1,72 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "40",
"height": "40",
"viewBox": "0 0 40 40",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "rect",
"attributes": {
"width": "40",
"height": "40",
"fill": "white"
},
"children": []
},
{
"type": "element",
"name": "g",
"attributes": {
"clip-path": "url(#clip0_3892_95659)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M15.745 24.54L26.715 16.35C27.254 15.95 28.022 16.106 28.279 16.73C29.628 20.018 29.025 23.971 26.341 26.685C23.658 29.399 19.924 29.995 16.511 28.639L12.783 30.384C18.13 34.081 24.623 33.166 28.681 29.06C31.9 25.805 32.897 21.368 31.965 17.367L31.973 17.376C30.622 11.498 32.305 9.149 35.755 4.345L36 4L31.46 8.59V8.576L15.743 24.544M13.48 26.531C9.643 22.824 10.305 17.085 13.58 13.776C16 11.327 19.968 10.328 23.432 11.797L27.152 10.06C26.482 9.57 25.622 9.043 24.637 8.673C20.182 6.819 14.848 7.742 11.227 11.401C7.744 14.924 6.648 20.341 8.53 24.962C9.935 28.416 7.631 30.86 5.31 33.326C4.49 34.2 3.666 35.074 3 36L13.478 26.534",
"fill": "black"
},
"children": []
}
]
},
{
"type": "element",
"name": "defs",
"attributes": {},
"children": [
{
"type": "element",
"name": "clipPath",
"attributes": {
"id": "clip0_3892_95659"
},
"children": [
{
"type": "element",
"name": "rect",
"attributes": {
"width": "33",
"height": "32",
"fill": "white",
"transform": "translate(3 4)"
},
"children": []
}
]
}
]
}
]
},
"name": "Grok"
}

View File

@ -0,0 +1,20 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './Grok.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'Grok'
export default Icon

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,20 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './OpenaiBlue.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'OpenaiBlue'
export default Icon

View File

@ -0,0 +1,128 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "26",
"height": "26",
"viewBox": "0 0 26 26",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg",
"xmlns:xlink": "http://www.w3.org/1999/xlink"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {
"clip-path": "url(#clip0_3892_83671)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M1 13C1 9.27247 1 7.4087 1.60896 5.93853C2.42092 3.97831 3.97831 2.42092 5.93853 1.60896C7.4087 1 9.27247 1 13 1C16.7275 1 18.5913 1 20.0615 1.60896C22.0217 2.42092 23.5791 3.97831 24.391 5.93853C25 7.4087 25 9.27247 25 13C25 16.7275 25 18.5913 24.391 20.0615C23.5791 22.0217 22.0217 23.5791 20.0615 24.391C18.5913 25 16.7275 25 13 25C9.27247 25 7.4087 25 5.93853 24.391C3.97831 23.5791 2.42092 22.0217 1.60896 20.0615C1 18.5913 1 16.7275 1 13Z",
"fill": "white"
},
"children": []
},
{
"type": "element",
"name": "rect",
"attributes": {
"width": "24",
"height": "24",
"transform": "translate(1 1)",
"fill": "url(#pattern0_3892_83671)"
},
"children": []
},
{
"type": "element",
"name": "rect",
"attributes": {
"width": "24",
"height": "24",
"transform": "translate(1 1)",
"fill": "white",
"fill-opacity": "0.01"
},
"children": []
}
]
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M13 0.75C14.8603 0.75 16.2684 0.750313 17.3945 0.827148C18.5228 0.904144 19.3867 1.05876 20.1572 1.37793C22.1787 2.21525 23.7847 3.82133 24.6221 5.84277C24.9412 6.61333 25.0959 7.47723 25.1729 8.60547C25.2497 9.73161 25.25 11.1397 25.25 13C25.25 14.8603 25.2497 16.2684 25.1729 17.3945C25.0959 18.5228 24.9412 19.3867 24.6221 20.1572C23.7847 22.1787 22.1787 23.7847 20.1572 24.6221C19.3867 24.9412 18.5228 25.0959 17.3945 25.1729C16.2684 25.2497 14.8603 25.25 13 25.25C11.1397 25.25 9.73161 25.2497 8.60547 25.1729C7.47723 25.0959 6.61333 24.9412 5.84277 24.6221C3.82133 23.7847 2.21525 22.1787 1.37793 20.1572C1.05876 19.3867 0.904144 18.5228 0.827148 17.3945C0.750313 16.2684 0.75 14.8603 0.75 13C0.75 11.1397 0.750313 9.73161 0.827148 8.60547C0.904144 7.47723 1.05876 6.61333 1.37793 5.84277C2.21525 3.82133 3.82133 2.21525 5.84277 1.37793C6.61333 1.05876 7.47723 0.904144 8.60547 0.827148C9.73161 0.750313 11.1397 0.75 13 0.75Z",
"stroke": "#101828",
"stroke-opacity": "0.08",
"stroke-width": "0.5"
},
"children": []
},
{
"type": "element",
"name": "defs",
"attributes": {},
"children": [
{
"type": "element",
"name": "pattern",
"attributes": {
"id": "pattern0_3892_83671",
"patternContentUnits": "objectBoundingBox",
"width": "1",
"height": "1"
},
"children": [
{
"type": "element",
"name": "use",
"attributes": {
"xlink:href": "#image0_3892_83671",
"transform": "scale(0.00625)"
},
"children": []
}
]
},
{
"type": "element",
"name": "clipPath",
"attributes": {
"id": "clip0_3892_83671"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M1 13C1 9.27247 1 7.4087 1.60896 5.93853C2.42092 3.97831 3.97831 2.42092 5.93853 1.60896C7.4087 1 9.27247 1 13 1C16.7275 1 18.5913 1 20.0615 1.60896C22.0217 2.42092 23.5791 3.97831 24.391 5.93853C25 7.4087 25 9.27247 25 13C25 16.7275 25 18.5913 24.391 20.0615C23.5791 22.0217 22.0217 23.5791 20.0615 24.391C18.5913 25 16.7275 25 13 25C9.27247 25 7.4087 25 5.93853 24.391C3.97831 23.5791 2.42092 22.0217 1.60896 20.0615C1 18.5913 1 16.7275 1 13Z",
"fill": "white"
},
"children": []
}
]
},
{
"type": "element",
"name": "image",
"attributes": {
"id": "image0_3892_83671",
"width": "160",
"height": "160",
"preserveAspectRatio": "none",
"xlink:href": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAKAAAACgCAYAAACLz2ctAAAACXBIWXMAACxLAAAsSwGlPZapAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAA0ySURBVHgB7Z3vsdM6E8Z93nm/QwdABUAFgQqACgIVABUAFcCpAKgAOoBUwKECOBUAFfj6yYzumNx4pV2vtKtkfzO+3CHESezHWu0frS7GiSEIjPjfEASGhAADU0KAgSkhwMCUEGBgSggwMCUEGJgSAgxMCQEGpoQAA1NCgIEpIcDAlBBgYEoIMDAlBBiYEgIMTAkBBqaEAANTQoCBKSHAwJQQYGBKCDAwJQQYmPL/4cz5/fv38PXr12G32w0/f/4crq6u9n+P/0/cvHlzuH379v7Pe/fuDZvNZv8n/i5Yx8U5LkyH6D5+/Dh8/vx5Lz4pEOGDBw+G58+fhxiljGfEly9fxkkweODUD5wX5w94nIUAf/z4UU14x4SIzwvKOHkBvn79epzmbk3ENz9evHgx/vr1awxoTnYOCCfi2bNnq+Z4a8G8cDLLMT8kOEkBwpN9+PDh3tngMvd4EzgPBC05H3j37t3eUTkknffw3PjsdMDROWnGE+PDhw8sk4s526tXr/YORM5k4vVPnz6NT58+HSeRskwypgJ4/yRG9vsnEe5NOj771DgpAUJ8JTcUAoXovn37Nq7h/fv3zZyb+XeHgE/F4z4ZAUJMJTdwMoXqzgFGJu6IqHHgM/HQ9cxJCBBhj5wA8PraES8HRtXWIsTRc+jnJHLBcDjmqbNDttvtMImv+oT+4uLiL+elFfD079y5M7x582bojrFzMLkfiNEBo1JtMB+zMMHHDjgsPY2GXYdhMOrhyV9iEt8wCXSo+fnSWCNG41TQcOvWrb9eQ0jm+vp6H07CwQ3/dBV/HDsG3uCwMBI8fvx4rAWcmNzIOyzM1eA5c50gzF3fvn3LGmXhLdee82rQrQBh9gbC4ahlhiAgbmoPD4PW9+EUVPQgwm4FSI1+EIk2kkqamhUy+I0lI2LNh1GDLgWIC9rK9MJcUmJfGnlgMmuD61Dy3eCYeC2M6FKA1PxLc8SRVNLUCHTnKIk/IpXnkS4FiCd6yeRpIAmrWAeDS0ToMX3XnQAp87t27icpXIVQvdzYnAjx4HqjOwFCZEsXWDoCpbAKx9ymggZvcyuYWup7e8sddyNA3GiIb8n8Sp9uSVgFE3+vk3p8L2r6gNc84V6AMG/wbHMi4U6yvYRVMGpri5mKkXqbC7oVIFcgKPQshZvFgPi1Y4v4vvOHStuJoa6dlrOmgTsBSlewlVYLU05Mi3le7sGCedcQYm4U9DKFcCVAbjm9xKyUjn7aIxInoK1VaEoJnWMxauJGgDnvTUuAORHUCKtIl4auTaNREYOaxRoczAWIkQEXY434tASILIYmWnWCUrOMa7t0TjwQHjAVIC7QUlhl6aLVFKDWvKhGJwYIWWI2qe/hoUjBtCQfxZypGxUFGgChwHJK8A81WVtOj8JRlMXfv39ffUE8il+nacq+ABdNlUqhliGUXPvamAkQNyp3ISEIiA7igwg9MzkNe+GhAru0ghm/aZqnsbprQYhPnjzZP7zUOpjE3bt3F1/78+fPYM5oADU5TsextQ3U+zRMsARJQPtYuVZpadXhAQeHglqumntvC5oLEBc65xFut9uj8zFPAsT3k3juubgirg9nXjwMdNiGinuepQBzTznEt4QXAR5mMUoOblyxtOJ5fhzzlikB4t9b01SAOdObKyiwFiA+QzI6SeOKGCkli91THxoQI+CMXJVGboSwEiC+FzdWmdJ4Gkjmh8ksexdgMy8YXiLltcEb9LaOdR5W4YQ+0nvRKUEDXBdcnynfzfKWJ9Huu0ZQ5zVnbEQuAV9CyxFQK4tRo4GQZH645prVpIkAcxUopZPzFgKs1U9au2WGNGwzPzysGW5igi8vLxdfg5nwYnphbpFpKM1ipC6mJSDrgHOXBpBzJLM8CVEUpHfTfXVsAOU5ckMTQ8URkHOksvw1DoImXLPsZYFSdQFS5pdbmetBgEtl+dIVddpmkBO2OYswzOQ9Ll4AbnWHpQBL43laAeQ1cEZlaxFWFyCVruJ6YRYClJblS7pZaYuh1JO3rI6uLkDKLHFpLcC1bTY8zA9LC36tPOLqAtRcx9tKgHhoNG+IRIjaZjk3N4TwLRYqVRfgUtJesjSwtgBrt9mQzA/ned215Kp3LBoYVRfg0o+VLIrxWA8ogSNAbbOc89RbZ0fMKqItusn3SsrrIpC9Noidyye37rRvJkDpvmvnTGrKviabggcfGZQlkAVqucFjdQEuPW0a6ahzBZVFqHLBru8SkLqj0nctR8EYAR0BUcDUljA3y9xSMZAbBVvdn+oCXEp4r9n+9Bi73W7onVRgwKmNnK+S41xPnJ8aBakCEk3MTDDQnGtgOWSXW1UdASMbqlyw0U6pswazDCFywPmXaDUPrC5A6inTHrUgQqlJ8gh+D/a4KzXLXAcC92ZJ4K3McHUBbjabxdfw1HIoqV/jLtz2zrzur8Qsf//+feBAibvFKFhdgHjClkZBPGGcHwkBlhZfQtzd7iB5BIgPIoQYKbPMHbVaWqhjNPGCNV1+3IBSkwSSWZaGLLyB345gshaUhWpiQcYGUG3CcEiS7pK8KtJ/mtU5UgaiAKEE7aWWS9exRUPzJiMgZYYB5mtcJJ4inJOWUf5esEyLNgtE51x+qTC4nmLwXyyzVc0EmEv/cAOpc5KnCCF6W9zeA2cxAgJqFEzhkzXAS06eYgixHMu0aFMBYgREl88lYIZfvnw5rAXmGE0tqc86Rm1vGb8Pn+GNJQE2GRnHxuS2khoG3ZVaksZC2uXwKO8vWbJp5QVrb3/GwaRDKtW1c5iFTDTXKFgsl+Q2sbQS4NK5WuyoZNYlH8sWczekxhoNbr89SXd6ye6bVgKkdlRqsUbEdJuGUjFYLtyeC7FkXcaarloWAsTDtXSu0u3P1mC+UY1lKwnNndElzco9CNB6HxEXW3Vx2t566beXRmXOHnDeBEidp1XzIhcClOyjpr2ZoMQs43tzvzs14rcWIPV7W60RNhdgiUdcMhJpIdkmgfvAeBCgVtPQtZhu1QXWRuHXrhA7BBkUpPS0sik4B4LiODxlZ6gyOCQMWn1XcwFSCe/SSPzaFWLHwPkgRCp9SIHvjvdKO5jWBNeLqkbfbrdDK8wFeH19vfgaCi+lK8Q0KjkgInS656akUqkYKnW8geuS65zftLJoNIbawTzNQ/CnZFusNY19pCGaksD5YDgHzKUlERFoibkAKU/s2LZT3LwuN2wjabULT5hz46wEmHuILbbu6mIEPEQSt8ttk5DSZ5xz4pB0T7UQYC7EhIeolec7x7UAc00icfMl+dbDC91i88E5LQVYOqK3Nr0JcwFSPf9KcpHSvC7MET5b0tl+bYFEKwHi95U8WFp72kkwF6BWF32MlhrbV1EmSutG1RYgJy8taRSqibkAqYsp7aKqLcS0KY0G1BJVDQ
FyphKYF1v0hZ7jOheMv5cgMcvHjtKwCgfKE9UQYOmhXfArxYUAqdDKGgFIwjZJ+NqT8pK4YisBYvsJL7gQIFUUqTFH4ZhlSViFghNX1CokaPlgrcWFAHOtO7QEQZVCaZd3ScrySz+fKqNv9fu0cCFAQI0SmrVph/PDGvO80vDH4chbChU5OCa81lsvcHAjwNxT7WFz5RySsnyJQHIZG4gfUxfPwku4ESCg5mle9rc9hqQsf818bEnkuEY4pwfvthRXAqSyItqmWAPJPE+6++acpc/z5mCU4EqAIFcOb7m16BzNFXUcqPmfRycjhzsBlnh42qviOEjKtTQX2C/FNT1PUSjcCRCUFJ+23umb22Zjbm610FiM5A2XAsTNLlmZppmjpZCUa9X4bpzi3V5wKUCAC1oyx6qxUD3RKqxSArV81aKSWQu3AgS46KUjj+aNl+SQa6a5ci3teoj3LeFagIAjwjQiYq7GDVzjJksKVHFo548P0aig8coF/jM4Jy0l5C61xHJKtO3FgeWGN27c+Ot1LAnFOa+urvYHF6z3rd0OGAvIqeWdPwp3UHLL2Am1WmZIjhp9C4+RS7lZltJr0Y0AE9wGk5qHdlhliZKQT4tNZFrQnQCBJBisddSOt2HumhvpIb5ewy6HdCnARI31H6UmWLt7KGddcg+VQaV0LcAE5mPwBiV9BtccGIXXxiAhJk5BQ48FBxQnIcA5GJkwf+J2XYWA0TgdI5GkkTnej8/OhWPwOh4YiI4zjfBYTq9BF2GYNSC8gh6E6UggRIPwBY5j3a+m9Jt405wU/pmDz0bIR9IPEedDf8GSDbt74+QFuAZp/FGTFrFGS8z7A3omdUvlbvmlQWpw6a2zqjpjUETL0I/XFWw1CAEy0dgPhBJez4UFEmIOKATzwsvLy/0OmJI8cgJzvEePHu3b4lru22tFCFCBVNCw2+3+9boPPd40j0uFEZvNZi++cxTdnBBgYEp4wYEpIcDAlBBgYEoIMDAlBBiYEgIMTAkBBqaEAANTQoCBKSHAwJQQYGBKCDAwJQQYmBICDEwJAQamhAADU0KAgSkhwMCUEGBgSggwMCUEGJgSAgxMCQEGpvwDojzI2oXtJzYAAAAASUVORK5CYII="
},
"children": []
}
]
}
]
},
"name": "OpenaiSmall"
}

View File

@ -0,0 +1,20 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './OpenaiSmall.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'OpenaiSmall'
export default Icon

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,20 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './OpenaiTeal.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'OpenaiTeal'
export default Icon

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,20 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './OpenaiViolet.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'OpenaiViolet'
export default Icon

View File

@ -0,0 +1,128 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "25",
"height": "25",
"viewBox": "0 0 25 25",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg",
"xmlns:xlink": "http://www.w3.org/1999/xlink"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {
"clip-path": "url(#clip0_6305_73327)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M0.5 12.5C0.5 8.77247 0.5 6.9087 1.10896 5.43853C1.92092 3.47831 3.47831 1.92092 5.43853 1.10896C6.9087 0.5 8.77247 0.5 12.5 0.5C16.2275 0.5 18.0913 0.5 19.5615 1.10896C21.5217 1.92092 23.0791 3.47831 23.891 5.43853C24.5 6.9087 24.5 8.77247 24.5 12.5C24.5 16.2275 24.5 18.0913 23.891 19.5615C23.0791 21.5217 21.5217 23.0791 19.5615 23.891C18.0913 24.5 16.2275 24.5 12.5 24.5C8.77247 24.5 6.9087 24.5 5.43853 23.891C3.47831 23.0791 1.92092 21.5217 1.10896 19.5615C0.5 18.0913 0.5 16.2275 0.5 12.5Z",
"fill": "white"
},
"children": []
},
{
"type": "element",
"name": "rect",
"attributes": {
"width": "24",
"height": "24",
"transform": "translate(0.5 0.5)",
"fill": "url(#pattern0_6305_73327)"
},
"children": []
},
{
"type": "element",
"name": "rect",
"attributes": {
"width": "24",
"height": "24",
"transform": "translate(0.5 0.5)",
"fill": "white",
"fill-opacity": "0.01"
},
"children": []
}
]
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M12.5 0.25C14.3603 0.25 15.7684 0.250313 16.8945 0.327148C18.0228 0.404144 18.8867 0.558755 19.6572 0.87793C21.6787 1.71525 23.2847 3.32133 24.1221 5.34277C24.4412 6.11333 24.5959 6.97723 24.6729 8.10547C24.7497 9.23161 24.75 10.6397 24.75 12.5C24.75 14.3603 24.7497 15.7684 24.6729 16.8945C24.5959 18.0228 24.4412 18.8867 24.1221 19.6572C23.2847 21.6787 21.6787 23.2847 19.6572 24.1221C18.8867 24.4412 18.0228 24.5959 16.8945 24.6729C15.7684 24.7497 14.3603 24.75 12.5 24.75C10.6397 24.75 9.23161 24.7497 8.10547 24.6729C6.97723 24.5959 6.11333 24.4412 5.34277 24.1221C3.32133 23.2847 1.71525 21.6787 0.87793 19.6572C0.558755 18.8867 0.404144 18.0228 0.327148 16.8945C0.250313 15.7684 0.25 14.3603 0.25 12.5C0.25 10.6397 0.250313 9.23161 0.327148 8.10547C0.404144 6.97723 0.558755 6.11333 0.87793 5.34277C1.71525 3.32133 3.32133 1.71525 5.34277 0.87793C6.11333 0.558755 6.97723 0.404144 8.10547 0.327148C9.23161 0.250313 10.6397 0.25 12.5 0.25Z",
"stroke": "#101828",
"stroke-opacity": "0.08",
"stroke-width": "0.5"
},
"children": []
},
{
"type": "element",
"name": "defs",
"attributes": {},
"children": [
{
"type": "element",
"name": "pattern",
"attributes": {
"id": "pattern0_6305_73327",
"patternContentUnits": "objectBoundingBox",
"width": "1",
"height": "1"
},
"children": [
{
"type": "element",
"name": "use",
"attributes": {
"xlink:href": "#image0_6305_73327",
"transform": "scale(0.00625)"
},
"children": []
}
]
},
{
"type": "element",
"name": "clipPath",
"attributes": {
"id": "clip0_6305_73327"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M0.5 12.5C0.5 8.77247 0.5 6.9087 1.10896 5.43853C1.92092 3.47831 3.47831 1.92092 5.43853 1.10896C6.9087 0.5 8.77247 0.5 12.5 0.5C16.2275 0.5 18.0913 0.5 19.5615 1.10896C21.5217 1.92092 23.0791 3.47831 23.891 5.43853C24.5 6.9087 24.5 8.77247 24.5 12.5C24.5 16.2275 24.5 18.0913 23.891 19.5615C23.0791 21.5217 21.5217 23.0791 19.5615 23.891C18.0913 24.5 16.2275 24.5 12.5 24.5C8.77247 24.5 6.9087 24.5 5.43853 23.891C3.47831 23.0791 1.92092 21.5217 1.10896 19.5615C0.5 18.0913 0.5 16.2275 0.5 12.5Z",
"fill": "white"
},
"children": []
}
]
},
{
"type": "element",
"name": "image",
"attributes": {
"id": "image0_6305_73327",
"width": "160",
"height": "160",
"preserveAspectRatio": "none",
"xlink:href": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAKAAAACgCAYAAACLz2ctAAAACXBIWXMAACxLAAAsSwGlPZapAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAm6SURBVHgB7Z09jFVFGIZn/Wf9I5FSKIVSKYndErolVJJQQUKliRWFNhY2UGxFItUmUJFARZaOQGekA0poF8slUcFFRVnPu9cDw/HuPWdmvpnvu3PeJ9m4MQH23H3P/DzzzczCVoMjRInXHCGKMIBEFQaQqMIAElUYQKIKA0hUYQCJKgwgUYUBJKowgEQVBpCowgASVRhAogoDSFRhAIkqDCBRhQEkqjCARBUGkKjCABJVGECiCgNIVGEAiSpvuJGyubnlLq0+cQ/u/739fSyLiwvu8JF33PKxXY6EM9oW8Mrl393dO8+Swgfw59euPXU//finI+GMMoCPNp43gflr+3u0YBJcubzZhPG5I2GMMoA/nH/84vvjJxbd/gPpIxG0hDdvsBUMZXQBRFf5cP2f7e8Pff729tfysUUnwa0bf7iNDbaCIYwugNeb8VpLO3FAC4ggpoJW8GoztiTDGVUAEb62hULg9ux5+fjoiiXGg5jYYGZNhjGaAGLicbPpIsFHTfC62gThW2p0igTXr206MozRBHDt2uYL5XK0CZ/f+rXA5037/6GgBaSWGcYoAuhrF7R+O4330Ap+cUJmQkItM4xRBNDXLkd7Viw+O/gWtUxBqg/gNO3SB7VMOaoP4DTt0gdaQIkJCbVMP1UXI8zSLn2gq77dtJ6pa8XQMheaIcCuxfLvOiZVe/e97ixTbQD7tEsfrZbxW9BYEEINMPw4880HImPaXFTbBQ/RLn1IaRlNLq4+cZapMoBDtUsfUpUymuCzWBNoxXNRZQBDtMss/DHkPIPZuFUnWV0AY7TLNCataB0eD0OR60ZbweoCGKNdpoExZE0OD1L84bq9IomqAuh3mUsJEwh/DFkTWB60RjUB7GqXwwki2R9D1gSKJKyVilUTQAntAvwxZI1Y0zJVBFBKuwCrg3UprGmZKgLY3WQUSy3apQ9LWmbuA9jVLiinisEfQ9aOJS1jYpEQA+NHG3HjLkntklp4ME9Ay+CF3btPNwLqAYQakGh5qF3CwWePYgVNVLtgdJ9S3d6Ci2+9atUufVjQMqotoN99Yuz26cE3XQzrzRgQQV46Eq5f0O0eFtoNNy/cu/PM3b0zafG1JyNqAeyWqz+4/8ydPI29ueGN8qHm6+dmmQmnXYV2Kah4kdiUPk/4L772GFClC54240zdxIN9HBZNvzWkliulUPnXd1roT9nEg6pffFkvwNREcrlSiuIBnDXjTN3Ec+r0e+5p83fcGonPC0VquVKS4j9BX0VGytkqeKvRrWCpiZvCX0VyuVKSogGEdmlnX7NIOVul7VZqX9MNRWq5UpqiARwaipSzVTCrxYoIJjTcFD5BarkyB8UCGDrBSDlbBa0gJiSXOCHZRmq5MgdFAhiz0E8tI4M17dKlyE8Tu7+CWiYNi9qlS/YApiz0U8ukYVG7dMn+E6W2QNQycVjVLl2yLwRKjMGgZfZHlg2h20ELiEnN/gNxxQ7ziD/mtqRdumQPIE5nSt3k02qZmLe41TII4Bhr/sC9xr1aUi8+C1sNLiMIzsXV9DPyEKSzKx9GVcuAlXO/zc2MGF3muZXdLhX/ma2ekpV9DIhWy8KRt1KnnpZAqsv0n9nqyf1FpkUWjrxttYx1JFcq/Ge2enJ/kQBauYkIWsb6kWvSKxX+M1u0AcXEEDyU9k1ErZaxSo6VCv+ZJ2LaVitYLICSv/zUahmLrWDOlQr/ma2d3F9UjVu4iajVMtbIuVLhP/NkU7qdCUnRAEr+8iWqZaxQYqXCf2b4UCtKqvjiILXM/ym1UmFRy6isTlPLvKRkgahFLaMSQGqZl5Qej1rTMmr1OdQyOgWi1rSMWgDHrmVStUtKmZslLaNaoThmLZN6jDBmshLPrK1lVAM4Vi0jdYxwyhjOipZRr9Eeo5aROkY4dQxnQcuY2CQwJi2Teoxw94BxqWfW0jImAjgmLZN6jHCX1DGctpYxs01qDFpmOWHiMWt3YcoYTlvLmAlg7VomdeKB8vpZSD1zaS1jaqOoFS2TY202Vbv0hUJKRZXWMqYCaEXLSM3MW0rd3jSPWsbcVvkatQwG+rGE3N40j1rG3lkNzo6WkahSSXmhYu51mzctYzKAVrQMxoKpExJp7dKHpJYpcXWZ2X2KuDNE4g1stUxMK9TOzGNPW82lXfrAn3u6+djtitzEjwAiyCWurTUbwKuCt3tLnC0Teo9cbu3SB168VGIvDgrBZBc8RDuEoKFlcmuXEhw/8a7LjbkAouvJccB4SS1Tw6XZ+PlLFMuaC2Cut7+klimlXXKBF6hUjaSpAOa+Tr6ElimtXXIgtSI1BFMBXMssP0tomdLaRZrSZ0mbCeBkopD/AMmc1TJa2kWSo4W3J5gJYMk7PXJUy2hrFwmgXUqfJW0igKW1A1rA2JPzd9Iy1C5xqAcwl3bpI6VypDvRoHaJRz2AWm+/pJahdolHNYDa2iFVy6A7pnZJQ3UteH1dpugRV0Hs3Rf3KLjCIEY7oOVGK0rtkoZqAGOvXHj171hwX379ftE3uB23Uruko9oFS+zF1TjgBy0XamPmXbvgs9O+wku9HAtT/++/+9XFgO6j9BvctlynTr+rrl1Snh9DFgxdtFEPID6Epf9q7kLR6D7Q+qVol0nFsszEA89v9RLCoZgQ0TGb0j9uglv6w29PpUrRLlL7bjWePwcmAojwhW5K/6qZeJQGLZcV7aLx/DkwdTTH0DGVhrVvx20WtIvWqkUOTD3FyQFdm8aBkhLaBRt8JLSL1XtOYjEVwCFaZl61y4Xzj50ES4qrFjkw9ySzKjI0tYuFaheN58+NuQC2WmYa1C51hQ+YbMunaRkN7YDqaWqXvJgM4DQto6EdsH+E2iUvZk9GQCt42xs7fXvmF6fB3oQja6ld+jH9VCcTuj4pYjcxUbsMY2GrwRkGv8gSpzR1weQBtYIAXfCZwLNl0GJLjP0QvhonHy3mA6jJhfNPmhZwEkJUvwydBEC7XFyN33/cgtn3uZXdrmbqHFgI4W9EH3q2DLVLGAzgDPyN6EM3MVG7hMEA9hByhQG1SzgMYA/+RvS+s2WoXcJhAAfgy+idtAy1SxwM4ED6rjBgtUscDOBA/PMBu2fLsNolHgYwAF/LtGfLULukwQAGME3LULukwZWQQBA8LLPhv+19GhKcbVY8xjT2a2ELGEhXy0gwJu3ShQGMAIFZFrpg+5NmcpPjeth5gV0wUYUtIFGFASSqMIBEFQaQqMIAElUYQKIKA0hUYQCJKgwgUYUBJKowgEQVBpCowgASVRhAogoDSFRhAIkqDCBRhQEkqjCARBUGkKjCABJVGECiCgNIVPkXGPWKHZj1nMYAAAAASUVORK5CYII="
},
"children": []
}
]
}
]
},
"name": "Tongyi"
}

View File

@ -0,0 +1,20 @@
// GENERATE BY script
// DO NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './Tongyi.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'Tongyi'
export default Icon

View File

@ -1,6 +1,7 @@
export { default as Anthropic } from './Anthropic'
export { default as AnthropicDark } from './AnthropicDark'
export { default as AnthropicLight } from './AnthropicLight'
export { default as AnthropicShortLight } from './AnthropicShortLight'
export { default as AnthropicText } from './AnthropicText'
export { default as Azureai } from './Azureai'
export { default as AzureaiText } from './AzureaiText'
@ -12,8 +13,11 @@ export { default as Chatglm } from './Chatglm'
export { default as ChatglmText } from './ChatglmText'
export { default as Cohere } from './Cohere'
export { default as CohereText } from './CohereText'
export { default as Deepseek } from './Deepseek'
export { default as Gemini } from './Gemini'
export { default as Gpt3 } from './Gpt3'
export { default as Gpt4 } from './Gpt4'
export { default as Grok } from './Grok'
export { default as Huggingface } from './Huggingface'
export { default as HuggingfaceText } from './HuggingfaceText'
export { default as HuggingfaceTextHub } from './HuggingfaceTextHub'
@ -26,14 +30,19 @@ export { default as Localai } from './Localai'
export { default as LocalaiText } from './LocalaiText'
export { default as Microsoft } from './Microsoft'
export { default as OpenaiBlack } from './OpenaiBlack'
export { default as OpenaiBlue } from './OpenaiBlue'
export { default as OpenaiGreen } from './OpenaiGreen'
export { default as OpenaiSmall } from './OpenaiSmall'
export { default as OpenaiTeal } from './OpenaiTeal'
export { default as OpenaiText } from './OpenaiText'
export { default as OpenaiTransparent } from './OpenaiTransparent'
export { default as OpenaiViolet } from './OpenaiViolet'
export { default as OpenaiYellow } from './OpenaiYellow'
export { default as Openllm } from './Openllm'
export { default as OpenllmText } from './OpenllmText'
export { default as Replicate } from './Replicate'
export { default as ReplicateText } from './ReplicateText'
export { default as Tongyi } from './Tongyi'
export { default as XorbitsInference } from './XorbitsInference'
export { default as XorbitsInferenceText } from './XorbitsInferenceText'
export { default as Zhipuai } from './Zhipuai'

View File

@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />

View File

@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />

View File

@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />

View File

@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />

View File

@ -1,14 +1,16 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "80px",
"height": "18px",
"viewBox": "0 0 80 18",
"version": "1.1"
"version": "1.1",
"xmlns": "http://www.w3.org/2000/svg",
"xmlns:xlink": "http://www.w3.org/1999/xlink"
},
"isRootNode": true,
"children": [
{
"type": "element",

View File

@ -1,14 +1,16 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "80px",
"height": "18px",
"width": "120px",
"height": "27px",
"viewBox": "0 0 80 18",
"version": "1.1"
"version": "1.1",
"xmlns": "http://www.w3.org/2000/svg",
"xmlns:xlink": "http://www.w3.org/1999/xlink"
},
"isRootNode": true,
"children": [
{
"type": "element",

View File

@ -205,7 +205,7 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
}
catch {
try {
// eslint-disable-next-line no-new-func, sonarjs/code-eval
// eslint-disable-next-line no-new-func
const result = new Function(`return ${trimmedContent}`)()
if (typeof result === 'object' && result !== null) {
setFinalChartOption(result)
@ -250,7 +250,7 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
}
catch {
try {
// eslint-disable-next-line no-new-func, sonarjs/code-eval
// eslint-disable-next-line no-new-func
const result = new Function(`return ${trimmedContent}`)()
if (typeof result === 'object' && result !== null) {
setFinalChartOption(result)

View File

@ -75,6 +75,9 @@ const buildAppContext = (overrides: Partial<AppContextValue> = {}): AppContextVa
created_at: 0,
role: 'normal',
providers: [],
trial_credits: 200,
trial_credits_used: 0,
next_credit_reset_date: 0,
}
const langGeniusVersionInfo: LangGeniusVersionResponse = {
current_env: '',
@ -96,6 +99,7 @@ const buildAppContext = (overrides: Partial<AppContextValue> = {}): AppContextVa
mutateCurrentWorkspace: vi.fn(),
langGeniusVersionInfo,
isLoadingCurrentWorkspace: false,
isValidatingCurrentWorkspace: false,
}
const useSelector: AppContextValue['useSelector'] = selector => selector({ ...base, useSelector })
return {

View File

@ -28,10 +28,10 @@ const RuleDetail = ({
case 'mode':
value = !sourceData?.mode
? value
// eslint-disable-next-line sonarjs/no-nested-conditional
: sourceData.mode === ProcessMode.general
? (t('embedding.custom', { ns: 'datasetDocuments' }) as string)
// eslint-disable-next-line sonarjs/no-nested-conditional
: `${t('embedding.hierarchical', { ns: 'datasetDocuments' })} · ${sourceData?.rules?.parent_mode === 'paragraph'
? t('parentMode.paragraph', { ns: 'dataset' })
: t('parentMode.fullDoc', { ns: 'dataset' })}`
@ -70,7 +70,7 @@ const RuleDetail = ({
src={
retrievalMethod === RETRIEVE_METHOD.fullText
? retrievalIcon.fullText
// eslint-disable-next-line sonarjs/no-nested-conditional
: retrievalMethod === RETRIEVE_METHOD.hybrid
? retrievalIcon.hybrid
: retrievalIcon.vector

View File

@ -2142,8 +2142,8 @@ describe('Integration: Hit Testing Flow', () => {
fireEvent.click(submitButton)
await waitFor(() => {
// Verify component is rendered (use hidden: true in case dialog opened)
expect(screen.getByRole('textbox', { hidden: true })).toBeInTheDocument()
// Verify component is rendered
expect(screen.getByRole('textbox')).toBeInTheDocument()
})
})

View File

@ -403,7 +403,7 @@ const Form = () => {
</div>
</>
)
// eslint-disable-next-line sonarjs/no-nested-conditional
: indexMethod
? (
<>

View File

@ -6,8 +6,10 @@ import {
RiBrainLine,
} from '@remixicon/react'
import { useDebounce } from 'ahooks'
import { useMemo } from 'react'
import { useEffect, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { IS_CLOUD_EDITION } from '@/config'
import { useAppContext } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { useProviderContext } from '@/context/provider-context'
import { cn } from '@/utils/classnames'
@ -20,6 +22,7 @@ import {
} from './hooks'
import InstallFromMarketplace from './install-from-marketplace'
import ProviderAddedCard from './provider-added-card'
import QuotaPanel from './provider-added-card/quota-panel'
import SystemModelSelector from './system-model-selector'
type Props = {
@ -31,6 +34,7 @@ const FixedModelProvider = ['langgenius/openai/openai', 'langgenius/anthropic/an
const ModelProviderPage = ({ searchText }: Props) => {
const debouncedSearchText = useDebounce(searchText, { wait: 500 })
const { t } = useTranslation()
const { mutateCurrentWorkspace, isValidatingCurrentWorkspace } = useAppContext()
const { data: textGenerationDefaultModel, isLoading: isTextGenerationDefaultModelLoading } = useDefaultModel(ModelTypeEnum.textGeneration)
const { data: embeddingsDefaultModel, isLoading: isEmbeddingsDefaultModelLoading } = useDefaultModel(ModelTypeEnum.textEmbedding)
const { data: rerankDefaultModel, isLoading: isRerankDefaultModelLoading } = useDefaultModel(ModelTypeEnum.rerank)
@ -88,6 +92,10 @@ const ModelProviderPage = ({ searchText }: Props) => {
return [filteredConfiguredProviders, filteredNotConfiguredProviders]
}, [configuredProviders, debouncedSearchText, notConfiguredProviders])
useEffect(() => {
mutateCurrentWorkspace()
}, [mutateCurrentWorkspace])
return (
<div className="relative -mt-2 pt-1">
<div className={cn('mb-2 flex items-center')}>
@ -115,6 +123,7 @@ const ModelProviderPage = ({ searchText }: Props) => {
/>
</div>
</div>
{IS_CLOUD_EDITION && <QuotaPanel providers={providers} isLoading={isValidatingCurrentWorkspace} />}
{!filteredConfiguredProviders?.length && (
<div className="mb-2 rounded-[10px] bg-workflow-process-bg p-4">
<div className="flex h-10 w-10 items-center justify-center rounded-[10px] border-[0.5px] border-components-card-border bg-components-card-bg shadow-lg backdrop-blur">

View File

@ -7,6 +7,7 @@ import { useToastContext } from '@/app/components/base/toast'
import { ConfigProvider } from '@/app/components/header/account-setting/model-provider-page/model-auth'
import { useCredentialStatus } from '@/app/components/header/account-setting/model-provider-page/model-auth/hooks'
import Indicator from '@/app/components/header/indicator'
import { IS_CLOUD_EDITION } from '@/config'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import { changeModelProviderPriority } from '@/service/common'
import { cn } from '@/utils/classnames'
@ -26,6 +27,7 @@ import PriorityUseTip from './priority-use-tip'
type CredentialPanelProps = {
provider: ModelProvider
}
const CredentialPanel = ({
provider,
}: CredentialPanelProps) => {
@ -47,6 +49,8 @@ const CredentialPanel = ({
notAllowedToUse,
} = useCredentialStatus(provider)
const showPrioritySelector = systemConfig.enabled && isCustomConfigured && IS_CLOUD_EDITION
const handleChangePriority = async (key: PreferredProviderTypeEnum) => {
const res = await changeModelProviderPriority({
url: `/workspaces/current/model-providers/${provider.provider}/preferred-provider-type`,
@ -114,7 +118,7 @@ const CredentialPanel = ({
provider={provider}
/>
{
systemConfig.enabled && isCustomConfigured && (
showPrioritySelector && (
<PrioritySelector
value={priorityUseType}
onSelect={handleChangePriority}
@ -131,7 +135,7 @@ const CredentialPanel = ({
)
}
{
systemConfig.enabled && isCustomConfigured && !provider.provider_credential_schema && (
showPrioritySelector && !provider.provider_credential_schema && (
<div className="ml-1">
<PrioritySelector
value={priorityUseType}

View File

@ -3,6 +3,7 @@ import type {
ModelItem,
ModelProvider,
} from '../declarations'
import type { ModelProviderQuotaGetPaid } from '../utils'
import {
RiArrowRightSLine,
RiInformation2Fill,
@ -28,7 +29,6 @@ import {
} from '../utils'
import CredentialPanel from './credential-panel'
import ModelList from './model-list'
import QuotaPanel from './quota-panel'
export const UPDATE_MODEL_PROVIDER_CUSTOM_MODEL_LIST = 'UPDATE_MODEL_PROVIDER_CUSTOM_MODEL_LIST'
type ProviderAddedCardProps = {
@ -49,7 +49,7 @@ const ProviderAddedCard: FC<ProviderAddedCardProps> = ({
const systemConfig = provider.system_configuration
const hasModelList = fetched && !!modelList.length
const { isCurrentWorkspaceManager } = useAppContext()
const showQuota = systemConfig.enabled && [...MODEL_PROVIDER_QUOTA_GET_PAID].includes(provider.provider) && !IS_CE_EDITION
const showModelProvider = systemConfig.enabled && MODEL_PROVIDER_QUOTA_GET_PAID.includes(provider.provider as ModelProviderQuotaGetPaid) && !IS_CE_EDITION
const showCredential = configurationMethods.includes(ConfigurationMethodEnum.predefinedModel) && isCurrentWorkspaceManager
const getModelList = async (providerName: string) => {
@ -104,13 +104,6 @@ const ProviderAddedCard: FC<ProviderAddedCardProps> = ({
}
</div>
</div>
{
showQuota && (
<QuotaPanel
provider={provider}
/>
)
}
{
showCredential && (
<CredentialPanel
@ -122,7 +115,7 @@ const ProviderAddedCard: FC<ProviderAddedCardProps> = ({
{
collapsed && (
<div className="system-xs-medium group flex items-center justify-between border-t border-t-divider-subtle py-1.5 pl-2 pr-[11px] text-text-tertiary">
{(showQuota || !notConfigured) && (
{(showModelProvider || !notConfigured) && (
<>
<div className="flex h-6 items-center pl-1 pr-1.5 leading-6 group-hover:hidden">
{
@ -150,7 +143,7 @@ const ProviderAddedCard: FC<ProviderAddedCardProps> = ({
</div>
</>
)}
{!showQuota && notConfigured && (
{!showModelProvider && notConfigured && (
<div className="flex h-6 items-center pl-1 pr-1.5">
<RiInformation2Fill className="mr-1 h-4 w-4 text-text-accent" />
<span className="system-xs-medium text-text-secondary">{t('modelProvider.configureTip', { ns: 'common' })}</span>

View File

@ -1,66 +1,163 @@
import type { FC } from 'react'
import type { ModelProvider } from '../declarations'
import type { Plugin } from '@/app/components/plugins/types'
import { useBoolean } from 'ahooks'
import * as React from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { AnthropicShortLight, Deepseek, Gemini, Grok, OpenaiSmall, Tongyi } from '@/app/components/base/icons/src/public/llm'
import Loading from '@/app/components/base/loading'
import Tooltip from '@/app/components/base/tooltip'
import InstallFromMarketplace from '@/app/components/plugins/install-plugin/install-from-marketplace'
import { useAppContext } from '@/context/app-context'
import useTimestamp from '@/hooks/use-timestamp'
import { cn } from '@/utils/classnames'
import { formatNumber } from '@/utils/format'
import {
CustomConfigurationStatusEnum,
PreferredProviderTypeEnum,
QuotaUnitEnum,
} from '../declarations'
import {
MODEL_PROVIDER_QUOTA_GET_PAID,
} from '../utils'
import PriorityUseTip from './priority-use-tip'
import { PreferredProviderTypeEnum } from '../declarations'
import { useMarketplaceAllPlugins } from '../hooks'
import { modelNameMap, ModelProviderQuotaGetPaid } from '../utils'
const allProviders = [
{ key: ModelProviderQuotaGetPaid.OPENAI, Icon: OpenaiSmall },
{ key: ModelProviderQuotaGetPaid.ANTHROPIC, Icon: AnthropicShortLight },
{ key: ModelProviderQuotaGetPaid.GEMINI, Icon: Gemini },
{ key: ModelProviderQuotaGetPaid.X, Icon: Grok },
{ key: ModelProviderQuotaGetPaid.DEEPSEEK, Icon: Deepseek },
{ key: ModelProviderQuotaGetPaid.TONGYI, Icon: Tongyi },
] as const
// Map provider key to plugin ID
// provider key format: langgenius/provider/model, plugin ID format: langgenius/provider
const providerKeyToPluginId: Record<string, string> = {
[ModelProviderQuotaGetPaid.OPENAI]: 'langgenius/openai',
[ModelProviderQuotaGetPaid.ANTHROPIC]: 'langgenius/anthropic',
[ModelProviderQuotaGetPaid.GEMINI]: 'langgenius/gemini',
[ModelProviderQuotaGetPaid.X]: 'langgenius/x',
[ModelProviderQuotaGetPaid.DEEPSEEK]: 'langgenius/deepseek',
[ModelProviderQuotaGetPaid.TONGYI]: 'langgenius/tongyi',
}
type QuotaPanelProps = {
provider: ModelProvider
providers: ModelProvider[]
isLoading?: boolean
}
const QuotaPanel: FC<QuotaPanelProps> = ({
provider,
providers,
isLoading = false,
}) => {
const { t } = useTranslation()
const { currentWorkspace } = useAppContext()
const credits = Math.max((currentWorkspace.trial_credits - currentWorkspace.trial_credits_used) || 0, 0)
const providerMap = useMemo(() => new Map(
providers.map(p => [p.provider, p.preferred_provider_type]),
), [providers])
const { formatTime } = useTimestamp()
const {
plugins: allPlugins,
} = useMarketplaceAllPlugins(providers, '')
const [selectedPlugin, setSelectedPlugin] = useState<Plugin | null>(null)
const [isShowInstallModal, {
setTrue: showInstallFromMarketplace,
setFalse: hideInstallFromMarketplace,
}] = useBoolean(false)
const selectedPluginIdRef = useRef<string | null>(null)
const customConfig = provider.custom_configuration
const priorityUseType = provider.preferred_provider_type
const systemConfig = provider.system_configuration
const currentQuota = systemConfig.enabled && systemConfig.quota_configurations.find(item => item.quota_type === systemConfig.current_quota_type)
const openaiOrAnthropic = MODEL_PROVIDER_QUOTA_GET_PAID.includes(provider.provider)
const handleIconClick = useCallback((key: string) => {
const providerType = providerMap.get(key)
if (!providerType && allPlugins) {
const pluginId = providerKeyToPluginId[key]
const plugin = allPlugins.find(p => p.plugin_id === pluginId)
if (plugin) {
setSelectedPlugin(plugin)
selectedPluginIdRef.current = pluginId
showInstallFromMarketplace()
}
}
}, [allPlugins, providerMap, showInstallFromMarketplace])
useEffect(() => {
if (isShowInstallModal && selectedPluginIdRef.current) {
const isInstalled = providers.some(p => p.provider.startsWith(selectedPluginIdRef.current!))
if (isInstalled) {
hideInstallFromMarketplace()
selectedPluginIdRef.current = null
}
}
}, [providers, isShowInstallModal, hideInstallFromMarketplace])
if (isLoading) {
return (
<div className="my-2 flex min-h-[72px] items-center justify-center rounded-xl border-[0.5px] border-components-panel-border bg-third-party-model-bg-default shadow-xs">
<Loading />
</div>
)
}
return (
<div className="group relative min-w-[112px] shrink-0 rounded-lg border-[0.5px] border-components-panel-border bg-white/[0.18] px-3 py-2 shadow-xs">
<div className={cn('my-2 min-w-[72px] shrink-0 rounded-xl border-[0.5px] pb-2.5 pl-4 pr-2.5 pt-3 shadow-xs', credits <= 0 ? 'border-state-destructive-border hover:bg-state-destructive-hover' : 'border-components-panel-border bg-third-party-model-bg-default')}>
<div className="system-xs-medium-uppercase mb-2 flex h-4 items-center text-text-tertiary">
{t('modelProvider.quota', { ns: 'common' })}
<Tooltip popupContent={
openaiOrAnthropic
? t('modelProvider.card.tip', { ns: 'common' })
: t('modelProvider.quotaTip', { ns: 'common' })
}
/>
<Tooltip popupContent={t('modelProvider.card.tip', { ns: 'common' })} />
</div>
{
currentQuota && (
<div className="flex h-4 items-center text-xs text-text-tertiary">
<span className="system-md-semibold-uppercase mr-0.5 text-text-secondary">{formatNumber(Math.max((currentQuota?.quota_limit || 0) - (currentQuota?.quota_used || 0), 0))}</span>
{
currentQuota?.quota_unit === QuotaUnitEnum.tokens && 'Tokens'
<div className="flex items-center justify-between">
<div className="flex items-center gap-1 text-xs text-text-tertiary">
<span className="system-md-semibold-uppercase mr-0.5 text-text-secondary">{formatNumber(credits)}</span>
<span>{t('modelProvider.credits', { ns: 'common' })}</span>
{currentWorkspace.next_credit_reset_date
? (
<>
<span>·</span>
<span>
{t('modelProvider.resetDate', {
ns: 'common',
date: formatTime(currentWorkspace.next_credit_reset_date, t('dateFormat', { ns: 'appLog' })),
interpolation: { escapeValue: false },
})}
</span>
</>
)
: null}
</div>
<div className="flex items-center gap-1">
{allProviders.map(({ key, Icon }) => {
const providerType = providerMap.get(key)
const usingQuota = providerType === PreferredProviderTypeEnum.system
const getTooltipKey = () => {
if (usingQuota)
return 'modelProvider.card.modelSupported'
if (providerType === PreferredProviderTypeEnum.custom)
return 'modelProvider.card.modelAPI'
return 'modelProvider.card.modelNotSupported'
}
{
currentQuota?.quota_unit === QuotaUnitEnum.times && t('modelProvider.callTimes', { ns: 'common' })
}
{
currentQuota?.quota_unit === QuotaUnitEnum.credits && t('modelProvider.credits', { ns: 'common' })
}
</div>
)
}
{
priorityUseType === PreferredProviderTypeEnum.system && customConfig.status === CustomConfigurationStatusEnum.active && (
<PriorityUseTip />
)
}
return (
<Tooltip
key={key}
popupContent={t(getTooltipKey(), { modelName: modelNameMap[key], ns: 'common' })}
>
<div
className={cn('relative h-6 w-6', !providerType && 'cursor-pointer hover:opacity-80')}
onClick={() => handleIconClick(key)}
>
<Icon className="h-6 w-6 rounded-lg" />
{!usingQuota && (
<div className="absolute inset-0 rounded-lg border-[0.5px] border-components-panel-border-subtle bg-background-default-dodge opacity-30" />
)}
</div>
</Tooltip>
)
})}
</div>
</div>
{isShowInstallModal && selectedPlugin && (
<InstallFromMarketplace
manifest={selectedPlugin}
uniqueIdentifier={selectedPlugin.latest_package_identifier}
onClose={hideInstallFromMarketplace}
onSuccess={hideInstallFromMarketplace}
/>
)}
</div>
)
}
export default QuotaPanel
export default React.memo(QuotaPanel)

View File

@ -17,7 +17,25 @@ import {
ModelTypeEnum,
} from './declarations'
export const MODEL_PROVIDER_QUOTA_GET_PAID = ['langgenius/anthropic/anthropic', 'langgenius/openai/openai', 'langgenius/azure_openai/azure_openai']
export enum ModelProviderQuotaGetPaid {
ANTHROPIC = 'langgenius/anthropic/anthropic',
OPENAI = 'langgenius/openai/openai',
// AZURE_OPENAI = 'langgenius/azure_openai/azure_openai',
GEMINI = 'langgenius/gemini/google',
X = 'langgenius/x/x',
DEEPSEEK = 'langgenius/deepseek/deepseek',
TONGYI = 'langgenius/tongyi/tongyi',
}
export const MODEL_PROVIDER_QUOTA_GET_PAID = [ModelProviderQuotaGetPaid.ANTHROPIC, ModelProviderQuotaGetPaid.OPENAI, ModelProviderQuotaGetPaid.GEMINI, ModelProviderQuotaGetPaid.X, ModelProviderQuotaGetPaid.DEEPSEEK, ModelProviderQuotaGetPaid.TONGYI]
export const modelNameMap = {
[ModelProviderQuotaGetPaid.OPENAI]: 'OpenAI',
[ModelProviderQuotaGetPaid.ANTHROPIC]: 'Anthropic',
[ModelProviderQuotaGetPaid.GEMINI]: 'Gemini',
[ModelProviderQuotaGetPaid.X]: 'xAI',
[ModelProviderQuotaGetPaid.DEEPSEEK]: 'DeepSeek',
[ModelProviderQuotaGetPaid.TONGYI]: 'Tongyi',
}
export const isNullOrUndefined = (value: any) => {
return value === undefined || value === null

View File

@ -92,7 +92,7 @@ const ProviderCardComponent: FC<Props> = ({
manifest={payload}
uniqueIdentifier={payload.latest_package_identifier}
onClose={hideInstallFromMarketplace}
onSuccess={() => hideInstallFromMarketplace()}
onSuccess={hideInstallFromMarketplace}
/>
)
}

View File

@ -23,7 +23,7 @@ describe('GetSchema', () => {
it('shows an error when the URL is not http', () => {
fireEvent.click(screen.getByText('tools.createTool.importFromUrl'))
const input = screen.getByPlaceholderText('tools.createTool.importFromUrlPlaceHolder')
// eslint-disable-next-line sonarjs/no-clear-text-protocols
fireEvent.change(input, { target: { value: 'ftp://invalid' } })
fireEvent.click(screen.getByText('common.operation.ok'))

View File

@ -474,7 +474,6 @@ const formatItem = (
break
}
// eslint-disable-next-line sonarjs/no-duplicated-branches
case BlockEnum.VariableAggregator: {
const { output_type, advanced_settings }
= data as VariableAssignerNodeType
@ -1875,7 +1874,7 @@ export const updateNodeVars = (
}
break
}
// eslint-disable-next-line sonarjs/no-duplicated-branches
case BlockEnum.VariableAggregator: {
const payload = data as VariableAssignerNodeType
if (payload.variables) {

View File

@ -139,20 +139,22 @@ const Panel: FC<NodePanelProps<DataSourceNodeType>> = ({ id, data }) => {
return (
<div key={outputItem.name}>
{outputItem.value?.type === 'object' ? (
<StructureOutputItem
rootClassName="code-sm-semibold text-text-secondary"
payload={wrapStructuredVarItem(outputItem, schemaType)}
/>
) : (
<VarItem
name={outputItem.name}
// eslint-disable-next-line sonarjs/no-nested-template-literals
type={`${outputItem.type.toLocaleLowerCase()}${schemaType ? ` (${schemaType})` : ''}`}
description={outputItem.description}
isIndent={hasObjectOutput}
/>
)}
{outputItem.value?.type === 'object'
? (
<StructureOutputItem
rootClassName="code-sm-semibold text-text-secondary"
payload={wrapStructuredVarItem(outputItem, schemaType)}
/>
)
: (
<VarItem
name={outputItem.name}
type={`${outputItem.type.toLocaleLowerCase()}${schemaType ? ` (${schemaType})` : ''}`}
description={outputItem.description}
isIndent={hasObjectOutput}
/>
)}
</div>
)
})}

View File

@ -100,7 +100,7 @@ const RetrievalConfig: FC<Props> = ({
score_threshold: configs.score_threshold_enabled ? (configs.score_threshold ?? DATASET_DEFAULT.score_threshold) : null,
reranking_model: retrieval_mode === RETRIEVE_TYPE.oneWay
? undefined
// eslint-disable-next-line sonarjs/no-nested-conditional
: (!configs.reranking_model?.reranking_provider_name
? undefined
: {

View File

@ -135,7 +135,7 @@ export const getMultipleRetrievalConfig = (
vector_setting: {
vector_weight: allHighQualityVectorSearch
? DEFAULT_WEIGHTED_SCORE.allHighQualityVectorSearch.semantic
// eslint-disable-next-line sonarjs/no-nested-conditional
: allHighQualityFullTextSearch
? DEFAULT_WEIGHTED_SCORE.allHighQualityFullTextSearch.semantic
: DEFAULT_WEIGHTED_SCORE.other.semantic,
@ -145,7 +145,7 @@ export const getMultipleRetrievalConfig = (
keyword_setting: {
keyword_weight: allHighQualityVectorSearch
? DEFAULT_WEIGHTED_SCORE.allHighQualityVectorSearch.keyword
// eslint-disable-next-line sonarjs/no-nested-conditional
: allHighQualityFullTextSearch
? DEFAULT_WEIGHTED_SCORE.allHighQualityFullTextSearch.keyword
: DEFAULT_WEIGHTED_SCORE.other.keyword,
@ -232,7 +232,6 @@ export const getMultipleRetrievalConfig = (
result.reranking_mode = RerankingModeEnum.RerankingModel
result.reranking_enable = true
// eslint-disable-next-line sonarjs/nested-control-flow
if ((!result.reranking_model?.provider || !result.reranking_model?.model) && isFallbackRerankModelValid) {
result.reranking_model = {
provider: fallbackRerankModel.provider || '',

View File

@ -124,20 +124,22 @@ const Panel: FC<NodePanelProps<ToolNodeType>> = ({
// TODO: an empty object type always matches the `qa_structured` schema type
return (
<div key={outputItem.name}>
{outputItem.value?.type === 'object' ? (
<StructureOutputItem
rootClassName="code-sm-semibold text-text-secondary"
payload={wrapStructuredVarItem(outputItem, schemaType)}
/>
) : (
<VarItem
name={outputItem.name}
// eslint-disable-next-line sonarjs/no-nested-template-literals
type={`${outputItem.type.toLocaleLowerCase()}${schemaType ? ` (${schemaType})` : ''}`}
description={outputItem.description}
isIndent={hasObjectOutput}
/>
)}
{outputItem.value?.type === 'object'
? (
<StructureOutputItem
rootClassName="code-sm-semibold text-text-secondary"
payload={wrapStructuredVarItem(outputItem, schemaType)}
/>
)
: (
<VarItem
name={outputItem.name}
type={`${outputItem.type.toLocaleLowerCase()}${schemaType ? ` (${schemaType})` : ''}`}
description={outputItem.description}
isIndent={hasObjectOutput}
/>
)}
</div>
)
})}

View File

@ -18,5 +18,4 @@ export function getSelectedNode(
return $isAtNodeEnd(anchor) ? anchorNode : focusNode
}
// eslint-disable-next-line sonarjs/empty-string-repetition
export const urlRegExp = /((([A-Za-z]{3,9}:(?:\/\/)?)(?:[-;:&=+$,\w]+@)?[A-Za-z0-9.-]+|(?:www.|[-;:&=+$,\w]+@)[A-Za-z0-9.-]+)((?:\/[+~%/.\w-]*)?\??[-+=&;%@.\w]*#?\w*)?)/

View File

@ -29,6 +29,7 @@ export type AppContextValue = {
langGeniusVersionInfo: LangGeniusVersionResponse
useSelector: typeof useSelector
isLoadingCurrentWorkspace: boolean
isValidatingCurrentWorkspace: boolean
}
const userProfilePlaceholder = {
@ -58,6 +59,9 @@ const initialWorkspaceInfo: ICurrentWorkspace = {
created_at: 0,
role: 'normal',
providers: [],
trial_credits: 200,
trial_credits_used: 0,
next_credit_reset_date: 0,
}
const AppContext = createContext<AppContextValue>({
@ -72,6 +76,7 @@ const AppContext = createContext<AppContextValue>({
langGeniusVersionInfo: initialLangGeniusVersionInfo,
useSelector,
isLoadingCurrentWorkspace: false,
isValidatingCurrentWorkspace: false,
})
export function useSelector<T>(selector: (value: AppContextValue) => T): T {
@ -86,7 +91,7 @@ export const AppContextProvider: FC<AppContextProviderProps> = ({ children }) =>
const queryClient = useQueryClient()
const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
const { data: userProfileResp } = useUserProfile()
const { data: currentWorkspaceResp, isPending: isLoadingCurrentWorkspace } = useCurrentWorkspace()
const { data: currentWorkspaceResp, isPending: isLoadingCurrentWorkspace, isFetching: isValidatingCurrentWorkspace } = useCurrentWorkspace()
const langGeniusVersionQuery = useLangGeniusVersion(
userProfileResp?.meta.currentVersion,
!systemFeatures.branding.enabled,
@ -195,6 +200,7 @@ export const AppContextProvider: FC<AppContextProviderProps> = ({ children }) =>
isCurrentWorkspaceDatasetOperator,
mutateCurrentWorkspace,
isLoadingCurrentWorkspace,
isValidatingCurrentWorkspace,
}}
>
<div className="flex h-full flex-col overflow-y-auto">

View File

@ -1,6 +1,8 @@
import consistentPlaceholders from './rules/consistent-placeholders.js'
import noAsAnyInT from './rules/no-as-any-in-t.js'
import noExtraKeys from './rules/no-extra-keys.js'
import noLegacyNamespacePrefix from './rules/no-legacy-namespace-prefix.js'
import noVersionPrefix from './rules/no-version-prefix.js'
import requireNsOption from './rules/require-ns-option.js'
import validI18nKeys from './rules/valid-i18n-keys.js'
@ -11,9 +13,11 @@ const plugin = {
version: '1.0.0',
},
rules: {
'consistent-placeholders': consistentPlaceholders,
'no-as-any-in-t': noAsAnyInT,
'no-extra-keys': noExtraKeys,
'no-legacy-namespace-prefix': noLegacyNamespacePrefix,
'no-version-prefix': noVersionPrefix,
'require-ns-option': requireNsOption,
'valid-i18n-keys': validI18nKeys,
},

View File

@ -0,0 +1,109 @@
import fs from 'node:fs'
import path, { normalize, sep } from 'node:path'
import { cleanJsonText } from '../utils.js'
/**
* Extract placeholders from a string
* Matches patterns like {{name}}, {{count}}, etc.
* @param {string} str
* @returns {string[]} Sorted array of placeholder names
*/
function extractPlaceholders(str) {
const matches = str.match(/\{\{\w+\}\}/g) || []
return matches.map(m => m.slice(2, -2)).sort()
}
/**
* Compare two arrays and return if they're equal
* @param {string[]} arr1
* @param {string[]} arr2
* @returns {boolean} True if arrays contain the same elements in the same order
*/
function arraysEqual(arr1, arr2) {
if (arr1.length !== arr2.length)
return false
return arr1.every((val, i) => val === arr2[i])
}
/** @type {import('eslint').Rule.RuleModule} */
export default {
meta: {
type: 'problem',
docs: {
description: 'Ensure placeholders in translations match the en-US source',
},
},
create(context) {
return {
Program(node) {
const { filename, sourceCode } = context
if (!filename.endsWith('.json'))
return
const parts = normalize(filename).split(sep)
const jsonFile = parts.at(-1)
const lang = parts.at(-2)
// Skip English files - they are the source of truth
if (lang === 'en-US')
return
let currentJson = {}
let englishJson = {}
try {
currentJson = JSON.parse(cleanJsonText(sourceCode.text))
const englishFilePath = path.join(path.dirname(filename), '..', 'en-US', jsonFile ?? '')
englishJson = JSON.parse(fs.readFileSync(englishFilePath, 'utf8'))
}
catch (error) {
context.report({
node,
message: `Error parsing JSON: ${error instanceof Error ? error.message : String(error)}`,
})
return
}
// Check each key in the current translation
for (const key of Object.keys(currentJson)) {
// Skip if the key doesn't exist in English (handled by no-extra-keys rule)
if (!Object.prototype.hasOwnProperty.call(englishJson, key))
continue
const currentValue = currentJson[key]
const englishValue = englishJson[key]
// Skip non-string values
if (typeof currentValue !== 'string' || typeof englishValue !== 'string')
continue
const currentPlaceholders = extractPlaceholders(currentValue)
const englishPlaceholders = extractPlaceholders(englishValue)
if (!arraysEqual(currentPlaceholders, englishPlaceholders)) {
const missing = englishPlaceholders.filter(p => !currentPlaceholders.includes(p))
const extra = currentPlaceholders.filter(p => !englishPlaceholders.includes(p))
let message = `Placeholder mismatch in "${key}": `
const details = []
if (missing.length > 0)
details.push(`missing {{${missing.join('}}, {{')}}}`)
if (extra.length > 0)
details.push(`extra {{${extra.join('}}, {{')}}}`)
message += details.join('; ')
message += `. Expected: {{${englishPlaceholders.join('}}, {{') || 'none'}}}`
context.report({
node,
message,
})
}
}
},
}
},
}
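An illustrative sketch (example strings are hypothetical, not part of this diff) of the mismatch the consistent-placeholders rule reports, using the extractPlaceholders helper defined above:
// en-US source:  "Installing {{installingLength}} plugins, {{successLength}} succeeded."
// translation:   "Installing {{installinglength}} plugins."
// extractPlaceholders(source)      -> ['installingLength', 'successLength']
// extractPlaceholders(translation) -> ['installinglength']
// reported: Placeholder mismatch in "task.installingWithSuccess": missing {{installingLength}}, {{successLength}}; extra {{installinglength}}. Expected: {{installingLength}}, {{successLength}}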

View File

@ -0,0 +1,45 @@
const DEPENDENCY_KEYS = ['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies']
const VERSION_PREFIXES = ['^', '~']
/** @type {import('eslint').Rule.RuleModule} */
export default {
meta: {
type: 'problem',
docs: {
description: `Ensure package.json dependencies do not use version prefixes (${VERSION_PREFIXES.join(' or ')})`,
},
fixable: 'code',
},
create(context) {
const { filename } = context
if (!filename.endsWith('package.json'))
return {}
const selector = `JSONProperty:matches(${DEPENDENCY_KEYS.map(k => `[key.value="${k}"]`).join(', ')}) > JSONObjectExpression > JSONProperty`
return {
[selector](node) {
const versionNode = node.value
if (versionNode && versionNode.type === 'JSONLiteral' && typeof versionNode.value === 'string') {
const version = versionNode.value
const foundPrefix = VERSION_PREFIXES.find(prefix => version.startsWith(prefix))
if (foundPrefix) {
const packageName = node.key.value || node.key.name
const cleanVersion = version.substring(1)
const canAutoFix = /^\d+\.\d+\.\d+$/.test(cleanVersion)
context.report({
node: versionNode,
message: `Dependency "${packageName}" has version prefix "${foundPrefix}" that should be removed (found: "${version}", expected: "${cleanVersion}")`,
fix: canAutoFix
? fixer => fixer.replaceText(versionNode, `"${cleanVersion}"`)
: undefined,
})
}
}
},
}
},
}
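A brief illustration (hypothetical dependency block) of what the no-version-prefix rule flags in package.json and, for plain semver strings, auto-fixes by stripping the prefix:
// before:  "dependencies": { "js-yaml": "^4.1.1", "ahooks": "~3.7.8" }
// report:  Dependency "js-yaml" has version prefix "^" that should be removed (found: "^4.1.1", expected: "4.1.1")
// after:   "dependencies": { "js-yaml": "4.1.1", "ahooks": "3.7.8" }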

View File

@ -3280,9 +3280,6 @@
}
},
"app/components/workflow/nodes/data-source/panel.tsx": {
"style/multiline-ternary": {
"count": 2
},
"ts/no-explicit-any": {
"count": 3
}
@ -3665,9 +3662,6 @@
}
},
"app/components/workflow/nodes/tool/panel.tsx": {
"style/multiline-ternary": {
"count": 2
},
"ts/no-explicit-any": {
"count": 2
}

View File

@ -131,6 +131,17 @@ export default antfu(
'dify-i18n/valid-i18n-keys': 'error',
'dify-i18n/no-extra-keys': 'error',
'dify-i18n/consistent-placeholders': 'error',
},
},
// package.json version prefix validation
{
files: ['**/package.json'],
plugins: {
'dify-i18n': difyI18n,
},
rules: {
'dify-i18n/no-version-prefix': 'error',
},
},
)

View File

@ -9,5 +9,8 @@ export function getInitOptions(): InitOptions {
partialBundledLanguages: true,
keySeparator: false,
ns: namespacesCamelCase,
interpolation: {
escapeValue: false,
},
}
}
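For context on the escapeValue option above: i18next escapes interpolated values by default to guard against injecting raw HTML, but JSX already escapes rendered text in this React app, so the default would double-escape values such as formatted dates. A sketch of the difference (example key and date are illustrative):
// t('modelProvider.resetDate', { ns: 'common', date: '2026/02/01' })
// default (escapeValue: true):  "... 2026&#x2F;02&#x2F;01"
// with escapeValue: false:      "... 2026/02/01"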

View File

@ -61,6 +61,7 @@
"account.workspaceName": "اسم مساحة العمل",
"account.workspaceNamePlaceholder": "أدخل اسم مساحة العمل",
"actionMsg.copySuccessfully": "تم النسخ بنجاح",
"actionMsg.downloadUnsuccessfully": "فشل التنزيل. يرجى المحاولة مرة أخرى لاحقًا.",
"actionMsg.generatedSuccessfully": "تم الإنشاء بنجاح",
"actionMsg.generatedUnsuccessfully": "فشل الإنشاء",
"actionMsg.modifiedSuccessfully": "تم التعديل بنجاح",
@ -91,6 +92,7 @@
"apiBasedExtension.title": "توفر ملحقات API إدارة مركزية لواجهة برمجة التطبيقات، مما يبسط التكوين لسهولة الاستخدام عبر تطبيقات Dify.",
"apiBasedExtension.type": "النوع",
"appMenus.apiAccess": "وصول API",
"appMenus.apiAccessTip": "يمكن الوصول إلى قاعدة المعرفة هذه عبر واجهة برمجة تطبيقات الخدمة",
"appMenus.logAndAnn": "السجلات والتعليقات التوضيحية",
"appMenus.logs": "السجلات",
"appMenus.overview": "المراقبة",
@ -339,6 +341,9 @@
"modelProvider.callTimes": "أوقات الاتصال",
"modelProvider.card.buyQuota": "شراء حصة",
"modelProvider.card.callTimes": "أوقات الاتصال",
"modelProvider.card.modelAPI": "نماذج {{modelName}} تستخدم مفتاح API.",
"modelProvider.card.modelNotSupported": "نماذج {{modelName}} غير مثبتة.",
"modelProvider.card.modelSupported": "نماذج {{modelName}} تستخدم هذا الحصة.",
"modelProvider.card.onTrial": "في التجربة",
"modelProvider.card.paid": "مدفوع",
"modelProvider.card.priorityUse": "أولوية الاستخدام",
@ -394,6 +399,7 @@
"modelProvider.quotaTip": "الرموز المجانية المتاحة المتبقية",
"modelProvider.rerankModel.key": "نموذج إعادة الترتيب",
"modelProvider.rerankModel.tip": "سيعيد نموذج إعادة الترتيب ترتيب قائمة المستندات المرشحة بناءً على المطابقة الدلالية مع استعلام المستخدم، مما يحسن نتائج الترتيب الدلالي",
"modelProvider.resetDate": "إعادة التعيين في {{date}}",
"modelProvider.searchModel": "نموذج البحث",
"modelProvider.selectModel": "اختر نموذجك",
"modelProvider.selector.emptySetting": "يرجى الانتقال إلى الإعدادات للتكوين",

View File

@ -26,6 +26,7 @@
"list.action.archive": "أرشيف",
"list.action.batchAdd": "إضافة دفعة",
"list.action.delete": "حذف",
"list.action.download": "تحميل",
"list.action.enableWarning": "لا يمكن تمكين الملف المؤرشف",
"list.action.pause": "إيقاف مؤقت",
"list.action.resume": "استئناف",

View File

@ -7,6 +7,7 @@
"batchAction.cancel": "إلغاء",
"batchAction.delete": "حذف",
"batchAction.disable": "تعطيل",
"batchAction.download": "تحميل",
"batchAction.enable": "تمكين",
"batchAction.reIndex": "إعادة الفهرسة",
"batchAction.selected": "محدد",

View File

@ -236,7 +236,7 @@
"task.installSuccess": "تم تثبيت {{successLength}} من الإضافات بنجاح",
"task.installed": "مثبت",
"task.installedError": "{{errorLength}} إضافات فشل تثبيتها",
"task.installing": "تثبيت {{installingLength}} إضافات، 0 تم.",
"task.installing": "جارٍ تثبيت الإضافات.",
"task.installingWithError": "تثبيت {{installingLength}} إضافات، {{successLength}} نجاح، {{errorLength}} فشل",
"task.installingWithSuccess": "تثبيت {{installingLength}} إضافات، {{successLength}} نجاح.",
"task.runningPlugins": "تثبيت الإضافات",

View File

@ -251,10 +251,10 @@
"openingStatement.notIncludeKey": "Das Anfangsprompt enthält nicht die Variable: {{key}}. Bitte fügen Sie sie dem Anfangsprompt hinzu.",
"openingStatement.openingQuestion": "Eröffnungsfragen",
"openingStatement.openingQuestionPlaceholder": "Sie können Variablen verwenden, versuchen Sie {{variable}} einzugeben.",
"openingStatement.placeholder": "Schreiben Sie hier Ihre Eröffnungsnachricht, Sie können Variablen verwenden, versuchen Sie {{Variable}} zu tippen.",
"openingStatement.placeholder": "Schreiben Sie hier Ihre Eröffnungsnachricht, Sie können Variablen verwenden, versuchen Sie {{variable}} zu tippen.",
"openingStatement.title": "Gesprächseröffner",
"openingStatement.tooShort": "Für die Erzeugung von Eröffnungsbemerkungen für das Gespräch werden mindestens 20 Wörter des Anfangsprompts benötigt.",
"openingStatement.varTip": "Sie können Variablen verwenden, versuchen Sie {{Variable}} zu tippen",
"openingStatement.varTip": "Sie können Variablen verwenden, versuchen Sie {{variable}} zu tippen",
"openingStatement.writeOpener": "Eröffnung schreiben",
"operation.addFeature": "Funktion hinzufügen",
"operation.agree": "gefällt mir",

View File

@ -83,7 +83,7 @@
"gotoAnything.emptyState.noKnowledgeBasesFound": "Keine Wissensdatenbanken gefunden",
"gotoAnything.emptyState.noPluginsFound": "Keine Plugins gefunden",
"gotoAnything.emptyState.noWorkflowNodesFound": "Keine Workflow-Knoten gefunden",
"gotoAnything.emptyState.tryDifferentTerm": "Versuchen Sie einen anderen Suchbegriff oder entfernen Sie den {{mode}}-Filter",
"gotoAnything.emptyState.tryDifferentTerm": "Versuchen Sie einen anderen Suchbegriff",
"gotoAnything.emptyState.trySpecificSearch": "Versuchen Sie {{shortcuts}} für spezifische Suchen",
"gotoAnything.groups.apps": "Apps",
"gotoAnything.groups.commands": "Befehle",

View File

@ -65,7 +65,7 @@
"plansCommon.annotatedResponse.title": "Kontingentgrenzen für Annotationen",
"plansCommon.annotatedResponse.tooltip": "Manuelle Bearbeitung und Annotation von Antworten bieten anpassbare, hochwertige Frage-Antwort-Fähigkeiten für Apps. (Nur anwendbar in Chat-Apps)",
"plansCommon.annotationQuota": "Kontingent für Anmerkungen",
"plansCommon.annualBilling": "Jährliche Abrechnung",
"plansCommon.annualBilling": "Jährliche Abrechnung, sparen Sie {{percent}}%",
"plansCommon.apiRateLimit": "API-Datenlimit",
"plansCommon.apiRateLimitTooltip": "Die API-Datenbeschränkung gilt für alle Anfragen, die über die Dify-API gemacht werden, einschließlich Textgenerierung, Chat-Konversationen, Workflow-Ausführungen und Dokumentenverarbeitung.",
"plansCommon.apiRateLimitUnit": "{{count,number}}",
@ -91,7 +91,7 @@
"plansCommon.freeTrialTipPrefix": "Melden Sie sich an und erhalten Sie ein",
"plansCommon.freeTrialTipSuffix": "Keine Kreditkarte erforderlich",
"plansCommon.getStarted": "Loslegen",
"plansCommon.logsHistory": "Protokollverlauf",
"plansCommon.logsHistory": "{{days}} Protokollverlauf",
"plansCommon.member": "Mitglied",
"plansCommon.memberAfter": "Mitglied",
"plansCommon.messageRequest.title": "Nachrichtenguthaben",
@ -144,7 +144,7 @@
"plansCommon.unavailable": "Nicht verfügbar",
"plansCommon.unlimited": "Unbegrenzt",
"plansCommon.unlimitedApiRate": "Keine API-Ratebeschränkung",
"plansCommon.vectorSpace": "Vektorraum",
"plansCommon.vectorSpace": "{{size}} Vektorraum",
"plansCommon.vectorSpaceTooltip": "Vektorraum ist das Langzeitspeichersystem, das erforderlich ist, damit LLMs Ihre Daten verstehen können.",
"plansCommon.workflowExecution.faster": "Schnellere Arbeitsablauf-Ausführung",
"plansCommon.workflowExecution.priority": "Prioritäts-Workflow-Ausführung",

View File

@ -61,6 +61,7 @@
"account.workspaceName": "Arbeitsbereichsname",
"account.workspaceNamePlaceholder": "Arbeitsbereichnamen eingeben",
"actionMsg.copySuccessfully": "Erfolgreich kopiert",
"actionMsg.downloadUnsuccessfully": "Download fehlgeschlagen. Bitte versuchen Sie es später erneut.",
"actionMsg.generatedSuccessfully": "Erfolgreich generiert",
"actionMsg.generatedUnsuccessfully": "Generierung nicht erfolgreich",
"actionMsg.modifiedSuccessfully": "Erfolgreich geändert",
@ -91,6 +92,7 @@
"apiBasedExtension.title": "API-Erweiterungen bieten zentralisiertes API-Management und vereinfachen die Konfiguration für eine einfache Verwendung in Difys Anwendungen.",
"apiBasedExtension.type": "Typ",
"appMenus.apiAccess": "API-Zugriff",
"appMenus.apiAccessTip": "Diese Wissensdatenbank ist über die Service-API zugänglich",
"appMenus.logAndAnn": "Protokolle & Ank.",
"appMenus.logs": "Baumstämme",
"appMenus.overview": "Übersicht",
@ -172,7 +174,7 @@
"fileUploader.pasteFileLinkInvalid": "Ungültiger Dateilink",
"fileUploader.uploadDisabled": "Datei-Upload ist deaktiviert",
"fileUploader.uploadFromComputer": "Lokaler Upload",
"fileUploader.uploadFromComputerLimit": "Datei hochladen darf {{size}} nicht überschreiten",
"fileUploader.uploadFromComputerLimit": "Der Upload von {{type}} darf {{size}} nicht überschreiten",
"fileUploader.uploadFromComputerReadError": "Lesen der Datei fehlgeschlagen, bitte versuchen Sie es erneut.",
"fileUploader.uploadFromComputerUploadError": "Datei-Upload fehlgeschlagen, bitte erneut hochladen.",
"imageInput.browse": "blättern",
@ -339,6 +341,9 @@
"modelProvider.callTimes": "Anrufzeiten",
"modelProvider.card.buyQuota": "Kontingent kaufen",
"modelProvider.card.callTimes": "Anrufzeiten",
"modelProvider.card.modelAPI": "{{modelName}}-Modelle verwenden den API-Schlüssel.",
"modelProvider.card.modelNotSupported": "{{modelName}}-Modelle sind nicht installiert.",
"modelProvider.card.modelSupported": "{{modelName}}-Modelle verwenden dieses Kontingent.",
"modelProvider.card.onTrial": "In Probe",
"modelProvider.card.paid": "Bezahlt",
"modelProvider.card.priorityUse": "Priorisierte Nutzung",
@ -394,6 +399,7 @@
"modelProvider.quotaTip": "Verbleibende verfügbare kostenlose Token",
"modelProvider.rerankModel.key": "Rerank-Modell",
"modelProvider.rerankModel.tip": "Rerank-Modell wird die Kandidatendokumentenliste basierend auf der semantischen Übereinstimmung mit der Benutzeranfrage neu ordnen und die Ergebnisse der semantischen Rangordnung verbessern",
"modelProvider.resetDate": "Zurücksetzen am {{date}}",
"modelProvider.searchModel": "Suchmodell",
"modelProvider.selectModel": "Wählen Sie Ihr Modell",
"modelProvider.selector.emptySetting": "Bitte gehen Sie zu den Einstellungen, um zu konfigurieren",

View File

@ -140,7 +140,7 @@
"stepTwo.preview": "Bestätigen & Vorschau",
"stepTwo.previewButton": "Umschalten zum Frage-und-Antwort-Format",
"stepTwo.previewChunk": "Vorschau Chunk",
"stepTwo.previewChunkCount": "{{Anzahl}} Geschätzte Chunks",
"stepTwo.previewChunkCount": "{{count}} Geschätzte Chunks",
"stepTwo.previewChunkTip": "Klicken Sie auf die Schaltfläche \"Preview Chunk\" auf der linken Seite, um die Vorschau zu laden",
"stepTwo.previewSwitchTipEnd": " zusätzliche Tokens verbrauchen",
"stepTwo.previewSwitchTipStart": "Die aktuelle Chunk-Vorschau ist im Textformat, ein Wechsel zur Vorschau im Frage-und-Antwort-Format wird",

View File

@ -26,6 +26,7 @@
"list.action.archive": "Archivieren",
"list.action.batchAdd": "Batch hinzufügen",
"list.action.delete": "Löschen",
"list.action.download": "Herunterladen",
"list.action.enableWarning": "Archivierte Datei kann nicht aktiviert werden",
"list.action.pause": "Pause",
"list.action.resume": "Fortsetzen",

View File

@ -3,7 +3,7 @@
"dateTimeFormat": "MM/DD/YYYY hh:mm A",
"desc": "Testen Sie die Treffereffektivität des Wissens anhand des gegebenen Abfragetextes.",
"hit.emptyTip": "Ergebnisse des Abruf-Tests werden hier angezeigt",
"hit.title": "ABRUFPARAGRAFEN",
"hit.title": "{{num}} Abgerufene Chunks",
"hitChunks": "Klicken Sie auf {{num}} untergeordnete Chunks",
"imageUploader.dropZoneTip": "Datei hierher ziehen, um sie hochzuladen",
"imageUploader.singleChunkAttachmentLimitTooltip": "Die Anzahl der Einzelblock-Anhänge darf {{limit}} nicht überschreiten",

Some files were not shown because too many files have changed in this diff.