Mirror of https://github.com/langgenius/dify.git (synced 2026-05-12 05:07:41 +08:00)

Compare commits: main...codex/depr — 8 commits
| SHA1 |
|---|
| 532f6fce62 |
| 542ab77efb |
| 168db76744 |
| 87fb5118e8 |
| 151b1cddd6 |
| ccf886d078 |
| 5baa6916da |
| af004a88c3 |
@@ -3,6 +3,8 @@ CLI command modules extracted from `commands.py`.
"""

from .account import create_tenant, reset_email, reset_password
from .app_maintenance import convert_to_agent_apps, fix_app_site_missing
from .database import upgrade_db
from .plugin import (
    extract_plugins,
    extract_unique_plugins,
@@ -25,7 +27,6 @@ from .retention import (
    restore_workflow_runs,
)
from .storage import clear_orphaned_file_records, file_usage, migrate_oss, remove_orphaned_files_on_storage
from .system import convert_to_agent_apps, fix_app_site_missing, reset_encrypt_key_pair, upgrade_db
from .vector import (
    add_qdrant_index,
    migrate_annotation_vector_database,
@@ -33,6 +34,8 @@ from .vector import (
    old_metadata_migration,
    vdb_migrate,
)
from .workflow_migration import migrate_legacy_sys_files_workflows
from .workspace import reset_encrypt_key_pair

__all__ = [
    "add_qdrant_index",
@@ -55,6 +58,7 @@ __all__ = [
    "migrate_annotation_vector_database",
    "migrate_data_for_plugin",
    "migrate_knowledge_vector_database",
    "migrate_legacy_sys_files_workflows",
    "migrate_oss",
    "old_metadata_migration",
    "remove_orphaned_files_on_storage",
@@ -1,74 +1,26 @@
"""App data maintenance CLI commands."""

import logging

import click
import sqlalchemy as sa
from sqlalchemy import delete, select, update
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select, update

from configs import dify_config
from events.app_event import app_was_created
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from libs.db_migration_lock import DbMigrationAutoRenewLock
from libs.rsa import generate_key_pair
from models import Tenant
from models.model import App, AppMode, Conversation
from models.provider import Provider, ProviderModel

logger = logging.getLogger(__name__)

DB_UPGRADE_LOCK_TTL_SECONDS = 60


@click.command(
    "reset-encrypt-key-pair",
    help="Reset the asymmetric key pair of workspace for encrypt LLM credentials. "
    "After the reset, all LLM credentials will become invalid, "
    "requiring re-entry."
    "Only support SELF_HOSTED mode.",
)
@click.confirmation_option(
    prompt=click.style(
        "Are you sure you want to reset encrypt key pair? This operation cannot be rolled back!", fg="red"
    )
)
def reset_encrypt_key_pair():
    """
    Reset the encrypted key pair of workspace for encrypt LLM credentials.
    After the reset, all LLM credentials will become invalid, requiring re-entry.
    Only support SELF_HOSTED mode.
    """
    if dify_config.EDITION != "SELF_HOSTED":
        click.echo(click.style("This command is only for SELF_HOSTED installations.", fg="red"))
        return
    with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
        tenants = session.scalars(select(Tenant)).all()
        for tenant in tenants:
            if not tenant:
                click.echo(click.style("No workspaces found. Run /install first.", fg="red"))
                return

            tenant.encrypt_public_key = generate_key_pair(tenant.id)

            session.execute(delete(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id))
            session.execute(delete(ProviderModel).where(ProviderModel.tenant_id == tenant.id))

            click.echo(
                click.style(
                    f"Congratulations! The asymmetric key pair of workspace {tenant.id} has been reset.",
                    fg="green",
                )
            )


@click.command("convert-to-agent-apps", help="Convert Agent Assistant to Agent App.")
def convert_to_agent_apps():
def convert_to_agent_apps() -> None:
    """
    Convert Agent Assistant to Agent App.
    """
    click.echo(click.style("Starting convert to agent apps.", fg="green"))

    proceeded_app_ids = []
    proceeded_app_ids: list[str] = []

    while True:
        # fetch first 1000 apps
@@ -121,48 +73,14 @@ def convert_to_agent_apps():
    click.echo(click.style(f"Conversion complete. Converted {len(proceeded_app_ids)} agent apps.", fg="green"))


@click.command("upgrade-db", help="Upgrade the database")
def upgrade_db():
    click.echo("Preparing database migration...")
    lock = DbMigrationAutoRenewLock(
        redis_client=redis_client,
        name="db_upgrade_lock",
        ttl_seconds=DB_UPGRADE_LOCK_TTL_SECONDS,
        logger=logger,
        log_context="db_migration",
    )
    if lock.acquire(blocking=False):
        migration_succeeded = False
        try:
            click.echo(click.style("Starting database migration.", fg="green"))

            # run db migration
            import flask_migrate

            flask_migrate.upgrade()

            migration_succeeded = True
            click.echo(click.style("Database migration successful!", fg="green"))

        except Exception as e:
            logger.exception("Failed to execute database migration")
            click.echo(click.style(f"Database migration failed: {e}", fg="red"))
            raise SystemExit(1)
        finally:
            status = "successful" if migration_succeeded else "failed"
            lock.release_safely(status=status)
    else:
        click.echo("Database migration skipped")


@click.command("fix-app-site-missing", help="Fix app related site missing issue.")
def fix_app_site_missing():
def fix_app_site_missing() -> None:
    """
    Fix app related site missing issue.
    """
    click.echo(click.style("Starting fix for missing app-related sites.", fg="green"))

    failed_app_ids = []
    failed_app_ids: list[str] = []
    while True:
        sql = """select apps.id as id from apps left join sites on sites.app_id=apps.id
        where sites.id is null limit 1000"""
api/commands/database.py (new file, 45 lines)
@@ -0,0 +1,45 @@
"""Database schema migration CLI commands."""

import logging

import click

from extensions.ext_redis import redis_client
from libs.db_migration_lock import DbMigrationAutoRenewLock

logger = logging.getLogger(__name__)

DB_UPGRADE_LOCK_TTL_SECONDS = 60


@click.command("upgrade-db", help="Upgrade the database")
def upgrade_db() -> None:
    click.echo("Preparing database migration...")
    lock = DbMigrationAutoRenewLock(
        redis_client=redis_client,
        name="db_upgrade_lock",
        ttl_seconds=DB_UPGRADE_LOCK_TTL_SECONDS,
        logger=logger,
        log_context="db_migration",
    )
    if lock.acquire(blocking=False):
        migration_succeeded = False
        try:
            click.echo(click.style("Starting database migration.", fg="green"))

            import flask_migrate

            flask_migrate.upgrade()

            migration_succeeded = True
            click.echo(click.style("Database migration successful!", fg="green"))

        except Exception as e:
            logger.exception("Failed to execute database migration")
            click.echo(click.style(f"Database migration failed: {e}", fg="red"))
            raise SystemExit(1)
        finally:
            status = "successful" if migration_succeeded else "failed"
            lock.release_safely(status=status)
    else:
        click.echo("Database migration skipped")
api/commands/workflow_migration.py (new file, 172 lines)
@@ -0,0 +1,172 @@
"""Workflow data migration CLI commands.

TODO: Remove the legacy system file workflow migration command after the production migration is complete.
"""

import logging
from dataclasses import dataclass

import click
from sqlalchemy import select
from sqlalchemy.orm import Session, sessionmaker

from extensions.ext_database import db
from models.workflow import Workflow, WorkflowType

logger = logging.getLogger(__name__)


@dataclass
class LegacySysFilesWorkflowMigrationStats:
    scanned: int = 0
    migrated: int = 0
    failed: int = 0
    batches: int = 0
    last_id: str | None = None


def _build_legacy_sys_files_workflow_query(
    *,
    start_after_id: str | None,
    batch_size: int,
    tenant_id: str | None,
    app_id: str | None,
):
    # Workflow IDs are UUID4, so this is not chronological pagination. The migration only needs a stable total
    # order that matches the resume cursor; ordering by the same primary-key column used in the `id > cursor`
    # predicate lets each batch continue deterministically without offset scans.
    stmt = (
        select(Workflow)
        .where(Workflow.type.in_((WorkflowType.WORKFLOW, WorkflowType.CHAT)))
        .order_by(Workflow.id)
        .limit(batch_size)
    )
    if start_after_id:
        stmt = stmt.where(Workflow.id > start_after_id)
    if tenant_id:
        stmt = stmt.where(Workflow.tenant_id == tenant_id)
    if app_id:
        stmt = stmt.where(Workflow.app_id == app_id)
    return stmt


def _migrate_legacy_sys_files_workflow_batch(
    *,
    session: Session,
    start_after_id: str | None,
    batch_size: int,
    tenant_id: str | None,
    app_id: str | None,
    dry_run: bool,
) -> LegacySysFilesWorkflowMigrationStats:
    stats = LegacySysFilesWorkflowMigrationStats()
    workflows = session.scalars(
        _build_legacy_sys_files_workflow_query(
            start_after_id=start_after_id,
            batch_size=batch_size,
            tenant_id=tenant_id,
            app_id=app_id,
        )
    ).all()

    for workflow in workflows:
        stats.scanned += 1
        stats.last_id = workflow.id
        try:
            if workflow.migrate_legacy_sys_files_graph_in_place():
                stats.migrated += 1
        except Exception:
            stats.failed += 1
            logger.exception("Failed to migrate legacy sys.files workflow, workflow_id=%s", workflow.id)

    if dry_run:
        session.rollback()
    else:
        session.commit()
    return stats


def run_legacy_sys_files_workflow_migration(
    *,
    batch_size: int,
    limit: int | None,
    start_after_id: str | None,
    tenant_id: str | None,
    app_id: str | None,
    dry_run: bool,
) -> LegacySysFilesWorkflowMigrationStats:
    """Scan Workflow and Advanced Chat graphs in keyset-paginated batches."""
    if batch_size <= 0:
        raise click.UsageError("--batch-size must be greater than 0")
    if limit is not None and limit <= 0:
        raise click.UsageError("--limit must be greater than 0 when provided")

    session_maker = sessionmaker(db.engine, expire_on_commit=False)
    total = LegacySysFilesWorkflowMigrationStats(last_id=start_after_id)
    next_start_after_id = start_after_id

    while limit is None or total.scanned < limit:
        remaining = None if limit is None else limit - total.scanned
        current_batch_size = batch_size if remaining is None else min(batch_size, remaining)
        if current_batch_size <= 0:
            break

        with session_maker() as session:
            batch_stats = _migrate_legacy_sys_files_workflow_batch(
                session=session,
                start_after_id=next_start_after_id,
                batch_size=current_batch_size,
                tenant_id=tenant_id,
                app_id=app_id,
                dry_run=dry_run,
            )

        if batch_stats.scanned == 0:
            break

        total.scanned += batch_stats.scanned
        total.migrated += batch_stats.migrated
        total.failed += batch_stats.failed
        total.batches += 1
        total.last_id = batch_stats.last_id
        next_start_after_id = batch_stats.last_id

        if batch_stats.scanned < current_batch_size:
            break

    return total


@click.command(
    "migrate-legacy-sys-files-workflows",
    help="Migrate Workflow and Advanced Chat graphs that still reference deprecated sys.files.",
)
@click.option("--batch-size", default=1000, show_default=True, type=int, help="Number of workflows to scan per batch.")
@click.option("--limit", default=None, type=int, help="Maximum number of workflows to scan in this run.")
@click.option("--start-after-id", default=None, help="Resume scanning after this workflow ID.")
@click.option("--tenant-id", default=None, help="Limit migration to one tenant.")
@click.option("--app-id", default=None, help="Limit migration to one app.")
@click.option("--dry-run", is_flag=True, default=False, help="Scan and report without saving changes.")
def migrate_legacy_sys_files_workflows(
    batch_size: int,
    limit: int | None,
    start_after_id: str | None,
    tenant_id: str | None,
    app_id: str | None,
    dry_run: bool,
) -> None:
    stats = run_legacy_sys_files_workflow_migration(
        batch_size=batch_size,
        limit=limit,
        start_after_id=start_after_id,
        tenant_id=tenant_id,
        app_id=app_id,
        dry_run=dry_run,
    )
    click.echo(
        "Legacy sys.files workflow migration finished: "
        f"scanned={stats.scanned} migrated={stats.migrated} failed={stats.failed} "
        f"batches={stats.batches} last_id={stats.last_id or ''}"
    )
    if dry_run:
        click.echo("Dry run only: no workflow graph changes were saved.")
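Because the stats object reports the keyset cursor (`last_id`), a long migration can be split across several runs. The sketch below shows that resume pattern, assuming an application context with database access is already set up; the batch sizes and limits are illustrative only.

```python
from commands.workflow_migration import run_legacy_sys_files_workflow_migration

# First run: scan at most 10,000 workflows, then note where it stopped.
first = run_legacy_sys_files_workflow_migration(
    batch_size=1000, limit=10_000, start_after_id=None, tenant_id=None, app_id=None, dry_run=False
)

# Later run: continue from the cursor the previous run reported.
second = run_legacy_sys_files_workflow_migration(
    batch_size=1000, limit=None, start_after_id=first.last_id, tenant_id=None, app_id=None, dry_run=False
)
```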
api/commands/workspace.py (new file, 52 lines)
@@ -0,0 +1,52 @@
"""Workspace maintenance CLI commands."""

import click
from sqlalchemy import delete, select
from sqlalchemy.orm import sessionmaker

from configs import dify_config
from extensions.ext_database import db
from libs.rsa import generate_key_pair
from models import Tenant
from models.provider import Provider, ProviderModel


@click.command(
    "reset-encrypt-key-pair",
    help="Reset the asymmetric key pair of workspace for encrypt LLM credentials. "
    "After the reset, all LLM credentials will become invalid, "
    "requiring re-entry."
    "Only support SELF_HOSTED mode.",
)
@click.confirmation_option(
    prompt=click.style(
        "Are you sure you want to reset encrypt key pair? This operation cannot be rolled back!", fg="red"
    )
)
def reset_encrypt_key_pair() -> None:
    """
    Reset the encrypted key pair of workspace for encrypt LLM credentials.
    After the reset, all LLM credentials will become invalid, requiring re-entry.
    Only support SELF_HOSTED mode.
    """
    if dify_config.EDITION != "SELF_HOSTED":
        click.echo(click.style("This command is only for SELF_HOSTED installations.", fg="red"))
        return
    with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
        tenants = session.scalars(select(Tenant)).all()
        for tenant in tenants:
            if not tenant:
                click.echo(click.style("No workspaces found. Run /install first.", fg="red"))
                return

            tenant.encrypt_public_key = generate_key_pair(tenant.id)

            session.execute(delete(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id))
            session.execute(delete(ProviderModel).where(ProviderModel.tenant_id == tenant.id))

            click.echo(
                click.style(
                    f"Congratulations! The asymmetric key pair of workspace {tenant.id} has been reset.",
                    fg="green",
                )
            )
@@ -19,6 +19,10 @@ from controllers.service_api.app.error import (
    ProviderNotInitializeError,
    ProviderQuotaExceededError,
)
from controllers.service_api.app.legacy_system_files import (
    attach_legacy_system_file_warning_for_service_api,
    normalize_legacy_system_file_args_for_service_api,
)
from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
from core.app.entities.app_invoke_entities import InvokeFrom
@@ -205,11 +209,20 @@ class ChatApi(Resource):
            args["external_trace_id"] = external_trace_id

        streaming = payload.response_mode == "streaming"
        legacy_system_file_compat = None
        if app_mode == AppMode.ADVANCED_CHAT:
            args, legacy_system_file_compat = normalize_legacy_system_file_args_for_service_api(
                app_model=app_model,
                args=args,
                raw_payload=service_api_ns.payload,
                workflow_id=args.get("workflow_id"),
            )

        try:
            response = AppGenerateService.generate(
                app_model=app_model, user=end_user, args=args, invoke_from=InvokeFrom.SERVICE_API, streaming=streaming
            )
            response = attach_legacy_system_file_warning_for_service_api(response, legacy_system_file_compat)

            return helper.compact_generate_response(response)
        except WorkflowNotFoundError as ex:
api/controllers/service_api/app/legacy_system_files.py (new file, 57 lines)
@@ -0,0 +1,57 @@
from collections.abc import Mapping
from typing import Any

from core.app.entities.app_invoke_entities import InvokeFrom
from core.workflow.legacy_system_files import (
    LegacySysFilesCompatVariable,
    attach_legacy_sys_files_warning,
    normalize_legacy_sys_files_args,
)
from models.model import App
from services.app_generate_service import AppGenerateService


def normalize_legacy_system_file_args_for_service_api(
    *,
    app_model: App,
    args: dict[str, Any],
    raw_payload: Mapping[str, Any] | None,
    workflow_id: str | None = None,
) -> tuple[dict[str, Any], LegacySysFilesCompatVariable | None]:
    # TODO: Remove this hidden Service API compatibility path after all persisted workflows are migrated.
    args_with_hidden_system = _copy_hidden_system_files_arg(args=args, raw_payload=raw_payload)
    if not _has_legacy_file_arg(args_with_hidden_system):
        return args, None

    workflow = AppGenerateService.get_workflow(app_model, InvokeFrom.SERVICE_API, workflow_id)
    return normalize_legacy_sys_files_args(graph=workflow.graph_dict, args=args_with_hidden_system)


def attach_legacy_system_file_warning_for_service_api(
    response: Mapping[str, Any] | Any,
    compat_variable: LegacySysFilesCompatVariable | None,
) -> Mapping[str, Any] | Any:
    # TODO: Remove this warning once Service API clients no longer need the legacy migration notice.
    return attach_legacy_sys_files_warning(response, compat_variable)


def _copy_hidden_system_files_arg(
    *,
    args: dict[str, Any],
    raw_payload: Mapping[str, Any] | None,
) -> dict[str, Any]:
    system = raw_payload.get("system") if isinstance(raw_payload, Mapping) else None
    if not isinstance(system, Mapping) or "files" not in system or system["files"] is None:
        return args

    copied_args = dict(args)
    copied_args["system"] = {"files": system["files"]}
    return copied_args


def _has_legacy_file_arg(args: Mapping[str, Any]) -> bool:
    if args.get("files") is not None:
        return True

    system = args.get("system")
    return isinstance(system, Mapping) and system.get("files") is not None
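To make the hidden compatibility path concrete, the following sketch runs the two private helpers above on an invented Service API payload; importing underscore-prefixed names is only for illustration, and the assumption that parsed args drop the raw `system` field is inferred from how `_copy_hidden_system_files_arg` reads it back from the raw payload.

```python
from controllers.service_api.app.legacy_system_files import (
    _copy_hidden_system_files_arg,
    _has_legacy_file_arg,
)

# Hypothetical raw request body: the legacy file list only appears under "system.files".
raw_payload = {"query": "hi", "inputs": {}, "system": {"files": [{"type": "image", "url": "https://example.com/a.png"}]}}
args = {"query": "hi", "inputs": {}}  # parsed args without the hidden "system" field

args_with_hidden_system = _copy_hidden_system_files_arg(args=args, raw_payload=raw_payload)
# -> {"query": "hi", "inputs": {}, "system": {"files": [...]}}

print(_has_legacy_file_arg(args))                     # False: nothing legacy to normalize
print(_has_legacy_file_arg(args_with_hidden_system))  # True: triggers the compatibility path above
```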
@@ -20,6 +20,10 @@ from controllers.service_api.app.error import (
    ProviderNotInitializeError,
    ProviderQuotaExceededError,
)
from controllers.service_api.app.legacy_system_files import (
    attach_legacy_system_file_warning_for_service_api,
    normalize_legacy_system_file_args_for_service_api,
)
from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
from core.app.apps.base_app_queue_manager import AppQueueManager
@@ -279,11 +283,17 @@ class WorkflowRunApi(Resource):
        if external_trace_id:
            args["external_trace_id"] = external_trace_id
        streaming = payload.response_mode == "streaming"
        args, legacy_system_file_compat = normalize_legacy_system_file_args_for_service_api(
            app_model=app_model,
            args=args,
            raw_payload=service_api_ns.payload,
        )

        try:
            response = AppGenerateService.generate(
                app_model=app_model, user=end_user, args=args, invoke_from=InvokeFrom.SERVICE_API, streaming=streaming
            )
            response = attach_legacy_system_file_warning_for_service_api(response, legacy_system_file_compat)

            return helper.compact_generate_response(response)
        except ProviderTokenNotInitError as ex:
@@ -339,11 +349,18 @@ class WorkflowRunByIdApi(Resource):
        if external_trace_id:
            args["external_trace_id"] = external_trace_id
        streaming = payload.response_mode == "streaming"
        args, legacy_system_file_compat = normalize_legacy_system_file_args_for_service_api(
            app_model=app_model,
            args=args,
            raw_payload=service_api_ns.payload,
            workflow_id=workflow_id,
        )

        try:
            response = AppGenerateService.generate(
                app_model=app_model, user=end_user, args=args, invoke_from=InvokeFrom.SERVICE_API, streaming=streaming
            )
            response = attach_legacy_system_file_warning_for_service_api(response, legacy_system_file_compat)

            return helper.compact_generate_response(response)
        except WorkflowNotFoundError as ex:
@@ -45,6 +45,7 @@ from core.ops.ops_trace_manager import TraceQueueManager
from core.prompt.utils.get_thread_messages_length import get_thread_messages_length
from core.repositories import DifyCoreRepositoryFactory
from core.repositories.factory import WorkflowExecutionRepository, WorkflowNodeExecutionRepository
from core.workflow.legacy_system_files import normalize_legacy_sys_files_args
from extensions.ext_database import db
from factories import file_factory
from graphon.graph_engine.layers import GraphEngineLayer
@@ -129,6 +130,8 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
        if not args.get("query"):
            raise ValueError("query is required")

        # TODO: Remove this compatibility normalization after all persisted workflows are migrated.
        args, _ = normalize_legacy_sys_files_args(graph=workflow.graph_dict, args=args)
        query = args["query"]
        if not isinstance(query, str):
            raise ValueError("query must be a string")

@@ -36,6 +36,7 @@ from core.helper.trace_id_helper import extract_external_trace_id_from_args, ext
from core.ops.ops_trace_manager import TraceQueueManager
from core.repositories import DifyCoreRepositoryFactory
from core.repositories.factory import WorkflowExecutionRepository, WorkflowNodeExecutionRepository
from core.workflow.legacy_system_files import normalize_legacy_sys_files_args
from extensions.ext_database import db
from factories import file_factory
from graphon.graph_engine.layers import GraphEngineLayer
@@ -133,6 +134,8 @@ class WorkflowAppGenerator(BaseAppGenerator):
        pause_state_config: PauseStateLayerConfig | None = None,
    ) -> Mapping[str, Any] | Generator[Mapping[str, Any] | str, None, None]:
        with self._bind_file_access_scope(tenant_id=app_model.tenant_id, user=user, invoke_from=invoke_from):
            # TODO: Remove this compatibility normalization after all persisted workflows are migrated.
            args, _ = normalize_legacy_sys_files_args(graph=workflow.graph_dict, args=args)
            files: Sequence[Mapping[str, Any]] = args.get("files") or []

            # parse files
api/core/workflow/legacy_system_files.py (new file, 260 lines)
@@ -0,0 +1,260 @@
"""Compatibility helpers for workflows that still reference deprecated `sys.files`.

TODO: Remove this module after all persisted Workflow and Advanced Chat graphs
have been migrated from the deprecated system file variable to `userinput.files`.
"""

from __future__ import annotations

import copy
import json
import re
from collections.abc import Iterable, Mapping
from dataclasses import dataclass
from typing import Any

_LEGACY_SYSTEM_NODE_ID = "sys"
_USER_INPUT_NODE_ID = "userinput"
_LEGACY_FILES_VARIABLE = "files"
_LEGACY_FILE_SELECTOR = [_LEGACY_SYSTEM_NODE_ID, _LEGACY_FILES_VARIABLE]
_USER_INPUT_FILE_SELECTOR = [_USER_INPUT_NODE_ID, _LEGACY_FILES_VARIABLE]
_USER_INPUT_FILE_INPUT_KEY = ".".join(_USER_INPUT_FILE_SELECTOR)
_LEGACY_FILES_TEMPLATE = "{{#sys.files#}}"
_USER_INPUT_FILES_TEMPLATE = "{{#userinput.files#}}"
_LEGACY_FILES_TEMPLATE_PATTERN = re.compile(r"\{\{#sys\.files#\}\}")
_USER_INPUT_FILES_TEMPLATE_PATTERN = re.compile(r"\{\{#userinput\.files#\}\}")


@dataclass(frozen=True)
class LegacySysFilesCompatVariable:
    start_node_id: str
    variable_name: str


@dataclass(frozen=True)
class LegacySysFilesGraphMigrationResult:
    graph: dict[str, Any]
    changed: bool


def migrate_legacy_sys_files_graph(
    graph: Mapping[str, Any],
    *,
    features: Mapping[str, Any] | None = None,
) -> dict[str, Any]:
    """Return a graph where legacy file-system references point to `userinput.files`."""

    return migrate_legacy_sys_files_graph_with_result(graph, features=features).graph


def migrate_legacy_sys_files_graph_with_result(
    graph: Mapping[str, Any],
    *,
    features: Mapping[str, Any] | None = None,
) -> LegacySysFilesGraphMigrationResult:
    """Return the migrated graph and whether any legacy reference was rewritten."""

    _ = features
    graph_copy = dict(graph)
    nodes = graph_copy.get("nodes")
    if not isinstance(nodes, list):
        return LegacySysFilesGraphMigrationResult(graph=graph_copy, changed=False)

    # Legacy references are stored in node data. Restricting both search and replacement to `nodes`
    # avoids recursively scanning graph-level metadata and edges for every workflow load.
    if not _may_contain_legacy_sys_files_reference(nodes) or not _contains_legacy_sys_files_reference(nodes):
        return LegacySysFilesGraphMigrationResult(graph=graph_copy, changed=False)

    nodes_copy = copy.deepcopy(nodes)
    graph_copy["nodes"] = _replace_legacy_sys_files_references(nodes_copy)
    return LegacySysFilesGraphMigrationResult(graph=graph_copy, changed=True)


def resolve_legacy_sys_files_compat_variable(graph: Mapping[str, Any]) -> LegacySysFilesCompatVariable | None:
    """Resolve the target variable used by the `sys.files` compatibility layer."""

    nodes = graph.get("nodes")
    if not isinstance(nodes, list):
        return None
    has_legacy_reference = _may_contain_legacy_sys_files_reference(nodes) and _contains_legacy_sys_files_reference(
        nodes
    )
    has_userinput_reference = _may_contain_userinput_files_reference(nodes) and _contains_userinput_files_reference(
        nodes
    )
    if not (has_legacy_reference or has_userinput_reference):
        return None
    return LegacySysFilesCompatVariable(start_node_id=_USER_INPUT_NODE_ID, variable_name=_LEGACY_FILES_VARIABLE)


def normalize_legacy_sys_files_args(
    *,
    graph: Mapping[str, Any],
    args: Mapping[str, Any],
) -> tuple[dict[str, Any], LegacySysFilesCompatVariable | None]:
    """Map Service/Web API file arguments onto the `userinput.files` system alias.

    The top-level `files` argument and hidden `system.files` payload both feed
    the same runtime file collection. After graph references are migrated, the
    file collection is exposed in the variable pool as `userinput.files`.
    """

    compat_variable = resolve_legacy_sys_files_compat_variable(graph)
    if compat_variable is None:
        return dict(args), None

    normalized_args = dict(args)
    files_from_input, input_files_used = _extract_userinput_files(args)
    if input_files_used:
        normalized_args.setdefault("files", files_from_input)
        return normalized_args, None

    files, legacy_files_used = _extract_legacy_files(args)
    if not legacy_files_used:
        return normalized_args, None

    normalized_args.setdefault("files", files)

    raw_inputs = normalized_args.get("inputs")
    inputs = dict(raw_inputs) if isinstance(raw_inputs, Mapping) else {}
    inputs.setdefault(_USER_INPUT_FILE_INPUT_KEY, files)
    normalized_args["inputs"] = inputs
    return normalized_args, compat_variable


def attach_legacy_sys_files_warning(
    response: Mapping[str, Any] | Iterable[Any],
    compat_variable: LegacySysFilesCompatVariable | None,
) -> Mapping[str, Any] | Iterable[Any]:
    if compat_variable is None:
        return response

    warning = build_legacy_sys_files_warning(compat_variable)
    if isinstance(response, Mapping):
        response_with_warning = dict(response)
        existing_warnings = response_with_warning.get("warnings")
        warnings = list(existing_warnings) if isinstance(existing_warnings, list) else []
        warnings.append(warning)
        response_with_warning["warnings"] = warnings
        return response_with_warning

    def _with_warning() -> Iterable[str]:
        try:
            yield f"data: {json.dumps({'event': 'warning', 'warning': warning})}\n\n"
            yield from response
        finally:
            close = getattr(response, "close", None)
            if callable(close):
                close()

    return _with_warning()


def build_legacy_sys_files_warning(compat_variable: LegacySysFilesCompatVariable) -> str:
    variable_selector = ".".join((compat_variable.start_node_id, compat_variable.variable_name))
    return (
        "sys.files is deprecated. This workflow now reads files from "
        f"`{variable_selector}`; update Service API calls to pass files in "
        f"`inputs.{variable_selector}` instead of `system.files` or top-level `files`."
    )


def _contains_legacy_sys_files_reference(value: Any) -> bool:
    if _is_legacy_sys_files_selector(value):
        return True

    if isinstance(value, str):
        return bool(_LEGACY_FILES_TEMPLATE_PATTERN.search(value))

    if isinstance(value, Mapping):
        return any(_contains_legacy_sys_files_reference(item) for item in value.values())

    if isinstance(value, list):
        return any(_contains_legacy_sys_files_reference(item) for item in value)

    return False


def _contains_userinput_files_reference(value: Any) -> bool:
    if _is_userinput_files_selector(value):
        return True

    if isinstance(value, str):
        return bool(_USER_INPUT_FILES_TEMPLATE_PATTERN.search(value))

    if isinstance(value, Mapping):
        return any(_contains_userinput_files_reference(item) for item in value.values())

    if isinstance(value, list):
        return any(_contains_userinput_files_reference(item) for item in value)

    return False


def _replace_legacy_sys_files_references(value: Any) -> Any:
    if _is_legacy_sys_files_selector(value):
        return list(_USER_INPUT_FILE_SELECTOR)

    if isinstance(value, str):
        return _LEGACY_FILES_TEMPLATE_PATTERN.sub(_USER_INPUT_FILES_TEMPLATE, value)

    if isinstance(value, Mapping):
        return {key: _replace_legacy_sys_files_references(item) for key, item in value.items()}

    if isinstance(value, list):
        return [_replace_legacy_sys_files_references(item) for item in value]

    return value


def _is_legacy_sys_files_selector(value: Any) -> bool:
    return (
        isinstance(value, list)
        and len(value) == 2
        and value[0] == _LEGACY_SYSTEM_NODE_ID
        and value[1] == _LEGACY_FILES_VARIABLE
    )


def _is_userinput_files_selector(value: Any) -> bool:
    return isinstance(value, list) and value == _USER_INPUT_FILE_SELECTOR


def _may_contain_legacy_sys_files_reference(value: list[Any]) -> bool:
    serialized_value = _serialize_for_fast_reference_search(value)
    if serialized_value is None:
        return True
    return _LEGACY_FILES_TEMPLATE in serialized_value or '["sys","files"]' in serialized_value


def _may_contain_userinput_files_reference(value: list[Any]) -> bool:
    serialized_value = _serialize_for_fast_reference_search(value)
    if serialized_value is None:
        return True
    return _USER_INPUT_FILES_TEMPLATE in serialized_value or '["userinput","files"]' in serialized_value


def _serialize_for_fast_reference_search(value: list[Any]) -> str | None:
    try:
        return json.dumps(value, ensure_ascii=False, separators=(",", ":"))
    except (TypeError, ValueError):
        return None


def _extract_legacy_files(args: Mapping[str, Any]) -> tuple[Any, bool]:
    if "files" in args and args["files"] is not None:
        return args["files"], True

    system = args.get("system")
    if isinstance(system, Mapping) and "files" in system and system["files"] is not None:
        return system["files"], True

    return None, False


def _extract_userinput_files(args: Mapping[str, Any]) -> tuple[Any, bool]:
    inputs = args.get("inputs")
    if isinstance(inputs, Mapping) and inputs.get(_USER_INPUT_FILE_INPUT_KEY) is not None:
        return inputs[_USER_INPUT_FILE_INPUT_KEY], True

    return None, False
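Taken together, the module above is a small self-contained compatibility layer. The sketch below (toy graph and argument values invented for illustration) exercises the two entry points the rest of this diff calls: the graph rewrite and the argument normalization.

```python
from core.workflow.legacy_system_files import (
    migrate_legacy_sys_files_graph,
    normalize_legacy_sys_files_args,
)

# A toy graph with one LLM-style node whose prompt still uses the deprecated template.
graph = {"nodes": [{"data": {"query_prompt_template": "{{#sys.query#}}\n\n{{#sys.files#}}"}}]}

migrated = migrate_legacy_sys_files_graph(graph)
# The template reference is rewritten in the returned copy:
# migrated["nodes"][0]["data"]["query_prompt_template"] == "{{#sys.query#}}\n\n{{#userinput.files#}}"

args = {"query": "describe the attachment", "files": [{"type": "image", "url": "https://example.com/a.png"}]}
normalized, compat = normalize_legacy_sys_files_args(graph=graph, args=args)
# normalized["inputs"]["userinput.files"] now mirrors the top-level "files" list, and
# `compat` is non-None, which is what triggers the deprecation warning downstream.
```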
@@ -16,6 +16,7 @@ from .variable_prefixes import (
    ENVIRONMENT_VARIABLE_NODE_ID,
    RAG_PIPELINE_VARIABLE_NODE_ID,
    SYSTEM_VARIABLE_NODE_ID,
    USER_INPUT_VARIABLE_NODE_ID,
)

@@ -118,6 +119,11 @@ def build_bootstrap_variables(
        *(_with_selector(variable, ENVIRONMENT_VARIABLE_NODE_ID) for variable in environment_variables),
        *(_with_selector(variable, CONVERSATION_VARIABLE_NODE_ID) for variable in conversation_variables),
    ]
    # TODO: Remove this alias once all `sys.files` references and Service API callers are migrated.
    for variable in system_variables:
        if variable.name == SystemVariableKey.FILES.value:
            variables.append(_with_selector(variable, USER_INPUT_VARIABLE_NODE_ID))
            break

    rag_pipeline_variables_map: defaultdict[str, dict[str, Any]] = defaultdict(dict)
    for rag_var in rag_pipeline_variables:

@@ -1,4 +1,5 @@
SYSTEM_VARIABLE_NODE_ID = "sys"
USER_INPUT_VARIABLE_NODE_ID = "userinput"
ENVIRONMENT_VARIABLE_NODE_ID = "env"
CONVERSATION_VARIABLE_NODE_ID = "conversation"
RAG_PIPELINE_VARIABLE_NODE_ID = "rag"

@@ -21,6 +21,7 @@ def init_app(app: DifyApp):
        install_plugins,
        install_rag_pipeline_plugins,
        migrate_data_for_plugin,
        migrate_legacy_sys_files_workflows,
        migrate_oss,
        old_metadata_migration,
        remove_orphaned_files_on_storage,
@@ -46,6 +47,7 @@ def init_app(app: DifyApp):
        create_tenant,
        upgrade_db,
        fix_app_site_missing,
        migrate_legacy_sys_files_workflows,
        migrate_data_for_plugin,
        extract_plugins,
        extract_unique_plugins,
@@ -25,6 +25,7 @@ from typing_extensions import deprecated

from core.trigger.constants import TRIGGER_PLUGIN_NODE_TYPE
from core.workflow.human_input_adapter import adapt_node_config_for_graph
from core.workflow.legacy_system_files import migrate_legacy_sys_files_graph_with_result
from core.workflow.variable_prefixes import (
    CONVERSATION_VARIABLE_NODE_ID,
    SYSTEM_VARIABLE_NODE_ID,
@@ -274,7 +275,42 @@ class Workflow(Base):  # bug
        # Currently, the following functions / methods would mutate the returned dict:
        #
        # - `_get_graph_and_variable_pool_for_single_node_run`.
        return json.loads(self.graph) if self.graph else {}
        if not self.graph:
            return {}

        graph = json.loads(self.graph)
        if not self._supports_legacy_sys_files_compatibility():
            return graph

        # TODO: Remove this load-time compatibility rewrite after all persisted workflows are migrated.
        migration_result = migrate_legacy_sys_files_graph_with_result(
            graph,
            features=self.normalized_features_dict,
        )
        if migration_result.changed:
            self.graph = json.dumps(migration_result.graph)
        return migration_result.graph

    def migrate_legacy_sys_files_graph_in_place(self) -> bool:
        if not self.graph or not self._supports_legacy_sys_files_compatibility():
            return False

        # TODO: Remove this in-place compatibility rewrite after all persisted workflows are migrated.
        migration_result = migrate_legacy_sys_files_graph_with_result(
            json.loads(self.graph),
            features=self.normalized_features_dict,
        )
        if migration_result.changed:
            self.graph = json.dumps(migration_result.graph)
        return migration_result.changed

    def _supports_legacy_sys_files_compatibility(self) -> bool:
        return self.type in {
            WorkflowType.WORKFLOW,
            WorkflowType.CHAT,
            WorkflowType.WORKFLOW.value,
            WorkflowType.CHAT.value,
        }

    def get_node_config_by_id(self, node_id: str) -> NodeConfigDict:
        """Extract a node configuration from the workflow graph by node ID.

@@ -436,7 +472,7 @@ class Workflow(Base):  # bug
                "memory":
                    {
                        "window": { "enabled": false, "size": 10 },
                        "query_prompt_template": "{{#sys.query#}}\n\n{{#sys.files#}}",
                        "query_prompt_template": "{{#sys.query#}}",
                        "role_prefix": { "user": "", "assistant": "" },
                    },
                "selected": false,
@@ -1426,7 +1462,7 @@ class ConversationVariable(TypeBase):
        return variable_factory.build_conversation_variable_from_mapping(mapping)


# Only `sys.query` and `sys.files` could be modified.
# TODO: Remove file-system-variable editability after all persisted workflows are migrated.
_EDITABLE_SYSTEM_VARIABLE = frozenset(("query", "files"))
@@ -18,6 +18,7 @@ from core.app.features.rate_limiting import RateLimit
from core.app.features.rate_limiting.rate_limit import rate_limit_context
from core.app.layers.pause_state_persist_layer import PauseStateLayerConfig
from core.db import session_factory
from core.workflow.legacy_system_files import normalize_legacy_sys_files_args
from enums.quota_type import QuotaType
from extensions.otel import AppGenerateHandler, trace_span
from models.model import Account, App, AppMode, EndUser
@@ -118,6 +119,7 @@ class AppGenerateService:
        try:
            request_id = rate_limit.enter(request_id)
            quota_charge.commit()

            effective_mode = (
                AppMode.AGENT_CHAT if app_model.is_agent and app_model.mode != AppMode.AGENT_CHAT else app_model.mode
            )
@@ -152,6 +154,11 @@ class AppGenerateService:
                case AppMode.ADVANCED_CHAT:
                    workflow_id = args.get("workflow_id")
                    workflow = cls._get_workflow(app_model, invoke_from, workflow_id)
                    # TODO: Remove this compatibility normalization after all persisted workflows are migrated.
                    args, _ = normalize_legacy_sys_files_args(
                        graph=workflow.graph_dict,
                        args=args,
                    )

                    if streaming:
                        # Streaming mode: subscribe to SSE and enqueue the execution on first subscriber
@@ -173,7 +180,7 @@ class AppGenerateService:

                        on_subscribe = cls._build_streaming_task_on_subscribe(on_subscribe)
                        generator = AdvancedChatAppGenerator()
                        return rate_limit.generate(
                        response = rate_limit.generate(
                            generator.convert_to_event_stream(
                                generator.retrieve_events(
                                    AppMode.ADVANCED_CHAT,
@@ -183,6 +190,7 @@ class AppGenerateService:
                            ),
                            request_id=request_id,
                        )
                        return response
                    else:
                        # Blocking mode: run synchronously and return JSON instead of SSE
                        # Keep behaviour consistent with WORKFLOW blocking branch.
@@ -191,7 +199,7 @@ class AppGenerateService:
                            state_owner_user_id=workflow.created_by,
                        )
                        advanced_generator = AdvancedChatAppGenerator()
                        return rate_limit.generate(
                        response = rate_limit.generate(
                            advanced_generator.convert_to_event_stream(
                                advanced_generator.generate(
                                    app_model=app_model,
@@ -206,9 +214,15 @@ class AppGenerateService:
                            ),
                            request_id=request_id,
                        )
                        return response
                case AppMode.WORKFLOW:
                    workflow_id = args.get("workflow_id")
                    workflow = cls._get_workflow(app_model, invoke_from, workflow_id)
                    # TODO: Remove this compatibility normalization after all persisted workflows are migrated.
                    args, _ = normalize_legacy_sys_files_args(
                        graph=workflow.graph_dict,
                        args=args,
                    )
                    if streaming:
                        with rate_limit_context(rate_limit, request_id):
                            payload = AppExecutionParams.new(
@@ -228,7 +242,7 @@ class AppGenerateService:
                            workflow_based_app_execution_task.delay(payload_json)

                        on_subscribe = cls._build_streaming_task_on_subscribe(on_subscribe)
                        return rate_limit.generate(
                        response = rate_limit.generate(
                            WorkflowAppGenerator.convert_to_event_stream(
                                MessageBasedAppGenerator.retrieve_events(
                                    AppMode.WORKFLOW,
@@ -238,12 +252,13 @@ class AppGenerateService:
                            ),
                            request_id,
                        )
                        return response

                    pause_config = PauseStateLayerConfig(
                        session_factory=session_factory.get_session_maker(),
                        state_owner_user_id=workflow.created_by,
                    )
                    return rate_limit.generate(
                    response = rate_limit.generate(
                        WorkflowAppGenerator.convert_to_event_stream(
                            WorkflowAppGenerator().generate(
                                app_model=app_model,
@@ -259,6 +274,7 @@ class AppGenerateService:
                        ),
                        request_id,
                    )
                    return response
                case _:
                    raise ValueError(f"Invalid app mode {app_model.mode}")
        except Exception:
@@ -383,7 +399,7 @@ class AppGenerateService:
        )

    @classmethod
    def _get_workflow(cls, app_model: App, invoke_from: InvokeFrom, workflow_id: str | None = None) -> Workflow:
    def get_workflow(cls, app_model: App, invoke_from: InvokeFrom, workflow_id: str | None = None) -> Workflow:
        """
        Get workflow
        :param app_model: app model
@@ -419,6 +435,10 @@ class AppGenerateService:

        return workflow

    @classmethod
    def _get_workflow(cls, app_model: App, invoke_from: InvokeFrom, workflow_id: str | None = None) -> Workflow:
        return cls.get_workflow(app_model, invoke_from, workflow_id)

    @classmethod
    def get_response_generator(
        cls,
@@ -5,7 +5,7 @@ import uuid
from collections.abc import Callable, Generator, Mapping, Sequence
from typing import Any, cast

from sqlalchemy import exists, select
from sqlalchemy import exists, select, update
from sqlalchemy.orm import Session, sessionmaker

from configs import dify_config
@@ -91,6 +91,8 @@ from .human_input_delivery_test_service import (
from .workflow_draft_variable_service import DraftVariableSaver, DraftVarLoader, WorkflowDraftVariableService
from .workflow_restore import apply_published_workflow_snapshot_to_draft

logger = logging.getLogger(__name__)

_file_access_controller = DatabaseFileAccessController()

@@ -154,7 +156,7 @@ class WorkflowService:
        )

        # return draft workflow
        return workflow
        return self._persist_legacy_sys_files_migration_on_load(workflow)

    def get_published_workflow_by_id(
        self, app_model: App, workflow_id: str, session: Session | None = None
@@ -183,6 +185,7 @@ class WorkflowService:
                f"Cannot use draft workflow version. Workflow ID: {workflow_id}. "
                f"Please use a published workflow version or leave workflow_id empty."
            )
        self._persist_legacy_sys_files_migration_on_load(workflow, persist_in_separate_session=session is None)
        return workflow

    def get_published_workflow(self, app_model: App, session: Session | None = None) -> Workflow | None:
@@ -208,6 +211,40 @@ class WorkflowService:
            .limit(1)
        )

        return self._persist_legacy_sys_files_migration_on_load(workflow, persist_in_separate_session=session is None)

    @staticmethod
    def _persist_legacy_sys_files_migration_on_load(
        workflow: Workflow | None,
        *,
        persist_in_separate_session: bool = True,
    ) -> Workflow | None:
        if workflow is None:
            return None
        if not isinstance(workflow, Workflow):
            return workflow

        # TODO: Remove this load-time persistence path after the historical workflow migration is complete.
        original_graph = workflow.graph
        if not workflow.migrate_legacy_sys_files_graph_in_place():
            return workflow

        if not persist_in_separate_session:
            return workflow

        with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
            result = session.execute(
                update(Workflow)
                .where(Workflow.id == workflow.id, Workflow.graph == original_graph)
                .values(graph=workflow.graph)
            )
            if getattr(result, "rowcount", None) == 0:
                logger.warning(
                    "Skipped persisting legacy sys.files workflow migration because the workflow changed concurrently, "
                    "workflow_id=%s",
                    workflow.id,
                )

        return workflow

    def get_accessible_app_ids(self, app_ids: Sequence[str], tenant_id: str) -> set[str]:
@@ -58,10 +58,7 @@ workflow:
          variable_selector: []
        desc: ''
        memory:
          query_prompt_template: '{{#sys.query#}}


            {{#sys.files#}}'
          query_prompt_template: '{{#sys.query#}}'
          window:
            enabled: false
            size: 10

@@ -159,10 +159,7 @@ workflow:
          variable_selector: []
        desc: ''
        memory:
          query_prompt_template: '{{#sys.query#}}


            {{#sys.files#}}'
          query_prompt_template: '{{#sys.query#}}'
          role_prefix:
            assistant: ''
            user: ''
@@ -204,10 +201,7 @@ workflow:
          variable_selector: []
        desc: ''
        memory:
          query_prompt_template: '{{#sys.query#}}


            {{#sys.files#}}'
          query_prompt_template: '{{#sys.query#}}'
          role_prefix:
            assistant: ''
            user: ''

@@ -175,10 +175,7 @@ workflow:
          variable_selector: []
        desc: ''
        memory:
          query_prompt_template: '{{#sys.query#}}


            {{#sys.files#}}'
          query_prompt_template: '{{#sys.query#}}'
          role_prefix:
            assistant: ''
            user: ''
@@ -0,0 +1,235 @@
from types import SimpleNamespace
from unittest.mock import MagicMock

import click
import pytest

from commands import migrate_legacy_sys_files_workflows
from commands import workflow_migration as workflow_migration_commands


def test_migrate_legacy_sys_files_workflows_command_passes_batch_options(mocker, capsys):
    runner = mocker.patch.object(
        workflow_migration_commands,
        "run_legacy_sys_files_workflow_migration",
        return_value=workflow_migration_commands.LegacySysFilesWorkflowMigrationStats(
            scanned=10,
            migrated=2,
            failed=0,
            batches=1,
            last_id="workflow-10",
        ),
    )

    migrate_legacy_sys_files_workflows.callback(
        batch_size=200,
        limit=500,
        start_after_id="workflow-1",
        tenant_id="tenant-1",
        app_id="app-1",
        dry_run=True,
    )

    runner.assert_called_once_with(
        batch_size=200,
        limit=500,
        start_after_id="workflow-1",
        tenant_id="tenant-1",
        app_id="app-1",
        dry_run=True,
    )
    captured = capsys.readouterr()
    assert "scanned=10" in captured.out
    assert "migrated=2" in captured.out
    assert "last_id=workflow-10" in captured.out


def test_migrate_legacy_sys_files_workflows_rejects_non_positive_batch_size():
    with pytest.raises(click.UsageError, match="batch-size"):
        migrate_legacy_sys_files_workflows.callback(
            batch_size=0,
            limit=None,
            start_after_id=None,
            tenant_id=None,
            app_id=None,
            dry_run=False,
        )


def test_migrate_legacy_sys_files_workflows_rejects_non_positive_limit():
    with pytest.raises(click.UsageError, match="limit"):
        migrate_legacy_sys_files_workflows.callback(
            batch_size=100,
            limit=0,
            start_after_id=None,
            tenant_id=None,
            app_id=None,
            dry_run=False,
        )


def test_build_legacy_sys_files_workflow_query_uses_keyset_pagination():
    stmt = workflow_migration_commands._build_legacy_sys_files_workflow_query(
        start_after_id="workflow-1",
        batch_size=200,
        tenant_id="tenant-1",
        app_id="app-1",
    )
    compiled = str(stmt.compile(compile_kwargs={"literal_binds": True}))

    assert "workflows.id > 'workflow-1'" in compiled
    assert "workflows.tenant_id = 'tenant-1'" in compiled
    assert "workflows.app_id = 'app-1'" in compiled
    assert "ORDER BY workflows.id" in compiled
    assert "LIMIT 200" in compiled


def test_migrate_legacy_sys_files_workflow_batch_dry_run_rolls_back():
    migrated_workflow = MagicMock()
    migrated_workflow.id = "workflow-1"
    migrated_workflow.migrate_legacy_sys_files_graph_in_place.return_value = True
    untouched_workflow = MagicMock()
    untouched_workflow.id = "workflow-2"
    untouched_workflow.migrate_legacy_sys_files_graph_in_place.return_value = False
    session = MagicMock()
    session.scalars.return_value.all.return_value = [migrated_workflow, untouched_workflow]

    stats = workflow_migration_commands._migrate_legacy_sys_files_workflow_batch(
        session=session,
        start_after_id=None,
        batch_size=200,
        tenant_id=None,
        app_id=None,
        dry_run=True,
    )

    assert stats.scanned == 2
    assert stats.migrated == 1
    assert stats.failed == 0
    assert stats.last_id == "workflow-2"
    session.rollback.assert_called_once()
    session.commit.assert_not_called()


def test_migrate_legacy_sys_files_workflow_batch_commits_and_counts_failures(caplog):
    migrated_workflow = MagicMock()
    migrated_workflow.id = "workflow-1"
    migrated_workflow.migrate_legacy_sys_files_graph_in_place.return_value = True
    failing_workflow = MagicMock()
    failing_workflow.id = "workflow-2"
    failing_workflow.migrate_legacy_sys_files_graph_in_place.side_effect = RuntimeError("boom")
    session = MagicMock()
    session.scalars.return_value.all.return_value = [migrated_workflow, failing_workflow]

    stats = workflow_migration_commands._migrate_legacy_sys_files_workflow_batch(
        session=session,
        start_after_id=None,
        batch_size=200,
        tenant_id=None,
        app_id=None,
        dry_run=False,
    )

    assert stats.scanned == 2
    assert stats.migrated == 1
    assert stats.failed == 1
    assert stats.last_id == "workflow-2"
    assert "Failed to migrate legacy" in caplog.text
    session.commit.assert_called_once()
    session.rollback.assert_not_called()


def test_run_legacy_sys_files_workflow_migration_uses_keyset_batches(mocker):
    session_maker = MagicMock()
    sessions = [MagicMock(), MagicMock()]
    session_maker.side_effect = sessions
    mocker.patch.object(workflow_migration_commands, "sessionmaker", return_value=session_maker)
    mocker.patch.object(workflow_migration_commands, "db", SimpleNamespace(engine=object()))
    migrate_batch = mocker.patch.object(
        workflow_migration_commands,
        "_migrate_legacy_sys_files_workflow_batch",
        side_effect=[
            workflow_migration_commands.LegacySysFilesWorkflowMigrationStats(
                scanned=2,
                migrated=1,
                failed=0,
                last_id="workflow-2",
            ),
            workflow_migration_commands.LegacySysFilesWorkflowMigrationStats(
                scanned=1,
                migrated=1,
                failed=0,
                last_id="workflow-3",
            ),
        ],
    )

    stats = workflow_migration_commands.run_legacy_sys_files_workflow_migration(
        batch_size=2,
        limit=3,
        start_after_id="workflow-0",
        tenant_id="tenant-1",
        app_id="app-1",
        dry_run=True,
    )

    assert stats.scanned == 3
    assert stats.migrated == 2
    assert stats.batches == 2
    assert stats.last_id == "workflow-3"
    assert migrate_batch.call_args_list[0].kwargs["start_after_id"] == "workflow-0"
    assert migrate_batch.call_args_list[0].kwargs["batch_size"] == 2
    assert migrate_batch.call_args_list[1].kwargs["start_after_id"] == "workflow-2"
    assert migrate_batch.call_args_list[1].kwargs["batch_size"] == 1


def test_run_legacy_sys_files_workflow_migration_stops_on_empty_batch(mocker):
    session_maker = MagicMock(return_value=MagicMock())
    mocker.patch.object(workflow_migration_commands, "sessionmaker", return_value=session_maker)
    mocker.patch.object(workflow_migration_commands, "db", SimpleNamespace(engine=object()))
    mocker.patch.object(
        workflow_migration_commands,
        "_migrate_legacy_sys_files_workflow_batch",
        return_value=workflow_migration_commands.LegacySysFilesWorkflowMigrationStats(scanned=0),
    )

    stats = workflow_migration_commands.run_legacy_sys_files_workflow_migration(
        batch_size=2,
        limit=None,
        start_after_id=None,
        tenant_id=None,
        app_id=None,
        dry_run=False,
    )

    assert stats.scanned == 0
    assert stats.batches == 0


def test_run_legacy_sys_files_workflow_migration_stops_on_short_batch(mocker):
    session_maker = MagicMock(return_value=MagicMock())
    mocker.patch.object(workflow_migration_commands, "sessionmaker", return_value=session_maker)
    mocker.patch.object(workflow_migration_commands, "db", SimpleNamespace(engine=object()))
    migrate_batch = mocker.patch.object(
        workflow_migration_commands,
        "_migrate_legacy_sys_files_workflow_batch",
        return_value=workflow_migration_commands.LegacySysFilesWorkflowMigrationStats(
            scanned=1,
            migrated=1,
            failed=0,
            last_id="workflow-1",
        ),
    )

    stats = workflow_migration_commands.run_legacy_sys_files_workflow_migration(
        batch_size=2,
        limit=None,
        start_after_id=None,
        tenant_id=None,
        app_id=None,
        dry_run=False,
    )

    assert stats.scanned == 1
    assert stats.batches == 1
    migrate_batch.assert_called_once()
@ -4,7 +4,7 @@ import types
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import commands
|
||||
from commands import system as system_commands
|
||||
from commands import database as database_commands
|
||||
from libs.db_migration_lock import LockNotOwnedError, RedisError
|
||||
|
||||
HEARTBEAT_WAIT_TIMEOUT_SECONDS = 5.0
|
||||
@ -25,11 +25,11 @@ def _invoke_upgrade_db() -> int:
|
||||
|
||||
|
||||
def test_upgrade_db_skips_when_lock_not_acquired(monkeypatch, capsys):
|
||||
monkeypatch.setattr(system_commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 1234)
|
||||
monkeypatch.setattr(database_commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 1234)
|
||||
|
||||
lock = MagicMock()
|
||||
lock.acquire.return_value = False
|
||||
system_commands.redis_client.lock.return_value = lock
|
||||
database_commands.redis_client.lock.return_value = lock
|
||||
|
||||
exit_code = _invoke_upgrade_db()
|
||||
captured = capsys.readouterr()
|
||||
@ -37,18 +37,20 @@ def test_upgrade_db_skips_when_lock_not_acquired(monkeypatch, capsys):
|
||||
assert exit_code == 0
|
||||
assert "Database migration skipped" in captured.out
|
||||
|
||||
system_commands.redis_client.lock.assert_called_once_with(name="db_upgrade_lock", timeout=1234, thread_local=False)
|
||||
database_commands.redis_client.lock.assert_called_once_with(
|
||||
name="db_upgrade_lock", timeout=1234, thread_local=False
|
||||
)
|
||||
lock.acquire.assert_called_once_with(blocking=False)
|
||||
lock.release.assert_not_called()
|
||||
|
||||
|
||||
def test_upgrade_db_failure_not_masked_by_lock_release(monkeypatch, capsys):
|
||||
monkeypatch.setattr(system_commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 321)
|
||||
monkeypatch.setattr(database_commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 321)
|
||||
|
||||
lock = MagicMock()
|
||||
lock.acquire.return_value = True
|
||||
lock.release.side_effect = LockNotOwnedError("simulated")
|
||||
system_commands.redis_client.lock.return_value = lock
|
||||
database_commands.redis_client.lock.return_value = lock
|
||||
|
||||
def _upgrade():
|
||||
raise RuntimeError("boom")
|
||||
@ -61,18 +63,18 @@ def test_upgrade_db_failure_not_masked_by_lock_release(monkeypatch, capsys):
|
||||
assert exit_code == 1
|
||||
assert "Database migration failed: boom" in captured.out
|
||||
|
||||
system_commands.redis_client.lock.assert_called_once_with(name="db_upgrade_lock", timeout=321, thread_local=False)
|
||||
database_commands.redis_client.lock.assert_called_once_with(name="db_upgrade_lock", timeout=321, thread_local=False)
|
||||
lock.acquire.assert_called_once_with(blocking=False)
|
||||
lock.release.assert_called_once()
|
||||
|
||||
|
||||
def test_upgrade_db_success_ignores_lock_not_owned_on_release(monkeypatch, capsys):
|
||||
monkeypatch.setattr(system_commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 999)
|
||||
monkeypatch.setattr(database_commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 999)
|
||||
|
||||
lock = MagicMock()
|
||||
lock.acquire.return_value = True
|
||||
lock.release.side_effect = LockNotOwnedError("simulated")
|
||||
system_commands.redis_client.lock.return_value = lock
|
||||
database_commands.redis_client.lock.return_value = lock
|
||||
|
||||
_install_fake_flask_migrate(monkeypatch, lambda: None)
|
||||
|
||||
@ -82,7 +84,7 @@ def test_upgrade_db_success_ignores_lock_not_owned_on_release(monkeypatch, capsy
|
||||
assert exit_code == 0
|
||||
assert "Database migration successful!" in captured.out
|
||||
|
||||
system_commands.redis_client.lock.assert_called_once_with(name="db_upgrade_lock", timeout=999, thread_local=False)
|
||||
database_commands.redis_client.lock.assert_called_once_with(name="db_upgrade_lock", timeout=999, thread_local=False)
|
||||
lock.acquire.assert_called_once_with(blocking=False)
|
||||
lock.release.assert_called_once()
|
||||
|
||||
@ -93,11 +95,11 @@ def test_upgrade_db_renews_lock_during_migration(monkeypatch, capsys):
|
||||
"""
|
||||
|
||||
# Use a small TTL so the heartbeat interval triggers quickly.
|
||||
monkeypatch.setattr(system_commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 0.3)
|
||||
monkeypatch.setattr(database_commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 0.3)
|
||||
|
||||
lock = MagicMock()
|
||||
lock.acquire.return_value = True
|
||||
system_commands.redis_client.lock.return_value = lock
|
||||
database_commands.redis_client.lock.return_value = lock
|
||||
|
||||
renewed = threading.Event()
|
||||
|
||||
@ -121,11 +123,11 @@ def test_upgrade_db_renews_lock_during_migration(monkeypatch, capsys):
|
||||
|
||||
def test_upgrade_db_ignores_reacquire_errors(monkeypatch, capsys):
|
||||
# Use a small TTL so heartbeat runs during the upgrade call.
|
||||
monkeypatch.setattr(system_commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 0.3)
|
||||
monkeypatch.setattr(database_commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 0.3)
|
||||
|
||||
lock = MagicMock()
|
||||
lock.acquire.return_value = True
|
||||
system_commands.redis_client.lock.return_value = lock
|
||||
database_commands.redis_client.lock.return_value = lock
|
||||
|
||||
attempted = threading.Event()
|
||||
|
||||
|
||||
49
api/tests/unit_tests/commands/test_workspace.py
Normal file
49
api/tests/unit_tests/commands/test_workspace.py
Normal file
@ -0,0 +1,49 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from commands import reset_encrypt_key_pair
|
||||
from commands import workspace as workspace_commands
|
||||
|
||||
|
||||
def test_reset_encrypt_key_pair_skips_non_self_hosted(monkeypatch, capsys):
|
||||
monkeypatch.setattr(workspace_commands.dify_config, "EDITION", "CLOUD")
|
||||
|
||||
reset_encrypt_key_pair.callback()
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert "only for SELF_HOSTED" in captured.out
|
||||
|
||||
|
||||
def test_reset_encrypt_key_pair_rotates_keys_and_removes_custom_provider_data(monkeypatch, capsys):
|
||||
monkeypatch.setattr(workspace_commands.dify_config, "EDITION", "SELF_HOSTED")
|
||||
monkeypatch.setattr(workspace_commands, "generate_key_pair", lambda tenant_id: f"public-key-{tenant_id}")
|
||||
tenant = MagicMock()
|
||||
tenant.id = "tenant-1"
|
||||
session = MagicMock()
|
||||
session.scalars.return_value.all.return_value = [tenant]
|
||||
session_manager = MagicMock()
|
||||
session_manager.begin.return_value.__enter__.return_value = session
|
||||
monkeypatch.setattr(workspace_commands, "sessionmaker", lambda *args, **kwargs: session_manager)
|
||||
monkeypatch.setattr(workspace_commands, "db", MagicMock(engine=object()))
|
||||
|
||||
reset_encrypt_key_pair.callback()
|
||||
|
||||
assert tenant.encrypt_public_key == "public-key-tenant-1"
|
||||
assert session.execute.call_count == 2
|
||||
captured = capsys.readouterr()
|
||||
assert "tenant-1 has been reset" in captured.out
|
||||
|
||||
|
||||
def test_reset_encrypt_key_pair_stops_when_workspace_record_is_missing(monkeypatch, capsys):
|
||||
monkeypatch.setattr(workspace_commands.dify_config, "EDITION", "SELF_HOSTED")
|
||||
session = MagicMock()
|
||||
session.scalars.return_value.all.return_value = [None]
|
||||
session_manager = MagicMock()
|
||||
session_manager.begin.return_value.__enter__.return_value = session
|
||||
monkeypatch.setattr(workspace_commands, "sessionmaker", lambda *args, **kwargs: session_manager)
|
||||
monkeypatch.setattr(workspace_commands, "db", MagicMock(engine=object()))
|
||||
|
||||
reset_encrypt_key_pair.callback()
|
||||
|
||||
session.execute.assert_not_called()
|
||||
captured = capsys.readouterr()
|
||||
assert "No workspaces found" in captured.out
|
||||
@ -0,0 +1,83 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from controllers.service_api.app.legacy_system_files import (
|
||||
attach_legacy_system_file_warning_for_service_api,
|
||||
normalize_legacy_system_file_args_for_service_api,
|
||||
)
|
||||
from services.app_generate_service import AppGenerateService
|
||||
|
||||
_LEGACY_FILE_TEMPLATE = "{{#" + ".".join(("sys", "files")) + "#}}"
|
||||
_USER_INPUT_FILE_INPUT_KEY = ".".join(("userinput", "files"))
|
||||
|
||||
|
||||
def _legacy_file_graph() -> dict:
|
||||
return {
|
||||
"nodes": [
|
||||
{"id": "start", "data": {"type": "start", "variables": []}},
|
||||
{"id": "answer", "data": {"type": "answer", "answer": _LEGACY_FILE_TEMPLATE}},
|
||||
],
|
||||
"edges": [],
|
||||
}
|
||||
|
||||
|
||||
def test_hidden_service_api_file_payload_maps_to_userinput_files(mocker):
|
||||
workflow = MagicMock()
|
||||
workflow.graph_dict = _legacy_file_graph()
|
||||
get_workflow = mocker.patch.object(AppGenerateService, "get_workflow", return_value=workflow)
|
||||
app_model = MagicMock()
|
||||
files = [{"transfer_method": "remote_url", "url": "https://example.com/a.png"}]
|
||||
|
||||
args, compat_variable = normalize_legacy_system_file_args_for_service_api(
|
||||
app_model=app_model,
|
||||
args={"inputs": {}},
|
||||
raw_payload={"system": {"files": files}},
|
||||
)
|
||||
|
||||
get_workflow.assert_called_once()
|
||||
assert compat_variable is not None
|
||||
assert args["files"] == files
|
||||
assert args["inputs"][_USER_INPUT_FILE_INPUT_KEY] == files
|
||||
|
||||
|
||||
def test_service_api_file_payload_is_ignored_when_absent(mocker):
|
||||
get_workflow = mocker.patch.object(AppGenerateService, "get_workflow")
|
||||
app_model = MagicMock()
|
||||
original_args = {"inputs": {}}
|
||||
|
||||
args, compat_variable = normalize_legacy_system_file_args_for_service_api(
|
||||
app_model=app_model,
|
||||
args=original_args,
|
||||
raw_payload={},
|
||||
)
|
||||
|
||||
assert args is original_args
|
||||
assert compat_variable is None
|
||||
get_workflow.assert_not_called()
|
||||
|
||||
|
||||
def test_top_level_service_api_file_payload_still_checks_workflow_graph(mocker):
|
||||
workflow = MagicMock()
|
||||
workflow.graph_dict = {"nodes": []}
|
||||
get_workflow = mocker.patch.object(AppGenerateService, "get_workflow", return_value=workflow)
|
||||
app_model = MagicMock()
|
||||
files = [{"id": "file-1"}]
|
||||
|
||||
args, compat_variable = normalize_legacy_system_file_args_for_service_api(
|
||||
app_model=app_model,
|
||||
args={"inputs": {}, "files": files},
|
||||
raw_payload={},
|
||||
)
|
||||
|
||||
get_workflow.assert_called_once()
|
||||
assert args["files"] == files
|
||||
assert compat_variable is None
|
||||
|
||||
|
||||
def test_service_api_warning_is_attached_only_when_compatibility_was_used():
|
||||
compat_variable = MagicMock(start_node_id="userinput", variable_name="files")
|
||||
|
||||
response = attach_legacy_system_file_warning_for_service_api({"answer": "ok"}, compat_variable)
|
||||
response_without_warning = attach_legacy_system_file_warning_for_service_api({"answer": "ok"}, None)
|
||||
|
||||
assert response["warnings"]
|
||||
assert response_without_warning == {"answer": "ok"}
|
||||
@ -44,7 +44,7 @@ class TestAdvancedChatAppGeneratorValidation:
|
||||
with pytest.raises(ValueError, match="query must be a string"):
|
||||
generator.generate(
|
||||
app_model=SimpleNamespace(),
|
||||
workflow=SimpleNamespace(),
|
||||
workflow=SimpleNamespace(graph_dict={"nodes": []}),
|
||||
user=SimpleNamespace(),
|
||||
args={"inputs": {}, "query": 123},
|
||||
invoke_from=InvokeFrom.WEB_APP,
|
||||
@ -177,7 +177,7 @@ class TestAdvancedChatAppGeneratorInternals:
|
||||
|
||||
result = generator.generate(
|
||||
app_model=SimpleNamespace(id="app", tenant_id="tenant"),
|
||||
workflow=SimpleNamespace(features_dict={}),
|
||||
workflow=SimpleNamespace(features_dict={}, graph_dict={"nodes": []}),
|
||||
user=user,
|
||||
args={
|
||||
"query": "hello",
|
||||
@ -1147,7 +1147,7 @@ class TestAdvancedChatAppGeneratorInternals:
|
||||
monkeypatch.setattr(generator, "_generate", _fake_generate)
|
||||
|
||||
app_model = SimpleNamespace(id="app", tenant_id="tenant")
|
||||
workflow = SimpleNamespace(features_dict={})
|
||||
workflow = SimpleNamespace(features_dict={}, graph_dict={"nodes": []})
|
||||
from models import Account
|
||||
|
||||
user = Account(name="Tester", email="tester@example.com")
|
||||
@ -1226,7 +1226,7 @@ class TestAdvancedChatAppGeneratorInternals:
|
||||
monkeypatch.setattr(generator, "_generate", _fake_generate)
|
||||
|
||||
app_model = SimpleNamespace(id="app", tenant_id="tenant")
|
||||
workflow = SimpleNamespace(features_dict={})
|
||||
workflow = SimpleNamespace(features_dict={}, graph_dict={"nodes": []})
|
||||
from models.model import EndUser
|
||||
|
||||
user = EndUser(tenant_id="tenant", type="session", name="tester", session_id="session")
|
||||
|
||||
@ -177,7 +177,7 @@ class TestWorkflowAppGeneratorGenerate:
|
||||
|
||||
result = generator.generate(
|
||||
app_model=SimpleNamespace(id="app", tenant_id="tenant"),
|
||||
workflow=SimpleNamespace(features_dict={}),
|
||||
workflow=SimpleNamespace(features_dict={}, graph_dict={"nodes": []}),
|
||||
user=SimpleNamespace(id="user", session_id="session"),
|
||||
args={"inputs": {}, SKIP_PREPARE_USER_INPUTS_KEY: True},
|
||||
invoke_from=InvokeFrom.WEB_APP,
|
||||
|
||||
143
api/tests/unit_tests/core/workflow/test_legacy_system_files.py
Normal file
143
api/tests/unit_tests/core/workflow/test_legacy_system_files.py
Normal file
@ -0,0 +1,143 @@
|
||||
from core.workflow.legacy_system_files import (
|
||||
LegacySysFilesCompatVariable,
|
||||
attach_legacy_sys_files_warning,
|
||||
migrate_legacy_sys_files_graph_with_result,
|
||||
normalize_legacy_sys_files_args,
|
||||
resolve_legacy_sys_files_compat_variable,
|
||||
)
|
||||
|
||||
_LEGACY_NODE_ID = "sys"
|
||||
_LEGACY_ALIAS_NODE_ID = "userinput"
|
||||
_LEGACY_VARIABLE_NAME = "files"
|
||||
_LEGACY_SELECTOR = [_LEGACY_NODE_ID, _LEGACY_VARIABLE_NAME]
|
||||
_LEGACY_TEMPLATE = "{{#" + ".".join((_LEGACY_NODE_ID, _LEGACY_VARIABLE_NAME)) + "#}}"
|
||||
_LEGACY_ALIAS_SELECTOR = [_LEGACY_ALIAS_NODE_ID, _LEGACY_VARIABLE_NAME]
|
||||
_LEGACY_ALIAS_TEMPLATE = "{{#" + ".".join((_LEGACY_ALIAS_NODE_ID, _LEGACY_VARIABLE_NAME)) + "#}}"
|
||||
_LEGACY_ALIAS_INPUT_KEY = ".".join((_LEGACY_ALIAS_NODE_ID, _LEGACY_VARIABLE_NAME))
|
||||
|
||||
|
||||
def test_migrate_legacy_sys_files_graph_ignores_invalid_or_unrelated_graphs():
|
||||
assert not migrate_legacy_sys_files_graph_with_result({}).changed
|
||||
assert not migrate_legacy_sys_files_graph_with_result({"nodes": [], "edges": [_LEGACY_SELECTOR]}).changed
|
||||
assert not migrate_legacy_sys_files_graph_with_result({"nodes": [{"data": {"value": ["sys", "query"]}}]}).changed
|
||||
|
||||
|
||||
def test_migrate_legacy_sys_files_graph_rewrites_sys_files_to_userinput_files_without_start_variable():
|
||||
graph = {
|
||||
"nodes": [
|
||||
{"id": "start", "data": {"type": "start", "variables": [{"variable": "sys_files"}]}},
|
||||
{
|
||||
"id": "answer",
|
||||
"data": {
|
||||
"type": "answer",
|
||||
"answer": _LEGACY_SELECTOR,
|
||||
"template": _LEGACY_TEMPLATE,
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
result = migrate_legacy_sys_files_graph_with_result(graph)
|
||||
|
||||
assert result.changed
|
||||
start_data = result.graph["nodes"][0]["data"]
|
||||
assert start_data["variables"] == [{"variable": "sys_files"}]
|
||||
assert result.graph["nodes"][1]["data"]["answer"] == _LEGACY_ALIAS_SELECTOR
|
||||
assert result.graph["nodes"][1]["data"]["template"] == _LEGACY_ALIAS_TEMPLATE
|
||||
|
||||
|
||||
def test_migrate_legacy_sys_files_graph_leaves_userinput_files_target_unchanged():
|
||||
graph = {
|
||||
"nodes": [
|
||||
{"id": "start", "data": {"type": "start", "variables": []}},
|
||||
{
|
||||
"id": "answer",
|
||||
"data": {
|
||||
"type": "answer",
|
||||
"answer": _LEGACY_ALIAS_SELECTOR,
|
||||
"template": _LEGACY_ALIAS_TEMPLATE,
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
result = migrate_legacy_sys_files_graph_with_result(graph)
|
||||
|
||||
assert not result.changed
|
||||
assert result.graph == graph
|
||||
|
||||
|
||||
def test_resolve_legacy_sys_files_compat_variable_returns_userinput_files_target():
|
||||
assert resolve_legacy_sys_files_compat_variable({}) is None
|
||||
assert resolve_legacy_sys_files_compat_variable({"nodes": [{"data": {"value": ["sys", "query"]}}]}) is None
|
||||
|
||||
compat_variable = resolve_legacy_sys_files_compat_variable({"nodes": [{"data": {"value": _LEGACY_SELECTOR}}]})
|
||||
|
||||
assert compat_variable == LegacySysFilesCompatVariable(
|
||||
start_node_id=_LEGACY_ALIAS_NODE_ID,
|
||||
variable_name=_LEGACY_VARIABLE_NAME,
|
||||
)
|
||||
assert (
|
||||
resolve_legacy_sys_files_compat_variable({"nodes": [{"data": {"value": _LEGACY_ALIAS_SELECTOR}}]})
|
||||
== compat_variable
|
||||
)
|
||||
|
||||
|
||||
def test_normalize_legacy_sys_files_args_handles_no_compat_and_top_level_files():
|
||||
args_without_legacy, compat_without_legacy = normalize_legacy_sys_files_args(
|
||||
graph={"nodes": []},
|
||||
args={"inputs": {}},
|
||||
)
|
||||
assert args_without_legacy == {"inputs": {}}
|
||||
assert compat_without_legacy is None
|
||||
|
||||
files = [{"id": "file-1"}]
|
||||
graph = {
|
||||
"nodes": [
|
||||
{"id": "start", "data": {"type": "start", "variables": []}},
|
||||
{"id": "answer", "data": {"type": "answer", "answer": _LEGACY_TEMPLATE}},
|
||||
],
|
||||
}
|
||||
normalized_args, compat_variable = normalize_legacy_sys_files_args(
|
||||
graph=graph,
|
||||
args={"inputs": {}, "files": files},
|
||||
)
|
||||
|
||||
assert compat_variable is not None
|
||||
assert normalized_args["files"] == files
|
||||
assert normalized_args["inputs"][".".join((compat_variable.start_node_id, compat_variable.variable_name))] == files
|
||||
|
||||
|
||||
def test_normalize_legacy_sys_files_args_maps_userinput_files_to_top_level_files_without_warning():
|
||||
files = [{"id": "file-1"}]
|
||||
normalized_args, compat_variable = normalize_legacy_sys_files_args(
|
||||
graph={"nodes": [{"data": {"type": "answer", "answer": _LEGACY_ALIAS_TEMPLATE}}]},
|
||||
args={"inputs": {_LEGACY_ALIAS_INPUT_KEY: files}},
|
||||
)
|
||||
|
||||
assert compat_variable is None
|
||||
assert normalized_args["files"] == files
|
||||
assert normalized_args["inputs"] == {_LEGACY_ALIAS_INPUT_KEY: files}
|
||||
|
||||
|
||||
def test_attach_legacy_sys_files_warning_wraps_stream_and_closes_source():
|
||||
class CloseableStream:
|
||||
closed = False
|
||||
|
||||
def __iter__(self):
|
||||
yield "data: payload\n\n"
|
||||
|
||||
def close(self):
|
||||
self.closed = True
|
||||
|
||||
stream = CloseableStream()
|
||||
wrapped = attach_legacy_sys_files_warning(
|
||||
stream,
|
||||
LegacySysFilesCompatVariable(start_node_id=_LEGACY_ALIAS_NODE_ID, variable_name=_LEGACY_VARIABLE_NAME),
|
||||
)
|
||||
|
||||
chunks = list(wrapped)
|
||||
|
||||
assert "warning" in chunks[0]
|
||||
assert chunks[1] == "data: payload\n\n"
|
||||
assert stream.closed
|
||||
@ -1,6 +1,7 @@
|
||||
from types import SimpleNamespace
|
||||
|
||||
from core.workflow.system_variables import (
|
||||
build_bootstrap_variables,
|
||||
build_system_variables,
|
||||
default_system_variables,
|
||||
get_node_creation_preload_selectors,
|
||||
@ -56,6 +57,25 @@ def test_build_system_variables_preserves_file_values():
|
||||
assert system_values["files"] == [file]
|
||||
|
||||
|
||||
def test_build_bootstrap_variables_adds_userinput_files_alias():
|
||||
file = File(
|
||||
file_type=FileType.DOCUMENT,
|
||||
transfer_method=FileTransferMethod.LOCAL_FILE,
|
||||
related_id="file-id",
|
||||
filename="test.txt",
|
||||
extension=".txt",
|
||||
mime_type="text/plain",
|
||||
size=1,
|
||||
storage_key="storage-key",
|
||||
)
|
||||
|
||||
bootstrap_variables = build_bootstrap_variables(system_variables=build_system_variables(files=[file]))
|
||||
file_variables_by_selector = {tuple(variable.selector): variable for variable in bootstrap_variables}
|
||||
|
||||
assert file_variables_by_selector[("sys", "files")].value == [file]
|
||||
assert file_variables_by_selector[("userinput", "files")].value == [file]
|
||||
|
||||
|
||||
def test_default_system_variables_generates_workflow_run_id():
|
||||
system_variables = default_system_variables()
|
||||
system_values = system_variables_to_mapping(system_variables)
|
||||
|
||||
@ -17,6 +17,11 @@ from models.workflow import (
|
||||
is_system_variable_editable,
|
||||
)
|
||||
|
||||
_LEGACY_FILE_TEMPLATE = "{{#" + ".".join(("sys", "files")) + "#}}"
|
||||
_LEGACY_FILE_SELECTOR = ["sys", "files"]
|
||||
_USER_INPUT_FILE_TEMPLATE = "{{#" + ".".join(("userinput", "files")) + "#}}"
|
||||
_USER_INPUT_FILE_SELECTOR = ["userinput", "files"]
|
||||
|
||||
|
||||
def test_environment_variables():
|
||||
# tenant_id context variable removed - using current_user.current_tenant_id directly
|
||||
@ -193,6 +198,131 @@ class TestIsSystemVariableEditable:
|
||||
assert is_system_variable_editable("invalid_or_new_system_variable") == False
|
||||
|
||||
|
||||
class TestWorkflowLegacySysFilesCompatibility:
|
||||
def _make_workflow(self, graph: dict, *, features: dict | None = None) -> Workflow:
|
||||
return Workflow(
|
||||
tenant_id="tenant_id",
|
||||
app_id="app_id",
|
||||
type="workflow",
|
||||
version="draft",
|
||||
graph=json.dumps(graph),
|
||||
features=json.dumps(features or {}),
|
||||
created_by="account_id",
|
||||
environment_variables=[],
|
||||
conversation_variables=[],
|
||||
)
|
||||
|
||||
def test_graph_dict_rewrites_legacy_sys_files_references_to_userinput_files(self):
|
||||
workflow = self._make_workflow(
|
||||
{
|
||||
"nodes": [
|
||||
{
|
||||
"id": "start",
|
||||
"data": {
|
||||
"type": "start",
|
||||
"title": "Start",
|
||||
"variables": [],
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": "llm",
|
||||
"data": {
|
||||
"type": "llm",
|
||||
"prompt_template": [{"role": "user", "text": f"files: {_LEGACY_FILE_TEMPLATE}"}],
|
||||
"context": {"variable_selector": _LEGACY_FILE_SELECTOR},
|
||||
},
|
||||
},
|
||||
],
|
||||
"edges": [],
|
||||
}
|
||||
)
|
||||
|
||||
graph = workflow.graph_dict
|
||||
start_node = next(node for node in graph["nodes"] if node["id"] == "start")
|
||||
llm_node = next(node for node in graph["nodes"] if node["id"] == "llm")
|
||||
|
||||
assert start_node["data"]["variables"] == []
|
||||
assert llm_node["data"]["prompt_template"][0]["text"] == f"files: {_USER_INPUT_FILE_TEMPLATE}"
|
||||
assert llm_node["data"]["context"]["variable_selector"] == _USER_INPUT_FILE_SELECTOR
|
||||
|
||||
stored_graph = json.loads(workflow.graph)
|
||||
stored_llm_node = next(node for node in stored_graph["nodes"] if node["id"] == "llm")
|
||||
assert stored_llm_node["data"]["prompt_template"][0]["text"] == f"files: {_USER_INPUT_FILE_TEMPLATE}"
|
||||
assert stored_llm_node["data"]["context"]["variable_selector"] == _USER_INPUT_FILE_SELECTOR
|
||||
|
||||
def test_graph_dict_preserves_existing_start_variables_when_migrating_legacy_sys_files(self):
|
||||
workflow = self._make_workflow(
|
||||
{
|
||||
"nodes": [
|
||||
{
|
||||
"id": "start",
|
||||
"data": {
|
||||
"type": "start",
|
||||
"title": "Start",
|
||||
"variables": [
|
||||
{"variable": "sys_files", "label": "Existing", "type": "text-input"},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": "answer",
|
||||
"data": {
|
||||
"type": "answer",
|
||||
"answer": _LEGACY_FILE_TEMPLATE,
|
||||
},
|
||||
},
|
||||
],
|
||||
"edges": [],
|
||||
}
|
||||
)
|
||||
|
||||
graph = workflow.graph_dict
|
||||
start_node = next(node for node in graph["nodes"] if node["id"] == "start")
|
||||
answer_node = next(node for node in graph["nodes"] if node["id"] == "answer")
|
||||
|
||||
assert [variable["variable"] for variable in start_node["data"]["variables"]] == ["sys_files"]
|
||||
assert answer_node["data"]["answer"] == _USER_INPUT_FILE_TEMPLATE
|
||||
|
||||
def test_graph_dict_leaves_userinput_files_references_unchanged(self):
|
||||
workflow = self._make_workflow(
|
||||
{
|
||||
"nodes": [
|
||||
{
|
||||
"id": "start",
|
||||
"data": {
|
||||
"type": "start",
|
||||
"title": "Start",
|
||||
"variables": [],
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": "answer",
|
||||
"data": {
|
||||
"type": "answer",
|
||||
"answer": _USER_INPUT_FILE_TEMPLATE,
|
||||
},
|
||||
},
|
||||
],
|
||||
"edges": [],
|
||||
},
|
||||
features={
|
||||
"file_upload": {
|
||||
"enabled": True,
|
||||
"allowed_file_upload_methods": ["remote_url"],
|
||||
"allowed_file_types": ["document", "custom"],
|
||||
"allowed_file_extensions": [".pdf"],
|
||||
"number_limits": 8,
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
graph = workflow.graph_dict
|
||||
start_node = next(node for node in graph["nodes"] if node["id"] == "start")
|
||||
|
||||
assert start_node["data"]["variables"] == []
|
||||
assert json.loads(workflow.graph) == graph
|
||||
|
||||
|
||||
class TestWorkflowDraftVariableGetValue:
|
||||
def test_get_value_by_case(self):
|
||||
@dataclasses.dataclass
|
||||
|
||||
@ -85,6 +85,33 @@ def _make_workflow(*, workflow_id: str = "workflow-id", created_by: str = "owner
|
||||
return workflow
|
||||
|
||||
|
||||
_LEGACY_FILE_TEMPLATE = "{{#" + ".".join(("sys", "files")) + "#}}"
|
||||
_USER_INPUT_FILE_INPUT_KEY = ".".join(("userinput", "files"))
|
||||
|
||||
|
||||
def _legacy_system_file_graph() -> dict:
|
||||
return {
|
||||
"nodes": [
|
||||
{
|
||||
"id": "start",
|
||||
"data": {
|
||||
"type": "start",
|
||||
"title": "Start",
|
||||
"variables": [],
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": "answer",
|
||||
"data": {
|
||||
"type": "answer",
|
||||
"answer": _LEGACY_FILE_TEMPLATE,
|
||||
},
|
||||
},
|
||||
],
|
||||
"edges": [],
|
||||
}
|
||||
|
||||
|
||||
@contextmanager
|
||||
def _noop_rate_limit_context(rate_limit, request_id):
|
||||
"""Drop-in replacement for rate_limit_context that doesn't touch Redis."""
|
||||
@ -390,6 +417,60 @@ class TestGenerate:
|
||||
assert call_kwargs.get("pause_state_config") is not None
|
||||
assert call_kwargs["pause_state_config"].state_owner_user_id == "owner-id"
|
||||
|
||||
def test_workflow_service_api_maps_system_files_to_compat_start_input(self, mocker: MockerFixture):
|
||||
workflow = _make_workflow()
|
||||
workflow.graph_dict = _legacy_system_file_graph()
|
||||
mocker.patch.object(AppGenerateService, "_get_workflow", return_value=workflow)
|
||||
gen_spy = mocker.patch(
|
||||
"services.app_generate_service.WorkflowAppGenerator.generate",
|
||||
return_value={"result": "workflow-blocking"},
|
||||
)
|
||||
mocker.patch(
|
||||
"services.app_generate_service.WorkflowAppGenerator.convert_to_event_stream",
|
||||
side_effect=lambda x: x,
|
||||
)
|
||||
files = [{"transfer_method": "remote_url", "url": "https://example.com/a.png"}]
|
||||
|
||||
result = AppGenerateService.generate(
|
||||
app_model=_make_app(AppMode.WORKFLOW),
|
||||
user=_make_user(),
|
||||
args={"inputs": {}, "system": {"files": files}},
|
||||
invoke_from=InvokeFrom.SERVICE_API,
|
||||
streaming=False,
|
||||
)
|
||||
|
||||
assert result == {"result": "workflow-blocking"}
|
||||
forwarded_args = gen_spy.call_args.kwargs["args"]
|
||||
assert forwarded_args["files"] == files
|
||||
assert forwarded_args["inputs"][_USER_INPUT_FILE_INPUT_KEY] == files
|
||||
|
||||
def test_advanced_chat_service_api_maps_files_to_compat_start_input(self, mocker: MockerFixture):
|
||||
workflow = _make_workflow()
|
||||
workflow.graph_dict = _legacy_system_file_graph()
|
||||
mocker.patch.object(AppGenerateService, "_get_workflow", return_value=workflow)
|
||||
gen_spy = mocker.patch(
|
||||
"services.app_generate_service.AdvancedChatAppGenerator.generate",
|
||||
return_value={"result": "advanced-blocking"},
|
||||
)
|
||||
mocker.patch(
|
||||
"services.app_generate_service.AdvancedChatAppGenerator.convert_to_event_stream",
|
||||
side_effect=lambda x: x,
|
||||
)
|
||||
files = [{"transfer_method": "remote_url", "url": "https://example.com/a.png"}]
|
||||
|
||||
result = AppGenerateService.generate(
|
||||
app_model=_make_app(AppMode.ADVANCED_CHAT),
|
||||
user=_make_user(),
|
||||
args={"workflow_id": None, "query": "hi", "inputs": {}, "files": files},
|
||||
invoke_from=InvokeFrom.SERVICE_API,
|
||||
streaming=False,
|
||||
)
|
||||
|
||||
assert result == {"result": "advanced-blocking"}
|
||||
forwarded_args = gen_spy.call_args.kwargs["args"]
|
||||
assert forwarded_args["files"] == files
|
||||
assert forwarded_args["inputs"][_USER_INPUT_FILE_INPUT_KEY] == files
|
||||
|
||||
# -- WORKFLOW streaming -------------------------------------------------
|
||||
def test_workflow_streaming(self, mocker: MockerFixture, monkeypatch: pytest.MonkeyPatch):
|
||||
workflow = _make_workflow()
|
||||
@ -422,6 +503,41 @@ class TestGenerate:
|
||||
# The inner on_subscribe closure was invoked by _build_streaming_task_on_subscribe
|
||||
delay_spy.assert_called_once()
|
||||
|
||||
def test_workflow_streaming_service_api_maps_legacy_system_files(
|
||||
self, mocker: MockerFixture, monkeypatch: pytest.MonkeyPatch
|
||||
):
|
||||
workflow = _make_workflow()
|
||||
workflow.graph_dict = _legacy_system_file_graph()
|
||||
mocker.patch.object(AppGenerateService, "_get_workflow", return_value=workflow)
|
||||
params_spy = mocker.patch(
|
||||
"services.app_generate_service.AppExecutionParams.new",
|
||||
return_value=MagicMock(workflow_run_id="wfr-legacy", model_dump_json=MagicMock(return_value="{}")),
|
||||
)
|
||||
mocker.patch("services.app_generate_service.workflow_based_app_execution_task.delay")
|
||||
monkeypatch.setattr(ags_module.dify_config, "PUBSUB_REDIS_CHANNEL_TYPE", "streams")
|
||||
mocker.patch(
|
||||
"services.app_generate_service.MessageBasedAppGenerator.retrieve_events",
|
||||
return_value=iter(['data: {"event": "done"}\n\n']),
|
||||
)
|
||||
mocker.patch(
|
||||
"services.app_generate_service.WorkflowAppGenerator.convert_to_event_stream",
|
||||
side_effect=lambda x: x,
|
||||
)
|
||||
files = [{"transfer_method": "remote_url", "url": "https://example.com/a.png"}]
|
||||
|
||||
result = AppGenerateService.generate(
|
||||
app_model=_make_app(AppMode.WORKFLOW),
|
||||
user=_make_user(),
|
||||
args={"inputs": {}, "system": {"files": files}},
|
||||
invoke_from=InvokeFrom.SERVICE_API,
|
||||
streaming=True,
|
||||
)
|
||||
|
||||
assert next(iter(result)) == 'data: {"event": "done"}\n\n'
|
||||
forwarded_args = params_spy.call_args.kwargs["args"]
|
||||
assert forwarded_args["files"] == files
|
||||
assert forwarded_args["inputs"][_USER_INPUT_FILE_INPUT_KEY] == files
|
||||
|
||||
# -- Invalid mode -------------------------------------------------------
|
||||
def test_invalid_mode_raises(self, mocker: MockerFixture):
|
||||
app = _make_app("invalid-mode", is_agent=False)
|
||||
|
||||
@ -43,6 +43,9 @@ from services.workflow_service import (
|
||||
_setup_variable_pool,
|
||||
)
|
||||
|
||||
_LEGACY_FILE_TEMPLATE = "{{#" + ".".join(("sys", "files")) + "#}}"
|
||||
_USER_INPUT_FILE_TEMPLATE = "{{#" + ".".join(("userinput", "files")) + "#}}"
|
||||
|
||||
|
||||
class TestWorkflowAssociatedDataFactory:
|
||||
"""
|
||||
@ -346,6 +349,45 @@ class TestWorkflowService:
|
||||
|
||||
assert result == mock_workflow
|
||||
|
||||
def test_get_draft_workflow_persists_legacy_sys_files_migration(self, workflow_service, mock_db_session, mocker):
|
||||
app = TestWorkflowAssociatedDataFactory.create_app_mock()
|
||||
workflow = Workflow(
|
||||
tenant_id=app.tenant_id,
|
||||
app_id=app.id,
|
||||
type=WorkflowType.WORKFLOW,
|
||||
version=Workflow.VERSION_DRAFT,
|
||||
graph=json.dumps(
|
||||
{
|
||||
"nodes": [
|
||||
{"id": "start", "data": {"type": "start", "variables": []}},
|
||||
{"id": "answer", "data": {"type": "answer", "answer": _LEGACY_FILE_TEMPLATE}},
|
||||
],
|
||||
"edges": [],
|
||||
}
|
||||
),
|
||||
features="{}",
|
||||
created_by="account-id",
|
||||
environment_variables=[],
|
||||
conversation_variables=[],
|
||||
)
|
||||
workflow.id = "workflow-id"
|
||||
original_graph = workflow.graph
|
||||
mock_db_session.session.scalar.return_value = workflow
|
||||
migration_session = MagicMock()
|
||||
session_factory = MagicMock()
|
||||
session_factory.begin.return_value.__enter__.return_value = migration_session
|
||||
mocker.patch("services.workflow_service.sessionmaker", return_value=session_factory)
|
||||
|
||||
result = workflow_service.get_draft_workflow(app)
|
||||
|
||||
assert result == workflow
|
||||
assert _LEGACY_FILE_TEMPLATE not in workflow.graph
|
||||
assert _USER_INPUT_FILE_TEMPLATE in workflow.graph
|
||||
migration_session.execute.assert_called_once()
|
||||
update_stmt = migration_session.execute.call_args.args[0]
|
||||
assert str(workflow.id) in str(update_stmt.compile(compile_kwargs={"literal_binds": True}))
|
||||
assert original_graph != workflow.graph
|
||||
|
||||
def test_get_draft_workflow_returns_none(self, workflow_service, mock_db_session):
|
||||
"""Test get_draft_workflow returns None when no draft exists."""
|
||||
app = TestWorkflowAssociatedDataFactory.create_app_mock()
|
||||
|
||||
@ -33,6 +33,8 @@ from services.workflow_draft_variable_service import (
|
||||
WorkflowDraftVariableService,
|
||||
)
|
||||
|
||||
_SYSTEM_FILE_OUTPUT_KEY = ".".join((SYSTEM_VARIABLE_NODE_ID, "files"))
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_engine() -> Engine:
|
||||
@ -132,7 +134,7 @@ class TestDraftVariableSaver:
|
||||
assert node_id == c.expected_node_id, fail_msg
|
||||
assert name == c.expected_name, fail_msg
|
||||
|
||||
def test_build_variables_from_start_mapping_rebuilds_system_files(self):
|
||||
def test_build_variables_from_start_mapping_rebuilds_system_file_variable(self):
|
||||
mock_session = MagicMock(spec=Session)
|
||||
mock_user = MagicMock(spec=Account)
|
||||
mock_user.id = str(uuid.uuid4())
|
||||
@ -167,7 +169,7 @@ class TestDraftVariableSaver:
|
||||
return_value=rebuilt_file,
|
||||
) as rebuild_file,
|
||||
):
|
||||
draft_vars = saver._build_variables_from_start_mapping({"sys.files": [raw_file]})
|
||||
draft_vars = saver._build_variables_from_start_mapping({_SYSTEM_FILE_OUTPUT_KEY: [raw_file]})
|
||||
|
||||
sys_var = draft_vars[0]
|
||||
assert sys_var.get_value().value[0] == rebuilt_file
|
||||
@ -248,7 +250,7 @@ class TestDraftVariableSaver:
|
||||
|
||||
@patch("services.workflow_draft_variable_service._batch_upsert_draft_variable", autospec=True)
|
||||
def test_start_node_save_persists_sys_timestamp_and_workflow_run_id(self, mock_batch_upsert):
|
||||
"""Start node should persist common `sys.*` variables, not only `sys.files`."""
|
||||
"""Start node should persist common system variables."""
|
||||
mock_session = MagicMock(spec=Session)
|
||||
mock_user = MagicMock(spec=Account)
|
||||
mock_user.id = "test-user-id"
|
||||
@ -524,7 +526,7 @@ class TestWorkflowDraftVariableService:
|
||||
|
||||
# Create mock execution record
|
||||
mock_execution = Mock(spec=WorkflowNodeExecutionModel)
|
||||
mock_execution.load_full_outputs.return_value = {"sys.files": "[]"}
|
||||
mock_execution.load_full_outputs.return_value = {_SYSTEM_FILE_OUTPUT_KEY: "[]"}
|
||||
|
||||
# Mock the repository to return the execution record
|
||||
service._api_node_execution_repo = Mock()
|
||||
|
||||
@ -1463,11 +1463,6 @@
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"web/app/components/base/prompt-editor/plugins/component-picker-block/index.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/base/prompt-editor/plugins/component-picker-block/menu.tsx": {
|
||||
"erasable-syntax-only/parameter-properties": {
|
||||
"count": 1
|
||||
@ -2886,11 +2881,6 @@
|
||||
"count": 5
|
||||
}
|
||||
},
|
||||
"web/app/components/workflow-app/hooks/use-workflow-template.ts": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"web/app/components/workflow-app/index.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
@ -3223,11 +3213,6 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/workflow/nodes/_base/components/memory-config.tsx": {
|
||||
"unicorn/prefer-number-properties": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/workflow/nodes/_base/components/mixed-variable-text-input/index.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
@ -3853,21 +3838,11 @@
|
||||
"count": 8
|
||||
}
|
||||
},
|
||||
"web/app/components/workflow/nodes/start/panel.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"web/app/components/workflow/nodes/start/use-config.ts": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/workflow/nodes/start/use-single-run-form-params.ts": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"web/app/components/workflow/nodes/template-transform/default.ts": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
|
||||
@ -15,7 +15,6 @@ export const mockedWorkflowProcess = {
|
||||
predecessor_node_id: null,
|
||||
inputs: {
|
||||
'sys.query': 'hi',
|
||||
'sys.files': [],
|
||||
'sys.conversation_id': '92ce0a3e-8f15-43d1-b31d-32716c4b10a7',
|
||||
'sys.user_id': 'fbff43f9-d5a4-4e85-b63b-d3a91d806c6f',
|
||||
'sys.dialogue_count': 1,
|
||||
@ -26,7 +25,6 @@ export const mockedWorkflowProcess = {
|
||||
process_data: null,
|
||||
outputs: {
|
||||
'sys.query': 'hi',
|
||||
'sys.files': [],
|
||||
'sys.conversation_id': '92ce0a3e-8f15-43d1-b31d-32716c4b10a7',
|
||||
'sys.user_id': 'fbff43f9-d5a4-4e85-b63b-d3a91d806c6f',
|
||||
'sys.dialogue_count': 1,
|
||||
|
||||
@ -468,14 +468,13 @@ describe('ComponentPicker (component-picker-block/index.tsx)', () => {
|
||||
expect(dispatchSpy).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('handles workflow variable selection for nested fields: sys.query, sys.files, and normal paths', async () => {
|
||||
it('handles workflow variable selection for nested fields: system query and normal paths', async () => {
|
||||
const captures: Captures = { editor: null, eventEmitter: null }
|
||||
const user = userEvent.setup()
|
||||
|
||||
const workflowVariableBlock = makeWorkflowVariableBlock({}, [
|
||||
makeWorkflowVarNode('node-1', 'Node 1', [
|
||||
makeWorkflowNodeVar('sys.query', VarType.object, [makeWorkflowNodeVar('q', VarType.string)]),
|
||||
makeWorkflowNodeVar('sys.files', VarType.object, [makeWorkflowNodeVar('f', VarType.string)]),
|
||||
makeWorkflowNodeVar('output', VarType.object, [makeWorkflowNodeVar('x', VarType.string)]),
|
||||
]),
|
||||
])
|
||||
@ -513,10 +512,6 @@ describe('ComponentPicker (component-picker-block/index.tsx)', () => {
|
||||
expect(dispatchSpy).toHaveBeenCalledWith(INSERT_WORKFLOW_VARIABLE_BLOCK_COMMAND, ['sys.query'])
|
||||
await waitFor(() => expect(readEditorText(editor)).not.toContain('{'))
|
||||
|
||||
await openPickerAndSelectField('sys.files', 'f')
|
||||
expect(dispatchSpy).toHaveBeenCalledWith(INSERT_WORKFLOW_VARIABLE_BLOCK_COMMAND, ['sys.files'])
|
||||
await waitFor(() => expect(readEditorText(editor)).not.toContain('{'))
|
||||
|
||||
await openPickerAndSelectField('output', 'x')
|
||||
expect(dispatchSpy).toHaveBeenCalledWith(INSERT_WORKFLOW_VARIABLE_BLOCK_COMMAND, ['node-1', 'output', 'x'])
|
||||
await waitFor(() => expect(readEditorText(editor)).not.toContain('{'))
|
||||
|
||||
@ -15,6 +15,7 @@ import type {
|
||||
} from '../../types'
|
||||
import type { PickerBlockMenuOption } from './menu'
|
||||
import type { EventEmitterValue } from '@/context/event-emitter'
|
||||
// eslint-disable-next-line no-restricted-imports -- Existing Lexical typeahead positioning uses Floating UI directly.
|
||||
import {
|
||||
flip,
|
||||
offset,
|
||||
@ -202,7 +203,7 @@ const ComponentPicker = ({
|
||||
else if (varName === 'last_run')
|
||||
editor.dispatchCommand(INSERT_LAST_RUN_BLOCK_COMMAND, null)
|
||||
}
|
||||
else if (variables[1] === 'sys.query' || variables[1] === 'sys.files') {
|
||||
else if (variables[1] === 'sys.query') {
|
||||
editor.dispatchCommand(INSERT_WORKFLOW_VARIABLE_BLOCK_COMMAND, [variables[1]])
|
||||
}
|
||||
else {
|
||||
|
||||
@ -429,7 +429,6 @@ Chat applications support session persistence, allowing previous chat history to
|
||||
"id": "a4959eb4-c852-4e0c-ac7a-348233f7f345",
|
||||
"workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
|
||||
"inputs": {
|
||||
"sys.files": [],
|
||||
"sys.user_id": "abc-123",
|
||||
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
|
||||
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
|
||||
@ -473,7 +472,6 @@ Chat applications support session persistence, allowing previous chat history to
|
||||
"index": 1,
|
||||
"predecessor_node_id": null,
|
||||
"inputs": {
|
||||
"sys.files": [],
|
||||
"sys.user_id": "abc-123",
|
||||
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
|
||||
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
|
||||
@ -484,7 +482,6 @@ Chat applications support session persistence, allowing previous chat history to
|
||||
"process_data": {},
|
||||
"process_data_truncated": false,
|
||||
"outputs": {
|
||||
"sys.files": [],
|
||||
"sys.user_id": "abc-123",
|
||||
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
|
||||
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
|
||||
@ -1047,7 +1044,7 @@ Chat applications support session persistence, allowing previous chat history to
|
||||
```streaming {{ title: 'Response' }}
|
||||
event: ping
|
||||
|
||||
data: {"event":"workflow_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.files":[],"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","sys.timestamp":1776087863},"created_at":1776087863,"reason":"initial"}}
|
||||
data: {"event":"workflow_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","sys.timestamp":1776087863},"created_at":1776087863,"reason":"initial"}}
|
||||
|
||||
data: {"event":"node_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"b552d685-1119-4e6a-9a81-e91a23e5324b","node_id":"1775717266623","node_type":"start","title":"User Input","index":1,"predecessor_node_id":null,"inputs":null,"created_at":1776087863,"extras":{},"iteration_id":null,"loop_id":null}}
|
||||
|
||||
@ -1059,7 +1056,7 @@ Chat applications support session persistence, allowing previous chat history to
|
||||
|
||||
data: {"event":"workflow_paused","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","paused_nodes":["1775717346519"],"outputs":{},"reasons":[{"form_id":"019d8716-0fde-75da-8207-1458ccde76e5","form_content":"this is form 1:\n{{#$output.some_field#}}\n","inputs":[{"type":"paragraph","output_variable_name":"some_field","default":{"type":"variable","selector":["sys","workflow_run_id"],"value":""}}],"actions":[{"id":"approve","title":"YES","button_style":"default"},{"id":"reject","title":"NO","button_style":"default"}],"display_in_ui":true,"node_id":"1775717346519","node_title":"Human Input","resolved_default_values":{"some_field":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"form_token":"n7hFG4ZDYdGcgZ5VDc7EGM","type":"human_input_required"}],"status":"paused","created_at":1776087863,"elapsed_time":0.0,"total_tokens":0,"total_steps":2}}
|
||||
|
||||
data: {"event":"workflow_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.files":[],"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"created_at":1776087877,"reason":"resumption"}}
|
||||
data: {"event":"workflow_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"created_at":1776087877,"reason":"resumption"}}
|
||||
|
||||
data: {"event":"node_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"8d7e8e01-5159-4089-a4b6-3aa394992cc2","node_id":"1775717346519","node_type":"human-input","title":"Human Input","index":1,"predecessor_node_id":null,"inputs":null,"inputs_truncated":false,"created_at":1776087877,"extras":{},"iteration_id":null,"loop_id":null,"agent_strategy":null}}
|
||||
|
||||
|
||||
@ -429,7 +429,6 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
|
||||
"id": "a4959eb4-c852-4e0c-ac7a-348233f7f345",
|
||||
"workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
|
||||
"inputs": {
|
||||
"sys.files": [],
|
||||
"sys.user_id": "abc-123",
|
||||
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
|
||||
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
|
||||
@ -473,7 +472,6 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
|
||||
"index": 1,
|
||||
"predecessor_node_id": null,
|
||||
"inputs": {
|
||||
"sys.files": [],
|
||||
"sys.user_id": "abc-123",
|
||||
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
|
||||
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
|
||||
@ -484,7 +482,6 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
|
||||
"process_data": {},
|
||||
"process_data_truncated": false,
|
||||
"outputs": {
|
||||
"sys.files": [],
|
||||
"sys.user_id": "abc-123",
|
||||
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
|
||||
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
|
||||
@ -1048,7 +1045,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
|
||||
```streaming {{ title: '応答' }}
|
||||
event: ping
|
||||
|
||||
data: {"event":"workflow_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.files":[],"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","sys.timestamp":1776087863},"created_at":1776087863,"reason":"initial"}}
|
||||
data: {"event":"workflow_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","sys.timestamp":1776087863},"created_at":1776087863,"reason":"initial"}}
|
||||
|
||||
data: {"event":"node_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"b552d685-1119-4e6a-9a81-e91a23e5324b","node_id":"1775717266623","node_type":"start","title":"User Input","index":1,"predecessor_node_id":null,"inputs":null,"created_at":1776087863,"extras":{},"iteration_id":null,"loop_id":null}}
|
||||
|
||||
@ -1060,7 +1057,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
|
||||
|
||||
data: {"event":"workflow_paused","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","paused_nodes":["1775717346519"],"outputs":{},"reasons":[{"form_id":"019d8716-0fde-75da-8207-1458ccde76e5","form_content":"this is form 1:\n{{#$output.some_field#}}\n","inputs":[{"type":"paragraph","output_variable_name":"some_field","default":{"type":"variable","selector":["sys","workflow_run_id"],"value":""}}],"actions":[{"id":"approve","title":"YES","button_style":"default"},{"id":"reject","title":"NO","button_style":"default"}],"display_in_ui":true,"node_id":"1775717346519","node_title":"Human Input","resolved_default_values":{"some_field":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"form_token":"n7hFG4ZDYdGcgZ5VDc7EGM","type":"human_input_required"}],"status":"paused","created_at":1776087863,"elapsed_time":0.0,"total_tokens":0,"total_steps":2}}
|
||||
|
||||
data: {"event":"workflow_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.files":[],"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"created_at":1776087877,"reason":"resumption"}}
|
||||
data: {"event":"workflow_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"created_at":1776087877,"reason":"resumption"}}
|
||||
|
||||
data: {"event":"node_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"8d7e8e01-5159-4089-a4b6-3aa394992cc2","node_id":"1775717346519","node_type":"human-input","title":"Human Input","index":1,"predecessor_node_id":null,"inputs":null,"inputs_truncated":false,"created_at":1776087877,"extras":{},"iteration_id":null,"loop_id":null,"agent_strategy":null}}
|
||||
|
||||
|
||||
@ -428,7 +428,6 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
|
||||
"id": "a4959eb4-c852-4e0c-ac7a-348233f7f345",
|
||||
"workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
|
||||
"inputs": {
|
||||
"sys.files": [],
|
||||
"sys.user_id": "abc-123",
|
||||
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
|
||||
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
|
||||
@ -472,7 +471,6 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
|
||||
"index": 1,
|
||||
"predecessor_node_id": null,
|
||||
"inputs": {
|
||||
"sys.files": [],
|
||||
"sys.user_id": "abc-123",
|
||||
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
|
||||
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
|
||||
@ -483,7 +481,6 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
|
||||
"process_data": {},
|
||||
"process_data_truncated": false,
|
||||
"outputs": {
|
||||
"sys.files": [],
|
||||
"sys.user_id": "abc-123",
|
||||
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
|
||||
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
|
||||
@ -1041,7 +1038,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
|
||||
```streaming {{ title: 'Response' }}
|
||||
event: ping
|
||||
|
||||
data: {"event":"workflow_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.files":[],"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","sys.timestamp":1776087863},"created_at":1776087863,"reason":"initial"}}
|
||||
data: {"event":"workflow_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","sys.timestamp":1776087863},"created_at":1776087863,"reason":"initial"}}
|
||||
|
||||
data: {"event":"node_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"b552d685-1119-4e6a-9a81-e91a23e5324b","node_id":"1775717266623","node_type":"start","title":"User Input","index":1,"predecessor_node_id":null,"inputs":null,"created_at":1776087863,"extras":{},"iteration_id":null,"loop_id":null}}
|
||||
|
||||
@ -1053,7 +1050,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
|
||||
|
||||
data: {"event":"workflow_paused","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","paused_nodes":["1775717346519"],"outputs":{},"reasons":[{"form_id":"019d8716-0fde-75da-8207-1458ccde76e5","form_content":"this is form 1:\n{{#$output.some_field#}}\n","inputs":[{"type":"paragraph","output_variable_name":"some_field","default":{"type":"variable","selector":["sys","workflow_run_id"],"value":""}}],"actions":[{"id":"approve","title":"YES","button_style":"default"},{"id":"reject","title":"NO","button_style":"default"}],"display_in_ui":true,"node_id":"1775717346519","node_title":"Human Input","resolved_default_values":{"some_field":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"form_token":"n7hFG4ZDYdGcgZ5VDc7EGM","type":"human_input_required"}],"status":"paused","created_at":1776087863,"elapsed_time":0.0,"total_tokens":0,"total_steps":2}}
|
||||
|
||||
data: {"event":"workflow_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.files":[],"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"created_at":1776087877,"reason":"resumption"}}
|
||||
data: {"event":"workflow_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"created_at":1776087877,"reason":"resumption"}}
|
||||
|
||||
data: {"event":"node_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"8d7e8e01-5159-4089-a4b6-3aa394992cc2","node_id":"1775717346519","node_type":"human-input","title":"Human Input","index":1,"predecessor_node_id":null,"inputs":null,"inputs_truncated":false,"created_at":1776087877,"extras":{},"iteration_id":null,"loop_id":null,"agent_strategy":null}}
|
||||
|
||||
|
||||
@ -348,7 +348,6 @@ Workflow applications offers non-session support and is ideal for translation, a
"id": "a4959eb4-c852-4e0c-ac7a-348233f7f345",
"workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
"inputs": {
"sys.files": [],
"sys.user_id": "abc-123",
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
@ -392,7 +391,6 @@ Workflow applications offers non-session support and is ideal for translation, a
"index": 1,
"predecessor_node_id": null,
"inputs": {
"sys.files": [],
"sys.user_id": "abc-123",
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
@ -403,7 +401,6 @@ Workflow applications offers non-session support and is ideal for translation, a
"process_data": {},
"process_data_truncated": false,
"outputs": {
"sys.files": [],
"sys.user_id": "abc-123",
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
@ -927,7 +924,7 @@ Workflow applications offers non-session support and is ideal for translation, a
"id": "b1ad3277-089e-42c6-9dff-6820d94fbc76",
"workflow_id": "19eff89f-ec03-4f75-b0fc-897e7effea02",
"status": "succeeded",
"inputs": "{\"sys.files\": [], \"sys.user_id\": \"abc-123\"}",
"inputs": "{\"sys.user_id\": \"abc-123\"}",
"outputs": null,
"error": null,
"total_steps": 3,
@ -1139,7 +1136,7 @@ Workflow applications offers non-session support and is ideal for translation, a
```streaming {{ title: 'Response' }}
event: ping
data: {"event":"workflow_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.files":[],"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","sys.timestamp":1776087863},"created_at":1776087863,"reason":"initial"}}
data: {"event":"workflow_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","sys.timestamp":1776087863},"created_at":1776087863,"reason":"initial"}}
data: {"event":"node_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"b552d685-1119-4e6a-9a81-e91a23e5324b","node_id":"1775717266623","node_type":"start","title":"User Input","index":1,"predecessor_node_id":null,"inputs":null,"created_at":1776087863,"extras":{},"iteration_id":null,"loop_id":null}}
@ -1151,7 +1148,7 @@ Workflow applications offers non-session support and is ideal for translation, a
data: {"event":"workflow_paused","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","paused_nodes":["1775717346519"],"outputs":{},"reasons":[{"form_id":"019d8716-0fde-75da-8207-1458ccde76e5","form_content":"this is form 1:\n{{#$output.some_field#}}\n","inputs":[{"type":"paragraph","output_variable_name":"some_field","default":{"type":"variable","selector":["sys","workflow_run_id"],"value":""}}],"actions":[{"id":"approve","title":"YES","button_style":"default"},{"id":"reject","title":"NO","button_style":"default"}],"display_in_ui":true,"node_id":"1775717346519","node_title":"Human Input","resolved_default_values":{"some_field":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"form_token":"n7hFG4ZDYdGcgZ5VDc7EGM","type":"human_input_required"}],"status":"paused","created_at":1776087863,"elapsed_time":0.0,"total_tokens":0,"total_steps":2}}
data: {"event":"workflow_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.files":[],"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"created_at":1776087877,"reason":"resumption"}}
data: {"event":"workflow_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"created_at":1776087877,"reason":"resumption"}}
data: {"event":"node_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"8d7e8e01-5159-4089-a4b6-3aa394992cc2","node_id":"1775717346519","node_type":"human-input","title":"Human Input","index":1,"predecessor_node_id":null,"inputs":null,"inputs_truncated":false,"created_at":1776087877,"extras":{},"iteration_id":null,"loop_id":null,"agent_strategy":null}}
@ -348,7 +348,6 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
"id": "a4959eb4-c852-4e0c-ac7a-348233f7f345",
"workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
"inputs": {
"sys.files": [],
"sys.user_id": "abc-123",
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
@ -392,7 +391,6 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
"index": 1,
"predecessor_node_id": null,
"inputs": {
"sys.files": [],
"sys.user_id": "abc-123",
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
@ -403,7 +401,6 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
"process_data": {},
"process_data_truncated": false,
"outputs": {
"sys.files": [],
"sys.user_id": "abc-123",
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
@ -922,7 +919,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
"id": "b1ad3277-089e-42c6-9dff-6820d94fbc76",
"workflow_id": "19eff89f-ec03-4f75-b0fc-897e7effea02",
"status": "succeeded",
"inputs": "{\"sys.files\": [], \"sys.user_id\": \"abc-123\"}",
"inputs": "{\"sys.user_id\": \"abc-123\"}",
"outputs": null,
"error": null,
"total_steps": 3,
@ -1134,7 +1131,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
```streaming {{ title: '応答' }}
event: ping
data: {"event":"workflow_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.files":[],"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","sys.timestamp":1776087863},"created_at":1776087863,"reason":"initial"}}
data: {"event":"workflow_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","sys.timestamp":1776087863},"created_at":1776087863,"reason":"initial"}}
data: {"event":"node_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"b552d685-1119-4e6a-9a81-e91a23e5324b","node_id":"1775717266623","node_type":"start","title":"User Input","index":1,"predecessor_node_id":null,"inputs":null,"created_at":1776087863,"extras":{},"iteration_id":null,"loop_id":null}}
@ -1146,7 +1143,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
data: {"event":"workflow_paused","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","paused_nodes":["1775717346519"],"outputs":{},"reasons":[{"form_id":"019d8716-0fde-75da-8207-1458ccde76e5","form_content":"this is form 1:\n{{#$output.some_field#}}\n","inputs":[{"type":"paragraph","output_variable_name":"some_field","default":{"type":"variable","selector":["sys","workflow_run_id"],"value":""}}],"actions":[{"id":"approve","title":"YES","button_style":"default"},{"id":"reject","title":"NO","button_style":"default"}],"display_in_ui":true,"node_id":"1775717346519","node_title":"Human Input","resolved_default_values":{"some_field":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"form_token":"n7hFG4ZDYdGcgZ5VDc7EGM","type":"human_input_required"}],"status":"paused","created_at":1776087863,"elapsed_time":0.0,"total_tokens":0,"total_steps":2}}
data: {"event":"workflow_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.files":[],"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"created_at":1776087877,"reason":"resumption"}}
data: {"event":"workflow_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"created_at":1776087877,"reason":"resumption"}}
data: {"event":"node_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"8d7e8e01-5159-4089-a4b6-3aa394992cc2","node_id":"1775717346519","node_type":"human-input","title":"Human Input","index":1,"predecessor_node_id":null,"inputs":null,"inputs_truncated":false,"created_at":1776087877,"extras":{},"iteration_id":null,"loop_id":null,"agent_strategy":null}}
@ -338,7 +338,6 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
"id": "a4959eb4-c852-4e0c-ac7a-348233f7f345",
"workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
"inputs": {
"sys.files": [],
"sys.user_id": "abc-123",
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
@ -382,7 +381,6 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
"index": 1,
"predecessor_node_id": null,
"inputs": {
"sys.files": [],
"sys.user_id": "abc-123",
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
@ -393,7 +391,6 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
"process_data": {},
"process_data_truncated": false,
"outputs": {
"sys.files": [],
"sys.user_id": "abc-123",
"sys.app_id": "d1074979-f67e-4114-8691-e35878df9a89",
"sys.workflow_id": "e46514f1-c008-41ff-94b0-4f33d4b97d36",
@ -915,7 +912,7 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
"id": "b1ad3277-089e-42c6-9dff-6820d94fbc76",
"workflow_id": "19eff89f-ec03-4f75-b0fc-897e7effea02",
"status": "succeeded",
"inputs": "{\"sys.files\": [], \"sys.user_id\": \"abc-123\"}",
"inputs": "{\"sys.user_id\": \"abc-123\"}",
"outputs": null,
"error": null,
"total_steps": 3,
@ -1127,7 +1124,7 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
```streaming {{ title: 'Response' }}
event: ping
data: {"event":"workflow_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.files":[],"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","sys.timestamp":1776087863},"created_at":1776087863,"reason":"initial"}}
data: {"event":"workflow_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","sys.timestamp":1776087863},"created_at":1776087863,"reason":"initial"}}
data: {"event":"node_started","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"id":"b552d685-1119-4e6a-9a81-e91a23e5324b","node_id":"1775717266623","node_type":"start","title":"User Input","index":1,"predecessor_node_id":null,"inputs":null,"created_at":1776087863,"extras":{},"iteration_id":null,"loop_id":null}}
@ -1139,7 +1136,7 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
data: {"event":"workflow_paused","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","data":{"workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","paused_nodes":["1775717346519"],"outputs":{},"reasons":[{"form_id":"019d8716-0fde-75da-8207-1458ccde76e5","form_content":"this is form 1:\n{{#$output.some_field#}}\n","inputs":[{"type":"paragraph","output_variable_name":"some_field","default":{"type":"variable","selector":["sys","workflow_run_id"],"value":""}}],"actions":[{"id":"approve","title":"YES","button_style":"default"},{"id":"reject","title":"NO","button_style":"default"}],"display_in_ui":true,"node_id":"1775717346519","node_title":"Human Input","resolved_default_values":{"some_field":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"form_token":"n7hFG4ZDYdGcgZ5VDc7EGM","type":"human_input_required"}],"status":"paused","created_at":1776087863,"elapsed_time":0.0,"total_tokens":0,"total_steps":2}}
data: {"event":"workflow_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.files":[],"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"created_at":1776087877,"reason":"resumption"}}
data: {"event":"workflow_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","inputs":{"sys.user_id":"abc-123","sys.app_id":"d1074979-f67e-4114-8691-e35878df9a89","sys.workflow_id":"e46514f1-c008-41ff-94b0-4f33d4b97d36","sys.workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c"},"created_at":1776087877,"reason":"resumption"}}
data: {"event":"node_started","workflow_run_id":"5d7ef348-e1c1-4f6d-bb9b-62cc2fb2ef3c","task_id":"1784c3dd-20eb-4919-bd5d-a8d800b74ada","data":{"id":"8d7e8e01-5159-4089-a4b6-3aa394992cc2","node_id":"1775717346519","node_type":"human-input","title":"Human Input","index":1,"predecessor_node_id":null,"inputs":null,"inputs_truncated":false,"created_at":1776087877,"extras":{},"iteration_id":null,"loop_id":null,"agent_strategy":null}}
@ -1,3 +1,5 @@
import type { AnswerNodeType } from '@/app/components/workflow/nodes/answer/types'
import type { LLMNodeType } from '@/app/components/workflow/nodes/llm/types'
import type { StartNodeType } from '@/app/components/workflow/nodes/start/types'
import { useTranslation } from 'react-i18next'
import {
@ -24,37 +26,41 @@ export const useWorkflowTemplate = () => {
})

if (isChatMode) {
const llmData: LLMNodeType = {
...(llmDefault.defaultValue as LLMNodeType),
desc: '',
memory: {
window: { enabled: false, size: 10 },
query_prompt_template: '{{#sys.query#}}',
},
selected: true,
type: llmDefault.metaData.type,
title: t(`blocks.${llmDefault.metaData.type}`, { ns: 'workflow' }),
}
const { newNode: llmNode } = generateNewNode({
id: 'llm',
data: {
...llmDefault.defaultValue,
memory: {
window: { enabled: false, size: 10 },
query_prompt_template: '{{#sys.query#}}\n\n{{#sys.files#}}',
},
selected: true,
type: llmDefault.metaData.type,
title: t(`blocks.${llmDefault.metaData.type}`, { ns: 'workflow' }),
},
data: llmData,
position: {
x: START_INITIAL_POSITION.x + NODE_WIDTH_X_OFFSET,
y: START_INITIAL_POSITION.y,
},
} as any)
})

const answerData: AnswerNodeType = {
...(answerDefault.defaultValue as AnswerNodeType),
answer: `{{#${llmNode.id}.text#}}`,
desc: '',
type: answerDefault.metaData.type,
title: t(`blocks.${answerDefault.metaData.type}`, { ns: 'workflow' }),
}
const { newNode: answerNode } = generateNewNode({
id: 'answer',
data: {
...answerDefault.defaultValue,
answer: `{{#${llmNode.id}.text#}}`,
type: answerDefault.metaData.type,
title: t(`blocks.${answerDefault.metaData.type}`, { ns: 'workflow' }),
},
data: answerData,
position: {
x: START_INITIAL_POSITION.x + NODE_WIDTH_X_OFFSET * 2,
y: START_INITIAL_POSITION.y,
},
} as any)
})

const startToLlmEdge = {
id: `${startNode.id}-${llmNode.id}`,
@ -85,6 +85,7 @@ export const getGlobalVars = (isChatMode: boolean): Var[] => {

export const VAR_SHOW_NAME_MAP: Record<string, string> = {
'sys.query': 'query',
// TODO: Remove this display alias after all persisted workflows are migrated.
'sys.files': 'files',
}
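The map above now keeps `sys.files` purely as a display alias for legacy workflows. A small sketch of how such a lookup might be applied when rendering variable names (the helper name is hypothetical, not the project's actual function):

```ts
// Hypothetical helper: resolve a display name from the alias map, falling back
// to the raw variable name when no alias exists.
const VAR_SHOW_NAME_MAP: Record<string, string> = {
  'sys.query': 'query',
  'sys.files': 'files', // display alias kept only for legacy workflows
}

function toDisplayName(variable: string): string {
  return VAR_SHOW_NAME_MAP[variable] ?? variable
}

console.log(toDisplayName('sys.query')) // "query"
console.log(toDisplayName('custom.input')) // "custom.input"
```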
@ -5,17 +5,12 @@ import { Resolution } from '@/types/app'
import useConfigVision from '../use-config-vision'

const mockUseTextGenerationCurrentProviderAndModelAndModelList = vi.hoisted(() => vi.fn())
const mockUseIsChatMode = vi.hoisted(() => vi.fn())

vi.mock('@/app/components/header/account-setting/model-provider-page/hooks', () => ({
useTextGenerationCurrentProviderAndModelAndModelList: (...args: unknown[]) =>
mockUseTextGenerationCurrentProviderAndModelAndModelList(...args),
}))

vi.mock('../use-workflow', () => ({
useIsChatMode: () => mockUseIsChatMode(),
}))

const createModel = (overrides: Partial<ModelConfig> = {}): ModelConfig => ({
provider: 'openai',
name: 'gpt-4o',
@ -32,7 +27,6 @@ const createVisionPayload = (overrides: Partial<{ enabled: boolean, configs?: Vi
describe('useConfigVision', () => {
beforeEach(() => {
vi.clearAllMocks()
mockUseIsChatMode.mockReturnValue(false)
mockUseTextGenerationCurrentProviderAndModelAndModelList.mockReturnValue({
currentModel: {
features: [],
@ -40,9 +34,8 @@ describe('useConfigVision', () => {
})
})

it('should expose vision capability and enable default chat configs for vision models', () => {
it('should expose vision capability and require an explicit file variable for vision models', () => {
const onChange = vi.fn()
mockUseIsChatMode.mockReturnValue(true)
mockUseTextGenerationCurrentProviderAndModelAndModelList.mockReturnValue({
currentModel: {
features: [ModelFeatureEnum.vision],
@ -64,7 +57,7 @@ describe('useConfigVision', () => {
enabled: true,
configs: {
detail: Resolution.high,
variable_selector: ['sys', 'files'],
variable_selector: [],
},
})
})
@ -6,7 +6,6 @@ import {
} from '@/app/components/header/account-setting/model-provider-page/declarations'
import { useTextGenerationCurrentProviderAndModelAndModelList } from '@/app/components/header/account-setting/model-provider-page/hooks'
import { Resolution } from '@/types/app'
import { useIsChatMode } from './use-workflow'

type Payload = {
enabled: boolean
@ -32,8 +31,6 @@ const useConfigVision = (model: ModelConfig, {
},
)

const isChatMode = useIsChatMode()

const getIsVisionModel = useCallback(() => {
return !!currModel?.features?.includes(ModelFeatureEnum.vision)
}, [currModel])
@ -43,10 +40,10 @@ const useConfigVision = (model: ModelConfig, {
const handleVisionResolutionEnabledChange = useCallback((enabled: boolean) => {
const newPayload = produce(payload, (draft) => {
draft.enabled = enabled
if (enabled && isChatMode) {
if (enabled) {
draft.configs = {
detail: Resolution.high,
variable_selector: ['sys', 'files'],
variable_selector: [],
}
}
else if (!enabled) {
@ -54,7 +51,7 @@ const useConfigVision = (model: ModelConfig, {
}
})
onChange(newPayload)
}, [isChatMode, onChange, payload])
}, [onChange, payload])

const handleVisionResolutionChange = useCallback((config: VisionSetting) => {
const newPayload = produce(payload, (draft) => {
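The hook change above means enabling vision no longer auto-binds `sys.files`: the selector starts empty and the user must pick a file variable explicitly, in any app mode. A standalone sketch of the new enable path, with simplified stand-in types rather than the project's declarations:

```ts
import { produce } from 'immer'

// Simplified stand-ins for the project's Resolution / VisionSetting types.
enum Resolution { high = 'high', low = 'low' }
type VisionPayload = {
  enabled: boolean
  configs?: { detail: Resolution, variable_selector: string[] }
}

// Enabling vision now seeds an empty selector instead of ['sys', 'files'].
function enableVision(payload: VisionPayload): VisionPayload {
  return produce(payload, (draft) => {
    draft.enabled = true
    draft.configs = {
      detail: Resolution.high,
      variable_selector: [],
    }
  })
}

console.log(enableVision({ enabled: false }))
// { enabled: true, configs: { detail: 'high', variable_selector: [] } }
```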
@ -55,7 +55,7 @@ type Props = {

const MEMORY_DEFAULT: Memory = {
window: { enabled: false, size: WINDOW_SIZE_DEFAULT },
query_prompt_template: '{{#sys.query#}}\n\n{{#sys.files#}}',
query_prompt_template: '{{#sys.query#}}',
}

const MemoryConfig: FC<Props> = ({
@ -91,7 +91,7 @@ const MemoryConfig: FC<Props> = ({
}
else {
limitedSize = Number.parseInt(limitedSize as string, 10)
if (isNaN(limitedSize))
if (Number.isNaN(limitedSize))
limitedSize = WINDOW_SIZE_DEFAULT

if (limitedSize < WINDOW_SIZE_MIN)
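The memory-window handling above switches to `Number.isNaN` and drops the `{{#sys.files#}}` suffix from the default query template. A compact sketch of the size normalisation, using illustrative bounds rather than the component's exported constants:

```ts
// Sketch of the window-size normalisation shown above: parse the raw input,
// fall back to the default when it is not a number, then clamp to the allowed
// range. The constants are illustrative values, not the component's exports.
const WINDOW_SIZE_DEFAULT = 10
const WINDOW_SIZE_MIN = 1
const WINDOW_SIZE_MAX = 100

function normaliseWindowSize(raw: string | number): number {
  let size = typeof raw === 'number' ? raw : Number.parseInt(raw, 10)
  if (Number.isNaN(size))
    size = WINDOW_SIZE_DEFAULT
  if (size < WINDOW_SIZE_MIN)
    size = WINDOW_SIZE_MIN
  if (size > WINDOW_SIZE_MAX)
    size = WINDOW_SIZE_MAX
  return size
}

console.log(normaliseWindowSize('abc')) // 10
console.log(normaliseWindowSize('250')) // 100
```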
@ -3,10 +3,11 @@ import type { HumanInputNodeType } from '@/app/components/workflow/nodes/human-i
import type { LLMNodeType } from '@/app/components/workflow/nodes/llm/types'
import type { Node, PromptItem } from '@/app/components/workflow/types'
import { describe, expect, it } from 'vitest'
import { createStartNode } from '@/app/components/workflow/__tests__/fixtures'
import { DeliveryMethodType } from '@/app/components/workflow/nodes/human-input/types'
import { BlockEnum, EditionType, PromptRole } from '@/app/components/workflow/types'
import { BlockEnum, EditionType, InputVarType, PromptRole, VarType } from '@/app/components/workflow/types'
import { AppModeEnum } from '@/types/app'
import { getNodeUsedVars, updateNodeVars } from '../utils'
import { getNodeOutputVars, getNodeUsedVars, toNodeAvailableVars, updateNodeVars } from '../utils'

const createNode = <T>(data: Node<T>['data']): Node<T> => ({
id: 'node-1',
@ -89,6 +90,54 @@ describe('variable utils', () => {
})
})

describe('node output variables', () => {
it('should expose sys.query but not deprecated system file output for start nodes in chat mode', () => {
const deprecatedSystemFileVariable = ['sys', 'files'].join('.')
const startNode = createStartNode({
id: 'start',
data: {
type: BlockEnum.Start,
variables: [{
label: 'Files',
variable: 'files',
type: InputVarType.multiFiles,
required: false,
}],
},
})

expect(getNodeOutputVars(startNode, true)).toEqual([
['start', 'files'],
['start', 'sys', 'query'],
])

const availableVars = toNodeAvailableVars({
beforeNodes: [startNode],
isChatMode: true,
filterVar: () => true,
allPluginInfoList: {},
})

expect(availableVars).toEqual(expect.arrayContaining([
expect.objectContaining({
nodeId: 'start',
vars: expect.arrayContaining([
expect.objectContaining({ variable: 'files', type: VarType.arrayFile }),
expect.objectContaining({ variable: 'sys.query', type: VarType.string }),
]),
}),
]))
expect(availableVars).not.toEqual(expect.arrayContaining([
expect.objectContaining({
nodeId: 'start',
vars: expect.arrayContaining([
expect.objectContaining({ variable: deprecatedSystemFileVariable }),
]),
}),
]))
})
})

describe('updateNodeVars', () => {
it('should replace answer prompt references', () => {
const node = createNode<AnswerNodeType>({
@ -9,7 +9,6 @@ import {

describe('var-reference-vars helpers', () => {
it('should derive display names for flat and mapped variables', () => {
expect(getVariableDisplayName('sys.files', false)).toBe('files')
expect(getVariableDisplayName('current', true, true)).toBe('current_code')
expect(getVariableDisplayName('foo', true, false)).toBe('foo')
})

@ -373,10 +373,6 @@ const formatItem = (
type: VarType.string,
})
}
res.vars.push({
variable: 'sys.files',
type: VarType.arrayFile,
})
break
}

@ -2106,7 +2102,6 @@ export const getNodeOutputVars = (

if (isChatMode) {
res.push([id, 'sys', 'query'])
res.push([id, 'sys', 'files'])
}
break
}
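With the two removals above, a chat-mode start node no longer advertises `sys.files` as an output variable. An illustrative reduction of the new behaviour, not the project's actual implementation:

```ts
// Sketch only: a start node now exposes its user-defined variables plus
// sys.query in chat mode, with no implicit sys.files output.
type ValueSelector = string[]

function startNodeOutputVars(
  nodeId: string,
  variables: { variable: string }[],
  isChatMode: boolean,
): ValueSelector[] {
  const res: ValueSelector[] = variables.map(v => [nodeId, v.variable])
  if (isChatMode)
    res.push([nodeId, 'sys', 'query']) // sys.files is no longer appended here
  return res
}

console.log(startNodeOutputVars('start', [{ variable: 'files' }], true))
// [['start', 'files'], ['start', 'sys', 'query']]
```

This matches the new `node output variables` test added in the hunk above: explicit file inputs remain available, while the deprecated system file selector disappears.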
@ -747,6 +747,7 @@ const useOneStepRun = <T>({
const isStartNode = data.type === BlockEnum.Start
const postData: Record<string, any> = {}
if (isStartNode) {
// TODO: Remove this legacy submit-data fallback after all persisted workflows are migrated.
const { '#sys.query#': query, '#sys.files#': files, ...inputs } = submitData
if (isChatMode)
postData.conversation_id = ''
@ -99,12 +99,12 @@ describe('list-operator/node', () => {
<Node
id="list-node"
data={createData({
variable: ['sys', 'files'],
variable: ['sys', 'query'],
})}
/>,
)

expect(screen.getByText('Start:start:sys.files')).toBeInTheDocument()
expect(screen.getByText('Start:start:sys.query')).toBeInTheDocument()
})

it('returns null when no input variable is configured', () => {
@ -124,7 +124,7 @@ describe('use-single-run-form-params helpers', () => {
mockGetNodeUsedVars.mockImplementation((node: Node) => {
switch (node.id) {
case 'tool-a':
return [['sys', 'files']]
return [['sys', 'query']]
case 'tool-b':
return [['start-node', 'answer'], ['current-node', 'self'], ['inner-node', 'secret']]
default:
@ -132,7 +132,7 @@ describe('use-single-run-form-params helpers', () => {
}
})
mockGetNodeUsedVarPassToServerKey.mockImplementation((_node: Node, selector: string[]) => {
return selector[0] === 'sys' ? ['sys_files', 'sys_files_backup'] : 'answer_key'
return selector[0] === 'sys' ? ['sys_query', 'sys_query_backup'] : 'answer_key'
})
mockGetNodeInfoById.mockImplementation((nodes: Node[], id: string) => nodes.find(node => node.id === id))
mockIsSystemVar.mockImplementation((selector: string[]) => selector[0] === 'sys')
@ -152,11 +152,11 @@ describe('use-single-run-form-params helpers', () => {

expect(toVarInputs).toHaveBeenCalledWith([
expect.objectContaining({
variable: 'sys.files',
variable: 'sys.query',
label: {
nodeType: BlockEnum.Start,
nodeName: 'System',
variable: 'sys.files',
variable: 'sys.query',
},
}),
expect.objectContaining({
@ -169,10 +169,10 @@ describe('use-single-run-form-params helpers', () => {
}),
])
expect(result.usedOutVars).toEqual([
createInputVar('sys.files', {
createInputVar('sys.query', {
nodeType: BlockEnum.Start,
nodeName: 'System',
variable: 'sys.files',
variable: 'sys.query',
}),
createInputVar('start-node.answer', {
nodeType: BlockEnum.Start,
@ -181,11 +181,11 @@ describe('use-single-run-form-params helpers', () => {
}),
])
expect(result.allVarObject).toEqual({
[['sys.files', 'tool-a', 0].join(VALUE_SELECTOR_DELIMITER)]: {
inSingleRunPassedKey: 'sys_files',
[['sys.query', 'tool-a', 0].join(VALUE_SELECTOR_DELIMITER)]: {
inSingleRunPassedKey: 'sys_query',
},
[['sys.files', 'tool-a', 1].join(VALUE_SELECTOR_DELIMITER)]: {
inSingleRunPassedKey: 'sys_files_backup',
[['sys.query', 'tool-a', 1].join(VALUE_SELECTOR_DELIMITER)]: {
inSingleRunPassedKey: 'sys_query_backup',
},
[['start-node.answer', 'tool-b', 0].join(VALUE_SELECTOR_DELIMITER)]: {
inSingleRunPassedKey: 'answer_key',
@ -8,6 +8,7 @@ import Panel from '../panel'
const mockUseConfig = vi.hoisted(() => vi.fn())
const mockConfigVarModal = vi.hoisted(() => vi.fn())
const mockRemoveEffectVarConfirm = vi.hoisted(() => vi.fn())
const legacyFilesVariable = ['userinput', 'files'].join('.')

vi.mock('../use-config', () => ({
__esModule: true,
@ -90,7 +91,7 @@ describe('StartPanel', () => {
render(<Panel id="start-node" data={createData()} panelProps={{} as PanelProps} />)

expect(screen.getByText('userinput.query')).toBeInTheDocument()
expect(screen.getByText('userinput.files')).toBeInTheDocument()
expect(screen.getByText(legacyFilesVariable)).toBeInTheDocument()
expect(screen.queryByText('LEGACY')).not.toBeInTheDocument()

fireEvent.click(screen.getByRole('button', { name: 'common.operation.add workflow.nodes.start.inputField' }))
@ -116,6 +117,7 @@ describe('StartPanel', () => {
render(<Panel id="start-node" data={createData()} panelProps={{} as PanelProps} />)

expect(screen.queryByText('userinput.query')).not.toBeInTheDocument()
expect(screen.getByText(legacyFilesVariable)).toBeInTheDocument()
expect(screen.getByText('LEGACY')).toBeInTheDocument()
expect(screen.getByText('remove-confirm')).toBeInTheDocument()
@ -37,8 +37,9 @@ describe('start/use-single-run-form-params', () => {
})
})

it('should include sys.query and sys.files dependencies for chat mode', () => {
it('should include sys.query but not deprecated system file dependencies for chat mode', () => {
mockUseIsChatMode.mockReturnValue(true)
const deprecatedSystemFileVariable = `#${['sys', 'files'].join('.')}#`

const { result } = renderHook(() => useSingleRunFormParams({
id: 'start-node',
@ -55,7 +56,9 @@ describe('start/use-single-run-form-params', () => {
expect(result.current.forms[0]!.inputs).toEqual(expect.arrayContaining([
expect.objectContaining({ variable: 'query' }),
expect.objectContaining({ variable: '#sys.query#', required: true }),
expect.objectContaining({ variable: '#sys.files#', required: false }),
]))
expect(result.current.forms[0]!.inputs).toEqual(expect.not.arrayContaining([
expect.objectContaining({ variable: deprecatedSystemFileVariable }),
]))

result.current.forms[0]!.onChange({ query: 'updated' })
@ -63,7 +66,6 @@ describe('start/use-single-run-form-params', () => {
expect(setRunInputData).toHaveBeenCalledWith({ query: 'updated' })
expect(result.current.getDependentVars()).toEqual([
['start-node', 'query'],
['sys', 'files'],
['sys', 'query'],
])
expect(result.current.getDependentVar('query')).toEqual(['start-node', 'query'])
@ -87,7 +89,6 @@ describe('start/use-single-run-form-params', () => {
]))
expect(result.current.getDependentVars()).toEqual([
['start-node', 'query'],
['sys', 'files'],
])
})
})
@ -6,12 +6,25 @@ import { useTranslation } from 'react-i18next'
import ConfigVarModal from '@/app/components/app/configuration/config-var/config-modal'
import Field from '@/app/components/workflow/nodes/_base/components/field'
import Split from '@/app/components/workflow/nodes/_base/components/split'
import { InputVarType } from '@/app/components/workflow/types'
import RemoveEffectVarConfirm from '../_base/components/remove-effect-var-confirm'
import VarItem from './components/var-item'
import VarList from './components/var-list'
import useConfig from './use-config'

const i18nPrefix = 'nodes.start'
const chatQueryInputVar: InputVar = {
variable: 'userinput.query',
label: '',
type: InputVarType.textInput,
required: false,
}
const userInputFilesVar: InputVar = {
variable: 'userinput.files',
label: '',
type: InputVarType.multiFiles,
required: false,
}

const Panel: FC<NodePanelProps<StartNodeType>> = ({
id,
@ -67,34 +80,31 @@ const Panel: FC<NodePanelProps<StartNodeType>> = ({
/>

<div className="mt-1 space-y-1">
<Split className="my-2" />
{
isChatMode && (
<VarItem
readonly
payload={{
variable: 'userinput.query',
} as any}
rightContent={(
<div className="text-xs font-normal text-text-tertiary">
String
</div>
)}
/>
<>
<Split className="my-2" />
<VarItem
readonly
payload={chatQueryInputVar}
rightContent={(
<div className="text-xs font-normal text-text-tertiary">
String
</div>
)}
/>
</>
)
}

<VarItem
readonly
showLegacyBadge={!isChatMode}
payload={{
variable: 'userinput.files',
} as any}
payload={userInputFilesVar}
rightContent={(
<div className="text-xs font-normal text-text-tertiary">
Array[File]
</div>
)}
showLegacyBadge={!isChatMode}
/>
</div>
</>
@ -9,10 +9,10 @@ import { useIsChatMode } from '../../hooks'
type Params = {
id: string
payload: StartNodeType
runInputData: Record<string, any>
runInputDataRef: RefObject<Record<string, any>>
runInputData: FormProps['values']
runInputDataRef: RefObject<FormProps['values']>
getInputVars: (textList: string[]) => InputVar[]
setRunInputData: (data: Record<string, any>) => void
setRunInputData: FormProps['onChange']
toVarInputs: (variables: Variable[]) => InputVar[]
}
const useSingleRunFormParams = ({
@ -42,13 +42,6 @@ const useSingleRunFormParams = ({
})
}

inputs.push({
label: 'sys.files',
variable: '#sys.files#',
type: InputVarType.multiFiles,
required: false,
})

forms.push(
{
label: t('nodes.llm.singleRun.variable', { ns: 'workflow' })!,
@ -65,7 +58,7 @@ const useSingleRunFormParams = ({
const inputVars = payload.variables.map((item) => {
return [id, item.variable]
})
const vars: ValueSelector[] = [...inputVars, ['sys', 'files']]
const vars: ValueSelector[] = [...inputVars]

if (isChatMode)
vars.push(['sys', 'query'])
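After this hunk, the single-run form for the start node no longer injects a `#sys.files#` input, and its dependent variables are just the node's own inputs plus `sys.query` in chat mode. A self-contained sketch of the resulting dependent-variable computation, with simplified names rather than the hook's real signature:

```ts
// Sketch only: mirrors the new getDependentVars behaviour shown above.
type ValueSelector = string[]

function getDependentVars(
  nodeId: string,
  variables: { variable: string }[],
  isChatMode: boolean,
): ValueSelector[] {
  const inputVars = variables.map(item => [nodeId, item.variable])
  const vars: ValueSelector[] = [...inputVars]
  if (isChatMode)
    vars.push(['sys', 'query']) // ['sys', 'files'] is no longer added
  return vars
}

console.log(getDependentVars('start-node', [{ variable: 'query' }], true))
// [['start-node', 'query'], ['sys', 'query']]
```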