Compare commits


1 Commit

Author SHA1 Message Date
73a386c443 refactor: strip external imports in workflow template transform 2026-02-04 18:52:49 +08:00
18 changed files with 131 additions and 563 deletions

.github/CODEOWNERS vendored
View File

@@ -239,7 +239,7 @@
/web/app/components/base/ @iamjoel @zxhlyh
# Frontend - Base Components Tests
/web/app/components/base/**/*.spec.tsx @hyoban @CodingOnStar
/web/app/components/base/**/__tests__/ @hyoban @CodingOnStar
# Frontend - Utils and Hooks
/web/utils/classnames.ts @iamjoel @zxhlyh

View File

@@ -136,7 +136,6 @@ ignore_imports =
core.workflow.nodes.llm.llm_utils -> models.provider
core.workflow.nodes.llm.llm_utils -> services.credit_pool_service
core.workflow.nodes.llm.node -> core.tools.signature
core.workflow.nodes.template_transform.template_transform_node -> configs
core.workflow.nodes.tool.tool_node -> core.callback_handler.workflow_tool_callback_handler
core.workflow.nodes.tool.tool_node -> core.tools.tool_engine
core.workflow.nodes.tool.tool_node -> core.tools.tool_manager

View File

@@ -47,6 +47,7 @@ class DifyNodeFactory(NodeFactory):
code_providers: Sequence[type[CodeNodeProvider]] | None = None,
code_limits: CodeNodeLimits | None = None,
template_renderer: Jinja2TemplateRenderer | None = None,
template_transform_max_output_length: int | None = None,
http_request_http_client: HttpClientProtocol | None = None,
http_request_tool_file_manager_factory: Callable[[], ToolFileManager] = ToolFileManager,
http_request_file_manager: FileManagerProtocol | None = None,
@@ -68,6 +69,11 @@ class DifyNodeFactory(NodeFactory):
max_object_array_length=dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH,
)
self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
self._template_transform_max_output_length = (
template_transform_max_output_length
if template_transform_max_output_length is not None
else dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
)
self._http_request_http_client = http_request_http_client or ssrf_proxy
self._http_request_tool_file_manager_factory = http_request_tool_file_manager_factory
self._http_request_file_manager = http_request_file_manager or file_manager
@@ -122,6 +128,7 @@ class DifyNodeFactory(NodeFactory):
graph_init_params=self.graph_init_params,
graph_runtime_state=self.graph_runtime_state,
template_renderer=self._template_renderer,
max_output_length=self._template_transform_max_output_length,
)
if node_type == NodeType.HTTP_REQUEST:
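
The fallback added here uses an explicit `is not None` check rather than `or`, so a caller passing a falsy override such as `0` keeps it instead of silently falling back to `dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH`. A minimal sketch of the idiom; `resolve_limit` is illustrative, not part of the codebase:

def resolve_limit(override: int | None, default: int) -> int:
    # Explicit None check: an override of 0 is a deliberate value, not "unset".
    return override if override is not None else default

assert resolve_limit(None, 400_000) == 400_000  # unset -> fall back to the default
assert resolve_limit(0, 400_000) == 0           # a falsy override is still honored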

View File

@@ -1,7 +1,6 @@
from collections.abc import Mapping, Sequence
from typing import TYPE_CHECKING, Any
from configs import dify_config
from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
from core.workflow.node_events import NodeRunResult
from core.workflow.nodes.base.node import Node
@@ -16,12 +15,13 @@ if TYPE_CHECKING:
from core.workflow.entities import GraphInitParams
from core.workflow.runtime import GraphRuntimeState
MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH = 400_000
class TemplateTransformNode(Node[TemplateTransformNodeData]):
node_type = NodeType.TEMPLATE_TRANSFORM
_template_renderer: Jinja2TemplateRenderer
_max_output_length: int
def __init__(
self,
@@ -31,6 +31,7 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
graph_runtime_state: "GraphRuntimeState",
*,
template_renderer: Jinja2TemplateRenderer | None = None,
max_output_length: int | None = None,
) -> None:
super().__init__(
id=id,
@@ -39,6 +40,9 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
graph_runtime_state=graph_runtime_state,
)
self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
self._max_output_length = (
max_output_length if max_output_length is not None else DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH
)
@classmethod
def get_default_config(cls, filters: Mapping[str, object] | None = None) -> Mapping[str, object]:
@@ -69,11 +73,11 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
except TemplateRenderError as e:
return NodeRunResult(inputs=variables, status=WorkflowNodeExecutionStatus.FAILED, error=str(e))
if len(rendered) > MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH:
if len(rendered) > self._max_output_length:
return NodeRunResult(
inputs=variables,
status=WorkflowNodeExecutionStatus.FAILED,
error=f"Output length exceeds {MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH} characters",
error=f"Output length exceeds {self._max_output_length} characters",
)
return NodeRunResult(
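
Binding the limit per instance replaces the old module-level `MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH`, which froze `dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH` at import time. A self-contained sketch of the guard with an injectable limit; `check_rendered_output` is illustrative and raises rather than building a real `NodeRunResult`:

DEFAULT_MAX = 400_000  # mirrors DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH

def check_rendered_output(rendered: str, max_output_length: int = DEFAULT_MAX) -> str:
    # Reject output that exceeds the per-instance limit.
    if len(rendered) > max_output_length:
        raise ValueError(f"Output length exceeds {max_output_length} characters")
    return rendered

assert check_rendered_output("ok", max_output_length=10) == "ok"
try:
    check_rendered_output("x" * 11, max_output_length=10)
except ValueError as e:
    assert "exceeds 10 characters" in str(e)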

View File

@@ -1,6 +1,6 @@
[project]
name = "dify-api"
version = "1.12.1"
version = "1.12.0"
requires-python = ">=3.11,<3.13"
dependencies = [

View File

@@ -259,8 +259,8 @@ def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):
def _delete_app_workflow_archive_logs(tenant_id: str, app_id: str):
def del_workflow_archive_log(session, workflow_archive_log_id: str):
session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
def del_workflow_archive_log(workflow_archive_log_id: str):
db.session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
synchronize_session=False
)
@@ -420,7 +420,7 @@ def delete_draft_variables_batch(app_id: str, batch_size: int = 1000) -> int:
total_files_deleted = 0
while True:
with session_factory.create_session() as session, session.begin():
with session_factory.create_session() as session:
# Get a batch of draft variable IDs along with their file_ids
query_sql = """
SELECT id, file_id FROM workflow_draft_variables
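
The change from `with session_factory.create_session() as session, session.begin():` to a bare session moves transaction control into the function: `session.begin()` commits automatically on clean exit and rolls back on exception, while a bare session persists nothing unless the loop commits each batch itself. A small contrast in plain SQLAlchemy, assuming the project's `session_factory` wraps a standard `Session`:

from sqlalchemy import create_engine, text
from sqlalchemy.orm import Session

engine = create_engine("sqlite:///:memory:")

# With session.begin(): the context manager commits on success.
with Session(engine) as session, session.begin():
    session.execute(text("CREATE TABLE t (id INTEGER)"))

# Bare session: nothing is persisted without an explicit commit.
with Session(engine) as session:
    session.execute(text("INSERT INTO t (id) VALUES (1)"))
    session.commit()  # omitting this would discard the insert on close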

View File

@@ -10,10 +10,7 @@ from models import Tenant
from models.enums import CreatorUserRole
from models.model import App, UploadFile
from models.workflow import WorkflowDraftVariable, WorkflowDraftVariableFile
from tasks.remove_app_and_related_data_task import (
_delete_draft_variables,
delete_draft_variables_batch,
)
from tasks.remove_app_and_related_data_task import _delete_draft_variables, delete_draft_variables_batch
@pytest.fixture
@@ -300,18 +297,12 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
def test_delete_draft_variables_with_offload_data(self, mock_storage, setup_offload_test_data):
data = setup_offload_test_data
app_id = data["app"].id
upload_file_ids = [uf.id for uf in data["upload_files"]]
variable_file_ids = [vf.id for vf in data["variable_files"]]
mock_storage.delete.return_value = None
with session_factory.create_session() as session:
draft_vars_before = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
var_files_before = (
session.query(WorkflowDraftVariableFile)
.where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
.count()
)
upload_files_before = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
var_files_before = session.query(WorkflowDraftVariableFile).count()
upload_files_before = session.query(UploadFile).count()
assert draft_vars_before == 3
assert var_files_before == 2
assert upload_files_before == 2
@@ -324,12 +315,8 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
assert draft_vars_after == 0
with session_factory.create_session() as session:
var_files_after = (
session.query(WorkflowDraftVariableFile)
.where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
.count()
)
upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
var_files_after = session.query(WorkflowDraftVariableFile).count()
upload_files_after = session.query(UploadFile).count()
assert var_files_after == 0
assert upload_files_after == 0
@@ -342,8 +329,6 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
def test_delete_draft_variables_storage_failure_continues_cleanup(self, mock_storage, setup_offload_test_data):
data = setup_offload_test_data
app_id = data["app"].id
upload_file_ids = [uf.id for uf in data["upload_files"]]
variable_file_ids = [vf.id for vf in data["variable_files"]]
mock_storage.delete.side_effect = [Exception("Storage error"), None]
deleted_count = delete_draft_variables_batch(app_id, batch_size=10)
@@ -354,12 +339,8 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
assert draft_vars_after == 0
with session_factory.create_session() as session:
var_files_after = (
session.query(WorkflowDraftVariableFile)
.where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
.count()
)
upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
var_files_after = session.query(WorkflowDraftVariableFile).count()
upload_files_after = session.query(UploadFile).count()
assert var_files_after == 0
assert upload_files_after == 0
@@ -414,275 +395,3 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
if app2_obj:
session.delete(app2_obj)
session.commit()
class TestDeleteDraftVariablesSessionCommit:
"""Test suite to verify session commit behavior in delete_draft_variables_batch."""
@pytest.fixture
def setup_offload_test_data(self, app_and_tenant):
"""Create test data with offload files for session commit tests."""
from core.variables.types import SegmentType
from libs.datetime_utils import naive_utc_now
tenant, app = app_and_tenant
with session_factory.create_session() as session:
upload_file1 = UploadFile(
tenant_id=tenant.id,
storage_type="local",
key="test/file1.json",
name="file1.json",
size=1024,
extension="json",
mime_type="application/json",
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid.uuid4()),
created_at=naive_utc_now(),
used=False,
)
upload_file2 = UploadFile(
tenant_id=tenant.id,
storage_type="local",
key="test/file2.json",
name="file2.json",
size=2048,
extension="json",
mime_type="application/json",
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid.uuid4()),
created_at=naive_utc_now(),
used=False,
)
session.add(upload_file1)
session.add(upload_file2)
session.flush()
var_file1 = WorkflowDraftVariableFile(
tenant_id=tenant.id,
app_id=app.id,
user_id=str(uuid.uuid4()),
upload_file_id=upload_file1.id,
size=1024,
length=10,
value_type=SegmentType.STRING,
)
var_file2 = WorkflowDraftVariableFile(
tenant_id=tenant.id,
app_id=app.id,
user_id=str(uuid.uuid4()),
upload_file_id=upload_file2.id,
size=2048,
length=20,
value_type=SegmentType.OBJECT,
)
session.add(var_file1)
session.add(var_file2)
session.flush()
draft_var1 = WorkflowDraftVariable.new_node_variable(
app_id=app.id,
node_id="node_1",
name="large_var_1",
value=StringSegment(value="truncated..."),
node_execution_id=str(uuid.uuid4()),
file_id=var_file1.id,
)
draft_var2 = WorkflowDraftVariable.new_node_variable(
app_id=app.id,
node_id="node_2",
name="large_var_2",
value=StringSegment(value="truncated..."),
node_execution_id=str(uuid.uuid4()),
file_id=var_file2.id,
)
draft_var3 = WorkflowDraftVariable.new_node_variable(
app_id=app.id,
node_id="node_3",
name="regular_var",
value=StringSegment(value="regular_value"),
node_execution_id=str(uuid.uuid4()),
)
session.add(draft_var1)
session.add(draft_var2)
session.add(draft_var3)
session.commit()
data = {
"app": app,
"tenant": tenant,
"upload_files": [upload_file1, upload_file2],
"variable_files": [var_file1, var_file2],
"draft_variables": [draft_var1, draft_var2, draft_var3],
}
yield data
with session_factory.create_session() as session:
for table, ids in [
(WorkflowDraftVariable, [v.id for v in data["draft_variables"]]),
(WorkflowDraftVariableFile, [vf.id for vf in data["variable_files"]]),
(UploadFile, [uf.id for uf in data["upload_files"]]),
]:
cleanup_query = delete(table).where(table.id.in_(ids)).execution_options(synchronize_session=False)
session.execute(cleanup_query)
session.commit()
@pytest.fixture
def setup_commit_test_data(self, app_and_tenant):
"""Create test data for session commit tests."""
tenant, app = app_and_tenant
variable_ids: list[str] = []
with session_factory.create_session() as session:
variables = []
for i in range(10):
var = WorkflowDraftVariable.new_node_variable(
app_id=app.id,
node_id=f"node_{i}",
name=f"var_{i}",
value=StringSegment(value="test_value"),
node_execution_id=str(uuid.uuid4()),
)
session.add(var)
variables.append(var)
session.commit()
variable_ids = [v.id for v in variables]
yield {
"app": app,
"tenant": tenant,
"variable_ids": variable_ids,
}
with session_factory.create_session() as session:
cleanup_query = (
delete(WorkflowDraftVariable)
.where(WorkflowDraftVariable.id.in_(variable_ids))
.execution_options(synchronize_session=False)
)
session.execute(cleanup_query)
session.commit()
def test_session_commit_is_called_after_each_batch(self, setup_commit_test_data):
"""Test that session.begin() is used for automatic transaction management."""
data = setup_commit_test_data
app_id = data["app"].id
# Since session.begin() is used, the transaction is automatically committed
# when the with block exits successfully. We verify this by checking that
# data is actually persisted.
deleted_count = delete_draft_variables_batch(app_id, batch_size=3)
# Verify all data was deleted (proves transaction was committed)
with session_factory.create_session() as session:
remaining_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
assert deleted_count == 10
assert remaining_count == 0
def test_data_persisted_after_batch_deletion(self, setup_commit_test_data):
"""Test that data is actually persisted to database after batch deletion with commits."""
data = setup_commit_test_data
app_id = data["app"].id
variable_ids = data["variable_ids"]
# Verify initial state
with session_factory.create_session() as session:
initial_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
assert initial_count == 10
# Perform deletion with small batch size to force multiple commits
deleted_count = delete_draft_variables_batch(app_id, batch_size=3)
assert deleted_count == 10
# Verify all data is deleted in a new session (proves commits worked)
with session_factory.create_session() as session:
final_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
assert final_count == 0
# Verify specific IDs are deleted
with session_factory.create_session() as session:
remaining_vars = (
session.query(WorkflowDraftVariable).where(WorkflowDraftVariable.id.in_(variable_ids)).count()
)
assert remaining_vars == 0
def test_session_commit_with_empty_dataset(self, setup_commit_test_data):
"""Test session behavior when deleting from an empty dataset."""
nonexistent_app_id = str(uuid.uuid4())
# Should not raise any errors and should return 0
deleted_count = delete_draft_variables_batch(nonexistent_app_id, batch_size=10)
assert deleted_count == 0
def test_session_commit_with_single_batch(self, setup_commit_test_data):
"""Test that commit happens correctly when all data fits in a single batch."""
data = setup_commit_test_data
app_id = data["app"].id
with session_factory.create_session() as session:
initial_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
assert initial_count == 10
# Delete all in a single batch
deleted_count = delete_draft_variables_batch(app_id, batch_size=100)
assert deleted_count == 10
# Verify data is persisted
with session_factory.create_session() as session:
final_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
assert final_count == 0
def test_invalid_batch_size_raises_error(self, setup_commit_test_data):
"""Test that invalid batch size raises ValueError."""
data = setup_commit_test_data
app_id = data["app"].id
with pytest.raises(ValueError, match="batch_size must be positive"):
delete_draft_variables_batch(app_id, batch_size=0)
with pytest.raises(ValueError, match="batch_size must be positive"):
delete_draft_variables_batch(app_id, batch_size=-1)
@patch("extensions.ext_storage.storage")
def test_session_commit_with_offload_data_cleanup(self, mock_storage, setup_offload_test_data):
"""Test that session commits correctly when cleaning up offload data."""
data = setup_offload_test_data
app_id = data["app"].id
upload_file_ids = [uf.id for uf in data["upload_files"]]
mock_storage.delete.return_value = None
# Verify initial state
with session_factory.create_session() as session:
draft_vars_before = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
var_files_before = (
session.query(WorkflowDraftVariableFile)
.where(WorkflowDraftVariableFile.id.in_([vf.id for vf in data["variable_files"]]))
.count()
)
upload_files_before = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
assert draft_vars_before == 3
assert var_files_before == 2
assert upload_files_before == 2
# Delete variables with offload data
deleted_count = delete_draft_variables_batch(app_id, batch_size=10)
assert deleted_count == 3
# Verify all data is persisted (deleted) in new session
with session_factory.create_session() as session:
draft_vars_after = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
var_files_after = (
session.query(WorkflowDraftVariableFile)
.where(WorkflowDraftVariableFile.id.in_([vf.id for vf in data["variable_files"]]))
.count()
)
upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
assert draft_vars_after == 0
assert var_files_after == 0
assert upload_files_after == 0
# Verify storage cleanup was called
assert mock_storage.delete.call_count == 2

View File

@@ -217,7 +217,6 @@ class TestTemplateTransformNode:
@patch(
"core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template"
)
@patch("core.workflow.nodes.template_transform.template_transform_node.MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH", 10)
def test_run_output_length_exceeds_limit(
self, mock_execute, basic_node_data, mock_graph, mock_graph_runtime_state, graph_init_params
):
@@ -231,6 +230,7 @@ class TestTemplateTransformNode:
graph_init_params=graph_init_params,
graph=mock_graph,
graph_runtime_state=mock_graph_runtime_state,
max_output_length=10,
)
result = node._run()
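
With the limit injectable through the constructor, the test passes `max_output_length=10` directly instead of patching the removed module constant. A runnable toy contrasting the two styles; `Node` and `LIMIT` are stand-ins, not the real classes:

from unittest.mock import patch

LIMIT = 400_000  # stand-in for the removed MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH

class Node:
    def __init__(self, max_output_length: int | None = None) -> None:
        self.max_output_length = max_output_length if max_output_length is not None else LIMIT

# New style: constructor injection, no patch target to keep in sync.
assert Node(max_output_length=10).max_output_length == 10

# Old style: patch the module attribute for the duration of the test.
with patch(f"{__name__}.LIMIT", 10):
    assert Node().max_output_length == 10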

View File

@@ -350,7 +350,7 @@ class TestDeleteWorkflowArchiveLogs:
mock_query.where.return_value = mock_delete_query
mock_db.session.query.return_value = mock_query
delete_func(mock_db.session, "log-1")
delete_func("log-1")
mock_db.session.query.assert_called_once_with(WorkflowArchiveLog)
mock_query.where.assert_called_once()

api/uv.lock generated
View File

@@ -1368,7 +1368,7 @@ wheels = [
[[package]]
name = "dify-api"
version = "1.12.1"
version = "1.12.0"
source = { virtual = "." }
dependencies = [
{ name = "aliyun-log-python-sdk" },

View File

@@ -21,7 +21,7 @@ services:
# API service
api:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.12.0
restart: always
environment:
# Use the shared environment variables.
@@ -63,7 +63,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.12.0
restart: always
environment:
# Use the shared environment variables.
@@ -102,7 +102,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.12.0
restart: always
environment:
# Use the shared environment variables.
@@ -132,7 +132,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.12.1
image: langgenius/dify-web:1.12.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

View File

@@ -707,7 +707,7 @@ services:
# API service
api:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.12.0
restart: always
environment:
# Use the shared environment variables.
@@ -749,7 +749,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.12.0
restart: always
environment:
# Use the shared environment variables.
@@ -788,7 +788,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.12.0
restart: always
environment:
# Use the shared environment variables.
@@ -818,7 +818,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.12.1
image: langgenius/dify-web:1.12.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

View File

@@ -98,46 +98,31 @@ export const useNodesSyncDraft = () => {
) => {
if (getNodesReadOnly())
return
const postParams = getPostParams()
// Get base params without hash
const baseParams = getPostParams()
if (!baseParams)
return
const {
setSyncWorkflowDraftHash,
setDraftUpdatedAt,
} = workflowStore.getState()
try {
// IMPORTANT: Get the LATEST hash right before sending the request
// This ensures that even if queued, each request uses the most recent hash
const latestHash = workflowStore.getState().syncWorkflowDraftHash
const postParams = {
...baseParams,
params: {
...baseParams.params,
hash: latestHash || null, // null for first-time, otherwise use latest hash
},
if (postParams) {
const {
setSyncWorkflowDraftHash,
setDraftUpdatedAt,
} = workflowStore.getState()
try {
const res = await syncWorkflowDraft(postParams)
setSyncWorkflowDraftHash(res.hash)
setDraftUpdatedAt(res.updated_at)
callback?.onSuccess?.()
}
const res = await syncWorkflowDraft(postParams)
setSyncWorkflowDraftHash(res.hash)
setDraftUpdatedAt(res.updated_at)
callback?.onSuccess?.()
}
catch (error: any) {
if (error && error.json && !error.bodyUsed) {
error.json().then((err: any) => {
if (err.code === 'draft_workflow_not_sync' && !notRefreshWhenSyncError)
handleRefreshWorkflowDraft()
})
catch (error: any) {
if (error && error.json && !error.bodyUsed) {
error.json().then((err: any) => {
if (err.code === 'draft_workflow_not_sync' && !notRefreshWhenSyncError)
handleRefreshWorkflowDraft()
})
}
callback?.onError?.()
}
finally {
callback?.onSettled?.()
}
callback?.onError?.()
}
finally {
callback?.onSettled?.()
}
}, [workflowStore, getPostParams, getNodesReadOnly, handleRefreshWorkflowDraft])
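
The reordering above exists so a queued save reads the newest `syncWorkflowDraftHash` at send time; capturing the hash when the call was enqueued can submit a stale value and trigger `draft_workflow_not_sync`. A language-agnostic sketch of the pattern in Python, with the store and the server response as stand-ins:

import itertools

_revs = itertools.count(1)
store = {"hash": None}  # stand-in for workflowStore's syncWorkflowDraftHash

def sync_draft(base_params: dict) -> dict:
    # Read the LATEST hash immediately before sending, not at enqueue time,
    # so back-to-back saves never reuse a hash the server already replaced.
    params = {**base_params, "hash": store["hash"]}
    store["hash"] = f"rev-{next(_revs)}"  # stand-in for the hash the server returns
    return params

first = sync_draft({"graph": {}})
second = sync_draft({"graph": {}})
assert first["hash"] is None       # first save sends null, as in the hook
assert second["hash"] == "rev-1"   # second save picked up the newest hash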

View File

@@ -1,119 +0,0 @@
import type {
TriggerLogEntity,
TriggerOAuthClientParams,
TriggerOAuthConfig,
TriggerProviderApiEntity,
TriggerSubscription,
TriggerSubscriptionBuilder,
} from '@/app/components/workflow/block-selector/types'
import { type } from '@orpc/contract'
import { base } from '../base'
export const triggersContract = base
.route({ path: '/workspaces/current/triggers', method: 'GET' })
.input(type<{ query?: { type?: string } }>())
.output(type<TriggerProviderApiEntity[]>())
export const triggerProviderInfoContract = base
.route({ path: '/workspaces/current/trigger-provider/{provider}/info', method: 'GET' })
.input(type<{ params: { provider: string } }>())
.output(type<TriggerProviderApiEntity>())
export const triggerSubscriptionsContract = base
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/list', method: 'GET' })
.input(type<{ params: { provider: string } }>())
.output(type<TriggerSubscription[]>())
export const triggerSubscriptionBuilderCreateContract = base
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/create', method: 'POST' })
.input(type<{
params: { provider: string }
body?: { credential_type?: string }
}>())
.output(type<{ subscription_builder: TriggerSubscriptionBuilder }>())
export const triggerSubscriptionBuilderUpdateContract = base
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/update/{subscriptionBuilderId}', method: 'POST' })
.input(type<{
params: { provider: string, subscriptionBuilderId: string }
body?: {
name?: string
properties?: Record<string, unknown>
parameters?: Record<string, unknown>
credentials?: Record<string, unknown>
}
}>())
.output(type<TriggerSubscriptionBuilder>())
export const triggerSubscriptionBuilderVerifyUpdateContract = base
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/verify-and-update/{subscriptionBuilderId}', method: 'POST' })
.input(type<{
params: { provider: string, subscriptionBuilderId: string }
body?: { credentials?: Record<string, unknown> }
}>())
.output(type<{ verified: boolean }>())
export const triggerSubscriptionVerifyContract = base
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/verify/{subscriptionId}', method: 'POST' })
.input(type<{
params: { provider: string, subscriptionId: string }
body?: { credentials?: Record<string, unknown> }
}>())
.output(type<{ verified: boolean }>())
export const triggerSubscriptionBuildContract = base
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/build/{subscriptionBuilderId}', method: 'POST' })
.input(type<{
params: { provider: string, subscriptionBuilderId: string }
body?: {
name?: string
parameters?: Record<string, unknown>
}
}>())
.output(type<unknown>())
export const triggerSubscriptionDeleteContract = base
.route({ path: '/workspaces/current/trigger-provider/{subscriptionId}/subscriptions/delete', method: 'POST' })
.input(type<{ params: { subscriptionId: string } }>())
.output(type<{ result: string }>())
export const triggerSubscriptionUpdateContract = base
.route({ path: '/workspaces/current/trigger-provider/{subscriptionId}/subscriptions/update', method: 'POST' })
.input(type<{
params: { subscriptionId: string }
body?: {
name?: string
properties?: Record<string, unknown>
parameters?: Record<string, unknown>
credentials?: Record<string, unknown>
}
}>())
.output(type<{ result: string, id: string }>())
export const triggerSubscriptionBuilderLogsContract = base
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/logs/{subscriptionBuilderId}', method: 'GET' })
.input(type<{ params: { provider: string, subscriptionBuilderId: string } }>())
.output(type<{ logs: TriggerLogEntity[] }>())
export const triggerOAuthConfigContract = base
.route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'GET' })
.input(type<{ params: { provider: string } }>())
.output(type<TriggerOAuthConfig>())
export const triggerOAuthConfigureContract = base
.route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'POST' })
.input(type<{
params: { provider: string }
body: { client_params?: TriggerOAuthClientParams, enabled: boolean }
}>())
.output(type<{ result: string }>())
export const triggerOAuthDeleteContract = base
.route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'DELETE' })
.input(type<{ params: { provider: string } }>())
.output(type<{ result: string }>())
export const triggerOAuthInitiateContract = base
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/oauth/authorize', method: 'GET' })
.input(type<{ params: { provider: string } }>())
.output(type<{ authorization_url: string, subscription_builder: TriggerSubscriptionBuilder }>())

View File

@@ -1,23 +1,6 @@
import type { InferContractRouterInputs } from '@orpc/contract'
import { bindPartnerStackContract, invoicesContract } from './console/billing'
import { systemFeaturesContract } from './console/system'
import {
triggerOAuthConfigContract,
triggerOAuthConfigureContract,
triggerOAuthDeleteContract,
triggerOAuthInitiateContract,
triggerProviderInfoContract,
triggersContract,
triggerSubscriptionBuildContract,
triggerSubscriptionBuilderCreateContract,
triggerSubscriptionBuilderLogsContract,
triggerSubscriptionBuilderUpdateContract,
triggerSubscriptionBuilderVerifyUpdateContract,
triggerSubscriptionDeleteContract,
triggerSubscriptionsContract,
triggerSubscriptionUpdateContract,
triggerSubscriptionVerifyContract,
} from './console/trigger'
import { trialAppDatasetsContract, trialAppInfoContract, trialAppParametersContract, trialAppWorkflowsContract } from './console/try-app'
import { collectionPluginsContract, collectionsContract, searchAdvancedContract } from './marketplace'
@@ -41,23 +24,6 @@ export const consoleRouterContract = {
invoices: invoicesContract,
bindPartnerStack: bindPartnerStackContract,
},
triggers: {
list: triggersContract,
providerInfo: triggerProviderInfoContract,
subscriptions: triggerSubscriptionsContract,
subscriptionBuilderCreate: triggerSubscriptionBuilderCreateContract,
subscriptionBuilderUpdate: triggerSubscriptionBuilderUpdateContract,
subscriptionBuilderVerifyUpdate: triggerSubscriptionBuilderVerifyUpdateContract,
subscriptionVerify: triggerSubscriptionVerifyContract,
subscriptionBuild: triggerSubscriptionBuildContract,
subscriptionDelete: triggerSubscriptionDeleteContract,
subscriptionUpdate: triggerSubscriptionUpdateContract,
subscriptionBuilderLogs: triggerSubscriptionBuilderLogsContract,
oauthConfig: triggerOAuthConfigContract,
oauthConfigure: triggerOAuthConfigureContract,
oauthDelete: triggerOAuthDeleteContract,
oauthInitiate: triggerOAuthInitiateContract,
},
}
export type ConsoleInputs = InferContractRouterInputs<typeof consoleRouterContract>

View File

@@ -1,7 +1,7 @@
{
"name": "dify-web",
"type": "module",
"version": "1.12.1",
"version": "1.12.0",
"private": true,
"packageManager": "pnpm@10.27.0+sha512.72d699da16b1179c14ba9e64dc71c9a40988cbdc65c264cb0e489db7de917f20dcf4d64d8723625f2969ba52d4b7e2a1170682d9ac2a5dcaeaab732b7e16f04a",
"imports": {

View File

@@ -10,14 +10,17 @@ import type {
} from '@/app/components/workflow/block-selector/types'
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { CollectionType } from '@/app/components/tools/types'
import { consoleClient, consoleQuery } from '@/service/client'
import { get, post } from './base'
import { del, get, post } from './base'
import { useInvalid } from './use-base'
const NAME_SPACE = 'triggers'
// Trigger Provider Service - Provider ID Format: plugin_id/provider_name
// Convert backend API response to frontend ToolWithProvider format
const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): TriggerWithProvider => {
return {
// Collection fields
id: provider.plugin_id || provider.name,
name: provider.name,
author: provider.author,
@@ -55,9 +58,12 @@ const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): Trigg
labels: provider.tags || [],
output_schema: event.output_schema || {},
})),
// Trigger-specific schema fields
subscription_constructor: provider.subscription_constructor,
subscription_schema: provider.subscription_schema,
supported_creation_methods: provider.supported_creation_methods,
meta: {
version: '1.0',
},
@@ -66,20 +72,22 @@ const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): Trigg
export const useAllTriggerPlugins = (enabled = true) => {
return useQuery<TriggerWithProvider[]>({
queryKey: consoleQuery.triggers.list.queryKey({ input: {} }),
queryKey: [NAME_SPACE, 'all'],
queryFn: async () => {
const response = await consoleClient.triggers.list({})
const response = await get<TriggerProviderApiEntity[]>('/workspaces/current/triggers')
return response.map(convertToTriggerWithProvider)
},
enabled,
staleTime: 0,
gcTime: 0,
})
}
export const useTriggerPluginsByType = (triggerType: string, enabled = true) => {
return useQuery<TriggerWithProvider[]>({
queryKey: consoleQuery.triggers.list.queryKey({ input: { query: { type: triggerType } } }),
queryKey: [NAME_SPACE, 'byType', triggerType],
queryFn: async () => {
const response = await consoleClient.triggers.list({ query: { type: triggerType } })
const response = await get<TriggerProviderApiEntity[]>(`/workspaces/current/triggers?type=${triggerType}`)
return response.map(convertToTriggerWithProvider)
},
enabled: enabled && !!triggerType,
@@ -87,23 +95,25 @@ export const useTriggerPluginsByType = (triggerType: string, enabled = true) =>
}
export const useInvalidateAllTriggerPlugins = () => {
return useInvalid(consoleQuery.triggers.list.queryKey({ input: {} }))
return useInvalid([NAME_SPACE, 'all'])
}
// ===== Trigger Subscriptions Management =====
export const useTriggerProviderInfo = (provider: string, enabled = true) => {
return useQuery<TriggerProviderApiEntity>({
queryKey: consoleQuery.triggers.providerInfo.queryKey({ input: { params: { provider } } }),
queryFn: () => consoleClient.triggers.providerInfo({ params: { provider } }),
queryKey: [NAME_SPACE, 'provider-info', provider],
queryFn: () => get<TriggerProviderApiEntity>(`/workspaces/current/trigger-provider/${provider}/info`),
enabled: enabled && !!provider,
staleTime: 0,
gcTime: 0,
})
}
export const useTriggerSubscriptions = (provider: string, enabled = true) => {
return useQuery<TriggerSubscription[]>({
queryKey: consoleQuery.triggers.subscriptions.queryKey({ input: { params: { provider } } }),
queryFn: () => consoleClient.triggers.subscriptions({ params: { provider } }),
queryKey: [NAME_SPACE, 'list-subscriptions', provider],
queryFn: () => get<TriggerSubscription[]>(`/workspaces/current/trigger-provider/${provider}/subscriptions/list`),
enabled: enabled && !!provider,
})
}
@@ -112,30 +122,30 @@ export const useInvalidateTriggerSubscriptions = () => {
const queryClient = useQueryClient()
return (provider: string) => {
queryClient.invalidateQueries({
queryKey: consoleQuery.triggers.subscriptions.queryKey({ input: { params: { provider } } }),
queryKey: [NAME_SPACE, 'subscriptions', provider],
})
}
}
export const useCreateTriggerSubscriptionBuilder = () => {
return useMutation({
mutationKey: consoleQuery.triggers.subscriptionBuilderCreate.mutationKey(),
mutationKey: [NAME_SPACE, 'create-subscription-builder'],
mutationFn: (payload: {
provider: string
credential_type?: string
}) => {
const { provider, ...body } = payload
return consoleClient.triggers.subscriptionBuilderCreate({
params: { provider },
body,
})
return post<{ subscription_builder: TriggerSubscriptionBuilder }>(
`/workspaces/current/trigger-provider/${provider}/subscriptions/builder/create`,
{ body },
)
},
})
}
export const useUpdateTriggerSubscriptionBuilder = () => {
return useMutation({
mutationKey: consoleQuery.triggers.subscriptionBuilderUpdate.mutationKey(),
mutationKey: [NAME_SPACE, 'update-subscription-builder'],
mutationFn: (payload: {
provider: string
subscriptionBuilderId: string
@@ -145,17 +155,17 @@ export const useUpdateTriggerSubscriptionBuilder = () => {
credentials?: Record<string, unknown>
}) => {
const { provider, subscriptionBuilderId, ...body } = payload
return consoleClient.triggers.subscriptionBuilderUpdate({
params: { provider, subscriptionBuilderId },
body,
})
return post<TriggerSubscriptionBuilder>(
`/workspaces/current/trigger-provider/${provider}/subscriptions/builder/update/${subscriptionBuilderId}`,
{ body },
)
},
})
}
export const useVerifyAndUpdateTriggerSubscriptionBuilder = () => {
return useMutation({
mutationKey: consoleQuery.triggers.subscriptionBuilderVerifyUpdate.mutationKey(),
mutationKey: [NAME_SPACE, 'verify-and-update-subscription-builder'],
mutationFn: (payload: {
provider: string
subscriptionBuilderId: string
@@ -173,7 +183,7 @@ export const useVerifyAndUpdateTriggerSubscriptionBuilder = () => {
export const useVerifyTriggerSubscription = () => {
return useMutation({
mutationKey: consoleQuery.triggers.subscriptionVerify.mutationKey(),
mutationKey: [NAME_SPACE, 'verify-subscription'],
mutationFn: (payload: {
provider: string
subscriptionId: string
@@ -198,24 +208,24 @@ export type BuildTriggerSubscriptionPayload = {
export const useBuildTriggerSubscription = () => {
return useMutation({
mutationKey: consoleQuery.triggers.subscriptionBuild.mutationKey(),
mutationKey: [NAME_SPACE, 'build-subscription'],
mutationFn: (payload: BuildTriggerSubscriptionPayload) => {
const { provider, subscriptionBuilderId, ...body } = payload
return consoleClient.triggers.subscriptionBuild({
params: { provider, subscriptionBuilderId },
body,
})
return post(
`/workspaces/current/trigger-provider/${provider}/subscriptions/builder/build/${subscriptionBuilderId}`,
{ body },
)
},
})
}
export const useDeleteTriggerSubscription = () => {
return useMutation({
mutationKey: consoleQuery.triggers.subscriptionDelete.mutationKey(),
mutationKey: [NAME_SPACE, 'delete-subscription'],
mutationFn: (subscriptionId: string) => {
return consoleClient.triggers.subscriptionDelete({
params: { subscriptionId },
})
return post<{ result: string }>(
`/workspaces/current/trigger-provider/${subscriptionId}/subscriptions/delete`,
)
},
})
}
@@ -230,13 +240,13 @@ export type UpdateTriggerSubscriptionPayload = {
export const useUpdateTriggerSubscription = () => {
return useMutation({
mutationKey: consoleQuery.triggers.subscriptionUpdate.mutationKey(),
mutationKey: [NAME_SPACE, 'update-subscription'],
mutationFn: (payload: UpdateTriggerSubscriptionPayload) => {
const { subscriptionId, ...body } = payload
return consoleClient.triggers.subscriptionUpdate({
params: { subscriptionId },
body,
})
return post<{ result: string, id: string }>(
`/workspaces/current/trigger-provider/${subscriptionId}/subscriptions/update`,
{ body },
)
},
})
}
@@ -252,8 +262,10 @@ export const useTriggerSubscriptionBuilderLogs = (
const { enabled = true, refetchInterval = false } = options
return useQuery<{ logs: TriggerLogEntity[] }>({
queryKey: consoleQuery.triggers.subscriptionBuilderLogs.queryKey({ input: { params: { provider, subscriptionBuilderId } } }),
queryFn: () => consoleClient.triggers.subscriptionBuilderLogs({ params: { provider, subscriptionBuilderId } }),
queryKey: [NAME_SPACE, 'subscription-builder-logs', provider, subscriptionBuilderId],
queryFn: () => get(
`/workspaces/current/trigger-provider/${provider}/subscriptions/builder/logs/${subscriptionBuilderId}`,
),
enabled: enabled && !!provider && !!subscriptionBuilderId,
refetchInterval,
})
@@ -262,8 +274,8 @@ export const useTriggerSubscriptionBuilderLogs = (
// ===== OAuth Management =====
export const useTriggerOAuthConfig = (provider: string, enabled = true) => {
return useQuery<TriggerOAuthConfig>({
queryKey: consoleQuery.triggers.oauthConfig.queryKey({ input: { params: { provider } } }),
queryFn: () => consoleClient.triggers.oauthConfig({ params: { provider } }),
queryKey: [NAME_SPACE, 'oauth-config', provider],
queryFn: () => get<TriggerOAuthConfig>(`/workspaces/current/trigger-provider/${provider}/oauth/client`),
enabled: enabled && !!provider,
})
}
@@ -276,31 +288,31 @@ export type ConfigureTriggerOAuthPayload = {
export const useConfigureTriggerOAuth = () => {
return useMutation({
mutationKey: consoleQuery.triggers.oauthConfigure.mutationKey(),
mutationKey: [NAME_SPACE, 'configure-oauth'],
mutationFn: (payload: ConfigureTriggerOAuthPayload) => {
const { provider, ...body } = payload
return consoleClient.triggers.oauthConfigure({
params: { provider },
body,
})
return post<{ result: string }>(
`/workspaces/current/trigger-provider/${provider}/oauth/client`,
{ body },
)
},
})
}
export const useDeleteTriggerOAuth = () => {
return useMutation({
mutationKey: consoleQuery.triggers.oauthDelete.mutationKey(),
mutationKey: [NAME_SPACE, 'delete-oauth'],
mutationFn: (provider: string) => {
return consoleClient.triggers.oauthDelete({
params: { provider },
})
return del<{ result: string }>(
`/workspaces/current/trigger-provider/${provider}/oauth/client`,
)
},
})
}
export const useInitiateTriggerOAuth = () => {
return useMutation({
mutationKey: consoleQuery.triggers.oauthInitiate.mutationKey(),
mutationKey: [NAME_SPACE, 'initiate-oauth'],
mutationFn: (provider: string) => {
return get<{ authorization_url: string, subscription_builder: TriggerSubscriptionBuilder }>(
`/workspaces/current/trigger-provider/${provider}/subscriptions/oauth/authorize`,
@@ -324,6 +336,7 @@ export const useTriggerPluginDynamicOptions = (payload: {
return useQuery<{ options: FormOption[] }>({
queryKey: [NAME_SPACE, 'dynamic-options', payload.plugin_id, payload.provider, payload.action, payload.parameter, payload.credential_id, payload.credentials, payload.extra],
queryFn: () => {
// Use new endpoint with POST when credentials provided (for edit mode)
if (payload.credentials) {
return post<{ options: FormOption[] }>(
'/workspaces/current/plugin/parameters/dynamic-options-with-credentials',
@@ -340,6 +353,7 @@ export const useTriggerPluginDynamicOptions = (payload: {
{ silent: true },
)
}
// Use original GET endpoint for normal cases
return get<{ options: FormOption[] }>(
'/workspaces/current/plugin/parameters/dynamic-options',
{
@@ -358,6 +372,7 @@ export const useTriggerPluginDynamicOptions = (payload: {
enabled: enabled && !!payload.plugin_id && !!payload.provider && !!payload.action && !!payload.parameter && !!payload.credential_id,
retry: 0,
staleTime: 0,
gcTime: 0,
})
}
@@ -367,7 +382,7 @@ export const useInvalidateTriggerOAuthConfig = () => {
const queryClient = useQueryClient()
return (provider: string) => {
queryClient.invalidateQueries({
queryKey: consoleQuery.triggers.oauthConfig.queryKey({ input: { params: { provider } } }),
queryKey: [NAME_SPACE, 'oauth-config', provider],
})
}
}

web/types/i18n.d.ts vendored
View File

@@ -27,3 +27,5 @@ export type I18nKeysWithPrefix<
> = Prefix extends ''
? keyof Resources[NS]
: Extract<keyof Resources[NS], `${Prefix}${string}`>
type A = I18nKeysWithPrefix<'billing'>