Mirror of https://github.com/langgenius/dify.git (synced 2026-02-05 03:05:32 +08:00)

Compare commits: fix/fix-te ... main (5 commits)

Commits:
  365f749ed5
  f686197589
  f584be9cf0
  3bd228ddb7
  0dfa59b1db
.github/CODEOWNERS (vendored): 2 lines changed

@ -239,7 +239,7 @@
/web/app/components/base/ @iamjoel @zxhlyh

# Frontend - Base Components Tests
/web/app/components/base/**/__tests__/ @hyoban @CodingOnStar
/web/app/components/base/**/*.spec.tsx @hyoban @CodingOnStar

# Frontend - Utils and Hooks
/web/utils/classnames.ts @iamjoel @zxhlyh
@ -1,6 +1,6 @@
[project]
name = "dify-api"
version = "1.12.0"
version = "1.12.1"
requires-python = ">=3.11,<3.13"

dependencies = [
@ -259,8 +259,8 @@ def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):


def _delete_app_workflow_archive_logs(tenant_id: str, app_id: str):
    def del_workflow_archive_log(workflow_archive_log_id: str):
        db.session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
    def del_workflow_archive_log(session, workflow_archive_log_id: str):
        session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
            synchronize_session=False
        )
@ -420,7 +420,7 @@ def delete_draft_variables_batch(app_id: str, batch_size: int = 1000) -> int:
    total_files_deleted = 0

    while True:
        with session_factory.create_session() as session:
        with session_factory.create_session() as session, session.begin():
            # Get a batch of draft variable IDs along with their file_ids
            query_sql = """
                SELECT id, file_id FROM workflow_draft_variables
@ -10,7 +10,10 @@ from models import Tenant
from models.enums import CreatorUserRole
from models.model import App, UploadFile
from models.workflow import WorkflowDraftVariable, WorkflowDraftVariableFile
from tasks.remove_app_and_related_data_task import _delete_draft_variables, delete_draft_variables_batch
from tasks.remove_app_and_related_data_task import (
    _delete_draft_variables,
    delete_draft_variables_batch,
)


@pytest.fixture
@ -297,12 +300,18 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
    def test_delete_draft_variables_with_offload_data(self, mock_storage, setup_offload_test_data):
        data = setup_offload_test_data
        app_id = data["app"].id
        upload_file_ids = [uf.id for uf in data["upload_files"]]
        variable_file_ids = [vf.id for vf in data["variable_files"]]
        mock_storage.delete.return_value = None

        with session_factory.create_session() as session:
            draft_vars_before = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            var_files_before = session.query(WorkflowDraftVariableFile).count()
            upload_files_before = session.query(UploadFile).count()
            var_files_before = (
                session.query(WorkflowDraftVariableFile)
                .where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
                .count()
            )
            upload_files_before = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
            assert draft_vars_before == 3
            assert var_files_before == 2
            assert upload_files_before == 2
@ -315,8 +324,12 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
        assert draft_vars_after == 0

        with session_factory.create_session() as session:
            var_files_after = session.query(WorkflowDraftVariableFile).count()
            upload_files_after = session.query(UploadFile).count()
            var_files_after = (
                session.query(WorkflowDraftVariableFile)
                .where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
                .count()
            )
            upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
            assert var_files_after == 0
            assert upload_files_after == 0
@ -329,6 +342,8 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
    def test_delete_draft_variables_storage_failure_continues_cleanup(self, mock_storage, setup_offload_test_data):
        data = setup_offload_test_data
        app_id = data["app"].id
        upload_file_ids = [uf.id for uf in data["upload_files"]]
        variable_file_ids = [vf.id for vf in data["variable_files"]]
        mock_storage.delete.side_effect = [Exception("Storage error"), None]

        deleted_count = delete_draft_variables_batch(app_id, batch_size=10)
@ -339,8 +354,12 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
        assert draft_vars_after == 0

        with session_factory.create_session() as session:
            var_files_after = session.query(WorkflowDraftVariableFile).count()
            upload_files_after = session.query(UploadFile).count()
            var_files_after = (
                session.query(WorkflowDraftVariableFile)
                .where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
                .count()
            )
            upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
            assert var_files_after == 0
            assert upload_files_after == 0
@ -395,3 +414,275 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
            if app2_obj:
                session.delete(app2_obj)
            session.commit()


class TestDeleteDraftVariablesSessionCommit:
    """Test suite to verify session commit behavior in delete_draft_variables_batch."""

    @pytest.fixture
    def setup_offload_test_data(self, app_and_tenant):
        """Create test data with offload files for session commit tests."""
        from core.variables.types import SegmentType
        from libs.datetime_utils import naive_utc_now

        tenant, app = app_and_tenant

        with session_factory.create_session() as session:
            upload_file1 = UploadFile(
                tenant_id=tenant.id,
                storage_type="local",
                key="test/file1.json",
                name="file1.json",
                size=1024,
                extension="json",
                mime_type="application/json",
                created_by_role=CreatorUserRole.ACCOUNT,
                created_by=str(uuid.uuid4()),
                created_at=naive_utc_now(),
                used=False,
            )
            upload_file2 = UploadFile(
                tenant_id=tenant.id,
                storage_type="local",
                key="test/file2.json",
                name="file2.json",
                size=2048,
                extension="json",
                mime_type="application/json",
                created_by_role=CreatorUserRole.ACCOUNT,
                created_by=str(uuid.uuid4()),
                created_at=naive_utc_now(),
                used=False,
            )
            session.add(upload_file1)
            session.add(upload_file2)
            session.flush()

            var_file1 = WorkflowDraftVariableFile(
                tenant_id=tenant.id,
                app_id=app.id,
                user_id=str(uuid.uuid4()),
                upload_file_id=upload_file1.id,
                size=1024,
                length=10,
                value_type=SegmentType.STRING,
            )
            var_file2 = WorkflowDraftVariableFile(
                tenant_id=tenant.id,
                app_id=app.id,
                user_id=str(uuid.uuid4()),
                upload_file_id=upload_file2.id,
                size=2048,
                length=20,
                value_type=SegmentType.OBJECT,
            )
            session.add(var_file1)
            session.add(var_file2)
            session.flush()
            draft_var1 = WorkflowDraftVariable.new_node_variable(
                app_id=app.id,
                node_id="node_1",
                name="large_var_1",
                value=StringSegment(value="truncated..."),
                node_execution_id=str(uuid.uuid4()),
                file_id=var_file1.id,
            )
            draft_var2 = WorkflowDraftVariable.new_node_variable(
                app_id=app.id,
                node_id="node_2",
                name="large_var_2",
                value=StringSegment(value="truncated..."),
                node_execution_id=str(uuid.uuid4()),
                file_id=var_file2.id,
            )
            draft_var3 = WorkflowDraftVariable.new_node_variable(
                app_id=app.id,
                node_id="node_3",
                name="regular_var",
                value=StringSegment(value="regular_value"),
                node_execution_id=str(uuid.uuid4()),
            )
            session.add(draft_var1)
            session.add(draft_var2)
            session.add(draft_var3)
            session.commit()

        data = {
            "app": app,
            "tenant": tenant,
            "upload_files": [upload_file1, upload_file2],
            "variable_files": [var_file1, var_file2],
            "draft_variables": [draft_var1, draft_var2, draft_var3],
        }

        yield data

        with session_factory.create_session() as session:
            for table, ids in [
                (WorkflowDraftVariable, [v.id for v in data["draft_variables"]]),
                (WorkflowDraftVariableFile, [vf.id for vf in data["variable_files"]]),
                (UploadFile, [uf.id for uf in data["upload_files"]]),
            ]:
                cleanup_query = delete(table).where(table.id.in_(ids)).execution_options(synchronize_session=False)
                session.execute(cleanup_query)
            session.commit()
    @pytest.fixture
    def setup_commit_test_data(self, app_and_tenant):
        """Create test data for session commit tests."""
        tenant, app = app_and_tenant
        variable_ids: list[str] = []

        with session_factory.create_session() as session:
            variables = []
            for i in range(10):
                var = WorkflowDraftVariable.new_node_variable(
                    app_id=app.id,
                    node_id=f"node_{i}",
                    name=f"var_{i}",
                    value=StringSegment(value="test_value"),
                    node_execution_id=str(uuid.uuid4()),
                )
                session.add(var)
                variables.append(var)
            session.commit()
            variable_ids = [v.id for v in variables]

        yield {
            "app": app,
            "tenant": tenant,
            "variable_ids": variable_ids,
        }

        with session_factory.create_session() as session:
            cleanup_query = (
                delete(WorkflowDraftVariable)
                .where(WorkflowDraftVariable.id.in_(variable_ids))
                .execution_options(synchronize_session=False)
            )
            session.execute(cleanup_query)
            session.commit()
    def test_session_commit_is_called_after_each_batch(self, setup_commit_test_data):
        """Test that session.begin() is used for automatic transaction management."""
        data = setup_commit_test_data
        app_id = data["app"].id

        # Since session.begin() is used, the transaction is automatically committed
        # when the with block exits successfully. We verify this by checking that
        # data is actually persisted.
        deleted_count = delete_draft_variables_batch(app_id, batch_size=3)

        # Verify all data was deleted (proves transaction was committed)
        with session_factory.create_session() as session:
            remaining_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()

        assert deleted_count == 10
        assert remaining_count == 0

    def test_data_persisted_after_batch_deletion(self, setup_commit_test_data):
        """Test that data is actually persisted to database after batch deletion with commits."""
        data = setup_commit_test_data
        app_id = data["app"].id
        variable_ids = data["variable_ids"]

        # Verify initial state
        with session_factory.create_session() as session:
            initial_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert initial_count == 10

        # Perform deletion with small batch size to force multiple commits
        deleted_count = delete_draft_variables_batch(app_id, batch_size=3)

        assert deleted_count == 10

        # Verify all data is deleted in a new session (proves commits worked)
        with session_factory.create_session() as session:
            final_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert final_count == 0

        # Verify specific IDs are deleted
        with session_factory.create_session() as session:
            remaining_vars = (
                session.query(WorkflowDraftVariable).where(WorkflowDraftVariable.id.in_(variable_ids)).count()
            )
            assert remaining_vars == 0
    def test_session_commit_with_empty_dataset(self, setup_commit_test_data):
        """Test session behavior when deleting from an empty dataset."""
        nonexistent_app_id = str(uuid.uuid4())

        # Should not raise any errors and should return 0
        deleted_count = delete_draft_variables_batch(nonexistent_app_id, batch_size=10)
        assert deleted_count == 0

    def test_session_commit_with_single_batch(self, setup_commit_test_data):
        """Test that commit happens correctly when all data fits in a single batch."""
        data = setup_commit_test_data
        app_id = data["app"].id

        with session_factory.create_session() as session:
            initial_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert initial_count == 10

        # Delete all in a single batch
        deleted_count = delete_draft_variables_batch(app_id, batch_size=100)
        assert deleted_count == 10

        # Verify data is persisted
        with session_factory.create_session() as session:
            final_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert final_count == 0

    def test_invalid_batch_size_raises_error(self, setup_commit_test_data):
        """Test that invalid batch size raises ValueError."""
        data = setup_commit_test_data
        app_id = data["app"].id

        with pytest.raises(ValueError, match="batch_size must be positive"):
            delete_draft_variables_batch(app_id, batch_size=0)

        with pytest.raises(ValueError, match="batch_size must be positive"):
            delete_draft_variables_batch(app_id, batch_size=-1)
    @patch("extensions.ext_storage.storage")
    def test_session_commit_with_offload_data_cleanup(self, mock_storage, setup_offload_test_data):
        """Test that session commits correctly when cleaning up offload data."""
        data = setup_offload_test_data
        app_id = data["app"].id
        upload_file_ids = [uf.id for uf in data["upload_files"]]
        mock_storage.delete.return_value = None

        # Verify initial state
        with session_factory.create_session() as session:
            draft_vars_before = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            var_files_before = (
                session.query(WorkflowDraftVariableFile)
                .where(WorkflowDraftVariableFile.id.in_([vf.id for vf in data["variable_files"]]))
                .count()
            )
            upload_files_before = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
            assert draft_vars_before == 3
            assert var_files_before == 2
            assert upload_files_before == 2

        # Delete variables with offload data
        deleted_count = delete_draft_variables_batch(app_id, batch_size=10)
        assert deleted_count == 3

        # Verify all data is persisted (deleted) in new session
        with session_factory.create_session() as session:
            draft_vars_after = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            var_files_after = (
                session.query(WorkflowDraftVariableFile)
                .where(WorkflowDraftVariableFile.id.in_([vf.id for vf in data["variable_files"]]))
                .count()
            )
            upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
            assert draft_vars_after == 0
            assert var_files_after == 0
            assert upload_files_after == 0

        # Verify storage cleanup was called
        assert mock_storage.delete.call_count == 2
@ -350,7 +350,7 @@ class TestDeleteWorkflowArchiveLogs:
        mock_query.where.return_value = mock_delete_query
        mock_db.session.query.return_value = mock_query

        delete_func("log-1")
        delete_func(mock_db.session, "log-1")

        mock_db.session.query.assert_called_once_with(WorkflowArchiveLog)
        mock_query.where.assert_called_once()
api/uv.lock (generated): 2 lines changed

@ -1368,7 +1368,7 @@ wheels = [

[[package]]
name = "dify-api"
version = "1.12.0"
version = "1.12.1"
source = { virtual = "." }
dependencies = [
    { name = "aliyun-log-python-sdk" },
@ -21,7 +21,7 @@ services:

  # API service
  api:
    image: langgenius/dify-api:1.12.0
    image: langgenius/dify-api:1.12.1
    restart: always
    environment:
      # Use the shared environment variables.

@ -63,7 +63,7 @@ services:
  # worker service
  # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
  worker:
    image: langgenius/dify-api:1.12.0
    image: langgenius/dify-api:1.12.1
    restart: always
    environment:
      # Use the shared environment variables.

@ -102,7 +102,7 @@ services:
  # worker_beat service
  # Celery beat for scheduling periodic tasks.
  worker_beat:
    image: langgenius/dify-api:1.12.0
    image: langgenius/dify-api:1.12.1
    restart: always
    environment:
      # Use the shared environment variables.

@ -132,7 +132,7 @@ services:

  # Frontend web application.
  web:
    image: langgenius/dify-web:1.12.0
    image: langgenius/dify-web:1.12.1
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}

@ -707,7 +707,7 @@ services:

  # API service
  api:
    image: langgenius/dify-api:1.12.0
    image: langgenius/dify-api:1.12.1
    restart: always
    environment:
      # Use the shared environment variables.

@ -749,7 +749,7 @@ services:
  # worker service
  # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
  worker:
    image: langgenius/dify-api:1.12.0
    image: langgenius/dify-api:1.12.1
    restart: always
    environment:
      # Use the shared environment variables.

@ -788,7 +788,7 @@ services:
  # worker_beat service
  # Celery beat for scheduling periodic tasks.
  worker_beat:
    image: langgenius/dify-api:1.12.0
    image: langgenius/dify-api:1.12.1
    restart: always
    environment:
      # Use the shared environment variables.

@ -818,7 +818,7 @@ services:

  # Frontend web application.
  web:
    image: langgenius/dify-web:1.12.0
    image: langgenius/dify-web:1.12.1
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@ -98,31 +98,46 @@ export const useNodesSyncDraft = () => {
  ) => {
    if (getNodesReadOnly())
      return
    const postParams = getPostParams()

    if (postParams) {
      const {
        setSyncWorkflowDraftHash,
        setDraftUpdatedAt,
      } = workflowStore.getState()
      try {
        const res = await syncWorkflowDraft(postParams)
        setSyncWorkflowDraftHash(res.hash)
        setDraftUpdatedAt(res.updated_at)
        callback?.onSuccess?.()
    // Get base params without hash
    const baseParams = getPostParams()
    if (!baseParams)
      return

    const {
      setSyncWorkflowDraftHash,
      setDraftUpdatedAt,
    } = workflowStore.getState()

    try {
      // IMPORTANT: Get the LATEST hash right before sending the request
      // This ensures that even if queued, each request uses the most recent hash
      const latestHash = workflowStore.getState().syncWorkflowDraftHash

      const postParams = {
        ...baseParams,
        params: {
          ...baseParams.params,
          hash: latestHash || null, // null for first-time, otherwise use latest hash
        },
      }
      catch (error: any) {
        if (error && error.json && !error.bodyUsed) {
          error.json().then((err: any) => {
            if (err.code === 'draft_workflow_not_sync' && !notRefreshWhenSyncError)
              handleRefreshWorkflowDraft()
          })
        }
        callback?.onError?.()
      }
      finally {
        callback?.onSettled?.()

      const res = await syncWorkflowDraft(postParams)
      setSyncWorkflowDraftHash(res.hash)
      setDraftUpdatedAt(res.updated_at)
      callback?.onSuccess?.()
    }
    catch (error: any) {
      if (error && error.json && !error.bodyUsed) {
        error.json().then((err: any) => {
          if (err.code === 'draft_workflow_not_sync' && !notRefreshWhenSyncError)
            handleRefreshWorkflowDraft()
        })
      }
      callback?.onError?.()
    }
    finally {
      callback?.onSettled?.()
    }
  }, [workflowStore, getPostParams, getNodesReadOnly, handleRefreshWorkflowDraft])
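Note: the hunk above replaces a hash captured when the sync was queued with a hash read from the store immediately before the request is sent. A minimal, self-contained sketch of that pattern follows (illustrative only; the store and request shapes below are simplified stand-ins, not Dify's actual types):

type SyncResult = { hash: string, updated_at: number }

// Simplified stand-in for the workflow store: holds the last hash returned by the server.
const store = {
  syncWorkflowDraftHash: '' as string,
  getState() { return { syncWorkflowDraftHash: this.syncWorkflowDraftHash } },
  setHash(hash: string) { this.syncWorkflowDraftHash = hash },
}

// Stand-in for syncWorkflowDraft: echoes a new hash derived from the one it received.
async function syncWorkflowDraft(params: { hash: string | null }): Promise<SyncResult> {
  return { hash: `${params.hash ?? 'initial'}+1`, updated_at: Date.now() }
}

async function doSyncWorkflowDraft(): Promise<void> {
  // Read the LATEST hash right before sending, not when the call was queued.
  const latestHash = store.getState().syncWorkflowDraftHash
  const res = await syncWorkflowDraft({ hash: latestHash || null }) // null marks the first sync
  store.setHash(res.hash)
}

// Queued back-to-back, each call still sends the hash produced by the one before it.
async function demo() {
  await doSyncWorkflowDraft()
  await doSyncWorkflowDraft()
  console.log(store.getState().syncWorkflowDraftHash) // "initial+1+1"
}
demo()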
web/contract/console/trigger.ts (new file): 119 lines

@ -0,0 +1,119 @@
import type {
  TriggerLogEntity,
  TriggerOAuthClientParams,
  TriggerOAuthConfig,
  TriggerProviderApiEntity,
  TriggerSubscription,
  TriggerSubscriptionBuilder,
} from '@/app/components/workflow/block-selector/types'
import { type } from '@orpc/contract'
import { base } from '../base'

export const triggersContract = base
  .route({ path: '/workspaces/current/triggers', method: 'GET' })
  .input(type<{ query?: { type?: string } }>())
  .output(type<TriggerProviderApiEntity[]>())

export const triggerProviderInfoContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/info', method: 'GET' })
  .input(type<{ params: { provider: string } }>())
  .output(type<TriggerProviderApiEntity>())

export const triggerSubscriptionsContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/list', method: 'GET' })
  .input(type<{ params: { provider: string } }>())
  .output(type<TriggerSubscription[]>())

export const triggerSubscriptionBuilderCreateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/create', method: 'POST' })
  .input(type<{
    params: { provider: string }
    body?: { credential_type?: string }
  }>())
  .output(type<{ subscription_builder: TriggerSubscriptionBuilder }>())

export const triggerSubscriptionBuilderUpdateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/update/{subscriptionBuilderId}', method: 'POST' })
  .input(type<{
    params: { provider: string, subscriptionBuilderId: string }
    body?: {
      name?: string
      properties?: Record<string, unknown>
      parameters?: Record<string, unknown>
      credentials?: Record<string, unknown>
    }
  }>())
  .output(type<TriggerSubscriptionBuilder>())

export const triggerSubscriptionBuilderVerifyUpdateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/verify-and-update/{subscriptionBuilderId}', method: 'POST' })
  .input(type<{
    params: { provider: string, subscriptionBuilderId: string }
    body?: { credentials?: Record<string, unknown> }
  }>())
  .output(type<{ verified: boolean }>())

export const triggerSubscriptionVerifyContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/verify/{subscriptionId}', method: 'POST' })
  .input(type<{
    params: { provider: string, subscriptionId: string }
    body?: { credentials?: Record<string, unknown> }
  }>())
  .output(type<{ verified: boolean }>())

export const triggerSubscriptionBuildContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/build/{subscriptionBuilderId}', method: 'POST' })
  .input(type<{
    params: { provider: string, subscriptionBuilderId: string }
    body?: {
      name?: string
      parameters?: Record<string, unknown>
    }
  }>())
  .output(type<unknown>())

export const triggerSubscriptionDeleteContract = base
  .route({ path: '/workspaces/current/trigger-provider/{subscriptionId}/subscriptions/delete', method: 'POST' })
  .input(type<{ params: { subscriptionId: string } }>())
  .output(type<{ result: string }>())

export const triggerSubscriptionUpdateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{subscriptionId}/subscriptions/update', method: 'POST' })
  .input(type<{
    params: { subscriptionId: string }
    body?: {
      name?: string
      properties?: Record<string, unknown>
      parameters?: Record<string, unknown>
      credentials?: Record<string, unknown>
    }
  }>())
  .output(type<{ result: string, id: string }>())

export const triggerSubscriptionBuilderLogsContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/logs/{subscriptionBuilderId}', method: 'GET' })
  .input(type<{ params: { provider: string, subscriptionBuilderId: string } }>())
  .output(type<{ logs: TriggerLogEntity[] }>())

export const triggerOAuthConfigContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'GET' })
  .input(type<{ params: { provider: string } }>())
  .output(type<TriggerOAuthConfig>())

export const triggerOAuthConfigureContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'POST' })
  .input(type<{
    params: { provider: string }
    body: { client_params?: TriggerOAuthClientParams, enabled: boolean }
  }>())
  .output(type<{ result: string }>())

export const triggerOAuthDeleteContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'DELETE' })
  .input(type<{ params: { provider: string } }>())
  .output(type<{ result: string }>())

export const triggerOAuthInitiateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/oauth/authorize', method: 'GET' })
  .input(type<{ params: { provider: string } }>())
  .output(type<{ authorization_url: string, subscription_builder: TriggerSubscriptionBuilder }>())
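For context, a short sketch of how these contracts are consumed once they are registered on the console router (illustrative; it assumes the consoleClient and consoleQuery helpers imported from '@/service/client' elsewhere in this diff are generated from consoleRouterContract, plus the usual @tanstack/react-query setup):

import { useQuery } from '@tanstack/react-query'
import { consoleClient, consoleQuery } from '@/service/client'

// Typed request and cache key both derive from the same contract definition.
export const useTriggerProviders = (type?: string) =>
  useQuery({
    queryKey: consoleQuery.triggers.list.queryKey({ input: { query: { type } } }),
    queryFn: () => consoleClient.triggers.list({ query: { type } }),
  })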
@ -1,6 +1,23 @@
import type { InferContractRouterInputs } from '@orpc/contract'
import { bindPartnerStackContract, invoicesContract } from './console/billing'
import { systemFeaturesContract } from './console/system'
import {
  triggerOAuthConfigContract,
  triggerOAuthConfigureContract,
  triggerOAuthDeleteContract,
  triggerOAuthInitiateContract,
  triggerProviderInfoContract,
  triggersContract,
  triggerSubscriptionBuildContract,
  triggerSubscriptionBuilderCreateContract,
  triggerSubscriptionBuilderLogsContract,
  triggerSubscriptionBuilderUpdateContract,
  triggerSubscriptionBuilderVerifyUpdateContract,
  triggerSubscriptionDeleteContract,
  triggerSubscriptionsContract,
  triggerSubscriptionUpdateContract,
  triggerSubscriptionVerifyContract,
} from './console/trigger'
import { trialAppDatasetsContract, trialAppInfoContract, trialAppParametersContract, trialAppWorkflowsContract } from './console/try-app'
import { collectionPluginsContract, collectionsContract, searchAdvancedContract } from './marketplace'

@ -24,6 +41,23 @@ export const consoleRouterContract = {
    invoices: invoicesContract,
    bindPartnerStack: bindPartnerStackContract,
  },
  triggers: {
    list: triggersContract,
    providerInfo: triggerProviderInfoContract,
    subscriptions: triggerSubscriptionsContract,
    subscriptionBuilderCreate: triggerSubscriptionBuilderCreateContract,
    subscriptionBuilderUpdate: triggerSubscriptionBuilderUpdateContract,
    subscriptionBuilderVerifyUpdate: triggerSubscriptionBuilderVerifyUpdateContract,
    subscriptionVerify: triggerSubscriptionVerifyContract,
    subscriptionBuild: triggerSubscriptionBuildContract,
    subscriptionDelete: triggerSubscriptionDeleteContract,
    subscriptionUpdate: triggerSubscriptionUpdateContract,
    subscriptionBuilderLogs: triggerSubscriptionBuilderLogsContract,
    oauthConfig: triggerOAuthConfigContract,
    oauthConfigure: triggerOAuthConfigureContract,
    oauthDelete: triggerOAuthDeleteContract,
    oauthInitiate: triggerOAuthInitiateContract,
  },
}

export type ConsoleInputs = InferContractRouterInputs<typeof consoleRouterContract>
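Because ConsoleInputs is inferred from the router, the trigger routes added above immediately get typed inputs. A small illustrative sketch (the import path and helper below are assumptions for illustration, not part of the diff):

import type { ConsoleInputs } from '@/contract'

// Input type of the triggers list route, as inferred from triggersContract.
type ListTriggersInput = ConsoleInputs['triggers']['list'] // { query?: { type?: string } }

// Hypothetical helper that only accepts a well-typed input for that route.
const buildListTriggersInput = (type?: string): ListTriggersInput => ({ query: { type } })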
@ -1,7 +1,7 @@
{
  "name": "dify-web",
  "type": "module",
  "version": "1.12.0",
  "version": "1.12.1",
  "private": true,
  "packageManager": "pnpm@10.27.0+sha512.72d699da16b1179c14ba9e64dc71c9a40988cbdc65c264cb0e489db7de917f20dcf4d64d8723625f2969ba52d4b7e2a1170682d9ac2a5dcaeaab732b7e16f04a",
  "imports": {
@ -10,17 +10,14 @@ import type {
} from '@/app/components/workflow/block-selector/types'
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { CollectionType } from '@/app/components/tools/types'
import { del, get, post } from './base'
import { consoleClient, consoleQuery } from '@/service/client'
import { get, post } from './base'
import { useInvalid } from './use-base'

const NAME_SPACE = 'triggers'

// Trigger Provider Service - Provider ID Format: plugin_id/provider_name

// Convert backend API response to frontend ToolWithProvider format
const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): TriggerWithProvider => {
  return {
    // Collection fields
    id: provider.plugin_id || provider.name,
    name: provider.name,
    author: provider.author,

@ -58,12 +55,9 @@ const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): Trigg
      labels: provider.tags || [],
      output_schema: event.output_schema || {},
    })),

    // Trigger-specific schema fields
    subscription_constructor: provider.subscription_constructor,
    subscription_schema: provider.subscription_schema,
    supported_creation_methods: provider.supported_creation_methods,

    meta: {
      version: '1.0',
    },
@ -72,22 +66,20 @@ const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): Trigg

export const useAllTriggerPlugins = (enabled = true) => {
  return useQuery<TriggerWithProvider[]>({
    queryKey: [NAME_SPACE, 'all'],
    queryKey: consoleQuery.triggers.list.queryKey({ input: {} }),
    queryFn: async () => {
      const response = await get<TriggerProviderApiEntity[]>('/workspaces/current/triggers')
      const response = await consoleClient.triggers.list({})
      return response.map(convertToTriggerWithProvider)
    },
    enabled,
    staleTime: 0,
    gcTime: 0,
  })
}

export const useTriggerPluginsByType = (triggerType: string, enabled = true) => {
  return useQuery<TriggerWithProvider[]>({
    queryKey: [NAME_SPACE, 'byType', triggerType],
    queryKey: consoleQuery.triggers.list.queryKey({ input: { query: { type: triggerType } } }),
    queryFn: async () => {
      const response = await get<TriggerProviderApiEntity[]>(`/workspaces/current/triggers?type=${triggerType}`)
      const response = await consoleClient.triggers.list({ query: { type: triggerType } })
      return response.map(convertToTriggerWithProvider)
    },
    enabled: enabled && !!triggerType,
@ -95,25 +87,23 @@ export const useTriggerPluginsByType = (triggerType: string, enabled = true) =>
}

export const useInvalidateAllTriggerPlugins = () => {
  return useInvalid([NAME_SPACE, 'all'])
  return useInvalid(consoleQuery.triggers.list.queryKey({ input: {} }))
}

// ===== Trigger Subscriptions Management =====

export const useTriggerProviderInfo = (provider: string, enabled = true) => {
  return useQuery<TriggerProviderApiEntity>({
    queryKey: [NAME_SPACE, 'provider-info', provider],
    queryFn: () => get<TriggerProviderApiEntity>(`/workspaces/current/trigger-provider/${provider}/info`),
    queryKey: consoleQuery.triggers.providerInfo.queryKey({ input: { params: { provider } } }),
    queryFn: () => consoleClient.triggers.providerInfo({ params: { provider } }),
    enabled: enabled && !!provider,
    staleTime: 0,
    gcTime: 0,
  })
}

export const useTriggerSubscriptions = (provider: string, enabled = true) => {
  return useQuery<TriggerSubscription[]>({
    queryKey: [NAME_SPACE, 'list-subscriptions', provider],
    queryFn: () => get<TriggerSubscription[]>(`/workspaces/current/trigger-provider/${provider}/subscriptions/list`),
    queryKey: consoleQuery.triggers.subscriptions.queryKey({ input: { params: { provider } } }),
    queryFn: () => consoleClient.triggers.subscriptions({ params: { provider } }),
    enabled: enabled && !!provider,
  })
}
@ -122,30 +112,30 @@ export const useInvalidateTriggerSubscriptions = () => {
  const queryClient = useQueryClient()
  return (provider: string) => {
    queryClient.invalidateQueries({
      queryKey: [NAME_SPACE, 'subscriptions', provider],
      queryKey: consoleQuery.triggers.subscriptions.queryKey({ input: { params: { provider } } }),
    })
  }
}

export const useCreateTriggerSubscriptionBuilder = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'create-subscription-builder'],
    mutationKey: consoleQuery.triggers.subscriptionBuilderCreate.mutationKey(),
    mutationFn: (payload: {
      provider: string
      credential_type?: string
    }) => {
      const { provider, ...body } = payload
      return post<{ subscription_builder: TriggerSubscriptionBuilder }>(
        `/workspaces/current/trigger-provider/${provider}/subscriptions/builder/create`,
        { body },
      )
      return consoleClient.triggers.subscriptionBuilderCreate({
        params: { provider },
        body,
      })
    },
  })
}

export const useUpdateTriggerSubscriptionBuilder = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'update-subscription-builder'],
    mutationKey: consoleQuery.triggers.subscriptionBuilderUpdate.mutationKey(),
    mutationFn: (payload: {
      provider: string
      subscriptionBuilderId: string
@ -155,17 +145,17 @@ export const useUpdateTriggerSubscriptionBuilder = () => {
      credentials?: Record<string, unknown>
    }) => {
      const { provider, subscriptionBuilderId, ...body } = payload
      return post<TriggerSubscriptionBuilder>(
        `/workspaces/current/trigger-provider/${provider}/subscriptions/builder/update/${subscriptionBuilderId}`,
        { body },
      )
      return consoleClient.triggers.subscriptionBuilderUpdate({
        params: { provider, subscriptionBuilderId },
        body,
      })
    },
  })
}

export const useVerifyAndUpdateTriggerSubscriptionBuilder = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'verify-and-update-subscription-builder'],
    mutationKey: consoleQuery.triggers.subscriptionBuilderVerifyUpdate.mutationKey(),
    mutationFn: (payload: {
      provider: string
      subscriptionBuilderId: string

@ -183,7 +173,7 @@ export const useVerifyAndUpdateTriggerSubscriptionBuilder = () => {

export const useVerifyTriggerSubscription = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'verify-subscription'],
    mutationKey: consoleQuery.triggers.subscriptionVerify.mutationKey(),
    mutationFn: (payload: {
      provider: string
      subscriptionId: string
@ -208,24 +198,24 @@ export type BuildTriggerSubscriptionPayload = {

export const useBuildTriggerSubscription = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'build-subscription'],
    mutationKey: consoleQuery.triggers.subscriptionBuild.mutationKey(),
    mutationFn: (payload: BuildTriggerSubscriptionPayload) => {
      const { provider, subscriptionBuilderId, ...body } = payload
      return post(
        `/workspaces/current/trigger-provider/${provider}/subscriptions/builder/build/${subscriptionBuilderId}`,
        { body },
      )
      return consoleClient.triggers.subscriptionBuild({
        params: { provider, subscriptionBuilderId },
        body,
      })
    },
  })
}

export const useDeleteTriggerSubscription = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'delete-subscription'],
    mutationKey: consoleQuery.triggers.subscriptionDelete.mutationKey(),
    mutationFn: (subscriptionId: string) => {
      return post<{ result: string }>(
        `/workspaces/current/trigger-provider/${subscriptionId}/subscriptions/delete`,
      )
      return consoleClient.triggers.subscriptionDelete({
        params: { subscriptionId },
      })
    },
  })
}

@ -240,13 +230,13 @@ export type UpdateTriggerSubscriptionPayload = {

export const useUpdateTriggerSubscription = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'update-subscription'],
    mutationKey: consoleQuery.triggers.subscriptionUpdate.mutationKey(),
    mutationFn: (payload: UpdateTriggerSubscriptionPayload) => {
      const { subscriptionId, ...body } = payload
      return post<{ result: string, id: string }>(
        `/workspaces/current/trigger-provider/${subscriptionId}/subscriptions/update`,
        { body },
      )
      return consoleClient.triggers.subscriptionUpdate({
        params: { subscriptionId },
        body,
      })
    },
  })
}
@ -262,10 +252,8 @@ export const useTriggerSubscriptionBuilderLogs = (
  const { enabled = true, refetchInterval = false } = options

  return useQuery<{ logs: TriggerLogEntity[] }>({
    queryKey: [NAME_SPACE, 'subscription-builder-logs', provider, subscriptionBuilderId],
    queryFn: () => get(
      `/workspaces/current/trigger-provider/${provider}/subscriptions/builder/logs/${subscriptionBuilderId}`,
    ),
    queryKey: consoleQuery.triggers.subscriptionBuilderLogs.queryKey({ input: { params: { provider, subscriptionBuilderId } } }),
    queryFn: () => consoleClient.triggers.subscriptionBuilderLogs({ params: { provider, subscriptionBuilderId } }),
    enabled: enabled && !!provider && !!subscriptionBuilderId,
    refetchInterval,
  })

@ -274,8 +262,8 @@ export const useTriggerSubscriptionBuilderLogs = (
// ===== OAuth Management =====
export const useTriggerOAuthConfig = (provider: string, enabled = true) => {
  return useQuery<TriggerOAuthConfig>({
    queryKey: [NAME_SPACE, 'oauth-config', provider],
    queryFn: () => get<TriggerOAuthConfig>(`/workspaces/current/trigger-provider/${provider}/oauth/client`),
    queryKey: consoleQuery.triggers.oauthConfig.queryKey({ input: { params: { provider } } }),
    queryFn: () => consoleClient.triggers.oauthConfig({ params: { provider } }),
    enabled: enabled && !!provider,
  })
}
@ -288,31 +276,31 @@ export type ConfigureTriggerOAuthPayload = {

export const useConfigureTriggerOAuth = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'configure-oauth'],
    mutationKey: consoleQuery.triggers.oauthConfigure.mutationKey(),
    mutationFn: (payload: ConfigureTriggerOAuthPayload) => {
      const { provider, ...body } = payload
      return post<{ result: string }>(
        `/workspaces/current/trigger-provider/${provider}/oauth/client`,
        { body },
      )
      return consoleClient.triggers.oauthConfigure({
        params: { provider },
        body,
      })
    },
  })
}

export const useDeleteTriggerOAuth = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'delete-oauth'],
    mutationKey: consoleQuery.triggers.oauthDelete.mutationKey(),
    mutationFn: (provider: string) => {
      return del<{ result: string }>(
        `/workspaces/current/trigger-provider/${provider}/oauth/client`,
      )
      return consoleClient.triggers.oauthDelete({
        params: { provider },
      })
    },
  })
}

export const useInitiateTriggerOAuth = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'initiate-oauth'],
    mutationKey: consoleQuery.triggers.oauthInitiate.mutationKey(),
    mutationFn: (provider: string) => {
      return get<{ authorization_url: string, subscription_builder: TriggerSubscriptionBuilder }>(
        `/workspaces/current/trigger-provider/${provider}/subscriptions/oauth/authorize`,

@ -336,7 +324,6 @@ export const useTriggerPluginDynamicOptions = (payload: {
  return useQuery<{ options: FormOption[] }>({
    queryKey: [NAME_SPACE, 'dynamic-options', payload.plugin_id, payload.provider, payload.action, payload.parameter, payload.credential_id, payload.credentials, payload.extra],
    queryFn: () => {
      // Use new endpoint with POST when credentials provided (for edit mode)
      if (payload.credentials) {
        return post<{ options: FormOption[] }>(
          '/workspaces/current/plugin/parameters/dynamic-options-with-credentials',

@ -353,7 +340,6 @@ export const useTriggerPluginDynamicOptions = (payload: {
          { silent: true },
        )
      }
      // Use original GET endpoint for normal cases
      return get<{ options: FormOption[] }>(
        '/workspaces/current/plugin/parameters/dynamic-options',
        {

@ -372,7 +358,6 @@ export const useTriggerPluginDynamicOptions = (payload: {
    enabled: enabled && !!payload.plugin_id && !!payload.provider && !!payload.action && !!payload.parameter && !!payload.credential_id,
    retry: 0,
    staleTime: 0,
    gcTime: 0,
  })
}

@ -382,7 +367,7 @@ export const useInvalidateTriggerOAuthConfig = () => {
  const queryClient = useQueryClient()
  return (provider: string) => {
    queryClient.invalidateQueries({
      queryKey: [NAME_SPACE, 'oauth-config', provider],
      queryKey: consoleQuery.triggers.oauthConfig.queryKey({ input: { params: { provider } } }),
    })
  }
}
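A brief illustrative sketch of how the migrated hooks compose in a component (the wrapper hook and its import path below are assumptions for illustration; the two hooks it uses are the ones defined in this file):

import { useCreateTriggerSubscriptionBuilder, useInvalidateTriggerSubscriptions } from '@/service/use-triggers'

export const useStartSubscriptionSetup = (provider: string) => {
  const createBuilder = useCreateTriggerSubscriptionBuilder()
  const invalidateSubscriptions = useInvalidateTriggerSubscriptions()

  return async () => {
    // Kick off a subscription builder for the provider, then refresh the cached subscription list.
    const { subscription_builder } = await createBuilder.mutateAsync({ provider })
    invalidateSubscriptions(provider)
    return subscription_builder
  }
}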
web/types/i18n.d.ts (vendored): 2 lines changed

@ -27,5 +27,3 @@ export type I18nKeysWithPrefix<
> = Prefix extends ''
  ? keyof Resources[NS]
  : Extract<keyof Resources[NS], `${Prefix}${string}`>

type A = I18nKeysWithPrefix<'billing'>