Compare commits


5 Commits

27 changed files with 1704 additions and 356 deletions

View File

@@ -1,27 +1,14 @@
from typing import Literal
from uuid import UUID
from flask import request
from flask_restx import Namespace, Resource, fields, marshal_with
from pydantic import BaseModel, Field
from werkzeug.exceptions import Forbidden
from controllers.common.schema import register_schema_models
from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from controllers.fastopenapi import console_router
from libs.login import current_account_with_tenant, login_required
from services.tag_service import TagService
dataset_tag_fields = {
"id": fields.String,
"name": fields.String,
"type": fields.String,
"binding_count": fields.String,
}
def build_dataset_tag_fields(api_or_ns: Namespace):
return api_or_ns.model("DataSetTag", dataset_tag_fields)
class TagBasePayload(BaseModel):
name: str = Field(description="Tag name", min_length=1, max_length=50)
@@ -45,115 +32,129 @@ class TagListQueryParam(BaseModel):
keyword: str | None = Field(None, description="Search keyword")
register_schema_models(
console_ns,
TagBasePayload,
TagBindingPayload,
TagBindingRemovePayload,
TagListQueryParam,
)
class TagResponse(BaseModel):
id: str = Field(description="Tag ID")
name: str = Field(description="Tag name")
type: str = Field(description="Tag type")
binding_count: int = Field(description="Number of bindings")
class TagBindingResult(BaseModel):
result: Literal["success"] = Field(description="Operation result", examples=["success"])
@console_router.get(
"/tags",
response_model=list[TagResponse],
tags=["console"],
)
@setup_required
@login_required
@account_initialization_required
def list_tags(query: TagListQueryParam) -> list[TagResponse]:
_, current_tenant_id = current_account_with_tenant()
tags = TagService.get_tags(query.type, current_tenant_id, query.keyword)
return [
TagResponse(
id=tag.id,
name=tag.name,
type=tag.type,
binding_count=int(tag.binding_count),
)
for tag in tags
]
@console_ns.route("/tags")
class TagListApi(Resource):
@setup_required
@login_required
@account_initialization_required
@console_ns.doc(
params={"type": 'Tag type filter. Can be "knowledge" or "app".', "keyword": "Search keyword for tag name."}
)
@marshal_with(dataset_tag_fields)
def get(self):
_, current_tenant_id = current_account_with_tenant()
raw_args = request.args.to_dict()
param = TagListQueryParam.model_validate(raw_args)
tags = TagService.get_tags(param.type, current_tenant_id, param.keyword)
@console_router.post(
"/tags",
response_model=TagResponse,
tags=["console"],
)
@setup_required
@login_required
@account_initialization_required
def create_tag(payload: TagBasePayload) -> TagResponse:
current_user, _ = current_account_with_tenant()
# The role of the current user in the tag table must be admin, owner, or editor
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
return tags, 200
tag = TagService.save_tags(payload.model_dump())
@console_ns.expect(console_ns.models[TagBasePayload.__name__])
@setup_required
@login_required
@account_initialization_required
def post(self):
current_user, _ = current_account_with_tenant()
# The role of the current user in the ta table must be admin, owner, or editor
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
payload = TagBasePayload.model_validate(console_ns.payload or {})
tag = TagService.save_tags(payload.model_dump())
response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": 0}
return response, 200
return TagResponse(id=tag.id, name=tag.name, type=tag.type, binding_count=0)
@console_ns.route("/tags/<uuid:tag_id>")
class TagUpdateDeleteApi(Resource):
@console_ns.expect(console_ns.models[TagBasePayload.__name__])
@setup_required
@login_required
@account_initialization_required
def patch(self, tag_id):
current_user, _ = current_account_with_tenant()
tag_id = str(tag_id)
# The role of the current user in the ta table must be admin, owner, or editor
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
@console_router.patch(
"/tags/<uuid:tag_id>",
response_model=TagResponse,
tags=["console"],
)
@setup_required
@login_required
@account_initialization_required
def update_tag(tag_id: UUID, payload: TagBasePayload) -> TagResponse:
current_user, _ = current_account_with_tenant()
tag_id_str = str(tag_id)
# The role of the current user in the ta table must be admin, owner, or editor
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
payload = TagBasePayload.model_validate(console_ns.payload or {})
tag = TagService.update_tags(payload.model_dump(), tag_id)
tag = TagService.update_tags(payload.model_dump(), tag_id_str)
binding_count = TagService.get_tag_binding_count(tag_id)
binding_count = TagService.get_tag_binding_count(tag_id_str)
response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": binding_count}
return response, 200
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
def delete(self, tag_id):
tag_id = str(tag_id)
TagService.delete_tag(tag_id)
return 204
return TagResponse(id=tag.id, name=tag.name, type=tag.type, binding_count=binding_count)
@console_ns.route("/tag-bindings/create")
class TagBindingCreateApi(Resource):
@console_ns.expect(console_ns.models[TagBindingPayload.__name__])
@setup_required
@login_required
@account_initialization_required
def post(self):
current_user, _ = current_account_with_tenant()
# The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
@console_router.delete(
"/tags/<uuid:tag_id>",
tags=["console"],
status_code=204,
)
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
def delete_tag(tag_id: UUID) -> None:
tag_id_str = str(tag_id)
payload = TagBindingPayload.model_validate(console_ns.payload or {})
TagService.save_tag_binding(payload.model_dump())
return {"result": "success"}, 200
TagService.delete_tag(tag_id_str)
@console_ns.route("/tag-bindings/remove")
class TagBindingDeleteApi(Resource):
@console_ns.expect(console_ns.models[TagBindingRemovePayload.__name__])
@setup_required
@login_required
@account_initialization_required
def post(self):
current_user, _ = current_account_with_tenant()
# The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
@console_router.post(
"/tag-bindings/create",
response_model=TagBindingResult,
tags=["console"],
)
@setup_required
@login_required
@account_initialization_required
def create_tag_binding(payload: TagBindingPayload) -> TagBindingResult:
current_user, _ = current_account_with_tenant()
# The role of the current user in the tag table must be admin, owner, editor, or dataset_operator
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
payload = TagBindingRemovePayload.model_validate(console_ns.payload or {})
TagService.delete_tag_binding(payload.model_dump())
TagService.save_tag_binding(payload.model_dump())
return {"result": "success"}, 200
return TagBindingResult(result="success")
@console_router.post(
"/tag-bindings/remove",
response_model=TagBindingResult,
tags=["console"],
)
@setup_required
@login_required
@account_initialization_required
def delete_tag_binding(payload: TagBindingRemovePayload) -> TagBindingResult:
current_user, _ = current_account_with_tenant()
# The role of the current user in the tag table must be admin, owner, editor, or dataset_operator
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
TagService.delete_tag_binding(payload.model_dump())
return TagBindingResult(result="success")

View File

@@ -1,6 +1,6 @@
[project]
name = "dify-api"
version = "1.12.0"
version = "1.11.4"
requires-python = ">=3.11,<3.13"
dependencies = [

View File

@@ -24,7 +24,7 @@ class TagService:
escaped_keyword = escape_like_pattern(keyword)
query = query.where(sa.and_(Tag.name.ilike(f"%{escaped_keyword}%", escape="\\")))
query = query.group_by(Tag.id, Tag.type, Tag.name, Tag.created_at)
results: list = query.order_by(Tag.created_at.desc()).all()
results = query.order_by(Tag.created_at.desc()).all()
return results
@staticmethod

View File

@@ -0,0 +1,222 @@
import builtins
import contextlib
import importlib
import sys
from types import SimpleNamespace
from unittest.mock import MagicMock, patch
import pytest
from flask import Flask
from flask.views import MethodView
from extensions import ext_fastopenapi
from extensions.ext_database import db
@pytest.fixture
def app():
app = Flask(__name__)
app.config["TESTING"] = True
app.config["SECRET_KEY"] = "test-secret"
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
db.init_app(app)
return app
@pytest.fixture(autouse=True)
def fix_method_view_issue(monkeypatch):
if not hasattr(builtins, "MethodView"):
monkeypatch.setattr(builtins, "MethodView", MethodView, raising=False)
def _create_isolated_router():
import controllers.fastopenapi
router_class = type(controllers.fastopenapi.console_router)
return router_class()
@contextlib.contextmanager
def _patch_auth_and_router(temp_router):
def noop(func):
return func
default_user = MagicMock(has_edit_permission=True, is_dataset_editor=False)
with (
patch("controllers.fastopenapi.console_router", temp_router),
patch("extensions.ext_fastopenapi.console_router", temp_router),
patch("controllers.console.wraps.setup_required", side_effect=noop),
patch("libs.login.login_required", side_effect=noop),
patch("controllers.console.wraps.account_initialization_required", side_effect=noop),
patch("controllers.console.wraps.edit_permission_required", side_effect=noop),
patch("libs.login.current_account_with_tenant", return_value=(default_user, "tenant-id")),
patch("configs.dify_config.EDITION", "CLOUD"),
):
import extensions.ext_fastopenapi
importlib.reload(extensions.ext_fastopenapi)
yield
def _force_reload_module(target_module: str, alias_module: str):
if target_module in sys.modules:
del sys.modules[target_module]
if alias_module in sys.modules:
del sys.modules[alias_module]
module = importlib.import_module(target_module)
sys.modules[alias_module] = sys.modules[target_module]
return module
def _dedupe_routes(router):
seen = set()
unique_routes = []
for path, method, endpoint in reversed(router.get_routes()):
key = (path, method, endpoint.__name__)
if key in seen:
continue
seen.add(key)
unique_routes.append((path, method, endpoint))
router._routes = list(reversed(unique_routes))
def _cleanup_modules(target_module: str, alias_module: str):
if target_module in sys.modules:
del sys.modules[target_module]
if alias_module in sys.modules:
del sys.modules[alias_module]
@pytest.fixture
def mock_tags_module_env():
target_module = "controllers.console.tag.tags"
alias_module = "api.controllers.console.tag.tags"
temp_router = _create_isolated_router()
try:
with _patch_auth_and_router(temp_router):
tags_module = _force_reload_module(target_module, alias_module)
_dedupe_routes(temp_router)
yield tags_module
finally:
_cleanup_modules(target_module, alias_module)
def test_list_tags_success(app: Flask, mock_tags_module_env):
# Arrange
tag = SimpleNamespace(id="tag-1", name="Alpha", type="app", binding_count=2)
with patch("controllers.console.tag.tags.TagService.get_tags", return_value=[tag]):
ext_fastopenapi.init_app(app)
client = app.test_client()
# Act
response = client.get("/console/api/tags?type=app&keyword=Alpha")
# Assert
assert response.status_code == 200
assert response.get_json() == [
{"id": "tag-1", "name": "Alpha", "type": "app", "binding_count": 2},
]
def test_create_tag_success(app: Flask, mock_tags_module_env):
# Arrange
tag = SimpleNamespace(id="tag-2", name="Beta", type="app")
with patch("controllers.console.tag.tags.TagService.save_tags", return_value=tag) as mock_save:
ext_fastopenapi.init_app(app)
client = app.test_client()
# Act
response = client.post("/console/api/tags", json={"name": "Beta", "type": "app"})
# Assert
assert response.status_code == 200
assert response.get_json() == {
"id": "tag-2",
"name": "Beta",
"type": "app",
"binding_count": 0,
}
mock_save.assert_called_once_with({"name": "Beta", "type": "app"})
def test_update_tag_success(app: Flask, mock_tags_module_env):
# Arrange
tag = SimpleNamespace(id="tag-3", name="Gamma", type="app")
with (
patch("controllers.console.tag.tags.TagService.update_tags", return_value=tag) as mock_update,
patch("controllers.console.tag.tags.TagService.get_tag_binding_count", return_value=4),
):
ext_fastopenapi.init_app(app)
client = app.test_client()
# Act
response = client.patch(
"/console/api/tags/11111111-1111-1111-1111-111111111111",
json={"name": "Gamma", "type": "app"},
)
# Assert
assert response.status_code == 200
assert response.get_json() == {
"id": "tag-3",
"name": "Gamma",
"type": "app",
"binding_count": 4,
}
mock_update.assert_called_once_with(
{"name": "Gamma", "type": "app"},
"11111111-1111-1111-1111-111111111111",
)
def test_delete_tag_success(app: Flask, mock_tags_module_env):
# Arrange
with patch("controllers.console.tag.tags.TagService.delete_tag") as mock_delete:
ext_fastopenapi.init_app(app)
client = app.test_client()
# Act
response = client.delete("/console/api/tags/11111111-1111-1111-1111-111111111111")
# Assert
assert response.status_code == 204
mock_delete.assert_called_once_with("11111111-1111-1111-1111-111111111111")
def test_create_tag_binding_success(app: Flask, mock_tags_module_env):
# Arrange
payload = {"tag_ids": ["tag-1", "tag-2"], "target_id": "target-1", "type": "app"}
with patch("controllers.console.tag.tags.TagService.save_tag_binding") as mock_bind:
ext_fastopenapi.init_app(app)
client = app.test_client()
# Act
response = client.post("/console/api/tag-bindings/create", json=payload)
# Assert
assert response.status_code == 200
assert response.get_json() == {"result": "success"}
mock_bind.assert_called_once_with(payload)
def test_delete_tag_binding_success(app: Flask, mock_tags_module_env):
# Arrange
payload = {"tag_id": "tag-1", "target_id": "target-1", "type": "app"}
with patch("controllers.console.tag.tags.TagService.delete_tag_binding") as mock_unbind:
ext_fastopenapi.init_app(app)
client = app.test_client()
# Act
response = client.post("/console/api/tag-bindings/remove", json=payload)
# Assert
assert response.status_code == 200
assert response.get_json() == {"result": "success"}
mock_unbind.assert_called_once_with(payload)

api/uv.lock generated
View File

@@ -1368,7 +1368,7 @@ wheels = [
[[package]]
name = "dify-api"
version = "1.12.0"
version = "1.11.4"
source = { virtual = "." }
dependencies = [
{ name = "aliyun-log-python-sdk" },

View File

@@ -1,5 +1,5 @@
#!/bin/bash
set -euxo pipefail
set -x
SCRIPT_DIR="$(dirname "$(realpath "$0")")"
cd "$SCRIPT_DIR/../.."

View File

@@ -21,7 +21,7 @@ services:
# API service
api:
image: langgenius/dify-api:1.12.0
image: langgenius/dify-api:1.11.4
restart: always
environment:
# Use the shared environment variables.
@@ -63,7 +63,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.12.0
image: langgenius/dify-api:1.11.4
restart: always
environment:
# Use the shared environment variables.
@@ -102,7 +102,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.12.0
image: langgenius/dify-api:1.11.4
restart: always
environment:
# Use the shared environment variables.
@@ -132,7 +132,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.12.0
image: langgenius/dify-web:1.11.4
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -662,14 +662,13 @@ services:
- "${IRIS_SUPER_SERVER_PORT:-1972}:1972"
- "${IRIS_WEB_SERVER_PORT:-52773}:52773"
volumes:
- ./volumes/iris:/durable
- ./volumes/iris:/opt/iris
- ./iris/iris-init.script:/iris-init.script
- ./iris/docker-entrypoint.sh:/custom-entrypoint.sh
entrypoint: ["/custom-entrypoint.sh"]
tty: true
environment:
TZ: ${IRIS_TIMEZONE:-UTC}
ISC_DATA_DIRECTORY: /durable/iris
# Oracle vector database
oracle:

View File

@@ -707,7 +707,7 @@ services:
# API service
api:
image: langgenius/dify-api:1.12.0
image: langgenius/dify-api:1.11.4
restart: always
environment:
# Use the shared environment variables.
@@ -749,7 +749,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.12.0
image: langgenius/dify-api:1.11.4
restart: always
environment:
# Use the shared environment variables.
@@ -788,7 +788,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.12.0
image: langgenius/dify-api:1.11.4
restart: always
environment:
# Use the shared environment variables.
@@ -818,7 +818,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.12.0
image: langgenius/dify-web:1.11.4
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -1348,14 +1348,13 @@ services:
- "${IRIS_SUPER_SERVER_PORT:-1972}:1972"
- "${IRIS_WEB_SERVER_PORT:-52773}:52773"
volumes:
- ./volumes/iris:/durable
- ./volumes/iris:/opt/iris
- ./iris/iris-init.script:/iris-init.script
- ./iris/docker-entrypoint.sh:/custom-entrypoint.sh
entrypoint: ["/custom-entrypoint.sh"]
tty: true
environment:
TZ: ${IRIS_TIMEZONE:-UTC}
ISC_DATA_DIRECTORY: /durable/iris
# Oracle vector database
oracle:

View File

@@ -1,33 +1,15 @@
#!/bin/bash
set -e
# IRIS configuration flag file (stored in durable directory to persist with data)
IRIS_CONFIG_DONE="/durable/.iris-configured"
# Function to wait for IRIS to be ready
wait_for_iris() {
echo "Waiting for IRIS to be ready..."
local max_attempts=30
local attempt=1
while [ "$attempt" -le "$max_attempts" ]; do
if iris qlist IRIS 2>/dev/null | grep -q "running"; then
echo "IRIS is ready."
return 0
fi
echo "Attempt $attempt/$max_attempts: IRIS not ready yet, waiting..."
sleep 2
attempt=$((attempt + 1))
done
echo "ERROR: IRIS failed to start within expected time." >&2
return 1
}
# IRIS configuration flag file
IRIS_CONFIG_DONE="/opt/iris/.iris-configured"
# Function to configure IRIS
configure_iris() {
echo "Configuring IRIS for first-time setup..."
# Wait for IRIS to be fully started
wait_for_iris
sleep 5
# Execute the initialization script
iris session IRIS < /iris-init.script

View File

@@ -3,7 +3,7 @@
import type { ReactNode } from 'react'
import Cookies from 'js-cookie'
import { usePathname, useRouter, useSearchParams } from 'next/navigation'
import { parseAsBoolean, useQueryState } from 'nuqs'
import { parseAsString, useQueryState } from 'nuqs'
import { useCallback, useEffect, useState } from 'react'
import {
EDUCATION_VERIFY_URL_SEARCHPARAMS_ACTION,
@@ -28,7 +28,7 @@ export const AppInitializer = ({
const [init, setInit] = useState(false)
const [oauthNewUser, setOauthNewUser] = useQueryState(
'oauth_new_user',
parseAsBoolean.withOptions({ history: 'replace' }),
parseAsString.withOptions({ history: 'replace' }),
)
const isSetupFinished = useCallback(async () => {
@@ -46,7 +46,7 @@
(async () => {
const action = searchParams.get('action')
if (oauthNewUser) {
if (oauthNewUser === 'true') {
let utmInfo = null
const utmInfoStr = Cookies.get('utm_info')
if (utmInfoStr) {

View File

@@ -62,19 +62,19 @@ const AppCard = ({
{app.description}
</div>
</div>
{(canCreate || isTrialApp) && (
{canCreate && (
<div className={cn('absolute bottom-0 left-0 right-0 hidden bg-gradient-to-t from-components-panel-gradient-2 from-[60.27%] to-transparent p-4 pt-8 group-hover:flex')}>
<div className={cn('grid h-8 w-full grid-cols-1 items-center space-x-2', canCreate && 'grid-cols-2')}>
{canCreate && (
<Button variant="primary" onClick={() => onCreate()}>
<PlusIcon className="mr-1 h-4 w-4" />
<span className="text-xs">{t('newApp.useTemplate', { ns: 'app' })}</span>
<div className={cn('grid h-8 w-full grid-cols-1 items-center space-x-2', isTrialApp && 'grid-cols-2')}>
<Button variant="primary" onClick={() => onCreate()}>
<PlusIcon className="mr-1 h-4 w-4" />
<span className="text-xs">{t('newApp.useTemplate', { ns: 'app' })}</span>
</Button>
{isTrialApp && (
<Button onClick={showTryAPPPanel(app.app_id)}>
<RiInformation2Line className="mr-1 size-4" />
<span>{t('appCard.try', { ns: 'explore' })}</span>
</Button>
)}
<Button onClick={showTryAPPPanel(app.app_id)}>
<RiInformation2Line className="mr-1 size-4" />
<span>{t('appCard.try', { ns: 'explore' })}</span>
</Button>
</div>
</div>
)}

View File

@@ -3,6 +3,8 @@ import type { FC, ReactNode } from 'react'
import type { SliceProps } from './type'
import { autoUpdate, flip, FloatingFocusManager, offset, shift, useDismiss, useFloating, useHover, useInteractions, useRole } from '@floating-ui/react'
import { RiDeleteBinLine } from '@remixicon/react'
// @ts-expect-error no types available
import lineClamp from 'line-clamp'
import { useState } from 'react'
import ActionButton, { ActionButtonState } from '@/app/components/base/action-button'
import { cn } from '@/utils/classnames'
@@ -56,8 +58,12 @@ export const EditSlice: FC<EditSliceProps> = (props) => {
<>
<SliceContainer
{...rest}
className={cn('mr-0 line-clamp-4 block', className)}
ref={refs.setReference}
className={cn('mr-0 block', className)}
ref={(ref) => {
refs.setReference(ref)
if (ref)
lineClamp(ref, 4)
}}
{...getReferenceProps()}
>
<SliceLabel

View File

@@ -74,15 +74,11 @@ const AppCard = ({
</div>
{isExplore && (canCreate || isTrialApp) && (
<div className={cn('absolute bottom-0 left-0 right-0 hidden bg-gradient-to-t from-components-panel-gradient-2 from-[60.27%] to-transparent p-4 pt-8 group-hover:flex')}>
<div className={cn('grid h-8 w-full grid-cols-1 space-x-2', canCreate && 'grid-cols-2')}>
{
canCreate && (
<Button variant="primary" className="h-7" onClick={() => onCreate()}>
<PlusIcon className="mr-1 h-4 w-4" />
<span className="text-xs">{t('appCard.addToWorkspace', { ns: 'explore' })}</span>
</Button>
)
}
<div className={cn('grid h-8 w-full grid-cols-2 space-x-2')}>
<Button variant="primary" className="h-7" onClick={() => onCreate()}>
<PlusIcon className="mr-1 h-4 w-4" />
<span className="text-xs">{t('appCard.addToWorkspace', { ns: 'explore' })}</span>
</Button>
<Button className="h-7" onClick={showTryAPPPanel(app.app_id)}>
<RiInformation2Line className="mr-1 size-4" />
<span>{t('appCard.try', { ns: 'explore' })}</span>

View File

@@ -16,14 +16,6 @@ vi.mock('react-i18next', () => ({
}),
}))
vi.mock('@/config', async (importOriginal) => {
const actual = await importOriginal() as object
return {
...actual,
IS_CLOUD_EDITION: true,
}
})
const mockUseGetTryAppInfo = vi.fn()
vi.mock('@/service/use-try-app', () => ({

View File

@@ -14,14 +14,6 @@ vi.mock('react-i18next', () => ({
}),
}))
vi.mock('@/config', async (importOriginal) => {
const actual = await importOriginal() as object
return {
...actual,
IS_CLOUD_EDITION: true,
}
})
describe('Tab', () => {
afterEach(() => {
cleanup()

View File

@@ -1,5 +1,5 @@
import type { TriggerSubscriptionBuilder } from '@/app/components/workflow/block-selector/types'
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import * as React from 'react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
// Import after mocks
@@ -821,9 +821,6 @@ describe('CommonCreateModal', () => {
expect(mockCreateBuilder).toHaveBeenCalled()
})
// Flush pending state updates from createBuilder promise resolution
await act(async () => {})
const input = screen.getByTestId('form-field-webhook_url')
fireEvent.change(input, { target: { value: 'https://example.com/webhook' } })

View File

@@ -1,5 +1,5 @@
import type { PropsWithChildren } from 'react'
import { act, cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { DSLImportStatus } from '@/models/app'
import UpdateDSLModal from './update-dsl-modal'
@@ -140,13 +140,13 @@ class MockFileReader {
onload: ((e: { target: { result: string | null } }) => void) | null = null
readAsText(_file: File) {
// Simulate async file reading using queueMicrotask for more reliable async behavior
queueMicrotask(() => {
// Simulate async file reading
setTimeout(() => {
this.result = 'test file content'
if (this.onload) {
this.onload({ target: { result: this.result } })
}
})
}, 0)
}
}
@@ -174,7 +174,6 @@ describe('UpdateDSLModal', () => {
status: DSLImportStatus.COMPLETED,
pipeline_id: 'test-pipeline-id',
})
mockHandleCheckPluginDependencies.mockResolvedValue(undefined)
// Mock FileReader
originalFileReader = globalThis.FileReader
@@ -473,14 +472,14 @@ describe('UpdateDSLModal', () => {
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
expect(importButton).not.toBeDisabled()
}, { timeout: 1000 })
})
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await waitFor(() => {
expect(mockOnImport).toHaveBeenCalled()
}, { timeout: 1000 })
})
})
it('should show warning notification on import with warnings', async () => {
@ -648,8 +647,6 @@ describe('UpdateDSLModal', () => {
})
it('should show error modal when import status is PENDING', async () => {
vi.useFakeTimers({ shouldAdvanceTime: true })
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.PENDING,
@@ -662,29 +659,20 @@ describe('UpdateDSLModal', () => {
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
// Flush microtasks scheduled by the FileReader mock (which uses queueMicrotask)
await new Promise<void>(resolve => queueMicrotask(resolve))
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
expect(importButton).not.toBeDisabled()
})
const importButton = screen.getByText('common.overwriteAndImport')
expect(importButton).not.toBeDisabled()
await act(async () => {
fireEvent.click(importButton)
// Flush the promise resolution from mockImportDSL
await Promise.resolve()
// Advance past the 300ms setTimeout in the component
await vi.advanceTimersByTimeAsync(350)
})
fireEvent.click(importButton)
// Wait for the error modal to be shown after setTimeout
await waitFor(() => {
expect(screen.getByText('newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
})
vi.useRealTimers()
}, { timeout: 500 })
})
it('should show version info in error modal', async () => {

View File

@@ -61,12 +61,6 @@ vi.mock('@/service/use-pipeline', () => ({
}),
}))
// Mock download utility
const mockDownloadBlob = vi.fn()
vi.mock('@/utils/download', () => ({
downloadBlob: (...args: unknown[]) => mockDownloadBlob(...args),
}))
// Mock workflow service
const mockFetchWorkflowDraft = vi.fn()
vi.mock('@/service/workflow', () => ({
@@ -83,9 +77,33 @@ vi.mock('@/app/components/workflow/constants', () => ({
// ============================================================================
describe('useDSL', () => {
let mockLink: { href: string, download: string, click: ReturnType<typeof vi.fn> }
let originalCreateElement: typeof document.createElement
let mockCreateObjectURL: ReturnType<typeof vi.spyOn>
let mockRevokeObjectURL: ReturnType<typeof vi.spyOn>
beforeEach(() => {
vi.clearAllMocks()
// Create a proper mock link element
mockLink = {
href: '',
download: '',
click: vi.fn(),
}
// Save original and mock selectively - only intercept 'a' elements
originalCreateElement = document.createElement.bind(document)
document.createElement = vi.fn((tagName: string) => {
if (tagName === 'a') {
return mockLink as unknown as HTMLElement
}
return originalCreateElement(tagName)
}) as typeof document.createElement
mockCreateObjectURL = vi.spyOn(URL, 'createObjectURL').mockReturnValue('blob:test-url')
mockRevokeObjectURL = vi.spyOn(URL, 'revokeObjectURL').mockImplementation(() => {})
// Default store state
mockWorkflowStoreGetState.mockReturnValue({
pipelineId: 'test-pipeline-id',
@@ -100,6 +118,9 @@ describe('useDSL', () => {
})
afterEach(() => {
document.createElement = originalCreateElement
mockCreateObjectURL.mockRestore()
mockRevokeObjectURL.mockRestore()
vi.clearAllMocks()
})
@@ -166,7 +187,9 @@ describe('useDSL', () => {
await result.current.handleExportDSL()
})
expect(mockDownloadBlob).toHaveBeenCalled()
expect(document.createElement).toHaveBeenCalledWith('a')
expect(mockCreateObjectURL).toHaveBeenCalled()
expect(mockRevokeObjectURL).toHaveBeenCalledWith('blob:test-url')
})
it('should use correct file extension for download', async () => {
@@ -176,25 +199,17 @@ describe('useDSL', () => {
await result.current.handleExportDSL()
})
expect(mockDownloadBlob).toHaveBeenCalledWith(
expect.objectContaining({
fileName: 'Test Knowledge Base.pipeline',
}),
)
expect(mockLink.download).toBe('Test Knowledge Base.pipeline')
})
it('should pass blob data to downloadBlob', async () => {
it('should trigger download click', async () => {
const { result } = renderHook(() => useDSL())
await act(async () => {
await result.current.handleExportDSL()
})
expect(mockDownloadBlob).toHaveBeenCalledWith(
expect.objectContaining({
data: expect.any(Blob),
}),
)
expect(mockLink.click).toHaveBeenCalled()
})
it('should show error notification on export failure', async () => {

View File

@@ -172,9 +172,6 @@ describe('EditCustomCollectionModal', () => {
expect(parseParamsSchemaMock).toHaveBeenCalledWith('{}')
})
// Flush pending state updates from parseParamsSchema promise resolution
await act(async () => {})
await act(async () => {
fireEvent.click(screen.getByText('common.operation.save'))
})
@@ -187,10 +184,6 @@ describe('EditCustomCollectionModal', () => {
credentials: {
auth_type: 'none',
},
icon: {
content: '🕵️',
background: '#FEF7C3',
},
labels: [],
}))
expect(toastNotifySpy).not.toHaveBeenCalled()

View File

@@ -0,0 +1,705 @@
/**
* Test Suite for useNodesSyncDraft Hook
*
* PURPOSE:
* This hook handles syncing workflow draft to the server. The key fix being tested
* is the error handling behavior when `draft_workflow_not_sync` error occurs.
*
* MULTI-TAB PROBLEM SCENARIO:
* 1. User opens the same workflow in Tab A and Tab B (both have hash: v1)
* 2. Tab A saves successfully, server returns new hash: v2
* 3. Tab B tries to save with old hash: v1, server returns 400 error with code
* 'draft_workflow_not_sync'
* 4. BEFORE FIX: handleRefreshWorkflowDraft() was called without args, which fetched
* draft AND overwrote canvas - user lost unsaved changes in Tab B
* 5. AFTER FIX: handleRefreshWorkflowDraft(true) is called, which fetches draft but
* only updates hash (notUpdateCanvas=true), preserving user's canvas changes
*
* TESTING STRATEGY:
* We don't simulate actual tab switching UI behavior. Instead, we mock the API to
* return `draft_workflow_not_sync` error and verify:
* - The hook calls handleRefreshWorkflowDraft(true) - not handleRefreshWorkflowDraft()
* - This ensures canvas data is preserved while hash is updated for retry
*
* This is behavior-driven testing - we verify "what the code does when receiving
* specific API errors" rather than simulating complete user interaction flows.
* True multi-tab integration testing would require E2E frameworks like Playwright.
*/
import { act, renderHook, waitFor } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { useNodesSyncDraft } from './use-nodes-sync-draft'
// Mock reactflow store
const mockGetNodes = vi.fn()
type MockEdge = {
id: string
source: string
target: string
data: Record<string, unknown>
}
const mockStoreState: {
getNodes: ReturnType<typeof vi.fn>
edges: MockEdge[]
transform: number[]
} = {
getNodes: mockGetNodes,
edges: [],
transform: [0, 0, 1],
}
vi.mock('reactflow', () => ({
useStoreApi: () => ({
getState: () => mockStoreState,
}),
}))
// Mock features store
const mockFeaturesState = {
features: {
opening: { enabled: false, opening_statement: '', suggested_questions: [] },
suggested: {},
text2speech: {},
speech2text: {},
citation: {},
moderation: {},
file: {},
},
}
vi.mock('@/app/components/base/features/hooks', () => ({
useFeaturesStore: () => ({
getState: () => mockFeaturesState,
}),
}))
// Mock workflow service
const mockSyncWorkflowDraft = vi.fn()
vi.mock('@/service/workflow', () => ({
syncWorkflowDraft: (...args: unknown[]) => mockSyncWorkflowDraft(...args),
}))
// Mock useNodesReadOnly
const mockGetNodesReadOnly = vi.fn()
vi.mock('@/app/components/workflow/hooks/use-workflow', () => ({
useNodesReadOnly: () => ({
getNodesReadOnly: mockGetNodesReadOnly,
}),
}))
// Mock useSerialAsyncCallback - pass through the callback
vi.mock('@/app/components/workflow/hooks/use-serial-async-callback', () => ({
useSerialAsyncCallback: (callback: (...args: unknown[]) => unknown) => callback,
}))
// Mock workflow store
const mockSetSyncWorkflowDraftHash = vi.fn()
const mockSetDraftUpdatedAt = vi.fn()
const createMockWorkflowStoreState = (overrides = {}) => ({
appId: 'test-app-id',
conversationVariables: [],
environmentVariables: [],
syncWorkflowDraftHash: 'current-hash-123',
isWorkflowDataLoaded: true,
setSyncWorkflowDraftHash: mockSetSyncWorkflowDraftHash,
setDraftUpdatedAt: mockSetDraftUpdatedAt,
...overrides,
})
const mockWorkflowStoreGetState = vi.fn()
vi.mock('@/app/components/workflow/store', () => ({
useWorkflowStore: () => ({
getState: mockWorkflowStoreGetState,
}),
}))
// Mock useWorkflowRefreshDraft (THE KEY DEPENDENCY FOR THIS TEST)
const mockHandleRefreshWorkflowDraft = vi.fn()
vi.mock('.', () => ({
useWorkflowRefreshDraft: () => ({
handleRefreshWorkflowDraft: mockHandleRefreshWorkflowDraft,
}),
}))
// Mock API_PREFIX
vi.mock('@/config', () => ({
API_PREFIX: '/api',
}))
// Create a mock error response that mimics the actual API error
const createMockErrorResponse = (code: string) => {
const errorBody = { code, message: 'Draft not in sync' }
let bodyUsed = false
return {
json: vi.fn().mockImplementation(() => {
bodyUsed = true
return Promise.resolve(errorBody)
}),
get bodyUsed() {
return bodyUsed
},
}
}
describe('useNodesSyncDraft', () => {
beforeEach(() => {
vi.clearAllMocks()
mockGetNodesReadOnly.mockReturnValue(false)
mockGetNodes.mockReturnValue([
{ id: 'node-1', type: 'start', data: { type: 'start' } },
{ id: 'node-2', type: 'llm', data: { type: 'llm' } },
])
mockStoreState.edges = [
{ id: 'edge-1', source: 'node-1', target: 'node-2', data: {} },
]
mockWorkflowStoreGetState.mockReturnValue(createMockWorkflowStoreState())
mockSyncWorkflowDraft.mockResolvedValue({
hash: 'new-hash-456',
updated_at: Date.now(),
})
})
afterEach(() => {
vi.resetAllMocks()
})
describe('doSyncWorkflowDraft function', () => {
it('should return doSyncWorkflowDraft function', () => {
const { result } = renderHook(() => useNodesSyncDraft())
expect(result.current.doSyncWorkflowDraft).toBeDefined()
expect(typeof result.current.doSyncWorkflowDraft).toBe('function')
})
it('should return syncWorkflowDraftWhenPageClose function', () => {
const { result } = renderHook(() => useNodesSyncDraft())
expect(result.current.syncWorkflowDraftWhenPageClose).toBeDefined()
expect(typeof result.current.syncWorkflowDraftWhenPageClose).toBe('function')
})
})
describe('successful sync', () => {
it('should call syncWorkflowDraft service on successful sync', async () => {
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSyncWorkflowDraft).toHaveBeenCalledWith({
url: '/apps/test-app-id/workflows/draft',
params: expect.objectContaining({
hash: 'current-hash-123',
graph: expect.objectContaining({
nodes: expect.any(Array),
edges: expect.any(Array),
viewport: expect.any(Object),
}),
}),
})
})
it('should update syncWorkflowDraftHash on success', async () => {
mockSyncWorkflowDraft.mockResolvedValue({
hash: 'new-hash-789',
updated_at: 1234567890,
})
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('new-hash-789')
})
it('should update draftUpdatedAt on success', async () => {
const updatedAt = 1234567890
mockSyncWorkflowDraft.mockResolvedValue({
hash: 'new-hash',
updated_at: updatedAt,
})
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSetDraftUpdatedAt).toHaveBeenCalledWith(updatedAt)
})
it('should call onSuccess callback on success', async () => {
const onSuccess = vi.fn()
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onSuccess })
})
expect(onSuccess).toHaveBeenCalled()
})
it('should call onSettled callback after success', async () => {
const onSettled = vi.fn()
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onSettled })
})
expect(onSettled).toHaveBeenCalled()
})
})
describe('sync error handling - draft_workflow_not_sync (THE KEY FIX)', () => {
/**
* This is THE KEY TEST for the bug fix.
*
* SCENARIO: Multi-tab editing
* 1. User opens workflow in Tab A and Tab B
* 2. Tab A saves draft successfully, gets new hash
* 3. Tab B tries to save with old hash
* 4. Server returns 400 with code 'draft_workflow_not_sync'
*
* BEFORE FIX:
* - handleRefreshWorkflowDraft() was called without arguments
* - This would fetch draft AND overwrite the canvas
* - User loses their unsaved changes in Tab B
*
* AFTER FIX:
* - handleRefreshWorkflowDraft(true) is called
* - This fetches draft but DOES NOT overwrite canvas
* - Only hash is updated for the next sync attempt
* - User's unsaved changes are preserved
*/
it('should call handleRefreshWorkflowDraft with notUpdateCanvas=true when draft_workflow_not_sync error occurs', async () => {
const mockError = createMockErrorResponse('draft_workflow_not_sync')
mockSyncWorkflowDraft.mockRejectedValue(mockError)
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
// THE KEY ASSERTION: handleRefreshWorkflowDraft must be called with true
await waitFor(() => {
expect(mockHandleRefreshWorkflowDraft).toHaveBeenCalledWith(true)
})
})
it('should NOT call handleRefreshWorkflowDraft when notRefreshWhenSyncError is true', async () => {
const mockError = createMockErrorResponse('draft_workflow_not_sync')
mockSyncWorkflowDraft.mockRejectedValue(mockError)
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
// First parameter is notRefreshWhenSyncError
await result.current.doSyncWorkflowDraft(true)
})
// Wait a bit for async operations
await new Promise(resolve => setTimeout(resolve, 100))
expect(mockHandleRefreshWorkflowDraft).not.toHaveBeenCalled()
})
it('should call onError callback when draft_workflow_not_sync error occurs', async () => {
const mockError = createMockErrorResponse('draft_workflow_not_sync')
mockSyncWorkflowDraft.mockRejectedValue(mockError)
const onError = vi.fn()
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onError })
})
expect(onError).toHaveBeenCalled()
})
it('should call onSettled callback after error', async () => {
const mockError = createMockErrorResponse('draft_workflow_not_sync')
mockSyncWorkflowDraft.mockRejectedValue(mockError)
const onSettled = vi.fn()
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onSettled })
})
expect(onSettled).toHaveBeenCalled()
})
})
describe('other error handling', () => {
it('should NOT call handleRefreshWorkflowDraft for non-draft_workflow_not_sync errors', async () => {
const mockError = createMockErrorResponse('some_other_error')
mockSyncWorkflowDraft.mockRejectedValue(mockError)
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
// Wait a bit for async operations
await new Promise(resolve => setTimeout(resolve, 100))
expect(mockHandleRefreshWorkflowDraft).not.toHaveBeenCalled()
})
it('should handle error without json method', async () => {
const mockError = new Error('Network error')
mockSyncWorkflowDraft.mockRejectedValue(mockError)
const { result } = renderHook(() => useNodesSyncDraft())
const onError = vi.fn()
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onError })
})
expect(onError).toHaveBeenCalled()
expect(mockHandleRefreshWorkflowDraft).not.toHaveBeenCalled()
})
it('should handle error with bodyUsed already true', async () => {
const mockError = {
json: vi.fn(),
bodyUsed: true,
}
mockSyncWorkflowDraft.mockRejectedValue(mockError)
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
// Should not call json() when bodyUsed is true
expect(mockError.json).not.toHaveBeenCalled()
expect(mockHandleRefreshWorkflowDraft).not.toHaveBeenCalled()
})
})
describe('read-only mode', () => {
it('should not sync when nodes are read-only', async () => {
mockGetNodesReadOnly.mockReturnValue(true)
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSyncWorkflowDraft).not.toHaveBeenCalled()
})
it('should not sync on page close when nodes are read-only', () => {
mockGetNodesReadOnly.mockReturnValue(true)
// Mock sendBeacon
const mockSendBeacon = vi.fn()
Object.defineProperty(navigator, 'sendBeacon', {
value: mockSendBeacon,
writable: true,
})
const { result } = renderHook(() => useNodesSyncDraft())
act(() => {
result.current.syncWorkflowDraftWhenPageClose()
})
expect(mockSendBeacon).not.toHaveBeenCalled()
})
})
describe('workflow data not loaded', () => {
it('should not sync when workflow data is not loaded', async () => {
mockWorkflowStoreGetState.mockReturnValue(
createMockWorkflowStoreState({ isWorkflowDataLoaded: false }),
)
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSyncWorkflowDraft).not.toHaveBeenCalled()
})
})
describe('no appId', () => {
it('should not sync when appId is not set', async () => {
mockWorkflowStoreGetState.mockReturnValue(
createMockWorkflowStoreState({ appId: null }),
)
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSyncWorkflowDraft).not.toHaveBeenCalled()
})
})
describe('node filtering', () => {
it('should filter out temp nodes', async () => {
mockGetNodes.mockReturnValue([
{ id: 'node-1', type: 'start', data: { type: 'start' } },
{ id: 'node-temp', type: 'custom', data: { type: 'custom', _isTempNode: true } },
{ id: 'node-2', type: 'llm', data: { type: 'llm' } },
])
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSyncWorkflowDraft).toHaveBeenCalledWith(
expect.objectContaining({
params: expect.objectContaining({
graph: expect.objectContaining({
nodes: expect.not.arrayContaining([
expect.objectContaining({ id: 'node-temp' }),
]),
}),
}),
}),
)
})
it('should remove internal underscore properties from nodes', async () => {
mockGetNodes.mockReturnValue([
{
id: 'node-1',
type: 'start',
data: {
type: 'start',
_internalProp: 'should be removed',
_anotherInternal: true,
publicProp: 'should remain',
},
},
])
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
const callArgs = mockSyncWorkflowDraft.mock.calls[0][0]
const sentNode = callArgs.params.graph.nodes[0]
expect(sentNode.data).not.toHaveProperty('_internalProp')
expect(sentNode.data).not.toHaveProperty('_anotherInternal')
expect(sentNode.data).toHaveProperty('publicProp', 'should remain')
})
})
describe('edge filtering', () => {
it('should filter out temp edges', async () => {
mockStoreState.edges = [
{ id: 'edge-1', source: 'node-1', target: 'node-2', data: {} },
{ id: 'edge-temp', source: 'node-1', target: 'node-3', data: { _isTemp: true } },
]
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
const callArgs = mockSyncWorkflowDraft.mock.calls[0][0]
const sentEdges = callArgs.params.graph.edges
expect(sentEdges).toHaveLength(1)
expect(sentEdges[0].id).toBe('edge-1')
})
it('should remove internal underscore properties from edges', async () => {
mockStoreState.edges = [
{
id: 'edge-1',
source: 'node-1',
target: 'node-2',
data: {
_internalEdgeProp: 'should be removed',
publicEdgeProp: 'should remain',
},
},
]
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
const callArgs = mockSyncWorkflowDraft.mock.calls[0][0]
const sentEdge = callArgs.params.graph.edges[0]
expect(sentEdge.data).not.toHaveProperty('_internalEdgeProp')
expect(sentEdge.data).toHaveProperty('publicEdgeProp', 'should remain')
})
})
describe('viewport handling', () => {
it('should send current viewport from transform', async () => {
mockStoreState.transform = [100, 200, 1.5]
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSyncWorkflowDraft).toHaveBeenCalledWith(
expect.objectContaining({
params: expect.objectContaining({
graph: expect.objectContaining({
viewport: { x: 100, y: 200, zoom: 1.5 },
}),
}),
}),
)
})
})
describe('multi-tab concurrent editing scenario (END-TO-END TEST)', () => {
/**
* Simulates the complete multi-tab scenario to verify the fix works correctly.
*
* Scenario:
* 1. Tab A and Tab B both have the workflow open with hash 'hash-v1'
* 2. Tab A saves successfully, server returns 'hash-v2'
* 3. Tab B tries to save with 'hash-v1', gets 'draft_workflow_not_sync' error
* 4. Tab B should only update hash to 'hash-v2', not overwrite canvas
* 5. Tab B can now retry save with correct hash
*/
it('should preserve canvas data during hash conflict resolution', async () => {
// Initial state: both tabs have hash-v1
mockWorkflowStoreGetState.mockReturnValue(
createMockWorkflowStoreState({ syncWorkflowDraftHash: 'hash-v1' }),
)
// Tab B tries to save with old hash, server returns error
const syncError = createMockErrorResponse('draft_workflow_not_sync')
mockSyncWorkflowDraft.mockRejectedValue(syncError)
const { result } = renderHook(() => useNodesSyncDraft())
// Tab B attempts to sync
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
// Verify the sync was attempted with old hash
expect(mockSyncWorkflowDraft).toHaveBeenCalledWith(
expect.objectContaining({
params: expect.objectContaining({
hash: 'hash-v1',
}),
}),
)
// Verify handleRefreshWorkflowDraft was called with true (not overwrite canvas)
await waitFor(() => {
expect(mockHandleRefreshWorkflowDraft).toHaveBeenCalledWith(true)
})
// The key assertion: only one argument (true) was passed
expect(mockHandleRefreshWorkflowDraft).toHaveBeenCalledTimes(1)
expect(mockHandleRefreshWorkflowDraft.mock.calls[0]).toEqual([true])
})
it('should handle multiple consecutive sync failures gracefully', async () => {
// Create fresh error for each call to avoid bodyUsed issue
mockSyncWorkflowDraft
.mockRejectedValueOnce(createMockErrorResponse('draft_workflow_not_sync'))
.mockRejectedValueOnce(createMockErrorResponse('draft_workflow_not_sync'))
const { result } = renderHook(() => useNodesSyncDraft())
// First sync attempt
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
// Wait for first refresh call
await waitFor(() => {
expect(mockHandleRefreshWorkflowDraft).toHaveBeenCalledTimes(1)
})
// Second sync attempt
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
// Both should call handleRefreshWorkflowDraft with true
await waitFor(() => {
expect(mockHandleRefreshWorkflowDraft).toHaveBeenCalledTimes(2)
})
mockHandleRefreshWorkflowDraft.mock.calls.forEach((call) => {
expect(call).toEqual([true])
})
})
})
describe('callbacks behavior', () => {
it('should not call onSuccess when sync fails', async () => {
const syncError = createMockErrorResponse('draft_workflow_not_sync')
mockSyncWorkflowDraft.mockRejectedValue(syncError)
const onSuccess = vi.fn()
const onError = vi.fn()
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onSuccess, onError })
})
expect(onSuccess).not.toHaveBeenCalled()
expect(onError).toHaveBeenCalled()
})
it('should always call onSettled regardless of success or failure', async () => {
const onSettled = vi.fn()
const { result } = renderHook(() => useNodesSyncDraft())
// Test success case
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onSettled })
})
expect(onSettled).toHaveBeenCalledTimes(1)
// Reset
onSettled.mockClear()
// Test failure case
const syncError = createMockErrorResponse('draft_workflow_not_sync')
mockSyncWorkflowDraft.mockRejectedValue(syncError)
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onSettled })
})
expect(onSettled).toHaveBeenCalledTimes(1)
})
})
})

View File

@@ -115,7 +115,7 @@ export const useNodesSyncDraft = () => {
if (error && error.json && !error.bodyUsed) {
error.json().then((err: any) => {
if (err.code === 'draft_workflow_not_sync' && !notRefreshWhenSyncError)
handleRefreshWorkflowDraft()
handleRefreshWorkflowDraft(true)
})
}
callback?.onError?.()
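The single-line fix above is easier to read with its surrounding error handler. Below is a minimal TypeScript sketch of the path the useNodesSyncDraft tests exercise; the `draft_workflow_not_sync` check and the `handleRefreshWorkflowDraft(true)` call come from this hunk, while the function name `doSyncWorkflowDraftSketch`, the `SyncDeps` type and the parameter wiring are illustrative assumptions, not the hook's actual code.

// Sketch only: dependencies are passed in explicitly so the snippet is self-contained.
type SyncDeps = {
  syncWorkflowDraft: (args: { url: string, params: unknown }) => Promise<{ hash: string, updated_at: number }>
  setSyncWorkflowDraftHash: (hash: string) => void
  setDraftUpdatedAt: (updatedAt: number) => void
  handleRefreshWorkflowDraft: (notUpdateCanvas?: boolean) => void
}
type SyncCallbacks = { onSuccess?: () => void, onError?: () => void, onSettled?: () => void }

async function doSyncWorkflowDraftSketch(
  deps: SyncDeps,
  appId: string,
  params: unknown,
  notRefreshWhenSyncError = false,
  callback?: SyncCallbacks,
): Promise<void> {
  try {
    // Save the draft with the hash this tab last saw.
    const res = await deps.syncWorkflowDraft({ url: `/apps/${appId}/workflows/draft`, params })
    deps.setSyncWorkflowDraftHash(res.hash)
    deps.setDraftUpdatedAt(res.updated_at)
    callback?.onSuccess?.()
  }
  catch (error: any) {
    // A response body with code 'draft_workflow_not_sync' means another tab saved first.
    if (error?.json && !error.bodyUsed) {
      const err = await error.json()
      // Refresh only the hash (notUpdateCanvas = true) so unsaved canvas edits survive.
      if (err.code === 'draft_workflow_not_sync' && !notRefreshWhenSyncError)
        deps.handleRefreshWorkflowDraft(true)
    }
    callback?.onError?.()
  }
  finally {
    callback?.onSettled?.()
  }
}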

View File

@@ -0,0 +1,556 @@
/**
* Test Suite for useWorkflowRefreshDraft Hook
*
* PURPOSE:
* This hook is responsible for refreshing workflow draft data from the server.
* The key fix being tested is the `notUpdateCanvas` parameter behavior.
*
* MULTI-TAB PROBLEM SCENARIO:
* 1. User opens the same workflow in Tab A and Tab B (both have hash: v1)
* 2. Tab A saves successfully, server returns new hash: v2
* 3. Tab B tries to save with old hash: v1, server returns 400 error (draft_workflow_not_sync)
* 4. BEFORE FIX: handleRefreshWorkflowDraft() was called without args, which fetched
* draft AND overwrote canvas - user lost unsaved changes in Tab B
* 5. AFTER FIX: handleRefreshWorkflowDraft(true) is called, which fetches draft but
* only updates hash, preserving user's canvas changes
*
* TESTING STRATEGY:
* We don't simulate actual tab switching UI behavior. Instead, we test the hook's
* response to specific inputs:
* - When notUpdateCanvas=true: should NOT call handleUpdateWorkflowCanvas
* - When notUpdateCanvas=false/undefined: should call handleUpdateWorkflowCanvas
*
* This is behavior-driven testing - we verify "what the code does when given specific
* inputs" rather than simulating complete user interaction flows.
*/
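// Illustrative summary, reconstructed from the assertions below rather than from the
// hook's source: the contract these tests pin down for
//
//   handleRefreshWorkflowDraft(notUpdateCanvas?: boolean)
//     1. cancels any pending debounced sync and sets isSyncingWorkflowDraft to true
//     2. fetches `/apps/${appId}/workflows/draft`
//     3. always stores the returned hash, environment variables (secret values masked as
//        '[__HIDDEN__]', raw values kept in envSecrets) and conversation variables
//     4. calls handleUpdateWorkflowCanvas(graph) only when notUpdateCanvas is falsy, so a
//        hash-conflict refresh never overwrites unsaved canvas edits
//     5. clears isSyncingWorkflowDraft (and restores isWorkflowDataLoaded) even when the
//        fetch fails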
import { act, renderHook, waitFor } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { useWorkflowRefreshDraft } from './use-workflow-refresh-draft'
// Mock the workflow service
const mockFetchWorkflowDraft = vi.fn()
vi.mock('@/service/workflow', () => ({
fetchWorkflowDraft: (...args: unknown[]) => mockFetchWorkflowDraft(...args),
}))
// Mock the workflow update hook
const mockHandleUpdateWorkflowCanvas = vi.fn()
vi.mock('@/app/components/workflow/hooks', () => ({
useWorkflowUpdate: () => ({
handleUpdateWorkflowCanvas: mockHandleUpdateWorkflowCanvas,
}),
}))
// Mock store state
const mockSetSyncWorkflowDraftHash = vi.fn()
const mockSetIsSyncingWorkflowDraft = vi.fn()
const mockSetEnvironmentVariables = vi.fn()
const mockSetEnvSecrets = vi.fn()
const mockSetConversationVariables = vi.fn()
const mockSetIsWorkflowDataLoaded = vi.fn()
const mockCancelDebouncedSync = vi.fn()
const createMockStoreState = (overrides = {}) => ({
appId: 'test-app-id',
setSyncWorkflowDraftHash: mockSetSyncWorkflowDraftHash,
setIsSyncingWorkflowDraft: mockSetIsSyncingWorkflowDraft,
setEnvironmentVariables: mockSetEnvironmentVariables,
setEnvSecrets: mockSetEnvSecrets,
setConversationVariables: mockSetConversationVariables,
setIsWorkflowDataLoaded: mockSetIsWorkflowDataLoaded,
isWorkflowDataLoaded: true,
debouncedSyncWorkflowDraft: {
cancel: mockCancelDebouncedSync,
},
...overrides,
})
const mockWorkflowStoreGetState = vi.fn()
vi.mock('@/app/components/workflow/store', () => ({
useWorkflowStore: () => ({
getState: mockWorkflowStoreGetState,
}),
}))
// Default mock response from fetchWorkflowDraft
const createMockDraftResponse = (overrides = {}) => ({
hash: 'new-hash-12345',
graph: {
nodes: [{ id: 'node-1', type: 'start', data: {} }],
edges: [{ id: 'edge-1', source: 'node-1', target: 'node-2' }],
viewport: { x: 100, y: 200, zoom: 1.5 },
},
environment_variables: [
{ id: 'env-1', name: 'API_KEY', value: 'secret-key', value_type: 'secret' },
{ id: 'env-2', name: 'BASE_URL', value: 'https://api.example.com', value_type: 'string' },
],
conversation_variables: [
{ id: 'conv-1', name: 'user_input', value: 'test' },
],
...overrides,
})
describe('useWorkflowRefreshDraft', () => {
beforeEach(() => {
vi.clearAllMocks()
mockWorkflowStoreGetState.mockReturnValue(createMockStoreState())
mockFetchWorkflowDraft.mockResolvedValue(createMockDraftResponse())
})
afterEach(() => {
vi.resetAllMocks()
})
describe('handleRefreshWorkflowDraft function', () => {
it('should return handleRefreshWorkflowDraft function', () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
expect(result.current.handleRefreshWorkflowDraft).toBeDefined()
expect(typeof result.current.handleRefreshWorkflowDraft).toBe('function')
})
})
describe('notUpdateCanvas parameter behavior (THE KEY FIX)', () => {
it('should NOT call handleUpdateWorkflowCanvas when notUpdateCanvas is true', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockFetchWorkflowDraft).toHaveBeenCalledWith('/apps/test-app-id/workflows/draft')
})
await waitFor(() => {
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('new-hash-12345')
})
// THE KEY ASSERTION: Canvas should NOT be updated when notUpdateCanvas is true
expect(mockHandleUpdateWorkflowCanvas).not.toHaveBeenCalled()
})
it('should call handleUpdateWorkflowCanvas when notUpdateCanvas is false', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(false)
})
await waitFor(() => {
expect(mockFetchWorkflowDraft).toHaveBeenCalledWith('/apps/test-app-id/workflows/draft')
})
await waitFor(() => {
// Canvas SHOULD be updated when notUpdateCanvas is false
expect(mockHandleUpdateWorkflowCanvas).toHaveBeenCalledWith({
nodes: [{ id: 'node-1', type: 'start', data: {} }],
edges: [{ id: 'edge-1', source: 'node-1', target: 'node-2' }],
viewport: { x: 100, y: 200, zoom: 1.5 },
})
})
await waitFor(() => {
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('new-hash-12345')
})
})
it('should call handleUpdateWorkflowCanvas when notUpdateCanvas is undefined (default)', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
await waitFor(() => {
expect(mockFetchWorkflowDraft).toHaveBeenCalled()
})
await waitFor(() => {
// Canvas SHOULD be updated when notUpdateCanvas is undefined
expect(mockHandleUpdateWorkflowCanvas).toHaveBeenCalled()
})
})
it('should still update hash even when notUpdateCanvas is true', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('new-hash-12345')
})
// Verify canvas was NOT updated
expect(mockHandleUpdateWorkflowCanvas).not.toHaveBeenCalled()
})
it('should still update environment variables when notUpdateCanvas is true', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetEnvironmentVariables).toHaveBeenCalledWith([
{ id: 'env-1', name: 'API_KEY', value: '[__HIDDEN__]', value_type: 'secret' },
{ id: 'env-2', name: 'BASE_URL', value: 'https://api.example.com', value_type: 'string' },
])
})
expect(mockHandleUpdateWorkflowCanvas).not.toHaveBeenCalled()
})
it('should still update env secrets when notUpdateCanvas is true', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetEnvSecrets).toHaveBeenCalledWith({
'env-1': 'secret-key',
})
})
expect(mockHandleUpdateWorkflowCanvas).not.toHaveBeenCalled()
})
it('should still update conversation variables when notUpdateCanvas is true', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetConversationVariables).toHaveBeenCalledWith([
{ id: 'conv-1', name: 'user_input', value: 'test' },
])
})
expect(mockHandleUpdateWorkflowCanvas).not.toHaveBeenCalled()
})
})
describe('syncing state management', () => {
it('should set isSyncingWorkflowDraft to true before fetch', () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
expect(mockSetIsSyncingWorkflowDraft).toHaveBeenCalledWith(true)
})
it('should set isSyncingWorkflowDraft to false after fetch completes', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
await waitFor(() => {
expect(mockSetIsSyncingWorkflowDraft).toHaveBeenCalledWith(false)
})
})
it('should set isSyncingWorkflowDraft to false even when fetch fails', async () => {
mockFetchWorkflowDraft.mockRejectedValue(new Error('Network error'))
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
await waitFor(() => {
expect(mockSetIsSyncingWorkflowDraft).toHaveBeenCalledWith(false)
})
})
})
describe('isWorkflowDataLoaded flag management', () => {
it('should set isWorkflowDataLoaded to false before fetch when it was true', () => {
mockWorkflowStoreGetState.mockReturnValue(
createMockStoreState({ isWorkflowDataLoaded: true }),
)
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
expect(mockSetIsWorkflowDataLoaded).toHaveBeenCalledWith(false)
})
it('should set isWorkflowDataLoaded to true after fetch succeeds', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
await waitFor(() => {
expect(mockSetIsWorkflowDataLoaded).toHaveBeenCalledWith(true)
})
})
it('should restore isWorkflowDataLoaded when fetch fails and it was previously loaded', async () => {
mockWorkflowStoreGetState.mockReturnValue(
createMockStoreState({ isWorkflowDataLoaded: true }),
)
mockFetchWorkflowDraft.mockRejectedValue(new Error('Network error'))
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
await waitFor(() => {
// Should restore to true because wasLoaded was true
expect(mockSetIsWorkflowDataLoaded).toHaveBeenLastCalledWith(true)
})
})
})
describe('debounced sync cancellation', () => {
it('should cancel debounced sync before fetching draft', () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
expect(mockCancelDebouncedSync).toHaveBeenCalled()
})
it('should handle case when debouncedSyncWorkflowDraft has no cancel method', () => {
mockWorkflowStoreGetState.mockReturnValue(
createMockStoreState({ debouncedSyncWorkflowDraft: {} }),
)
const { result } = renderHook(() => useWorkflowRefreshDraft())
// Should not throw
expect(() => {
act(() => {
result.current.handleRefreshWorkflowDraft()
})
}).not.toThrow()
})
})
describe('edge cases', () => {
it('should handle empty graph in response', async () => {
mockFetchWorkflowDraft.mockResolvedValue({
hash: 'hash-empty',
graph: null,
environment_variables: [],
conversation_variables: [],
})
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(false)
})
await waitFor(() => {
expect(mockHandleUpdateWorkflowCanvas).toHaveBeenCalledWith({
nodes: [],
edges: [],
viewport: { x: 0, y: 0, zoom: 1 },
})
})
})
it('should handle missing viewport in response', async () => {
mockFetchWorkflowDraft.mockResolvedValue({
hash: 'hash-no-viewport',
graph: {
nodes: [{ id: 'node-1' }],
edges: [],
viewport: null,
},
environment_variables: [],
conversation_variables: [],
})
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(false)
})
await waitFor(() => {
expect(mockHandleUpdateWorkflowCanvas).toHaveBeenCalledWith({
nodes: [{ id: 'node-1' }],
edges: [],
viewport: { x: 0, y: 0, zoom: 1 },
})
})
})
it('should handle missing environment_variables in response', async () => {
mockFetchWorkflowDraft.mockResolvedValue({
hash: 'hash-no-env',
graph: { nodes: [], edges: [], viewport: { x: 0, y: 0, zoom: 1 } },
environment_variables: undefined,
conversation_variables: [],
})
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetEnvironmentVariables).toHaveBeenCalledWith([])
expect(mockSetEnvSecrets).toHaveBeenCalledWith({})
})
})
it('should handle missing conversation_variables in response', async () => {
mockFetchWorkflowDraft.mockResolvedValue({
hash: 'hash-no-conv',
graph: { nodes: [], edges: [], viewport: { x: 0, y: 0, zoom: 1 } },
environment_variables: [],
conversation_variables: undefined,
})
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetConversationVariables).toHaveBeenCalledWith([])
})
})
it('should filter only secret type for envSecrets', async () => {
mockFetchWorkflowDraft.mockResolvedValue({
hash: 'hash-mixed-env',
graph: { nodes: [], edges: [], viewport: { x: 0, y: 0, zoom: 1 } },
environment_variables: [
{ id: 'env-1', name: 'SECRET_KEY', value: 'secret-value', value_type: 'secret' },
{ id: 'env-2', name: 'PUBLIC_URL', value: 'https://example.com', value_type: 'string' },
{ id: 'env-3', name: 'ANOTHER_SECRET', value: 'another-secret', value_type: 'secret' },
],
conversation_variables: [],
})
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetEnvSecrets).toHaveBeenCalledWith({
'env-1': 'secret-value',
'env-3': 'another-secret',
})
})
})
it('should hide secret values in environment variables', async () => {
mockFetchWorkflowDraft.mockResolvedValue({
hash: 'hash-secrets',
graph: { nodes: [], edges: [], viewport: { x: 0, y: 0, zoom: 1 } },
environment_variables: [
{ id: 'env-1', name: 'SECRET_KEY', value: 'super-secret', value_type: 'secret' },
{ id: 'env-2', name: 'PUBLIC_URL', value: 'https://example.com', value_type: 'string' },
],
conversation_variables: [],
})
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetEnvironmentVariables).toHaveBeenCalledWith([
{ id: 'env-1', name: 'SECRET_KEY', value: '[__HIDDEN__]', value_type: 'secret' },
{ id: 'env-2', name: 'PUBLIC_URL', value: 'https://example.com', value_type: 'string' },
])
})
})
})
describe('multi-tab scenario simulation (THE BUG FIX VERIFICATION)', () => {
/**
* This test verifies the fix for the multi-tab scenario:
* 1. User opens workflow in Tab A and Tab B
* 2. Tab A saves draft successfully
 * 3. Tab B tries to save but gets a 'draft_workflow_not_sync' error (hash mismatch)
* 4. BEFORE FIX: Tab B would fetch draft and overwrite canvas with old data
* 5. AFTER FIX: Tab B only updates hash, preserving user's canvas changes
*/
it('should only update hash when called with notUpdateCanvas=true (simulating sync error recovery)', async () => {
const mockResponse = createMockDraftResponse()
mockFetchWorkflowDraft.mockResolvedValue(mockResponse)
const { result } = renderHook(() => useWorkflowRefreshDraft())
// Simulate the sync error recovery scenario where notUpdateCanvas is true
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockFetchWorkflowDraft).toHaveBeenCalled()
})
await waitFor(() => {
// Hash should be updated for next sync attempt
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('new-hash-12345')
})
// Canvas should NOT be updated - user's changes are preserved
expect(mockHandleUpdateWorkflowCanvas).not.toHaveBeenCalled()
// Other states should still be updated
expect(mockSetEnvironmentVariables).toHaveBeenCalled()
expect(mockSetConversationVariables).toHaveBeenCalled()
})
it('should update canvas when called with notUpdateCanvas=false (normal refresh)', async () => {
const mockResponse = createMockDraftResponse()
mockFetchWorkflowDraft.mockResolvedValue(mockResponse)
const { result } = renderHook(() => useWorkflowRefreshDraft())
// Simulate normal refresh scenario
act(() => {
result.current.handleRefreshWorkflowDraft(false)
})
await waitFor(() => {
expect(mockFetchWorkflowDraft).toHaveBeenCalled()
})
await waitFor(() => {
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('new-hash-12345')
})
// Canvas SHOULD be updated in normal refresh
await waitFor(() => {
expect(mockHandleUpdateWorkflowCanvas).toHaveBeenCalled()
})
})
})
})
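
For context on the recovery path these multi-tab tests exercise, here is a hypothetical caller sketch (not part of this diff): it assumes a promise-based sync helper and an error object carrying a `code` field, and shows the intent of passing `notUpdateCanvas = true` after a `draft_workflow_not_sync` failure.

const syncDraftWithRecovery = async (
  doSyncWorkflowDraft: () => Promise<void>,
  handleRefreshWorkflowDraft: (notUpdateCanvas?: boolean) => void,
) => {
  try {
    await doSyncWorkflowDraft()
  }
  catch (error: unknown) {
    // Hypothetical error shape; the real client may surface the code differently.
    if ((error as { code?: string })?.code === 'draft_workflow_not_sync') {
      // Refresh only the hash, variables, and sync state;
      // the user's in-progress canvas edits are left untouched.
      handleRefreshWorkflowDraft(true)
    }
  }
}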

View File

@ -8,7 +8,7 @@ export const useWorkflowRefreshDraft = () => {
const workflowStore = useWorkflowStore()
const { handleUpdateWorkflowCanvas } = useWorkflowUpdate()
const handleRefreshWorkflowDraft = useCallback(() => {
const handleRefreshWorkflowDraft = useCallback((notUpdateCanvas?: boolean) => {
const {
appId,
setSyncWorkflowDraftHash,
@ -31,12 +31,14 @@ export const useWorkflowRefreshDraft = () => {
fetchWorkflowDraft(`/apps/${appId}/workflows/draft`)
.then((response) => {
// Ensure we have a valid workflow structure with viewport
const workflowData: WorkflowDataUpdater = {
nodes: response.graph?.nodes || [],
edges: response.graph?.edges || [],
viewport: response.graph?.viewport || { x: 0, y: 0, zoom: 1 },
if (!notUpdateCanvas) {
const workflowData: WorkflowDataUpdater = {
nodes: response.graph?.nodes || [],
edges: response.graph?.edges || [],
viewport: response.graph?.viewport || { x: 0, y: 0, zoom: 1 },
}
handleUpdateWorkflowCanvas(workflowData)
}
handleUpdateWorkflowCanvas(workflowData)
setSyncWorkflowDraftHash(response.hash)
setEnvSecrets((response.environment_variables || []).filter(env => env.value_type === 'secret').reduce((acc, env) => {
acc[env.id] = env.value
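
The hunk above is cut off by the diff context; the sketch below reconstructs the rest of the handler from the store setters mocked in the tests earlier in this diff (masked secrets, conversation variables, syncing and loaded flags). It is an approximation of the post-fix flow, not the verbatim source.

fetchWorkflowDraft(`/apps/${appId}/workflows/draft`)
  .then((response) => {
    if (!notUpdateCanvas) {
      handleUpdateWorkflowCanvas({
        nodes: response.graph?.nodes || [],
        edges: response.graph?.edges || [],
        viewport: response.graph?.viewport || { x: 0, y: 0, zoom: 1 },
      })
    }
    setSyncWorkflowDraftHash(response.hash)
    // Keep raw secret values in a separate map...
    setEnvSecrets((response.environment_variables || [])
      .filter(env => env.value_type === 'secret')
      .reduce((acc, env) => {
        acc[env.id] = env.value
        return acc
      }, {} as Record<string, string>))
    // ...and mask them in the variables exposed to the UI.
    setEnvironmentVariables((response.environment_variables || []).map(env => (
      env.value_type === 'secret' ? { ...env, value: '[__HIDDEN__]' } : env
    )))
    setConversationVariables(response.conversation_variables || [])
    setIsWorkflowDataLoaded(true)
  })
  .catch(() => {
    // If the refresh fails, restore the previous loaded state.
    setIsWorkflowDataLoaded(wasLoaded)
  })
  .finally(() => {
    setIsSyncingWorkflowDraft(false)
  })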

View File

@ -1,7 +1,7 @@
{
"name": "dify-web",
"type": "module",
"version": "1.12.0",
"version": "1.11.4",
"private": true,
"packageManager": "pnpm@10.27.0+sha512.72d699da16b1179c14ba9e64dc71c9a40988cbdc65c264cb0e489db7de917f20dcf4d64d8723625f2969ba52d4b7e2a1170682d9ac2a5dcaeaab732b7e16f04a",
"imports": {
@ -117,6 +117,7 @@
"ky": "1.12.0",
"lamejs": "1.2.1",
"lexical": "0.38.2",
"line-clamp": "1.0.0",
"mermaid": "11.11.0",
"mime": "4.1.0",
"mitt": "3.0.1",

web/pnpm-lock.yaml generated
View File

@ -233,6 +233,9 @@ importers:
lexical:
specifier: 0.38.2
version: 0.38.2
line-clamp:
specifier: 1.0.0
version: 1.0.0
mermaid:
specifier: 11.11.0
version: 11.11.0
@ -5400,6 +5403,9 @@ packages:
resolution: {integrity: sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==}
engines: {node: '>=14'}
line-clamp@1.0.0:
resolution: {integrity: sha512-dCDlvMj572RIRBQ3x9aIX0DTdt2St1bMdpi64jVTAi5vqBck7wf+J97//+J7+pS80rFJaYa8HiyXCTp0flpnBA==}
lines-and-columns@1.2.4:
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
@ -12907,6 +12913,8 @@ snapshots:
lilconfig@3.1.3: {}
line-clamp@1.0.0: {}
lines-and-columns@1.2.4: {}
lint-staged@15.5.2:

View File

@ -1,80 +0,0 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
const loadGetBaseURL = async (isClientValue: boolean) => {
vi.resetModules()
vi.doMock('@/utils/client', () => ({ isClient: isClientValue }))
const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {})
// eslint-disable-next-line next/no-assign-module-variable
const module = await import('./client')
warnSpy.mockClear()
return { getBaseURL: module.getBaseURL, warnSpy }
}
// Scenario: base URL selection and warnings.
describe('getBaseURL', () => {
beforeEach(() => {
vi.clearAllMocks()
})
afterEach(() => {
vi.restoreAllMocks()
})
// Scenario: client environment uses window origin.
it('should use window origin when running on the client', async () => {
// Arrange
const { origin } = window.location
const { getBaseURL, warnSpy } = await loadGetBaseURL(true)
// Act
const url = getBaseURL('/api')
// Assert
expect(url.href).toBe(`${origin}/api`)
expect(warnSpy).not.toHaveBeenCalled()
})
// Scenario: server environment falls back to localhost with warning.
it('should fall back to localhost and warn on the server', async () => {
// Arrange
const { getBaseURL, warnSpy } = await loadGetBaseURL(false)
// Act
const url = getBaseURL('/api')
// Assert
expect(url.href).toBe('http://localhost/api')
expect(warnSpy).toHaveBeenCalledTimes(1)
expect(warnSpy).toHaveBeenCalledWith('Using localhost as base URL in server environment, please configure accordingly.')
})
// Scenario: non-http protocols surface warnings.
it('should warn when protocol is not http or https', async () => {
// Arrange
const { getBaseURL, warnSpy } = await loadGetBaseURL(true)
// Act
const url = getBaseURL('localhost:5001/console/api')
// Assert
expect(url.protocol).toBe('localhost:')
expect(url.href).toBe('localhost:5001/console/api')
expect(warnSpy).toHaveBeenCalledTimes(1)
expect(warnSpy).toHaveBeenCalledWith(
'Unexpected protocol for API requests, expected http or https. Current protocol: localhost:. Please configure accordingly.',
)
})
// Scenario: absolute http URLs are preserved.
it('should keep absolute http URLs intact', async () => {
// Arrange
const { getBaseURL, warnSpy } = await loadGetBaseURL(true)
// Act
const url = getBaseURL('https://api.example.com/console/api')
// Assert
expect(url.href).toBe('https://api.example.com/console/api')
expect(warnSpy).not.toHaveBeenCalled()
})
})

View File

@ -13,38 +13,12 @@ import {
consoleRouterContract,
marketplaceRouterContract,
} from '@/contract/router'
import { isClient } from '@/utils/client'
import { request } from './base'
const getMarketplaceHeaders = () => new Headers({
'X-Dify-Version': !IS_MARKETPLACE ? APP_VERSION : '999.0.0',
})
function isURL(path: string) {
try {
// eslint-disable-next-line no-new
new URL(path)
return true
}
catch {
return false
}
}
export function getBaseURL(path: string) {
const url = new URL(path, isURL(path) ? undefined : isClient ? window.location.origin : 'http://localhost')
if (!isClient && !isURL(path)) {
console.warn('Using localhost as base URL in server environment, please configure accordingly.')
}
if (url.protocol !== 'http:' && url.protocol !== 'https:') {
console.warn(`Unexpected protocol for API requests, expected http or https. Current protocol: ${url.protocol}. Please configure accordingly.`)
}
return url
}
const marketplaceLink = new OpenAPILink(marketplaceRouterContract, {
url: MARKETPLACE_API_PREFIX,
headers: () => (getMarketplaceHeaders()),
@ -65,7 +39,7 @@ export const marketplaceClient: JsonifiedClient<ContractRouterClient<typeof mark
export const marketplaceQuery = createTanstackQueryUtils(marketplaceClient, { path: ['marketplace'] })
const consoleLink = new OpenAPILink(consoleRouterContract, {
url: getBaseURL(API_PREFIX),
url: API_PREFIX,
fetch: (input, init) => {
return request(
input.url,