Mirror of https://github.com/langgenius/dify.git (synced 2026-02-10 21:35:43 +08:00)

Compare commits: feat/colla ... refactor/d (22 commits)
| SHA1 |
|---|
| 38d68ac4e3 |
| 4e48a1c4c3 |
| fb9a6bbc9f |
| a86765e2b6 |
| 5eaf0c733a |
| f561656a89 |
| f01f555146 |
| 47d0e400ae |
| 8724ba04aa |
| 6fd001c660 |
| e8e386a6b9 |
| eba5eac3fa |
| 19008dce13 |
| 92011d0a31 |
| a51ced0a4f |
| dad8e408b0 |
| d941201a3e |
| dd988d42c2 |
| a43d2ec4f0 |
| 7c12e923b6 |
| b9f1d65d4f |
| b4e2af96e2 |
@@ -0,0 +1,27 @@
# Notes: `large_language_model.py`

## Purpose

Provides the base `LargeLanguageModel` implementation used by the model runtime to invoke plugin-backed LLMs and to bridge plugin daemon streaming semantics back into API-layer entities (`LLMResult`, `LLMResultChunk`).

## Key behaviors / invariants

- `invoke(..., stream=False)` still calls the plugin in streaming mode and then synthesizes a single `LLMResult` from the first yielded `LLMResultChunk`.
- Plugin invocation is wrapped by `_invoke_llm_via_plugin(...)`, and `stream=False` normalization is handled by `_normalize_non_stream_plugin_result(...)` / `_build_llm_result_from_first_chunk(...)`.
- Tool call deltas are merged incrementally via `_increase_tool_call(...)` to support multiple provider chunking patterns (IDs anchored to the first chunk, repeated on every chunk, or missing entirely); see the usage sketch after these notes.
- A tool-call delta with an empty `id` requires at least one existing tool call; otherwise we raise `ValueError` to surface invalid delta sequences explicitly.
- Callback invocation is centralized in `_run_callbacks(...)` to ensure consistent error handling/logging.
- For compatibility with dify issue `#17799`, `prompt_messages` may be removed from chunks by the plugin daemon and must be re-attached in this layer before callbacks/consumers use them.
- Callback hooks (`on_before_invoke`, `on_new_chunk`, `on_after_invoke`, `on_invoke_error`) must not break invocation unless `callback.raise_error` is true.

## Test focus

- `api/tests/unit_tests/core/model_runtime/__base/test_increase_tool_call.py` validates tool-call delta merging and patches `_gen_tool_call_id` for deterministic IDs.
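To make the merging invariants concrete, here is a minimal usage sketch. It is illustrative rather than authoritative: the import paths are assumptions inferred from the test location above, not confirmed by this diff.

```python
# A hedged sketch of the delta-merging behavior described in the notes.
# Assumed import paths (not confirmed by this diff):
from core.model_runtime.entities.message_entities import AssistantPromptMessage
from core.model_runtime.model_providers.__base.large_language_model import _increase_tool_call

ToolCall = AssistantPromptMessage.ToolCall
Fn = AssistantPromptMessage.ToolCall.ToolCallFunction

merged: list[ToolCall] = []

# Pattern: the ID arrives only on the first chunk; later deltas carry an
# empty id and append argument fragments to the most recent tool call.
_increase_tool_call([ToolCall(id="call-1", type="function", function=Fn(name="get_weather", arguments=""))], merged)
_increase_tool_call([ToolCall(id="", type="", function=Fn(name="", arguments='{"city": "Par'))], merged)
_increase_tool_call([ToolCall(id="", type="", function=Fn(name="", arguments='is"}'))], merged)

assert merged[0].function.arguments == '{"city": "Paris"}'

# Invariant: an empty-id delta with no existing tool call raises ValueError.
try:
    _increase_tool_call([ToolCall(id="", type="", function=Fn(name="", arguments="x"))], [])
except ValueError:
    pass  # invalid delta sequences are surfaced explicitly
```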
@@ -33,9 +33,6 @@ TRIGGER_URL=http://localhost:5001

# The time in seconds after which the signature is rejected
FILES_ACCESS_TIMEOUT=300

# Collaboration mode toggle
ENABLE_COLLABORATION_MODE=false

# Access token expiration time in minutes
ACCESS_TOKEN_EXPIRE_MINUTES=60
api/app.py (19 changes)
@@ -1,4 +1,3 @@
import os
import sys

@@ -9,15 +8,10 @@ def is_db_command() -> bool:

# create app
flask_app = None
socketio_app = None

if is_db_command():
    from app_factory import create_migrations_app

    app = create_migrations_app()
    socketio_app = app
    flask_app = app
else:
    # Gunicorn and Celery handle monkey patching automatically in production by
    # specifying the `gevent` worker class. Manual monkey patching is not required here.
@@ -28,15 +22,8 @@ else:

    from app_factory import create_app

    socketio_app, flask_app = create_app()
    app = flask_app
    celery = flask_app.extensions["celery"]
    app = create_app()
    celery = app.extensions["celery"]

if __name__ == "__main__":
    from gevent import pywsgi
    from geventwebsocket.handler import WebSocketHandler  # type: ignore[reportMissingTypeStubs]

    host = os.environ.get("HOST", "0.0.0.0")
    port = int(os.environ.get("PORT", 5001))
    server = pywsgi.WSGIServer((host, port), socketio_app, handler_class=WebSocketHandler)
    server.serve_forever()
    app.run(host="0.0.0.0", port=5001)
@@ -1,7 +1,6 @@
import logging
import time

import socketio  # type: ignore[reportMissingTypeStubs]
from opentelemetry.trace import get_current_span
from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID

@@ -9,7 +8,6 @@ from configs import dify_config
from contexts.wrapper import RecyclableContextVar
from core.logging.context import init_request_context
from dify_app import DifyApp
from extensions.ext_socketio import sio

logger = logging.getLogger(__name__)

@@ -62,18 +60,14 @@ def create_flask_app_with_configs() -> DifyApp:
    return dify_app


def create_app() -> tuple[socketio.WSGIApp, DifyApp]:
def create_app() -> DifyApp:
    start_time = time.perf_counter()
    app = create_flask_app_with_configs()
    initialize_extensions(app)

    sio.app = app
    socketio_app = socketio.WSGIApp(sio, app)

    end_time = time.perf_counter()
    if dify_config.DEBUG:
        logger.info("Finished create_app (%s ms)", round((end_time - start_time) * 1000, 2))
    return socketio_app, app
    return app


def initialize_extensions(app: DifyApp):
@@ -1229,13 +1229,6 @@ class PositionConfig(BaseSettings):
        return {item.strip() for item in self.POSITION_TOOL_EXCLUDES.split(",") if item.strip() != ""}


class CollaborationConfig(BaseSettings):
    ENABLE_COLLABORATION_MODE: bool = Field(
        description="Whether to enable collaboration mode features across the workspace",
        default=False,
    )


class LoginConfig(BaseSettings):
    ENABLE_EMAIL_CODE_LOGIN: bool = Field(
        description="whether to enable email code login",
@@ -1354,7 +1347,6 @@ class FeatureConfig(
    WorkflowConfig,
    WorkflowNodeExecutionConfig,
    WorkspaceConfig,
    CollaborationConfig,
    LoginConfig,
    AccountConfig,
    SwaggerUIConfig,
@@ -63,7 +63,6 @@ from .app import (
    statistic,
    workflow,
    workflow_app_log,
    workflow_comment,
    workflow_draft_variable,
    workflow_run,
    workflow_statistic,
@@ -115,7 +114,6 @@ from .explore import (
    saved_message,
    trial,
)
from .socketio import workflow as socketio_workflow  # pyright: ignore[reportUnusedImport]

# Import tag controllers
from .tag import tags
@@ -209,7 +207,6 @@ __all__ = [
    "website",
    "workflow",
    "workflow_app_log",
    "workflow_comment",
    "workflow_draft_variable",
    "workflow_run",
    "workflow_statistic",
@@ -32,10 +32,8 @@ from core.trigger.debug.event_selectors import (
from core.workflow.enums import NodeType
from core.workflow.graph_engine.manager import GraphEngineManager
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from factories import file_factory, variable_factory
from fields.member_fields import simple_account_fields
from fields.online_user_fields import online_user_list_fields
from fields.workflow_fields import workflow_fields, workflow_pagination_fields
from fields.workflow_run_fields import workflow_run_node_execution_fields
from libs import helper
@@ -45,7 +43,6 @@ from libs.login import current_account_with_tenant, login_required
from models import App
from models.model import AppMode
from models.workflow import Workflow
from repositories.workflow_collaboration_repository import WORKFLOW_ONLINE_USERS_PREFIX
from services.app_generate_service import AppGenerateService
from services.errors.app import WorkflowHashNotEqualError
from services.errors.llm import InvokeRateLimitError
@@ -183,14 +180,6 @@ class WorkflowUpdatePayload(BaseModel):
    marked_comment: str | None = Field(default=None, max_length=100)


class WorkflowFeaturesPayload(BaseModel):
    features: dict[str, Any] = Field(..., description="Workflow feature configuration")


class WorkflowOnlineUsersQuery(BaseModel):
    workflow_ids: str = Field(..., description="Comma-separated workflow IDs")


class DraftWorkflowTriggerRunPayload(BaseModel):
    node_id: str

@@ -214,8 +203,6 @@ reg(DefaultBlockConfigQuery)
reg(ConvertToWorkflowPayload)
reg(WorkflowListQuery)
reg(WorkflowUpdatePayload)
reg(WorkflowFeaturesPayload)
reg(WorkflowOnlineUsersQuery)
reg(DraftWorkflowTriggerRunPayload)
reg(DraftWorkflowTriggerRunAllPayload)

@@ -483,7 +470,7 @@ class AdvancedChatDraftRunLoopNodeApi(Resource):
        Run draft workflow loop node
        """
        current_user, _ = current_account_with_tenant()
        args = LoopNodeRunPayload.model_validate(console_ns.payload or {}).model_dump(exclude_none=True)
        args = LoopNodeRunPayload.model_validate(console_ns.payload or {})

        try:
            response = AppGenerateService.generate_single_loop(
@@ -521,7 +508,7 @@ class WorkflowDraftRunLoopNodeApi(Resource):
        Run draft workflow loop node
        """
        current_user, _ = current_account_with_tenant()
        args = LoopNodeRunPayload.model_validate(console_ns.payload or {}).model_dump(exclude_none=True)
        args = LoopNodeRunPayload.model_validate(console_ns.payload or {})

        try:
            response = AppGenerateService.generate_single_loop(
@@ -804,31 +791,6 @@ class ConvertToWorkflowApi(Resource):
        }


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/features")
class WorkflowFeaturesApi(Resource):
    """Update draft workflow features."""

    @console_ns.expect(console_ns.models[WorkflowFeaturesPayload.__name__])
    @console_ns.doc("update_workflow_features")
    @console_ns.doc(description="Update draft workflow features")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.response(200, "Workflow features updated successfully")
    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
    def post(self, app_model: App):
        current_user, _ = current_account_with_tenant()

        args = WorkflowFeaturesPayload.model_validate(console_ns.payload or {})
        features = args.features

        workflow_service = WorkflowService()
        workflow_service.update_draft_workflow_features(app_model=app_model, features=features, account=current_user)

        return {"result": "success"}


@console_ns.route("/apps/<uuid:app_id>/workflows")
class PublishedAllWorkflowApi(Resource):
    @console_ns.expect(console_ns.models[WorkflowListQuery.__name__])
@@ -1037,6 +999,7 @@ class DraftWorkflowTriggerRunApi(Resource):
        if not event:
            return jsonable_encoder({"status": "waiting", "retry_in": LISTENING_RETRY_IN})
        workflow_args = dict(event.workflow_args)

        workflow_args[SKIP_PREPARE_USER_INPUTS_KEY] = True
        return helper.compact_generate_response(
            AppGenerateService.generate(
@@ -1185,6 +1148,7 @@ class DraftWorkflowTriggerRunAllApi(Resource):

        try:
            workflow_args = dict(trigger_debug_event.workflow_args)

            workflow_args[SKIP_PREPARE_USER_INPUTS_KEY] = True
            response = AppGenerateService.generate(
                app_model=app_model,
@@ -1204,32 +1168,3 @@ class DraftWorkflowTriggerRunAllApi(Resource):
                "status": "error",
            }
        ), 400


@console_ns.route("/apps/workflows/online-users")
class WorkflowOnlineUsersApi(Resource):
    @console_ns.expect(console_ns.models[WorkflowOnlineUsersQuery.__name__])
    @console_ns.doc("get_workflow_online_users")
    @console_ns.doc(description="Get workflow online users")
    @setup_required
    @login_required
    @account_initialization_required
    @marshal_with(online_user_list_fields)
    def get(self):
        args = WorkflowOnlineUsersQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore

        workflow_ids = [workflow_id.strip() for workflow_id in args.workflow_ids.split(",") if workflow_id.strip()]

        results = []
        for workflow_id in workflow_ids:
            users_json = redis_client.hgetall(f"{WORKFLOW_ONLINE_USERS_PREFIX}{workflow_id}")

            users = []
            for _, user_info_json in users_json.items():
                try:
                    users.append(json.loads(user_info_json))
                except Exception:
                    continue
            results.append({"workflow_id": workflow_id, "users": users})

        return {"data": results}
@@ -1,317 +0,0 @@
import logging

from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field

from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from fields.member_fields import account_with_role_fields
from fields.workflow_comment_fields import (
    workflow_comment_basic_fields,
    workflow_comment_create_fields,
    workflow_comment_detail_fields,
    workflow_comment_reply_create_fields,
    workflow_comment_reply_update_fields,
    workflow_comment_resolve_fields,
    workflow_comment_update_fields,
)
from libs.login import current_user, login_required
from models import App
from services.account_service import TenantService
from services.workflow_comment_service import WorkflowCommentService

logger = logging.getLogger(__name__)
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"


class WorkflowCommentCreatePayload(BaseModel):
    position_x: float = Field(..., description="Comment X position")
    position_y: float = Field(..., description="Comment Y position")
    content: str = Field(..., description="Comment content")
    mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")


class WorkflowCommentUpdatePayload(BaseModel):
    content: str = Field(..., description="Comment content")
    position_x: float | None = Field(default=None, description="Comment X position")
    position_y: float | None = Field(default=None, description="Comment Y position")
    mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")


class WorkflowCommentReplyCreatePayload(BaseModel):
    content: str = Field(..., description="Reply content")
    mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")


class WorkflowCommentReplyUpdatePayload(BaseModel):
    content: str = Field(..., description="Reply content")
    mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")


for model in (
    WorkflowCommentCreatePayload,
    WorkflowCommentUpdatePayload,
    WorkflowCommentReplyCreatePayload,
    WorkflowCommentReplyUpdatePayload,
):
    console_ns.schema_model(model.__name__, model.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))

workflow_comment_basic_model = console_ns.model("WorkflowCommentBasic", workflow_comment_basic_fields)
workflow_comment_detail_model = console_ns.model("WorkflowCommentDetail", workflow_comment_detail_fields)
workflow_comment_create_model = console_ns.model("WorkflowCommentCreate", workflow_comment_create_fields)
workflow_comment_update_model = console_ns.model("WorkflowCommentUpdate", workflow_comment_update_fields)
workflow_comment_resolve_model = console_ns.model("WorkflowCommentResolve", workflow_comment_resolve_fields)
workflow_comment_reply_create_model = console_ns.model(
    "WorkflowCommentReplyCreate", workflow_comment_reply_create_fields
)
workflow_comment_reply_update_model = console_ns.model(
    "WorkflowCommentReplyUpdate", workflow_comment_reply_update_fields
)
workflow_comment_mention_users_model = console_ns.model(
    "WorkflowCommentMentionUsers",
    {"users": fields.List(fields.Nested(account_with_role_fields))},
)


@console_ns.route("/apps/<uuid:app_id>/workflow/comments")
class WorkflowCommentListApi(Resource):
    """API for listing and creating workflow comments."""

    @console_ns.doc("list_workflow_comments")
    @console_ns.doc(description="Get all comments for a workflow")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.response(200, "Comments retrieved successfully", workflow_comment_basic_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_basic_model, envelope="data")
    def get(self, app_model: App):
        """Get all comments for a workflow."""
        comments = WorkflowCommentService.get_comments(tenant_id=current_user.current_tenant_id, app_id=app_model.id)

        return comments

    @console_ns.doc("create_workflow_comment")
    @console_ns.doc(description="Create a new workflow comment")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.expect(console_ns.models[WorkflowCommentCreatePayload.__name__])
    @console_ns.response(201, "Comment created successfully", workflow_comment_create_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_create_model)
    def post(self, app_model: App):
        """Create a new workflow comment."""
        payload = WorkflowCommentCreatePayload.model_validate(console_ns.payload or {})

        result = WorkflowCommentService.create_comment(
            tenant_id=current_user.current_tenant_id,
            app_id=app_model.id,
            created_by=current_user.id,
            content=payload.content,
            position_x=payload.position_x,
            position_y=payload.position_y,
            mentioned_user_ids=payload.mentioned_user_ids,
        )

        return result, 201


@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>")
class WorkflowCommentDetailApi(Resource):
    """API for managing individual workflow comments."""

    @console_ns.doc("get_workflow_comment")
    @console_ns.doc(description="Get a specific workflow comment")
    @console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
    @console_ns.response(200, "Comment retrieved successfully", workflow_comment_detail_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_detail_model)
    def get(self, app_model: App, comment_id: str):
        """Get a specific workflow comment."""
        comment = WorkflowCommentService.get_comment(
            tenant_id=current_user.current_tenant_id, app_id=app_model.id, comment_id=comment_id
        )

        return comment

    @console_ns.doc("update_workflow_comment")
    @console_ns.doc(description="Update a workflow comment")
    @console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
    @console_ns.expect(console_ns.models[WorkflowCommentUpdatePayload.__name__])
    @console_ns.response(200, "Comment updated successfully", workflow_comment_update_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_update_model)
    def put(self, app_model: App, comment_id: str):
        """Update a workflow comment."""
        payload = WorkflowCommentUpdatePayload.model_validate(console_ns.payload or {})

        result = WorkflowCommentService.update_comment(
            tenant_id=current_user.current_tenant_id,
            app_id=app_model.id,
            comment_id=comment_id,
            user_id=current_user.id,
            content=payload.content,
            position_x=payload.position_x,
            position_y=payload.position_y,
            mentioned_user_ids=payload.mentioned_user_ids,
        )

        return result

    @console_ns.doc("delete_workflow_comment")
    @console_ns.doc(description="Delete a workflow comment")
    @console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
    @console_ns.response(204, "Comment deleted successfully")
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    def delete(self, app_model: App, comment_id: str):
        """Delete a workflow comment."""
        WorkflowCommentService.delete_comment(
            tenant_id=current_user.current_tenant_id,
            app_id=app_model.id,
            comment_id=comment_id,
            user_id=current_user.id,
        )

        return {"result": "success"}, 204


@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/resolve")
class WorkflowCommentResolveApi(Resource):
    """API for resolving and reopening workflow comments."""

    @console_ns.doc("resolve_workflow_comment")
    @console_ns.doc(description="Resolve a workflow comment")
    @console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
    @console_ns.response(200, "Comment resolved successfully", workflow_comment_resolve_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_resolve_model)
    def post(self, app_model: App, comment_id: str):
        """Resolve a workflow comment."""
        comment = WorkflowCommentService.resolve_comment(
            tenant_id=current_user.current_tenant_id,
            app_id=app_model.id,
            comment_id=comment_id,
            user_id=current_user.id,
        )

        return comment


@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/replies")
class WorkflowCommentReplyApi(Resource):
    """API for managing comment replies."""

    @console_ns.doc("create_workflow_comment_reply")
    @console_ns.doc(description="Add a reply to a workflow comment")
    @console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
    @console_ns.expect(console_ns.models[WorkflowCommentReplyCreatePayload.__name__])
    @console_ns.response(201, "Reply created successfully", workflow_comment_reply_create_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_reply_create_model)
    def post(self, app_model: App, comment_id: str):
        """Add a reply to a workflow comment."""
        # Validate comment access first
        WorkflowCommentService.validate_comment_access(
            comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
        )

        payload = WorkflowCommentReplyCreatePayload.model_validate(console_ns.payload or {})

        result = WorkflowCommentService.create_reply(
            comment_id=comment_id,
            content=payload.content,
            created_by=current_user.id,
            mentioned_user_ids=payload.mentioned_user_ids,
        )

        return result, 201


@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/replies/<string:reply_id>")
class WorkflowCommentReplyDetailApi(Resource):
    """API for managing individual comment replies."""

    @console_ns.doc("update_workflow_comment_reply")
    @console_ns.doc(description="Update a comment reply")
    @console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID", "reply_id": "Reply ID"})
    @console_ns.expect(console_ns.models[WorkflowCommentReplyUpdatePayload.__name__])
    @console_ns.response(200, "Reply updated successfully", workflow_comment_reply_update_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_reply_update_model)
    def put(self, app_model: App, comment_id: str, reply_id: str):
        """Update a comment reply."""
        # Validate comment access first
        WorkflowCommentService.validate_comment_access(
            comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
        )

        payload = WorkflowCommentReplyUpdatePayload.model_validate(console_ns.payload or {})

        reply = WorkflowCommentService.update_reply(
            reply_id=reply_id,
            user_id=current_user.id,
            content=payload.content,
            mentioned_user_ids=payload.mentioned_user_ids,
        )

        return reply

    @console_ns.doc("delete_workflow_comment_reply")
    @console_ns.doc(description="Delete a comment reply")
    @console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID", "reply_id": "Reply ID"})
    @console_ns.response(204, "Reply deleted successfully")
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    def delete(self, app_model: App, comment_id: str, reply_id: str):
        """Delete a comment reply."""
        # Validate comment access first
        WorkflowCommentService.validate_comment_access(
            comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
        )

        WorkflowCommentService.delete_reply(reply_id=reply_id, user_id=current_user.id)

        return {"result": "success"}, 204


@console_ns.route("/apps/<uuid:app_id>/workflow/comments/mention-users")
class WorkflowCommentMentionUsersApi(Resource):
    """API for getting mentionable users for workflow comments."""

    @console_ns.doc("workflow_comment_mention_users")
    @console_ns.doc(description="Get all users in current tenant for mentions")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.response(200, "Mentionable users retrieved successfully", workflow_comment_mention_users_model)
    @login_required
    @setup_required
    @account_initialization_required
    @get_app_model()
    @marshal_with(workflow_comment_mention_users_model)
    def get(self, app_model: App):
        """Get all users in current tenant for mentions."""
        members = TenantService.get_tenant_members(current_user.current_tenant)
        return {"users": members}
@@ -21,9 +21,9 @@ from core.variables.segments import ArrayFileSegment, FileSegment, Segment
from core.variables.types import SegmentType
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from extensions.ext_database import db
from factories import variable_factory
from factories.file_factory import build_from_mapping, build_from_mappings
from libs.login import current_user, login_required
from factories.variable_factory import build_segment_with_type
from libs.login import login_required
from models import App, AppMode
from models.workflow import WorkflowDraftVariable
from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService
@@ -43,16 +43,6 @@ class WorkflowDraftVariableUpdatePayload(BaseModel):
    value: Any | None = Field(default=None, description="Variable value")


class ConversationVariableUpdatePayload(BaseModel):
    conversation_variables: list[dict[str, Any]] = Field(
        ..., description="Conversation variables for the draft workflow"
    )


class EnvironmentVariableUpdatePayload(BaseModel):
    environment_variables: list[dict[str, Any]] = Field(..., description="Environment variables for the draft workflow")


console_ns.schema_model(
    WorkflowDraftVariableListQuery.__name__,
    WorkflowDraftVariableListQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
@@ -61,14 +51,6 @@ console_ns.schema_model(
    WorkflowDraftVariableUpdatePayload.__name__,
    WorkflowDraftVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
    ConversationVariableUpdatePayload.__name__,
    ConversationVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
    EnvironmentVariableUpdatePayload.__name__,
    EnvironmentVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)


def _convert_values_to_json_serializable_object(value: Segment):
@@ -401,7 +383,7 @@ class VariableApi(Resource):
        if len(raw_value) > 0 and not isinstance(raw_value[0], dict):
            raise InvalidArgumentError(description=f"expected dict for files[0], got {type(raw_value)}")
        raw_value = build_from_mappings(mappings=raw_value, tenant_id=app_model.tenant_id)
        new_value = variable_factory.build_segment_with_type(variable.value_type, raw_value)
        new_value = build_segment_with_type(variable.value_type, raw_value)
        draft_var_srv.update_variable(variable, name=new_name, value=new_value)
        db.session.commit()
        return variable
@@ -494,34 +476,6 @@ class ConversationVariableCollectionApi(Resource):
        db.session.commit()
        return _get_variable_list(app_model, CONVERSATION_VARIABLE_NODE_ID)

    @console_ns.expect(console_ns.models[ConversationVariableUpdatePayload.__name__])
    @console_ns.doc("update_conversation_variables")
    @console_ns.doc(description="Update conversation variables for workflow draft")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.response(200, "Conversation variables updated successfully")
    @setup_required
    @login_required
    @account_initialization_required
    @edit_permission_required
    @get_app_model(mode=AppMode.ADVANCED_CHAT)
    def post(self, app_model: App):
        payload = ConversationVariableUpdatePayload.model_validate(console_ns.payload or {})

        workflow_service = WorkflowService()

        conversation_variables_list = payload.conversation_variables
        conversation_variables = [
            variable_factory.build_conversation_variable_from_mapping(obj) for obj in conversation_variables_list
        ]

        workflow_service.update_draft_workflow_conversation_variables(
            app_model=app_model,
            account=current_user,
            conversation_variables=conversation_variables,
        )

        return {"result": "success"}


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/system-variables")
class SystemVariableCollectionApi(Resource):
@@ -573,31 +527,3 @@ class EnvironmentVariableCollectionApi(Resource):
        )

        return {"items": env_vars_list}

    @console_ns.expect(console_ns.models[EnvironmentVariableUpdatePayload.__name__])
    @console_ns.doc("update_environment_variables")
    @console_ns.doc(description="Update environment variables for workflow draft")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.response(200, "Environment variables updated successfully")
    @setup_required
    @login_required
    @account_initialization_required
    @edit_permission_required
    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
    def post(self, app_model: App):
        payload = EnvironmentVariableUpdatePayload.model_validate(console_ns.payload or {})

        workflow_service = WorkflowService()

        environment_variables_list = payload.environment_variables
        environment_variables = [
            variable_factory.build_environment_variable_from_mapping(obj) for obj in environment_variables_list
        ]

        workflow_service.update_draft_workflow_environment_variables(
            app_model=app_model,
            account=current_user,
            environment_variables=environment_variables,
        )

        return {"result": "success"}
@@ -1,20 +1,19 @@
from typing import Literal

from flask import request
from flask_restx import Resource, fields
from pydantic import BaseModel, Field, field_validator

from configs import dify_config
from controllers.fastopenapi import console_router
from libs.helper import EmailStr, extract_remote_ip
from libs.password import valid_password
from models.model import DifySetup, db
from services.account_service import RegisterService, TenantService

from . import console_ns
from .error import AlreadySetupError, NotInitValidateError
from .init_validate import get_init_validate_status
from .wraps import only_edition_self_hosted

DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"


class SetupRequestPayload(BaseModel):
    email: EmailStr = Field(..., description="Admin email address")
@@ -28,78 +27,66 @@ class SetupRequestPayload(BaseModel):
        return valid_password(value)


console_ns.schema_model(
    SetupRequestPayload.__name__,
    SetupRequestPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
class SetupStatusResponse(BaseModel):
    step: Literal["not_started", "finished"] = Field(description="Setup step status")
    setup_at: str | None = Field(default=None, description="Setup completion time (ISO format)")


class SetupResponse(BaseModel):
    result: str = Field(description="Setup result", examples=["success"])


@console_router.get(
    "/setup",
    response_model=SetupStatusResponse,
    tags=["console"],
)
def get_setup_status_api() -> SetupStatusResponse:
    """Get system setup status."""
    if dify_config.EDITION == "SELF_HOSTED":
        setup_status = get_setup_status()
        if setup_status and not isinstance(setup_status, bool):
            return SetupStatusResponse(step="finished", setup_at=setup_status.setup_at.isoformat())
        if setup_status:
            return SetupStatusResponse(step="finished")
        return SetupStatusResponse(step="not_started")
    return SetupStatusResponse(step="finished")


@console_ns.route("/setup")
class SetupApi(Resource):
    @console_ns.doc("get_setup_status")
    @console_ns.doc(description="Get system setup status")
    @console_ns.response(
        200,
        "Success",
        console_ns.model(
            "SetupStatusResponse",
            {
                "step": fields.String(description="Setup step status", enum=["not_started", "finished"]),
                "setup_at": fields.String(description="Setup completion time (ISO format)", required=False),
            },
        ),
@console_router.post(
    "/setup",
    response_model=SetupResponse,
    tags=["console"],
    status_code=201,
)
@only_edition_self_hosted
def setup_system(payload: SetupRequestPayload) -> SetupResponse:
    """Initialize system setup with admin account."""
    if get_setup_status():
        raise AlreadySetupError()

    tenant_count = TenantService.get_tenant_count()
    if tenant_count > 0:
        raise AlreadySetupError()

    if not get_init_validate_status():
        raise NotInitValidateError()

    normalized_email = payload.email.lower()

    RegisterService.setup(
        email=normalized_email,
        name=payload.name,
        password=payload.password,
        ip_address=extract_remote_ip(request),
        language=payload.language,
    )
    def get(self):
        """Get system setup status"""
        if dify_config.EDITION == "SELF_HOSTED":
            setup_status = get_setup_status()
            # Check if setup_status is a DifySetup object rather than a bool
            if setup_status and not isinstance(setup_status, bool):
                return {"step": "finished", "setup_at": setup_status.setup_at.isoformat()}
            elif setup_status:
                return {"step": "finished"}
            return {"step": "not_started"}
        return {"step": "finished"}

    @console_ns.doc("setup_system")
    @console_ns.doc(description="Initialize system setup with admin account")
    @console_ns.expect(console_ns.models[SetupRequestPayload.__name__])
    @console_ns.response(
        201, "Success", console_ns.model("SetupResponse", {"result": fields.String(description="Setup result")})
    )
    @console_ns.response(400, "Already setup or validation failed")
    @only_edition_self_hosted
    def post(self):
        """Initialize system setup with admin account"""
        # is set up
        if get_setup_status():
            raise AlreadySetupError()

        # is tenant created
        tenant_count = TenantService.get_tenant_count()
        if tenant_count > 0:
            raise AlreadySetupError()

        if not get_init_validate_status():
            raise NotInitValidateError()

        args = SetupRequestPayload.model_validate(console_ns.payload)
        normalized_email = args.email.lower()

        # setup
        RegisterService.setup(
            email=normalized_email,
            name=args.name,
            password=args.password,
            ip_address=extract_remote_ip(request),
            language=args.language,
        )

        return {"result": "success"}, 201
    return SetupResponse(result="success")


def get_setup_status():
def get_setup_status() -> DifySetup | bool | None:
    if dify_config.EDITION == "SELF_HOSTED":
        return db.session.query(DifySetup).first()
    else:
        return True
    return True
@ -1 +0,0 @@
@@ -1,108 +0,0 @@
import logging
from collections.abc import Callable
from typing import cast

from flask import Request as FlaskRequest

from extensions.ext_socketio import sio
from libs.passport import PassportService
from libs.token import extract_access_token
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository
from services.account_service import AccountService
from services.workflow_collaboration_service import WorkflowCollaborationService

repository = WorkflowCollaborationRepository()
collaboration_service = WorkflowCollaborationService(repository, sio)


def _sio_on(event: str) -> Callable[[Callable[..., object]], Callable[..., object]]:
    return cast(Callable[[Callable[..., object]], Callable[..., object]], sio.on(event))


@_sio_on("connect")
def socket_connect(sid, environ, auth):
    """
    WebSocket connect event, do authentication here.
    """
    try:
        request_environ = FlaskRequest(environ)
        token = extract_access_token(request_environ)
    except Exception:
        logging.exception("Failed to extract token")
        token = None

    if not token:
        logging.warning("Socket connect rejected: missing token (sid=%s)", sid)
        return False

    try:
        decoded = PassportService().verify(token)
        user_id = decoded.get("user_id")
        if not user_id:
            logging.warning("Socket connect rejected: missing user_id (sid=%s)", sid)
            return False

        with sio.app.app_context():
            user = AccountService.load_logged_in_account(account_id=user_id)
            if not user:
                logging.warning("Socket connect rejected: user not found (user_id=%s, sid=%s)", user_id, sid)
                return False
            if not user.has_edit_permission:
                logging.warning("Socket connect rejected: no edit permission (user_id=%s, sid=%s)", user_id, sid)
                return False

            collaboration_service.save_session(sid, user)
            return True

    except Exception:
        logging.exception("Socket authentication failed")
        return False


@_sio_on("user_connect")
def handle_user_connect(sid, data):
    """
    Handle user connect event. Each session (tab) is treated as an independent collaborator.
    """
    workflow_id = data.get("workflow_id")
    if not workflow_id:
        return {"msg": "workflow_id is required"}, 400

    result = collaboration_service.register_session(workflow_id, sid)
    if not result:
        return {"msg": "unauthorized"}, 401

    user_id, is_leader = result
    return {"msg": "connected", "user_id": user_id, "sid": sid, "isLeader": is_leader}


@_sio_on("disconnect")
def handle_disconnect(sid):
    """
    Handle session disconnect event. Remove the specific session from online users.
    """
    collaboration_service.disconnect_session(sid)


@_sio_on("collaboration_event")
def handle_collaboration_event(sid, data):
    """
    Handle general collaboration events, including:
    1. mouse_move
    2. vars_and_features_update
    3. sync_request (ask leader to update graph)
    4. app_state_update
    5. mcp_server_update
    6. workflow_update
    7. comments_update
    8. node_panel_presence
    """
    return collaboration_service.relay_collaboration_event(sid, data)


@_sio_on("graph_event")
def handle_graph_event(sid, data):
    """
    Handle graph events - simple broadcast relay.
    """
    return collaboration_service.relay_graph_event(sid, data)
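# A hedged client-side sketch (python-socketio) of exercising the handlers
# above; the URL, token, workflow ID, and event payload shape are placeholders
# inferred from the handler docstrings, not confirmed by this diff.
import socketio

client = socketio.Client()
# The connect handler authenticates by extracting an access token from the request.
client.connect("http://localhost:5001", headers={"Authorization": "Bearer <access-token>"})

# Each session (tab) registers as an independent collaborator.
ack = client.call("user_connect", {"workflow_id": "<workflow-id>"})
# ack resembles {"msg": "connected", "user_id": ..., "sid": ..., "isLeader": ...}

# Relay a collaboration event (e.g. a cursor move) to other sessions.
client.emit("collaboration_event", {"type": "mouse_move", "x": 120, "y": 80})

client.disconnect()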
@@ -1,15 +1,11 @@
import json
import logging

import httpx
from flask import request
from flask_restx import Resource, fields
from packaging import version
from pydantic import BaseModel, Field

from configs import dify_config

from . import console_ns
from controllers.fastopenapi import console_router

logger = logging.getLogger(__name__)

@@ -18,69 +14,61 @@ class VersionQuery(BaseModel):
    current_version: str = Field(..., description="Current application version")


console_ns.schema_model(
    VersionQuery.__name__,
    VersionQuery.model_json_schema(ref_template="#/definitions/{model}"),
class VersionFeatures(BaseModel):
    can_replace_logo: bool = Field(description="Whether logo replacement is supported")
    model_load_balancing_enabled: bool = Field(description="Whether model load balancing is enabled")


class VersionResponse(BaseModel):
    version: str = Field(description="Latest version number")
    release_date: str = Field(description="Release date of latest version")
    release_notes: str = Field(description="Release notes for latest version")
    can_auto_update: bool = Field(description="Whether auto-update is supported")
    features: VersionFeatures = Field(description="Feature flags and capabilities")


@console_router.get(
    "/version",
    response_model=VersionResponse,
    tags=["console"],
)
def check_version_update(query: VersionQuery) -> VersionResponse:
    """Check for application version updates."""
    check_update_url = dify_config.CHECK_UPDATE_URL


@console_ns.route("/version")
class VersionApi(Resource):
    @console_ns.doc("check_version_update")
    @console_ns.doc(description="Check for application version updates")
    @console_ns.expect(console_ns.models[VersionQuery.__name__])
    @console_ns.response(
        200,
        "Success",
        console_ns.model(
            "VersionResponse",
            {
                "version": fields.String(description="Latest version number"),
                "release_date": fields.String(description="Release date of latest version"),
                "release_notes": fields.String(description="Release notes for latest version"),
                "can_auto_update": fields.Boolean(description="Whether auto-update is supported"),
                "features": fields.Raw(description="Feature flags and capabilities"),
            },
    result = VersionResponse(
        version=dify_config.project.version,
        release_date="",
        release_notes="",
        can_auto_update=False,
        features=VersionFeatures(
            can_replace_logo=dify_config.CAN_REPLACE_LOGO,
            model_load_balancing_enabled=dify_config.MODEL_LB_ENABLED,
        ),
    )
    def get(self):
        """Check for application version updates"""
        args = VersionQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore
        check_update_url = dify_config.CHECK_UPDATE_URL

        result = {
            "version": dify_config.project.version,
            "release_date": "",
            "release_notes": "",
            "can_auto_update": False,
            "features": {
                "can_replace_logo": dify_config.CAN_REPLACE_LOGO,
                "model_load_balancing_enabled": dify_config.MODEL_LB_ENABLED,
            },
        }

        if not check_update_url:
            return result

        try:
            response = httpx.get(
                check_update_url,
                params={"current_version": args.current_version},
                timeout=httpx.Timeout(timeout=10.0, connect=3.0),
            )
        except Exception as error:
            logger.warning("Check update version error: %s.", str(error))
            result["version"] = args.current_version
            return result

        content = json.loads(response.content)
        if _has_new_version(latest_version=content["version"], current_version=f"{args.current_version}"):
            result["version"] = content["version"]
            result["release_date"] = content["releaseDate"]
            result["release_notes"] = content["releaseNotes"]
            result["can_auto_update"] = content["canAutoUpdate"]
    if not check_update_url:
        return result

    try:
        response = httpx.get(
            check_update_url,
            params={"current_version": query.current_version},
            timeout=httpx.Timeout(timeout=10.0, connect=3.0),
        )
        content = response.json()
    except Exception as error:
        logger.warning("Check update version error: %s.", str(error))
        result.version = query.current_version
        return result
    latest_version = content.get("version", result.version)
    if _has_new_version(latest_version=latest_version, current_version=f"{query.current_version}"):
        result.version = latest_version
        result.release_date = content.get("releaseDate", "")
        result.release_notes = content.get("releaseNotes", "")
        result.can_auto_update = content.get("canAutoUpdate", False)
    return result


def _has_new_version(*, latest_version: str, current_version: str) -> bool:
    try:
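# A plausible completion of _has_new_version, which the hunk above truncates
# at `try:`. Assumption: it compares the two strings with packaging.version
# and treats unparseable version strings as "no update available".
def _has_new_version_sketch(*, latest_version: str, current_version: str) -> bool:
    try:
        return version.parse(latest_version) > version.parse(current_version)
    except Exception:
        logger.warning("Failed to compare versions %s / %s", latest_version, current_version)
        return False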
@@ -36,7 +36,6 @@ from controllers.console.wraps import (
    only_edition_cloud,
    setup_required,
)
from core.file import helpers as file_helpers
from extensions.ext_database import db
from fields.member_fields import account_fields
from libs.datetime_utils import naive_utc_now
@@ -74,10 +73,6 @@ class AccountAvatarPayload(BaseModel):
    avatar: str


class AccountAvatarQuery(BaseModel):
    avatar: str = Field(..., description="Avatar file ID")


class AccountInterfaceLanguagePayload(BaseModel):
    interface_language: str

@@ -163,7 +158,6 @@ def reg(cls: type[BaseModel]):
reg(AccountInitPayload)
reg(AccountNamePayload)
reg(AccountAvatarPayload)
reg(AccountAvatarQuery)
reg(AccountInterfaceLanguagePayload)
reg(AccountInterfaceThemePayload)
reg(AccountTimezonePayload)
@@ -254,18 +248,6 @@ class AccountNameApi(Resource):

@console_ns.route("/account/avatar")
class AccountAvatarApi(Resource):
    @console_ns.expect(console_ns.models[AccountAvatarQuery.__name__])
    @console_ns.doc("get_account_avatar")
    @console_ns.doc(description="Get account avatar url")
    @setup_required
    @login_required
    @account_initialization_required
    def get(self):
        args = AccountAvatarQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore

        avatar_url = file_helpers.get_signed_file_url(args.avatar)
        return {"avatar_url": avatar_url}

    @console_ns.expect(console_ns.models[AccountAvatarPayload.__name__])
    @setup_required
    @login_required
@@ -11,7 +11,9 @@ from controllers.service_api.wraps import DatasetApiResource, cloud_edition_bill
from fields.dataset_fields import dataset_metadata_fields
from services.dataset_service import DatasetService
from services.entities.knowledge_entities.knowledge_entities import (
    DocumentMetadataOperation,
    MetadataArgs,
    MetadataDetail,
    MetadataOperationData,
)
from services.metadata_service import MetadataService
@@ -22,7 +24,13 @@ class MetadataUpdatePayload(BaseModel):


register_schema_model(service_api_ns, MetadataUpdatePayload)
register_schema_models(service_api_ns, MetadataArgs, MetadataOperationData)
register_schema_models(
    service_api_ns,
    MetadataArgs,
    MetadataDetail,
    DocumentMetadataOperation,
    MetadataOperationData,
)


@service_api_ns.route("/datasets/<uuid:dataset_id>/metadata")
@@ -1,9 +1,11 @@
from __future__ import annotations

import contextvars
import logging
import threading
import uuid
from collections.abc import Generator, Mapping
from typing import Any, Literal, Union, overload
from typing import TYPE_CHECKING, Any, Literal, Union, overload

from flask import Flask, current_app
from pydantic import ValidationError
@@ -13,6 +15,9 @@ from sqlalchemy.orm import Session, sessionmaker
import contexts
from configs import dify_config
from constants import UUID_NIL

if TYPE_CHECKING:
    from controllers.console.app.workflow import LoopNodeRunPayload
from core.app.app_config.features.file_upload.manager import FileUploadConfigManager
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
from core.app.apps.advanced_chat.app_runner import AdvancedChatAppRunner
@@ -304,7 +309,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
        workflow: Workflow,
        node_id: str,
        user: Account | EndUser,
        args: Mapping,
        args: LoopNodeRunPayload,
        streaming: bool = True,
    ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], Any, None]:
        """
@@ -320,7 +325,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
        if not node_id:
            raise ValueError("node_id is required")

        if args.get("inputs") is None:
        if args.inputs is None:
            raise ValueError("inputs is required")

        # convert to app config
@@ -338,7 +343,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
            stream=streaming,
            invoke_from=InvokeFrom.DEBUGGER,
            extras={"auto_generate_conversation_name": False},
            single_loop_run=AdvancedChatAppGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args["inputs"]),
            single_loop_run=AdvancedChatAppGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args.inputs),
        )
        contexts.plugin_tool_providers.set({})
        contexts.plugin_tool_providers_lock.set(threading.Lock())
@@ -1,9 +1,11 @@
from __future__ import annotations

import contextvars
import logging
import threading
import uuid
from collections.abc import Generator, Mapping, Sequence
from typing import Any, Literal, Union, overload
from typing import TYPE_CHECKING, Any, Literal, Union, overload

from flask import Flask, current_app
from pydantic import ValidationError
@@ -40,6 +42,9 @@ from models import Account, App, EndUser, Workflow, WorkflowNodeExecutionTrigger
from models.enums import WorkflowRunTriggeredFrom
from services.workflow_draft_variable_service import DraftVarLoader, WorkflowDraftVariableService

if TYPE_CHECKING:
    from controllers.console.app.workflow import LoopNodeRunPayload

SKIP_PREPARE_USER_INPUTS_KEY = "_skip_prepare_user_inputs"

logger = logging.getLogger(__name__)
@@ -381,7 +386,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
        workflow: Workflow,
        node_id: str,
        user: Account | EndUser,
        args: Mapping[str, Any],
        args: LoopNodeRunPayload,
        streaming: bool = True,
    ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], None, None]:
        """
@@ -397,7 +402,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
        if not node_id:
            raise ValueError("node_id is required")

        if args.get("inputs") is None:
        if args.inputs is None:
            raise ValueError("inputs is required")

        # convert to app config
@@ -413,7 +418,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
            stream=streaming,
            invoke_from=InvokeFrom.DEBUGGER,
            extras={"auto_generate_conversation_name": False},
            single_loop_run=WorkflowAppGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args["inputs"]),
            single_loop_run=WorkflowAppGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args.inputs or {}),
            workflow_execution_id=str(uuid.uuid4()),
        )
        contexts.plugin_tool_providers.set({})
@ -1,7 +1,7 @@
|
||||
import logging
|
||||
import time
|
||||
import uuid
|
||||
from collections.abc import Generator, Sequence
|
||||
from collections.abc import Callable, Generator, Iterator, Sequence
|
||||
from typing import Union
|
||||
|
||||
from pydantic import ConfigDict
|
||||
@ -30,6 +30,142 @@ def _gen_tool_call_id() -> str:
|
||||
return f"chatcmpl-tool-{str(uuid.uuid4().hex)}"
|
||||
|
||||
|
||||
def _run_callbacks(callbacks: Sequence[Callback] | None, *, event: str, invoke: Callable[[Callback], None]) -> None:
|
||||
if not callbacks:
|
||||
return
|
||||
|
||||
for callback in callbacks:
|
||||
try:
|
||||
invoke(callback)
|
||||
except Exception as e:
|
||||
if callback.raise_error:
|
||||
raise
|
||||
logger.warning("Callback %s %s failed with error %s", callback.__class__.__name__, event, e)
|
||||
|
||||
|
||||
def _get_or_create_tool_call(
    existing_tools_calls: list[AssistantPromptMessage.ToolCall],
    tool_call_id: str,
) -> AssistantPromptMessage.ToolCall:
    """
    Get or create a tool call by ID.

    If `tool_call_id` is empty, returns the most recently created tool call.
    """
    if not tool_call_id:
        if not existing_tools_calls:
            raise ValueError("tool_call_id is empty but no existing tool call is available to apply the delta")
        return existing_tools_calls[-1]

    tool_call = next((tool_call for tool_call in existing_tools_calls if tool_call.id == tool_call_id), None)
    if tool_call is None:
        tool_call = AssistantPromptMessage.ToolCall(
            id=tool_call_id,
            type="function",
            function=AssistantPromptMessage.ToolCall.ToolCallFunction(name="", arguments=""),
        )
        existing_tools_calls.append(tool_call)

    return tool_call


def _merge_tool_call_delta(
    tool_call: AssistantPromptMessage.ToolCall,
    delta: AssistantPromptMessage.ToolCall,
) -> None:
    if delta.id:
        tool_call.id = delta.id
    if delta.type:
        tool_call.type = delta.type
    if delta.function.name:
        tool_call.function.name = delta.function.name
    if delta.function.arguments:
        tool_call.function.arguments += delta.function.arguments
def _build_llm_result_from_first_chunk(
    model: str,
    prompt_messages: Sequence[PromptMessage],
    chunks: Iterator[LLMResultChunk],
) -> LLMResult:
    """
    Build a single `LLMResult` from the first returned chunk.

    This is used for `stream=False` because the plugin side may still implement the response via a chunked stream.
    """
    content = ""
    content_list: list[PromptMessageContentUnionTypes] = []
    usage = LLMUsage.empty_usage()
    system_fingerprint: str | None = None
    tools_calls: list[AssistantPromptMessage.ToolCall] = []

    first_chunk = next(chunks, None)
    if first_chunk is not None:
        if isinstance(first_chunk.delta.message.content, str):
            content += first_chunk.delta.message.content
        elif isinstance(first_chunk.delta.message.content, list):
            content_list.extend(first_chunk.delta.message.content)

        if first_chunk.delta.message.tool_calls:
            _increase_tool_call(first_chunk.delta.message.tool_calls, tools_calls)

        usage = first_chunk.delta.usage or LLMUsage.empty_usage()
        system_fingerprint = first_chunk.system_fingerprint

    return LLMResult(
        model=model,
        prompt_messages=prompt_messages,
        message=AssistantPromptMessage(
            content=content or content_list,
            tool_calls=tools_calls,
        ),
        usage=usage,
        system_fingerprint=system_fingerprint,
    )
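Note the `stream=False` contract here: only the first chunk is consumed, matching the pre-refactor loop further down that `break`s after one iteration. A hedged sketch of the same first-chunk pattern in isolation; `FakeChunk` is a simplified stand-in, not the real `LLMResultChunk`:

```python
from dataclasses import dataclass


@dataclass
class FakeChunk:  # simplified stand-in for LLMResultChunk
    text: str


def first_chunk_only(chunks) -> str:
    # next(..., None) never raises StopIteration, so an empty stream
    # degrades to an empty result instead of an error.
    chunk = next(iter(chunks), None)
    return chunk.text if chunk is not None else ""


assert first_chunk_only([FakeChunk("a"), FakeChunk("b")]) == "a"
assert first_chunk_only([]) == ""
```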
def _invoke_llm_via_plugin(
    *,
    tenant_id: str,
    user_id: str,
    plugin_id: str,
    provider: str,
    model: str,
    credentials: dict,
    model_parameters: dict,
    prompt_messages: Sequence[PromptMessage],
    tools: list[PromptMessageTool] | None,
    stop: Sequence[str] | None,
    stream: bool,
) -> Union[LLMResult, Generator[LLMResultChunk, None, None]]:
    from core.plugin.impl.model import PluginModelClient

    plugin_model_manager = PluginModelClient()
    return plugin_model_manager.invoke_llm(
        tenant_id=tenant_id,
        user_id=user_id,
        plugin_id=plugin_id,
        provider=provider,
        model=model,
        credentials=credentials,
        model_parameters=model_parameters,
        prompt_messages=list(prompt_messages),
        tools=tools,
        stop=list(stop) if stop else None,
        stream=stream,
    )


def _normalize_non_stream_plugin_result(
    model: str,
    prompt_messages: Sequence[PromptMessage],
    result: Union[LLMResult, Iterator[LLMResultChunk]],
) -> LLMResult:
    if isinstance(result, LLMResult):
        return result
    return _build_llm_result_from_first_chunk(model=model, prompt_messages=prompt_messages, chunks=result)


def _increase_tool_call(
    new_tool_calls: list[AssistantPromptMessage.ToolCall], existing_tools_calls: list[AssistantPromptMessage.ToolCall]
):
@@ -40,42 +176,13 @@ def _increase_tool_call(
    :param existing_tools_calls: List of existing tool calls to be modified IN-PLACE.
    """

-   def get_tool_call(tool_call_id: str):
-       """
-       Get or create a tool call by ID
-
-       :param tool_call_id: tool call ID
-       :return: existing or new tool call
-       """
-       if not tool_call_id:
-           return existing_tools_calls[-1]
-
-       _tool_call = next((_tool_call for _tool_call in existing_tools_calls if _tool_call.id == tool_call_id), None)
-       if _tool_call is None:
-           _tool_call = AssistantPromptMessage.ToolCall(
-               id=tool_call_id,
-               type="function",
-               function=AssistantPromptMessage.ToolCall.ToolCallFunction(name="", arguments=""),
-           )
-           existing_tools_calls.append(_tool_call)
-
-       return _tool_call
-
    for new_tool_call in new_tool_calls:
        # generate ID for tool calls with function name but no ID to track them
        if new_tool_call.function.name and not new_tool_call.id:
            new_tool_call.id = _gen_tool_call_id()
-       # get tool call
-       tool_call = get_tool_call(new_tool_call.id)
-       # update tool call
-       if new_tool_call.id:
-           tool_call.id = new_tool_call.id
-       if new_tool_call.type:
-           tool_call.type = new_tool_call.type
-       if new_tool_call.function.name:
-           tool_call.function.name = new_tool_call.function.name
-       if new_tool_call.function.arguments:
-           tool_call.function.arguments += new_tool_call.function.arguments
+       tool_call = _get_or_create_tool_call(existing_tools_calls, new_tool_call.id)
+       _merge_tool_call_delta(tool_call, new_tool_call)
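To make the merge semantics concrete, here is a hedged, self-contained sketch of the chunking pattern this function tolerates where the tool-call ID is anchored to the first chunk and omitted afterwards. `ToolCall` and `Fn` below are simplified stand-ins, not the real `AssistantPromptMessage.ToolCall` entities, and ID generation for name-only deltas is left out:

```python
from dataclasses import dataclass, field


@dataclass
class Fn:
    name: str = ""
    arguments: str = ""


@dataclass
class ToolCall:  # simplified stand-in for AssistantPromptMessage.ToolCall
    id: str = ""
    type: str = ""
    function: Fn = field(default_factory=Fn)


def merge(deltas: list[ToolCall], existing: list[ToolCall]) -> None:
    """Mimics _increase_tool_call for ID-anchored-to-first-chunk providers."""
    for d in deltas:
        if not d.id and not existing:
            # matches the ValueError raised by _get_or_create_tool_call
            raise ValueError("empty id with no existing tool call")
        target = next((t for t in existing if t.id == d.id), None) if d.id else existing[-1]
        if target is None:
            target = ToolCall(id=d.id, type="function")
            existing.append(target)
        if d.function.name:
            target.function.name = d.function.name
        target.function.arguments += d.function.arguments


calls: list[ToolCall] = []
merge([ToolCall(id="call_1", function=Fn(name="get_weather"))], calls)  # first chunk carries the ID
merge([ToolCall(function=Fn(arguments='{"city": "Par'))], calls)        # later chunks omit it
merge([ToolCall(function=Fn(arguments='is"}'))], calls)
assert calls[0].function.arguments == '{"city": "Paris"}'
```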
class LargeLanguageModel(AIModel):
@@ -141,10 +248,7 @@ class LargeLanguageModel(AIModel):
        result: Union[LLMResult, Generator[LLMResultChunk, None, None]]

        try:
-           from core.plugin.impl.model import PluginModelClient
-
-           plugin_model_manager = PluginModelClient()
-           result = plugin_model_manager.invoke_llm(
+           result = _invoke_llm_via_plugin(
                tenant_id=self.tenant_id,
                user_id=user or "unknown",
                plugin_id=self.plugin_id,
@@ -154,38 +258,13 @@ class LargeLanguageModel(AIModel):
                model_parameters=model_parameters,
                prompt_messages=prompt_messages,
                tools=tools,
-               stop=list(stop) if stop else None,
+               stop=stop,
                stream=stream,
            )

            if not stream:
-               content = ""
-               content_list = []
-               usage = LLMUsage.empty_usage()
-               system_fingerprint = None
-               tools_calls: list[AssistantPromptMessage.ToolCall] = []
-
-               for chunk in result:
-                   if isinstance(chunk.delta.message.content, str):
-                       content += chunk.delta.message.content
-                   elif isinstance(chunk.delta.message.content, list):
-                       content_list.extend(chunk.delta.message.content)
-                   if chunk.delta.message.tool_calls:
-                       _increase_tool_call(chunk.delta.message.tool_calls, tools_calls)
-
-                   usage = chunk.delta.usage or LLMUsage.empty_usage()
-                   system_fingerprint = chunk.system_fingerprint
-                   break
-
-               result = LLMResult(
-                   model=model,
-                   prompt_messages=prompt_messages,
-                   message=AssistantPromptMessage(
-                       content=content or content_list,
-                       tool_calls=tools_calls,
-                   ),
-                   usage=usage,
-                   system_fingerprint=system_fingerprint,
+               result = _normalize_non_stream_plugin_result(
+                   model=model, prompt_messages=prompt_messages, result=result
                )
        except Exception as e:
            self._trigger_invoke_error_callbacks(
@@ -425,27 +504,21 @@ class LargeLanguageModel(AIModel):
        :param user: unique user id
        :param callbacks: callbacks
        """
-       if callbacks:
-           for callback in callbacks:
-               try:
-                   callback.on_before_invoke(
-                       llm_instance=self,
-                       model=model,
-                       credentials=credentials,
-                       prompt_messages=prompt_messages,
-                       model_parameters=model_parameters,
-                       tools=tools,
-                       stop=stop,
-                       stream=stream,
-                       user=user,
-                   )
-               except Exception as e:
-                   if callback.raise_error:
-                       raise e
-                   else:
-                       logger.warning(
-                           "Callback %s on_before_invoke failed with error %s", callback.__class__.__name__, e
-                       )
+       _run_callbacks(
+           callbacks,
+           event="on_before_invoke",
+           invoke=lambda callback: callback.on_before_invoke(
+               llm_instance=self,
+               model=model,
+               credentials=credentials,
+               prompt_messages=prompt_messages,
+               model_parameters=model_parameters,
+               tools=tools,
+               stop=stop,
+               stream=stream,
+               user=user,
+           ),
+       )

    def _trigger_new_chunk_callbacks(
        self,
@@ -473,26 +546,22 @@ class LargeLanguageModel(AIModel):
        :param stream: is stream response
        :param user: unique user id
        """
-       if callbacks:
-           for callback in callbacks:
-               try:
-                   callback.on_new_chunk(
-                       llm_instance=self,
-                       chunk=chunk,
-                       model=model,
-                       credentials=credentials,
-                       prompt_messages=prompt_messages,
-                       model_parameters=model_parameters,
-                       tools=tools,
-                       stop=stop,
-                       stream=stream,
-                       user=user,
-                   )
-               except Exception as e:
-                   if callback.raise_error:
-                       raise e
-                   else:
-                       logger.warning("Callback %s on_new_chunk failed with error %s", callback.__class__.__name__, e)
+       _run_callbacks(
+           callbacks,
+           event="on_new_chunk",
+           invoke=lambda callback: callback.on_new_chunk(
+               llm_instance=self,
+               chunk=chunk,
+               model=model,
+               credentials=credentials,
+               prompt_messages=prompt_messages,
+               model_parameters=model_parameters,
+               tools=tools,
+               stop=stop,
+               stream=stream,
+               user=user,
+           ),
+       )

    def _trigger_after_invoke_callbacks(
        self,
@@ -521,28 +590,22 @@ class LargeLanguageModel(AIModel):
        :param user: unique user id
        :param callbacks: callbacks
        """
-       if callbacks:
-           for callback in callbacks:
-               try:
-                   callback.on_after_invoke(
-                       llm_instance=self,
-                       result=result,
-                       model=model,
-                       credentials=credentials,
-                       prompt_messages=prompt_messages,
-                       model_parameters=model_parameters,
-                       tools=tools,
-                       stop=stop,
-                       stream=stream,
-                       user=user,
-                   )
-               except Exception as e:
-                   if callback.raise_error:
-                       raise e
-                   else:
-                       logger.warning(
-                           "Callback %s on_after_invoke failed with error %s", callback.__class__.__name__, e
-                       )
+       _run_callbacks(
+           callbacks,
+           event="on_after_invoke",
+           invoke=lambda callback: callback.on_after_invoke(
+               llm_instance=self,
+               result=result,
+               model=model,
+               credentials=credentials,
+               prompt_messages=prompt_messages,
+               model_parameters=model_parameters,
+               tools=tools,
+               stop=stop,
+               stream=stream,
+               user=user,
+           ),
+       )

    def _trigger_invoke_error_callbacks(
        self,
@@ -571,25 +634,19 @@ class LargeLanguageModel(AIModel):
        :param user: unique user id
        :param callbacks: callbacks
        """
-       if callbacks:
-           for callback in callbacks:
-               try:
-                   callback.on_invoke_error(
-                       llm_instance=self,
-                       ex=ex,
-                       model=model,
-                       credentials=credentials,
-                       prompt_messages=prompt_messages,
-                       model_parameters=model_parameters,
-                       tools=tools,
-                       stop=stop,
-                       stream=stream,
-                       user=user,
-                   )
-               except Exception as e:
-                   if callback.raise_error:
-                       raise e
-                   else:
-                       logger.warning(
-                           "Callback %s on_invoke_error failed with error %s", callback.__class__.__name__, e
-                       )
+       _run_callbacks(
+           callbacks,
+           event="on_invoke_error",
+           invoke=lambda callback: callback.on_invoke_error(
+               llm_instance=self,
+               ex=ex,
+               model=model,
+               credentials=credentials,
+               prompt_messages=prompt_messages,
+               model_parameters=model_parameters,
+               tools=tools,
+               stop=stop,
+               stream=stream,
+               user=user,
+           ),
+       )
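With all four hooks routed through `_run_callbacks`, the error contract is uniform: a failing callback is logged and skipped unless it opts into `raise_error`. A hedged sketch of that contract from the callback author's side; both classes are hypothetical, and the real base class lives in the model runtime's callback module:

```python
class AbortOnErrorCallback:
    """Hypothetical callback; only the attributes _run_callbacks uses are shown."""

    raise_error = True  # opt in: a failure here aborts the invocation

    def on_invoke_error(self, *, llm_instance, ex, **kwargs) -> None:
        print(f"invoke failed on {llm_instance}: {ex}")


class BestEffortMetricsCallback:
    raise_error = False  # opt out: a broken metrics sink must not break inference

    def on_after_invoke(self, *, result, **kwargs) -> None:
        raise ConnectionError("metrics backend down")  # logged, then ignored
```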
@@ -119,16 +119,14 @@ elif [[ "${MODE}" == "job" ]]; then

else
  if [[ "${DEBUG}" == "true" ]]; then
-   export HOST=${DIFY_BIND_ADDRESS:-0.0.0.0}
-   export PORT=${DIFY_PORT:-5001}
-   exec python -m app
+   exec flask run --host=${DIFY_BIND_ADDRESS:-0.0.0.0} --port=${DIFY_PORT:-5001} --debug
  else
    exec gunicorn \
      --bind "${DIFY_BIND_ADDRESS:-0.0.0.0}:${DIFY_PORT:-5001}" \
      --workers ${SERVER_WORKER_AMOUNT:-1} \
-     --worker-class ${SERVER_WORKER_CLASS:-geventwebsocket.gunicorn.workers.GeventWebSocketWorker} \
+     --worker-class ${SERVER_WORKER_CLASS:-gevent} \
      --worker-connections ${SERVER_WORKER_CONNECTIONS:-10} \
      --timeout ${GUNICORN_TIMEOUT:-200} \
-     app:socketio_app
+     app:app
  fi
fi
api/enums/hosted_provider.py (new file, 21 lines)
@@ -0,0 +1,21 @@
from enum import StrEnum


class HostedTrialProvider(StrEnum):
    """
    Enum representing hosted model provider names for trial access.
    """

    OPENAI = "langgenius/openai/openai"
    ANTHROPIC = "langgenius/anthropic/anthropic"
    GEMINI = "langgenius/gemini/google"
    X = "langgenius/x/x"
    DEEPSEEK = "langgenius/deepseek/deepseek"
    TONGYI = "langgenius/tongyi/tongyi"

    @property
    def config_key(self) -> str:
        """Return the config key used in dify_config (e.g., HOSTED_{config_key}_PAID_ENABLED)."""
        if self == HostedTrialProvider.X:
            return "XAI"
        return self.name
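A small usage sketch of `config_key`, showing how the `X`/`XAI` special case maps enum members onto config flag names. The flag-name pattern follows the docstring above; whether every such flag exists in `dify_config` is assumed:

```python
from enums.hosted_provider import HostedTrialProvider  # path per the new file above

for provider in (HostedTrialProvider.OPENAI, HostedTrialProvider.X):
    print(f"HOSTED_{provider.config_key}_PAID_ENABLED")
# HOSTED_OPENAI_PAID_ENABLED
# HOSTED_XAI_PAID_ENABLED  (X is the only member whose key differs from its name)
```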
@@ -28,8 +28,10 @@ def init_app(app: DifyApp) -> None:

    # Ensure route decorators are evaluated.
    import controllers.console.ping as ping_module
+   from controllers.console import setup

    _ = ping_module
+   _ = setup

    router.include_router(console_router, prefix="/console/api")
    CORS(
@@ -1,5 +0,0 @@
import socketio  # type: ignore[reportMissingTypeStubs]

from configs import dify_config

sio = socketio.Server(async_mode="gevent", cors_allowed_origins=dify_config.CONSOLE_CORS_ALLOW_ORIGINS)
@@ -1,17 +0,0 @@
from flask_restx import fields

online_user_partial_fields = {
    "user_id": fields.String,
    "username": fields.String,
    "avatar": fields.String,
    "sid": fields.String,
}

workflow_online_users_fields = {
    "workflow_id": fields.String,
    "users": fields.List(fields.Nested(online_user_partial_fields)),
}

online_user_list_fields = {
    "data": fields.List(fields.Nested(workflow_online_users_fields)),
}
@@ -1,96 +0,0 @@
from flask_restx import fields

from libs.helper import AvatarUrlField, TimestampField

# basic account fields for comments
account_fields = {
    "id": fields.String,
    "name": fields.String,
    "email": fields.String,
    "avatar_url": AvatarUrlField,
}

# Comment mention fields
workflow_comment_mention_fields = {
    "mentioned_user_id": fields.String,
    "mentioned_user_account": fields.Nested(account_fields, allow_null=True),
    "reply_id": fields.String,
}

# Comment reply fields
workflow_comment_reply_fields = {
    "id": fields.String,
    "content": fields.String,
    "created_by": fields.String,
    "created_by_account": fields.Nested(account_fields, allow_null=True),
    "created_at": TimestampField,
}

# Basic comment fields (for list views)
workflow_comment_basic_fields = {
    "id": fields.String,
    "position_x": fields.Float,
    "position_y": fields.Float,
    "content": fields.String,
    "created_by": fields.String,
    "created_by_account": fields.Nested(account_fields, allow_null=True),
    "created_at": TimestampField,
    "updated_at": TimestampField,
    "resolved": fields.Boolean,
    "resolved_at": TimestampField,
    "resolved_by": fields.String,
    "resolved_by_account": fields.Nested(account_fields, allow_null=True),
    "reply_count": fields.Integer,
    "mention_count": fields.Integer,
    "participants": fields.List(fields.Nested(account_fields)),
}

# Detailed comment fields (for single comment view)
workflow_comment_detail_fields = {
    "id": fields.String,
    "position_x": fields.Float,
    "position_y": fields.Float,
    "content": fields.String,
    "created_by": fields.String,
    "created_by_account": fields.Nested(account_fields, allow_null=True),
    "created_at": TimestampField,
    "updated_at": TimestampField,
    "resolved": fields.Boolean,
    "resolved_at": TimestampField,
    "resolved_by": fields.String,
    "resolved_by_account": fields.Nested(account_fields, allow_null=True),
    "replies": fields.List(fields.Nested(workflow_comment_reply_fields)),
    "mentions": fields.List(fields.Nested(workflow_comment_mention_fields)),
}

# Comment creation response fields (simplified)
workflow_comment_create_fields = {
    "id": fields.String,
    "created_at": TimestampField,
}

# Comment update response fields (simplified)
workflow_comment_update_fields = {
    "id": fields.String,
    "updated_at": TimestampField,
}

# Comment resolve response fields
workflow_comment_resolve_fields = {
    "id": fields.String,
    "resolved": fields.Boolean,
    "resolved_at": TimestampField,
    "resolved_by": fields.String,
}

# Reply creation response fields (simplified)
workflow_comment_reply_create_fields = {
    "id": fields.String,
    "created_at": TimestampField,
}

# Reply update response fields
workflow_comment_reply_update_fields = {
    "id": fields.String,
    "updated_at": TimestampField,
}
@@ -1,90 +0,0 @@
"""Add workflow comments table

Revision ID: 227822d22895
Revises: 9d77545f524e
Create Date: 2025-08-22 17:26:15.255980

"""
from alembic import op
import models as models
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '227822d22895'
down_revision = '9d77545f524e'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('workflow_comments',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('app_id', models.types.StringUUID(), nullable=False),
    sa.Column('position_x', sa.Float(), nullable=False),
    sa.Column('position_y', sa.Float(), nullable=False),
    sa.Column('content', sa.Text(), nullable=False),
    sa.Column('created_by', models.types.StringUUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('resolved', sa.Boolean(), server_default=sa.text('false'), nullable=False),
    sa.Column('resolved_at', sa.DateTime(), nullable=True),
    sa.Column('resolved_by', models.types.StringUUID(), nullable=True),
    sa.PrimaryKeyConstraint('id', name='workflow_comments_pkey')
    )
    with op.batch_alter_table('workflow_comments', schema=None) as batch_op:
        batch_op.create_index('workflow_comments_app_idx', ['tenant_id', 'app_id'], unique=False)
        batch_op.create_index('workflow_comments_created_at_idx', ['created_at'], unique=False)

    op.create_table('workflow_comment_replies',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('comment_id', models.types.StringUUID(), nullable=False),
    sa.Column('content', sa.Text(), nullable=False),
    sa.Column('created_by', models.types.StringUUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.ForeignKeyConstraint(['comment_id'], ['workflow_comments.id'], name=op.f('workflow_comment_replies_comment_id_fkey'), ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', name='workflow_comment_replies_pkey')
    )
    with op.batch_alter_table('workflow_comment_replies', schema=None) as batch_op:
        batch_op.create_index('comment_replies_comment_idx', ['comment_id'], unique=False)
        batch_op.create_index('comment_replies_created_at_idx', ['created_at'], unique=False)

    op.create_table('workflow_comment_mentions',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('comment_id', models.types.StringUUID(), nullable=False),
    sa.Column('reply_id', models.types.StringUUID(), nullable=True),
    sa.Column('mentioned_user_id', models.types.StringUUID(), nullable=False),
    sa.ForeignKeyConstraint(['comment_id'], ['workflow_comments.id'], name=op.f('workflow_comment_mentions_comment_id_fkey'), ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['reply_id'], ['workflow_comment_replies.id'], name=op.f('workflow_comment_mentions_reply_id_fkey'), ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', name='workflow_comment_mentions_pkey')
    )
    with op.batch_alter_table('workflow_comment_mentions', schema=None) as batch_op:
        batch_op.create_index('comment_mentions_comment_idx', ['comment_id'], unique=False)
        batch_op.create_index('comment_mentions_reply_idx', ['reply_id'], unique=False)
        batch_op.create_index('comment_mentions_user_idx', ['mentioned_user_id'], unique=False)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('workflow_comment_mentions', schema=None) as batch_op:
        batch_op.drop_index('comment_mentions_user_idx')
        batch_op.drop_index('comment_mentions_reply_idx')
        batch_op.drop_index('comment_mentions_comment_idx')

    op.drop_table('workflow_comment_mentions')
    with op.batch_alter_table('workflow_comment_replies', schema=None) as batch_op:
        batch_op.drop_index('comment_replies_created_at_idx')
        batch_op.drop_index('comment_replies_comment_idx')

    op.drop_table('workflow_comment_replies')
    with op.batch_alter_table('workflow_comments', schema=None) as batch_op:
        batch_op.drop_index('workflow_comments_created_at_idx')
        batch_op.drop_index('workflow_comments_app_idx')

    op.drop_table('workflow_comments')
    # ### end Alembic commands ###
@@ -9,11 +9,6 @@ from .account import (
    TenantStatus,
)
from .api_based_extension import APIBasedExtension, APIBasedExtensionPoint
-from .comment import (
-    WorkflowComment,
-    WorkflowCommentMention,
-    WorkflowCommentReply,
-)
from .dataset import (
    AppDatasetJoin,
    Dataset,
@@ -210,9 +205,6 @@ __all__ = [
    "WorkflowAppLog",
    "WorkflowAppLogCreatedFrom",
    "WorkflowArchiveLog",
-   "WorkflowComment",
-   "WorkflowCommentMention",
-   "WorkflowCommentReply",
    "WorkflowNodeExecutionModel",
    "WorkflowNodeExecutionOffload",
    "WorkflowNodeExecutionTriggeredFrom",
@@ -1,210 +0,0 @@
"""Workflow comment models."""

from datetime import datetime
from typing import Optional

from sqlalchemy import Index, func
from sqlalchemy.orm import Mapped, mapped_column, relationship

from .account import Account
from .base import Base
from .engine import db
from .types import StringUUID


class WorkflowComment(Base):
    """Workflow comment model for canvas commenting functionality.

    Comments are associated with apps rather than specific workflow versions,
    since an app has only one draft workflow at a time and comments should persist
    across workflow version changes.

    Attributes:
        id: Comment ID
        tenant_id: Workspace ID
        app_id: App ID (primary association, comments belong to apps)
        position_x: X coordinate on canvas
        position_y: Y coordinate on canvas
        content: Comment content
        created_by: Creator account ID
        created_at: Creation time
        updated_at: Last update time
        resolved: Whether comment is resolved
        resolved_at: Resolution time
        resolved_by: Resolver account ID
    """

    __tablename__ = "workflow_comments"
    __table_args__ = (
        db.PrimaryKeyConstraint("id", name="workflow_comments_pkey"),
        Index("workflow_comments_app_idx", "tenant_id", "app_id"),
        Index("workflow_comments_created_at_idx", "created_at"),
    )

    id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuidv7()"))
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    position_x: Mapped[float] = mapped_column(db.Float)
    position_y: Mapped[float] = mapped_column(db.Float)
    content: Mapped[str] = mapped_column(db.Text, nullable=False)
    created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
    created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
    updated_at: Mapped[datetime] = mapped_column(
        db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
    )
    resolved: Mapped[bool] = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
    resolved_at: Mapped[datetime | None] = mapped_column(db.DateTime)
    resolved_by: Mapped[str | None] = mapped_column(StringUUID)

    # Relationships
    replies: Mapped[list["WorkflowCommentReply"]] = relationship(
        "WorkflowCommentReply", back_populates="comment", cascade="all, delete-orphan"
    )
    mentions: Mapped[list["WorkflowCommentMention"]] = relationship(
        "WorkflowCommentMention", back_populates="comment", cascade="all, delete-orphan"
    )

    @property
    def created_by_account(self):
        """Get creator account."""
        if hasattr(self, "_created_by_account_cache"):
            return self._created_by_account_cache
        return db.session.get(Account, self.created_by)

    def cache_created_by_account(self, account: Account | None) -> None:
        """Cache creator account to avoid extra queries."""
        self._created_by_account_cache = account

    @property
    def resolved_by_account(self):
        """Get resolver account."""
        if hasattr(self, "_resolved_by_account_cache"):
            return self._resolved_by_account_cache
        if self.resolved_by:
            return db.session.get(Account, self.resolved_by)
        return None

    def cache_resolved_by_account(self, account: Account | None) -> None:
        """Cache resolver account to avoid extra queries."""
        self._resolved_by_account_cache = account

    @property
    def reply_count(self):
        """Get reply count."""
        return len(self.replies)

    @property
    def mention_count(self):
        """Get mention count."""
        return len(self.mentions)

    @property
    def participants(self):
        """Get all participants (creator + repliers + mentioned users)."""
        participant_ids = set()

        # Add comment creator
        participant_ids.add(self.created_by)

        # Add reply creators
        participant_ids.update(reply.created_by for reply in self.replies)

        # Add mentioned users
        participant_ids.update(mention.mentioned_user_id for mention in self.mentions)

        # Get account objects
        participants = []
        for user_id in participant_ids:
            account = db.session.get(Account, user_id)
            if account:
                participants.append(account)

        return participants


class WorkflowCommentReply(Base):
    """Workflow comment reply model.

    Attributes:
        id: Reply ID
        comment_id: Parent comment ID
        content: Reply content
        created_by: Creator account ID
        created_at: Creation time
    """

    __tablename__ = "workflow_comment_replies"
    __table_args__ = (
        db.PrimaryKeyConstraint("id", name="workflow_comment_replies_pkey"),
        Index("comment_replies_comment_idx", "comment_id"),
        Index("comment_replies_created_at_idx", "created_at"),
    )

    id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuidv7()"))
    comment_id: Mapped[str] = mapped_column(
        StringUUID, db.ForeignKey("workflow_comments.id", ondelete="CASCADE"), nullable=False
    )
    content: Mapped[str] = mapped_column(db.Text, nullable=False)
    created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
    created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
    updated_at: Mapped[datetime] = mapped_column(
        db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
    )
    # Relationships
    comment: Mapped["WorkflowComment"] = relationship("WorkflowComment", back_populates="replies")

    @property
    def created_by_account(self):
        """Get creator account."""
        if hasattr(self, "_created_by_account_cache"):
            return self._created_by_account_cache
        return db.session.get(Account, self.created_by)

    def cache_created_by_account(self, account: Account | None) -> None:
        """Cache creator account to avoid extra queries."""
        self._created_by_account_cache = account


class WorkflowCommentMention(Base):
    """Workflow comment mention model.

    Mentions are only for internal accounts since end users
    cannot access workflow canvas and commenting features.

    Attributes:
        id: Mention ID
        comment_id: Parent comment ID
        mentioned_user_id: Mentioned account ID
    """

    __tablename__ = "workflow_comment_mentions"
    __table_args__ = (
        db.PrimaryKeyConstraint("id", name="workflow_comment_mentions_pkey"),
        Index("comment_mentions_comment_idx", "comment_id"),
        Index("comment_mentions_reply_idx", "reply_id"),
        Index("comment_mentions_user_idx", "mentioned_user_id"),
    )

    id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuidv7()"))
    comment_id: Mapped[str] = mapped_column(
        StringUUID, db.ForeignKey("workflow_comments.id", ondelete="CASCADE"), nullable=False
    )
    reply_id: Mapped[str | None] = mapped_column(
        StringUUID, db.ForeignKey("workflow_comment_replies.id", ondelete="CASCADE"), nullable=True
    )
    mentioned_user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)

    # Relationships
    comment: Mapped["WorkflowComment"] = relationship("WorkflowComment", back_populates="mentions")
    reply: Mapped[Optional["WorkflowCommentReply"]] = relationship("WorkflowCommentReply")

    @property
    def mentioned_user_account(self):
        """Get mentioned account."""
        if hasattr(self, "_mentioned_user_account_cache"):
            return self._mentioned_user_account_cache
        return db.session.get(Account, self.mentioned_user_id)

    def cache_mentioned_user_account(self, account: Account | None) -> None:
        """Cache mentioned account to avoid extra queries."""
        self._mentioned_user_account_cache = account
@@ -400,7 +400,7 @@ class Workflow(Base): # bug

        :return: hash
        """
-       entity = {"graph": self.graph_dict}
+       entity = {"graph": self.graph_dict, "features": self.features_dict}

        return helper.generate_text_hash(json.dumps(entity, sort_keys=True))
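Because the hash now covers features as well as the graph, two drafts with identical graphs but different features no longer collide. A hedged sketch of the canonical-JSON hashing idea, using hashlib directly since `helper.generate_text_hash`'s exact digest is not shown in this diff:

```python
import hashlib
import json


def text_hash(entity: dict) -> str:
    # sort_keys=True gives a canonical serialization, so the hash is
    # stable across dict insertion orders.
    return hashlib.sha256(json.dumps(entity, sort_keys=True).encode()).hexdigest()


graph = {"nodes": []}
old = text_hash({"graph": graph})
new = text_hash({"graph": graph, "features": {"file_upload": {"enabled": True}}})
assert old != new  # feature-only edits now change the unique hash
```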
@@ -21,7 +21,6 @@ dependencies = [
    "flask-orjson~=2.0.0",
    "flask-sqlalchemy~=3.1.1",
    "gevent~=25.9.1",
-   "gevent-websocket~=0.10.1",
    "gmpy2~=2.2.1",
    "google-api-core==2.18.0",
    "google-api-python-client==2.90.0",
@@ -73,7 +72,6 @@ dependencies = [
    "pypdfium2==5.2.0",
    "python-docx~=1.1.0",
    "python-dotenv==1.0.1",
-   "python-socketio~=5.13.0",
    "pyyaml~=6.0.1",
    "readabilipy~=0.3.0",
    "redis[hiredis]~=6.1.0",
@@ -1,147 +0,0 @@
from __future__ import annotations

import json
from typing import TypedDict

from extensions.ext_redis import redis_client

SESSION_STATE_TTL_SECONDS = 3600
WORKFLOW_ONLINE_USERS_PREFIX = "workflow_online_users:"
WORKFLOW_LEADER_PREFIX = "workflow_leader:"
WS_SID_MAP_PREFIX = "ws_sid_map:"


class WorkflowSessionInfo(TypedDict):
    user_id: str
    username: str
    avatar: str | None
    sid: str
    connected_at: int


class SidMapping(TypedDict):
    workflow_id: str
    user_id: str


class WorkflowCollaborationRepository:
    def __init__(self) -> None:
        self._redis = redis_client

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(redis_client={self._redis})"

    @staticmethod
    def workflow_key(workflow_id: str) -> str:
        return f"{WORKFLOW_ONLINE_USERS_PREFIX}{workflow_id}"

    @staticmethod
    def leader_key(workflow_id: str) -> str:
        return f"{WORKFLOW_LEADER_PREFIX}{workflow_id}"

    @staticmethod
    def sid_key(sid: str) -> str:
        return f"{WS_SID_MAP_PREFIX}{sid}"

    @staticmethod
    def _decode(value: str | bytes | None) -> str | None:
        if value is None:
            return None
        if isinstance(value, bytes):
            return value.decode("utf-8")
        return value

    def refresh_session_state(self, workflow_id: str, sid: str) -> None:
        workflow_key = self.workflow_key(workflow_id)
        sid_key = self.sid_key(sid)
        if self._redis.exists(workflow_key):
            self._redis.expire(workflow_key, SESSION_STATE_TTL_SECONDS)
        if self._redis.exists(sid_key):
            self._redis.expire(sid_key, SESSION_STATE_TTL_SECONDS)

    def set_session_info(self, workflow_id: str, session_info: WorkflowSessionInfo) -> None:
        workflow_key = self.workflow_key(workflow_id)
        self._redis.hset(workflow_key, session_info["sid"], json.dumps(session_info))
        self._redis.set(
            self.sid_key(session_info["sid"]),
            json.dumps({"workflow_id": workflow_id, "user_id": session_info["user_id"]}),
            ex=SESSION_STATE_TTL_SECONDS,
        )
        self.refresh_session_state(workflow_id, session_info["sid"])

    def get_sid_mapping(self, sid: str) -> SidMapping | None:
        raw = self._redis.get(self.sid_key(sid))
        if not raw:
            return None
        value = self._decode(raw)
        if not value:
            return None
        try:
            return json.loads(value)
        except (TypeError, json.JSONDecodeError):
            return None

    def delete_session(self, workflow_id: str, sid: str) -> None:
        self._redis.hdel(self.workflow_key(workflow_id), sid)
        self._redis.delete(self.sid_key(sid))

    def session_exists(self, workflow_id: str, sid: str) -> bool:
        return bool(self._redis.hexists(self.workflow_key(workflow_id), sid))

    def sid_mapping_exists(self, sid: str) -> bool:
        return bool(self._redis.exists(self.sid_key(sid)))

    def get_session_sids(self, workflow_id: str) -> list[str]:
        raw_sids = self._redis.hkeys(self.workflow_key(workflow_id))
        decoded_sids: list[str] = []
        for sid in raw_sids:
            decoded = self._decode(sid)
            if decoded:
                decoded_sids.append(decoded)
        return decoded_sids

    def list_sessions(self, workflow_id: str) -> list[WorkflowSessionInfo]:
        sessions_json = self._redis.hgetall(self.workflow_key(workflow_id))
        users: list[WorkflowSessionInfo] = []

        for session_info_json in sessions_json.values():
            value = self._decode(session_info_json)
            if not value:
                continue
            try:
                session_info = json.loads(value)
            except (TypeError, json.JSONDecodeError):
                continue

            if not isinstance(session_info, dict):
                continue
            if "user_id" not in session_info or "username" not in session_info or "sid" not in session_info:
                continue

            users.append(
                {
                    "user_id": str(session_info["user_id"]),
                    "username": str(session_info["username"]),
                    "avatar": session_info.get("avatar"),
                    "sid": str(session_info["sid"]),
                    "connected_at": int(session_info.get("connected_at") or 0),
                }
            )

        return users

    def get_current_leader(self, workflow_id: str) -> str | None:
        raw = self._redis.get(self.leader_key(workflow_id))
        return self._decode(raw)

    def set_leader_if_absent(self, workflow_id: str, sid: str) -> bool:
        return bool(self._redis.set(self.leader_key(workflow_id), sid, nx=True, ex=SESSION_STATE_TTL_SECONDS))

    def set_leader(self, workflow_id: str, sid: str) -> None:
        self._redis.set(self.leader_key(workflow_id), sid, ex=SESSION_STATE_TTL_SECONDS)

    def delete_leader(self, workflow_id: str) -> None:
        self._redis.delete(self.leader_key(workflow_id))

    def expire_leader(self, workflow_id: str) -> None:
        self._redis.expire(self.leader_key(workflow_id), SESSION_STATE_TTL_SECONDS)
@@ -781,15 +781,16 @@ class AppDslService:
        return dependencies

    @classmethod
-   def get_leaked_dependencies(cls, tenant_id: str, dsl_dependencies: list[dict]) -> list[PluginDependency]:
+   def get_leaked_dependencies(
+       cls, tenant_id: str, dsl_dependencies: list[PluginDependency]
+   ) -> list[PluginDependency]:
        """
        Returns the leaked dependencies in current workspace
        """
-       dependencies = [PluginDependency.model_validate(dep) for dep in dsl_dependencies]
-       if not dependencies:
+       if not dsl_dependencies:
            return []

-       return DependenciesAnalysisService.get_leaked_dependencies(tenant_id=tenant_id, dependencies=dependencies)
+       return DependenciesAnalysisService.get_leaked_dependencies(tenant_id=tenant_id, dependencies=dsl_dependencies)

    @staticmethod
    def _generate_aes_key(tenant_id: str) -> bytes:
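The validation step moves to the caller: the service now expects already-parsed `PluginDependency` objects instead of raw dicts. A hedged sketch of the caller-side pattern this implies; the stub class and the raw-dict shape are illustrative, not the real entity:

```python
from pydantic import BaseModel


class PluginDependencyStub(BaseModel):
    """Illustrative stand-in; the real PluginDependency has more fields."""

    type: str
    value: dict


raw_deps = [{"type": "marketplace", "value": {"plugin_unique_identifier": "langgenius/openai"}}]
# Parse once at the boundary, then pass typed objects down:
deps = [PluginDependencyStub.model_validate(dep) for dep in raw_deps]
# leaked = AppDslService.get_leaked_dependencies(tenant_id, deps)
```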
@@ -1,6 +1,8 @@
from __future__ import annotations

+import uuid
from collections.abc import Generator, Mapping
-from typing import Any, Union
+from typing import TYPE_CHECKING, Any, Union

from configs import dify_config
from core.app.apps.advanced_chat.app_generator import AdvancedChatAppGenerator
@@ -18,6 +20,9 @@ from services.errors.app import QuotaExceededError, WorkflowIdFormatError, Workf
from services.errors.llm import InvokeRateLimitError
from services.workflow_service import WorkflowService

+if TYPE_CHECKING:
+    from controllers.console.app.workflow import LoopNodeRunPayload


class AppGenerateService:
    @classmethod
@@ -165,7 +170,9 @@ class AppGenerateService:
        raise ValueError(f"Invalid app mode {app_model.mode}")

    @classmethod
-   def generate_single_loop(cls, app_model: App, user: Account, node_id: str, args: Any, streaming: bool = True):
+   def generate_single_loop(
+       cls, app_model: App, user: Account, node_id: str, args: LoopNodeRunPayload, streaming: bool = True
+   ):
        if app_model.mode == AppMode.ADVANCED_CHAT:
            workflow = cls._get_workflow(app_model, InvokeFrom.DEBUGGER)
            return AdvancedChatAppGenerator.convert_to_event_stream(
@@ -4,6 +4,7 @@ from pydantic import BaseModel, ConfigDict, Field

from configs import dify_config
from enums.cloud_plan import CloudPlan
+from enums.hosted_provider import HostedTrialProvider
from services.billing_service import BillingService
from services.enterprise.enterprise_service import EnterpriseService

@@ -161,7 +162,6 @@ class SystemFeatureModel(BaseModel):
    enable_email_code_login: bool = False
    enable_email_password_login: bool = True
    enable_social_oauth_login: bool = False
-   enable_collaboration_mode: bool = False
    is_allow_register: bool = False
    is_allow_create_workspace: bool = False
    is_email_setup: bool = False
@@ -171,6 +171,7 @@ class SystemFeatureModel(BaseModel):
    plugin_installation_permission: PluginInstallationPermissionModel = PluginInstallationPermissionModel()
    enable_change_email: bool = True
    plugin_manager: PluginManagerModel = PluginManagerModel()
+   trial_models: list[str] = []
    enable_trial_app: bool = False
    enable_explore_banner: bool = False
@@ -225,13 +226,24 @@ class FeatureService:
        system_features.enable_email_code_login = dify_config.ENABLE_EMAIL_CODE_LOGIN
        system_features.enable_email_password_login = dify_config.ENABLE_EMAIL_PASSWORD_LOGIN
        system_features.enable_social_oauth_login = dify_config.ENABLE_SOCIAL_OAUTH_LOGIN
-       system_features.enable_collaboration_mode = dify_config.ENABLE_COLLABORATION_MODE
        system_features.is_allow_register = dify_config.ALLOW_REGISTER
        system_features.is_allow_create_workspace = dify_config.ALLOW_CREATE_WORKSPACE
        system_features.is_email_setup = dify_config.MAIL_TYPE is not None and dify_config.MAIL_TYPE != ""
+       system_features.trial_models = cls._fulfill_trial_models_from_env()
        system_features.enable_trial_app = dify_config.ENABLE_TRIAL_APP
        system_features.enable_explore_banner = dify_config.ENABLE_EXPLORE_BANNER

+   @classmethod
+   def _fulfill_trial_models_from_env(cls) -> list[str]:
+       return [
+           provider.value
+           for provider in HostedTrialProvider
+           if (
+               getattr(dify_config, f"HOSTED_{provider.config_key}_PAID_ENABLED", False)
+               and getattr(dify_config, f"HOSTED_{provider.config_key}_TRIAL_ENABLED", False)
+           )
+       ]

    @classmethod
    def _fulfill_params_from_env(cls, features: FeatureModel):
        features.can_replace_logo = dify_config.CAN_REPLACE_LOGO
@@ -870,15 +870,16 @@ class RagPipelineDslService:
        return dependencies

    @classmethod
-   def get_leaked_dependencies(cls, tenant_id: str, dsl_dependencies: list[dict]) -> list[PluginDependency]:
+   def get_leaked_dependencies(
+       cls, tenant_id: str, dsl_dependencies: list[PluginDependency]
+   ) -> list[PluginDependency]:
        """
        Returns the leaked dependencies in current workspace
        """
-       dependencies = [PluginDependency.model_validate(dep) for dep in dsl_dependencies]
-       if not dependencies:
+       if not dsl_dependencies:
            return []

-       return DependenciesAnalysisService.get_leaked_dependencies(tenant_id=tenant_id, dependencies=dependencies)
+       return DependenciesAnalysisService.get_leaked_dependencies(tenant_id=tenant_id, dependencies=dsl_dependencies)

    def _generate_aes_key(self, tenant_id: str) -> bytes:
        """Generate AES key based on tenant_id"""
@@ -44,7 +44,7 @@ class RagPipelineTransformService:
        doc_form = dataset.doc_form
        if not doc_form:
            return self._transform_to_empty_pipeline(dataset)
-       retrieval_model = dataset.retrieval_model
+       retrieval_model = RetrievalSetting.model_validate(dataset.retrieval_model) if dataset.retrieval_model else None
        pipeline_yaml = self._get_transform_yaml(doc_form, datasource_type, indexing_technique)
        # deal dependencies
        self._deal_dependencies(pipeline_yaml, dataset.tenant_id)
@@ -154,7 +154,12 @@ class RagPipelineTransformService:
        return node

    def _deal_knowledge_index(
-       self, dataset: Dataset, doc_form: str, indexing_technique: str | None, retrieval_model: dict, node: dict
+       self,
+       dataset: Dataset,
+       doc_form: str,
+       indexing_technique: str | None,
+       retrieval_model: RetrievalSetting | None,
+       node: dict,
    ):
        knowledge_configuration_dict = node.get("data", {})
        knowledge_configuration = KnowledgeConfiguration.model_validate(knowledge_configuration_dict)
@@ -163,10 +168,9 @@ class RagPipelineTransformService:
            knowledge_configuration.embedding_model = dataset.embedding_model
            knowledge_configuration.embedding_model_provider = dataset.embedding_model_provider
        if retrieval_model:
-           retrieval_setting = RetrievalSetting.model_validate(retrieval_model)
            if indexing_technique == "economy":
-               retrieval_setting.search_method = RetrievalMethod.KEYWORD_SEARCH
-           knowledge_configuration.retrieval_model = retrieval_setting
+               retrieval_model.search_method = RetrievalMethod.KEYWORD_SEARCH
+           knowledge_configuration.retrieval_model = retrieval_model
        else:
            dataset.retrieval_model = knowledge_configuration.retrieval_model.model_dump()
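This is the same parse-at-the-boundary move as in the DSL services: `dataset.retrieval_model` is validated into a `RetrievalSetting` once, and the typed object flows through `_deal_knowledge_index`. A hedged sketch of the pattern; the stub's field names are illustrative:

```python
from pydantic import BaseModel


class RetrievalSettingStub(BaseModel):
    """Illustrative stand-in; the real RetrievalSetting has more fields."""

    search_method: str = "semantic_search"
    top_k: int = 4


raw = {"search_method": "hybrid_search", "top_k": 8}
# Validate once where the raw dict enters the service...
setting = RetrievalSettingStub.model_validate(raw) if raw else None
# ...then downstream code mutates the typed object, not a dict:
if setting is not None:
    setting.search_method = "keyword_search"  # e.g. the "economy" branch
```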
@@ -1,196 +0,0 @@
from __future__ import annotations

import logging
import time
from collections.abc import Mapping

from models.account import Account
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository, WorkflowSessionInfo


class WorkflowCollaborationService:
    def __init__(self, repository: WorkflowCollaborationRepository, socketio) -> None:
        self._repository = repository
        self._socketio = socketio

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(repository={self._repository})"

    def save_session(self, sid: str, user: Account) -> None:
        self._socketio.save_session(
            sid,
            {
                "user_id": user.id,
                "username": user.name,
                "avatar": user.avatar,
            },
        )

    def register_session(self, workflow_id: str, sid: str) -> tuple[str, bool] | None:
        session = self._socketio.get_session(sid)
        user_id = session.get("user_id")
        if not user_id:
            return None

        session_info: WorkflowSessionInfo = {
            "user_id": str(user_id),
            "username": str(session.get("username", "Unknown")),
            "avatar": session.get("avatar"),
            "sid": sid,
            "connected_at": int(time.time()),
        }

        self._repository.set_session_info(workflow_id, session_info)

        leader_sid = self.get_or_set_leader(workflow_id, sid)
        is_leader = leader_sid == sid

        self._socketio.enter_room(sid, workflow_id)
        self.broadcast_online_users(workflow_id)

        self._socketio.emit("status", {"isLeader": is_leader}, room=sid)

        return str(user_id), is_leader

    def disconnect_session(self, sid: str) -> None:
        mapping = self._repository.get_sid_mapping(sid)
        if not mapping:
            return

        workflow_id = mapping["workflow_id"]
        self._repository.delete_session(workflow_id, sid)

        self.handle_leader_disconnect(workflow_id, sid)
        self.broadcast_online_users(workflow_id)

    def relay_collaboration_event(self, sid: str, data: Mapping[str, object]) -> tuple[dict[str, str], int]:
        mapping = self._repository.get_sid_mapping(sid)
        if not mapping:
            return {"msg": "unauthorized"}, 401

        workflow_id = mapping["workflow_id"]
        user_id = mapping["user_id"]
        self.refresh_session_state(workflow_id, sid)

        event_type = data.get("type")
        event_data = data.get("data")
        timestamp = data.get("timestamp", int(time.time()))

        if not event_type:
            return {"msg": "invalid event type"}, 400

        self._socketio.emit(
            "collaboration_update",
            {"type": event_type, "userId": user_id, "data": event_data, "timestamp": timestamp},
            room=workflow_id,
            skip_sid=sid,
        )

        return {"msg": "event_broadcasted"}, 200

    def relay_graph_event(self, sid: str, data: object) -> tuple[dict[str, str], int]:
        mapping = self._repository.get_sid_mapping(sid)
        if not mapping:
            return {"msg": "unauthorized"}, 401

        workflow_id = mapping["workflow_id"]
        self.refresh_session_state(workflow_id, sid)

        self._socketio.emit("graph_update", data, room=workflow_id, skip_sid=sid)

        return {"msg": "graph_update_broadcasted"}, 200

    def get_or_set_leader(self, workflow_id: str, sid: str) -> str:
        current_leader = self._repository.get_current_leader(workflow_id)

        if current_leader:
            if self.is_session_active(workflow_id, current_leader):
                return current_leader
            self._repository.delete_session(workflow_id, current_leader)
            self._repository.delete_leader(workflow_id)

        was_set = self._repository.set_leader_if_absent(workflow_id, sid)

        if was_set:
            if current_leader:
                self.broadcast_leader_change(workflow_id, sid)
            return sid

        current_leader = self._repository.get_current_leader(workflow_id)
        if current_leader:
            return current_leader

        return sid

    def handle_leader_disconnect(self, workflow_id: str, disconnected_sid: str) -> None:
        current_leader = self._repository.get_current_leader(workflow_id)
        if not current_leader:
            return

        if current_leader != disconnected_sid:
            return

        session_sids = self._repository.get_session_sids(workflow_id)
        if session_sids:
            new_leader_sid = session_sids[0]
            self._repository.set_leader(workflow_id, new_leader_sid)
            self.broadcast_leader_change(workflow_id, new_leader_sid)
        else:
            self._repository.delete_leader(workflow_id)

    def broadcast_leader_change(self, workflow_id: str, new_leader_sid: str) -> None:
        for sid in self._repository.get_session_sids(workflow_id):
            try:
                is_leader = sid == new_leader_sid
                self._socketio.emit("status", {"isLeader": is_leader}, room=sid)
            except Exception:
                logging.exception("Failed to emit leader status to session %s", sid)

    def get_current_leader(self, workflow_id: str) -> str | None:
        return self._repository.get_current_leader(workflow_id)

    def broadcast_online_users(self, workflow_id: str) -> None:
        users = self._repository.list_sessions(workflow_id)
        users.sort(key=lambda x: x.get("connected_at") or 0)

        leader_sid = self.get_current_leader(workflow_id)

        self._socketio.emit(
            "online_users",
            {"workflow_id": workflow_id, "users": users, "leader": leader_sid},
            room=workflow_id,
        )

    def refresh_session_state(self, workflow_id: str, sid: str) -> None:
        self._repository.refresh_session_state(workflow_id, sid)
        self._ensure_leader(workflow_id, sid)

    def _ensure_leader(self, workflow_id: str, sid: str) -> None:
        current_leader = self._repository.get_current_leader(workflow_id)
        if current_leader and self.is_session_active(workflow_id, current_leader):
            self._repository.expire_leader(workflow_id)
            return

        if current_leader:
            self._repository.delete_leader(workflow_id)

        self._repository.set_leader(workflow_id, sid)
        self.broadcast_leader_change(workflow_id, sid)

    def is_session_active(self, workflow_id: str, sid: str) -> bool:
        if not sid:
            return False

        try:
            if not self._socketio.manager.is_connected(sid, "/"):
                return False
        except AttributeError:
            return False

        if not self._repository.session_exists(workflow_id, sid):
            return False

        if not self._repository.sid_mapping_exists(sid):
            return False

        return True
@@ -1,345 +0,0 @@
import logging
from collections.abc import Sequence

from sqlalchemy import desc, select
from sqlalchemy.orm import Session, selectinload
from werkzeug.exceptions import Forbidden, NotFound

from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.helper import uuid_value
from models import WorkflowComment, WorkflowCommentMention, WorkflowCommentReply
from models.account import Account

logger = logging.getLogger(__name__)


class WorkflowCommentService:
    """Service for managing workflow comments."""

    @staticmethod
    def _validate_content(content: str) -> None:
        if len(content.strip()) == 0:
            raise ValueError("Comment content cannot be empty")

        if len(content) > 1000:
            raise ValueError("Comment content cannot exceed 1000 characters")

    @staticmethod
    def get_comments(tenant_id: str, app_id: str) -> Sequence[WorkflowComment]:
        """Get all comments for a workflow."""
        with Session(db.engine) as session:
            # Get all comments with eager loading
            stmt = (
                select(WorkflowComment)
                .options(selectinload(WorkflowComment.replies), selectinload(WorkflowComment.mentions))
                .where(WorkflowComment.tenant_id == tenant_id, WorkflowComment.app_id == app_id)
                .order_by(desc(WorkflowComment.created_at))
            )

            comments = session.scalars(stmt).all()

            # Batch preload all Account objects to avoid N+1 queries
            WorkflowCommentService._preload_accounts(session, comments)

            return comments

    @staticmethod
    def _preload_accounts(session: Session, comments: Sequence[WorkflowComment]) -> None:
        """Batch preload Account objects for comments, replies, and mentions."""
        # Collect all user IDs
        user_ids: set[str] = set()
        for comment in comments:
            user_ids.add(comment.created_by)
            if comment.resolved_by:
                user_ids.add(comment.resolved_by)
            user_ids.update(reply.created_by for reply in comment.replies)
            user_ids.update(mention.mentioned_user_id for mention in comment.mentions)

        if not user_ids:
            return

        # Batch query all accounts
        accounts = session.scalars(select(Account).where(Account.id.in_(user_ids))).all()
        account_map = {str(account.id): account for account in accounts}

        # Cache accounts on objects
        for comment in comments:
            comment.cache_created_by_account(account_map.get(comment.created_by))
            comment.cache_resolved_by_account(account_map.get(comment.resolved_by) if comment.resolved_by else None)
            for reply in comment.replies:
                reply.cache_created_by_account(account_map.get(reply.created_by))
            for mention in comment.mentions:
                mention.cache_mentioned_user_account(account_map.get(mention.mentioned_user_id))

    @staticmethod
    def get_comment(tenant_id: str, app_id: str, comment_id: str, session: Session | None = None) -> WorkflowComment:
        """Get a specific comment."""

        def _get_comment(session: Session) -> WorkflowComment:
            stmt = (
                select(WorkflowComment)
                .options(selectinload(WorkflowComment.replies), selectinload(WorkflowComment.mentions))
                .where(
                    WorkflowComment.id == comment_id,
                    WorkflowComment.tenant_id == tenant_id,
                    WorkflowComment.app_id == app_id,
                )
            )
            comment = session.scalar(stmt)

            if not comment:
                raise NotFound("Comment not found")

            # Preload accounts to avoid N+1 queries
            WorkflowCommentService._preload_accounts(session, [comment])

            return comment

        if session is not None:
            return _get_comment(session)
        else:
            with Session(db.engine, expire_on_commit=False) as session:
                return _get_comment(session)

    @staticmethod
    def create_comment(
        tenant_id: str,
        app_id: str,
        created_by: str,
        content: str,
        position_x: float,
        position_y: float,
        mentioned_user_ids: list[str] | None = None,
    ) -> dict:
        """Create a new workflow comment."""
        WorkflowCommentService._validate_content(content)

        with Session(db.engine) as session:
            comment = WorkflowComment(
                tenant_id=tenant_id,
                app_id=app_id,
                position_x=position_x,
                position_y=position_y,
                content=content,
                created_by=created_by,
            )

            session.add(comment)
            session.flush()  # Get the comment ID for mentions

            # Create mentions if specified
            mentioned_user_ids = mentioned_user_ids or []
            for user_id in mentioned_user_ids:
                if isinstance(user_id, str) and uuid_value(user_id):
                    mention = WorkflowCommentMention(
                        comment_id=comment.id,
                        reply_id=None,  # This is a comment mention, not reply mention
                        mentioned_user_id=user_id,
                    )
                    session.add(mention)

            session.commit()

            # Return only what we need - id and created_at
            return {"id": comment.id, "created_at": comment.created_at}

    @staticmethod
    def update_comment(
        tenant_id: str,
        app_id: str,
        comment_id: str,
        user_id: str,
        content: str,
        position_x: float | None = None,
        position_y: float | None = None,
        mentioned_user_ids: list[str] | None = None,
    ) -> dict:
        """Update a workflow comment."""
        WorkflowCommentService._validate_content(content)

        with Session(db.engine, expire_on_commit=False) as session:
            # Get comment with validation
            stmt = select(WorkflowComment).where(
                WorkflowComment.id == comment_id,
                WorkflowComment.tenant_id == tenant_id,
                WorkflowComment.app_id == app_id,
            )
            comment = session.scalar(stmt)

            if not comment:
                raise NotFound("Comment not found")

            # Only the creator can update the comment
            if comment.created_by != user_id:
                raise Forbidden("Only the comment creator can update it")

            # Update comment fields
            comment.content = content
            if position_x is not None:
                comment.position_x = position_x
            if position_y is not None:
                comment.position_y = position_y

            # Update mentions - first remove existing mentions for this comment only (not replies)
            existing_mentions = session.scalars(
                select(WorkflowCommentMention).where(
                    WorkflowCommentMention.comment_id == comment.id,
                    WorkflowCommentMention.reply_id.is_(None),  # Only comment mentions, not reply mentions
                )
            ).all()
            for mention in existing_mentions:
                session.delete(mention)

            # Add new mentions
            mentioned_user_ids = mentioned_user_ids or []
            for user_id_str in mentioned_user_ids:
                if isinstance(user_id_str, str) and uuid_value(user_id_str):
                    mention = WorkflowCommentMention(
                        comment_id=comment.id,
                        reply_id=None,  # This is a comment mention
                        mentioned_user_id=user_id_str,
                    )
                    session.add(mention)

            session.commit()

            return {"id": comment.id, "updated_at": comment.updated_at}

    @staticmethod
    def delete_comment(tenant_id: str, app_id: str, comment_id: str, user_id: str) -> None:
        """Delete a workflow comment."""
        with Session(db.engine, expire_on_commit=False) as session:
|
||||
comment = WorkflowCommentService.get_comment(tenant_id, app_id, comment_id, session)
|
||||
|
||||
# Only the creator can delete the comment
|
||||
if comment.created_by != user_id:
|
||||
raise Forbidden("Only the comment creator can delete it")
|
||||
|
||||
# Delete associated mentions (both comment and reply mentions)
|
||||
mentions = session.scalars(
|
||||
select(WorkflowCommentMention).where(WorkflowCommentMention.comment_id == comment_id)
|
||||
).all()
|
||||
for mention in mentions:
|
||||
session.delete(mention)
|
||||
|
||||
# Delete associated replies
|
||||
replies = session.scalars(
|
||||
select(WorkflowCommentReply).where(WorkflowCommentReply.comment_id == comment_id)
|
||||
).all()
|
||||
for reply in replies:
|
||||
session.delete(reply)
|
||||
|
||||
session.delete(comment)
|
||||
session.commit()
|
||||
|
||||
@staticmethod
|
||||
def resolve_comment(tenant_id: str, app_id: str, comment_id: str, user_id: str) -> WorkflowComment:
|
||||
"""Resolve a workflow comment."""
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
comment = WorkflowCommentService.get_comment(tenant_id, app_id, comment_id, session)
|
||||
if comment.resolved:
|
||||
return comment
|
||||
|
||||
comment.resolved = True
|
||||
comment.resolved_at = naive_utc_now()
|
||||
comment.resolved_by = user_id
|
||||
session.commit()
|
||||
|
||||
return comment
|
||||
|
||||
@staticmethod
|
||||
def create_reply(
|
||||
comment_id: str, content: str, created_by: str, mentioned_user_ids: list[str] | None = None
|
||||
) -> dict:
|
||||
"""Add a reply to a workflow comment."""
|
||||
WorkflowCommentService._validate_content(content)
|
||||
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
# Check if comment exists
|
||||
comment = session.get(WorkflowComment, comment_id)
|
||||
if not comment:
|
||||
raise NotFound("Comment not found")
|
||||
|
||||
reply = WorkflowCommentReply(comment_id=comment_id, content=content, created_by=created_by)
|
||||
|
||||
session.add(reply)
|
||||
session.flush() # Get the reply ID for mentions
|
||||
|
||||
# Create mentions if specified
|
||||
mentioned_user_ids = mentioned_user_ids or []
|
||||
for user_id in mentioned_user_ids:
|
||||
if isinstance(user_id, str) and uuid_value(user_id):
|
||||
# Create mention linking to specific reply
|
||||
mention = WorkflowCommentMention(
|
||||
comment_id=comment_id, reply_id=reply.id, mentioned_user_id=user_id
|
||||
)
|
||||
session.add(mention)
|
||||
|
||||
session.commit()
|
||||
|
||||
return {"id": reply.id, "created_at": reply.created_at}
|
||||
|
||||
@staticmethod
|
||||
def update_reply(reply_id: str, user_id: str, content: str, mentioned_user_ids: list[str] | None = None) -> dict:
|
||||
"""Update a comment reply."""
|
||||
WorkflowCommentService._validate_content(content)
|
||||
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
reply = session.get(WorkflowCommentReply, reply_id)
|
||||
if not reply:
|
||||
raise NotFound("Reply not found")
|
||||
|
||||
# Only the creator can update the reply
|
||||
if reply.created_by != user_id:
|
||||
raise Forbidden("Only the reply creator can update it")
|
||||
|
||||
reply.content = content
|
||||
|
||||
# Update mentions - first remove existing mentions for this reply
|
||||
existing_mentions = session.scalars(
|
||||
select(WorkflowCommentMention).where(WorkflowCommentMention.reply_id == reply.id)
|
||||
).all()
|
||||
for mention in existing_mentions:
|
||||
session.delete(mention)
|
||||
|
||||
# Add mentions
|
||||
mentioned_user_ids = mentioned_user_ids or []
|
||||
for user_id_str in mentioned_user_ids:
|
||||
if isinstance(user_id_str, str) and uuid_value(user_id_str):
|
||||
mention = WorkflowCommentMention(
|
||||
comment_id=reply.comment_id, reply_id=reply.id, mentioned_user_id=user_id_str
|
||||
)
|
||||
session.add(mention)
|
||||
|
||||
session.commit()
|
||||
session.refresh(reply) # Refresh to get updated timestamp
|
||||
|
||||
return {"id": reply.id, "updated_at": reply.updated_at}
|
||||
|
||||
@staticmethod
|
||||
def delete_reply(reply_id: str, user_id: str) -> None:
|
||||
"""Delete a comment reply."""
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
reply = session.get(WorkflowCommentReply, reply_id)
|
||||
if not reply:
|
||||
raise NotFound("Reply not found")
|
||||
|
||||
# Only the creator can delete the reply
|
||||
if reply.created_by != user_id:
|
||||
raise Forbidden("Only the reply creator can delete it")
|
||||
|
||||
# Delete associated mentions first
|
||||
mentions = session.scalars(
|
||||
select(WorkflowCommentMention).where(WorkflowCommentMention.reply_id == reply_id)
|
||||
).all()
|
||||
for mention in mentions:
|
||||
session.delete(mention)
|
||||
|
||||
session.delete(reply)
|
||||
session.commit()
|
||||
|
||||
@staticmethod
|
||||
def validate_comment_access(comment_id: str, tenant_id: str, app_id: str) -> WorkflowComment:
|
||||
"""Validate that a comment belongs to the specified tenant and app."""
|
||||
return WorkflowCommentService.get_comment(tenant_id, app_id, comment_id)
|
||||
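A minimal usage sketch of the service above, assuming a configured `db.engine` session; the UUIDs below are generated placeholders rather than real records:

```python
import uuid

from services.workflow_comment_service import WorkflowCommentService

tenant_id = str(uuid.uuid4())  # placeholder IDs for illustration only
app_id = str(uuid.uuid4())
author_id = str(uuid.uuid4())
colleague_id = str(uuid.uuid4())

created = WorkflowCommentService.create_comment(
    tenant_id=tenant_id,
    app_id=app_id,
    created_by=author_id,
    content="This branch looks wrong",
    position_x=120.0,
    position_y=48.0,
    mentioned_user_ids=[colleague_id, "not-a-uuid"],  # non-UUID entries are skipped by uuid_value
)

reply = WorkflowCommentService.create_reply(
    comment_id=created["id"],
    content="Agreed, fixing it now",
    created_by=colleague_id,
)
```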
@ -249,78 +249,6 @@ class WorkflowService:
        # return draft workflow
        return workflow

    def update_draft_workflow_environment_variables(
        self,
        *,
        app_model: App,
        environment_variables: Sequence[VariableBase],
        account: Account,
    ):
        """
        Update draft workflow environment variables
        """
        # fetch draft workflow by app_model
        workflow = self.get_draft_workflow(app_model=app_model)

        if not workflow:
            raise ValueError("No draft workflow found.")

        workflow.environment_variables = environment_variables
        workflow.updated_by = account.id
        workflow.updated_at = naive_utc_now()

        # commit db session changes
        db.session.commit()

    def update_draft_workflow_conversation_variables(
        self,
        *,
        app_model: App,
        conversation_variables: Sequence[VariableBase],
        account: Account,
    ):
        """
        Update draft workflow conversation variables
        """
        # fetch draft workflow by app_model
        workflow = self.get_draft_workflow(app_model=app_model)

        if not workflow:
            raise ValueError("No draft workflow found.")

        workflow.conversation_variables = conversation_variables
        workflow.updated_by = account.id
        workflow.updated_at = naive_utc_now()

        # commit db session changes
        db.session.commit()

    def update_draft_workflow_features(
        self,
        *,
        app_model: App,
        features: dict,
        account: Account,
    ):
        """
        Update draft workflow features
        """
        # fetch draft workflow by app_model
        workflow = self.get_draft_workflow(app_model=app_model)

        if not workflow:
            raise ValueError("No draft workflow found.")

        # validate features structure
        self.validate_features_structure(app_model=app_model, features=features)

        workflow.features = json.dumps(features)
        workflow.updated_by = account.id
        workflow.updated_at = naive_utc_now()

        # commit db session changes
        db.session.commit()

    def publish_workflow(
        self,
        *,
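The three removed update helpers above repeat one shape: fetch the draft, mutate a single field, stamp the audit columns, commit. A condensed sketch of that shared pattern (`field`/`value` are illustrative names, not part of the codebase):

```python
def _update_draft_field(self, *, app_model: App, field: str, value, account: Account) -> None:
    # Illustrative consolidation of the repeated fetch-mutate-commit pattern above.
    workflow = self.get_draft_workflow(app_model=app_model)
    if not workflow:
        raise ValueError("No draft workflow found.")
    setattr(workflow, field, value)
    workflow.updated_by = account.id
    workflow.updated_at = naive_utc_now()
    db.session.commit()
```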
@ -17,7 +17,7 @@ logger = logging.getLogger(__name__)

RETRY_TIMES_OF_ONE_PLUGIN_IN_ONE_TENANT = 3
CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:"
CACHE_REDIS_TTL = 60 * 15  # 15 minutes
CACHE_REDIS_TTL = 60 * 60  # 1 hour


def _get_redis_cache_key(plugin_id: str) -> str:
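The hunk cuts off before the body of `_get_redis_cache_key`; given the prefix constant above, a plausible, purely hypothetical reconstruction is:

```python
def _get_redis_cache_key(plugin_id: str) -> str:
    # Hypothetical body inferred from CACHE_REDIS_KEY_PREFIX; the real one is elided by the diff.
    return f"{CACHE_REDIS_KEY_PREFIX}{plugin_id}"
```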
@ -38,7 +38,7 @@ os.environ["OPENDAL_FS_ROOT"] = "/tmp/dify-storage"
os.environ.setdefault("STORAGE_TYPE", "opendal")
os.environ.setdefault("OPENDAL_SCHEME", "fs")

_SIO_APP, _CACHED_APP = create_app()
_CACHED_APP = create_app()


@pytest.fixture
@ -364,7 +364,7 @@ def _create_app_with_containers() -> Flask:

    # Create and configure the Flask application
    logger.info("Initializing Flask application...")
    sio_app, app = create_app()
    app = create_app()
    logger.info("Flask application created successfully")

    # Initialize database schema
@ -274,7 +274,6 @@ class TestFeatureService:
        mock_config.ENABLE_EMAIL_CODE_LOGIN = True
        mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True
        mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False
        mock_config.ENABLE_COLLABORATION_MODE = True
        mock_config.ALLOW_REGISTER = False
        mock_config.ALLOW_CREATE_WORKSPACE = False
        mock_config.MAIL_TYPE = "smtp"
@ -299,7 +298,6 @@ class TestFeatureService:
        # Verify authentication settings
        assert result.enable_email_code_login is True
        assert result.enable_email_password_login is False
        assert result.enable_collaboration_mode is True
        assert result.is_allow_register is False
        assert result.is_allow_create_workspace is False

@ -404,7 +402,6 @@ class TestFeatureService:
        mock_config.ENABLE_EMAIL_CODE_LOGIN = True
        mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True
        mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False
        mock_config.ENABLE_COLLABORATION_MODE = False
        mock_config.ALLOW_REGISTER = True
        mock_config.ALLOW_CREATE_WORKSPACE = True
        mock_config.MAIL_TYPE = "smtp"
@ -426,7 +423,6 @@ class TestFeatureService:
        assert result.enable_email_code_login is True
        assert result.enable_email_password_login is True
        assert result.enable_social_oauth_login is False
        assert result.enable_collaboration_mode is False
        assert result.is_allow_register is True
        assert result.is_allow_create_workspace is True
        assert result.is_email_setup is True
@ -0,0 +1,56 @@
import builtins
from unittest.mock import patch

import pytest
from flask import Flask
from flask.views import MethodView

from extensions import ext_fastopenapi

# Expose MethodView as a builtin if it is not already present.
if not hasattr(builtins, "MethodView"):
    builtins.MethodView = MethodView  # type: ignore[attr-defined]


@pytest.fixture
def app() -> Flask:
    app = Flask(__name__)
    app.config["TESTING"] = True
    return app


def test_console_setup_fastopenapi_get_not_started(app: Flask):
    ext_fastopenapi.init_app(app)

    with (
        patch("controllers.console.setup.dify_config.EDITION", "SELF_HOSTED"),
        patch("controllers.console.setup.get_setup_status", return_value=None),
    ):
        client = app.test_client()
        response = client.get("/console/api/setup")

    assert response.status_code == 200
    assert response.get_json() == {"step": "not_started", "setup_at": None}


def test_console_setup_fastopenapi_post_success(app: Flask):
    ext_fastopenapi.init_app(app)

    payload = {
        "email": "admin@example.com",
        "name": "Admin",
        "password": "Passw0rd1",
        "language": "en-US",
    }

    with (
        patch("controllers.console.wraps.dify_config.EDITION", "SELF_HOSTED"),
        patch("controllers.console.setup.get_setup_status", return_value=None),
        patch("controllers.console.setup.TenantService.get_tenant_count", return_value=0),
        patch("controllers.console.setup.get_init_validate_status", return_value=True),
        patch("controllers.console.setup.RegisterService.setup"),
    ):
        client = app.test_client()
        response = client.post("/console/api/setup", json=payload)

    assert response.status_code == 201
    assert response.get_json() == {"result": "success"}
@ -0,0 +1,35 @@
import builtins
from unittest.mock import patch

import pytest
from flask import Flask
from flask.views import MethodView

from configs import dify_config
from extensions import ext_fastopenapi

# Expose MethodView as a builtin if it is not already present.
if not hasattr(builtins, "MethodView"):
    builtins.MethodView = MethodView  # type: ignore[attr-defined]


@pytest.fixture
def app() -> Flask:
    app = Flask(__name__)
    app.config["TESTING"] = True
    return app


def test_console_version_fastopenapi_returns_current_version(app: Flask):
    ext_fastopenapi.init_app(app)

    with patch("controllers.console.version.dify_config.CHECK_UPDATE_URL", None):
        client = app.test_client()
        response = client.get("/console/api/version", query_string={"current_version": "0.0.0"})

    assert response.status_code == 200
    data = response.get_json()
    assert data["version"] == dify_config.project.version
    assert data["release_date"] == ""
    assert data["release_notes"] == ""
    assert data["can_auto_update"] is False
    assert "features" in data
@ -1,39 +0,0 @@
from types import SimpleNamespace
from unittest.mock import patch

from controllers.console.setup import SetupApi


class TestSetupApi:
    def test_post_lowercases_email_before_register(self):
        """Ensure setup registration normalizes email casing."""
        payload = {
            "email": "Admin@Example.com",
            "name": "Admin User",
            "password": "ValidPass123!",
            "language": "en-US",
        }
        setup_api = SetupApi(api=None)

        mock_console_ns = SimpleNamespace(payload=payload)

        with (
            patch("controllers.console.setup.console_ns", mock_console_ns),
            patch("controllers.console.setup.get_setup_status", return_value=False),
            patch("controllers.console.setup.TenantService.get_tenant_count", return_value=0),
            patch("controllers.console.setup.get_init_validate_status", return_value=True),
            patch("controllers.console.setup.extract_remote_ip", return_value="127.0.0.1"),
            patch("controllers.console.setup.request", object()),
            patch("controllers.console.setup.RegisterService.setup") as mock_register,
        ):
            response, status = setup_api.post()

        assert response == {"result": "success"}
        assert status == 201
        mock_register.assert_called_once_with(
            email="admin@example.com",
            name=payload["name"],
            password=payload["password"],
            ip_address="127.0.0.1",
            language=payload["language"],
        )
@ -1,5 +1,7 @@
from unittest.mock import MagicMock, patch

import pytest

from core.model_runtime.entities.message_entities import AssistantPromptMessage
from core.model_runtime.model_providers.__base.large_language_model import _increase_tool_call

@ -97,3 +99,14 @@ def test__increase_tool_call():
    mock_id_generator.side_effect = [_exp_case.id for _exp_case in EXPECTED_CASE_4]
    with patch("core.model_runtime.model_providers.__base.large_language_model._gen_tool_call_id", mock_id_generator):
        _run_case(INPUTS_CASE_4, EXPECTED_CASE_4)


def test__increase_tool_call__no_id_no_name_first_delta_should_raise():
    inputs = [
        ToolCall(id="", type="function", function=ToolCall.ToolCallFunction(name="", arguments='{"arg1": ')),
        ToolCall(id="", type="function", function=ToolCall.ToolCallFunction(name="func_foo", arguments='"value"}')),
    ]
    actual: list[ToolCall] = []
    with patch("core.model_runtime.model_providers.__base.large_language_model._gen_tool_call_id", MagicMock()):
        with pytest.raises(ValueError):
            _increase_tool_call(inputs, actual)
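The new test pins a merge invariant for streamed tool-call deltas: a chunk carrying only argument fragments (empty `id` and empty `name`) may only extend an existing call, never start one. A simplified sketch of that rule, not the actual `_increase_tool_call` implementation:

```python
def merge_delta(delta: ToolCall, tool_calls: list[ToolCall]) -> None:
    # Simplified illustration of the behavior the tests above assert.
    if not delta.id and not delta.function.name:
        if not tool_calls:
            # Nothing to extend: surface the invalid delta sequence explicitly.
            raise ValueError("tool call delta has no id or name and no existing call to extend")
        tool_calls[-1].function.arguments += delta.function.arguments
    else:
        # A new id or name anchors a new tool call.
        tool_calls.append(delta)
```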
@ -0,0 +1,103 @@
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
from core.model_runtime.entities.message_entities import (
    AssistantPromptMessage,
    TextPromptMessageContent,
    UserPromptMessage,
)
from core.model_runtime.model_providers.__base.large_language_model import _normalize_non_stream_plugin_result


def _make_chunk(
    *,
    model: str = "test-model",
    content: str | list[TextPromptMessageContent] | None,
    tool_calls: list[AssistantPromptMessage.ToolCall] | None = None,
    usage: LLMUsage | None = None,
    system_fingerprint: str | None = None,
) -> LLMResultChunk:
    message = AssistantPromptMessage(content=content, tool_calls=tool_calls or [])
    delta = LLMResultChunkDelta(index=0, message=message, usage=usage)
    return LLMResultChunk(model=model, delta=delta, system_fingerprint=system_fingerprint)


def test__normalize_non_stream_plugin_result__from_first_chunk_str_content_and_tool_calls():
    prompt_messages = [UserPromptMessage(content="hi")]

    tool_calls = [
        AssistantPromptMessage.ToolCall(
            id="1",
            type="function",
            function=AssistantPromptMessage.ToolCall.ToolCallFunction(name="func_foo", arguments=""),
        ),
        AssistantPromptMessage.ToolCall(
            id="",
            type="function",
            function=AssistantPromptMessage.ToolCall.ToolCallFunction(name="", arguments='{"arg1": '),
        ),
        AssistantPromptMessage.ToolCall(
            id="",
            type="function",
            function=AssistantPromptMessage.ToolCall.ToolCallFunction(name="", arguments='"value"}'),
        ),
    ]

    usage = LLMUsage.empty_usage().model_copy(update={"prompt_tokens": 1, "total_tokens": 1})
    chunk = _make_chunk(content="hello", tool_calls=tool_calls, usage=usage, system_fingerprint="fp-1")

    result = _normalize_non_stream_plugin_result(
        model="test-model", prompt_messages=prompt_messages, result=iter([chunk])
    )

    assert result.model == "test-model"
    assert result.prompt_messages == prompt_messages
    assert result.message.content == "hello"
    assert result.usage.prompt_tokens == 1
    assert result.system_fingerprint == "fp-1"
    assert result.message.tool_calls == [
        AssistantPromptMessage.ToolCall(
            id="1",
            type="function",
            function=AssistantPromptMessage.ToolCall.ToolCallFunction(name="func_foo", arguments='{"arg1": "value"}'),
        )
    ]


def test__normalize_non_stream_plugin_result__from_first_chunk_list_content():
    prompt_messages = [UserPromptMessage(content="hi")]

    content_list = [TextPromptMessageContent(data="a"), TextPromptMessageContent(data="b")]
    chunk = _make_chunk(content=content_list, usage=LLMUsage.empty_usage())

    result = _normalize_non_stream_plugin_result(
        model="test-model", prompt_messages=prompt_messages, result=iter([chunk])
    )

    assert result.message.content == content_list


def test__normalize_non_stream_plugin_result__passthrough_llm_result():
    prompt_messages = [UserPromptMessage(content="hi")]
    llm_result = LLMResult(
        model="test-model",
        prompt_messages=prompt_messages,
        message=AssistantPromptMessage(content="ok"),
        usage=LLMUsage.empty_usage(),
    )

    assert (
        _normalize_non_stream_plugin_result(model="test-model", prompt_messages=prompt_messages, result=llm_result)
        == llm_result
    )


def test__normalize_non_stream_plugin_result__empty_iterator_defaults():
    prompt_messages = [UserPromptMessage(content="hi")]

    result = _normalize_non_stream_plugin_result(model="test-model", prompt_messages=prompt_messages, result=iter([]))

    assert result.model == "test-model"
    assert result.prompt_messages == prompt_messages
    assert result.message.content == []
    assert result.message.tool_calls == []
    assert result.usage == LLMUsage.empty_usage()
    assert result.system_fingerprint is None
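Taken together, these four tests pin the normalization contract: pass an existing `LLMResult` through untouched, synthesize one from the first chunk of a stream, and fall back to empty defaults when the stream yields nothing. A rough sketch of that contract, hedged as an illustration rather than the real function (`merge_delta` is the sketch shown after the `_increase_tool_call` tests):

```python
def normalize(model, prompt_messages, result):
    # Illustration of the contract pinned above; not the actual implementation.
    if isinstance(result, LLMResult):
        return result  # already a full result: pass through unchanged
    first = next(iter(result), None)
    if first is None:
        # Empty stream: empty assistant message, empty usage, no fingerprint.
        return LLMResult(
            model=model,
            prompt_messages=prompt_messages,
            message=AssistantPromptMessage(content=[], tool_calls=[]),
            usage=LLMUsage.empty_usage(),
        )
    message = first.delta.message
    tool_calls: list[AssistantPromptMessage.ToolCall] = []
    for delta in message.tool_calls:
        merge_delta(delta, tool_calls)  # merge tool-call deltas carried on the first chunk
    return LLMResult(
        model=first.model,
        prompt_messages=prompt_messages,
        message=AssistantPromptMessage(content=message.content, tool_calls=tool_calls),
        usage=first.delta.usage or LLMUsage.empty_usage(),
        system_fingerprint=first.system_fingerprint,
    )
```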
@ -1,121 +0,0 @@
import json
from unittest.mock import Mock

import pytest

from repositories import workflow_collaboration_repository as repo_module
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository


class TestWorkflowCollaborationRepository:
    @pytest.fixture
    def mock_redis(self, monkeypatch: pytest.MonkeyPatch) -> Mock:
        mock_redis = Mock()
        monkeypatch.setattr(repo_module, "redis_client", mock_redis)
        return mock_redis

    def test_get_sid_mapping_returns_mapping(self, mock_redis: Mock) -> None:
        # Arrange
        mock_redis.get.return_value = b'{"workflow_id":"wf-1","user_id":"u-1"}'
        repository = WorkflowCollaborationRepository()

        # Act
        result = repository.get_sid_mapping("sid-1")

        # Assert
        assert result == {"workflow_id": "wf-1", "user_id": "u-1"}

    def test_list_sessions_filters_invalid_entries(self, mock_redis: Mock) -> None:
        # Arrange
        mock_redis.hgetall.return_value = {
            b"sid-1": b'{"user_id":"u-1","username":"Jane","sid":"sid-1","connected_at":2}',
            b"sid-2": b'{"username":"Missing","sid":"sid-2"}',
            b"sid-3": b"not-json",
        }
        repository = WorkflowCollaborationRepository()

        # Act
        result = repository.list_sessions("wf-1")

        # Assert
        assert result == [
            {
                "user_id": "u-1",
                "username": "Jane",
                "avatar": None,
                "sid": "sid-1",
                "connected_at": 2,
            }
        ]

    def test_set_session_info_persists_payload(self, mock_redis: Mock) -> None:
        # Arrange
        mock_redis.exists.return_value = True
        repository = WorkflowCollaborationRepository()
        payload = {
            "user_id": "u-1",
            "username": "Jane",
            "avatar": None,
            "sid": "sid-1",
            "connected_at": 1,
        }

        # Act
        repository.set_session_info("wf-1", payload)

        # Assert
        assert mock_redis.hset.called
        workflow_key, sid, session_json = mock_redis.hset.call_args.args
        assert workflow_key == "workflow_online_users:wf-1"
        assert sid == "sid-1"
        assert json.loads(session_json)["user_id"] == "u-1"
        assert mock_redis.set.called

    def test_refresh_session_state_expires_keys(self, mock_redis: Mock) -> None:
        # Arrange
        mock_redis.exists.return_value = True
        repository = WorkflowCollaborationRepository()

        # Act
        repository.refresh_session_state("wf-1", "sid-1")

        # Assert
        assert mock_redis.expire.call_count == 2

    def test_get_current_leader_decodes_bytes(self, mock_redis: Mock) -> None:
        # Arrange
        mock_redis.get.return_value = b"sid-1"
        repository = WorkflowCollaborationRepository()

        # Act
        result = repository.get_current_leader("wf-1")

        # Assert
        assert result == "sid-1"

    def test_set_leader_if_absent_uses_nx(self, mock_redis: Mock) -> None:
        # Arrange
        mock_redis.set.return_value = True
        repository = WorkflowCollaborationRepository()

        # Act
        result = repository.set_leader_if_absent("wf-1", "sid-1")

        # Assert
        assert result is True
        _key, _value = mock_redis.set.call_args.args
        assert _key == "workflow_leader:wf-1"
        assert _value == "sid-1"
        assert mock_redis.set.call_args.kwargs["nx"] is True
        assert "ex" in mock_redis.set.call_args.kwargs

    def test_get_session_sids_decodes(self, mock_redis: Mock) -> None:
        # Arrange
        mock_redis.hkeys.return_value = [b"sid-1", "sid-2"]
        repository = WorkflowCollaborationRepository()

        # Act
        result = repository.get_session_sids("wf-1")

        # Assert
        assert result == ["sid-1", "sid-2"]
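The assertions in the deleted suite above imply a simple Redis key layout for presence and leadership. A minimal sketch of that layout, assuming a reachable Redis and an illustrative TTL:

```python
import json

import redis

r = redis.Redis()

# Implied layout:
#   workflow_online_users:<workflow_id> -> hash of sid -> session-info JSON
#   workflow_leader:<workflow_id>       -> string holding the leader sid, set NX with an expiry

def set_session_info(workflow_id: str, payload: dict) -> None:
    r.hset(f"workflow_online_users:{workflow_id}", payload["sid"], json.dumps(payload))

def set_leader_if_absent(workflow_id: str, sid: str, ttl_seconds: int = 30) -> bool:
    # nx=True makes this an atomic "claim leadership only if vacant"; the TTL value is assumed.
    return bool(r.set(f"workflow_leader:{workflow_id}", sid, nx=True, ex=ttl_seconds))
```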
@ -1,271 +0,0 @@
from unittest.mock import Mock, patch

import pytest

from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository
from services.workflow_collaboration_service import WorkflowCollaborationService


class TestWorkflowCollaborationService:
    @pytest.fixture
    def service(self) -> tuple[WorkflowCollaborationService, Mock, Mock]:
        repository = Mock(spec=WorkflowCollaborationRepository)
        socketio = Mock()
        return WorkflowCollaborationService(repository, socketio), repository, socketio

    def test_register_session_returns_leader_status(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, socketio = service
        socketio.get_session.return_value = {"user_id": "u-1", "username": "Jane", "avatar": None}

        with (
            patch.object(collaboration_service, "get_or_set_leader", return_value="sid-1"),
            patch.object(collaboration_service, "broadcast_online_users"),
        ):
            # Act
            result = collaboration_service.register_session("wf-1", "sid-1")

        # Assert
        assert result == ("u-1", True)
        repository.set_session_info.assert_called_once()
        socketio.enter_room.assert_called_once_with("sid-1", "wf-1")
        socketio.emit.assert_called_once_with("status", {"isLeader": True}, room="sid-1")

    def test_register_session_returns_none_when_missing_user(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, _repository, socketio = service
        socketio.get_session.return_value = {}

        # Act
        result = collaboration_service.register_session("wf-1", "sid-1")

        # Assert
        assert result is None

    def test_relay_collaboration_event_unauthorized(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_sid_mapping.return_value = None

        # Act
        result = collaboration_service.relay_collaboration_event("sid-1", {})

        # Assert
        assert result == ({"msg": "unauthorized"}, 401)

    def test_relay_collaboration_event_emits_update(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, socketio = service
        repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}
        payload = {"type": "mouse_move", "data": {"x": 1}, "timestamp": 123}

        # Act
        result = collaboration_service.relay_collaboration_event("sid-1", payload)

        # Assert
        assert result == ({"msg": "event_broadcasted"}, 200)
        socketio.emit.assert_called_once_with(
            "collaboration_update",
            {"type": "mouse_move", "userId": "u-1", "data": {"x": 1}, "timestamp": 123},
            room="wf-1",
            skip_sid="sid-1",
        )

    def test_relay_graph_event_unauthorized(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_sid_mapping.return_value = None

        # Act
        result = collaboration_service.relay_graph_event("sid-1", {"nodes": []})

        # Assert
        assert result == ({"msg": "unauthorized"}, 401)

    def test_disconnect_session_no_mapping(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_sid_mapping.return_value = None

        # Act
        collaboration_service.disconnect_session("sid-1")

        # Assert
        repository.delete_session.assert_not_called()

    def test_disconnect_session_cleans_up(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}

        with (
            patch.object(collaboration_service, "handle_leader_disconnect") as handle_leader_disconnect,
            patch.object(collaboration_service, "broadcast_online_users") as broadcast_online_users,
        ):
            # Act
            collaboration_service.disconnect_session("sid-1")

        # Assert
        repository.delete_session.assert_called_once_with("wf-1", "sid-1")
        handle_leader_disconnect.assert_called_once_with("wf-1", "sid-1")
        broadcast_online_users.assert_called_once_with("wf-1")

    def test_get_or_set_leader_returns_active_leader(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_current_leader.return_value = "sid-1"

        with patch.object(collaboration_service, "is_session_active", return_value=True):
            # Act
            result = collaboration_service.get_or_set_leader("wf-1", "sid-2")

        # Assert
        assert result == "sid-1"
        repository.set_leader_if_absent.assert_not_called()

    def test_get_or_set_leader_replaces_dead_leader(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_current_leader.return_value = "sid-1"
        repository.set_leader_if_absent.return_value = True

        with (
            patch.object(collaboration_service, "is_session_active", return_value=False),
            patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change,
        ):
            # Act
            result = collaboration_service.get_or_set_leader("wf-1", "sid-2")

        # Assert
        assert result == "sid-2"
        repository.delete_session.assert_called_once_with("wf-1", "sid-1")
        repository.delete_leader.assert_called_once_with("wf-1")
        broadcast_leader_change.assert_called_once_with("wf-1", "sid-2")

    def test_get_or_set_leader_falls_back_to_existing(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_current_leader.side_effect = [None, "sid-3"]
        repository.set_leader_if_absent.return_value = False

        # Act
        result = collaboration_service.get_or_set_leader("wf-1", "sid-2")

        # Assert
        assert result == "sid-3"

    def test_handle_leader_disconnect_elects_new(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_current_leader.return_value = "sid-1"
        repository.get_session_sids.return_value = ["sid-2"]

        with patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change:
            # Act
            collaboration_service.handle_leader_disconnect("wf-1", "sid-1")

        # Assert
        repository.set_leader.assert_called_once_with("wf-1", "sid-2")
        broadcast_leader_change.assert_called_once_with("wf-1", "sid-2")

    def test_handle_leader_disconnect_clears_when_empty(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_current_leader.return_value = "sid-1"
        repository.get_session_sids.return_value = []

        # Act
        collaboration_service.handle_leader_disconnect("wf-1", "sid-1")

        # Assert
        repository.delete_leader.assert_called_once_with("wf-1")

    def test_broadcast_online_users_sorts_and_emits(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, socketio = service
        repository.list_sessions.return_value = [
            {"user_id": "u-1", "username": "A", "avatar": None, "sid": "sid-1", "connected_at": 3},
            {"user_id": "u-2", "username": "B", "avatar": None, "sid": "sid-2", "connected_at": 1},
        ]
        repository.get_current_leader.return_value = "sid-1"

        # Act
        collaboration_service.broadcast_online_users("wf-1")

        # Assert
        socketio.emit.assert_called_once_with(
            "online_users",
            {
                "workflow_id": "wf-1",
                "users": [
                    {"user_id": "u-2", "username": "B", "avatar": None, "sid": "sid-2", "connected_at": 1},
                    {"user_id": "u-1", "username": "A", "avatar": None, "sid": "sid-1", "connected_at": 3},
                ],
                "leader": "sid-1",
            },
            room="wf-1",
        )

    def test_refresh_session_state_expires_active_leader(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_current_leader.return_value = "sid-1"

        with patch.object(collaboration_service, "is_session_active", return_value=True):
            # Act
            collaboration_service.refresh_session_state("wf-1", "sid-1")

        # Assert
        repository.refresh_session_state.assert_called_once_with("wf-1", "sid-1")
        repository.expire_leader.assert_called_once_with("wf-1")
        repository.set_leader.assert_not_called()

    def test_refresh_session_state_sets_leader_when_missing(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_current_leader.return_value = None

        with patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change:
            # Act
            collaboration_service.refresh_session_state("wf-1", "sid-2")

        # Assert
        repository.set_leader.assert_called_once_with("wf-1", "sid-2")
        broadcast_leader_change.assert_called_once_with("wf-1", "sid-2")

    def test_relay_graph_event_emits_update(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
        # Arrange
        collaboration_service, repository, socketio = service
        repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}

        # Act
        result = collaboration_service.relay_graph_event("sid-1", {"nodes": []})

        # Assert
        assert result == ({"msg": "graph_update_broadcasted"}, 200)
        repository.refresh_session_state.assert_called_once_with("wf-1", "sid-1")
        socketio.emit.assert_called_once_with("graph_update", {"nodes": []}, room="wf-1", skip_sid="sid-1")
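The three `get_or_set_leader` tests above pin a three-branch election. Condensed into illustrative code (method names come from the tests; the `is_session_active` call signature is an assumption):

```python
def get_or_set_leader(self, workflow_id: str, sid: str) -> str:
    # Illustrative condensation of the three election branches tested above.
    current = self.repository.get_current_leader(workflow_id)
    if current and self.is_session_active(workflow_id, current):
        return current  # an active leader keeps the role
    if current:
        # Dead leader: clean up its session and leadership key first.
        self.repository.delete_session(workflow_id, current)
        self.repository.delete_leader(workflow_id)
    if self.repository.set_leader_if_absent(workflow_id, sid):
        self.broadcast_leader_change(workflow_id, sid)
        return sid  # we won the claim
    return self.repository.get_current_leader(workflow_id)  # someone else won the race
```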
@ -1,245 +0,0 @@
from unittest.mock import MagicMock, Mock, patch

import pytest
from werkzeug.exceptions import Forbidden, NotFound

from services import workflow_comment_service as service_module
from services.workflow_comment_service import WorkflowCommentService


@pytest.fixture
def mock_session(monkeypatch: pytest.MonkeyPatch) -> Mock:
    session = Mock()
    context_manager = MagicMock()
    context_manager.__enter__.return_value = session
    context_manager.__exit__.return_value = False
    mock_db = MagicMock()
    mock_db.engine = Mock()
    monkeypatch.setattr(service_module, "Session", Mock(return_value=context_manager))
    monkeypatch.setattr(service_module, "db", mock_db)
    return session


def _mock_scalars(result_list: list[object]) -> Mock:
    scalars = Mock()
    scalars.all.return_value = result_list
    return scalars


class TestWorkflowCommentService:
    def test_validate_content_rejects_empty(self) -> None:
        with pytest.raises(ValueError):
            WorkflowCommentService._validate_content(" ")

    def test_validate_content_rejects_too_long(self) -> None:
        with pytest.raises(ValueError):
            WorkflowCommentService._validate_content("a" * 1001)

    def test_create_comment_creates_mentions(self, mock_session: Mock) -> None:
        comment = Mock()
        comment.id = "comment-1"
        comment.created_at = "ts"

        with (
            patch.object(service_module, "WorkflowComment", return_value=comment),
            patch.object(service_module, "WorkflowCommentMention", return_value=Mock()),
            patch.object(service_module, "uuid_value", side_effect=[True, False]),
        ):
            result = WorkflowCommentService.create_comment(
                tenant_id="tenant-1",
                app_id="app-1",
                created_by="user-1",
                content="hello",
                position_x=1.0,
                position_y=2.0,
                mentioned_user_ids=["user-2", "bad-id"],
            )

        assert result == {"id": "comment-1", "created_at": "ts"}
        assert mock_session.add.call_args_list[0].args[0] is comment
        assert mock_session.add.call_count == 2
        mock_session.commit.assert_called_once()

    def test_update_comment_raises_not_found(self, mock_session: Mock) -> None:
        mock_session.scalar.return_value = None

        with pytest.raises(NotFound):
            WorkflowCommentService.update_comment(
                tenant_id="tenant-1",
                app_id="app-1",
                comment_id="comment-1",
                user_id="user-1",
                content="hello",
            )

    def test_update_comment_raises_forbidden(self, mock_session: Mock) -> None:
        comment = Mock()
        comment.created_by = "owner"
        mock_session.scalar.return_value = comment

        with pytest.raises(Forbidden):
            WorkflowCommentService.update_comment(
                tenant_id="tenant-1",
                app_id="app-1",
                comment_id="comment-1",
                user_id="intruder",
                content="hello",
            )

    def test_update_comment_replaces_mentions(self, mock_session: Mock) -> None:
        comment = Mock()
        comment.id = "comment-1"
        comment.created_by = "owner"
        mock_session.scalar.return_value = comment

        existing_mentions = [Mock(), Mock()]
        mock_session.scalars.return_value = _mock_scalars(existing_mentions)

        with patch.object(service_module, "uuid_value", side_effect=[True, False]):
            result = WorkflowCommentService.update_comment(
                tenant_id="tenant-1",
                app_id="app-1",
                comment_id="comment-1",
                user_id="owner",
                content="updated",
                mentioned_user_ids=["user-2", "bad-id"],
            )

        assert result == {"id": "comment-1", "updated_at": comment.updated_at}
        assert mock_session.delete.call_count == 2
        assert mock_session.add.call_count == 1
        mock_session.commit.assert_called_once()

    def test_delete_comment_raises_forbidden(self, mock_session: Mock) -> None:
        comment = Mock()
        comment.created_by = "owner"

        with patch.object(WorkflowCommentService, "get_comment", return_value=comment):
            with pytest.raises(Forbidden):
                WorkflowCommentService.delete_comment("tenant-1", "app-1", "comment-1", "intruder")

    def test_delete_comment_removes_related_entities(self, mock_session: Mock) -> None:
        comment = Mock()
        comment.created_by = "owner"

        mentions = [Mock(), Mock()]
        replies = [Mock()]
        mock_session.scalars.side_effect = [_mock_scalars(mentions), _mock_scalars(replies)]

        with patch.object(WorkflowCommentService, "get_comment", return_value=comment):
            WorkflowCommentService.delete_comment("tenant-1", "app-1", "comment-1", "owner")

        assert mock_session.delete.call_count == 4
        mock_session.commit.assert_called_once()

    def test_resolve_comment_sets_fields(self, mock_session: Mock) -> None:
        comment = Mock()
        comment.resolved = False
        comment.resolved_at = None
        comment.resolved_by = None

        with (
            patch.object(WorkflowCommentService, "get_comment", return_value=comment),
            patch.object(service_module, "naive_utc_now", return_value="now"),
        ):
            result = WorkflowCommentService.resolve_comment("tenant-1", "app-1", "comment-1", "user-1")

        assert result is comment
        assert comment.resolved is True
        assert comment.resolved_at == "now"
        assert comment.resolved_by == "user-1"
        mock_session.commit.assert_called_once()

    def test_resolve_comment_noop_when_already_resolved(self, mock_session: Mock) -> None:
        comment = Mock()
        comment.resolved = True

        with patch.object(WorkflowCommentService, "get_comment", return_value=comment):
            result = WorkflowCommentService.resolve_comment("tenant-1", "app-1", "comment-1", "user-1")

        assert result is comment
        mock_session.commit.assert_not_called()

    def test_create_reply_requires_comment(self, mock_session: Mock) -> None:
        mock_session.get.return_value = None

        with pytest.raises(NotFound):
            WorkflowCommentService.create_reply("comment-1", "hello", "user-1")

    def test_create_reply_creates_mentions(self, mock_session: Mock) -> None:
        mock_session.get.return_value = Mock()
        reply = Mock()
        reply.id = "reply-1"
        reply.created_at = "ts"

        with (
            patch.object(service_module, "WorkflowCommentReply", return_value=reply),
            patch.object(service_module, "WorkflowCommentMention", return_value=Mock()),
            patch.object(service_module, "uuid_value", side_effect=[True, False]),
        ):
            result = WorkflowCommentService.create_reply(
                comment_id="comment-1",
                content="hello",
                created_by="user-1",
                mentioned_user_ids=["user-2", "bad-id"],
            )

        assert result == {"id": "reply-1", "created_at": "ts"}
        assert mock_session.add.call_count == 2
        mock_session.commit.assert_called_once()

    def test_update_reply_raises_not_found(self, mock_session: Mock) -> None:
        mock_session.get.return_value = None

        with pytest.raises(NotFound):
            WorkflowCommentService.update_reply("reply-1", "user-1", "hello")

    def test_update_reply_raises_forbidden(self, mock_session: Mock) -> None:
        reply = Mock()
        reply.created_by = "owner"
        mock_session.get.return_value = reply

        with pytest.raises(Forbidden):
            WorkflowCommentService.update_reply("reply-1", "intruder", "hello")

    def test_update_reply_replaces_mentions(self, mock_session: Mock) -> None:
        reply = Mock()
        reply.id = "reply-1"
        reply.comment_id = "comment-1"
        reply.created_by = "owner"
        reply.updated_at = "updated"
        mock_session.get.return_value = reply
        mock_session.scalars.return_value = _mock_scalars([Mock()])

        with patch.object(service_module, "uuid_value", side_effect=[True, False]):
            result = WorkflowCommentService.update_reply(
                reply_id="reply-1",
                user_id="owner",
                content="new",
                mentioned_user_ids=["user-2", "bad-id"],
            )

        assert result == {"id": "reply-1", "updated_at": "updated"}
        assert mock_session.delete.call_count == 1
        assert mock_session.add.call_count == 1
        mock_session.commit.assert_called_once()
        mock_session.refresh.assert_called_once_with(reply)

    def test_delete_reply_raises_forbidden(self, mock_session: Mock) -> None:
        reply = Mock()
        reply.created_by = "owner"
        mock_session.get.return_value = reply

        with pytest.raises(Forbidden):
            WorkflowCommentService.delete_reply("reply-1", "intruder")

    def test_delete_reply_removes_mentions(self, mock_session: Mock) -> None:
        reply = Mock()
        reply.created_by = "owner"
        mock_session.get.return_value = reply
        mock_session.scalars.return_value = _mock_scalars([Mock(), Mock()])

        WorkflowCommentService.delete_reply("reply-1", "owner")

        assert mock_session.delete.call_count == 3
        mock_session.commit.assert_called_once()
@ -10,7 +10,7 @@ This test suite covers:
"""

import json
from unittest.mock import MagicMock, Mock, patch
from unittest.mock import MagicMock, patch

import pytest
@ -630,79 +630,6 @@ class TestWorkflowService:
        with pytest.raises(ValueError, match="Invalid app mode"):
            workflow_service.validate_features_structure(app, features)

    # ==================== Draft Workflow Variable Update Tests ====================
    # These tests verify updating draft workflow environment/conversation variables

    def test_update_draft_workflow_environment_variables_updates_workflow(self, workflow_service, mock_db_session):
        """Test update_draft_workflow_environment_variables updates draft fields."""
        app = TestWorkflowAssociatedDataFactory.create_app_mock()
        account = TestWorkflowAssociatedDataFactory.create_account_mock()
        workflow = TestWorkflowAssociatedDataFactory.create_workflow_mock()
        variables = [Mock()]

        with (
            patch.object(workflow_service, "get_draft_workflow", return_value=workflow),
            patch("services.workflow_service.naive_utc_now", return_value="now"),
        ):
            workflow_service.update_draft_workflow_environment_variables(
                app_model=app,
                environment_variables=variables,
                account=account,
            )

        assert workflow.environment_variables == variables
        assert workflow.updated_by == account.id
        assert workflow.updated_at == "now"
        mock_db_session.session.commit.assert_called_once()

    def test_update_draft_workflow_environment_variables_raises_when_missing(self, workflow_service):
        """Test update_draft_workflow_environment_variables raises when draft missing."""
        app = TestWorkflowAssociatedDataFactory.create_app_mock()
        account = TestWorkflowAssociatedDataFactory.create_account_mock()

        with patch.object(workflow_service, "get_draft_workflow", return_value=None):
            with pytest.raises(ValueError, match="No draft workflow found."):
                workflow_service.update_draft_workflow_environment_variables(
                    app_model=app,
                    environment_variables=[],
                    account=account,
                )

    def test_update_draft_workflow_conversation_variables_updates_workflow(self, workflow_service, mock_db_session):
        """Test update_draft_workflow_conversation_variables updates draft fields."""
        app = TestWorkflowAssociatedDataFactory.create_app_mock()
        account = TestWorkflowAssociatedDataFactory.create_account_mock()
        workflow = TestWorkflowAssociatedDataFactory.create_workflow_mock()
        variables = [Mock()]

        with (
            patch.object(workflow_service, "get_draft_workflow", return_value=workflow),
            patch("services.workflow_service.naive_utc_now", return_value="now"),
        ):
            workflow_service.update_draft_workflow_conversation_variables(
                app_model=app,
                conversation_variables=variables,
                account=account,
            )

        assert workflow.conversation_variables == variables
        assert workflow.updated_by == account.id
        assert workflow.updated_at == "now"
        mock_db_session.session.commit.assert_called_once()

    def test_update_draft_workflow_conversation_variables_raises_when_missing(self, workflow_service):
        """Test update_draft_workflow_conversation_variables raises when draft missing."""
        app = TestWorkflowAssociatedDataFactory.create_app_mock()
        account = TestWorkflowAssociatedDataFactory.create_account_mock()

        with patch.object(workflow_service, "get_draft_workflow", return_value=None):
            with pytest.raises(ValueError, match="No draft workflow found."):
                workflow_service.update_draft_workflow_conversation_variables(
                    app_model=app,
                    conversation_variables=[],
                    account=account,
                )

    # ==================== Publish Workflow Tests ====================
    # These tests verify creating published versions from draft workflows
74 api/uv.lock generated
@ -583,15 +583,6 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/57/f4/a69c20ee4f660081a7dedb1ac57f29be9378e04edfcb90c526b923d4bebc/beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a", size = 142979, upload-time = "2023-04-07T15:02:50.77Z" },
]

[[package]]
name = "bidict"
version = "0.23.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9a/6e/026678aa5a830e07cd9498a05d3e7e650a4f56a42f267a53d22bcda1bdc9/bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71", size = 29093, upload-time = "2024-02-18T19:09:05.748Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/99/37/e8730c3587a65eb5645d4aba2d27aae48e8003614d6aaf15dda67f702f1f/bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5", size = 32764, upload-time = "2024-02-18T19:09:04.156Z" },
]

[[package]]
name = "billiard"
version = "4.2.3"
@ -1401,7 +1392,6 @@ dependencies = [
    { name = "flask-restx" },
    { name = "flask-sqlalchemy" },
    { name = "gevent" },
    { name = "gevent-websocket" },
    { name = "gmpy2" },
    { name = "google-api-core" },
    { name = "google-api-python-client" },
@ -1452,7 +1442,6 @@ dependencies = [
    { name = "pypdfium2" },
    { name = "python-docx" },
    { name = "python-dotenv" },
    { name = "python-socketio" },
    { name = "pyyaml" },
    { name = "readabilipy" },
    { name = "redis", extra = ["hiredis"] },
@ -1602,7 +1591,6 @@ requires-dist = [
    { name = "flask-restx", specifier = "~=1.3.0" },
    { name = "flask-sqlalchemy", specifier = "~=3.1.1" },
    { name = "gevent", specifier = "~=25.9.1" },
    { name = "gevent-websocket", specifier = "~=0.10.1" },
    { name = "gmpy2", specifier = "~=2.2.1" },
    { name = "google-api-core", specifier = "==2.18.0" },
    { name = "google-api-python-client", specifier = "==2.90.0" },
@ -1653,7 +1641,6 @@ requires-dist = [
    { name = "pypdfium2", specifier = "==5.2.0" },
    { name = "python-docx", specifier = "~=1.1.0" },
    { name = "python-dotenv", specifier = "==1.0.1" },
    { name = "python-socketio", specifier = "~=5.13.0" },
    { name = "pyyaml", specifier = "~=6.0.1" },
    { name = "readabilipy", specifier = "~=0.3.0" },
    { name = "redis", extras = ["hiredis"], specifier = "~=6.1.0" },
@ -2226,18 +2213,6 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/d5/98/caf06d5d22a7c129c1fb2fc1477306902a2c8ddfd399cd26bbbd4caf2141/gevent-25.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acd6bcd5feabf22c7c5174bd3b9535ee9f088d2bbce789f740ad8d6554b18f3", size = 1682837, upload-time = "2025-09-17T19:48:47.318Z" },
]

[[package]]
name = "gevent-websocket"
version = "0.10.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "gevent" },
]
sdist = { url = "https://files.pythonhosted.org/packages/98/d2/6fa19239ff1ab072af40ebf339acd91fb97f34617c2ee625b8e34bf42393/gevent-websocket-0.10.1.tar.gz", hash = "sha256:7eaef32968290c9121f7c35b973e2cc302ffb076d018c9068d2f5ca8b2d85fb0", size = 18366, upload-time = "2017-03-12T22:46:05.68Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/7b/84/2dc373eb6493e00c884cc11e6c059ec97abae2678d42f06bf780570b0193/gevent_websocket-0.10.1-py3-none-any.whl", hash = "sha256:17b67d91282f8f4c973eba0551183fc84f56f1c90c8f6b6b30256f31f66f5242", size = 22987, upload-time = "2017-03-12T22:46:03.611Z" },
]

[[package]]
name = "gitdb"
version = "4.0.12"
@ -5243,18 +5218,6 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863, upload-time = "2024-01-23T06:32:58.246Z" },
]

[[package]]
name = "python-engineio"
version = "4.12.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "simple-websocket" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/d8/63e5535ab21dc4998ba1cfe13690ccf122883a38f025dca24d6e56c05eba/python_engineio-4.12.3.tar.gz", hash = "sha256:35633e55ec30915e7fc8f7e34ca8d73ee0c080cec8a8cd04faf2d7396f0a7a7a", size = 91910, upload-time = "2025-09-28T06:31:36.765Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/d8/f0/c5aa0a69fd9326f013110653543f36ece4913c17921f3e1dbd78e1b423ee/python_engineio-4.12.3-py3-none-any.whl", hash = "sha256:7c099abb2a27ea7ab429c04da86ab2d82698cdd6c52406cb73766fe454feb7e1", size = 59637, upload-time = "2025-09-28T06:31:35.354Z" },
]

[[package]]
name = "python-http-client"
version = "3.3.7"
@ -5311,19 +5274,6 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/d9/4f/00be2196329ebbff56ce564aa94efb0fbc828d00de250b1980de1a34ab49/python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba", size = 472788, upload-time = "2024-08-07T17:33:28.192Z" },
]

[[package]]
name = "python-socketio"
version = "5.13.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "bidict" },
    { name = "python-engineio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/21/1a/396d50ccf06ee539fa758ce5623b59a9cb27637fc4b2dc07ed08bf495e77/python_socketio-5.13.0.tar.gz", hash = "sha256:ac4e19a0302ae812e23b712ec8b6427ca0521f7c582d6abb096e36e24a263029", size = 121125, upload-time = "2025-04-12T15:46:59.933Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/3c/32/b4fb8585d1be0f68bde7e110dffbcf354915f77ad8c778563f0ad9655c02/python_socketio-5.13.0-py3-none-any.whl", hash = "sha256:51f68d6499f2df8524668c24bcec13ba1414117cfb3a90115c559b601ab10caf", size = 77800, upload-time = "2025-04-12T15:46:58.412Z" },
]

[[package]]
name = "pytz"
version = "2025.2"
@ -5823,18 +5773,6 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" },
]

[[package]]
name = "simple-websocket"
version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "wsproto" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b0/d4/bfa032f961103eba93de583b161f0e6a5b63cebb8f2c7d0c6e6efe1e3d2e/simple_websocket-1.1.0.tar.gz", hash = "sha256:7939234e7aa067c534abdab3a9ed933ec9ce4691b0713c78acb195560aa52ae4", size = 17300, upload-time = "2024-10-10T22:39:31.412Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/52/59/0782e51887ac6b07ffd1570e0364cf901ebc36345fea669969d2084baebb/simple_websocket-1.1.0-py3-none-any.whl", hash = "sha256:4af6069630a38ed6c561010f0e11a5bc0d4ca569b36306eb257cd9a192497c8c", size = 13842, upload-time = "2024-10-10T22:39:29.645Z" },
]

[[package]]
name = "six"
version = "1.17.0"
@ -7255,18 +7193,6 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/ff/21/abdedb4cdf6ff41ebf01a74087740a709e2edb146490e4d9beea054b0b7a/wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1", size = 23362, upload-time = "2023-11-09T06:33:28.271Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wsproto"
|
||||
version = "1.2.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "h11" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c9/4a/44d3c295350d776427904d73c189e10aeae66d7f555bb2feee16d1e4ba5a/wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065", size = 53425, upload-time = "2022-08-23T19:58:21.447Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/78/58/e860788190eba3bcce367f74d29c4675466ce8dddfba85f7827588416f01/wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736", size = 24226, upload-time = "2022-08-23T19:58:19.96Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xinference-client"
|
||||
version = "1.2.2"
|
||||
|
||||
@@ -129,10 +129,6 @@ MIGRATION_ENABLED=true
# The default value is 300 seconds.
FILES_ACCESS_TIMEOUT=300

# Collaboration mode toggle
# To enable collaboration features, you also need to set SERVER_WORKER_CLASS=geventwebsocket.gunicorn.workers.GeventWebSocketWorker
ENABLE_COLLABORATION_MODE=false

# Access token expiration time in minutes
ACCESS_TOKEN_EXPIRE_MINUTES=60

@@ -168,7 +164,6 @@ SERVER_WORKER_AMOUNT=1
# Modifying it may also decrease throughput.
#
# It is strongly discouraged to change this parameter.
# If collaboration mode is enabled, it must be set to geventwebsocket.gunicorn.workers.GeventWebSocketWorker
SERVER_WORKER_CLASS=gevent

# Default number of worker connections, the default is 10.
@@ -406,8 +401,6 @@ CONSOLE_CORS_ALLOW_ORIGINS=*
COOKIE_DOMAIN=
# When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
NEXT_PUBLIC_COOKIE_DOMAIN=
# WebSocket server URL.
NEXT_PUBLIC_SOCKET_URL=ws://localhost
NEXT_PUBLIC_BATCH_CONCURRENCY=5

# ------------------------------

@@ -139,7 +139,6 @@ services:
      APP_API_URL: ${APP_API_URL:-}
      AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
      NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
      NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
      SENTRY_DSN: ${WEB_SENTRY_DSN:-}
      NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}
      TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}

@@ -33,7 +33,6 @@ x-shared-env: &shared-api-worker-env
  OPENAI_API_BASE: ${OPENAI_API_BASE:-https://api.openai.com/v1}
  MIGRATION_ENABLED: ${MIGRATION_ENABLED:-true}
  FILES_ACCESS_TIMEOUT: ${FILES_ACCESS_TIMEOUT:-300}
  ENABLE_COLLABORATION_MODE: ${ENABLE_COLLABORATION_MODE:-false}
  ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES:-60}
  REFRESH_TOKEN_EXPIRE_DAYS: ${REFRESH_TOKEN_EXPIRE_DAYS:-30}
  APP_DEFAULT_ACTIVE_REQUESTS: ${APP_DEFAULT_ACTIVE_REQUESTS:-0}
@@ -110,7 +109,6 @@ x-shared-env: &shared-api-worker-env
  CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*}
  COOKIE_DOMAIN: ${COOKIE_DOMAIN:-}
  NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
  NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
  NEXT_PUBLIC_BATCH_CONCURRENCY: ${NEXT_PUBLIC_BATCH_CONCURRENCY:-5}
  STORAGE_TYPE: ${STORAGE_TYPE:-opendal}
  OPENDAL_SCHEME: ${OPENDAL_SCHEME:-fs}
@@ -826,7 +824,6 @@ services:
      APP_API_URL: ${APP_API_URL:-}
      AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
      NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
      NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
      SENTRY_DSN: ${WEB_SENTRY_DSN:-}
      NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}
      TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}

@@ -14,14 +14,6 @@ server {
        include proxy.conf;
    }

    location /socket.io/ {
        proxy_pass http://api:5001;
        include proxy.conf;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_cache_bypass $http_upgrade;
    }

    location /v1 {
        proxy_pass http://api:5001;
        include proxy.conf;

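Note: the `/socket.io/` location above is the reverse-proxy side of the collaboration WebSocket channel; the `Upgrade`/`Connection "upgrade"` headers are what let nginx hand the HTTP connection over to the WebSocket protocol. For orientation only, a minimal sketch of how a browser client would have reached this endpoint, assuming the standard `socket.io-client` API and the `NEXT_PUBLIC_SOCKET_URL` value from the env entries above:

```ts
import { io } from 'socket.io-client'

// Sketch: connect to the endpoint nginx proxies through to api:5001.
// NEXT_PUBLIC_SOCKET_URL comes from the env entries shown earlier;
// the fallback mirrors its documented default.
const socket = io(process.env.NEXT_PUBLIC_SOCKET_URL ?? 'ws://localhost', {
  path: '/socket.io/',       // must match the nginx location block
  transports: ['websocket'], // skip long-polling; require the upgrade
})

socket.on('connect', () => console.log('connected', socket.id))
```
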
@@ -5,7 +5,7 @@ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Port $server_port;
proxy_http_version 1.1;
# proxy_set_header Connection "";
proxy_set_header Connection "";
proxy_buffering off;
proxy_read_timeout ${NGINX_PROXY_READ_TIMEOUT};
proxy_send_timeout ${NGINX_PROXY_SEND_TIMEOUT};

@@ -14,8 +14,6 @@ NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
# When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
NEXT_PUBLIC_COOKIE_DOMAIN=
# WebSocket server URL.
NEXT_PUBLIC_SOCKET_URL=ws://localhost:5001

# The API PREFIX for MARKETPLACE
NEXT_PUBLIC_MARKETPLACE_API_PREFIX=https://marketplace.dify.ai/api/v1

@@ -1,27 +1,15 @@
import type { StorybookConfig } from '@storybook/nextjs'
import path from 'node:path'
import { fileURLToPath } from 'node:url'

const storybookDir = path.dirname(fileURLToPath(import.meta.url))
import type { StorybookConfig } from '@storybook/nextjs-vite'

const config: StorybookConfig = {
  stories: ['../app/components/**/*.stories.@(js|jsx|mjs|ts|tsx)'],
  addons: [
    '@storybook/addon-onboarding',
    // Not working with Storybook Vite framework
    // '@storybook/addon-onboarding',
    '@storybook/addon-links',
    '@storybook/addon-docs',
    '@chromatic-com/storybook',
  ],
  framework: {
    name: '@storybook/nextjs',
    options: {
      builder: {
        useSWC: true,
        lazyCompilation: false,
      },
      nextConfigPath: undefined,
    },
  },
  framework: '@storybook/nextjs-vite',
  staticDirs: ['../public'],
  core: {
    disableWhatsNewNotifications: true,
@@ -29,17 +17,5 @@ const config: StorybookConfig = {
  docs: {
    defaultName: 'Documentation',
  },
  webpackFinal: async (config) => {
    // Add alias to mock problematic modules with circular dependencies
    config.resolve = config.resolve || {}
    config.resolve.alias = {
      ...config.resolve.alias,
      // Mock the plugin index files to avoid circular dependencies
      [path.resolve(storybookDir, '../app/components/base/prompt-editor/plugins/context-block/index.tsx')]: path.resolve(storybookDir, '__mocks__/context-block.tsx'),
      [path.resolve(storybookDir, '../app/components/base/prompt-editor/plugins/history-block/index.tsx')]: path.resolve(storybookDir, '__mocks__/history-block.tsx'),
      [path.resolve(storybookDir, '../app/components/base/prompt-editor/plugins/query-block/index.tsx')]: path.resolve(storybookDir, '__mocks__/query-block.tsx'),
    }
    return config
  },
}
export default config

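The hunk above replaces the webpack-based `@storybook/nextjs` framework, and with it the `webpackFinal` alias hook that mocked the prompt-editor plugins. If those circular-dependency mocks are ever needed again under the Vite builder, the equivalent hook would be `viteFinal`; a minimal sketch, assuming `@storybook/nextjs-vite` exposes `viteFinal` the way other Vite-based Storybook frameworks do and that `resolve.alias` is an object here:

```ts
import type { StorybookConfig } from '@storybook/nextjs-vite'
import path from 'node:path'
import { fileURLToPath } from 'node:url'

const storybookDir = path.dirname(fileURLToPath(import.meta.url))

const config: StorybookConfig = {
  stories: ['../app/components/**/*.stories.@(js|jsx|mjs|ts|tsx)'],
  framework: '@storybook/nextjs-vite',
  // Hypothetical re-creation of the removed webpackFinal aliases under Vite.
  viteFinal: async (viteConfig) => {
    viteConfig.resolve = viteConfig.resolve ?? {}
    viteConfig.resolve.alias = {
      ...(viteConfig.resolve.alias as Record<string, string>),
      [path.resolve(storybookDir, '../app/components/base/prompt-editor/plugins/context-block/index.tsx')]:
        path.resolve(storybookDir, '__mocks__/context-block.tsx'),
    }
    return viteConfig
  },
}

export default config
```
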
@@ -43,8 +43,6 @@ NEXT_PUBLIC_EDITION=SELF_HOSTED
# example: http://cloud.dify.ai/console/api
NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
NEXT_PUBLIC_COOKIE_DOMAIN=
# WebSocket server URL.
NEXT_PUBLIC_SOCKET_URL=ws://localhost:5001
# The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from
# console or api domain.
# example: http://udify.app/api

@@ -5,8 +5,7 @@ import type { BlockEnum } from '@/app/components/workflow/types'
import type { UpdateAppSiteCodeResponse } from '@/models/app'
import type { App } from '@/types/app'
import type { I18nKeysByPrefix } from '@/types/i18n'
import * as React from 'react'
import { useCallback, useEffect, useMemo } from 'react'
import { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import AppCard from '@/app/components/app/overview/app-card'
@@ -15,8 +14,6 @@ import { useStore as useAppStore } from '@/app/components/app/store'
import Loading from '@/app/components/base/loading'
import { ToastContext } from '@/app/components/base/toast'
import MCPServiceCard from '@/app/components/tools/mcp/mcp-service-card'
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
import { isTriggerNode } from '@/app/components/workflow/types'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import {
@@ -77,59 +74,28 @@ const CardView: FC<ICardViewProps> = ({ appId, isInPanel, className }) => {
    ? buildTriggerModeMessage(t('mcp.server.title', { ns: 'tools' }))
    : null

  const updateAppDetail = useCallback(async () => {
  const updateAppDetail = async () => {
    try {
      const res = await fetchAppDetail({ url: '/apps', id: appId })
      setAppDetail({ ...res })
    }
    catch (error) {
      console.error(error)
    }
  }, [appId, setAppDetail])
    catch (error) { console.error(error) }
  }

  const handleCallbackResult = (err: Error | null, message?: I18nKeysByPrefix<'common', 'actionMsg.'>) => {
    const type = err ? 'error' : 'success'

    message ||= (type === 'success' ? 'modifiedSuccessfully' : 'modifiedUnsuccessfully')

    if (type === 'success') {
    if (type === 'success')
      updateAppDetail()

      // Emit collaboration event to notify other clients of app state changes
      const socket = webSocketClient.getSocket(appId)
      if (socket) {
        socket.emit('collaboration_event', {
          type: 'app_state_update',
          data: { timestamp: Date.now() },
          timestamp: Date.now(),
        })
      }
    }

    notify({
      type,
      message: t(`actionMsg.${message}`, { ns: 'common' }) as string,
    })
  }

  // Listen for collaborative app state updates from other clients
  useEffect(() => {
    if (!appId)
      return

    const unsubscribe = collaborationManager.onAppStateUpdate(async () => {
      try {
        // Update app detail when other clients modify app state
        await updateAppDetail()
      }
      catch (error) {
        console.error('app state update failed:', error)
      }
    })

    return unsubscribe
  }, [appId, updateAppDetail])

  const onChangeSiteStatus = async (value: boolean) => {
    const [err] = await asyncRunSafe<App>(
      updateAppSiteStatus({

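The removed effect above illustrates the subscription contract the collaboration layer uses: each `collaborationManager.on*` registration returns its own unsubscribe function, which the `useEffect` hands back to React as the cleanup. A minimal sketch of that pattern in isolation (the manager's import path is taken from the removed code; its internals are assumed):

```ts
import { useEffect } from 'react'
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'

// Sketch: subscribe on mount, unsubscribe on unmount or when appId changes.
function useAppStateSync(appId: string | undefined, refresh: () => Promise<void>) {
  useEffect(() => {
    if (!appId)
      return
    const unsubscribe = collaborationManager.onAppStateUpdate(() => {
      refresh().catch(error => console.error('app state update failed:', error))
    })
    return unsubscribe // React runs this as the effect cleanup
  }, [appId, refresh])
}
```
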
@@ -14,7 +14,7 @@ import {
import dynamic from 'next/dynamic'
import { useRouter } from 'next/navigation'
import * as React from 'react'
import { useCallback, useEffect, useState } from 'react'
import { useCallback, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import CardView from '@/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/card-view'
@@ -22,12 +22,10 @@ import { useStore as useAppStore } from '@/app/components/app/store'
import Button from '@/app/components/base/button'
import ContentDialog from '@/app/components/base/content-dialog'
import { ToastContext } from '@/app/components/base/toast'
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import { useAppContext } from '@/context/app-context'
import { useProviderContext } from '@/context/provider-context'
import { copyApp, deleteApp, exportAppConfig, fetchAppDetail, updateAppInfo } from '@/service/apps'
import { copyApp, deleteApp, exportAppConfig, updateAppInfo } from '@/service/apps'
import { useInvalidateAppList } from '@/service/use-apps'
import { fetchWorkflowDraft } from '@/service/workflow'
import { AppModeEnum } from '@/types/app'
@@ -79,19 +77,6 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
  const [secretEnvList, setSecretEnvList] = useState<EnvironmentVariable[]>([])
  const [showExportWarning, setShowExportWarning] = useState(false)

  const emitAppMetaUpdate = useCallback(() => {
    if (!appDetail?.id)
      return
    const socket = webSocketClient.getSocket(appDetail.id)
    if (socket) {
      socket.emit('collaboration_event', {
        type: 'app_meta_update',
        data: { timestamp: Date.now() },
        timestamp: Date.now(),
      })
    }
  }, [appDetail])

  const onEdit: CreateAppModalProps['onConfirm'] = useCallback(async ({
    name,
    icon_type,
@@ -120,12 +105,11 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
      message: t('editDone', { ns: 'app' }),
    })
    setAppDetail(app)
    emitAppMetaUpdate()
  }
  catch {
    notify({ type: 'error', message: t('editFailed', { ns: 'app' }) })
  }
}, [appDetail, notify, setAppDetail, t, emitAppMetaUpdate])
}, [appDetail, notify, setAppDetail, t])

const onCopy: DuplicateAppModalProps['onConfirm'] = async ({ name, icon_type, icon, icon_background }) => {
  if (!appDetail)
@@ -223,23 +207,6 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
  setShowConfirmDelete(false)
}, [appDetail, invalidateAppList, notify, onPlanInfoChanged, replace, setAppDetail, t])

useEffect(() => {
  if (!appDetail?.id)
    return

  const unsubscribe = collaborationManager.onAppMetaUpdate(async () => {
    try {
      const res = await fetchAppDetail({ url: '/apps', id: appDetail.id })
      setAppDetail({ ...res })
    }
    catch (error) {
      console.error('failed to refresh app detail from collaboration update:', error)
    }
  })

  return unsubscribe
}, [appDetail?.id, setAppDetail])

const { isCurrentWorkspaceEditor } = useAppContext()

if (!appDetail)

@@ -1,7 +1,6 @@
import type { AppPublisherProps } from '@/app/components/app/app-publisher'
import type { ModelAndParameter } from '@/app/components/app/configuration/debug/types'
import type { FileUpload } from '@/app/components/base/features/types'
import type { PublishWorkflowParams } from '@/types/workflow'
import { produce } from 'immer'
import * as React from 'react'
import { useCallback, useState } from 'react'
@@ -14,7 +13,7 @@ import { SupportUploadFileTypes } from '@/app/components/workflow/types'
import { Resolution } from '@/types/app'

type Props = Omit<AppPublisherProps, 'onPublish'> & {
  onPublish?: (params?: ModelAndParameter | PublishWorkflowParams, features?: any) => Promise<any> | any
  onPublish?: (modelAndParameter?: ModelAndParameter, features?: any) => Promise<any> | any
  publishedConfig?: any
  resetAppConfig?: () => void
}
@@ -63,8 +62,8 @@ const FeaturesWrappedAppPublisher = (props: Props) => {
    setRestoreConfirmOpen(false)
  }, [featuresStore, props])

  const handlePublish = useCallback((params?: ModelAndParameter | PublishWorkflowParams) => {
    return props.onPublish?.(params, features)
  const handlePublish = useCallback((modelAndParameter?: ModelAndParameter) => {
    return props.onPublish?.(modelAndParameter, features)
  }, [features, props])

  return (

@@ -1,7 +1,5 @@
import type { ModelAndParameter } from '../configuration/debug/types'
import type { CollaborationUpdate } from '@/app/components/workflow/collaboration/types/collaboration'
import type { InputVar, Variable } from '@/app/components/workflow/types'
import type { InstalledApp } from '@/models/explore'
import type { I18nKeysByPrefix } from '@/types/i18n'
import type { PublishWorkflowParams } from '@/types/workflow'
import {
@@ -20,7 +18,6 @@ import { useKeyPress } from 'ahooks'
import {
  memo,
  useCallback,
  useContext,
  useEffect,
  useMemo,
  useState,
@@ -38,9 +35,6 @@ import {
} from '@/app/components/base/portal-to-follow-elem'
import UpgradeBtn from '@/app/components/billing/upgrade-btn'
import WorkflowToolConfigureButton from '@/app/components/tools/workflow-tool/configure-button'
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
import { WorkflowContext } from '@/app/components/workflow/context'
import { appDefaultIconBackground } from '@/config'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { useAsyncWindowOpen } from '@/hooks/use-async-window-open'
@@ -49,8 +43,6 @@ import { AccessMode } from '@/models/access-control'
import { useAppWhiteListSubjects, useGetUserCanAccessApp } from '@/service/access-control'
import { fetchAppDetailDirect } from '@/service/apps'
import { fetchInstalledAppList } from '@/service/explore'
import { useInvalidateAppWorkflow } from '@/service/use-workflow'
import { fetchPublishedWorkflow } from '@/service/workflow'
import { AppModeEnum } from '@/types/app'
import { basePath } from '@/utils/var'
import Divider from '../../base/divider'
@@ -64,10 +56,6 @@ import SuggestedAction from './suggested-action'

type AccessModeLabel = I18nKeysByPrefix<'app', 'accessControlDialog.accessItems.'>

type InstalledAppsResponse = {
  installed_apps?: InstalledApp[]
}

const ACCESS_MODE_MAP: Record<AccessMode, { label: AccessModeLabel, icon: React.ElementType }> = {
  [AccessMode.ORGANIZATION]: {
    label: 'organization',
@@ -114,8 +102,8 @@ export type AppPublisherProps = {
  debugWithMultipleModel?: boolean
  multipleModelConfigs?: ModelAndParameter[]
  /** modelAndParameter is passed when debugWithMultipleModel is true */
  onPublish?: (params?: ModelAndParameter | PublishWorkflowParams) => Promise<void> | void
  onRestore?: () => Promise<void> | void
  onPublish?: (params?: any) => Promise<any> | any
  onRestore?: () => Promise<any> | any
  onToggle?: (state: boolean) => void
  crossAxisOffset?: number
  toolPublished?: boolean
@@ -158,7 +146,6 @@ const AppPublisher = ({
  const [isAppAccessSet, setIsAppAccessSet] = useState(true)
  const [embeddingModalOpen, setEmbeddingModalOpen] = useState(false)

  const workflowStore = useContext(WorkflowContext)
  const appDetail = useAppStore(state => state.appDetail)
  const setAppDetail = useAppStore(s => s.setAppDetail)
  const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
@@ -171,7 +158,6 @@ const AppPublisher = ({

  const { data: userCanAccessApp, isLoading: isGettingUserCanAccessApp, refetch } = useGetUserCanAccessApp({ appId: appDetail?.id, enabled: false })
  const { data: appAccessSubjects, isLoading: isGettingAppWhiteListSubjects } = useAppWhiteListSubjects(appDetail?.id, open && systemFeatures.webapp_auth.enabled && appDetail?.access_mode === AccessMode.SPECIFIC_GROUPS_MEMBERS)
  const invalidateAppWorkflow = useInvalidateAppWorkflow()
  const openAsyncWindow = useAsyncWindowOpen()

  const noAccessPermission = useMemo(() => systemFeatures.webapp_auth.enabled && appDetail && appDetail.access_mode !== AccessMode.EXTERNAL_MEMBERS && !userCanAccessApp?.result, [systemFeatures, appDetail, userCanAccessApp])
@@ -207,39 +193,12 @@ const AppPublisher = ({
    try {
      await onPublish?.(params)
      setPublished(true)

      const appId = appDetail?.id
      const socket = appId ? webSocketClient.getSocket(appId) : null
      console.warn('[app-publisher] publish success', {
        appId,
        hasSocket: Boolean(socket),
      })
      if (appId)
        invalidateAppWorkflow(appId)
      else
        console.warn('[app-publisher] missing appId, skip workflow invalidate and socket emit')
      if (socket) {
        const timestamp = Date.now()
        socket.emit('collaboration_event', {
          type: 'app_publish_update',
          data: {
            action: 'published',
            timestamp,
          },
          timestamp,
        })
      }
      else if (appId) {
        console.warn('[app-publisher] socket not ready, skip collaboration_event emit', { appId })
      }

      trackEvent('app_published_time', { action_mode: 'app', app_id: appDetail?.id, app_name: appDetail?.name })
    }
    catch (error) {
      console.warn('[app-publisher] publish failed', error)
    catch {
      setPublished(false)
    }
  }, [appDetail, onPublish, invalidateAppWorkflow])
  }, [appDetail, onPublish])

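Taken together, the emits removed in this file and in `card-view.tsx`/`app-info.tsx` pin down the wire shape of a `collaboration_event`. A sketch of that payload and the publish-side emit, with the type name invented here purely for illustration:

```ts
// Hypothetical type, inferred from the removed socket.emit calls.
type CollaborationEvent = {
  type: 'app_state_update' | 'app_meta_update' | 'app_publish_update'
  data: Record<string, unknown>
  timestamp: number
}

// Minimal emitter matching the removed publish path.
function emitPublishUpdate(socket: { emit: (event: string, payload: CollaborationEvent) => void }) {
  const timestamp = Date.now()
  socket.emit('collaboration_event', {
    type: 'app_publish_update',
    data: { action: 'published', timestamp },
    timestamp,
  })
}
```
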
  const handleRestore = useCallback(async () => {
    try {
@@ -268,10 +227,9 @@ const AppPublisher = ({
    await openAsyncWindow(async () => {
      if (!appDetail?.id)
        throw new Error('App not found')
      const response = (await fetchInstalledAppList(appDetail?.id)) as InstalledAppsResponse
      const installedApps = response?.installed_apps
      if (installedApps?.length)
        return `${basePath}/explore/installed/${installedApps[0].id}`
      const { installed_apps }: any = await fetchInstalledAppList(appDetail?.id) || {}
      if (installed_apps?.length > 0)
        return `${basePath}/explore/installed/${installed_apps[0].id}`
      throw new Error('No app found in Explore')
    }, {
      onError: (err) => {
@@ -299,29 +257,6 @@ const AppPublisher = ({
    handlePublish()
  }, { exactMatch: true, useCapture: true })

  useEffect(() => {
    const appId = appDetail?.id
    if (!appId)
      return

    const unsubscribe = collaborationManager.onAppPublishUpdate((update: CollaborationUpdate) => {
      const action = typeof update.data.action === 'string' ? update.data.action : undefined
      if (action === 'published') {
        invalidateAppWorkflow(appId)
        fetchPublishedWorkflow(`/apps/${appId}/workflows/publish`)
          .then((publishedWorkflow) => {
            if (publishedWorkflow?.created_at)
              workflowStore?.getState().setPublishedAt(publishedWorkflow.created_at)
          })
          .catch((error) => {
            console.warn('[app-publisher] refresh published workflow failed', error)
          })
      }
    })

    return unsubscribe
  }, [appDetail?.id, invalidateAppWorkflow, workflowStore])

  const hasPublishedVersion = !!publishedAt
  const workflowToolDisabled = !hasPublishedVersion || !workflowToolAvailable
  const workflowToolMessage = workflowToolDisabled ? t('common.workflowAsToolDisabledHint', { ns: 'workflow' }) : undefined

@@ -18,7 +18,6 @@ import type {
  TextToSpeechConfig,
} from '@/models/debug'
import type { ModelConfig as BackendModelConfig, UserInputFormItem, VisionSettings } from '@/types/app'
import type { PublishWorkflowParams } from '@/types/workflow'
import { CodeBracketIcon } from '@heroicons/react/20/solid'
import { useBoolean, useGetState } from 'ahooks'
import { clone } from 'es-toolkit/object'
@@ -761,8 +760,6 @@ const Configuration: FC = () => {
    else { return promptEmpty }
  })()
  const contextVarEmpty = mode === AppModeEnum.COMPLETION && dataSets.length > 0 && !hasSetContextVar
  const onPublish = async (params?: ModelAndParameter | PublishWorkflowParams, features?: FeaturesData) => {
    const modelAndParameter = params && 'model' in params ? params : undefined
  const onPublish = async (modelAndParameter?: ModelAndParameter, features?: FeaturesData) => {
    const modelId = modelAndParameter?.model || modelConfig.model_id
    const promptTemplate = modelConfig.configs.prompt_template
    const promptVariables = modelConfig.configs.prompt_variables

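The removed line shows how the wider `onPublish` signature was narrowed: `'model' in params` is a TypeScript `in`-operator type guard that separates `ModelAndParameter` from `PublishWorkflowParams`. A standalone sketch of that idiom (only the `model` field is attested by the diff; the other fields here are placeholders, not the real types):

```ts
type ModelAndParameter = { model: string; provider?: string } // fields beyond `model` assumed
type PublishWorkflowParams = { marked_name?: string }         // fields assumed

// `'model' in params` narrows the union: only ModelAndParameter has `model`.
function pickModelAndParameter(
  params?: ModelAndParameter | PublishWorkflowParams,
): ModelAndParameter | undefined {
  return params && 'model' in params ? params : undefined
}
```
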
@@ -1,3 +1,6 @@
/**
 * @vitest-environment jsdom
 */
import type { ReactNode } from 'react'
import type { ModalContextState } from '@/context/modal-context'
import type { ProviderContextState } from '@/context/provider-context'

@@ -5,7 +5,6 @@ import type { HtmlContentProps } from '@/app/components/base/popover'
import type { Tag } from '@/app/components/base/tag-management/constant'
import type { CreateAppModalProps } from '@/app/components/explore/create-app-modal'
import type { EnvironmentVariable } from '@/app/components/workflow/types'
import type { WorkflowOnlineUser } from '@/models/app'
import type { App } from '@/types/app'
import { RiBuildingLine, RiGlobalLine, RiLockLine, RiMoreFill, RiVerifiedBadgeLine } from '@remixicon/react'
import dynamic from 'next/dynamic'
@@ -21,7 +20,6 @@ import CustomPopover from '@/app/components/base/popover'
import TagSelector from '@/app/components/base/tag-management/selector'
import Toast, { ToastContext } from '@/app/components/base/toast'
import Tooltip from '@/app/components/base/tooltip'
import { UserAvatarList } from '@/app/components/base/user-avatar-list'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import { useAppContext } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'
@@ -60,10 +58,9 @@ const AccessControl = dynamic(() => import('@/app/components/app/app-access-cont
export type AppCardProps = {
  app: App
  onRefresh?: () => void
  onlineUsers?: WorkflowOnlineUser[]
}

const AppCard = ({ app, onRefresh, onlineUsers = [] }: AppCardProps) => {
const AppCard = ({ app, onRefresh }: AppCardProps) => {
  const { t } = useTranslation()
  const { notify } = useContext(ToastContext)
  const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
@@ -351,19 +348,6 @@ const AppCard = ({ app, onRefresh, onlineUsers = [] }: AppCardProps) => {
    return `${t('segment.editedAt', { ns: 'datasetDocuments' })} ${timeText}`
  }, [app.updated_at, app.created_at])

  const onlineUserAvatars = useMemo(() => {
    if (!onlineUsers.length)
      return []

    return onlineUsers
      .map(user => ({
        id: user.user_id || user.sid || '',
        name: user.username || 'User',
        avatar_url: user.avatar || undefined,
      }))
      .filter(user => !!user.id)
  }, [onlineUsers])

  return (
    <>
      <div
@@ -416,11 +400,6 @@ const AppCard = ({ app, onRefresh, onlineUsers = [] }: AppCardProps) => {
          </Tooltip>
        )}
      </div>
      <div>
        {onlineUserAvatars.length > 0 && (
          <UserAvatarList users={onlineUserAvatars} maxVisible={3} size={20} />
        )}
      </div>
    </div>
    <div className="title-wrapper h-[90px] px-[14px] text-xs leading-normal text-text-tertiary">
      <div

@@ -1,4 +1,3 @@
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { act, fireEvent, render, screen } from '@testing-library/react'
import * as React from 'react'
import { useStore as useTagStore } from '@/app/components/base/tag-management/store'
@@ -142,13 +141,9 @@ vi.mock('@/app/components/base/tag-management/filter', () => ({
}))

// Mock config
vi.mock('@/config', async (importOriginal) => {
  const actual = await importOriginal<typeof import('@/config')>()
  return {
    ...actual,
    NEED_REFRESH_APP_LIST_KEY: 'needRefreshAppList',
  }
})
vi.mock('@/config', () => ({
  NEED_REFRESH_APP_LIST_KEY: 'needRefreshAppList',
}))

// Mock pay hook
vi.mock('@/hooks/use-pay', () => ({
@@ -239,21 +234,6 @@ beforeAll(() => {
  } as unknown as typeof IntersectionObserver
})

const renderList = () => {
  const queryClient = new QueryClient({
    defaultOptions: {
      queries: {
        retry: false,
      },
    },
  })
  return render(
    <QueryClientProvider client={queryClient}>
      <List />
    </QueryClientProvider>,
  )
}

describe('List', () => {
  beforeEach(() => {
    vi.clearAllMocks()
@@ -280,13 +260,13 @@ describe('List', () => {

  describe('Rendering', () => {
    it('should render without crashing', () => {
      renderList()
      render(<List />)
      // Tab slider renders app type tabs
      expect(screen.getByText('app.types.all')).toBeInTheDocument()
    })

    it('should render tab slider with all app types', () => {
      renderList()
      render(<List />)

      expect(screen.getByText('app.types.all')).toBeInTheDocument()
      expect(screen.getByText('app.types.workflow')).toBeInTheDocument()
@@ -297,48 +277,48 @@ describe('List', () => {
    })

    it('should render search input', () => {
      renderList()
      render(<List />)
      // Input component renders a searchbox
      expect(screen.getByRole('textbox')).toBeInTheDocument()
    })

    it('should render tag filter', () => {
      renderList()
      render(<List />)
      // Tag filter renders with placeholder text
      expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
    })

    it('should render created by me checkbox', () => {
      renderList()
      render(<List />)
      expect(screen.getByText('app.showMyCreatedAppsOnly')).toBeInTheDocument()
    })

    it('should render app cards when apps exist', () => {
      renderList()
      render(<List />)

      expect(screen.getByTestId('app-card-app-1')).toBeInTheDocument()
      expect(screen.getByTestId('app-card-app-2')).toBeInTheDocument()
    })

    it('should render new app card for editors', () => {
      renderList()
      render(<List />)
      expect(screen.getByTestId('new-app-card')).toBeInTheDocument()
    })

    it('should render footer when branding is disabled', () => {
      renderList()
      render(<List />)
      expect(screen.getByTestId('footer')).toBeInTheDocument()
    })

    it('should render drop DSL hint for editors', () => {
      renderList()
      render(<List />)
      expect(screen.getByText('app.newApp.dropDSLToCreateApp')).toBeInTheDocument()
    })
  })

  describe('Tab Navigation', () => {
    it('should call setActiveTab when tab is clicked', () => {
      renderList()
      render(<List />)

      fireEvent.click(screen.getByText('app.types.workflow'))

@@ -346,7 +326,7 @@ describe('List', () => {
    })

    it('should call setActiveTab for all tab', () => {
      renderList()
      render(<List />)

      fireEvent.click(screen.getByText('app.types.all'))

@@ -356,12 +336,12 @@ describe('List', () => {

  describe('Search Functionality', () => {
    it('should render search input field', () => {
      renderList()
      render(<List />)
      expect(screen.getByRole('textbox')).toBeInTheDocument()
    })

    it('should handle search input change', () => {
      renderList()
      render(<List />)

      const input = screen.getByRole('textbox')
      fireEvent.change(input, { target: { value: 'test search' } })
@@ -370,7 +350,7 @@ describe('List', () => {
    })

    it('should handle search input interaction', () => {
      renderList()
      render(<List />)

      const input = screen.getByRole('textbox')
      expect(input).toBeInTheDocument()
@@ -380,7 +360,7 @@ describe('List', () => {
      // Set initial keywords to make clear button visible
      mockQueryState.keywords = 'existing search'

      renderList()
      render(<List />)

      // Find and click clear button (Input component uses .group class for clear icon container)
      const clearButton = document.querySelector('.group')
@@ -395,12 +375,12 @@ describe('List', () => {

  describe('Tag Filter', () => {
    it('should render tag filter component', () => {
      renderList()
      render(<List />)
      expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
    })

    it('should render tag filter with placeholder', () => {
      renderList()
      render(<List />)

      // Tag filter is rendered
      expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
@@ -409,12 +389,12 @@ describe('List', () => {

  describe('Created By Me Filter', () => {
    it('should render checkbox with correct label', () => {
      renderList()
      render(<List />)
      expect(screen.getByText('app.showMyCreatedAppsOnly')).toBeInTheDocument()
    })

    it('should handle checkbox change', () => {
      renderList()
      render(<List />)

      // Checkbox component uses data-testid="checkbox-{id}"
      // CheckboxWithLabel doesn't pass testId, so id is undefined
@@ -429,7 +409,7 @@ describe('List', () => {
    it('should not render new app card for non-editors', () => {
      mockIsCurrentWorkspaceEditor.mockReturnValue(false)

      renderList()
      render(<List />)

      expect(screen.queryByTestId('new-app-card')).not.toBeInTheDocument()
    })
@@ -437,7 +417,7 @@ describe('List', () => {
    it('should not render drop DSL hint for non-editors', () => {
      mockIsCurrentWorkspaceEditor.mockReturnValue(false)

      renderList()
      render(<List />)

      expect(screen.queryByText(/drop dsl file to create app/i)).not.toBeInTheDocument()
    })
@@ -447,7 +427,7 @@ describe('List', () => {
    it('should redirect dataset operators to datasets page', () => {
      mockIsCurrentWorkspaceDatasetOperator.mockReturnValue(true)

      renderList()
      render(<List />)

      expect(mockReplace).toHaveBeenCalledWith('/datasets')
    })
@@ -457,7 +437,7 @@ describe('List', () => {
    it('should call refetch when refresh key is set in localStorage', () => {
      localStorage.setItem('needRefreshAppList', '1')

      renderList()
      render(<List />)

      expect(mockRefetch).toHaveBeenCalled()
      expect(localStorage.getItem('needRefreshAppList')).toBeNull()
@@ -466,23 +446,22 @@ describe('List', () => {

  describe('Edge Cases', () => {
    it('should handle multiple renders without issues', () => {
      const { unmount } = renderList()
      const { rerender } = render(<List />)
      expect(screen.getByText('app.types.all')).toBeInTheDocument()

      unmount()
      renderList()
      rerender(<List />)
      expect(screen.getByText('app.types.all')).toBeInTheDocument()
    })

    it('should render app cards correctly', () => {
      renderList()
      render(<List />)

      expect(screen.getByText('Test App 1')).toBeInTheDocument()
      expect(screen.getByText('Test App 2')).toBeInTheDocument()
    })

    it('should render with all filter options visible', () => {
      renderList()
      render(<List />)

      expect(screen.getByRole('textbox')).toBeInTheDocument()
      expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
@@ -492,14 +471,14 @@ describe('List', () => {

  describe('Dragging State', () => {
    it('should show drop hint when DSL feature is enabled for editors', () => {
      renderList()
      render(<List />)
      expect(screen.getByText('app.newApp.dropDSLToCreateApp')).toBeInTheDocument()
    })
  })

  describe('App Type Tabs', () => {
    it('should render all app type tabs', () => {
      renderList()
      render(<List />)

      expect(screen.getByText('app.types.all')).toBeInTheDocument()
      expect(screen.getByText('app.types.workflow')).toBeInTheDocument()
@@ -510,7 +489,7 @@ describe('List', () => {
    })

    it('should call setActiveTab for each app type', () => {
      renderList()
      render(<List />)

      const appTypeTexts = [
        { mode: AppModeEnum.WORKFLOW, text: 'app.types.workflow' },
@@ -529,7 +508,7 @@ describe('List', () => {

  describe('Search and Filter Integration', () => {
    it('should display search input with correct attributes', () => {
      renderList()
      render(<List />)

      const input = screen.getByRole('textbox')
      expect(input).toBeInTheDocument()
@@ -537,13 +516,13 @@ describe('List', () => {
    })

    it('should have tag filter component', () => {
      renderList()
      render(<List />)

      expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
    })

    it('should display created by me label', () => {
      renderList()
      render(<List />)

      expect(screen.getByText('app.showMyCreatedAppsOnly')).toBeInTheDocument()
    })
@@ -551,14 +530,14 @@ describe('List', () => {

  describe('App List Display', () => {
    it('should display all app cards from data', () => {
      renderList()
      render(<List />)

      expect(screen.getByTestId('app-card-app-1')).toBeInTheDocument()
      expect(screen.getByTestId('app-card-app-2')).toBeInTheDocument()
    })

    it('should display app names correctly', () => {
      renderList()
      render(<List />)

      expect(screen.getByText('Test App 1')).toBeInTheDocument()
      expect(screen.getByText('Test App 2')).toBeInTheDocument()
@@ -567,7 +546,7 @@ describe('List', () => {

  describe('Footer Visibility', () => {
    it('should render footer when branding is disabled', () => {
      renderList()
      render(<List />)

      expect(screen.getByTestId('footer')).toBeInTheDocument()
    })
@@ -579,14 +558,14 @@ describe('List', () => {
  describe('Additional Coverage', () => {
    it('should render dragging state overlay when dragging', () => {
      mockDragging = true
      const { container } = renderList()
      const { container } = render(<List />)

      // Component should render successfully with dragging state
      expect(container).toBeInTheDocument()
    })

    it('should handle app mode filter in query params', () => {
      renderList()
      render(<List />)

      const workflowTab = screen.getByText('app.types.workflow')
      fireEvent.click(workflowTab)
@@ -595,7 +574,7 @@ describe('List', () => {
    })

    it('should render new app card for editors', () => {
      renderList()
      render(<List />)

      expect(screen.getByTestId('new-app-card')).toBeInTheDocument()
    })
@@ -603,7 +582,7 @@ describe('List', () => {

  describe('DSL File Drop', () => {
    it('should handle DSL file drop and show modal', () => {
      renderList()
      render(<List />)

      // Simulate DSL file drop via the callback
      const mockFile = new File(['test content'], 'test.yml', { type: 'application/yaml' })
@@ -617,7 +596,7 @@ describe('List', () => {
    })

    it('should close DSL modal when onClose is called', () => {
      renderList()
      render(<List />)

      // Open modal via DSL file drop
      const mockFile = new File(['test content'], 'test.yml', { type: 'application/yaml' })
@@ -635,7 +614,7 @@ describe('List', () => {
    })

    it('should close DSL modal and refetch when onSuccess is called', () => {
      renderList()
      render(<List />)

      // Open modal via DSL file drop
      const mockFile = new File(['test content'], 'test.yml', { type: 'application/yaml' })
@@ -658,7 +637,7 @@ describe('List', () => {
  describe('Tag Filter Change', () => {
    it('should handle tag filter value change', () => {
      vi.useFakeTimers()
      renderList()
      render(<List />)

      // TagFilter component is rendered
      expect(screen.getByTestId('tag-filter')).toBeInTheDocument()
@@ -682,7 +661,7 @@ describe('List', () => {

    it('should handle empty tag filter selection', () => {
      vi.useFakeTimers()
      renderList()
      render(<List />)

      // Trigger tag filter change with empty array
      act(() => {
@@ -704,7 +683,7 @@ describe('List', () => {
  describe('Infinite Scroll', () => {
    it('should call fetchNextPage when intersection observer triggers', () => {
      mockServiceState.hasNextPage = true
      renderList()
      render(<List />)

      // Simulate intersection
      if (intersectionCallback) {
@@ -721,7 +700,7 @@ describe('List', () => {

    it('should not call fetchNextPage when not intersecting', () => {
      mockServiceState.hasNextPage = true
      renderList()
      render(<List />)

      // Simulate non-intersection
      if (intersectionCallback) {
@@ -739,7 +718,7 @@ describe('List', () => {
    it('should not call fetchNextPage when loading', () => {
      mockServiceState.hasNextPage = true
      mockServiceState.isLoading = true
      renderList()
      render(<List />)

      if (intersectionCallback) {
        act(() => {
@@ -757,7 +736,7 @@ describe('List', () => {
  describe('Error State', () => {
    it('should handle error state in useEffect', () => {
      mockServiceState.error = new Error('Test error')
      const { container } = renderList()
      const { container } = render(<List />)

      // Component should still render
      expect(container).toBeInTheDocument()
@@ -9,14 +9,13 @@ import {
  RiMessage3Line,
  RiRobot3Line,
} from '@remixicon/react'
import { useQuery } from '@tanstack/react-query'
import { useDebounceFn } from 'ahooks'
import dynamic from 'next/dynamic'
import {
  useRouter,
} from 'next/navigation'
import { parseAsString, useQueryState } from 'nuqs'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Input from '@/app/components/base/input'
import TabSliderNew from '@/app/components/base/tab-slider-new'
@@ -27,7 +26,6 @@ import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import { useAppContext } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { CheckModal } from '@/hooks/use-pay'
import { fetchWorkflowOnlineUsers } from '@/service/apps'
import { useInfiniteAppList } from '@/service/use-apps'
import { AppModeEnum } from '@/types/app'
import { cn } from '@/utils/classnames'
@@ -118,37 +116,6 @@ const List: FC<Props> = ({
    refetch,
  } = useInfiniteAppList(appListQueryParams, { enabled: !isCurrentWorkspaceDatasetOperator })

  const apps = useMemo(() => data?.pages?.flatMap(page => page.data) ?? [], [data])

  const workflowIds = useMemo(() => {
    const ids = new Set<string>()
    apps.forEach((appItem) => {
      const workflowId = appItem.id
      if (!workflowId)
        return

      if (appItem.mode === 'workflow' || appItem.mode === 'advanced-chat')
        ids.add(workflowId)
    })
    return Array.from(ids)
  }, [apps])

  const { data: onlineUsersByWorkflow = {}, refetch: refreshOnlineUsers } = useQuery({
    queryKey: ['apps', 'workflow-online-users', workflowIds],
    queryFn: () => fetchWorkflowOnlineUsers({ workflowIds }),
    enabled: workflowIds.length > 0,
  })

  useEffect(() => {
    const timer = window.setInterval(() => {
      refetch()
      if (workflowIds.length)
        refreshOnlineUsers()
    }, 10000)

    return () => window.clearInterval(timer)
  }, [workflowIds, refetch, refreshOnlineUsers])

  useEffect(() => {
    if (controlRefreshList > 0) {
      refetch()
@@ -287,7 +254,7 @@ const List: FC<Props> = ({

    if (hasAnyApp) {
      return pages.flatMap(({ data: apps }) => apps).map(app => (
        <AppCard key={app.id} app={app} onRefresh={refetch} onlineUsers={onlineUsersByWorkflow?.[app.id] ?? []} />
        <AppCard key={app.id} app={app} onRefresh={refetch} />
      ))
    }

@@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { RiAddLine, RiDeleteBinLine, RiEditLine, RiMore2Fill, RiSaveLine, RiShareLine } from '@remixicon/react'
import ActionButton, { ActionButtonState } from '.'

@@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { IChatItem } from '@/app/components/base/chat/chat/type'
import type { AgentLogDetailResponse } from '@/models/log'
import { useEffect, useRef } from 'react'

@@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { ReactNode } from 'react'
import AnswerIcon from '.'

@@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { AppIconSelection } from '.'
import { useState } from 'react'
import AppIconPicker from '.'

@@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { ComponentProps } from 'react'
import AppIcon from '.'

@@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { ComponentProps } from 'react'
import { useEffect } from 'react'
import AudioBtn from '.'

@@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import AudioGallery from '.'

const AUDIO_SOURCES = [

@@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import AutoHeightTextarea from '.'

@@ -35,14 +35,12 @@ describe('Avatar', () => {
  it.each([
    { size: undefined, expected: '30px', label: 'default (30px)' },
    { size: 50, expected: '50px', label: 'custom (50px)' },
  ])('should apply $label size to avatar container', ({ size, expected }) => {
  ])('should apply $label size to img element', ({ size, expected }) => {
    const props = { name: 'Test', avatar: 'https://example.com/avatar.jpg', size }

    render(<Avatar {...props} />)

    const img = screen.getByRole('img')
    const wrapper = img.parentElement as HTMLElement
    expect(wrapper).toHaveStyle({
    expect(screen.getByRole('img')).toHaveStyle({
      width: expected,
      height: expected,
      fontSize: expected,
@@ -62,7 +60,7 @@ describe('Avatar', () => {
  })

  describe('className prop', () => {
    it('should merge className with default avatar classes on container', () => {
    it('should merge className with default avatar classes on img', () => {
      const props = {
        name: 'Test',
        avatar: 'https://example.com/avatar.jpg',
@@ -72,9 +70,8 @@ describe('Avatar', () => {
      render(<Avatar {...props} />)

      const img = screen.getByRole('img')
      const wrapper = img.parentElement as HTMLElement
      expect(wrapper).toHaveClass('custom-class')
      expect(wrapper).toHaveClass('shrink-0', 'flex', 'items-center', 'rounded-full', 'bg-primary-600')
      expect(img).toHaveClass('custom-class')
      expect(img).toHaveClass('shrink-0', 'flex', 'items-center', 'rounded-full', 'bg-primary-600')
    })

    it('should merge className with default avatar classes on fallback div', () => {
@@ -280,11 +277,10 @@ describe('Avatar', () => {
      render(<Avatar {...props} />)

      const img = screen.getByRole('img')
      const wrapper = img.parentElement as HTMLElement
      expect(img).toHaveAttribute('alt', 'Test User')
      expect(img).toHaveAttribute('src', 'https://example.com/avatar.jpg')
      expect(wrapper).toHaveStyle({ width: '64px', height: '64px' })
      expect(wrapper).toHaveClass('custom-avatar')
      expect(img).toHaveStyle({ width: '64px', height: '64px' })
      expect(img).toHaveClass('custom-avatar')

      // Trigger load to verify onError callback
      fireEvent.load(img)

@@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import Avatar from '.'

const meta = {

@@ -9,7 +9,6 @@ export type AvatarProps = {
  className?: string
  textClassName?: string
  onError?: (x: boolean) => void
  backgroundColor?: string
}
const Avatar = ({
  name,
@@ -18,18 +17,9 @@ const Avatar = ({
  className,
  textClassName,
  onError,
  backgroundColor,
}: AvatarProps) => {
  const avatarClassName = backgroundColor
    ? 'shrink-0 flex items-center rounded-full'
    : 'shrink-0 flex items-center rounded-full bg-primary-600'
  const style = {
    width: `${size}px`,
    height: `${size}px`,
    fontSize: `${size}px`,
    lineHeight: `${size}px`,
    ...(backgroundColor && !avatar ? { backgroundColor } : {}),
  }
  const avatarClassName = 'shrink-0 flex items-center rounded-full bg-primary-600'
  const style = { width: `${size}px`, height: `${size}px`, fontSize: `${size}px`, lineHeight: `${size}px` }
  const [imgError, setImgError] = useState(false)

  const handleError = () => {
@@ -45,18 +35,14 @@ const Avatar = ({

  if (avatar && !imgError) {
    return (
      <span
      <img
        className={cn(avatarClassName, className)}
        style={style}
      >
        <img
          className="h-full w-full rounded-full object-cover"
          alt={name}
          src={avatar}
          onError={handleError}
          onLoad={() => onError?.(false)}
        />
      </span>
        alt={name}
        src={avatar}
        onError={handleError}
        onLoad={() => onError?.(false)}
      />
    )
  }

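After this change the size and class styles land on the `<img>` itself rather than a wrapper `<span>`, and the `backgroundColor` prop is gone, which is exactly what the updated tests above assert. Usage is unchanged apart from the dropped prop; a small sketch (the import path and URL are placeholders):

```tsx
import Avatar from '@/app/components/base/avatar'

// Sketch: 40px avatar that falls back to the name's first letter on load error.
const Example = () => (
  <Avatar name="Ada" avatar="https://example.com/ada.png" size={40} />
)

export default Example
```
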
@ -1,4 +1,4 @@
|
||||
import type { Meta, StoryObj } from '@storybook/nextjs'
|
||||
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
|
||||
import Badge from '../badge'
|
||||
|
||||
const meta = {
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
import type { Meta, StoryObj } from '@storybook/nextjs'
|
||||
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
|
||||
import { useState } from 'react'
|
||||
import BlockInput from '.'
|
||||
|
||||
|
||||
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import AddButton from './add-button'

const meta = {
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'

import { RocketLaunchIcon } from '@heroicons/react/20/solid'
import { Button } from '.'
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import SyncButton from './sync-button'

const meta = {
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { ChatItem } from '../../types'
import { WorkflowRunningStatus } from '@/app/components/workflow/types'
import Answer from '.'
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'

import type { ChatItem } from '../types'
import { User } from '@/app/components/base/icons/src/public/avatar'
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import Checkbox from '.'
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { Item } from '.'
import { useState } from 'react'
import Chip from '.'
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import Confirm from '.'
import Button from '../button'
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useEffect, useState } from 'react'
import ContentDialog from '.'
@ -19,7 +19,7 @@ const ContentDialog = ({
    <Transition
      show={show}
      as="div"
      className="absolute left-0 top-0 z-[70] box-border h-full w-full p-2"
      className="absolute left-0 top-0 z-30 box-border h-full w-full p-2"
    >
      <TransitionChild>
        <div
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import CopyFeedback, { CopyFeedbackNew } from '.'
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import CopyIcon from '.'

const meta = {
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import CornerLabel from '.'

const meta = {
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { DatePickerProps } from './types'
import { useState } from 'react'
import { fn } from 'storybook/test'
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useEffect, useState } from 'react'
import Dialog from '.'
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import Divider from '.'

const meta = {
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import { fn } from 'storybook/test'
import DrawerPlus from '.'
@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import { fn } from 'storybook/test'
import Drawer from '.'
Some files were not shown because too many files have changed in this diff.