Compare commits

..

19 Commits

Author SHA1 Message Date
yyh
38d68ac4e3 fix(workflow): update stop state for preview chat 2026-01-26 22:50:38 +08:00
yyh
4e48a1c4c3 fix(workflow): keep preview run state on panel close 2026-01-26 22:42:14 +08:00
yyh
fb9a6bbc9f fix(workflow): restore fetchInspectVars and invalidAllLastRun calls lost during refactor
The previous commit (a86765e2b6) accidentally dropped these calls when splitting
useChat hook into modules. Also add race condition protection using runId to
prevent stale callbacks from updating state after a new run has started.
2026-01-26 21:55:16 +08:00
yyh
a86765e2b6 refactor(workflow): split useChat hook into modular files
Extract the 535-line useChat hook into 7 focused modules for better
maintainability and testability. Add chat-preview-slice to Zustand store
to centralize chat state management instead of local useState/useRef.
2026-01-26 21:49:10 +08:00
5eaf0c733a fix: service api doc can not gen (#31549) 2026-01-26 21:59:02 +09:00
yyh
f561656a89 chore: follow-up fixes for storybook vite migration (#31545) 2026-01-26 20:20:14 +08:00
f01f555146 chore: increase plugin cache ttl to 1 hour (#31552) 2026-01-26 19:48:33 +08:00
47d0e400ae chore: update to story book nextjs-vite (#31536) 2026-01-26 17:07:20 +08:00
8724ba04aa fix: fix Cannot read properties of null (reading 'credential_form_sch… (#31117) 2026-01-26 15:52:53 +08:00
6fd001c660 chore: eslint prune-suppressions (#31526) 2026-01-26 15:31:19 +08:00
e8e386a6b9 fix: Add vertical scrolling support for floating elements. (#30897)
Co-authored-by: zhaiguangpeng <zhaiguangpeng@didiglobal.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2026-01-26 15:17:42 +08:00
eba5eac3fa refactor: api/controllers/console/setup.py to ov3 (#31465)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-01-26 15:04:33 +08:00
19008dce13 refactor: api/controllers/console/version.py to v3 (#31463)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2026-01-26 15:04:25 +08:00
92011d0a31 refactor: LLM plugin invoke parsing (#31499)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-01-26 14:59:57 +08:00
a51ced0a4f refactor: pass BaseModel instances instead of dict (#31514)
Co-authored-by: fghpdf <fghpdf@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-01-26 14:50:14 +08:00
yyh
dad8e408b0 fix(web): upgrade tanstack devtools to fix seroval RCE vulnerability (#31515) 2026-01-26 14:49:58 +08:00
d941201a3e refactor(tool-selector): remove unused components and consolidate import (#31018)
Co-authored-by: CodingOnStar <hanxujiang@dify.ai>
2026-01-26 14:24:00 +08:00
dd988d42c2 feat: enhance quota panel to support additional model providers and integrate trial models feature (#31443)
Co-authored-by: CodingOnStar <hanxujiang@dify.ai>
2026-01-26 14:04:12 +08:00
a43d2ec4f0 refactor: restructure Completed component (#31435)
Co-authored-by: CodingOnStar <hanxujiang@dify.ai>
2026-01-26 14:03:51 +08:00
220 changed files with 24829 additions and 7558 deletions

View File

@ -0,0 +1,27 @@
# Notes: `large_language_model.py`
## Purpose
Provides the base `LargeLanguageModel` implementation used by the model runtime to invoke plugin-backed LLMs and to
bridge plugin daemon streaming semantics back into API-layer entities (`LLMResult`, `LLMResultChunk`).
## Key behaviors / invariants
- `invoke(..., stream=False)` still calls the plugin in streaming mode and then synthesizes a single `LLMResult` from
the first yielded `LLMResultChunk`.
- Plugin invocation is wrapped by `_invoke_llm_via_plugin(...)`, and `stream=False` normalization is handled by
`_normalize_non_stream_plugin_result(...)` / `_build_llm_result_from_first_chunk(...)`.
- Tool call deltas are merged incrementally via `_increase_tool_call(...)` to support multiple provider chunking
patterns (IDs anchored to first chunk, every chunk, or missing entirely); see the sketch after this list.
- A tool-call delta with an empty `id` requires at least one existing tool call; otherwise we raise `ValueError` to
surface invalid delta sequences explicitly.
- Callback invocation is centralized in `_run_callbacks(...)` to ensure consistent error handling/logging.
- For compatibility with dify issue `#17799`, `prompt_messages` may be removed by the plugin daemon in chunks and must
be re-attached in this layer before callbacks/consumers use them.
- Callback hooks (`on_before_invoke`, `on_new_chunk`, `on_after_invoke`, `on_invoke_error`) must not break invocation
unless `callback.raise_error` is true.
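## Example: tool-call delta merging
As a concrete sketch of the merging invariant above (it mirrors the fixtures in the unit test referenced under Test
focus; `_increase_tool_call` is an internal helper, so this is illustrative usage rather than a public API), a provider
that anchors the tool-call ID to the first chunk and then streams argument fragments with empty IDs merges into a
single tool call:
```python
from core.model_runtime.entities.message_entities import AssistantPromptMessage
from core.model_runtime.model_providers.__base.large_language_model import _increase_tool_call

ToolCall = AssistantPromptMessage.ToolCall

# First delta carries the ID and function name; later deltas carry only
# argument fragments with an empty ID, so they attach to the last tool call.
deltas = [
    ToolCall(id="call-1", type="function",
             function=ToolCall.ToolCallFunction(name="func_foo", arguments="")),
    ToolCall(id="", type="function",
             function=ToolCall.ToolCallFunction(name="", arguments='{"arg1": ')),
    ToolCall(id="", type="function",
             function=ToolCall.ToolCallFunction(name="", arguments='"value"}')),
]

merged: list[ToolCall] = []
_increase_tool_call(deltas, merged)  # mutates `merged` in place

assert len(merged) == 1
assert merged[0].function.name == "func_foo"
assert merged[0].function.arguments == '{"arg1": "value"}'
```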
## Test focus
- `api/tests/unit_tests/core/model_runtime/__base/test_increase_tool_call.py` validates tool-call delta merging and
patches `_gen_tool_call_id` for deterministic IDs.

View File

@ -1,20 +1,19 @@
from typing import Literal
from flask import request
from flask_restx import Resource, fields
from pydantic import BaseModel, Field, field_validator
from configs import dify_config
from controllers.fastopenapi import console_router
from libs.helper import EmailStr, extract_remote_ip
from libs.password import valid_password
from models.model import DifySetup, db
from services.account_service import RegisterService, TenantService
from . import console_ns
from .error import AlreadySetupError, NotInitValidateError
from .init_validate import get_init_validate_status
from .wraps import only_edition_self_hosted
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
class SetupRequestPayload(BaseModel):
email: EmailStr = Field(..., description="Admin email address")
@ -28,78 +27,66 @@ class SetupRequestPayload(BaseModel):
return valid_password(value)
console_ns.schema_model(
SetupRequestPayload.__name__,
SetupRequestPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
class SetupStatusResponse(BaseModel):
step: Literal["not_started", "finished"] = Field(description="Setup step status")
setup_at: str | None = Field(default=None, description="Setup completion time (ISO format)")
class SetupResponse(BaseModel):
result: str = Field(description="Setup result", examples=["success"])
@console_router.get(
"/setup",
response_model=SetupStatusResponse,
tags=["console"],
)
def get_setup_status_api() -> SetupStatusResponse:
"""Get system setup status."""
if dify_config.EDITION == "SELF_HOSTED":
setup_status = get_setup_status()
if setup_status and not isinstance(setup_status, bool):
return SetupStatusResponse(step="finished", setup_at=setup_status.setup_at.isoformat())
if setup_status:
return SetupStatusResponse(step="finished")
return SetupStatusResponse(step="not_started")
return SetupStatusResponse(step="finished")
@console_ns.route("/setup")
class SetupApi(Resource):
@console_ns.doc("get_setup_status")
@console_ns.doc(description="Get system setup status")
@console_ns.response(
200,
"Success",
console_ns.model(
"SetupStatusResponse",
{
"step": fields.String(description="Setup step status", enum=["not_started", "finished"]),
"setup_at": fields.String(description="Setup completion time (ISO format)", required=False),
},
),
@console_router.post(
"/setup",
response_model=SetupResponse,
tags=["console"],
status_code=201,
)
@only_edition_self_hosted
def setup_system(payload: SetupRequestPayload) -> SetupResponse:
"""Initialize system setup with admin account."""
if get_setup_status():
raise AlreadySetupError()
tenant_count = TenantService.get_tenant_count()
if tenant_count > 0:
raise AlreadySetupError()
if not get_init_validate_status():
raise NotInitValidateError()
normalized_email = payload.email.lower()
RegisterService.setup(
email=normalized_email,
name=payload.name,
password=payload.password,
ip_address=extract_remote_ip(request),
language=payload.language,
)
def get(self):
"""Get system setup status"""
if dify_config.EDITION == "SELF_HOSTED":
setup_status = get_setup_status()
# Check if setup_status is a DifySetup object rather than a bool
if setup_status and not isinstance(setup_status, bool):
return {"step": "finished", "setup_at": setup_status.setup_at.isoformat()}
elif setup_status:
return {"step": "finished"}
return {"step": "not_started"}
return {"step": "finished"}
@console_ns.doc("setup_system")
@console_ns.doc(description="Initialize system setup with admin account")
@console_ns.expect(console_ns.models[SetupRequestPayload.__name__])
@console_ns.response(
201, "Success", console_ns.model("SetupResponse", {"result": fields.String(description="Setup result")})
)
@console_ns.response(400, "Already setup or validation failed")
@only_edition_self_hosted
def post(self):
"""Initialize system setup with admin account"""
# is set up
if get_setup_status():
raise AlreadySetupError()
# is tenant created
tenant_count = TenantService.get_tenant_count()
if tenant_count > 0:
raise AlreadySetupError()
if not get_init_validate_status():
raise NotInitValidateError()
args = SetupRequestPayload.model_validate(console_ns.payload)
normalized_email = args.email.lower()
# setup
RegisterService.setup(
email=normalized_email,
name=args.name,
password=args.password,
ip_address=extract_remote_ip(request),
language=args.language,
)
return {"result": "success"}, 201
return SetupResponse(result="success")
def get_setup_status():
def get_setup_status() -> DifySetup | bool | None:
if dify_config.EDITION == "SELF_HOSTED":
return db.session.query(DifySetup).first()
else:
return True
return True

View File

@ -1,15 +1,11 @@
import json
import logging
import httpx
from flask import request
from flask_restx import Resource, fields
from packaging import version
from pydantic import BaseModel, Field
from configs import dify_config
from . import console_ns
from controllers.fastopenapi import console_router
logger = logging.getLogger(__name__)
@ -18,69 +14,61 @@ class VersionQuery(BaseModel):
current_version: str = Field(..., description="Current application version")
console_ns.schema_model(
VersionQuery.__name__,
VersionQuery.model_json_schema(ref_template="#/definitions/{model}"),
class VersionFeatures(BaseModel):
can_replace_logo: bool = Field(description="Whether logo replacement is supported")
model_load_balancing_enabled: bool = Field(description="Whether model load balancing is enabled")
class VersionResponse(BaseModel):
version: str = Field(description="Latest version number")
release_date: str = Field(description="Release date of latest version")
release_notes: str = Field(description="Release notes for latest version")
can_auto_update: bool = Field(description="Whether auto-update is supported")
features: VersionFeatures = Field(description="Feature flags and capabilities")
@console_router.get(
"/version",
response_model=VersionResponse,
tags=["console"],
)
def check_version_update(query: VersionQuery) -> VersionResponse:
"""Check for application version updates."""
check_update_url = dify_config.CHECK_UPDATE_URL
@console_ns.route("/version")
class VersionApi(Resource):
@console_ns.doc("check_version_update")
@console_ns.doc(description="Check for application version updates")
@console_ns.expect(console_ns.models[VersionQuery.__name__])
@console_ns.response(
200,
"Success",
console_ns.model(
"VersionResponse",
{
"version": fields.String(description="Latest version number"),
"release_date": fields.String(description="Release date of latest version"),
"release_notes": fields.String(description="Release notes for latest version"),
"can_auto_update": fields.Boolean(description="Whether auto-update is supported"),
"features": fields.Raw(description="Feature flags and capabilities"),
},
result = VersionResponse(
version=dify_config.project.version,
release_date="",
release_notes="",
can_auto_update=False,
features=VersionFeatures(
can_replace_logo=dify_config.CAN_REPLACE_LOGO,
model_load_balancing_enabled=dify_config.MODEL_LB_ENABLED,
),
)
def get(self):
"""Check for application version updates"""
args = VersionQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
check_update_url = dify_config.CHECK_UPDATE_URL
result = {
"version": dify_config.project.version,
"release_date": "",
"release_notes": "",
"can_auto_update": False,
"features": {
"can_replace_logo": dify_config.CAN_REPLACE_LOGO,
"model_load_balancing_enabled": dify_config.MODEL_LB_ENABLED,
},
}
if not check_update_url:
return result
try:
response = httpx.get(
check_update_url,
params={"current_version": args.current_version},
timeout=httpx.Timeout(timeout=10.0, connect=3.0),
)
except Exception as error:
logger.warning("Check update version error: %s.", str(error))
result["version"] = args.current_version
return result
content = json.loads(response.content)
if _has_new_version(latest_version=content["version"], current_version=f"{args.current_version}"):
result["version"] = content["version"]
result["release_date"] = content["releaseDate"]
result["release_notes"] = content["releaseNotes"]
result["can_auto_update"] = content["canAutoUpdate"]
if not check_update_url:
return result
try:
response = httpx.get(
check_update_url,
params={"current_version": query.current_version},
timeout=httpx.Timeout(timeout=10.0, connect=3.0),
)
content = response.json()
except Exception as error:
logger.warning("Check update version error: %s.", str(error))
result.version = query.current_version
return result
latest_version = content.get("version", result.version)
if _has_new_version(latest_version=latest_version, current_version=f"{query.current_version}"):
result.version = latest_version
result.release_date = content.get("releaseDate", "")
result.release_notes = content.get("releaseNotes", "")
result.can_auto_update = content.get("canAutoUpdate", False)
return result
def _has_new_version(*, latest_version: str, current_version: str) -> bool:
try:

View File

@ -11,7 +11,9 @@ from controllers.service_api.wraps import DatasetApiResource, cloud_edition_bill
from fields.dataset_fields import dataset_metadata_fields
from services.dataset_service import DatasetService
from services.entities.knowledge_entities.knowledge_entities import (
DocumentMetadataOperation,
MetadataArgs,
MetadataDetail,
MetadataOperationData,
)
from services.metadata_service import MetadataService
@ -22,7 +24,13 @@ class MetadataUpdatePayload(BaseModel):
register_schema_model(service_api_ns, MetadataUpdatePayload)
register_schema_models(service_api_ns, MetadataArgs, MetadataOperationData)
register_schema_models(
service_api_ns,
MetadataArgs,
MetadataDetail,
DocumentMetadataOperation,
MetadataOperationData,
)
@service_api_ns.route("/datasets/<uuid:dataset_id>/metadata")

View File

@ -1,7 +1,7 @@
import logging
import time
import uuid
from collections.abc import Generator, Sequence
from collections.abc import Callable, Generator, Iterator, Sequence
from typing import Union
from pydantic import ConfigDict
@ -30,6 +30,142 @@ def _gen_tool_call_id() -> str:
return f"chatcmpl-tool-{str(uuid.uuid4().hex)}"
def _run_callbacks(callbacks: Sequence[Callback] | None, *, event: str, invoke: Callable[[Callback], None]) -> None:
if not callbacks:
return
for callback in callbacks:
try:
invoke(callback)
except Exception as e:
if callback.raise_error:
raise
logger.warning("Callback %s %s failed with error %s", callback.__class__.__name__, event, e)
def _get_or_create_tool_call(
existing_tools_calls: list[AssistantPromptMessage.ToolCall],
tool_call_id: str,
) -> AssistantPromptMessage.ToolCall:
"""
Get or create a tool call by ID.
If `tool_call_id` is empty, returns the most recently created tool call.
"""
if not tool_call_id:
if not existing_tools_calls:
raise ValueError("tool_call_id is empty but no existing tool call is available to apply the delta")
return existing_tools_calls[-1]
tool_call = next((tool_call for tool_call in existing_tools_calls if tool_call.id == tool_call_id), None)
if tool_call is None:
tool_call = AssistantPromptMessage.ToolCall(
id=tool_call_id,
type="function",
function=AssistantPromptMessage.ToolCall.ToolCallFunction(name="", arguments=""),
)
existing_tools_calls.append(tool_call)
return tool_call
def _merge_tool_call_delta(
tool_call: AssistantPromptMessage.ToolCall,
delta: AssistantPromptMessage.ToolCall,
) -> None:
if delta.id:
tool_call.id = delta.id
if delta.type:
tool_call.type = delta.type
if delta.function.name:
tool_call.function.name = delta.function.name
if delta.function.arguments:
tool_call.function.arguments += delta.function.arguments
def _build_llm_result_from_first_chunk(
model: str,
prompt_messages: Sequence[PromptMessage],
chunks: Iterator[LLMResultChunk],
) -> LLMResult:
"""
Build a single `LLMResult` from the first returned chunk.
This is used for `stream=False` because the plugin side may still implement the response via a chunked stream.
"""
content = ""
content_list: list[PromptMessageContentUnionTypes] = []
usage = LLMUsage.empty_usage()
system_fingerprint: str | None = None
tools_calls: list[AssistantPromptMessage.ToolCall] = []
first_chunk = next(chunks, None)
if first_chunk is not None:
if isinstance(first_chunk.delta.message.content, str):
content += first_chunk.delta.message.content
elif isinstance(first_chunk.delta.message.content, list):
content_list.extend(first_chunk.delta.message.content)
if first_chunk.delta.message.tool_calls:
_increase_tool_call(first_chunk.delta.message.tool_calls, tools_calls)
usage = first_chunk.delta.usage or LLMUsage.empty_usage()
system_fingerprint = first_chunk.system_fingerprint
return LLMResult(
model=model,
prompt_messages=prompt_messages,
message=AssistantPromptMessage(
content=content or content_list,
tool_calls=tools_calls,
),
usage=usage,
system_fingerprint=system_fingerprint,
)
def _invoke_llm_via_plugin(
*,
tenant_id: str,
user_id: str,
plugin_id: str,
provider: str,
model: str,
credentials: dict,
model_parameters: dict,
prompt_messages: Sequence[PromptMessage],
tools: list[PromptMessageTool] | None,
stop: Sequence[str] | None,
stream: bool,
) -> Union[LLMResult, Generator[LLMResultChunk, None, None]]:
from core.plugin.impl.model import PluginModelClient
plugin_model_manager = PluginModelClient()
return plugin_model_manager.invoke_llm(
tenant_id=tenant_id,
user_id=user_id,
plugin_id=plugin_id,
provider=provider,
model=model,
credentials=credentials,
model_parameters=model_parameters,
prompt_messages=list(prompt_messages),
tools=tools,
stop=list(stop) if stop else None,
stream=stream,
)
def _normalize_non_stream_plugin_result(
model: str,
prompt_messages: Sequence[PromptMessage],
result: Union[LLMResult, Iterator[LLMResultChunk]],
) -> LLMResult:
if isinstance(result, LLMResult):
return result
return _build_llm_result_from_first_chunk(model=model, prompt_messages=prompt_messages, chunks=result)
def _increase_tool_call(
new_tool_calls: list[AssistantPromptMessage.ToolCall], existing_tools_calls: list[AssistantPromptMessage.ToolCall]
):
@ -40,42 +176,13 @@ def _increase_tool_call(
:param existing_tools_calls: List of existing tool calls to be modified IN-PLACE.
"""
def get_tool_call(tool_call_id: str):
"""
Get or create a tool call by ID
:param tool_call_id: tool call ID
:return: existing or new tool call
"""
if not tool_call_id:
return existing_tools_calls[-1]
_tool_call = next((_tool_call for _tool_call in existing_tools_calls if _tool_call.id == tool_call_id), None)
if _tool_call is None:
_tool_call = AssistantPromptMessage.ToolCall(
id=tool_call_id,
type="function",
function=AssistantPromptMessage.ToolCall.ToolCallFunction(name="", arguments=""),
)
existing_tools_calls.append(_tool_call)
return _tool_call
for new_tool_call in new_tool_calls:
# generate ID for tool calls with function name but no ID to track them
if new_tool_call.function.name and not new_tool_call.id:
new_tool_call.id = _gen_tool_call_id()
# get tool call
tool_call = get_tool_call(new_tool_call.id)
# update tool call
if new_tool_call.id:
tool_call.id = new_tool_call.id
if new_tool_call.type:
tool_call.type = new_tool_call.type
if new_tool_call.function.name:
tool_call.function.name = new_tool_call.function.name
if new_tool_call.function.arguments:
tool_call.function.arguments += new_tool_call.function.arguments
tool_call = _get_or_create_tool_call(existing_tools_calls, new_tool_call.id)
_merge_tool_call_delta(tool_call, new_tool_call)
class LargeLanguageModel(AIModel):
@ -141,10 +248,7 @@ class LargeLanguageModel(AIModel):
result: Union[LLMResult, Generator[LLMResultChunk, None, None]]
try:
from core.plugin.impl.model import PluginModelClient
plugin_model_manager = PluginModelClient()
result = plugin_model_manager.invoke_llm(
result = _invoke_llm_via_plugin(
tenant_id=self.tenant_id,
user_id=user or "unknown",
plugin_id=self.plugin_id,
@ -154,38 +258,13 @@ class LargeLanguageModel(AIModel):
model_parameters=model_parameters,
prompt_messages=prompt_messages,
tools=tools,
stop=list(stop) if stop else None,
stop=stop,
stream=stream,
)
if not stream:
content = ""
content_list = []
usage = LLMUsage.empty_usage()
system_fingerprint = None
tools_calls: list[AssistantPromptMessage.ToolCall] = []
for chunk in result:
if isinstance(chunk.delta.message.content, str):
content += chunk.delta.message.content
elif isinstance(chunk.delta.message.content, list):
content_list.extend(chunk.delta.message.content)
if chunk.delta.message.tool_calls:
_increase_tool_call(chunk.delta.message.tool_calls, tools_calls)
usage = chunk.delta.usage or LLMUsage.empty_usage()
system_fingerprint = chunk.system_fingerprint
break
result = LLMResult(
model=model,
prompt_messages=prompt_messages,
message=AssistantPromptMessage(
content=content or content_list,
tool_calls=tools_calls,
),
usage=usage,
system_fingerprint=system_fingerprint,
result = _normalize_non_stream_plugin_result(
model=model, prompt_messages=prompt_messages, result=result
)
except Exception as e:
self._trigger_invoke_error_callbacks(
@ -425,27 +504,21 @@ class LargeLanguageModel(AIModel):
:param user: unique user id
:param callbacks: callbacks
"""
if callbacks:
for callback in callbacks:
try:
callback.on_before_invoke(
llm_instance=self,
model=model,
credentials=credentials,
prompt_messages=prompt_messages,
model_parameters=model_parameters,
tools=tools,
stop=stop,
stream=stream,
user=user,
)
except Exception as e:
if callback.raise_error:
raise e
else:
logger.warning(
"Callback %s on_before_invoke failed with error %s", callback.__class__.__name__, e
)
_run_callbacks(
callbacks,
event="on_before_invoke",
invoke=lambda callback: callback.on_before_invoke(
llm_instance=self,
model=model,
credentials=credentials,
prompt_messages=prompt_messages,
model_parameters=model_parameters,
tools=tools,
stop=stop,
stream=stream,
user=user,
),
)
def _trigger_new_chunk_callbacks(
self,
@ -473,26 +546,22 @@ class LargeLanguageModel(AIModel):
:param stream: is stream response
:param user: unique user id
"""
if callbacks:
for callback in callbacks:
try:
callback.on_new_chunk(
llm_instance=self,
chunk=chunk,
model=model,
credentials=credentials,
prompt_messages=prompt_messages,
model_parameters=model_parameters,
tools=tools,
stop=stop,
stream=stream,
user=user,
)
except Exception as e:
if callback.raise_error:
raise e
else:
logger.warning("Callback %s on_new_chunk failed with error %s", callback.__class__.__name__, e)
_run_callbacks(
callbacks,
event="on_new_chunk",
invoke=lambda callback: callback.on_new_chunk(
llm_instance=self,
chunk=chunk,
model=model,
credentials=credentials,
prompt_messages=prompt_messages,
model_parameters=model_parameters,
tools=tools,
stop=stop,
stream=stream,
user=user,
),
)
def _trigger_after_invoke_callbacks(
self,
@ -521,28 +590,22 @@ class LargeLanguageModel(AIModel):
:param user: unique user id
:param callbacks: callbacks
"""
if callbacks:
for callback in callbacks:
try:
callback.on_after_invoke(
llm_instance=self,
result=result,
model=model,
credentials=credentials,
prompt_messages=prompt_messages,
model_parameters=model_parameters,
tools=tools,
stop=stop,
stream=stream,
user=user,
)
except Exception as e:
if callback.raise_error:
raise e
else:
logger.warning(
"Callback %s on_after_invoke failed with error %s", callback.__class__.__name__, e
)
_run_callbacks(
callbacks,
event="on_after_invoke",
invoke=lambda callback: callback.on_after_invoke(
llm_instance=self,
result=result,
model=model,
credentials=credentials,
prompt_messages=prompt_messages,
model_parameters=model_parameters,
tools=tools,
stop=stop,
stream=stream,
user=user,
),
)
def _trigger_invoke_error_callbacks(
self,
@ -571,25 +634,19 @@ class LargeLanguageModel(AIModel):
:param user: unique user id
:param callbacks: callbacks
"""
if callbacks:
for callback in callbacks:
try:
callback.on_invoke_error(
llm_instance=self,
ex=ex,
model=model,
credentials=credentials,
prompt_messages=prompt_messages,
model_parameters=model_parameters,
tools=tools,
stop=stop,
stream=stream,
user=user,
)
except Exception as e:
if callback.raise_error:
raise e
else:
logger.warning(
"Callback %s on_invoke_error failed with error %s", callback.__class__.__name__, e
)
_run_callbacks(
callbacks,
event="on_invoke_error",
invoke=lambda callback: callback.on_invoke_error(
llm_instance=self,
ex=ex,
model=model,
credentials=credentials,
prompt_messages=prompt_messages,
model_parameters=model_parameters,
tools=tools,
stop=stop,
stream=stream,
user=user,
),
)

View File

@ -28,8 +28,10 @@ def init_app(app: DifyApp) -> None:
# Ensure route decorators are evaluated.
import controllers.console.ping as ping_module
from controllers.console import setup
_ = ping_module
_ = setup
router.include_router(console_router, prefix="/console/api")
CORS(

View File

@ -781,15 +781,16 @@ class AppDslService:
return dependencies
@classmethod
def get_leaked_dependencies(cls, tenant_id: str, dsl_dependencies: list[dict]) -> list[PluginDependency]:
def get_leaked_dependencies(
cls, tenant_id: str, dsl_dependencies: list[PluginDependency]
) -> list[PluginDependency]:
"""
Returns the leaked dependencies in current workspace
"""
dependencies = [PluginDependency.model_validate(dep) for dep in dsl_dependencies]
if not dependencies:
if not dsl_dependencies:
return []
return DependenciesAnalysisService.get_leaked_dependencies(tenant_id=tenant_id, dependencies=dependencies)
return DependenciesAnalysisService.get_leaked_dependencies(tenant_id=tenant_id, dependencies=dsl_dependencies)
@staticmethod
def _generate_aes_key(tenant_id: str) -> bytes:

View File

@ -870,15 +870,16 @@ class RagPipelineDslService:
return dependencies
@classmethod
def get_leaked_dependencies(cls, tenant_id: str, dsl_dependencies: list[dict]) -> list[PluginDependency]:
def get_leaked_dependencies(
cls, tenant_id: str, dsl_dependencies: list[PluginDependency]
) -> list[PluginDependency]:
"""
Returns the leaked dependencies in current workspace
"""
dependencies = [PluginDependency.model_validate(dep) for dep in dsl_dependencies]
if not dependencies:
if not dsl_dependencies:
return []
return DependenciesAnalysisService.get_leaked_dependencies(tenant_id=tenant_id, dependencies=dependencies)
return DependenciesAnalysisService.get_leaked_dependencies(tenant_id=tenant_id, dependencies=dsl_dependencies)
def _generate_aes_key(self, tenant_id: str) -> bytes:
"""Generate AES key based on tenant_id"""

View File

@ -44,7 +44,7 @@ class RagPipelineTransformService:
doc_form = dataset.doc_form
if not doc_form:
return self._transform_to_empty_pipeline(dataset)
retrieval_model = dataset.retrieval_model
retrieval_model = RetrievalSetting.model_validate(dataset.retrieval_model) if dataset.retrieval_model else None
pipeline_yaml = self._get_transform_yaml(doc_form, datasource_type, indexing_technique)
# deal dependencies
self._deal_dependencies(pipeline_yaml, dataset.tenant_id)
@ -154,7 +154,12 @@ class RagPipelineTransformService:
return node
def _deal_knowledge_index(
self, dataset: Dataset, doc_form: str, indexing_technique: str | None, retrieval_model: dict, node: dict
self,
dataset: Dataset,
doc_form: str,
indexing_technique: str | None,
retrieval_model: RetrievalSetting | None,
node: dict,
):
knowledge_configuration_dict = node.get("data", {})
knowledge_configuration = KnowledgeConfiguration.model_validate(knowledge_configuration_dict)
@ -163,10 +168,9 @@ class RagPipelineTransformService:
knowledge_configuration.embedding_model = dataset.embedding_model
knowledge_configuration.embedding_model_provider = dataset.embedding_model_provider
if retrieval_model:
retrieval_setting = RetrievalSetting.model_validate(retrieval_model)
if indexing_technique == "economy":
retrieval_setting.search_method = RetrievalMethod.KEYWORD_SEARCH
knowledge_configuration.retrieval_model = retrieval_setting
retrieval_model.search_method = RetrievalMethod.KEYWORD_SEARCH
knowledge_configuration.retrieval_model = retrieval_model
else:
dataset.retrieval_model = knowledge_configuration.retrieval_model.model_dump()

View File

@ -17,7 +17,7 @@ logger = logging.getLogger(__name__)
RETRY_TIMES_OF_ONE_PLUGIN_IN_ONE_TENANT = 3
CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:"
CACHE_REDIS_TTL = 60 * 15 # 15 minutes
CACHE_REDIS_TTL = 60 * 60 # 1 hour
def _get_redis_cache_key(plugin_id: str) -> str:

View File

@ -0,0 +1,56 @@
import builtins
from unittest.mock import patch
import pytest
from flask import Flask
from flask.views import MethodView
from extensions import ext_fastopenapi
if not hasattr(builtins, "MethodView"):
builtins.MethodView = MethodView # type: ignore[attr-defined]
@pytest.fixture
def app() -> Flask:
app = Flask(__name__)
app.config["TESTING"] = True
return app
def test_console_setup_fastopenapi_get_not_started(app: Flask):
ext_fastopenapi.init_app(app)
with (
patch("controllers.console.setup.dify_config.EDITION", "SELF_HOSTED"),
patch("controllers.console.setup.get_setup_status", return_value=None),
):
client = app.test_client()
response = client.get("/console/api/setup")
assert response.status_code == 200
assert response.get_json() == {"step": "not_started", "setup_at": None}
def test_console_setup_fastopenapi_post_success(app: Flask):
ext_fastopenapi.init_app(app)
payload = {
"email": "admin@example.com",
"name": "Admin",
"password": "Passw0rd1",
"language": "en-US",
}
with (
patch("controllers.console.wraps.dify_config.EDITION", "SELF_HOSTED"),
patch("controllers.console.setup.get_setup_status", return_value=None),
patch("controllers.console.setup.TenantService.get_tenant_count", return_value=0),
patch("controllers.console.setup.get_init_validate_status", return_value=True),
patch("controllers.console.setup.RegisterService.setup"),
):
client = app.test_client()
response = client.post("/console/api/setup", json=payload)
assert response.status_code == 201
assert response.get_json() == {"result": "success"}

View File

@ -0,0 +1,35 @@
import builtins
from unittest.mock import patch
import pytest
from flask import Flask
from flask.views import MethodView
from configs import dify_config
from extensions import ext_fastopenapi
if not hasattr(builtins, "MethodView"):
builtins.MethodView = MethodView # type: ignore[attr-defined]
@pytest.fixture
def app() -> Flask:
app = Flask(__name__)
app.config["TESTING"] = True
return app
def test_console_version_fastopenapi_returns_current_version(app: Flask):
ext_fastopenapi.init_app(app)
with patch("controllers.console.version.dify_config.CHECK_UPDATE_URL", None):
client = app.test_client()
response = client.get("/console/api/version", query_string={"current_version": "0.0.0"})
assert response.status_code == 200
data = response.get_json()
assert data["version"] == dify_config.project.version
assert data["release_date"] == ""
assert data["release_notes"] == ""
assert data["can_auto_update"] is False
assert "features" in data

View File

@ -1,39 +0,0 @@
from types import SimpleNamespace
from unittest.mock import patch
from controllers.console.setup import SetupApi
class TestSetupApi:
def test_post_lowercases_email_before_register(self):
"""Ensure setup registration normalizes email casing."""
payload = {
"email": "Admin@Example.com",
"name": "Admin User",
"password": "ValidPass123!",
"language": "en-US",
}
setup_api = SetupApi(api=None)
mock_console_ns = SimpleNamespace(payload=payload)
with (
patch("controllers.console.setup.console_ns", mock_console_ns),
patch("controllers.console.setup.get_setup_status", return_value=False),
patch("controllers.console.setup.TenantService.get_tenant_count", return_value=0),
patch("controllers.console.setup.get_init_validate_status", return_value=True),
patch("controllers.console.setup.extract_remote_ip", return_value="127.0.0.1"),
patch("controllers.console.setup.request", object()),
patch("controllers.console.setup.RegisterService.setup") as mock_register,
):
response, status = setup_api.post()
assert response == {"result": "success"}
assert status == 201
mock_register.assert_called_once_with(
email="admin@example.com",
name=payload["name"],
password=payload["password"],
ip_address="127.0.0.1",
language=payload["language"],
)

View File

@ -1,5 +1,7 @@
from unittest.mock import MagicMock, patch
import pytest
from core.model_runtime.entities.message_entities import AssistantPromptMessage
from core.model_runtime.model_providers.__base.large_language_model import _increase_tool_call
@ -97,3 +99,14 @@ def test__increase_tool_call():
mock_id_generator.side_effect = [_exp_case.id for _exp_case in EXPECTED_CASE_4]
with patch("core.model_runtime.model_providers.__base.large_language_model._gen_tool_call_id", mock_id_generator):
_run_case(INPUTS_CASE_4, EXPECTED_CASE_4)
def test__increase_tool_call__no_id_no_name_first_delta_should_raise():
inputs = [
ToolCall(id="", type="function", function=ToolCall.ToolCallFunction(name="", arguments='{"arg1": ')),
ToolCall(id="", type="function", function=ToolCall.ToolCallFunction(name="func_foo", arguments='"value"}')),
]
actual: list[ToolCall] = []
with patch("core.model_runtime.model_providers.__base.large_language_model._gen_tool_call_id", MagicMock()):
with pytest.raises(ValueError):
_increase_tool_call(inputs, actual)

View File

@ -0,0 +1,103 @@
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
from core.model_runtime.entities.message_entities import (
AssistantPromptMessage,
TextPromptMessageContent,
UserPromptMessage,
)
from core.model_runtime.model_providers.__base.large_language_model import _normalize_non_stream_plugin_result
def _make_chunk(
*,
model: str = "test-model",
content: str | list[TextPromptMessageContent] | None,
tool_calls: list[AssistantPromptMessage.ToolCall] | None = None,
usage: LLMUsage | None = None,
system_fingerprint: str | None = None,
) -> LLMResultChunk:
message = AssistantPromptMessage(content=content, tool_calls=tool_calls or [])
delta = LLMResultChunkDelta(index=0, message=message, usage=usage)
return LLMResultChunk(model=model, delta=delta, system_fingerprint=system_fingerprint)
def test__normalize_non_stream_plugin_result__from_first_chunk_str_content_and_tool_calls():
prompt_messages = [UserPromptMessage(content="hi")]
tool_calls = [
AssistantPromptMessage.ToolCall(
id="1",
type="function",
function=AssistantPromptMessage.ToolCall.ToolCallFunction(name="func_foo", arguments=""),
),
AssistantPromptMessage.ToolCall(
id="",
type="function",
function=AssistantPromptMessage.ToolCall.ToolCallFunction(name="", arguments='{"arg1": '),
),
AssistantPromptMessage.ToolCall(
id="",
type="function",
function=AssistantPromptMessage.ToolCall.ToolCallFunction(name="", arguments='"value"}'),
),
]
usage = LLMUsage.empty_usage().model_copy(update={"prompt_tokens": 1, "total_tokens": 1})
chunk = _make_chunk(content="hello", tool_calls=tool_calls, usage=usage, system_fingerprint="fp-1")
result = _normalize_non_stream_plugin_result(
model="test-model", prompt_messages=prompt_messages, result=iter([chunk])
)
assert result.model == "test-model"
assert result.prompt_messages == prompt_messages
assert result.message.content == "hello"
assert result.usage.prompt_tokens == 1
assert result.system_fingerprint == "fp-1"
assert result.message.tool_calls == [
AssistantPromptMessage.ToolCall(
id="1",
type="function",
function=AssistantPromptMessage.ToolCall.ToolCallFunction(name="func_foo", arguments='{"arg1": "value"}'),
)
]
def test__normalize_non_stream_plugin_result__from_first_chunk_list_content():
prompt_messages = [UserPromptMessage(content="hi")]
content_list = [TextPromptMessageContent(data="a"), TextPromptMessageContent(data="b")]
chunk = _make_chunk(content=content_list, usage=LLMUsage.empty_usage())
result = _normalize_non_stream_plugin_result(
model="test-model", prompt_messages=prompt_messages, result=iter([chunk])
)
assert result.message.content == content_list
def test__normalize_non_stream_plugin_result__passthrough_llm_result():
prompt_messages = [UserPromptMessage(content="hi")]
llm_result = LLMResult(
model="test-model",
prompt_messages=prompt_messages,
message=AssistantPromptMessage(content="ok"),
usage=LLMUsage.empty_usage(),
)
assert (
_normalize_non_stream_plugin_result(model="test-model", prompt_messages=prompt_messages, result=llm_result)
== llm_result
)
def test__normalize_non_stream_plugin_result__empty_iterator_defaults():
prompt_messages = [UserPromptMessage(content="hi")]
result = _normalize_non_stream_plugin_result(model="test-model", prompt_messages=prompt_messages, result=iter([]))
assert result.model == "test-model"
assert result.prompt_messages == prompt_messages
assert result.message.content == []
assert result.message.tool_calls == []
assert result.usage == LLMUsage.empty_usage()
assert result.system_fingerprint is None

View File

@ -1,27 +1,15 @@
import type { StorybookConfig } from '@storybook/nextjs'
import path from 'node:path'
import { fileURLToPath } from 'node:url'
const storybookDir = path.dirname(fileURLToPath(import.meta.url))
import type { StorybookConfig } from '@storybook/nextjs-vite'
const config: StorybookConfig = {
stories: ['../app/components/**/*.stories.@(js|jsx|mjs|ts|tsx)'],
addons: [
'@storybook/addon-onboarding',
// Not working with Storybook Vite framework
// '@storybook/addon-onboarding',
'@storybook/addon-links',
'@storybook/addon-docs',
'@chromatic-com/storybook',
],
framework: {
name: '@storybook/nextjs',
options: {
builder: {
useSWC: true,
lazyCompilation: false,
},
nextConfigPath: undefined,
},
},
framework: '@storybook/nextjs-vite',
staticDirs: ['../public'],
core: {
disableWhatsNewNotifications: true,
@ -29,17 +17,5 @@ const config: StorybookConfig = {
docs: {
defaultName: 'Documentation',
},
webpackFinal: async (config) => {
// Add alias to mock problematic modules with circular dependencies
config.resolve = config.resolve || {}
config.resolve.alias = {
...config.resolve.alias,
// Mock the plugin index files to avoid circular dependencies
[path.resolve(storybookDir, '../app/components/base/prompt-editor/plugins/context-block/index.tsx')]: path.resolve(storybookDir, '__mocks__/context-block.tsx'),
[path.resolve(storybookDir, '../app/components/base/prompt-editor/plugins/history-block/index.tsx')]: path.resolve(storybookDir, '__mocks__/history-block.tsx'),
[path.resolve(storybookDir, '../app/components/base/prompt-editor/plugins/query-block/index.tsx')]: path.resolve(storybookDir, '__mocks__/query-block.tsx'),
}
return config
},
}
export default config

View File

@ -1,3 +1,6 @@
/**
* @vitest-environment jsdom
*/
import type { ReactNode } from 'react'
import type { ModalContextState } from '@/context/modal-context'
import type { ProviderContextState } from '@/context/provider-context'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { RiAddLine, RiDeleteBinLine, RiEditLine, RiMore2Fill, RiSaveLine, RiShareLine } from '@remixicon/react'
import ActionButton, { ActionButtonState } from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { IChatItem } from '@/app/components/base/chat/chat/type'
import type { AgentLogDetailResponse } from '@/models/log'
import { useEffect, useRef } from 'react'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { ReactNode } from 'react'
import AnswerIcon from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { AppIconSelection } from '.'
import { useState } from 'react'
import AppIconPicker from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { ComponentProps } from 'react'
import AppIcon from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { ComponentProps } from 'react'
import { useEffect } from 'react'
import AudioBtn from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import AudioGallery from '.'
const AUDIO_SOURCES = [

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import AutoHeightTextarea from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import Avatar from '.'
const meta = {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import Badge from '../badge'
const meta = {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import BlockInput from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import AddButton from './add-button'
const meta = {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { RocketLaunchIcon } from '@heroicons/react/20/solid'
import { Button } from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import SyncButton from './sync-button'
const meta = {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { ChatItem } from '../../types'
import { WorkflowRunningStatus } from '@/app/components/workflow/types'
import Answer from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { ChatItem } from '../types'
import { User } from '@/app/components/base/icons/src/public/avatar'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import Checkbox from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { Item } from '.'
import { useState } from 'react'
import Chip from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import Confirm from '.'
import Button from '../button'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useEffect, useState } from 'react'
import ContentDialog from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import CopyFeedback, { CopyFeedbackNew } from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import CopyIcon from '.'
const meta = {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import CornerLabel from '.'
const meta = {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { DatePickerProps } from './types'
import { useState } from 'react'
import { fn } from 'storybook/test'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useEffect, useState } from 'react'
import Dialog from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import Divider from '.'
const meta = {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import { fn } from 'storybook/test'
import DrawerPlus from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import { fn } from 'storybook/test'
import Drawer from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { Item } from '.'
import { useState } from 'react'
import { fn } from 'storybook/test'

View File

@ -1,5 +1,5 @@
/* eslint-disable tailwindcss/classnames-order */
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import Effect from '.'
const meta = {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import EmojiPickerInner from './Inner'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import EmojiPicker from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { Features } from './types'
import { useState } from 'react'
import { FeaturesProvider } from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import FileIcon from '.'
const meta = {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import FileImageRender from './file-image-render'
const SAMPLE_IMAGE = 'data:image/svg+xml;utf8,<svg xmlns=\'http://www.w3.org/2000/svg\' width=\'320\' height=\'180\'><defs><linearGradient id=\'grad\' x1=\'0%\' y1=\'0%\' x2=\'100%\' y2=\'100%\'><stop offset=\'0%\' stop-color=\'#FEE2FF\'/><stop offset=\'100%\' stop-color=\'#E0EAFF\'/></linearGradient></defs><rect width=\'320\' height=\'180\' rx=\'18\' fill=\'url(#grad)\'/><text x=\'50%\' y=\'50%\' dominant-baseline=\'middle\' text-anchor=\'middle\' font-family=\'sans-serif\' font-size=\'24\' fill=\'#1F2937\'>Preview</text></svg>'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { FileEntity } from './types'
import { useState } from 'react'
import { SupportUploadFileTypes } from '@/app/components/workflow/types'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import FileTypeIcon from './file-type-icon'
import { FileAppearanceTypeEnum } from './types'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { FileEntity } from '../types'
import type { FileUpload } from '@/app/components/base/features/types'
import { useState } from 'react'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { FileEntity } from '../types'
import type { FileUpload } from '@/app/components/base/features/types'
import { useState } from 'react'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import { fn } from 'storybook/test'
import FloatRightContainer from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { FormStoryRender } from '../../../../.storybook/utils/form-story-wrapper'
import type { FormSchema } from './types'
import { useStore } from '@tanstack/react-form'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import FullScreenModal from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import GridMask from '.'
const meta = {

View File

@ -1,9 +1,9 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
/// <reference types="vite/client" />
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import * as React from 'react'
declare const require: any
type IconComponent = React.ComponentType<Record<string, unknown>>
type IconModule = { default: IconComponent }
type IconEntry = {
name: string
@ -12,18 +12,16 @@ type IconEntry = {
Component: IconComponent
}
const iconContext = require.context('./src', true, /\.tsx$/)
const iconModules: Record<string, IconModule> = import.meta.glob('./src/**/*.tsx', { eager: true })
const iconEntries: IconEntry[] = iconContext
.keys()
.filter((key: string) => !key.endsWith('.stories.tsx') && !key.endsWith('.spec.tsx'))
.map((key: string) => {
const mod = iconContext(key)
const Component = mod.default as IconComponent | undefined
const iconEntries: IconEntry[] = Object.entries(iconModules)
.filter(([key]) => !key.endsWith('.stories.tsx') && !key.endsWith('.spec.tsx'))
.map(([key, mod]) => {
const Component = mod.default
if (!Component)
return null
const relativePath = key.replace(/^\.\//, '')
const relativePath = key.replace(/^\.\/src\//, '')
const path = `app/components/base/icons/src/${relativePath}`
const parts = relativePath.split('/')
const fileName = parts.pop() || ''

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import ImageGallery from '.'
const IMAGE_SOURCES = [

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { ImageFile } from '@/types/app'
import { useMemo, useState } from 'react'
import { TransferMethod } from '@/types/app'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import { fn } from 'storybook/test'
import InlineDeleteConfirm from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import { InputNumber } from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import Input from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { RelatedApp } from '@/models/datasets'
import { AppModeEnum } from '@/types/app'
import LinkedAppsPanel from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import ListEmpty from '.'
const meta = {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import Loading from '.'
const meta = {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { ReactNode } from 'react'
import { ThemeProvider } from 'next-themes'
import DifyLogo from './dify-logo'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import CodeBlock from './code-block'
const SAMPLE_CODE = `const greet = (name: string) => {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import { ChatContextProvider } from '@/app/components/base/chat/chat/context'
import ThinkBlock from './think-block'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import { Markdown } from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import Flowchart from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { IChatItem } from '@/app/components/base/chat/chat/type'
import type { WorkflowRunDetailResponse } from '@/models/log'
import type { NodeTracing, NodeTracingListResponse } from '@/types/workflow'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import ModalLikeWrap from '.'
const meta = {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useEffect, useState } from 'react'
import Modal from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useEffect, useState } from 'react'
import Modal from './modal'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { ComponentProps } from 'react'
import { useEffect } from 'react'
import AudioBtn from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import NotionConnector from '.'
const meta = {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import NotionIcon from '.'
const meta = {

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { DataSourceCredential } from '@/app/components/header/account-setting/data-source-page-new/types'
import type { NotionPage } from '@/models/common'
import { useEffect, useMemo, useState } from 'react'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useMemo, useState } from 'react'
import Pagination from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import ParamItem from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import CustomPopover from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import {
PortalToFollowElem,

View File

@ -61,9 +61,12 @@ export function usePortalToFollowElem({
}),
shift({ padding: 5 }),
size({
apply({ rects, elements }) {
if (triggerPopupSameWidth)
elements.floating.style.width = `${rects.reference.width}px`
apply({ rects, elements, availableHeight }) {
Object.assign(elements.floating.style, {
maxHeight: `${Math.max(0, availableHeight)}px`,
overflowY: 'auto',
...(triggerPopupSameWidth && { width: `${rects.reference.width}px` }),
})
},
}),
],

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import PremiumBadge from '.'
const colors: Array<NonNullable<React.ComponentProps<typeof PremiumBadge>['color']>> = ['blue', 'indigo', 'gray', 'orange']

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import ProgressCircle from './progress-circle'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
// Mock component to avoid complex initialization issues

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { IChatItem } from '@/app/components/base/chat/chat/type'
import { useEffect } from 'react'
import { useStore } from '@/app/components/app/store'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import ShareQRCode from '.'
const QRDemo = ({

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { RiCloudLine, RiCpuLine, RiDatabase2Line, RiLightbulbLine, RiRocketLine, RiShieldLine } from '@remixicon/react'
import { useState } from 'react'
import RadioCard from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import Radio from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import SearchInput from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { RiLineChartLine, RiListCheck2, RiRobot2Line } from '@remixicon/react'
import { useState } from 'react'
import { SegmentedControl } from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import type { Item } from '.'
import { useState } from 'react'
import Select, { PortalSelect, SimpleSelect } from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useMemo, useState } from 'react'
import SimplePieChart from '.'

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import {
SkeletonContainer,
SkeletonPoint,

View File

@ -1,4 +1,4 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { useState } from 'react'
import Slider from '.'

Some files were not shown because too many files have changed in this diff.