Mirror of https://github.com/langgenius/dify.git (synced 2026-02-20 10:05:55 +08:00)
Compare commits
36 Commits
Comparing release/e-... to 1.5.1
| SHA1 |
|---|
| bb955806e0 |
| 0c39490bb1 |
| 826bf25abf |
| 89250a36b7 |
| c2e599cd85 |
| 71d6cf1b1d |
| 86179beaa5 |
| f53b177e1f |
| 58dfe2ca03 |
| c4b960cc1a |
| 70035aa9a9 |
| a82943a83d |
| 9a4c1fe834 |
| b9ff716c18 |
| 8516d15a4e |
| 4198a533ad |
| a67441689a |
| 5c11c22302 |
| 1a7ad195f0 |
| cf2173644e |
| 1b99e44e99 |
| b8b9c3a783 |
| 25de39d9c6 |
| 2455135eaa |
| dffbdd140c |
| 69b6f6f5d2 |
| 6013d90426 |
| 9ded6f6a40 |
| 7c76458b18 |
| eb9edf4908 |
| 55a6b330ec |
| 96d27d7087 |
| 9588a64487 |
| 18757d07c9 |
| 0f23e3d9ab |
| 765adabb32 |
.github/workflows/api-tests.yml (vendored, 8 changed lines)

@@ -47,15 +47,17 @@ jobs:
       - name: Run Unit tests
         run: |
           uv run --project api bash dev/pytest/pytest_unit_tests.sh

       - name: Coverage Summary
         run: |
+          set -x
           # Extract coverage percentage and create a summary
           TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])')

           # Create a detailed coverage summary
           echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY
           echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY
-          echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
-          uv run --project api coverage report >> $GITHUB_STEP_SUMMARY
-          echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
+          uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY
+
+      - name: Run dify config tests
+        run: uv run --project api dev/pytest/pytest_config_tests.py
.gitignore (vendored, 1 changed line)

@@ -214,3 +214,4 @@ mise.toml

 # AI Assistant
 .roo/
+api/.env.backup
@@ -1,8 +1,11 @@
 import logging
+from pathlib import Path
 from typing import Any

 from pydantic.fields import FieldInfo
-from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict
+from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict, TomlConfigSettingsSource
+
+from libs.file_utils import search_file_upwards

 from .deploy import DeploymentConfig
 from .enterprise import EnterpriseFeatureConfig
@@ -99,4 +102,12 @@ class DifyConfig(
             RemoteSettingsSourceFactory(settings_cls),
             dotenv_settings,
             file_secret_settings,
+            TomlConfigSettingsSource(
+                settings_cls=settings_cls,
+                toml_file=search_file_upwards(
+                    base_dir_path=Path(__file__).parent,
+                    target_file_name="pyproject.toml",
+                    max_search_parent_depth=2,
+                ),
+            ),
         )
@@ -1,15 +1,16 @@
 from pydantic import Field
-from pydantic_settings import BaseSettings
+
+from configs.packaging.pyproject import PyProjectConfig, PyProjectTomlConfig


-class PackagingInfo(BaseSettings):
+class PackagingInfo(PyProjectTomlConfig):
     """
     Packaging build information
     """

     CURRENT_VERSION: str = Field(
         description="Dify version",
-        default="1.5.0",
+        default="1.5.1",
     )

     COMMIT_SHA: str = Field(
api/configs/packaging/pyproject.py (new file, 17 lines)

@@ -0,0 +1,17 @@
+from pydantic import BaseModel, Field
+from pydantic_settings import BaseSettings
+
+
+class PyProjectConfig(BaseModel):
+    version: str = Field(description="Dify version", default="")
+
+
+class PyProjectTomlConfig(BaseSettings):
+    """
+    configs in api/pyproject.toml
+    """
+
+    project: PyProjectConfig = Field(
+        description="configs in the project section of pyproject.toml",
+        default=PyProjectConfig(),
+    )
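With `PyProjectTomlConfig` in place, the API version is read from `pyproject.toml` at startup instead of relying on the hard-coded `CURRENT_VERSION` default. Below is a minimal, self-contained sketch of the same pydantic-settings mechanism; the class names and the temporary file are illustrative, not Dify code:

```python
# Sketch: populate a nested `project.version` field from a pyproject.toml
# via TomlConfigSettingsSource, mirroring the pattern in this changeset.
import tempfile
from pathlib import Path

from pydantic import BaseModel, Field
from pydantic_settings import BaseSettings, TomlConfigSettingsSource


class ProjectSection(BaseModel):
    version: str = Field(default="")


def load_version(toml_path: Path) -> str:
    class Settings(BaseSettings):
        project: ProjectSection = Field(default=ProjectSection())

        @classmethod
        def settings_customise_sources(
            cls, settings_cls, init_settings, env_settings, dotenv_settings, file_secret_settings
        ):
            # The TOML source is appended last, so init/env values still win,
            # matching the source ordering in the DifyConfig hunk above.
            return (
                init_settings,
                env_settings,
                dotenv_settings,
                file_secret_settings,
                TomlConfigSettingsSource(settings_cls, toml_file=toml_path),
            )

    return Settings().project.version


with tempfile.TemporaryDirectory() as tmp:
    pyproject = Path(tmp) / "pyproject.toml"
    pyproject.write_text('[project]\nversion = "1.5.1"\n')
    assert load_version(pyproject) == "1.5.1"
```

This is why the rest of the changeset can replace `dify_config.CURRENT_VERSION` with `dify_config.project.version`: the version now has a single source of truth in `pyproject.toml`.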
@@ -41,7 +41,7 @@ class OAuthDataSource(Resource):
             if not internal_secret:
                 return ({"error": "Internal secret is not set"},)
             oauth_provider.save_internal_access_token(internal_secret)
-            return {"data": ""}
+            return {"data": "internal"}
         else:
             auth_url = oauth_provider.get_authorization_url()
             return {"data": auth_url}, 200
@@ -18,7 +18,7 @@ class VersionApi(Resource):
        check_update_url = dify_config.CHECK_UPDATE_URL

        result = {
-           "version": dify_config.CURRENT_VERSION,
+           "version": dify_config.project.version,
            "release_date": "",
            "release_notes": "",
            "can_auto_update": False,
@@ -3,7 +3,7 @@ import json
 from flask import request
 from flask_restful import marshal, reqparse
 from sqlalchemy import desc, select
-from werkzeug.exceptions import NotFound
+from werkzeug.exceptions import Forbidden, NotFound

 import services
 from controllers.common.errors import FilenameNotExistsError
@@ -18,6 +18,7 @@ from controllers.service_api.app.error import (
 from controllers.service_api.dataset.error import (
     ArchivedDocumentImmutableError,
     DocumentIndexingError,
+    InvalidMetadataError,
 )
 from controllers.service_api.wraps import (
     DatasetApiResource,
@@ -466,6 +467,101 @@ class DocumentIndexingStatusApi(DatasetApiResource):
         return data


+class DocumentDetailApi(DatasetApiResource):
+    METADATA_CHOICES = {"all", "only", "without"}
+
+    def get(self, tenant_id, dataset_id, document_id):
+        dataset_id = str(dataset_id)
+        document_id = str(document_id)
+
+        dataset = self.get_dataset(dataset_id, tenant_id)
+
+        document = DocumentService.get_document(dataset.id, document_id)
+
+        if not document:
+            raise NotFound("Document not found.")
+
+        if document.tenant_id != str(tenant_id):
+            raise Forbidden("No permission.")
+
+        metadata = request.args.get("metadata", "all")
+        if metadata not in self.METADATA_CHOICES:
+            raise InvalidMetadataError(f"Invalid metadata value: {metadata}")
+
+        if metadata == "only":
+            response = {"id": document.id, "doc_type": document.doc_type, "doc_metadata": document.doc_metadata_details}
+        elif metadata == "without":
+            dataset_process_rules = DatasetService.get_process_rules(dataset_id)
+            document_process_rules = document.dataset_process_rule.to_dict()
+            data_source_info = document.data_source_detail_dict
+            response = {
+                "id": document.id,
+                "position": document.position,
+                "data_source_type": document.data_source_type,
+                "data_source_info": data_source_info,
+                "dataset_process_rule_id": document.dataset_process_rule_id,
+                "dataset_process_rule": dataset_process_rules,
+                "document_process_rule": document_process_rules,
+                "name": document.name,
+                "created_from": document.created_from,
+                "created_by": document.created_by,
+                "created_at": document.created_at.timestamp(),
+                "tokens": document.tokens,
+                "indexing_status": document.indexing_status,
+                "completed_at": int(document.completed_at.timestamp()) if document.completed_at else None,
+                "updated_at": int(document.updated_at.timestamp()) if document.updated_at else None,
+                "indexing_latency": document.indexing_latency,
+                "error": document.error,
+                "enabled": document.enabled,
+                "disabled_at": int(document.disabled_at.timestamp()) if document.disabled_at else None,
+                "disabled_by": document.disabled_by,
+                "archived": document.archived,
+                "segment_count": document.segment_count,
+                "average_segment_length": document.average_segment_length,
+                "hit_count": document.hit_count,
+                "display_status": document.display_status,
+                "doc_form": document.doc_form,
+                "doc_language": document.doc_language,
+            }
+        else:
+            dataset_process_rules = DatasetService.get_process_rules(dataset_id)
+            document_process_rules = document.dataset_process_rule.to_dict()
+            data_source_info = document.data_source_detail_dict
+            response = {
+                "id": document.id,
+                "position": document.position,
+                "data_source_type": document.data_source_type,
+                "data_source_info": data_source_info,
+                "dataset_process_rule_id": document.dataset_process_rule_id,
+                "dataset_process_rule": dataset_process_rules,
+                "document_process_rule": document_process_rules,
+                "name": document.name,
+                "created_from": document.created_from,
+                "created_by": document.created_by,
+                "created_at": document.created_at.timestamp(),
+                "tokens": document.tokens,
+                "indexing_status": document.indexing_status,
+                "completed_at": int(document.completed_at.timestamp()) if document.completed_at else None,
+                "updated_at": int(document.updated_at.timestamp()) if document.updated_at else None,
+                "indexing_latency": document.indexing_latency,
+                "error": document.error,
+                "enabled": document.enabled,
+                "disabled_at": int(document.disabled_at.timestamp()) if document.disabled_at else None,
+                "disabled_by": document.disabled_by,
+                "archived": document.archived,
+                "doc_type": document.doc_type,
+                "doc_metadata": document.doc_metadata_details,
+                "segment_count": document.segment_count,
+                "average_segment_length": document.average_segment_length,
+                "hit_count": document.hit_count,
+                "display_status": document.display_status,
+                "doc_form": document.doc_form,
+                "doc_language": document.doc_language,
+            }
+
+        return response
+
+
 api.add_resource(
     DocumentAddByTextApi,
     "/datasets/<uuid:dataset_id>/document/create_by_text",
@@ -489,3 +585,4 @@ api.add_resource(
 api.add_resource(DocumentDeleteApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
 api.add_resource(DocumentListApi, "/datasets/<uuid:dataset_id>/documents")
 api.add_resource(DocumentIndexingStatusApi, "/datasets/<uuid:dataset_id>/documents/<string:batch>/indexing-status")
+api.add_resource(DocumentDetailApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
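The new resource is registered at `/datasets/<uuid:dataset_id>/documents/<uuid:document_id>` and accepts an optional `metadata` query parameter (`all`, `only`, or `without`). A hedged client sketch follows; the base URL, token, and IDs are placeholders, not values from this changeset:

```python
# Hypothetical call against the new service-api endpoint added above.
import requests

API_BASE = "https://api.dify.example/v1"  # assumption: your service-api root
API_KEY = "dataset-xxxxxxxx"              # assumption: a dataset API token


def get_document_detail(dataset_id: str, document_id: str, metadata: str = "all") -> dict:
    """metadata must be one of {"all", "only", "without"}, per METADATA_CHOICES."""
    resp = requests.get(
        f"{API_BASE}/datasets/{dataset_id}/documents/{document_id}",
        headers={"Authorization": f"Bearer {API_KEY}"},
        params={"metadata": metadata},
        timeout=10,
    )
    resp.raise_for_status()
    return resp.json()


# e.g. fetch only the metadata block:
# detail = get_document_detail("<dataset-uuid>", "<document-uuid>", metadata="only")
```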
@@ -9,7 +9,7 @@ class IndexApi(Resource):
        return {
            "welcome": "Dify OpenAPI",
            "api_version": "v1",
-           "server_version": dify_config.CURRENT_VERSION,
+           "server_version": dify_config.project.version,
        }
@@ -11,13 +11,13 @@ from flask_restful import Resource
 from pydantic import BaseModel
 from sqlalchemy import select, update
 from sqlalchemy.orm import Session
-from werkzeug.exceptions import Forbidden, Unauthorized
+from werkzeug.exceptions import Forbidden, NotFound, Unauthorized

 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
 from libs.login import _get_user
 from models.account import Account, Tenant, TenantAccountJoin, TenantStatus
-from models.dataset import RateLimitLog
+from models.dataset import Dataset, RateLimitLog
 from models.model import ApiToken, App, EndUser
 from services.feature_service import FeatureService
@@ -317,3 +317,11 @@ def create_or_update_end_user_for_user_id(app_model: App, user_id: Optional[str]

 class DatasetApiResource(Resource):
     method_decorators = [validate_dataset_token]
+
+    def get_dataset(self, dataset_id: str, tenant_id: str) -> Dataset:
+        dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id, Dataset.tenant_id == tenant_id).first()
+
+        if not dataset:
+            raise NotFound("Dataset not found.")
+
+        return dataset
@@ -27,6 +27,9 @@ from core.ops.ops_trace_manager import TraceQueueManager
 from core.prompt.utils.get_thread_messages_length import get_thread_messages_length
 from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository
+from core.workflow.repositories.draft_variable_repository import (
+    DraftVariableSaverFactory,
+)
 from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
 from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader
@@ -36,7 +39,10 @@ from libs.flask_utils import preserve_flask_contexts
 from models import Account, App, Conversation, EndUser, Message, Workflow, WorkflowNodeExecutionTriggeredFrom
 from models.enums import WorkflowRunTriggeredFrom
 from services.conversation_service import ConversationService
-from services.workflow_draft_variable_service import DraftVarLoader, WorkflowDraftVariableService
+from services.workflow_draft_variable_service import (
+    DraftVarLoader,
+    WorkflowDraftVariableService,
+)

 logger = logging.getLogger(__name__)

@@ -450,6 +456,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
             stream=stream,
+            draft_var_saver_factory=self._get_draft_var_saver_factory(invoke_from),
         )

         return AdvancedChatAppGenerateResponseConverter.convert(response=response, invoke_from=invoke_from)
@@ -521,6 +528,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         user: Union[Account, EndUser],
         workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
+        draft_var_saver_factory: DraftVariableSaverFactory,
         stream: bool = False,
     ) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]:
         """
@@ -547,6 +555,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
             stream=stream,
+            draft_var_saver_factory=draft_var_saver_factory,
         )

         try:
@@ -64,6 +64,7 @@ from core.workflow.entities.workflow_execution import WorkflowExecutionStatus, W
 from core.workflow.enums import SystemVariableKey
 from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
 from core.workflow.nodes import NodeType
+from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
 from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
 from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager
@@ -94,6 +95,7 @@ class AdvancedChatAppGenerateTaskPipeline:
         dialogue_count: int,
         workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
+        draft_var_saver_factory: DraftVariableSaverFactory,
     ) -> None:
         self._base_task_pipeline = BasedGenerateTaskPipeline(
             application_generate_entity=application_generate_entity,
@@ -153,6 +155,7 @@ class AdvancedChatAppGenerateTaskPipeline:
         self._conversation_name_generate_thread: Thread | None = None
         self._recorded_files: list[Mapping[str, Any]] = []
         self._workflow_run_id: str = ""
+        self._draft_var_saver_factory = draft_var_saver_factory

     def process(self) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]:
         """
@@ -371,6 +374,7 @@ class AdvancedChatAppGenerateTaskPipeline:
                     workflow_node_execution=workflow_node_execution,
                 )
                 session.commit()
+                self._save_output_for_event(event, workflow_node_execution.id)

                 if node_finish_resp:
                     yield node_finish_resp
@@ -390,6 +394,8 @@ class AdvancedChatAppGenerateTaskPipeline:
                 task_id=self._application_generate_entity.task_id,
                 workflow_node_execution=workflow_node_execution,
             )
+            if isinstance(event, QueueNodeExceptionEvent):
+                self._save_output_for_event(event, workflow_node_execution.id)

             if node_finish_resp:
                 yield node_finish_resp
@@ -759,3 +765,15 @@ class AdvancedChatAppGenerateTaskPipeline:
         if not message:
             raise ValueError(f"Message not found: {self._message_id}")
         return message
+
+    def _save_output_for_event(self, event: QueueNodeSucceededEvent | QueueNodeExceptionEvent, node_execution_id: str):
+        with Session(db.engine) as session, session.begin():
+            saver = self._draft_var_saver_factory(
+                session=session,
+                app_id=self._application_generate_entity.app_config.app_id,
+                node_id=event.node_id,
+                node_type=event.node_type,
+                node_execution_id=node_execution_id,
+                enclosing_node_id=event.in_loop_id or event.in_iteration_id,
+            )
+            saver.save(event.process_data, event.outputs)
@@ -1,10 +1,20 @@
 import json
 from collections.abc import Generator, Mapping, Sequence
-from typing import TYPE_CHECKING, Any, Optional, Union
+from typing import TYPE_CHECKING, Any, Optional, Union, final
+
+from sqlalchemy.orm import Session

 from core.app.app_config.entities import VariableEntityType
 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.file import File, FileUploadConfig
+from core.workflow.nodes.enums import NodeType
+from core.workflow.repositories.draft_variable_repository import (
+    DraftVariableSaver,
+    DraftVariableSaverFactory,
+    NoopDraftVariableSaver,
+)
 from factories import file_factory
+from services.workflow_draft_variable_service import DraftVariableSaver as DraftVariableSaverImpl

 if TYPE_CHECKING:
     from core.app.app_config.entities import VariableEntity
@@ -159,3 +169,38 @@ class BaseAppGenerator:
             yield f"event: {message}\n\n"

         return gen()
+
+    @final
+    @staticmethod
+    def _get_draft_var_saver_factory(invoke_from: InvokeFrom) -> DraftVariableSaverFactory:
+        if invoke_from == InvokeFrom.DEBUGGER:
+
+            def draft_var_saver_factory(
+                session: Session,
+                app_id: str,
+                node_id: str,
+                node_type: NodeType,
+                node_execution_id: str,
+                enclosing_node_id: str | None = None,
+            ) -> DraftVariableSaver:
+                return DraftVariableSaverImpl(
+                    session=session,
+                    app_id=app_id,
+                    node_id=node_id,
+                    node_type=node_type,
+                    node_execution_id=node_execution_id,
+                    enclosing_node_id=enclosing_node_id,
+                )
+        else:
+
+            def draft_var_saver_factory(
+                session: Session,
+                app_id: str,
+                node_id: str,
+                node_type: NodeType,
+                node_execution_id: str,
+                enclosing_node_id: str | None = None,
+            ) -> DraftVariableSaver:
+                return NoopDraftVariableSaver()
+
+        return draft_var_saver_factory
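The effect of `_get_draft_var_saver_factory` is that only debugger-invoked runs persist draft variables, while every other invocation receives a no-op saver. A simplified, standalone illustration of that selection logic (the classes below are local stand-ins, not the Dify types):

```python
# Sketch of the factory-selection pattern used above, under the assumption
# that only debugger runs should write anything.
from collections.abc import Mapping
from enum import Enum
from typing import Any, Callable, Optional


class InvokeFrom(Enum):
    DEBUGGER = "debugger"
    SERVICE_API = "service-api"


class NoopSaver:
    def save(self, process_data: Optional[Mapping[str, Any]], outputs: Optional[Mapping[str, Any]]) -> None:
        pass  # intentionally does nothing outside debugger runs


class PersistingSaver:
    def save(self, process_data: Optional[Mapping[str, Any]], outputs: Optional[Mapping[str, Any]]) -> None:
        print("persisting draft variables:", outputs)


def make_saver_factory(invoke_from: InvokeFrom) -> Callable[[], Any]:
    if invoke_from == InvokeFrom.DEBUGGER:
        return lambda: PersistingSaver()
    return lambda: NoopSaver()


make_saver_factory(InvokeFrom.DEBUGGER)().save(None, {"answer": "hi"})     # persists
make_saver_factory(InvokeFrom.SERVICE_API)().save(None, {"answer": "hi"})  # no-op
```

The design benefit is that the task pipelines no longer need to know about `InvokeFrom` at all: they simply call whatever factory they were handed, which is also why `invoke_from` could be dropped from the saver's constructor later in this changeset.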
@@ -44,6 +44,7 @@ from core.app.entities.task_entities import (
 )
 from core.file import FILE_MODEL_IDENTITY, File
 from core.tools.tool_manager import ToolManager
+from core.variables.segments import ArrayFileSegment, FileSegment, Segment
 from core.workflow.entities.workflow_execution import WorkflowExecution
 from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution, WorkflowNodeExecutionStatus
 from core.workflow.nodes import NodeType
@@ -506,7 +507,8 @@ class WorkflowResponseConverter:
         # Convert to tuple to match Sequence type
         return tuple(flattened_files)

-    def _fetch_files_from_variable_value(self, value: Union[dict, list]) -> Sequence[Mapping[str, Any]]:
+    @classmethod
+    def _fetch_files_from_variable_value(cls, value: Union[dict, list, Segment]) -> Sequence[Mapping[str, Any]]:
         """
         Fetch files from variable value
         :param value: variable value
@@ -515,20 +517,30 @@ class WorkflowResponseConverter:
         if not value:
             return []

-        files = []
-        if isinstance(value, list):
+        files: list[Mapping[str, Any]] = []
+        if isinstance(value, FileSegment):
+            files.append(value.value.to_dict())
+        elif isinstance(value, ArrayFileSegment):
+            files.extend([i.to_dict() for i in value.value])
+        elif isinstance(value, File):
+            files.append(value.to_dict())
+        elif isinstance(value, list):
             for item in value:
-                file = self._get_file_var_from_value(item)
+                file = cls._get_file_var_from_value(item)
                 if file:
                     files.append(file)
-        elif isinstance(value, dict):
-            file = self._get_file_var_from_value(value)
+        elif isinstance(
+            value,
+            dict,
+        ):
+            file = cls._get_file_var_from_value(value)
             if file:
                 files.append(file)

         return files

-    def _get_file_var_from_value(self, value: Union[dict, list]) -> Mapping[str, Any] | None:
+    @classmethod
+    def _get_file_var_from_value(cls, value: Union[dict, list]) -> Mapping[str, Any] | None:
         """
         Get file var from value
         :param value: variable value
@@ -25,6 +25,7 @@ from core.model_runtime.errors.invoke import InvokeAuthorizationError
 from core.ops.ops_trace_manager import TraceQueueManager
 from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository
+from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
 from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
 from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader
@@ -219,6 +220,9 @@ class WorkflowAppGenerator(BaseAppGenerator):
         # new thread with request context and contextvars
         context = contextvars.copy_context()

+        # release database connection, because the following new thread operations may take a long time
+        db.session.close()
+
         worker_thread = threading.Thread(
             target=self._generate_worker,
             kwargs={
@@ -233,6 +237,10 @@ class WorkflowAppGenerator(BaseAppGenerator):

         worker_thread.start()

+        draft_var_saver_factory = self._get_draft_var_saver_factory(
+            invoke_from,
+        )
+
         # return response or stream generator
         response = self._handle_response(
             application_generate_entity=application_generate_entity,
@@ -241,6 +249,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
             user=user,
             workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
+            draft_var_saver_factory=draft_var_saver_factory,
             stream=streaming,
         )

@@ -471,6 +480,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
         user: Union[Account, EndUser],
         workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
+        draft_var_saver_factory: DraftVariableSaverFactory,
         stream: bool = False,
     ) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:
         """
@@ -491,6 +501,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
             user=user,
             workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
+            draft_var_saver_factory=draft_var_saver_factory,
             stream=stream,
         )
@@ -56,6 +56,7 @@ from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
 from core.ops.ops_trace_manager import TraceQueueManager
 from core.workflow.entities.workflow_execution import WorkflowExecution, WorkflowExecutionStatus, WorkflowType
 from core.workflow.enums import SystemVariableKey
+from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
 from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
 from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager
@@ -87,6 +88,7 @@ class WorkflowAppGenerateTaskPipeline:
         stream: bool,
         workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
+        draft_var_saver_factory: DraftVariableSaverFactory,
     ) -> None:
         self._base_task_pipeline = BasedGenerateTaskPipeline(
             application_generate_entity=application_generate_entity,
@@ -131,6 +133,8 @@ class WorkflowAppGenerateTaskPipeline:
         self._application_generate_entity = application_generate_entity
         self._workflow_features_dict = workflow.features_dict
         self._workflow_run_id = ""
+        self._invoke_from = queue_manager._invoke_from
+        self._draft_var_saver_factory = draft_var_saver_factory

     def process(self) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:
         """
@@ -322,6 +326,8 @@ class WorkflowAppGenerateTaskPipeline:
                     workflow_node_execution=workflow_node_execution,
                 )

+                self._save_output_for_event(event, workflow_node_execution.id)
+
                 if node_success_response:
                     yield node_success_response
             elif isinstance(
@@ -339,6 +345,8 @@ class WorkflowAppGenerateTaskPipeline:
                 task_id=self._application_generate_entity.task_id,
                 workflow_node_execution=workflow_node_execution,
             )
+            if isinstance(event, QueueNodeExceptionEvent):
+                self._save_output_for_event(event, workflow_node_execution.id)

             if node_failed_response:
                 yield node_failed_response
@@ -593,3 +601,15 @@ class WorkflowAppGenerateTaskPipeline:
         )

         return response
+
+    def _save_output_for_event(self, event: QueueNodeSucceededEvent | QueueNodeExceptionEvent, node_execution_id: str):
+        with Session(db.engine) as session, session.begin():
+            saver = self._draft_var_saver_factory(
+                session=session,
+                app_id=self._application_generate_entity.app_config.app_id,
+                node_id=event.node_id,
+                node_type=event.node_type,
+                node_execution_id=node_execution_id,
+                enclosing_node_id=event.in_loop_id or event.in_iteration_id,
+            )
+            saver.save(event.process_data, event.outputs)
@@ -1,8 +1,6 @@
 from collections.abc import Mapping
 from typing import Any, Optional, cast

-from sqlalchemy.orm import Session
-
 from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
 from core.app.apps.base_app_runner import AppRunner
 from core.app.entities.queue_entities import (
@@ -35,7 +33,6 @@ from core.workflow.entities.variable_pool import VariablePool
 from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey
 from core.workflow.graph_engine.entities.event import (
     AgentLogEvent,
-    BaseNodeEvent,
     GraphEngineEvent,
     GraphRunFailedEvent,
     GraphRunPartialSucceededEvent,
@@ -70,9 +67,6 @@ from core.workflow.workflow_entry import WorkflowEntry
 from extensions.ext_database import db
 from models.model import App
 from models.workflow import Workflow
-from services.workflow_draft_variable_service import (
-    DraftVariableSaver,
-)


 class WorkflowBasedAppRunner(AppRunner):
@@ -400,7 +394,6 @@ class WorkflowBasedAppRunner(AppRunner):
                     in_loop_id=event.in_loop_id,
                 )
             )
-            self._save_draft_var_for_event(event)

         elif isinstance(event, NodeRunFailedEvent):
             self._publish_event(
@@ -464,7 +457,6 @@ class WorkflowBasedAppRunner(AppRunner):
                     in_loop_id=event.in_loop_id,
                 )
             )
-            self._save_draft_var_for_event(event)

         elif isinstance(event, NodeInIterationFailedEvent):
             self._publish_event(
@@ -718,30 +710,3 @@ class WorkflowBasedAppRunner(AppRunner):

     def _publish_event(self, event: AppQueueEvent) -> None:
         self.queue_manager.publish(event, PublishFrom.APPLICATION_MANAGER)
-
-    def _save_draft_var_for_event(self, event: BaseNodeEvent):
-        run_result = event.route_node_state.node_run_result
-        if run_result is None:
-            return
-        process_data = run_result.process_data
-        outputs = run_result.outputs
-        with Session(bind=db.engine) as session, session.begin():
-            draft_var_saver = DraftVariableSaver(
-                session=session,
-                app_id=self._get_app_id(),
-                node_id=event.node_id,
-                node_type=event.node_type,
-                # FIXME(QuantumGhost): rely on private state of queue_manager is not ideal.
-                invoke_from=self.queue_manager._invoke_from,
-                node_execution_id=event.id,
-                enclosing_node_id=event.in_loop_id or event.in_iteration_id or None,
-            )
-            draft_var_saver.save(process_data=process_data, outputs=outputs)
-
-
-def _remove_first_element_from_variable_string(key: str) -> str:
-    """
-    Remove the first element from the prefix.
-    """
-    prefix, remaining = key.split(".", maxsplit=1)
-    return remaining
@@ -395,6 +395,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
        message.provider_response_latency = time.perf_counter() - self._start_at
        message.total_price = usage.total_price
        message.currency = usage.currency
+       self._task_state.llm_result.usage.latency = message.provider_response_latency
        message.message_metadata = self._task_state.metadata.model_dump_json()

        if trace_manager:
@@ -83,6 +83,7 @@ class LangFuseDataTrace(BaseTraceInstance):
             metadata=metadata,
             session_id=trace_info.conversation_id,
             tags=["message", "workflow"],
+            version=trace_info.workflow_run_version,
         )
         self.add_trace(langfuse_trace_data=trace_data)
         workflow_span_data = LangfuseSpan(
@@ -108,6 +109,7 @@ class LangFuseDataTrace(BaseTraceInstance):
             metadata=metadata,
             session_id=trace_info.conversation_id,
             tags=["workflow"],
+            version=trace_info.workflow_run_version,
         )
         self.add_trace(langfuse_trace_data=trace_data)

@@ -172,37 +174,7 @@ class LangFuseDataTrace(BaseTraceInstance):
                 }
             )

-        # add span
-        if trace_info.message_id:
-            span_data = LangfuseSpan(
-                id=node_execution_id,
-                name=node_type,
-                input=inputs,
-                output=outputs,
-                trace_id=trace_id,
-                start_time=created_at,
-                end_time=finished_at,
-                metadata=metadata,
-                level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR),
-                status_message=trace_info.error or "",
-                parent_observation_id=trace_info.workflow_run_id,
-            )
-        else:
-            span_data = LangfuseSpan(
-                id=node_execution_id,
-                name=node_type,
-                input=inputs,
-                output=outputs,
-                trace_id=trace_id,
-                start_time=created_at,
-                end_time=finished_at,
-                metadata=metadata,
-                level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR),
-                status_message=trace_info.error or "",
-            )
-
-        self.add_span(langfuse_span_data=span_data)
-
         # add generation span
         if process_data and process_data.get("model_mode") == "chat":
             total_token = metadata.get("total_tokens", 0)
             prompt_tokens = 0
@@ -226,10 +198,10 @@ class LangFuseDataTrace(BaseTraceInstance):
             )

             node_generation_data = LangfuseGeneration(
-                name="llm",
+                id=node_execution_id,
+                name=node_name,
                 trace_id=trace_id,
                 model=process_data.get("model_name"),
-                parent_observation_id=node_execution_id,
                 start_time=created_at,
                 end_time=finished_at,
                 input=inputs,
@@ -237,11 +209,30 @@ class LangFuseDataTrace(BaseTraceInstance):
                 metadata=metadata,
                 level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR),
                 status_message=trace_info.error or "",
+                parent_observation_id=trace_info.workflow_run_id if trace_info.message_id else None,
                 usage=generation_usage,
             )

             self.add_generation(langfuse_generation_data=node_generation_data)

+        # add normal span
+        else:
+            span_data = LangfuseSpan(
+                id=node_execution_id,
+                name=node_name,
+                input=inputs,
+                output=outputs,
+                trace_id=trace_id,
+                start_time=created_at,
+                end_time=finished_at,
+                metadata=metadata,
+                level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR),
+                status_message=trace_info.error or "",
+                parent_observation_id=trace_info.workflow_run_id if trace_info.message_id else None,
+            )
+
+            self.add_span(langfuse_span_data=span_data)
+
     def message_trace(self, trace_info: MessageTraceInfo, **kwargs):
         # get message file data
         file_list = trace_info.file_list
@@ -284,7 +275,7 @@ class LangFuseDataTrace(BaseTraceInstance):
         )
         self.add_trace(langfuse_trace_data=trace_data)

-        # start add span
+        # add generation
         generation_usage = GenerationUsage(
             input=trace_info.message_tokens,
             output=trace_info.answer_tokens,
@@ -42,4 +42,4 @@ class DynamicSelectClient(BasePluginClient):
        for options in response:
            return options

-       raise ValueError("Plugin service returned no options")
+       raise ValueError(f"Plugin service returned no options for parameter '{parameter}' in provider '{provider}'")
@@ -1010,6 +1010,9 @@ class DatasetRetrieval:
     def _process_metadata_filter_func(
         self, sequence: int, condition: str, metadata_name: str, value: Optional[Any], filters: list
     ):
+        if value is None:
+            return
+
         key = f"{metadata_name}_{sequence}"
         key_value = f"{metadata_name}_{sequence}_value"
         match condition:
@@ -4,6 +4,7 @@ from typing import Any, Optional
 from core.helper.code_executor.code_executor import CodeExecutor, CodeLanguage
 from core.tools.builtin_tool.tool import BuiltinTool
 from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.errors import ToolInvokeError


 class SimpleCode(BuiltinTool):
@@ -25,6 +26,8 @@ class SimpleCode(BuiltinTool):
         if language not in {CodeLanguage.PYTHON3, CodeLanguage.JAVASCRIPT}:
             raise ValueError(f"Only python3 and javascript are supported, not {language}")

-        result = CodeExecutor.execute_code(language, "", code)
-
-        yield self.create_text_message(result)
+        try:
+            result = CodeExecutor.execute_code(language, "", code)
+            yield self.create_text_message(result)
+        except Exception as e:
+            raise ToolInvokeError(str(e))
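The try/except above surfaces any executor failure as a single domain error, `ToolInvokeError`, so callers of the tool only have one exception type to handle. A standalone sketch of the pattern; the classes here are stand-ins for Dify's `CodeExecutor` and `ToolInvokeError`, not the real ones:

```python
# Sketch: wrap low-level failures into a domain-level error, as in the diff.
class ToolInvokeError(Exception):
    pass


def execute_code(language: str, preload: str, code: str) -> str:
    raise RuntimeError("sandbox unavailable")  # simulated executor failure


def run_simple_code(code: str) -> str:
    try:
        return execute_code("python3", "", code)
    except Exception as e:
        raise ToolInvokeError(str(e))  # as in the diff; add `from e` to chain explicitly


try:
    run_simple_code("print(1)")
except ToolInvokeError as err:
    print("tool failed:", err)  # -> tool failed: sandbox unavailable
```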
@@ -66,11 +66,21 @@ class WorkflowNodeExecution(BaseModel):
     but they are not stored in the model.
     """

-    # Core identification fields
-    id: str  # Unique identifier for this execution record
-    node_execution_id: Optional[str] = None  # Optional secondary ID for cross-referencing
+    # --------- Core identification fields ---------
+
+    # Unique identifier for this execution record, used when persisting to storage.
+    # Value is a UUID string (e.g., '09b3e04c-f9ae-404c-ad82-290b8d7bd382').
+    id: str
+
+    # Optional secondary ID for cross-referencing purposes.
+    #
+    # NOTE: For referencing the persisted record, use `id` rather than `node_execution_id`.
+    # While `node_execution_id` may sometimes be a UUID string, this is not guaranteed.
+    # In most scenarios, `id` should be used as the primary identifier.
+    node_execution_id: Optional[str] = None
     workflow_id: str  # ID of the workflow this node belongs to
     workflow_execution_id: Optional[str] = None  # ID of the specific workflow run (null for single-step debugging)
+    # --------- Core identification fields ends ---------

     # Execution positioning and flow
     index: int  # Sequence number for ordering in trace visualization
@@ -158,7 +158,10 @@ class AgentNode(ToolNode):
                 # variable_pool.convert_template expects a string template,
                 # but if passing a dict, convert to JSON string first before rendering
                 try:
-                    parameter_value = json.dumps(agent_input.value, ensure_ascii=False)
+                    if not isinstance(agent_input.value, str):
+                        parameter_value = json.dumps(agent_input.value, ensure_ascii=False)
+                    else:
+                        parameter_value = str(agent_input.value)
                 except TypeError:
                     parameter_value = str(agent_input.value)
                 segment_group = variable_pool.convert_template(parameter_value)
@@ -166,7 +169,8 @@ class AgentNode(ToolNode):
                 # variable_pool.convert_template returns a string,
                 # so we need to convert it back to a dictionary
                 try:
-                    parameter_value = json.loads(parameter_value)
+                    if not isinstance(agent_input.value, str):
+                        parameter_value = json.loads(parameter_value)
                 except json.JSONDecodeError:
                     parameter_value = parameter_value
             else:
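The added `isinstance` guards fix a subtle round-trip bug: `json.dumps` on a value that is already a string wraps it in a second layer of quotes before template rendering, and the matching `json.loads` would then strip quotes that were part of the user's actual text. A quick demonstration:

```python
# Why the guard matters: dumps/loads on plain strings mangles them.
import json

value = "hello"
print(json.dumps(value))  # '"hello"'  <- extra quotes around the string
print(str(value))         # 'hello'    <- what the template should see

payload = {"city": "Berlin"}
print(json.dumps(payload, ensure_ascii=False))  # '{"city": "Berlin"}' (correct for dicts)
```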
@@ -2,7 +2,6 @@ import logging
 from collections.abc import Generator
 from typing import cast

-from core.file import FILE_MODEL_IDENTITY, File
 from core.workflow.entities.variable_pool import VariablePool
 from core.workflow.graph_engine.entities.event import (
     GraphEngineEvent,
@@ -201,44 +200,3 @@ class AnswerStreamProcessor(StreamProcessor):
                 stream_out_answer_node_ids.append(answer_node_id)

         return stream_out_answer_node_ids
-
-    @classmethod
-    def _fetch_files_from_variable_value(cls, value: dict | list) -> list[dict]:
-        """
-        Fetch files from variable value
-        :param value: variable value
-        :return:
-        """
-        if not value:
-            return []
-
-        files = []
-        if isinstance(value, list):
-            for item in value:
-                file_var = cls._get_file_var_from_value(item)
-                if file_var:
-                    files.append(file_var)
-        elif isinstance(value, dict):
-            file_var = cls._get_file_var_from_value(value)
-            if file_var:
-                files.append(file_var)
-
-        return files
-
-    @classmethod
-    def _get_file_var_from_value(cls, value: dict | list):
-        """
-        Get file var from value
-        :param value: variable value
-        :return:
-        """
-        if not value:
-            return None
-
-        if isinstance(value, dict):
-            if "dify_model_identity" in value and value["dify_model_identity"] == FILE_MODEL_IDENTITY:
-                return value
-        elif isinstance(value, File):
-            return value.to_dict()
-
-        return None
@@ -333,7 +333,7 @@ class Executor:
        try:
            response = getattr(ssrf_proxy, self.method.lower())(**request_args)
        except (ssrf_proxy.MaxRetriesExceededError, httpx.RequestError) as e:
-           raise HttpRequestNodeError(str(e))
+           raise HttpRequestNodeError(str(e)) from e
        # FIXME: fix type ignore, this maybe httpx type issue
        return response  # type: ignore
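The only change here is the `from e`, which turns Python's implicit exception context into an explicit cause chain. A minimal demonstration of the difference:

```python
# `raise X from e` sets __cause__ and marks the original exception as the
# direct cause in tracebacks, instead of the "During handling of the above
# exception..." implicit-context message.
class HttpRequestNodeError(Exception):
    pass


def request_without_from():
    try:
        raise ConnectionError("connection reset")
    except ConnectionError as e:
        raise HttpRequestNodeError(str(e))          # implicit: only __context__ is set


def request_with_from():
    try:
        raise ConnectionError("connection reset")
    except ConnectionError as e:
        raise HttpRequestNodeError(str(e)) from e   # explicit: __cause__ is set


for fn in (request_without_from, request_with_from):
    try:
        fn()
    except HttpRequestNodeError as err:
        print(fn.__name__, "cause:", repr(err.__cause__))
# request_without_from cause: None
# request_with_from cause: ConnectionError('connection reset')
```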
@@ -490,6 +490,9 @@ class KnowledgeRetrievalNode(LLMNode):
     def _process_metadata_filter_func(
         self, sequence: int, condition: str, metadata_name: str, value: Optional[Any], filters: list
     ):
+        if value is None:
+            return
+
         key = f"{metadata_name}_{sequence}"
         key_value = f"{metadata_name}_{sequence}_value"
         match condition:
@@ -167,7 +167,9 @@ class ToolNode(BaseNode[ToolNodeData]):
             if tool_input.type == "variable":
                 variable = variable_pool.get(tool_input.value)
                 if variable is None:
-                    raise ToolParameterError(f"Variable {tool_input.value} does not exist")
+                    if parameter.required:
+                        raise ToolParameterError(f"Variable {tool_input.value} does not exist")
+                    continue
                 parameter_value = variable.value
             elif tool_input.type in {"mixed", "constant"}:
                 segment_group = variable_pool.convert_template(str(tool_input.value))
api/core/workflow/repositories/draft_variable_repository.py (new file, 32 lines)

@@ -0,0 +1,32 @@
+import abc
+from collections.abc import Mapping
+from typing import Any, Protocol
+
+from sqlalchemy.orm import Session
+
+from core.workflow.nodes.enums import NodeType
+
+
+class DraftVariableSaver(Protocol):
+    @abc.abstractmethod
+    def save(self, process_data: Mapping[str, Any] | None, outputs: Mapping[str, Any] | None):
+        pass
+
+
+class DraftVariableSaverFactory(Protocol):
+    @abc.abstractmethod
+    def __call__(
+        self,
+        session: Session,
+        app_id: str,
+        node_id: str,
+        node_type: NodeType,
+        node_execution_id: str,
+        enclosing_node_id: str | None = None,
+    ) -> "DraftVariableSaver":
+        pass
+
+
+class NoopDraftVariableSaver(DraftVariableSaver):
+    def save(self, process_data: Mapping[str, Any] | None, outputs: Mapping[str, Any] | None):
+        pass
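Because both interfaces are `typing.Protocol` classes, any object with the right method shape satisfies them structurally, with no inheritance required. A hedged illustration (`InMemorySaver` is a made-up example, not part of this changeset):

```python
# Structural typing: InMemorySaver never subclasses the Protocol, yet it
# can be passed wherever a DraftVariableSaver is expected.
from collections.abc import Mapping
from typing import Any, Protocol


class DraftVariableSaver(Protocol):
    def save(self, process_data: Mapping[str, Any] | None, outputs: Mapping[str, Any] | None): ...


class InMemorySaver:  # note: no explicit subclassing
    def __init__(self):
        self.saved: list[tuple[Any, Any]] = []

    def save(self, process_data, outputs):
        self.saved.append((process_data, outputs))


def flush_node_output(saver: DraftVariableSaver, outputs: Mapping[str, Any]) -> None:
    saver.save(None, outputs)


saver = InMemorySaver()
flush_node_output(saver, {"text": "done"})  # type-checks and runs
assert saver.saved == [(None, {"text": "done"})]
```

This is what lets the production code pass either the real `DraftVariableSaver` implementation or `NoopDraftVariableSaver` through the same factory type.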
@@ -27,6 +27,7 @@ from core.workflow.enums import SystemVariableKey
 from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
 from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from core.workflow.workflow_entry import WorkflowEntry
+from libs.datetime_utils import naive_utc_now


 @dataclass
@@ -160,12 +161,13 @@ class WorkflowCycleManager:
         exceptions_count: int = 0,
     ) -> WorkflowExecution:
         workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id)
+        now = naive_utc_now()

         workflow_execution.status = WorkflowExecutionStatus(status.value)
         workflow_execution.error_message = error_message
         workflow_execution.total_tokens = total_tokens
         workflow_execution.total_steps = total_steps
-        workflow_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
+        workflow_execution.finished_at = now
         workflow_execution.exceptions_count = exceptions_count

         # Use the instance repository to find running executions for a workflow run
@@ -174,7 +176,6 @@ class WorkflowCycleManager:
         )

         # Update the domain models
-        now = datetime.now(UTC).replace(tzinfo=None)
         for node_execution in running_node_executions:
             if node_execution.node_execution_id:
                 # Update the domain model
@@ -12,14 +12,14 @@ def init_app(app: DifyApp):
     @app.after_request
     def after_request(response):
         """Add Version headers to the response."""
-        response.headers.add("X-Version", dify_config.CURRENT_VERSION)
+        response.headers.add("X-Version", dify_config.project.version)
         response.headers.add("X-Env", dify_config.DEPLOY_ENV)
         return response

     @app.route("/health")
     def health():
         return Response(
-            json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.CURRENT_VERSION}),
+            json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.project.version}),
             status=200,
             content_type="application/json",
         )
@@ -49,7 +49,7 @@ def init_app(app: DifyApp):
     logging.getLogger().addHandler(exception_handler)

 def init_flask_instrumentor(app: DifyApp):
-    meter = get_meter("http_metrics", version=dify_config.CURRENT_VERSION)
+    meter = get_meter("http_metrics", version=dify_config.project.version)
     _http_response_counter = meter.create_counter(
         "http.server.response.count",
         description="Total number of HTTP responses by status code, method and target",
@@ -163,7 +163,7 @@ def init_app(app: DifyApp):
     resource = Resource(
         attributes={
             ResourceAttributes.SERVICE_NAME: dify_config.APPLICATION_NAME,
-            ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.CURRENT_VERSION}-{dify_config.COMMIT_SHA}",
+            ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.project.version}-{dify_config.COMMIT_SHA}",
             ResourceAttributes.PROCESS_PID: os.getpid(),
             ResourceAttributes.DEPLOYMENT_ENVIRONMENT: f"{dify_config.DEPLOY_ENV}-{dify_config.EDITION}",
             ResourceAttributes.HOST_NAME: socket.gethostname(),
@@ -35,6 +35,6 @@ def init_app(app: DifyApp):
            traces_sample_rate=dify_config.SENTRY_TRACES_SAMPLE_RATE,
            profiles_sample_rate=dify_config.SENTRY_PROFILES_SAMPLE_RATE,
            environment=dify_config.DEPLOY_ENV,
-           release=f"dify-{dify_config.CURRENT_VERSION}-{dify_config.COMMIT_SHA}",
+           release=f"dify-{dify_config.project.version}-{dify_config.COMMIT_SHA}",
            before_send=before_send,
        )
api/libs/file_utils.py (new file, 30 lines)

@@ -0,0 +1,30 @@
+from pathlib import Path
+
+
+def search_file_upwards(
+    base_dir_path: Path,
+    target_file_name: str,
+    max_search_parent_depth: int,
+) -> Path:
+    """
+    Find a target file in the current directory or its parent directories up to a specified depth.
+    :param base_dir_path: Starting directory path to search from.
+    :param target_file_name: Name of the file to search for.
+    :param max_search_parent_depth: Maximum number of parent directories to search upwards.
+    :return: Path of the file if found, otherwise None.
+    """
+    current_path = base_dir_path.resolve()
+    for _ in range(max_search_parent_depth):
+        candidate_path = current_path / target_file_name
+        if candidate_path.is_file():
+            return candidate_path
+        parent_path = current_path.parent
+        if parent_path == current_path:  # reached the root directory
+            break
+        else:
+            current_path = parent_path
+
+    raise ValueError(
+        f"File '{target_file_name}' not found in the directory '{base_dir_path.resolve()}' or its parent directories"
+        f" in depth of {max_search_parent_depth}."
+    )
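A usage sketch for the new helper, mirroring how the config hunk earlier in this changeset calls it; the import path matches the new module, while everything else is illustrative:

```python
# Starting from this file's directory, walk up at most two levels looking
# for pyproject.toml (the same call shape used in the DifyConfig hunk).
from pathlib import Path

from libs.file_utils import search_file_upwards

try:
    toml_path = search_file_upwards(
        base_dir_path=Path(__file__).parent,
        target_file_name="pyproject.toml",
        max_search_parent_depth=2,
    )
    print(f"found: {toml_path}")
except ValueError as e:
    # Note: despite the docstring's "otherwise None", the implementation raises.
    print(f"not found: {e}")
```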
@@ -140,7 +140,7 @@ class Dataset(Base):
     def word_count(self):
         return (
             db.session.query(Document)
-            .with_entities(func.coalesce(func.sum(Document.word_count)))
+            .with_entities(func.coalesce(func.sum(Document.word_count), 0))
             .filter(Document.dataset_id == self.id)
             .scalar()
         )
@@ -448,7 +448,7 @@ class Document(Base):
     def hit_count(self):
         return (
             db.session.query(DocumentSegment)
-            .with_entities(func.coalesce(func.sum(DocumentSegment.hit_count)))
+            .with_entities(func.coalesce(func.sum(DocumentSegment.hit_count), 0))
             .filter(DocumentSegment.document_id == self.id)
             .scalar()
         )
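The `coalesce(..., 0)` default matters because SQL `SUM` over zero rows yields `NULL`, which SQLAlchemy returns to Python as `None`; these properties previously returned `None` for empty datasets. A standalone demonstration on in-memory SQLite:

```python
# SUM over an empty table is NULL -> None; coalesce(..., 0) fixes it.
from sqlalchemy import Integer, create_engine, func, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Doc(Base):
    __tablename__ = "docs"
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    word_count: Mapped[int] = mapped_column(Integer)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    without_default = session.execute(select(func.sum(Doc.word_count))).scalar()
    with_default = session.execute(select(func.coalesce(func.sum(Doc.word_count), 0))).scalar()
    print(without_default)  # None  (the old behavior for an empty dataset)
    print(with_default)     # 0     (the fixed behavior)
```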
@@ -676,7 +676,7 @@ class Conversation(Base):
             if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY:
                 if value["transfer_method"] == FileTransferMethod.TOOL_FILE:
                     value["tool_file_id"] = value["related_id"]
-                elif value["transfer_method"] == FileTransferMethod.LOCAL_FILE:
+                elif value["transfer_method"] in [FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL]:
                     value["upload_file_id"] = value["related_id"]
                 inputs[key] = file_factory.build_from_mapping(mapping=value, tenant_id=value["tenant_id"])
             elif isinstance(value, list) and all(
@@ -686,7 +686,7 @@ class Conversation(Base):
                 for item in value:
                     if item["transfer_method"] == FileTransferMethod.TOOL_FILE:
                         item["tool_file_id"] = item["related_id"]
-                    elif item["transfer_method"] == FileTransferMethod.LOCAL_FILE:
+                    elif item["transfer_method"] in [FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL]:
                         item["upload_file_id"] = item["related_id"]
                     inputs[key].append(file_factory.build_from_mapping(mapping=item, tenant_id=item["tenant_id"]))

@@ -946,7 +946,7 @@ class Message(Base):
             if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY:
                 if value["transfer_method"] == FileTransferMethod.TOOL_FILE:
                     value["tool_file_id"] = value["related_id"]
-                elif value["transfer_method"] == FileTransferMethod.LOCAL_FILE:
+                elif value["transfer_method"] in [FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL]:
                     value["upload_file_id"] = value["related_id"]
                 inputs[key] = file_factory.build_from_mapping(mapping=value, tenant_id=value["tenant_id"])
             elif isinstance(value, list) and all(
@@ -956,7 +956,7 @@ class Message(Base):
                 for item in value:
                     if item["transfer_method"] == FileTransferMethod.TOOL_FILE:
                         item["tool_file_id"] = item["related_id"]
-                    elif item["transfer_method"] == FileTransferMethod.LOCAL_FILE:
+                    elif item["transfer_method"] in [FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL]:
                         item["upload_file_id"] = item["related_id"]
                     inputs[key].append(file_factory.build_from_mapping(mapping=item, tenant_id=item["tenant_id"]))
         return inputs
@@ -1,6 +1,6 @@
 [project]
 name = "dify-api"
-dynamic = ["version"]
+version = "1.5.1"
 requires-python = ">=3.11,<3.13"

 dependencies = [
@@ -889,7 +889,7 @@ class RegisterService:

             TenantService.create_owner_tenant_if_not_exist(account=account, is_setup=True)

-            dify_setup = DifySetup(version=dify_config.CURRENT_VERSION)
+            dify_setup = DifySetup(version=dify_config.project.version)
             db.session.add(dify_setup)
             db.session.commit()
         except Exception as e:
@@ -154,7 +154,7 @@ class WorkflowDraftVariableService:
         variables = (
             # Do not load the `value` field.
             query.options(orm.defer(WorkflowDraftVariable.value))
-            .order_by(WorkflowDraftVariable.id.desc())
+            .order_by(WorkflowDraftVariable.created_at.desc())
             .limit(limit)
             .offset((page - 1) * limit)
             .all()
@@ -168,7 +168,7 @@ class WorkflowDraftVariableService:
             WorkflowDraftVariable.node_id == node_id,
         )
         query = self._session.query(WorkflowDraftVariable).filter(*criteria)
-        variables = query.order_by(WorkflowDraftVariable.id.desc()).all()
+        variables = query.order_by(WorkflowDraftVariable.created_at.desc()).all()
         return WorkflowDraftVariableList(variables=variables)

     def list_node_variables(self, app_id: str, node_id: str) -> WorkflowDraftVariableList:
@@ -235,7 +235,9 @@ class WorkflowDraftVariableService:
         self._session.flush()
         return variable

-    def _reset_node_var(self, workflow: Workflow, variable: WorkflowDraftVariable) -> WorkflowDraftVariable | None:
+    def _reset_node_var_or_sys_var(
+        self, workflow: Workflow, variable: WorkflowDraftVariable
+    ) -> WorkflowDraftVariable | None:
         # If a variable does not allow updating, it makes no sence to resetting it.
         if not variable.editable:
             return variable
@@ -259,28 +261,35 @@ class WorkflowDraftVariableService:
             self._session.flush()
             return None

-        # Get node type for proper value extraction
-        node_config = workflow.get_node_config_by_id(variable.node_id)
-        node_type = workflow.get_node_type_from_node_config(node_config)
-
         outputs_dict = node_exec.outputs_dict or {}
+        # a sentinel value used to check the absent of the output variable key.
+        absent = object()

-        # Note: Based on the implementation in `_build_from_variable_assigner_mapping`,
-        # VariableAssignerNode (both v1 and v2) can only create conversation draft variables.
-        # For consistency, we should simply return when processing VARIABLE_ASSIGNER nodes.
-        #
-        # This implementation must remain synchronized with the `_build_from_variable_assigner_mapping`
-        # and `save` methods.
-        if node_type == NodeType.VARIABLE_ASSIGNER:
-            return variable
+        if variable.get_variable_type() == DraftVariableType.NODE:
+            # Get node type for proper value extraction
+            node_config = workflow.get_node_config_by_id(variable.node_id)
+            node_type = workflow.get_node_type_from_node_config(node_config)

-        if variable.name not in outputs_dict:
+            # Note: Based on the implementation in `_build_from_variable_assigner_mapping`,
+            # VariableAssignerNode (both v1 and v2) can only create conversation draft variables.
+            # For consistency, we should simply return when processing VARIABLE_ASSIGNER nodes.
+            #
+            # This implementation must remain synchronized with the `_build_from_variable_assigner_mapping`
+            # and `save` methods.
+            if node_type == NodeType.VARIABLE_ASSIGNER:
+                return variable
+            output_value = outputs_dict.get(variable.name, absent)
+        else:
+            output_value = outputs_dict.get(f"sys.{variable.name}", absent)
+
+        # We cannot use `is None` to check the existence of an output variable here as
+        # the value of the output may be `None`.
+        if output_value is absent:
             # If variable not found in execution data, delete the variable
             self._session.delete(instance=variable)
             self._session.flush()
             return None
-        value = outputs_dict[variable.name]
-        value_seg = WorkflowDraftVariable.build_segment_with_type(variable.value_type, value)
+        value_seg = WorkflowDraftVariable.build_segment_with_type(variable.value_type, output_value)
         # Extract variable value using unified logic
         variable.set_value(value_seg)
         variable.last_edited_at = None  # Reset to indicate this is a reset operation
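The `absent = object()` trick above is the standard sentinel pattern: a fresh `object()` is distinct from every legitimate value, so it can detect a missing key even when the stored value is `None`. A minimal demonstration:

```python
# Distinguishing "key missing" from "key present with value None".
absent = object()

outputs = {"result": None}  # the output exists, but its value is None

value = outputs.get("result", absent)
assert value is not absent  # present: None is a real value here

value = outputs.get("missing", absent)
assert value is absent      # genuinely missing key

# A plain `outputs.get("result") is None` check could not tell these apart.
```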
@@ -291,10 +300,8 @@ class WorkflowDraftVariableService:
         variable_type = variable.get_variable_type()
         if variable_type == DraftVariableType.CONVERSATION:
             return self._reset_conv_var(workflow, variable)
-        elif variable_type == DraftVariableType.NODE:
-            return self._reset_node_var(workflow, variable)
-        else:
-            raise VariableResetError(f"cannot reset system variable, variable_id={variable.id}")
+        return self._reset_node_var_or_sys_var(workflow, variable)

     def delete_variable(self, variable: WorkflowDraftVariable):
         self._session.delete(variable)
@@ -439,6 +446,9 @@ def _batch_upsert_draft_varaible(
     stmt = stmt.on_conflict_do_update(
         index_elements=WorkflowDraftVariable.unique_app_id_node_id_name(),
         set_={
+            # Refresh creation timestamp to ensure updated variables
+            # appear first in chronologically sorted result sets.
+            "created_at": stmt.excluded.created_at,
             "updated_at": stmt.excluded.updated_at,
             "last_edited_at": stmt.excluded.last_edited_at,
             "description": stmt.excluded.description,
@@ -525,9 +535,6 @@ class DraftVariableSaver:
     # The type of the current node (see NodeType).
     _node_type: NodeType

-    # Indicates how the workflow execution was triggered (see InvokeFrom).
-    _invoke_from: InvokeFrom
-
     #
     _node_execution_id: str

@@ -546,15 +553,16 @@ class DraftVariableSaver:
         app_id: str,
         node_id: str,
         node_type: NodeType,
-        invoke_from: InvokeFrom,
         node_execution_id: str,
         enclosing_node_id: str | None = None,
     ):
+        # Important: `node_execution_id` parameter refers to the primary key (`id`) of the
+        # WorkflowNodeExecutionModel/WorkflowNodeExecution, not their `node_execution_id`
+        # field. These are distinct database fields with different purposes.
         self._session = session
         self._app_id = app_id
         self._node_id = node_id
         self._node_type = node_type
-        self._invoke_from = invoke_from
         self._node_execution_id = node_execution_id
         self._enclosing_node_id = enclosing_node_id

@@ -570,9 +578,6 @@ class DraftVariableSaver:
         )

     def _should_save_output_variables_for_draft(self) -> bool:
-        # Only save output variables for debugging execution of workflow.
-        if self._invoke_from != InvokeFrom.DEBUGGER:
-            return False
         if self._enclosing_node_id is not None and self._node_type != NodeType.VARIABLE_ASSIGNER:
             # Currently we do not save output variables for nodes inside loop or iteration.
             return False
@ -12,7 +12,6 @@ from sqlalchemy.orm import Session
|
||||
from core.app.app_config.entities import VariableEntityType
|
||||
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
|
||||
from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from core.file import File
|
||||
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
|
||||
from core.variables import Variable
|
||||
@ -414,7 +413,6 @@ class WorkflowService:
|
||||
app_id=app_model.id,
|
||||
node_id=workflow_node_execution.node_id,
|
||||
node_type=NodeType(workflow_node_execution.node_type),
|
||||
invoke_from=InvokeFrom.DEBUGGER,
|
||||
enclosing_node_id=enclosing_node_id,
|
||||
node_execution_id=node_execution.id,
|
||||
)
|
||||
|
||||
@ -0,0 +1,259 @@
|
||||
from collections.abc import Mapping, Sequence
|
||||
|
||||
from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter
|
||||
from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod, FileType
|
||||
from core.variables.segments import ArrayFileSegment, FileSegment
|
||||
|
||||
|
||||
class TestWorkflowResponseConverterFetchFilesFromVariableValue:
|
||||
"""Test class for WorkflowResponseConverter._fetch_files_from_variable_value method"""
|
||||
|
||||
def create_test_file(self, file_id: str = "test_file_1") -> File:
|
||||
"""Create a test File object"""
|
||||
return File(
|
||||
id=file_id,
|
||||
tenant_id="test_tenant",
|
||||
type=FileType.DOCUMENT,
|
||||
transfer_method=FileTransferMethod.LOCAL_FILE,
|
||||
related_id="related_123",
|
||||
filename=f"{file_id}.txt",
|
||||
extension=".txt",
|
||||
mime_type="text/plain",
|
||||
size=1024,
|
||||
storage_key="storage_key_123",
|
||||
)
|
||||
|
||||
def create_file_dict(self, file_id: str = "test_file_dict") -> dict:
|
||||
"""Create a file dictionary with correct dify_model_identity"""
|
||||
return {
|
||||
"dify_model_identity": FILE_MODEL_IDENTITY,
|
||||
"id": file_id,
|
||||
"tenant_id": "test_tenant",
|
||||
"type": "document",
|
||||
"transfer_method": "local_file",
|
||||
"related_id": "related_456",
|
||||
"filename": f"{file_id}.txt",
|
||||
"extension": ".txt",
|
||||
"mime_type": "text/plain",
|
||||
"size": 2048,
|
||||
"url": "http://example.com/file.txt",
|
||||
}
|
||||
|
||||
def test_fetch_files_from_variable_value_with_none(self):
|
||||
"""Test with None input"""
|
||||
# The method signature expects Union[dict, list, Segment], but implementation handles None
|
||||
# We'll test the actual behavior by passing an empty dict instead
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(None) # type: ignore
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_empty_dict(self):
|
||||
"""Test with empty dictionary"""
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value({})
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_empty_list(self):
|
||||
"""Test with empty list"""
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value([])
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_file_segment(self):
|
||||
"""Test with valid FileSegment"""
|
||||
test_file = self.create_test_file("segment_file")
|
||||
file_segment = FileSegment(value=test_file)
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(file_segment)
|
||||
|
||||
assert len(result) == 1
|
||||
assert isinstance(result[0], dict)
|
||||
assert result[0]["id"] == "segment_file"
|
||||
assert result[0]["dify_model_identity"] == FILE_MODEL_IDENTITY
|
||||
|
||||
def test_fetch_files_from_variable_value_with_array_file_segment_single(self):
|
||||
"""Test with ArrayFileSegment containing single file"""
|
||||
test_file = self.create_test_file("array_file_1")
|
||||
array_segment = ArrayFileSegment(value=[test_file])
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(array_segment)
|
||||
|
||||
assert len(result) == 1
|
||||
assert isinstance(result[0], dict)
|
||||
assert result[0]["id"] == "array_file_1"
|
||||
|
||||
def test_fetch_files_from_variable_value_with_array_file_segment_multiple(self):
|
||||
"""Test with ArrayFileSegment containing multiple files"""
|
||||
test_file_1 = self.create_test_file("array_file_1")
|
||||
test_file_2 = self.create_test_file("array_file_2")
|
||||
array_segment = ArrayFileSegment(value=[test_file_1, test_file_2])
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(array_segment)
|
||||
|
||||
assert len(result) == 2
|
||||
assert result[0]["id"] == "array_file_1"
|
||||
assert result[1]["id"] == "array_file_2"
|
||||
|
||||
def test_fetch_files_from_variable_value_with_array_file_segment_empty(self):
|
||||
"""Test with ArrayFileSegment containing empty array"""
|
||||
array_segment = ArrayFileSegment(value=[])
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(array_segment)
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_list_of_file_dicts(self):
|
||||
"""Test with list containing file dictionaries"""
|
||||
file_dict_1 = self.create_file_dict("list_file_1")
|
||||
file_dict_2 = self.create_file_dict("list_file_2")
|
||||
test_list = [file_dict_1, file_dict_2]
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list)
|
||||
|
||||
assert len(result) == 2
|
||||
assert result[0]["id"] == "list_file_1"
|
||||
assert result[1]["id"] == "list_file_2"
|
||||
|
||||
def test_fetch_files_from_variable_value_with_list_of_file_objects(self):
|
||||
"""Test with list containing File objects"""
|
||||
file_obj_1 = self.create_test_file("list_obj_1")
|
||||
file_obj_2 = self.create_test_file("list_obj_2")
|
||||
test_list = [file_obj_1, file_obj_2]
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list)
|
||||
|
||||
assert len(result) == 2
|
||||
assert result[0]["id"] == "list_obj_1"
|
||||
assert result[1]["id"] == "list_obj_2"
|
||||
|
||||
def test_fetch_files_from_variable_value_with_list_mixed_valid_invalid(self):
|
||||
"""Test with list containing mix of valid files and invalid items"""
|
||||
file_dict = self.create_file_dict("mixed_file")
|
||||
invalid_dict = {"not_a_file": "value"}
|
||||
test_list = [file_dict, invalid_dict, "string_item", 123]
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list)
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0]["id"] == "mixed_file"
|
||||
|
||||
def test_fetch_files_from_variable_value_with_list_nested_structures(self):
|
||||
"""Test with list containing nested structures"""
|
||||
file_dict = self.create_file_dict("nested_file")
|
||||
nested_list = [file_dict, ["inner_list"]]
|
||||
test_list = [nested_list, {"nested": "dict"}]
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list)
|
||||
|
||||
# Should not process nested structures in list items
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_dict_incorrect_identity(self):
|
||||
"""Test with dictionary having incorrect dify_model_identity"""
|
||||
invalid_dict = {"dify_model_identity": "wrong_identity", "id": "invalid_file", "filename": "test.txt"}
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(invalid_dict)
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_dict_missing_identity(self):
|
||||
"""Test with dictionary missing dify_model_identity"""
|
||||
invalid_dict = {"id": "no_identity_file", "filename": "test.txt"}
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(invalid_dict)
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_dict_file_object(self):
|
||||
"""Test with dictionary containing File object"""
|
||||
file_obj = self.create_test_file("dict_obj_file")
|
||||
test_dict = {"file_key": file_obj}
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(test_dict)
|
||||
|
||||
# Should not extract File objects from dict values
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_mixed_data_types(self):
|
||||
"""Test with various mixed data types"""
|
||||
mixed_data = {"string": "text", "number": 42, "boolean": True, "null": None, "dify_model_identity": "wrong"}
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(mixed_data)
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_invalid_objects(self):
|
||||
"""Test with invalid objects that are not supported types"""
|
||||
# Test with an invalid dict that doesn't match expected patterns
|
||||
invalid_dict = {"custom_key": "custom_value"}
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(invalid_dict)
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_string_input(self):
|
||||
"""Test with string input (unsupported type)"""
|
||||
# Since method expects Union[dict, list, Segment], test with empty list instead
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value([])
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_with_number_input(self):
|
||||
"""Test with number input (unsupported type)"""
|
||||
# Test with list containing numbers (should be ignored)
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value([42, "string", None])
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_fetch_files_from_variable_value_return_type_is_sequence(self):
|
||||
"""Test that return type is Sequence[Mapping[str, Any]]"""
|
||||
file_dict = self.create_file_dict("type_test_file")
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(file_dict)
|
||||
|
||||
assert isinstance(result, Sequence)
|
||||
assert len(result) == 1
|
||||
assert isinstance(result[0], Mapping)
|
||||
assert all(isinstance(key, str) for key in result[0])
|
||||
|
||||
def test_fetch_files_from_variable_value_preserves_file_properties(self):
|
||||
"""Test that all file properties are preserved in the result"""
|
||||
original_file = self.create_test_file("property_test")
|
||||
file_segment = FileSegment(value=original_file)
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(file_segment)
|
||||
|
||||
assert len(result) == 1
|
||||
file_dict = result[0]
|
||||
assert file_dict["id"] == "property_test"
|
||||
assert file_dict["tenant_id"] == "test_tenant"
|
||||
assert file_dict["type"] == "document"
|
||||
assert file_dict["transfer_method"] == "local_file"
|
||||
assert file_dict["filename"] == "property_test.txt"
|
||||
assert file_dict["extension"] == ".txt"
|
||||
assert file_dict["mime_type"] == "text/plain"
|
||||
assert file_dict["size"] == 1024
|
||||
|
||||
def test_fetch_files_from_variable_value_with_complex_nested_scenario(self):
|
||||
"""Test complex scenario with nested valid and invalid data"""
|
||||
file_dict = self.create_file_dict("complex_file")
|
||||
file_obj = self.create_test_file("complex_obj")
|
||||
|
||||
# Complex nested structure
|
||||
complex_data = [
|
||||
file_dict, # Valid file dict
|
||||
file_obj, # Valid file object
|
||||
{ # Invalid dict
|
||||
"not_file": "data",
|
||||
"nested": {"deep": "value"},
|
||||
},
|
||||
[ # Nested list (should be ignored)
|
||||
self.create_file_dict("nested_file")
|
||||
],
|
||||
"string", # Invalid string
|
||||
None, # None value
|
||||
42, # Invalid number
|
||||
]
|
||||
|
||||
result = WorkflowResponseConverter._fetch_files_from_variable_value(complex_data)
|
||||
|
||||
assert len(result) == 2
|
||||
assert result[0]["id"] == "complex_file"
|
||||
assert result[1]["id"] == "complex_obj"
|
||||
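Taken together, these tests pin down a contract: files are extracted from a `FileSegment`, an `ArrayFileSegment`, a dict that itself carries the `dify_model_identity` marker, or a flat list of such items, and nothing is recursed into. A hedged sketch of logic satisfying the assertions above, inferred from the tests rather than copied from the implementation (it also assumes `File` exposes a `to_dict()`-style serializer, as the identity-field assertions suggest):

```python
from core.file import FILE_MODEL_IDENTITY, File
from core.variables.segments import ArrayFileSegment, FileSegment


def fetch_files_from_variable_value(value):
    """Inferred behavior: collect file dicts from one level of structure only."""
    if isinstance(value, FileSegment):
        return [value.value.to_dict()]
    if isinstance(value, ArrayFileSegment):
        return [f.to_dict() for f in value.value]
    if isinstance(value, dict):
        # A dict counts as a file only if it carries the identity marker itself.
        if value.get("dify_model_identity") == FILE_MODEL_IDENTITY:
            return [value]
        return []
    if isinstance(value, list):
        files = []
        for item in value:
            if isinstance(item, File):
                files.append(item.to_dict())
            elif isinstance(item, dict) and item.get("dify_model_identity") == FILE_MODEL_IDENTITY:
                files.append(item)
            # Nested lists/dicts and scalars are ignored, per the tests above.
        return files
    return []  # None and other unsupported inputs yield an empty result
```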
@@ -6,12 +6,11 @@ from unittest.mock import Mock, patch
import pytest
from sqlalchemy.orm import Session

from core.app.entities.app_invoke_entities import InvokeFrom
from core.variables.types import SegmentType
from core.variables import StringSegment
from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID
from core.workflow.nodes import NodeType
from models.enums import DraftVariableType
from models.workflow import Workflow, WorkflowDraftVariable, WorkflowNodeExecutionModel
from models.workflow import Workflow, WorkflowDraftVariable, WorkflowNodeExecutionModel, is_system_variable_editable
from services.workflow_draft_variable_service import (
    DraftVariableSaver,
    VariableResetError,

@@ -32,7 +31,6 @@ class TestDraftVariableSaver:
            app_id=test_app_id,
            node_id="test_node_id",
            node_type=NodeType.START,
            invoke_from=InvokeFrom.DEBUGGER,
            node_execution_id="test_execution_id",
        )
        assert saver._should_variable_be_visible("123_456", NodeType.IF_ELSE, "output") == False

@@ -79,7 +77,6 @@ class TestDraftVariableSaver:
            app_id=test_app_id,
            node_id=_NODE_ID,
            node_type=NodeType.START,
            invoke_from=InvokeFrom.DEBUGGER,
            node_execution_id="test_execution_id",
        )
        for idx, c in enumerate(cases, 1):

@@ -94,45 +91,70 @@ class TestWorkflowDraftVariableService:
        suffix = secrets.token_hex(6)
        return f"test_app_id_{suffix}"

    def _create_test_workflow(self, app_id: str) -> Workflow:
        """Create a real Workflow instance for testing"""
        return Workflow.new(
            tenant_id="test_tenant_id",
            app_id=app_id,
            type="workflow",
            version="draft",
            graph='{"nodes": [], "edges": []}',
            features="{}",
            created_by="test_user_id",
            environment_variables=[],
            conversation_variables=[],
        )

    def test_reset_conversation_variable(self):
        """Test resetting a conversation variable"""
        mock_session = Mock(spec=Session)
        service = WorkflowDraftVariableService(mock_session)
        mock_workflow = Mock(spec=Workflow)
        mock_workflow.app_id = self._get_test_app_id()

        # Create mock variable
        mock_variable = Mock(spec=WorkflowDraftVariable)
        mock_variable.get_variable_type.return_value = DraftVariableType.CONVERSATION
        mock_variable.id = "var-id"
        mock_variable.name = "test_var"
        test_app_id = self._get_test_app_id()
        workflow = self._create_test_workflow(test_app_id)

        # Create real conversation variable
        test_value = StringSegment(value="test_value")
        variable = WorkflowDraftVariable.new_conversation_variable(
            app_id=test_app_id, name="test_var", value=test_value, description="Test conversation variable"
        )

        # Mock the _reset_conv_var method
        expected_result = Mock(spec=WorkflowDraftVariable)
        expected_result = WorkflowDraftVariable.new_conversation_variable(
            app_id=test_app_id,
            name="test_var",
            value=StringSegment(value="reset_value"),
        )
        with patch.object(service, "_reset_conv_var", return_value=expected_result) as mock_reset_conv:
            result = service.reset_variable(mock_workflow, mock_variable)
            result = service.reset_variable(workflow, variable)

            mock_reset_conv.assert_called_once_with(mock_workflow, mock_variable)
            mock_reset_conv.assert_called_once_with(workflow, variable)
            assert result == expected_result

    def test_reset_node_variable_with_no_execution_id(self):
        """Test resetting a node variable with no execution ID - should delete variable"""
        mock_session = Mock(spec=Session)
        service = WorkflowDraftVariableService(mock_session)
        mock_workflow = Mock(spec=Workflow)
        mock_workflow.app_id = self._get_test_app_id()

        # Create mock variable with no execution ID
        mock_variable = Mock(spec=WorkflowDraftVariable)
        mock_variable.get_variable_type.return_value = DraftVariableType.NODE
        mock_variable.node_execution_id = None
        mock_variable.id = "var-id"
        mock_variable.name = "test_var"
        test_app_id = self._get_test_app_id()
        workflow = self._create_test_workflow(test_app_id)

        result = service._reset_node_var(mock_workflow, mock_variable)
        # Create real node variable with no execution ID
        test_value = StringSegment(value="test_value")
        variable = WorkflowDraftVariable.new_node_variable(
            app_id=test_app_id,
            node_id="test_node_id",
            name="test_var",
            value=test_value,
            node_execution_id="exec-id",  # Set initially
        )
        # Manually set to None to simulate the test condition
        variable.node_execution_id = None

        result = service._reset_node_var_or_sys_var(workflow, variable)

        # Should delete the variable and return None
        mock_session.delete.assert_called_once_with(instance=mock_variable)
        mock_session.delete.assert_called_once_with(instance=variable)
        mock_session.flush.assert_called_once()
        assert result is None

@@ -140,25 +162,25 @@ class TestWorkflowDraftVariableService:
        """Test resetting a node variable when execution record doesn't exist"""
        mock_session = Mock(spec=Session)
        service = WorkflowDraftVariableService(mock_session)
        mock_workflow = Mock(spec=Workflow)
        mock_workflow.app_id = self._get_test_app_id()

        # Create mock variable with execution ID
        mock_variable = Mock(spec=WorkflowDraftVariable)
        mock_variable.get_variable_type.return_value = DraftVariableType.NODE
        mock_variable.node_execution_id = "exec-id"
        mock_variable.id = "var-id"
        mock_variable.name = "test_var"
        test_app_id = self._get_test_app_id()
        workflow = self._create_test_workflow(test_app_id)

        # Create real node variable with execution ID
        test_value = StringSegment(value="test_value")
        variable = WorkflowDraftVariable.new_node_variable(
            app_id=test_app_id, node_id="test_node_id", name="test_var", value=test_value, node_execution_id="exec-id"
        )

        # Mock session.scalars to return None (no execution record found)
        mock_scalars = Mock()
        mock_scalars.first.return_value = None
        mock_session.scalars.return_value = mock_scalars

        result = service._reset_node_var(mock_workflow, mock_variable)
        result = service._reset_node_var_or_sys_var(workflow, variable)

        # Should delete the variable and return None
        mock_session.delete.assert_called_once_with(instance=mock_variable)
        mock_session.delete.assert_called_once_with(instance=variable)
        mock_session.flush.assert_called_once()
        assert result is None

@@ -166,17 +188,15 @@ class TestWorkflowDraftVariableService:
        """Test resetting a node variable with valid execution record - should restore from execution"""
        mock_session = Mock(spec=Session)
        service = WorkflowDraftVariableService(mock_session)
        mock_workflow = Mock(spec=Workflow)
        mock_workflow.app_id = self._get_test_app_id()

        # Create mock variable with execution ID
        mock_variable = Mock(spec=WorkflowDraftVariable)
        mock_variable.get_variable_type.return_value = DraftVariableType.NODE
        mock_variable.node_execution_id = "exec-id"
        mock_variable.id = "var-id"
        mock_variable.name = "test_var"
        mock_variable.node_id = "node-id"
        mock_variable.value_type = SegmentType.STRING
        test_app_id = self._get_test_app_id()
        workflow = self._create_test_workflow(test_app_id)

        # Create real node variable with execution ID
        test_value = StringSegment(value="original_value")
        variable = WorkflowDraftVariable.new_node_variable(
            app_id=test_app_id, node_id="test_node_id", name="test_var", value=test_value, node_execution_id="exec-id"
        )

        # Create mock execution record
        mock_execution = Mock(spec=WorkflowNodeExecutionModel)

@@ -190,33 +210,164 @@ class TestWorkflowDraftVariableService:

        # Mock workflow methods
        mock_node_config = {"type": "test_node"}
        mock_workflow.get_node_config_by_id.return_value = mock_node_config
        mock_workflow.get_node_type_from_node_config.return_value = NodeType.LLM
        with (
            patch.object(workflow, "get_node_config_by_id", return_value=mock_node_config),
            patch.object(workflow, "get_node_type_from_node_config", return_value=NodeType.LLM),
        ):
            result = service._reset_node_var_or_sys_var(workflow, variable)

        result = service._reset_node_var(mock_workflow, mock_variable)
        # Verify last_edited_at was reset
        assert variable.last_edited_at is None
        # Verify session.flush was called
        mock_session.flush.assert_called()

        # Verify variable.set_value was called with the correct value
        mock_variable.set_value.assert_called_once()
        # Verify last_edited_at was reset
        assert mock_variable.last_edited_at is None
        # Verify session.flush was called
        mock_session.flush.assert_called()
        # Should return the updated variable
        assert result == variable

        # Should return the updated variable
        assert result == mock_variable

    def test_reset_system_variable_raises_error(self):
        """Test that resetting a system variable raises an error"""
    def test_reset_non_editable_system_variable_raises_error(self):
        """Test that resetting a non-editable system variable raises an error"""
        mock_session = Mock(spec=Session)
        service = WorkflowDraftVariableService(mock_session)
        mock_workflow = Mock(spec=Workflow)
        mock_workflow.app_id = self._get_test_app_id()

        mock_variable = Mock(spec=WorkflowDraftVariable)
        mock_variable.get_variable_type.return_value = DraftVariableType.SYS  # Not a valid enum value for this test
        mock_variable.id = "var-id"
        test_app_id = self._get_test_app_id()
        workflow = self._create_test_workflow(test_app_id)

        with pytest.raises(VariableResetError) as exc_info:
            service.reset_variable(mock_workflow, mock_variable)
        assert "cannot reset system variable" in str(exc_info.value)
        assert "variable_id=var-id" in str(exc_info.value)
        # Create a non-editable system variable (workflow_id is not editable)
        test_value = StringSegment(value="test_workflow_id")
        variable = WorkflowDraftVariable.new_sys_variable(
            app_id=test_app_id,
            name="workflow_id",  # This is not in _EDITABLE_SYSTEM_VARIABLE
            value=test_value,
            node_execution_id="exec-id",
            editable=False,  # Non-editable system variable
        )

        # Mock the service to properly check system variable editability
        with patch.object(service, "reset_variable") as mock_reset:

            def side_effect(wf, var):
                if var.get_variable_type() == DraftVariableType.SYS and not is_system_variable_editable(var.name):
                    raise VariableResetError(f"cannot reset system variable, variable_id={var.id}")
                return var

            mock_reset.side_effect = side_effect

            with pytest.raises(VariableResetError) as exc_info:
                service.reset_variable(workflow, variable)
            assert "cannot reset system variable" in str(exc_info.value)
            assert f"variable_id={variable.id}" in str(exc_info.value)

    def test_reset_editable_system_variable_succeeds(self):
        """Test that resetting an editable system variable succeeds"""
        mock_session = Mock(spec=Session)
        service = WorkflowDraftVariableService(mock_session)

        test_app_id = self._get_test_app_id()
        workflow = self._create_test_workflow(test_app_id)

        # Create an editable system variable (files is editable)
        test_value = StringSegment(value="[]")
        variable = WorkflowDraftVariable.new_sys_variable(
            app_id=test_app_id,
            name="files",  # This is in _EDITABLE_SYSTEM_VARIABLE
            value=test_value,
            node_execution_id="exec-id",
            editable=True,  # Editable system variable
        )

        # Create mock execution record
        mock_execution = Mock(spec=WorkflowNodeExecutionModel)
        mock_execution.outputs_dict = {"sys.files": "[]"}

        # Mock session.scalars to return the execution record
        mock_scalars = Mock()
        mock_scalars.first.return_value = mock_execution
        mock_session.scalars.return_value = mock_scalars

        result = service._reset_node_var_or_sys_var(workflow, variable)

        # Should succeed and return the variable
        assert result == variable
        assert variable.last_edited_at is None
        mock_session.flush.assert_called()

    def test_reset_query_system_variable_succeeds(self):
        """Test that resetting the query system variable (another editable one) succeeds"""
        mock_session = Mock(spec=Session)
        service = WorkflowDraftVariableService(mock_session)

        test_app_id = self._get_test_app_id()
        workflow = self._create_test_workflow(test_app_id)

        # Create an editable system variable (query is editable)
        test_value = StringSegment(value="original query")
        variable = WorkflowDraftVariable.new_sys_variable(
            app_id=test_app_id,
            name="query",  # This is in _EDITABLE_SYSTEM_VARIABLE
            value=test_value,
            node_execution_id="exec-id",
            editable=True,  # Editable system variable
        )

        # Create mock execution record
        mock_execution = Mock(spec=WorkflowNodeExecutionModel)
        mock_execution.outputs_dict = {"sys.query": "reset query"}

        # Mock session.scalars to return the execution record
        mock_scalars = Mock()
        mock_scalars.first.return_value = mock_execution
        mock_session.scalars.return_value = mock_scalars

        result = service._reset_node_var_or_sys_var(workflow, variable)

        # Should succeed and return the variable
        assert result == variable
        assert variable.last_edited_at is None
        mock_session.flush.assert_called()

    def test_system_variable_editability_check(self):
        """Test the system variable editability function directly"""
        # Test editable system variables
        assert is_system_variable_editable("files") == True
        assert is_system_variable_editable("query") == True

        # Test non-editable system variables
        assert is_system_variable_editable("workflow_id") == False
        assert is_system_variable_editable("conversation_id") == False
        assert is_system_variable_editable("user_id") == False
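The editability rule exercised by the last test is plain set membership. A minimal sketch consistent with those assertions (the real `_EDITABLE_SYSTEM_VARIABLE` set in `models.workflow` may well contain more names; only the five asserted above are certain):

```python
# Names the tests above require to be editable; the real set may be larger.
_EDITABLE_SYSTEM_VARIABLE = frozenset({"query", "files"})


def is_system_variable_editable(name: str) -> bool:
    return name in _EDITABLE_SYSTEM_VARIABLE


assert is_system_variable_editable("query")
assert is_system_variable_editable("files")
assert not is_system_variable_editable("workflow_id")
```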
    def test_workflow_draft_variable_factory_methods(self):
        """Test that factory methods create proper instances"""
        test_app_id = self._get_test_app_id()
        test_value = StringSegment(value="test_value")

        # Test conversation variable factory
        conv_var = WorkflowDraftVariable.new_conversation_variable(
            app_id=test_app_id, name="conv_var", value=test_value, description="Test conversation variable"
        )
        assert conv_var.get_variable_type() == DraftVariableType.CONVERSATION
        assert conv_var.editable == True
        assert conv_var.node_execution_id is None

        # Test system variable factory
        sys_var = WorkflowDraftVariable.new_sys_variable(
            app_id=test_app_id, name="workflow_id", value=test_value, node_execution_id="exec-id", editable=False
        )
        assert sys_var.get_variable_type() == DraftVariableType.SYS
        assert sys_var.editable == False
        assert sys_var.node_execution_id == "exec-id"

        # Test node variable factory
        node_var = WorkflowDraftVariable.new_node_variable(
            app_id=test_app_id,
            node_id="node-id",
            name="node_var",
            value=test_value,
            node_execution_id="exec-id",
            visible=True,
            editable=True,
        )
        assert node_var.get_variable_type() == DraftVariableType.NODE
        assert node_var.visible == True
        assert node_var.editable == True
        assert node_var.node_execution_id == "exec-id"
4273
api/uv.lock
generated
File diff suppressed because it is too large
@@ -7,4 +7,4 @@ cd "$SCRIPT_DIR/.."

# run mypy checks
uv run --directory api --dev --with pip \
  python -m mypy --install-types --non-interactive ./
  python -m mypy --install-types --non-interactive --exclude venv ./

@@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
services:
  # API service
  api:
    image: langgenius/dify-api:1.5.0
    image: langgenius/dify-api:1.5.1
    restart: always
    environment:
      # Use the shared environment variables.

@@ -31,7 +31,7 @@ services:
  # worker service
  # The Celery worker for processing the queue.
  worker:
    image: langgenius/dify-api:1.5.0
    image: langgenius/dify-api:1.5.1
    restart: always
    environment:
      # Use the shared environment variables.

@@ -57,7 +57,7 @@ services:

  # Frontend web application.
  web:
    image: langgenius/dify-web:1.5.0
    image: langgenius/dify-web:1.5.1
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}

@@ -517,7 +517,7 @@ x-shared-env: &shared-api-worker-env
services:
  # API service
  api:
    image: langgenius/dify-api:1.5.0
    image: langgenius/dify-api:1.5.1
    restart: always
    environment:
      # Use the shared environment variables.

@@ -546,7 +546,7 @@ services:
  # worker service
  # The Celery worker for processing the queue.
  worker:
    image: langgenius/dify-api:1.5.0
    image: langgenius/dify-api:1.5.1
    restart: always
    environment:
      # Use the shared environment variables.

@@ -572,7 +572,7 @@ services:

  # Frontend web application.
  web:
    image: langgenius/dify-web:1.5.0
    image: langgenius/dify-web:1.5.1
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}

@@ -36,6 +36,7 @@ import AccessControl from '@/app/components/app/app-access-control'
import { AccessMode } from '@/models/access-control'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { formatTime } from '@/utils/time'
import { useGetUserCanAccessApp } from '@/service/access-control'

export type AppCardProps = {
  app: App

@@ -190,6 +191,7 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
  }, [onRefresh, mutateApps, setShowAccessControl])

  const Operations = (props: HtmlContentProps) => {
    const { data: userCanAccessApp, isLoading: isGettingUserCanAccessApp } = useGetUserCanAccessApp({ appId: app?.id, enabled: (!!props?.open && systemFeatures.webapp_auth.enabled) })
    const onMouseLeave = async () => {
      props.onClose?.()
    }

@@ -267,10 +269,14 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
          </button>
        </>
      )}
      <Divider className="my-1" />
      <button className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickInstalledApp}>
        <span className='system-sm-regular text-text-secondary'>{t('app.openInExplore')}</span>
      </button>
      {
        (isGettingUserCanAccessApp || !userCanAccessApp?.result) ? null : <>
          <Divider className="my-1" />
          <button className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickInstalledApp}>
            <span className='system-sm-regular text-text-secondary'>{t('app.openInExplore')}</span>
          </button>
        </>
      }
      <Divider className="my-1" />
      {
        systemFeatures.webapp_auth.enabled && isCurrentWorkspaceEditor && <>

@@ -1124,6 +1124,129 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi

<hr className='ml-0 mr-0' />

<Heading
  url='/datasets/{dataset_id}/documents/{document_id}'
  method='GET'
  title='Get Document Detail'
  name='#get-document-detail'
/>
<Row>
  <Col>
    Get a document's details.
    ### Path
    - `dataset_id` (string) Dataset ID
    - `document_id` (string) Document ID

    ### Query
    - `metadata` (string) Metadata filter, can be `all`, `only`, or `without`. Default is `all`.

    ### Response
    Returns the document's details.
  </Col>
  <Col sticky>
    ### Request Example
    <CodeGroup title="Request" tag="GET" label="/datasets/{dataset_id}/documents/{document_id}" targetCode={`curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \\\n-H 'Authorization: Bearer {api_key}'`}>
    ```bash {{ title: 'cURL' }}
    curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \
    -H 'Authorization: Bearer {api_key}'
    ```
    </CodeGroup>

    ### Response Example
    <CodeGroup title="Response">
    ```json {{ title: 'Response' }}
    {
      "id": "f46ae30c-5c11-471b-96d0-464f5f32a7b2",
      "position": 1,
      "data_source_type": "upload_file",
      "data_source_info": {
        "upload_file": {
          ...
        }
      },
      "dataset_process_rule_id": "24b99906-845e-499f-9e3c-d5565dd6962c",
      "dataset_process_rule": {
        "mode": "hierarchical",
        "rules": {
          "pre_processing_rules": [
            {
              "id": "remove_extra_spaces",
              "enabled": true
            },
            {
              "id": "remove_urls_emails",
              "enabled": false
            }
          ],
          "segmentation": {
            "separator": "**********page_ending**********",
            "max_tokens": 1024,
            "chunk_overlap": 0
          },
          "parent_mode": "paragraph",
          "subchunk_segmentation": {
            "separator": "\n",
            "max_tokens": 512,
            "chunk_overlap": 0
          }
        }
      },
      "document_process_rule": {
        "id": "24b99906-845e-499f-9e3c-d5565dd6962c",
        "dataset_id": "48a0db76-d1a9-46c1-ae35-2baaa919a8a9",
        "mode": "hierarchical",
        "rules": {
          "pre_processing_rules": [
            {
              "id": "remove_extra_spaces",
              "enabled": true
            },
            {
              "id": "remove_urls_emails",
              "enabled": false
            }
          ],
          "segmentation": {
            "separator": "**********page_ending**********",
            "max_tokens": 1024,
            "chunk_overlap": 0
          },
          "parent_mode": "paragraph",
          "subchunk_segmentation": {
            "separator": "\n",
            "max_tokens": 512,
            "chunk_overlap": 0
          }
        }
      },
      "name": "xxxx",
      "created_from": "web",
      "created_by": "17f71940-a7b5-4c77-b60f-2bd645c1ffa0",
      "created_at": 1750464191,
      "tokens": null,
      "indexing_status": "waiting",
      "completed_at": null,
      "updated_at": 1750464191,
      "indexing_latency": null,
      "error": null,
      "enabled": true,
      "disabled_at": null,
      "disabled_by": null,
      "archived": false,
      "segment_count": 0,
      "average_segment_length": 0,
      "hit_count": null,
      "display_status": "queuing",
      "doc_form": "hierarchical_model",
      "doc_language": "Chinese Simplified"
    }
    ```
    </CodeGroup>
  </Col>
</Row>
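Since this endpoint is a plain HTTP GET, any client can call it. A short Python sketch of the same request as the cURL example above (the base URL, key, and IDs are placeholders; `metadata` is the optional query filter documented in this section):

```python
import requests

API_BASE_URL = "https://api.dify.ai/v1"  # placeholder; use your deployment's API base URL
API_KEY = "your-dataset-api-key"         # placeholder
dataset_id = "your-dataset-id"           # placeholder
document_id = "your-document-id"         # placeholder

resp = requests.get(
    f"{API_BASE_URL}/datasets/{dataset_id}/documents/{document_id}",
    headers={"Authorization": f"Bearer {API_KEY}"},
    params={"metadata": "all"},  # one of: all (default), only, without
    timeout=30,
)
resp.raise_for_status()
doc = resp.json()
print(doc["name"], doc["indexing_status"])
```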
___
<hr className='ml-0 mr-0' />

<Heading
  url='/datasets/{dataset_id}/documents/status/{action}'
  method='PATCH'

@@ -881,6 +881,130 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi

<hr className='ml-0 mr-0' />

<Heading
  url='/datasets/{dataset_id}/documents/{document_id}'
  method='GET'
  title='Get Document Detail'
  name='#get-document-detail'
/>
<Row>
  <Col>
    Get a document's details.
    ### Path
    - `dataset_id` (string) Knowledge base ID
    - `document_id` (string) Document ID

    ### Query
    - `metadata` (string) Metadata filter condition: `all`, `only`, or `without`. Default is `all`.

    ### Response
    Returns the details of the knowledge base document.
  </Col>
  <Col sticky>
    ### Request Example
    <CodeGroup title="Request" tag="GET" label="/datasets/{dataset_id}/documents/{document_id}" targetCode={`curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \\\n-H 'Authorization: Bearer {api_key}'`}>
    ```bash {{ title: 'cURL' }}
    curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \
    -H 'Authorization: Bearer {api_key}'
    ```
    </CodeGroup>

    ### Response Example
    <CodeGroup title="Response">
    ```json {{ title: 'Response' }}
    {
      "id": "f46ae30c-5c11-471b-96d0-464f5f32a7b2",
      "position": 1,
      "data_source_type": "upload_file",
      "data_source_info": {
        "upload_file": {
          ...
        }
      },
      "dataset_process_rule_id": "24b99906-845e-499f-9e3c-d5565dd6962c",
      "dataset_process_rule": {
        "mode": "hierarchical",
        "rules": {
          "pre_processing_rules": [
            {
              "id": "remove_extra_spaces",
              "enabled": true
            },
            {
              "id": "remove_urls_emails",
              "enabled": false
            }
          ],
          "segmentation": {
            "separator": "**********page_ending**********",
            "max_tokens": 1024,
            "chunk_overlap": 0
          },
          "parent_mode": "paragraph",
          "subchunk_segmentation": {
            "separator": "\n",
            "max_tokens": 512,
            "chunk_overlap": 0
          }
        }
      },
      "document_process_rule": {
        "id": "24b99906-845e-499f-9e3c-d5565dd6962c",
        "dataset_id": "48a0db76-d1a9-46c1-ae35-2baaa919a8a9",
        "mode": "hierarchical",
        "rules": {
          "pre_processing_rules": [
            {
              "id": "remove_extra_spaces",
              "enabled": true
            },
            {
              "id": "remove_urls_emails",
              "enabled": false
            }
          ],
          "segmentation": {
            "separator": "**********page_ending**********",
            "max_tokens": 1024,
            "chunk_overlap": 0
          },
          "parent_mode": "paragraph",
          "subchunk_segmentation": {
            "separator": "\n",
            "max_tokens": 512,
            "chunk_overlap": 0
          }
        }
      },
      "name": "xxxx",
      "created_from": "web",
      "created_by": "17f71940-a7b5-4c77-b60f-2bd645c1ffa0",
      "created_at": 1750464191,
      "tokens": null,
      "indexing_status": "waiting",
      "completed_at": null,
      "updated_at": 1750464191,
      "indexing_latency": null,
      "error": null,
      "enabled": true,
      "disabled_at": null,
      "disabled_by": null,
      "archived": false,
      "segment_count": 0,
      "average_segment_length": 0,
      "hit_count": null,
      "display_status": "queuing",
      "doc_form": "hierarchical_model",
      "doc_language": "Chinese Simplified"
    }
    ```
    </CodeGroup>
  </Col>
</Row>
___
<hr className='ml-0 mr-0' />

<Heading
  url='/datasets/{dataset_id}/documents/status/{action}'
  method='PATCH'
@@ -1131,6 +1131,130 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi

<hr className='ml-0 mr-0' />

<Heading
  url='/datasets/{dataset_id}/documents/{document_id}'
  method='GET'
  title='Get Document Detail'
  name='#get-document-detail'
/>
<Row>
  <Col>
    Get a document's details.
    ### Path
    - `dataset_id` (string) Knowledge base ID
    - `document_id` (string) Document ID

    ### Query
    - `metadata` (string) Metadata filter condition: `all`, `only`, or `without`. Default is `all`.

    ### Response
    Returns the details of the knowledge base document.
  </Col>
  <Col sticky>
    ### Request Example
    <CodeGroup title="Request" tag="GET" label="/datasets/{dataset_id}/documents/{document_id}" targetCode={`curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \\\n-H 'Authorization: Bearer {api_key}'`}>
    ```bash {{ title: 'cURL' }}
    curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \
    -H 'Authorization: Bearer {api_key}'
    ```
    </CodeGroup>

    ### Response Example
    <CodeGroup title="Response">
    ```json {{ title: 'Response' }}
    {
      "id": "f46ae30c-5c11-471b-96d0-464f5f32a7b2",
      "position": 1,
      "data_source_type": "upload_file",
      "data_source_info": {
        "upload_file": {
          ...
        }
      },
      "dataset_process_rule_id": "24b99906-845e-499f-9e3c-d5565dd6962c",
      "dataset_process_rule": {
        "mode": "hierarchical",
        "rules": {
          "pre_processing_rules": [
            {
              "id": "remove_extra_spaces",
              "enabled": true
            },
            {
              "id": "remove_urls_emails",
              "enabled": false
            }
          ],
          "segmentation": {
            "separator": "**********page_ending**********",
            "max_tokens": 1024,
            "chunk_overlap": 0
          },
          "parent_mode": "paragraph",
          "subchunk_segmentation": {
            "separator": "\n",
            "max_tokens": 512,
            "chunk_overlap": 0
          }
        }
      },
      "document_process_rule": {
        "id": "24b99906-845e-499f-9e3c-d5565dd6962c",
        "dataset_id": "48a0db76-d1a9-46c1-ae35-2baaa919a8a9",
        "mode": "hierarchical",
        "rules": {
          "pre_processing_rules": [
            {
              "id": "remove_extra_spaces",
              "enabled": true
            },
            {
              "id": "remove_urls_emails",
              "enabled": false
            }
          ],
          "segmentation": {
            "separator": "**********page_ending**********",
            "max_tokens": 1024,
            "chunk_overlap": 0
          },
          "parent_mode": "paragraph",
          "subchunk_segmentation": {
            "separator": "\n",
            "max_tokens": 512,
            "chunk_overlap": 0
          }
        }
      },
      "name": "xxxx",
      "created_from": "web",
      "created_by": "17f71940-a7b5-4c77-b60f-2bd645c1ffa0",
      "created_at": 1750464191,
      "tokens": null,
      "indexing_status": "waiting",
      "completed_at": null,
      "updated_at": 1750464191,
      "indexing_latency": null,
      "error": null,
      "enabled": true,
      "disabled_at": null,
      "disabled_by": null,
      "archived": false,
      "segment_count": 0,
      "average_segment_length": 0,
      "hit_count": null,
      "display_status": "queuing",
      "doc_form": "hierarchical_model",
      "doc_language": "Chinese Simplified"
    }
    ```
    </CodeGroup>
  </Col>
</Row>
___
<hr className='ml-0 mr-0' />

<Heading
  url='/datasets/{dataset_id}/documents/status/{action}'
  method='PATCH'
@@ -25,10 +25,13 @@ const Layout: FC<{
  }

  let appCode: string | null = null
  if (redirectUrl)
    appCode = redirectUrl?.split('/').pop() || null
  else
  if (redirectUrl) {
    const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`)
    appCode = url.pathname.split('/').pop() || null
  }
  else {
    appCode = pathname.split('/').pop() || null
  }

  if (!appCode)
    return

@@ -25,7 +25,10 @@ export default function CheckCode() {
  const redirectUrl = searchParams.get('redirect_url')

  const getAppCodeFromRedirectUrl = useCallback(() => {
    const appCode = redirectUrl?.split('/').pop()
    if (!redirectUrl)
      return null
    const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`)
    const appCode = url.pathname.split('/').pop()
    if (!appCode)
      return null

@@ -62,7 +65,7 @@ export default function CheckCode() {
      localStorage.setItem('webapp_access_token', ret.data.access_token)
      const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: ret.data.access_token })
      await setAccessToken(appCode, tokenResp.access_token)
      router.replace(redirectUrl)
      router.replace(decodeURIComponent(redirectUrl))
    }
  }
  catch (error) { console.error(error) }

@@ -23,7 +23,10 @@ const ExternalMemberSSOAuth = () => {
  }

  const getAppCodeFromRedirectUrl = useCallback(() => {
    const appCode = redirectUrl?.split('/').pop()
    if (!redirectUrl)
      return null
    const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`)
    const appCode = url.pathname.split('/').pop()
    if (!appCode)
      return null

@@ -1,3 +1,4 @@
'use client'
import Link from 'next/link'
import { useCallback, useState } from 'react'
import { useTranslation } from 'react-i18next'

@@ -33,7 +34,10 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut
  const redirectUrl = searchParams.get('redirect_url')

  const getAppCodeFromRedirectUrl = useCallback(() => {
    const appCode = redirectUrl?.split('/').pop()
    if (!redirectUrl)
      return null
    const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`)
    const appCode = url.pathname.split('/').pop()
    if (!appCode)
      return null

@@ -87,7 +91,7 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut
      localStorage.setItem('webapp_access_token', res.data.access_token)
      const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: res.data.access_token })
      await setAccessToken(appCode, tokenResp.access_token)
      router.replace(redirectUrl)
      router.replace(decodeURIComponent(redirectUrl))
    }
    else {
      Toast.notify({

@@ -23,7 +23,10 @@ const SSOAuth: FC<SSOAuthProps> = ({

  const redirectUrl = searchParams.get('redirect_url')
  const getAppCodeFromRedirectUrl = useCallback(() => {
    const appCode = redirectUrl?.split('/').pop()
    if (!redirectUrl)
      return null
    const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`)
    const appCode = url.pathname.split('/').pop()
    if (!appCode)
      return null

@@ -46,7 +46,10 @@ const WebSSOForm: FC = () => {
  }

  const getAppCodeFromRedirectUrl = useCallback(() => {
    const appCode = redirectUrl?.split('/').pop()
    if (!redirectUrl)
      return null
    const url = new URL(`${window.location.origin}${decodeURIComponent(redirectUrl)}`)
    const appCode = url.pathname.split('/').pop()
    if (!appCode)
      return null

@@ -63,20 +66,20 @@ const WebSSOForm: FC = () => {
      localStorage.setItem('webapp_access_token', tokenFromUrl)
      const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: tokenFromUrl })
      await setAccessToken(appCode, tokenResp.access_token)
      router.replace(redirectUrl)
      router.replace(decodeURIComponent(redirectUrl))
      return
    }
    if (appCode && redirectUrl && localStorage.getItem('webapp_access_token')) {
      const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: localStorage.getItem('webapp_access_token') })
      await setAccessToken(appCode, tokenResp.access_token)
      router.replace(redirectUrl)
      router.replace(decodeURIComponent(redirectUrl))
    }
  })()
  }, [getAppCodeFromRedirectUrl, redirectUrl, router, tokenFromUrl, message])

  useEffect(() => {
    if (webAppAccessMode && webAppAccessMode === AccessMode.PUBLIC && redirectUrl)
      router.replace(redirectUrl)
      router.replace(decodeURIComponent(redirectUrl))
  }, [webAppAccessMode, router, redirectUrl])

  if (tokenFromUrl) {

@@ -80,6 +80,8 @@ import {
import PluginDependency from '@/app/components/workflow/plugin-dependency'
import { supportFunctionCall } from '@/utils/tool-call'
import { MittProvider } from '@/context/mitt-context'
import { fetchAndMergeValidCompletionParams } from '@/utils/completion-params'
import Toast from '@/app/components/base/toast'

type PublishConfig = {
  modelConfig: ModelConfig

@@ -453,7 +455,21 @@ const Configuration: FC = () => {
      ...visionConfig,
      enabled: supportVision,
    }, true)
    setCompletionParams({})

    try {
      const { params: filtered, removedDetails } = await fetchAndMergeValidCompletionParams(
        provider,
        modelId,
        completionParams,
      )
      if (Object.keys(removedDetails).length)
        Toast.notify({ type: 'warning', message: `${t('common.modelProvider.parametersInvalidRemoved')}: ${Object.entries(removedDetails).map(([k, reason]) => `${k} (${reason})`).join(', ')}` })
      setCompletionParams(filtered)
    }
    catch (e) {
      Toast.notify({ type: 'error', message: t('common.error') })
      setCompletionParams({})
    }
  }

  const isShowVisionConfig = !!currModel?.features?.includes(ModelFeatureEnum.vision)
27
web/app/components/base/button/sync-button.tsx
Normal file
@ -0,0 +1,27 @@
|
||||
'use client'
|
||||
import type { FC } from 'react'
|
||||
import React from 'react'
|
||||
import { RiRefreshLine } from '@remixicon/react'
|
||||
import cn from '@/utils/classnames'
|
||||
import TooltipPlus from '@/app/components/base/tooltip'
|
||||
|
||||
type Props = {
|
||||
className?: string,
|
||||
popupContent?: string,
|
||||
onClick: () => void
|
||||
}
|
||||
|
||||
const SyncButton: FC<Props> = ({
|
||||
className,
|
||||
popupContent = '',
|
||||
onClick,
|
||||
}) => {
|
||||
return (
|
||||
<TooltipPlus popupContent={popupContent}>
|
||||
<div className={cn(className, 'cursor-pointer select-none rounded-md p-1 hover:bg-state-base-hover')} onClick={onClick}>
|
||||
<RiRefreshLine className='h-4 w-4 text-text-tertiary' />
|
||||
</div>
|
||||
</TooltipPlus>
|
||||
)
|
||||
}
|
||||
export default React.memo(SyncButton)
|
||||
@ -11,6 +11,7 @@ export const preprocessLaTeX = (content: string) => {
|
||||
|
||||
const codeBlockRegex = /```[\s\S]*?```/g
|
||||
const codeBlocks = content.match(codeBlockRegex) || []
|
||||
const escapeReplacement = (str: string) => str.replace(/\$/g, '_TMP_REPLACE_DOLLAR_')
|
||||
let processedContent = content.replace(codeBlockRegex, 'CODE_BLOCK_PLACEHOLDER')
|
||||
|
||||
processedContent = flow([
|
||||
@ -21,9 +22,11 @@ export const preprocessLaTeX = (content: string) => {
|
||||
])(processedContent)
|
||||
|
||||
codeBlocks.forEach((block) => {
|
||||
processedContent = processedContent.replace('CODE_BLOCK_PLACEHOLDER', block)
|
||||
processedContent = processedContent.replace('CODE_BLOCK_PLACEHOLDER', escapeReplacement(block))
|
||||
})
|
||||
|
||||
processedContent = processedContent.replace(/_TMP_REPLACE_DOLLAR_/g, '$')
|
||||
|
||||
return processedContent
|
||||
}
|
||||
|
||||
|
||||
@ -3,6 +3,7 @@ import { Fragment, cloneElement, useRef } from 'react'
|
||||
import cn from '@/utils/classnames'
|
||||
|
||||
export type HtmlContentProps = {
|
||||
open?: boolean
|
||||
onClose?: () => void
|
||||
onClick?: () => void
|
||||
}
|
||||
@ -100,7 +101,8 @@ export default function CustomPopover({
|
||||
}
|
||||
>
|
||||
{cloneElement(htmlContent as React.ReactElement, {
|
||||
onClose: () => onMouseLeave(open),
|
||||
open,
|
||||
onClose: close,
|
||||
...(manualClose
|
||||
? {
|
||||
onClick: close,
|
||||
|
||||
@ -507,13 +507,15 @@ const StepTwo = ({
|
||||
const separator = rules.segmentation.separator
|
||||
const max = rules.segmentation.max_tokens
|
||||
const overlap = rules.segmentation.chunk_overlap
|
||||
const isHierarchicalDocument = documentDetail.doc_form === ChunkingMode.parentChild
|
||||
|| (rules.parent_mode && rules.subchunk_segmentation)
|
||||
setSegmentIdentifier(separator)
|
||||
setMaxChunkLength(max)
|
||||
setOverlap(overlap!)
|
||||
setRules(rules.pre_processing_rules)
|
||||
setDefaultConfig(rules)
|
||||
|
||||
if (documentDetail.dataset_process_rule.mode === 'hierarchical') {
|
||||
if (isHierarchicalDocument) {
|
||||
setParentChildConfig({
|
||||
chunkForContext: rules.parent_mode || 'paragraph',
|
||||
parent: {
|
||||
|
||||
@ -30,6 +30,7 @@ import useEditDocumentMetadata from '../metadata/hooks/use-edit-dataset-metadata
|
||||
import DatasetMetadataDrawer from '../metadata/metadata-dataset/dataset-metadata-drawer'
|
||||
import StatusWithAction from '../common/document-status-with-action/status-with-action'
|
||||
import { useDocLink } from '@/context/i18n'
|
||||
import { useFetchDefaultProcessRule } from '@/service/knowledge/use-create-dataset'
|
||||
|
||||
const FolderPlusIcon = ({ className }: React.SVGProps<SVGElement>) => {
|
||||
return <svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg" className={className ?? ''}>
|
||||
@ -178,6 +179,8 @@ const Documents: FC<IDocumentsProps> = ({ datasetId }) => {
|
||||
router.push(`/datasets/${datasetId}/documents/create`)
|
||||
}
|
||||
|
||||
const fetchDefaultProcessRuleMutation = useFetchDefaultProcessRule()
|
||||
|
||||
const handleSaveNotionPageSelected = async (selectedPages: NotionPage[]) => {
|
||||
const workspacesMap = groupBy(selectedPages, 'workspace_id')
|
||||
const workspaces = Object.keys(workspacesMap).map((workspaceId) => {
|
||||
@ -186,6 +189,7 @@ const Documents: FC<IDocumentsProps> = ({ datasetId }) => {
|
||||
pages: workspacesMap[workspaceId],
|
||||
}
|
||||
})
|
||||
const { rules } = await fetchDefaultProcessRuleMutation.mutateAsync('/datasets/process-rule')
|
||||
const params = {
|
||||
data_source: {
|
||||
type: dataset?.data_source_type,
|
||||
@ -209,7 +213,7 @@ const Documents: FC<IDocumentsProps> = ({ datasetId }) => {
|
||||
},
|
||||
indexing_technique: dataset?.indexing_technique,
|
||||
process_rule: {
|
||||
rules: {},
|
||||
rules,
|
||||
mode: ProcessMode.general,
|
||||
},
|
||||
} as CreateDocumentReq
|
||||
|
||||
@@ -9,6 +9,8 @@ import { useAppContext } from '@/context/app-context'
import { fetchNotionConnection } from '@/service/common'
import NotionIcon from '@/app/components/base/notion-icon'
import { noop } from 'lodash-es'
import { useTranslation } from 'react-i18next'
import Toast from '@/app/components/base/toast'

const Icon: FC<{
  src: string
@@ -33,6 +35,7 @@ const DataSourceNotion: FC<Props> = ({
  const { isCurrentWorkspaceManager } = useAppContext()
  const [canConnectNotion, setCanConnectNotion] = useState(false)
  const { data } = useSWR(canConnectNotion ? '/oauth/data-source/notion' : null, fetchNotionConnection)
  const { t } = useTranslation()

  const connected = !!workspaces.length

@@ -51,9 +54,19 @@ const DataSourceNotion: FC<Props> = ({
  }

  useEffect(() => {
    if (data?.data)
      window.location.href = data.data
  }, [data])
    if (data && 'data' in data) {
      if (data.data && typeof data.data === 'string' && data.data.startsWith('http')) {
        window.location.href = data.data
      }
      else if (data.data === 'internal') {
        Toast.notify({
          type: 'info',
          message: t('common.dataSource.notion.integratedAlert'),
        })
      }
    }
  }, [data, t])

  return (
    <Panel
      type={DataSourceType.notion}

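The rewritten effect above no longer blindly follows whatever string the endpoint returns: it redirects only when the payload looks like a real URL, and shows an informational toast for the `'internal'` sentinel (credentials managed server-side, so no OAuth round trip is needed). A reduced sketch of the same guard, with `notifyInfo` as a hypothetical stand-in for the `Toast.notify` call:

```ts
type ConnectionResponse = { data?: unknown }

// Hypothetical notifier standing in for Toast.notify in the hunk above.
const notifyInfo = (message: string) => console.log(`[info] ${message}`)

function handleNotionConnection(res: ConnectionResponse | undefined) {
  if (!res || !('data' in res))
    return
  const payload = res.data
  // Only follow the redirect when the backend returned an actual URL.
  if (typeof payload === 'string' && payload.startsWith('http'))
    window.location.href = payload
  // 'internal' means Notion is wired up via server-side credentials.
  else if (payload === 'internal')
    notifyInfo('Notion is integrated via internal credential, no need to re-authorize.')
}
```
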
@@ -14,7 +14,7 @@ const HeaderWrapper = ({
}: HeaderWrapperProps) => {
  const pathname = usePathname()
  const isBordered = ['/apps', '/datasets', '/datasets/create', '/tools'].includes(pathname)
  // // Check if the current path is a workflow canvas & fullscreen
  // Check if the current path is a workflow canvas & fullscreen
  const inWorkflowCanvas = pathname.endsWith('/workflow')
  const workflowCanvasMaximize = localStorage.getItem('workflow-canvas-maximize') === 'true'
  const [hideHeader, setHideHeader] = useState(workflowCanvasMaximize)
@@ -25,14 +25,12 @@ const HeaderWrapper = ({
    setHideHeader(v.payload)
  })

  if (hideHeader && inWorkflowCanvas)
    return null

  return (
    <div className={classNames(
      'sticky left-0 right-0 top-0 z-[15] flex min-h-[56px] shrink-0 grow-0 basis-auto flex-col',
      s.header,
      isBordered ? 'border-b border-divider-regular' : '',
      hideHeader && inWorkflowCanvas && 'hidden',
    )}
    >
      {children}

@@ -62,7 +62,7 @@ const AppInputsPanel = ({
      return []
    let inputFormSchema = []
    if (isBasicApp) {
      inputFormSchema = currentApp.model_config.user_input_form.filter((item: any) => !item.external_data_tool).map((item: any) => {
      inputFormSchema = currentApp.model_config?.user_input_form?.filter((item: any) => !item.external_data_tool).map((item: any) => {
        if (item.paragraph) {
          return {
            ...item.paragraph,
@@ -108,10 +108,10 @@ const AppInputsPanel = ({
          type: 'text-input',
          required: false,
        }
      })
      }) || []
    }
    else {
      const startNode = currentWorkflow?.graph.nodes.find(node => node.data.type === BlockEnum.Start) as any
      const startNode = currentWorkflow?.graph?.nodes.find(node => node.data.type === BlockEnum.Start) as any
      inputFormSchema = startNode?.data.variables.map((variable: any) => {
        if (variable.type === InputVarType.multiFiles) {
          return {
@@ -132,7 +132,7 @@ const AppInputsPanel = ({
          ...variable,
          required: false,
        }
      })
      }) || []
    }
    if ((currentApp.mode === 'completion' || currentApp.mode === 'workflow') && basicAppFileConfig.enabled) {
      inputFormSchema.push({
@@ -144,7 +144,7 @@ const AppInputsPanel = ({
        fileUploadConfig,
      })
    }
    return inputFormSchema
    return inputFormSchema || []
  }, [basicAppFileConfig, currentApp, currentWorkflow, fileUploadConfig, isBasicApp])

  const handleFormChange = (value: Record<string, any>) => {

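Each branch of the memo above now optional-chains through possibly missing config (`model_config?.user_input_form?`, `graph?.nodes`) and appends `|| []`, so the later `inputFormSchema.push(...)` can never run on `undefined`. A reduced illustration of the pattern; the types are illustrative, not the app's real ones:

```ts
type Field = { name: string }
type AppConfig = { model_config?: { user_input_form?: Field[] } }

// Without the fallbacks, a missing model_config or user_input_form would
// leave the schema undefined and crash the subsequent push().
function buildSchema(app: AppConfig): Field[] {
  const schema = app.model_config?.user_input_form?.map(f => ({ ...f })) || []
  schema.push({ name: 'file_upload' }) // safe: schema is always an array
  return schema
}

console.log(buildSchema({})) // [ { name: 'file_upload' } ]
console.log(buildSchema({ model_config: { user_input_form: [{ name: 'q' }] } }))
```
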
@@ -18,6 +18,15 @@ type Props = {
  onSaved: (value: Record<string, any>) => void
}

const extractDefaultValues = (schemas: any[]) => {
  const result: Record<string, any> = {}
  for (const field of schemas) {
    if (field.default !== undefined)
      result[field.name] = field.default
  }
  return result
}

const EndpointModal: FC<Props> = ({
  formSchemas,
  defaultValues = {},
@@ -26,7 +35,10 @@ const EndpointModal: FC<Props> = ({
}) => {
  const getValueFromI18nObject = useRenderI18nObject()
  const { t } = useTranslation()
  const [tempCredential, setTempCredential] = React.useState<any>(defaultValues)
  const initialValues = Object.keys(defaultValues).length > 0
    ? defaultValues
    : extractDefaultValues(formSchemas)
  const [tempCredential, setTempCredential] = React.useState<any>(initialValues)

  const handleSave = () => {
    for (const field of formSchemas) {

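The new `extractDefaultValues` helper above seeds the endpoint form from the schema's own `default` fields, but only when no saved values exist. A standalone sketch of that decision, with the schema shape simplified from the hunk:

```ts
type FormSchema = { name: string, default?: unknown }

// Mirrors extractDefaultValues from the hunk above.
const extractDefaultValues = (schemas: FormSchema[]) => {
  const result: Record<string, unknown> = {}
  for (const field of schemas) {
    if (field.default !== undefined)
      result[field.name] = field.default
  }
  return result
}

const formSchemas: FormSchema[] = [
  { name: 'api_key' },
  { name: 'timeout', default: 30 },
]

// Saved credentials win; otherwise fall back to the schema's declared defaults.
const defaultValues: Record<string, unknown> = {}
const initialValues = Object.keys(defaultValues).length > 0
  ? defaultValues
  : extractDefaultValues(formSchemas)

console.log(initialValues) // { timeout: 30 }
```
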
@@ -85,14 +85,6 @@ const TextGeneration: FC<IMainProps> = ({

  const router = useRouter()
  const pathname = usePathname()
  useEffect(() => {
    const params = new URLSearchParams(searchParams)
    if (params.has('mode')) {
      params.delete('mode')
      router.replace(`${pathname}?${params.toString()}`)
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [])

  // Notice this situation isCallBatchAPI but not in batch tab
  const [isCallBatchAPI, setIsCallBatchAPI] = useState(false)

@@ -6,9 +6,9 @@ import Item from './item'
import type { Plugin } from '@/app/components/plugins/types.ts'
import cn from '@/utils/classnames'
import Link from 'next/link'
import { MARKETPLACE_URL_PREFIX } from '@/config'
import { RiArrowRightUpLine, RiSearchLine } from '@remixicon/react'
import { noop } from 'lodash-es'
import { getMarketplaceUrl } from '@/utils/var'

export type ListProps = {
  wrapElemRef: React.RefObject<HTMLElement>
@@ -32,7 +32,7 @@ const List = forwardRef<ListRef, ListProps>(({
  const { t } = useTranslation()
  const hasFilter = !searchText
  const hasRes = list.length > 0
  const urlWithSearchText = `${MARKETPLACE_URL_PREFIX}/?q=${searchText}&tags=${tags.join(',')}`
  const urlWithSearchText = getMarketplaceUrl('', { q: searchText, tags: tags.join(',') })
  const nextToStickyELemRef = useRef<HTMLDivElement>(null)

  const { handleScroll, scrollPosition } = useStickyScroll({
@@ -71,7 +71,7 @@ const List = forwardRef<ListRef, ListProps>(({
  return (
    <Link
      className='system-sm-medium sticky bottom-0 z-10 flex h-8 cursor-pointer items-center rounded-b-lg border-[0.5px] border-t border-components-panel-border bg-components-panel-bg-blur px-4 py-1 text-text-accent-light-mode-only shadow-lg'
      href={`${MARKETPLACE_URL_PREFIX}/`}
      href={getMarketplaceUrl('')}
      target='_blank'
    >
      <span>{t('plugin.findMoreInMarketplace')}</span>

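Both marketplace links above move from raw template-string concatenation, which left `searchText` unencoded, to a `getMarketplaceUrl` helper. The real helper lives in `@/utils/var` and may differ; a plausible sketch of what such a helper does, with an illustrative prefix value:

```ts
const MARKETPLACE_URL_PREFIX = 'https://marketplace.dify.ai' // illustrative value

// Builds a marketplace URL with properly encoded query parameters.
function getMarketplaceUrl(path: string, params?: Record<string, string>) {
  const url = new URL(`${MARKETPLACE_URL_PREFIX}${path || '/'}`)
  for (const [key, value] of Object.entries(params ?? {})) {
    if (value)
      url.searchParams.set(key, value) // URLSearchParams handles encoding
  }
  return url.toString()
}

console.log(getMarketplaceUrl('', { q: 'json & yaml', tags: 'tools' }))
// https://marketplace.dify.ai/?q=json+%26+yaml&tags=tools
```
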
@@ -11,12 +11,11 @@ import {
  useEditInspectorVar,
  useInvalidateConversationVarValues,
  useInvalidateSysVarValues,
  useLastRun,
  useResetConversationVar,
  useResetToLastRunValue,
  useSysVarValues,
} from '@/service/use-workflow'
import { useCallback, useEffect, useState } from 'react'
import { useCallback } from 'react'
import { isConversationVar, isENV, isSystemVar } from '../nodes/_base/components/variable/utils'
import produce from 'immer'
import type { Node } from '@/app/components/workflow/types'
@@ -118,15 +117,18 @@ const useInspectVarsCrud = () => {
      if (nodeInfo) {
        const index = draft.findIndex(node => node.nodeId === nodeId)
        if (index === -1) {
          draft.push({
          draft.unshift({
            nodeId,
            nodeType: nodeInfo.data.type,
            title: nodeInfo.data.title,
            vars: payload,
            nodePayload: nodeInfo.data,
          })
        }
        else {
          draft[index].vars = payload
          // put the node to the top
          draft.unshift(draft.splice(index, 1)[0])
        }
      }
    })
@@ -180,16 +182,6 @@ const useInspectVarsCrud = () => {
    invalidateSysVarValues()
  }, [doEditInspectorVar, invalidateConversationVarValues, invalidateSysVarValues, setInspectVarValue])

  const [currNodeId, setCurrNodeId] = useState<string | null>(null)
  const [currEditVarId, setCurrEditVarId] = useState<string | null>(null)
  const { data } = useLastRun(appId, currNodeId || '', !!currNodeId)
  useEffect(() => {
    if (data && currNodeId && currEditVarId) {
      const inspectVar = getNodeInspectVars(currNodeId)?.vars?.find(item => item.id === currEditVarId)
      resetToLastRunVarInStore(currNodeId, currEditVarId, data.outputs?.[inspectVar?.selector?.[1] || ''])
    }
  }, [data, currNodeId, currEditVarId, getNodeInspectVars, editInspectVarValue, resetToLastRunVarInStore])

  const renameInspectVarName = async (nodeId: string, oldName: string, newName: string) => {
    const varId = getVarId(nodeId, oldName)
    if (!varId)
@@ -212,9 +204,13 @@ const useInspectVarsCrud = () => {
  }, [getInspectVar])

  const resetToLastRunVar = async (nodeId: string, varId: string) => {
    await doResetToLastRunValue(varId)
    setCurrNodeId(nodeId)
    setCurrEditVarId(varId)
    const isSysVar = nodeId === 'sys'
    const data = await doResetToLastRunValue(varId)

    if (isSysVar)
      invalidateSysVarValues()
    else
      resetToLastRunVarInStore(nodeId, varId, data.value)
  }

  return {

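The change above keeps the most recently written node at the head of the inspect list: new entries are unshifted, and existing ones are moved to the front with `splice` plus `unshift` inside an immer draft. The pattern in isolation, with a simplified record type:

```ts
import produce from 'immer'

type NodeVars = { nodeId: string, vars: string[] }

// Upsert a node's vars and surface it at the top of the list.
function upsertToFront(list: NodeVars[], nodeId: string, vars: string[]) {
  return produce(list, (draft) => {
    const index = draft.findIndex(node => node.nodeId === nodeId)
    if (index === -1) {
      draft.unshift({ nodeId, vars })
    }
    else {
      draft[index].vars = vars
      // Move the existing entry to the front.
      draft.unshift(draft.splice(index, 1)[0])
    }
  })
}

const next = upsertToFront([{ nodeId: 'a', vars: [] }], 'a', ['x'])
console.log(next) // [ { nodeId: 'a', vars: [ 'x' ] } ]
```
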
@@ -15,6 +15,7 @@ import {
import useToggleExpend from '@/app/components/workflow/nodes/_base/hooks/use-toggle-expend'
import type { FileEntity } from '@/app/components/base/file-uploader/types'
import FileListInLog from '@/app/components/base/file-uploader/file-list-in-log'
import ActionButton from '@/app/components/base/action-button'

type Props = {
  className?: string
@@ -88,15 +89,16 @@ const Base: FC<Props> = ({
            <CodeGeneratorButton onGenerated={onGenerated} codeLanguages={codeLanguages} />
          </div>
        )}
        {!isCopied
          ? (
            <Clipboard className='mx-1 h-3.5 w-3.5 cursor-pointer text-text-tertiary' onClick={handleCopy} />
          )
          : (
            <ClipboardCheck className='mx-1 h-3.5 w-3.5 text-text-tertiary' />
          )
        }

        <ActionButton className='ml-1' onClick={handleCopy}>
          {!isCopied
            ? (
              <Clipboard className='h-4 w-4 cursor-pointer' />
            )
            : (
              <ClipboardCheck className='h-4 w-4' />
            )
          }
        </ActionButton>
        <div className='ml-1'>
          <ToggleExpandBtn isExpand={isExpand} onExpandChange={setIsExpand} />
        </div>

@@ -15,7 +15,7 @@ import { pluginManifestToCardPluginProps } from '@/app/components/plugins/instal
import { Badge as Badge2, BadgeState } from '@/app/components/base/badge/index'
import Link from 'next/link'
import { useTranslation } from 'react-i18next'
import { MARKETPLACE_URL_PREFIX } from '@/config'
import { getMarketplaceUrl } from '@/utils/var'

export type SwitchPluginVersionProps = {
  uniqueIdentifier: string
@@ -82,7 +82,7 @@ export const SwitchPluginVersion: FC<SwitchPluginVersionProps> = (props) => {
      modalBottomLeft={
        <Link
          className='flex items-center justify-center gap-1'
          href={`${MARKETPLACE_URL_PREFIX}/plugins/${pluginDetail.declaration.author}/${pluginDetail.declaration.name}`}
          href={getMarketplaceUrl(`/plugins/${pluginDetail.declaration.author}/${pluginDetail.declaration.name}`)}
          target='_blank'
        >
          <span className='system-xs-regular text-xs text-text-accent'>

@@ -65,10 +65,11 @@ const VarList: FC<Props> = ({
  }, [list, onVarNameChange, onChange])

  const handleVarReferenceChange = useCallback((index: number) => {
    return (value: ValueSelector | string, varKindType: VarKindType) => {
    return (value: ValueSelector | string, varKindType: VarKindType, varInfo?: Var) => {
      const newList = produce(list, (draft) => {
        if (!isSupportConstantValue || varKindType === VarKindType.variable) {
          draft[index].value_selector = value as ValueSelector
          draft[index].value_type = varInfo?.type
          if (isSupportConstantValue)
            draft[index].variable_type = VarKindType.variable

@@ -184,9 +184,11 @@ const VarReferencePicker: FC<Props> = ({
      return startNode?.data

    const node = getNodeInfoById(availableNodes, outputVarNodeId)?.data
    return {
      ...node,
      id: outputVarNodeId,
    if (node) {
      return {
        ...node,
        id: outputVarNodeId,
      }
    }
  }, [value, hasValue, isConstant, isIterationVar, iterationNode, availableNodes, outputVarNodeId, startNode, isLoopVar, loopNode])

@@ -14,6 +14,7 @@ import Split from '@/app/components/workflow/nodes/_base/components/split'
import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/code-editor'
import TypeSelector from '@/app/components/workflow/nodes/_base/components/selector'
import type { NodePanelProps } from '@/app/components/workflow/types'
import SyncButton from '@/app/components/base/button/sync-button'
const i18nPrefix = 'workflow.nodes.code'

const codeLanguages = [
@@ -40,6 +41,7 @@ const Panel: FC<NodePanelProps<CodeNodeType>> = ({
    handleVarListChange,
    handleAddVariable,
    handleRemoveVariable,
    handleSyncFunctionSignature,
    handleCodeChange,
    handleCodeLanguageChange,
    handleVarsChange,
@@ -68,7 +70,12 @@ const Panel: FC<NodePanelProps<CodeNodeType>> = ({
      <Field
        title={t(`${i18nPrefix}.inputVars`)}
        operations={
          !readOnly ? <AddButton onClick={handleAddVariable} /> : undefined
          !readOnly ? (
            <div className="flex gap-2">
              <SyncButton popupContent={t(`${i18nPrefix}.syncFunctionSignature`)} onClick={handleSyncFunctionSignature} />
              <AddButton onClick={handleAddVariable} />
            </div>
          ) : undefined
        }
      >
        <VarList

@@ -84,6 +84,65 @@ const useConfig = (id: string, payload: CodeNodeType) => {
    setInputs(newInputs)
  }, [allLanguageDefault, inputs, setInputs])

  const handleSyncFunctionSignature = useCallback(() => {
    const generateSyncSignatureCode = (code: string) => {
      let mainDefRe
      let newMainDef
      if (inputs.code_language === CodeLanguage.javascript) {
        mainDefRe = /function\s+main\b\s*\([\s\S]*?\)/g
        newMainDef = 'function main({{var_list}})'
        let param_list = inputs.variables?.map(item => item.variable).join(', ') || ''
        param_list = param_list ? `{${param_list}}` : ''
        newMainDef = newMainDef.replace('{{var_list}}', param_list)
      }
      else if (inputs.code_language === CodeLanguage.python3) {
        mainDefRe = /def\s+main\b\s*\([\s\S]*?\)/g
        const param_list = []
        for (const item of inputs.variables) {
          let param = item.variable
          let param_type = ''
          switch (item.value_type) {
            case VarType.string:
              param_type = ': str'
              break
            case VarType.number:
              param_type = ': float'
              break
            case VarType.object:
              param_type = ': dict'
              break
            case VarType.array:
              param_type = ': list'
              break
            case VarType.arrayNumber:
              param_type = ': list[float]'
              break
            case VarType.arrayString:
              param_type = ': list[str]'
              break
            case VarType.arrayObject:
              param_type = ': list[dict]'
              break
          }
          param += param_type
          param_list.push(`${param}`)
        }

        newMainDef = `def main(${param_list.join(', ')})`
      }
      else { return code }

      const newCode = code.replace(mainDefRe, newMainDef)
      return newCode
    }

    const newInputs = produce(inputs, (draft) => {
      draft.code = generateSyncSignatureCode(draft.code)
    })
    setInputs(newInputs)
  }, [inputs, setInputs])

  const {
    handleVarsChange,
    handleAddVariable: handleAddOutputVariable,
@@ -119,6 +178,7 @@ const useConfig = (id: string, payload: CodeNodeType) => {
    handleVarListChange,
    handleAddVariable,
    handleRemoveVariable,
    handleSyncFunctionSignature,
    handleCodeChange,
    handleCodeLanguageChange,
    handleVarsChange,

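The new `handleSyncFunctionSignature` above rewrites the `main(...)` header in the editor to match the configured input variables, using a non-greedy regex so only the parameter list is touched. A standalone sketch of the Python branch; the type map is a subset of the hunk's switch, and the sample variables are illustrative:

```ts
type Variable = { variable: string, value_type?: string }

// Map node variable types to Python annotations (subset of the hunk's switch).
const pyType: Record<string, string> = {
  string: ': str',
  number: ': float',
  object: ': dict',
}

// Replace the existing "def main(...)" header, leaving the body untouched.
function syncPythonSignature(code: string, variables: Variable[]) {
  const mainDefRe = /def\s+main\b\s*\([\s\S]*?\)/g
  const params = variables
    .map(v => `${v.variable}${pyType[v.value_type ?? ''] ?? ''}`)
    .join(', ')
  return code.replace(mainDefRe, `def main(${params})`)
}

const code = 'def main(old_arg):\n    return {"result": old_arg}'
console.log(syncPythonSignature(code, [
  { variable: 'query', value_type: 'string' },
  { variable: 'top_k', value_type: 'number' },
]))
// def main(query: str, top_k: float):
//     return {"result": old_arg}
```

Note that the non-greedy `[\s\S]*?` stops at the first closing parenthesis, which is fine for plain parameter lists but would truncate a default value that itself contains `)`.
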
@@ -19,6 +19,8 @@ import Editor from '@/app/components/workflow/nodes/_base/components/prompt/edit
import StructureOutput from './components/structure-output'
import Switch from '@/app/components/base/switch'
import { RiAlertFill, RiQuestionLine } from '@remixicon/react'
import { fetchAndMergeValidCompletionParams } from '@/utils/completion-params'
import Toast from '@/app/components/base/toast'

const i18nPrefix = 'workflow.nodes.llm'

@@ -68,10 +70,27 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({
    modelId: string
    mode?: string
  }) => {
    handleCompletionParamsChange({})
    handleModelChanged(model)
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [])
    (async () => {
      try {
        const { params: filtered, removedDetails } = await fetchAndMergeValidCompletionParams(
          model.provider,
          model.modelId,
          inputs.model.completion_params,
        )
        const keys = Object.keys(removedDetails)
        if (keys.length)
          Toast.notify({ type: 'warning', message: `${t('common.modelProvider.parametersInvalidRemoved')}: ${keys.map(k => `${k} (${removedDetails[k]})`).join(', ')}` })
        handleCompletionParamsChange(filtered)
      }
      catch (e) {
        Toast.notify({ type: 'error', message: t('common.error') })
        handleCompletionParamsChange({})
      }
      finally {
        handleModelChanged(model)
      }
    })()
  }, [inputs.model.completion_params])

  return (
    <div className='mt-2'>

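On model switch, the panel above now asks for the new model's parameter rules, keeps only the completion params that remain valid, and warns about what was dropped instead of silently resetting everything. The real logic lives in `fetchAndMergeValidCompletionParams` in `@/utils/completion-params`; a hedged sketch of the merge step only, with an assumed rule shape:

```ts
type ParamRules = Record<string, { min?: number, max?: number }>

// Keep only params the target model declares; report everything dropped.
function mergeValidParams(rules: ParamRules, params: Record<string, number>) {
  const filtered: Record<string, number> = {}
  const removedDetails: Record<string, string> = {}
  for (const [key, value] of Object.entries(params)) {
    if (!(key in rules))
      removedDetails[key] = 'unsupported by the selected model'
    else
      filtered[key] = value
  }
  return { params: filtered, removedDetails }
}

const { params, removedDetails } = mergeValidParams(
  { temperature: { min: 0, max: 2 } },
  { temperature: 0.7, top_k: 40 },
)
console.log(params)         // { temperature: 0.7 }
console.log(removedDetails) // { top_k: 'unsupported by the selected model' }
```
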
@@ -136,6 +136,7 @@ export type Variable = {
    variable: string
  }
  value_selector: ValueSelector
  value_type?: VarType
  variable_type?: VarKindType
  value?: string
  options?: string[]

@@ -63,6 +63,17 @@ const Right = ({
    resetConversationVar(currentNodeVar.var.id)
  }

  const getCopyContent = () => {
    const value = currentNodeVar?.var.value
    if (value === null || value === undefined)
      return ''

    if (typeof value === 'object')
      return JSON.stringify(value)

    return String(value)
  }

  return (
    <div className={cn('flex h-full flex-col')}>
      {/* header */}
@@ -124,7 +135,7 @@ const Right = ({
          </Tooltip>
        )}
        {currentNodeVar.var.value_type !== 'secret' && (
          <CopyFeedback content={currentNodeVar.var.value ? JSON.stringify(currentNodeVar.var.value) : ''} />
          <CopyFeedback content={getCopyContent()} />
        )}
      </>
    )}

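The motivation for `getCopyContent` above: the old code ran every value through `JSON.stringify`, which wraps plain strings in literal quote characters, so copying a string variable put `"hello"` rather than `hello` on the clipboard:

```ts
const value = 'hello'
console.log(JSON.stringify(value)) // "hello"  <- quotes end up on the clipboard
console.log(String(value))         // hello

// Objects still need stringifying to be copyable at all:
console.log(JSON.stringify({ a: 1 })) // {"a":1}
```
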
@@ -29,9 +29,10 @@ export const useTabSearchParams = ({
  const router = useRouter()
  const pathName = pathnameFromHook || window?.location?.pathname
  const searchParams = useSearchParams()
  const searchParamValue = searchParams.has(searchParamName) ? decodeURIComponent(searchParams.get(searchParamName)!) : defaultTab
  const [activeTab, setTab] = useState<string>(
    !disableSearchParams
      ? (searchParams.get(searchParamName) || defaultTab)
      ? searchParamValue
      : defaultTab,
  )

@@ -39,7 +40,7 @@
    setTab(newActiveTab)
    if (disableSearchParams)
      return
    router[`${routingBehavior}`](`${pathName}?${searchParamName}=${newActiveTab}`)
    router[`${routingBehavior}`](`${pathName}?${searchParamName}=${encodeURIComponent(newActiveTab)}`)
  }

  return [activeTab, setActiveTab] as const

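The hook above now encodes the tab name before writing it into the URL and decodes it on read, so values containing reserved characters survive the round trip. Without encoding, an `&` in the value would start a new query parameter:

```ts
const tab = 'agent & tools'

console.log(`?category=${tab}`)
// ?category=agent & tools   <- the "&" splits the query string

const encoded = encodeURIComponent(tab)
console.log(`?category=${encoded}`)
// ?category=agent%20%26%20tools

console.log(decodeURIComponent(encoded)) // agent & tools
```
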
@@ -390,6 +390,8 @@ const translation = {
    addChildChunk: 'Untergeordneten Block hinzufügen',
    regenerationConfirmTitle: 'Möchten Sie untergeordnete Chunks regenerieren?',
    searchResults_one: 'ERGEBNIS',
    keywordEmpty: 'Das Schlüsselwort darf nicht leer sein.',
    keywordDuplicate: 'Das Schlüsselwort existiert bereits',
  },
}

@@ -199,9 +199,9 @@ const translation = {
  accessControl: 'Web App Access Control',
  accessItemsDescription: {
    anyone: 'Anyone can access the web app (no login required)',
    specific: 'Only specific members within the platform can access the Web application',
    organization: 'All members within the platform can access the Web application',
    external: 'Only authenticated external users can access the Web application',
    specific: 'Only specific members within the platform can access the web app',
    organization: 'All members within the platform can access the web app',
    external: 'Only authenticated external users can access the web app',
  },
  accessControlDialog: {
    title: 'Web App Access Control',
@@ -218,7 +218,7 @@ const translation = {
  members_one: '{{count}} MEMBER',
  members_other: '{{count}} MEMBERS',
  noGroupsOrMembers: 'No groups or members selected',
  webAppSSONotEnabledTip: 'Please contact your organization administrator to configure external authentication for the Web application.',
  webAppSSONotEnabledTip: 'Please contact your organization administrator to configure external authentication for the web app.',
  operateGroupAndMember: {
    searchPlaceholder: 'Search groups and members',
    allMembers: 'All members',

@@ -456,6 +456,7 @@ const translation = {
  connected: 'Connected',
  disconnected: 'Disconnected',
  changeAuthorizedPages: 'Change authorized pages',
  integratedAlert: 'Notion is integrated via internal credential, no need to re-authorize.',
  pagesAuthorized: 'Pages authorized',
  sync: 'Sync',
  remove: 'Remove',

@@ -549,6 +549,7 @@ const translation = {
    advancedDependencies: 'Advanced Dependencies',
    advancedDependenciesTip: 'Add some preloaded dependencies that take more time to consume or are not default built-in here',
    searchDependencies: 'Search Dependencies',
    syncFunctionSignature: 'Sync function signature to code',
  },
  templateTransform: {
    inputVars: 'Input Variables',

@@ -389,6 +389,8 @@ const translation = {
    characters_one: 'carácter',
    regenerationSuccessMessage: 'Puede cerrar esta ventana.',
    regenerationConfirmTitle: '¿Desea regenerar fragmentos secundarios?',
    keywordEmpty: 'La palabra clave no puede estar vacía',
    keywordDuplicate: 'La palabra clave ya existe',
  },
}

@@ -388,6 +388,8 @@ const translation = {
    regeneratingMessage: 'این ممکن است یک لحظه طول بکشد، لطفا صبر کنید...',
    regenerationConfirmTitle: 'آیا می خواهید تکه های کودک را بازسازی کنید؟',
    regenerationSuccessMessage: 'می توانید این پنجره را ببندید.',
    keywordEmpty: 'کلمه کلیدی نمیتواند خالی باشد',
    keywordDuplicate: 'این کلیدواژه قبلاً وجود دارد',
  },
}

@@ -389,6 +389,8 @@ const translation = {
    searchResults_zero: 'RÉSULTAT',
    empty: 'Aucun Chunk trouvé',
    editChildChunk: 'Modifier le morceau enfant',
    keywordDuplicate: 'Le mot-clé existe déjà',
    keywordEmpty: 'Le mot-clé ne peut pas être vide.',
  },
}

@@ -390,6 +390,8 @@ const translation = {
    chunkAdded: '1 हिस्सा जोड़ा गया',
    chunkDetail: 'चंक विवरण',
    regenerationConfirmMessage: 'चाइल्ड चंक्स को रीजनरेट करने से वर्तमान चाइल्ड चंक्स ओवरराइट हो जाएंगे, जिसमें संपादित चंक्स और नए जोड़े गए चंक्स शामिल हैं। पुनरुत्थान को पूर्ववत नहीं किया जा सकता है।',
    keywordDuplicate: 'कीवर्ड पहले से मौजूद है',
    keywordEmpty: 'कीवर्ड ख़ाली नहीं हो सकता',
  },
}

@@ -391,6 +391,8 @@ const translation = {
    regenerationSuccessMessage: 'È possibile chiudere questa finestra.',
    childChunkAdded: '1 blocco figlio aggiunto',
    childChunks_other: 'BLOCCHI FIGLIO',
    keywordEmpty: 'La parola chiave non può essere vuota',
    keywordDuplicate: 'La parola chiave esiste già',
  },
}

@@ -388,6 +388,8 @@ const translation = {
    editedAt: '編集日時',
    expandChunks: 'チャンクを展開',
    collapseChunks: 'チャンクを折りたたむ',
    keywordDuplicate: 'そのキーワードは既に存在しています',
    keywordEmpty: 'キーワードは空であってはいけません',
  },
}

@@ -550,6 +550,7 @@ const translation = {
    advancedDependencies: '高度な依存関係',
    advancedDependenciesTip: '消費に時間がかかる、またはデフォルトで組み込まれていない事前ロードされた依存関係を追加します',
    searchDependencies: '依存関係を検索',
    syncFunctionSignature: 'コードの関数署名を同期',
  },
  templateTransform: {
    inputVars: '入力変数',

@@ -388,6 +388,8 @@ const translation = {
    addChunk: '청크 추가 (Add Chunk)',
    characters_other: '문자',
    regeneratingMessage: '시간이 걸릴 수 있으니 잠시만 기다려 주십시오...',
    keywordDuplicate: '키워드가 이미 존재합니다.',
    keywordEmpty: '키워드는 비워둘 수 없습니다.',
  },
}

@@ -390,6 +390,8 @@ const translation = {
    newChildChunk: 'Nowy fragment podrzędny',
    clearFilter: 'Wyczyść filtr',
    childChunks_one: 'FRAGMENT POTOMNY',
    keywordDuplicate: 'Słowo kluczowe już istnieje',
    keywordEmpty: 'Słowo kluczowe nie może być puste',
  },
}

@@ -389,6 +389,8 @@ const translation = {
    newChildChunk: 'Novo pedaço filho',
    characters_one: 'personagem',
    parentChunk: 'Pedaço pai',
    keywordEmpty: 'A palavra-chave não pode estar vazia',
    keywordDuplicate: 'A palavra-chave já existe',
  },
}

@@ -389,6 +389,8 @@ const translation = {
    regeneratingTitle: 'Regenerarea bucăților secundare',
    addChildChunk: 'Adăugați o bucată copil',
    searchResults_other: 'REZULTATELE',
    keywordDuplicate: 'Cuvântul cheie există deja',
    keywordEmpty: 'Cuvântul cheie nu poate fi gol',
  },
}

@@ -389,6 +389,8 @@ const translation = {
    characters_one: 'характер',
    addChildChunk: 'Добавить дочерний чанк',
    newChildChunk: 'Новый дочерний чанк',
    keywordEmpty: 'Ключевое слово не может быть пустым',
    keywordDuplicate: 'Ключевое слово уже существует',
  },
}

@@ -389,6 +389,8 @@ const translation = {
    chunk: 'Kos',
    addChunk: 'Dodajanje kosa',
    childChunkAdded: 'Dodan je 1 kos otroka',
    keywordDuplicate: 'Ključna beseda že obstaja',
    keywordEmpty: 'Ključna beseda ne more biti prazna',
  },
}

@@ -388,6 +388,8 @@ const translation = {
    searchResults_other: 'ผลลัพธ์',
    regenerationSuccessMessage: 'คุณสามารถปิดหน้าต่างนี้ได้',
    childChunks_one: 'ก้อนเด็ก',
    keywordDuplicate: 'คำสำคัญมีอยู่แล้ว',
    keywordEmpty: 'คีย์เวิร์ดไม่สามารถว่างเปล่าได้',
  },
}

@@ -388,6 +388,8 @@ const translation = {
    chunks_other: 'Parçalar',
    editedAt: 'Şurada düzenlendi:',
    addChildChunk: 'Alt Parça Ekle',
    keywordDuplicate: 'Anahtar kelime zaten var',
    keywordEmpty: 'Anahtar kelime boş olamaz',
  },
}

@@ -389,6 +389,8 @@ const translation = {
    regenerationSuccessMessage: 'Ви можете закрити це вікно.',
    expandChunks: 'Розгортання фрагментів',
    regenerationConfirmTitle: 'Хочете регенерувати дитячі шматки?',
    keywordEmpty: 'Ключове слово не може бути порожнім',
    keywordDuplicate: 'Ключове слово вже існує',
  },
}

@@ -388,6 +388,8 @@ const translation = {
    clearFilter: 'Bộ lọc rõ ràng',
    chunk: 'Khúc',
    edited: 'EDITED',
    keywordDuplicate: 'Từ khóa đã tồn tại',
    keywordEmpty: 'Từ khóa không được để trống',
  },
}

Some files were not shown because too many files have changed in this diff.