Merge branch 'feat/summary-index' into deploy/dev
@@ -104,9 +104,7 @@ forbidden_modules =
ignore_imports =
    core.workflow.nodes.loop.loop_node -> core.app.workflow.node_factory
    core.workflow.graph_engine.command_channels.redis_channel -> extensions.ext_redis
    core.workflow.graph_engine.layers.observability -> configs
    core.workflow.graph_engine.layers.observability -> extensions.otel.runtime
    core.workflow.graph_engine.layers.persistence -> core.ops.ops_trace_manager
    core.workflow.workflow_entry -> core.app.workflow.layers.observability
    core.workflow.graph_engine.worker_management.worker_pool -> configs
    core.workflow.nodes.agent.agent_node -> core.model_manager
    core.workflow.nodes.agent.agent_node -> core.provider_manager
@@ -147,7 +145,6 @@ ignore_imports =
    core.workflow.workflow_entry -> models.workflow
    core.workflow.nodes.agent.agent_node -> core.agent.entities
    core.workflow.nodes.agent.agent_node -> core.agent.plugin_entities
    core.workflow.graph_engine.layers.persistence -> core.app.entities.app_invoke_entities
    core.workflow.nodes.base.node -> core.app.entities.app_invoke_entities
    core.workflow.nodes.knowledge_index.knowledge_index_node -> core.app.entities.app_invoke_entities
    core.workflow.nodes.knowledge_retrieval.knowledge_retrieval_node -> core.app.app_config.entities
@@ -217,7 +214,6 @@ ignore_imports =
    core.workflow.nodes.llm.node -> core.llm_generator.output_parser.errors
    core.workflow.nodes.llm.node -> core.llm_generator.output_parser.structured_output
    core.workflow.nodes.llm.node -> core.model_manager
    core.workflow.graph_engine.layers.persistence -> core.ops.entities.trace_entity
    core.workflow.nodes.agent.entities -> core.prompt.entities.advanced_prompt_entities
    core.workflow.nodes.knowledge_retrieval.knowledge_retrieval_node -> core.prompt.simple_prompt_transform
    core.workflow.nodes.llm.entities -> core.prompt.entities.advanced_prompt_entities
@@ -1,7 +1,11 @@
"""Helpers for registering Pydantic models with Flask-RESTX namespaces."""

from enum import StrEnum

from flask_restx import Namespace
from pydantic import BaseModel
from pydantic import BaseModel, TypeAdapter

from controllers.console import console_ns

DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"

@@ -19,8 +23,25 @@ def register_schema_models(namespace: Namespace, *models: type[BaseModel]) -> None:
        register_schema_model(namespace, model)


def get_or_create_model(model_name: str, field_def):
    existing = console_ns.models.get(model_name)
    if existing is None:
        existing = console_ns.model(model_name, field_def)
    return existing


def register_enum_models(namespace: Namespace, *models: type[StrEnum]) -> None:
    """Register multiple StrEnum models with a namespace."""
    for model in models:
        namespace.schema_model(
            model.__name__, TypeAdapter(model).json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
        )


__all__ = [
    "DEFAULT_REF_TEMPLATE_SWAGGER_2_0",
    "get_or_create_model",
    "register_enum_models",
    "register_schema_model",
    "register_schema_models",
]
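A minimal usage sketch of the helpers added above. The payload class, enum, and "ExampleItem" field definition below are illustrative, not part of the commit; note that get_or_create_model always reads and writes console_ns.models, whatever namespace the caller is otherwise working with.

# Hypothetical usage of the new schema helpers (names are illustrative).
from enum import StrEnum

from flask_restx import fields
from pydantic import BaseModel

from controllers.common.schema import (
    get_or_create_model,
    register_enum_models,
    register_schema_models,
)
from controllers.console import console_ns


class ExamplePayload(BaseModel):
    name: str


class ExampleStatus(StrEnum):
    ACTIVE = "active"
    DISABLED = "disabled"


# Swagger 2.0 schema definitions, keyed by class name:
register_schema_models(console_ns, ExamplePayload)
register_enum_models(console_ns, ExampleStatus)

# The first call registers a flask_restx model; repeated calls return the
# same object, which is what lets many controller modules share one name.
item_model = get_or_create_model("ExampleItem", {"name": fields.String})
assert get_or_create_model("ExampleItem", {"name": fields.String}) is item_model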
@@ -22,10 +22,10 @@ api_key_fields = {
    "created_at": TimestampField,
}

api_key_list = {"data": fields.List(fields.Nested(api_key_fields), attribute="items")}

api_key_item_model = console_ns.model("ApiKeyItem", api_key_fields)

api_key_list = {"data": fields.List(fields.Nested(api_key_item_model), attribute="items")}

api_key_list_model = console_ns.model(
    "ApiKeyList", {"data": fields.List(fields.Nested(api_key_item_model), attribute="items")}
)
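The rewrite above is the pattern this commit applies across the console controllers: fields.Nested receives a registered flask_restx model instead of a bare dict, so the generated Swagger document can refer to a named definition rather than an anonymous inline object. A stand-alone sketch of the difference, with hypothetical names; both variants marshal identically:

# Illustrative comparison: nesting a raw field dict versus a registered model.
from flask_restx import Namespace, fields, marshal

ns = Namespace("demo")
item_fields = {"id": fields.String, "created_at": fields.Integer}

inline_list = {"data": fields.List(fields.Nested(item_fields))}  # anonymous schema
item_model = ns.model("DemoItem", item_fields)                   # named "DemoItem"
named_list = {"data": fields.List(fields.Nested(item_model))}    # $ref-able schema

payload = {"data": [{"id": "k1", "created_at": 1}]}
# Marshalling output is the same either way; only the Swagger rendering differs.
assert marshal(payload, inline_list) == marshal(payload, named_list)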
@@ -9,9 +9,11 @@ from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import BadRequest

from controllers.common.schema import register_schema_models
from controllers.common.helpers import FileInfo
from controllers.common.schema import register_enum_models, register_schema_models
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.workspace.models import LoadBalancingPayload
from controllers.console.wraps import (
    account_initialization_required,
    cloud_edition_billing_resource_check,
@@ -22,18 +24,36 @@ from controllers.console.wraps import (
)
from core.file import helpers as file_helpers
from core.ops.ops_trace_manager import OpsTraceManager
from core.workflow.enums import NodeType
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from core.workflow.enums import NodeType, WorkflowExecutionStatus
from extensions.ext_database import db
from libs.login import current_account_with_tenant, login_required
from models import App, Workflow
from models import App, DatasetPermissionEnum, Workflow
from models.model import IconType
from services.app_dsl_service import AppDslService, ImportMode
from services.app_service import AppService
from services.enterprise.enterprise_service import EnterpriseService
from services.entities.knowledge_entities.knowledge_entities import (
    DataSource,
    InfoList,
    NotionIcon,
    NotionInfo,
    NotionPage,
    PreProcessingRule,
    RerankingModel,
    Rule,
    Segmentation,
    WebsiteInfo,
    WeightKeywordSetting,
    WeightModel,
    WeightVectorSetting,
)
from services.feature_service import FeatureService

ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "completion"]

register_enum_models(console_ns, IconType)


class AppListQuery(BaseModel):
    page: int = Field(default=1, ge=1, le=99999, description="Page number (1-99999)")
@@ -151,7 +171,7 @@ def _build_icon_url(icon_type: str | IconType | None, icon: str | None) -> str | None:
    if icon is None or icon_type is None:
        return None
    icon_type_value = icon_type.value if isinstance(icon_type, IconType) else str(icon_type)
    if icon_type_value.lower() != IconType.IMAGE.value:
    if icon_type_value.lower() != IconType.IMAGE:
        return None
    return file_helpers.get_signed_file_url(icon)
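Dropping .value in the comparison above is safe because IconType is a StrEnum (it is registered via register_enum_models earlier in this file): each member is itself a str and compares equal to its plain string value. A quick self-contained check, using a stand-in enum rather than the real models.model.IconType:

# Sketch: StrEnum members are strings, so "image" == IconType.IMAGE holds.
from enum import StrEnum


class IconType(StrEnum):  # stand-in for models.model.IconType
    IMAGE = "image"
    EMOJI = "emoji"


assert "image" == IconType.IMAGE        # member compares equal to its value
assert IconType.IMAGE.value == "image"  # .value is the same string
assert "IMAGE" != IconType.IMAGE        # comparison remains case-sensitive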
@@ -391,6 +411,8 @@ class AppExportResponse(ResponseModel):
    data: str


register_enum_models(console_ns, RetrievalMethod, WorkflowExecutionStatus, DatasetPermissionEnum)

register_schema_models(
    console_ns,
    AppListQuery,
@@ -414,6 +436,22 @@ register_schema_models(
    AppDetailWithSite,
    AppPagination,
    AppExportResponse,
    Segmentation,
    PreProcessingRule,
    Rule,
    WeightVectorSetting,
    WeightKeywordSetting,
    WeightModel,
    RerankingModel,
    InfoList,
    NotionInfo,
    FileInfo,
    WebsiteInfo,
    NotionPage,
    NotionIcon,
    RerankingModel,
    DataSource,
    LoadBalancingPayload,
)
@@ -41,14 +41,14 @@ DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"

class AppImportPayload(BaseModel):
    mode: str = Field(..., description="Import mode")
    yaml_content: str | None = None
    yaml_url: str | None = None
    name: str | None = None
    description: str | None = None
    icon_type: str | None = None
    icon: str | None = None
    icon_background: str | None = None
    app_id: str | None = None
    yaml_content: str | None = Field(None)
    yaml_url: str | None = Field(None)
    name: str | None = Field(None)
    description: str | None = Field(None)
    icon_type: str | None = Field(None)
    icon: str | None = Field(None)
    icon_background: str | None = Field(None)
    app_id: str | None = Field(None)


console_ns.schema_model(
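Swapping a bare None default for Field(None) changes nothing about validation in Pydantic v2; it gives each field an explicit FieldInfo to which metadata such as description= or constraints can later be attached. A minimal sketch of the equivalence (class names are invented for illustration):

# Both forms accept the same input and default to None.
from pydantic import BaseModel, Field


class Plain(BaseModel):
    name: str | None = None


class WithField(BaseModel):
    name: str | None = Field(None)


assert Plain().name is None and WithField().name is None
assert Plain(name="x").name == WithField(name="x").name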
@@ -12,6 +12,7 @@ from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from controllers.console import console_ns
from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
from controllers.console.app.workflow_run import workflow_run_node_execution_model
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
@@ -35,7 +36,6 @@ from extensions.ext_database import db
from factories import file_factory, variable_factory
from fields.member_fields import simple_account_fields
from fields.workflow_fields import workflow_fields, workflow_pagination_fields
from fields.workflow_run_fields import workflow_run_node_execution_fields
from libs import helper
from libs.datetime_utils import naive_utc_now
from libs.helper import TimestampField, uuid_value
@@ -88,26 +88,6 @@ workflow_pagination_fields_copy = workflow_pagination_fields.copy()
workflow_pagination_fields_copy["items"] = fields.List(fields.Nested(workflow_model), attribute="items")
workflow_pagination_model = console_ns.model("WorkflowPagination", workflow_pagination_fields_copy)

# Reuse workflow_run_node_execution_model from workflow_run.py if already registered
# Otherwise register it here
from fields.end_user_fields import simple_end_user_fields

simple_end_user_model = None
try:
    simple_end_user_model = console_ns.models.get("SimpleEndUser")
except AttributeError:
    pass
if simple_end_user_model is None:
    simple_end_user_model = console_ns.model("SimpleEndUser", simple_end_user_fields)

workflow_run_node_execution_model = None
try:
    workflow_run_node_execution_model = console_ns.models.get("WorkflowRunNodeExecution")
except AttributeError:
    pass
if workflow_run_node_execution_model is None:
    workflow_run_node_execution_model = console_ns.model("WorkflowRunNodeExecution", workflow_run_node_execution_fields)


class SyncDraftWorkflowPayload(BaseModel):
    graph: dict[str, Any]
@@ -1,13 +1,14 @@
import logging

from flask import request
from flask_restx import Resource, marshal_with
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound

from configs import dify_config
from controllers.common.schema import get_or_create_model
from extensions.ext_database import db
from fields.workflow_trigger_fields import trigger_fields, triggers_list_fields, webhook_trigger_fields
from libs.login import current_user, login_required
@@ -22,6 +23,14 @@ from ..wraps import account_initialization_required, edit_permission_required, setup_required
logger = logging.getLogger(__name__)
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"

trigger_model = get_or_create_model("WorkflowTrigger", trigger_fields)

triggers_list_fields_copy = triggers_list_fields.copy()
triggers_list_fields_copy["data"] = fields.List(fields.Nested(trigger_model))
triggers_list_model = get_or_create_model("WorkflowTriggerList", triggers_list_fields_copy)

webhook_trigger_model = get_or_create_model("WebhookTrigger", webhook_trigger_fields)


class Parser(BaseModel):
    node_id: str
@@ -48,7 +57,7 @@ class WebhookTriggerApi(Resource):
    @login_required
    @account_initialization_required
    @get_app_model(mode=AppMode.WORKFLOW)
    @marshal_with(webhook_trigger_fields)
    @marshal_with(webhook_trigger_model)
    def get(self, app_model: App):
        """Get webhook trigger for a node"""
        args = Parser.model_validate(request.args.to_dict(flat=True))  # type: ignore
@@ -80,7 +89,7 @@ class AppTriggersApi(Resource):
    @login_required
    @account_initialization_required
    @get_app_model(mode=AppMode.WORKFLOW)
    @marshal_with(triggers_list_fields)
    @marshal_with(triggers_list_model)
    def get(self, app_model: App):
        """Get app triggers list"""
        assert isinstance(current_user, Account)
@@ -120,7 +129,7 @@ class AppTriggerEnableApi(Resource):
    @account_initialization_required
    @edit_permission_required
    @get_app_model(mode=AppMode.WORKFLOW)
    @marshal_with(trigger_fields)
    @marshal_with(trigger_model)
    def post(self, app_model: App):
        """Update app trigger (enable/disable)"""
        args = ParserEnable.model_validate(console_ns.payload)
@@ -3,13 +3,13 @@ from collections.abc import Generator
from typing import Any, cast

from flask import request
from flask_restx import Resource, marshal_with
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound

from controllers.common.schema import register_schema_model
from controllers.common.schema import get_or_create_model, register_schema_model
from core.datasource.entities.datasource_entities import DatasourceProviderType, OnlineDocumentPagesMessage
from core.datasource.online_document.online_document_plugin import OnlineDocumentDatasourcePlugin
from core.indexing_runner import IndexingRunner
@@ -17,7 +17,14 @@ from core.rag.extractor.entity.datasource_type import DatasourceType
from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo
from core.rag.extractor.notion_extractor import NotionExtractor
from extensions.ext_database import db
from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields
from fields.data_source_fields import (
    integrate_fields,
    integrate_icon_fields,
    integrate_list_fields,
    integrate_notion_info_list_fields,
    integrate_page_fields,
    integrate_workspace_fields,
)
from libs.datetime_utils import naive_utc_now
from libs.login import current_account_with_tenant, login_required
from models import DataSourceOauthBinding, Document
@@ -36,9 +43,62 @@ class NotionEstimatePayload(BaseModel):
    doc_language: str = Field(default="English")


class DataSourceNotionListQuery(BaseModel):
    dataset_id: str | None = Field(default=None, description="Dataset ID")
    credential_id: str = Field(..., description="Credential ID", min_length=1)
    datasource_parameters: dict[str, Any] | None = Field(default=None, description="Datasource parameters JSON string")


class DataSourceNotionPreviewQuery(BaseModel):
    credential_id: str = Field(..., description="Credential ID", min_length=1)


register_schema_model(console_ns, NotionEstimatePayload)


integrate_icon_model = get_or_create_model("DataSourceIntegrateIcon", integrate_icon_fields)

integrate_page_fields_copy = integrate_page_fields.copy()
integrate_page_fields_copy["page_icon"] = fields.Nested(integrate_icon_model, allow_null=True)
integrate_page_model = get_or_create_model("DataSourceIntegratePage", integrate_page_fields_copy)

integrate_workspace_fields_copy = integrate_workspace_fields.copy()
integrate_workspace_fields_copy["pages"] = fields.List(fields.Nested(integrate_page_model))
integrate_workspace_model = get_or_create_model("DataSourceIntegrateWorkspace", integrate_workspace_fields_copy)

integrate_fields_copy = integrate_fields.copy()
integrate_fields_copy["source_info"] = fields.Nested(integrate_workspace_model)
integrate_model = get_or_create_model("DataSourceIntegrate", integrate_fields_copy)

integrate_list_fields_copy = integrate_list_fields.copy()
integrate_list_fields_copy["data"] = fields.List(fields.Nested(integrate_model))
integrate_list_model = get_or_create_model("DataSourceIntegrateList", integrate_list_fields_copy)

notion_page_fields = {
    "page_name": fields.String,
    "page_id": fields.String,
    "page_icon": fields.Nested(integrate_icon_model, allow_null=True),
    "is_bound": fields.Boolean,
    "parent_id": fields.String,
    "type": fields.String,
}
notion_page_model = get_or_create_model("NotionIntegratePage", notion_page_fields)

notion_workspace_fields = {
    "workspace_name": fields.String,
    "workspace_id": fields.String,
    "workspace_icon": fields.String,
    "pages": fields.List(fields.Nested(notion_page_model)),
}
notion_workspace_model = get_or_create_model("NotionIntegrateWorkspace", notion_workspace_fields)

integrate_notion_info_list_fields_copy = integrate_notion_info_list_fields.copy()
integrate_notion_info_list_fields_copy["notion_info"] = fields.List(fields.Nested(notion_workspace_model))
integrate_notion_info_list_model = get_or_create_model(
    "NotionIntegrateInfoList", integrate_notion_info_list_fields_copy
)


@console_ns.route(
    "/data-source/integrates",
    "/data-source/integrates/<uuid:binding_id>/<string:action>",
@@ -47,7 +107,7 @@ class DataSourceApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @marshal_with(integrate_list_fields)
    @marshal_with(integrate_list_model)
    def get(self):
        _, current_tenant_id = current_account_with_tenant()

@@ -132,30 +192,19 @@ class DataSourceNotionListApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @marshal_with(integrate_notion_info_list_fields)
    @marshal_with(integrate_notion_info_list_model)
    def get(self):
        current_user, current_tenant_id = current_account_with_tenant()

        dataset_id = request.args.get("dataset_id", default=None, type=str)
        credential_id = request.args.get("credential_id", default=None, type=str)
        if not credential_id:
            raise ValueError("Credential id is required.")
        query = DataSourceNotionListQuery.model_validate(request.args.to_dict())

        # Get datasource_parameters from query string (optional, for GitHub and other datasources)
        datasource_parameters_str = request.args.get("datasource_parameters", default=None, type=str)
        datasource_parameters = {}
        if datasource_parameters_str:
            try:
                datasource_parameters = json.loads(datasource_parameters_str)
                if not isinstance(datasource_parameters, dict):
                    raise ValueError("datasource_parameters must be a JSON object.")
            except json.JSONDecodeError:
                raise ValueError("Invalid datasource_parameters JSON format.")
        datasource_parameters = query.datasource_parameters or {}

        datasource_provider_service = DatasourceProviderService()
        credential = datasource_provider_service.get_datasource_credentials(
            tenant_id=current_tenant_id,
            credential_id=credential_id,
            credential_id=query.credential_id,
            provider="notion_datasource",
            plugin_id="langgenius/notion_datasource",
        )
@@ -164,8 +213,8 @@ class DataSourceNotionListApi(Resource):
        exist_page_ids = []
        with Session(db.engine) as session:
            # import notion in the exist dataset
            if dataset_id:
                dataset = DatasetService.get_dataset(dataset_id)
            if query.dataset_id:
                dataset = DatasetService.get_dataset(query.dataset_id)
                if not dataset:
                    raise NotFound("Dataset not found.")
                if dataset.data_source_type != "notion_import":
@@ -173,7 +222,7 @@ class DataSourceNotionListApi(Resource):

                documents = session.scalars(
                    select(Document).filter_by(
                        dataset_id=dataset_id,
                        dataset_id=query.dataset_id,
                        tenant_id=current_tenant_id,
                        data_source_type="notion_import",
                        enabled=True,
@@ -240,13 +289,12 @@ class DataSourceNotionApi(Resource):
    def get(self, page_id, page_type):
        _, current_tenant_id = current_account_with_tenant()

        credential_id = request.args.get("credential_id", default=None, type=str)
        if not credential_id:
            raise ValueError("Credential id is required.")
        query = DataSourceNotionPreviewQuery.model_validate(request.args.to_dict())

        datasource_provider_service = DatasourceProviderService()
        credential = datasource_provider_service.get_datasource_credentials(
            tenant_id=current_tenant_id,
            credential_id=credential_id,
            credential_id=query.credential_id,
            provider="notion_datasource",
            plugin_id="langgenius/notion_datasource",
        )
@@ -8,7 +8,7 @@ from werkzeug.exceptions import Forbidden, NotFound

import services
from configs import dify_config
from controllers.common.schema import register_schema_models
from controllers.common.schema import get_or_create_model, register_schema_models
from controllers.console import console_ns
from controllers.console.apikey import (
    api_key_item_model,
@@ -34,6 +34,7 @@ from core.rag.retrieval.retrieval_methods import RetrievalMethod
from extensions.ext_database import db
from fields.app_fields import app_detail_kernel_fields, related_app_list
from fields.dataset_fields import (
    content_fields,
    dataset_detail_fields,
    dataset_fields,
    dataset_query_detail_fields,
@@ -41,6 +42,7 @@ from fields.dataset_fields import (
    doc_metadata_fields,
    external_knowledge_info_fields,
    external_retrieval_model_fields,
    file_info_fields,
    icon_info_fields,
    keyword_setting_fields,
    reranking_model_fields,
@@ -55,41 +57,33 @@ from models.dataset import DatasetPermissionEnum
from models.provider_ids import ModelProviderID
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService


def _get_or_create_model(model_name: str, field_def):
    existing = console_ns.models.get(model_name)
    if existing is None:
        existing = console_ns.model(model_name, field_def)
    return existing


# Register models for flask_restx to avoid dict type issues in Swagger
dataset_base_model = _get_or_create_model("DatasetBase", dataset_fields)
dataset_base_model = get_or_create_model("DatasetBase", dataset_fields)

tag_model = _get_or_create_model("Tag", tag_fields)
tag_model = get_or_create_model("Tag", tag_fields)

keyword_setting_model = _get_or_create_model("DatasetKeywordSetting", keyword_setting_fields)
vector_setting_model = _get_or_create_model("DatasetVectorSetting", vector_setting_fields)
keyword_setting_model = get_or_create_model("DatasetKeywordSetting", keyword_setting_fields)
vector_setting_model = get_or_create_model("DatasetVectorSetting", vector_setting_fields)

weighted_score_fields_copy = weighted_score_fields.copy()
weighted_score_fields_copy["keyword_setting"] = fields.Nested(keyword_setting_model)
weighted_score_fields_copy["vector_setting"] = fields.Nested(vector_setting_model)
weighted_score_model = _get_or_create_model("DatasetWeightedScore", weighted_score_fields_copy)
weighted_score_model = get_or_create_model("DatasetWeightedScore", weighted_score_fields_copy)

reranking_model = _get_or_create_model("DatasetRerankingModel", reranking_model_fields)
reranking_model = get_or_create_model("DatasetRerankingModel", reranking_model_fields)

dataset_retrieval_model_fields_copy = dataset_retrieval_model_fields.copy()
dataset_retrieval_model_fields_copy["reranking_model"] = fields.Nested(reranking_model)
dataset_retrieval_model_fields_copy["weights"] = fields.Nested(weighted_score_model, allow_null=True)
dataset_retrieval_model = _get_or_create_model("DatasetRetrievalModel", dataset_retrieval_model_fields_copy)
dataset_retrieval_model = get_or_create_model("DatasetRetrievalModel", dataset_retrieval_model_fields_copy)

external_knowledge_info_model = _get_or_create_model("ExternalKnowledgeInfo", external_knowledge_info_fields)
external_knowledge_info_model = get_or_create_model("ExternalKnowledgeInfo", external_knowledge_info_fields)

external_retrieval_model = _get_or_create_model("ExternalRetrievalModel", external_retrieval_model_fields)
external_retrieval_model = get_or_create_model("ExternalRetrievalModel", external_retrieval_model_fields)

doc_metadata_model = _get_or_create_model("DatasetDocMetadata", doc_metadata_fields)
doc_metadata_model = get_or_create_model("DatasetDocMetadata", doc_metadata_fields)

icon_info_model = _get_or_create_model("DatasetIconInfo", icon_info_fields)
icon_info_model = get_or_create_model("DatasetIconInfo", icon_info_fields)

dataset_detail_fields_copy = dataset_detail_fields.copy()
dataset_detail_fields_copy["retrieval_model_dict"] = fields.Nested(dataset_retrieval_model)
@@ -98,14 +92,22 @@ dataset_detail_fields_copy["external_knowledge_info"] = fields.Nested(external_knowledge_info_model)
dataset_detail_fields_copy["external_retrieval_model"] = fields.Nested(external_retrieval_model, allow_null=True)
dataset_detail_fields_copy["doc_metadata"] = fields.List(fields.Nested(doc_metadata_model))
dataset_detail_fields_copy["icon_info"] = fields.Nested(icon_info_model)
dataset_detail_model = _get_or_create_model("DatasetDetail", dataset_detail_fields_copy)
dataset_detail_model = get_or_create_model("DatasetDetail", dataset_detail_fields_copy)

dataset_query_detail_model = _get_or_create_model("DatasetQueryDetail", dataset_query_detail_fields)
file_info_model = get_or_create_model("DatasetFileInfo", file_info_fields)

app_detail_kernel_model = _get_or_create_model("AppDetailKernel", app_detail_kernel_fields)
content_fields_copy = content_fields.copy()
content_fields_copy["file_info"] = fields.Nested(file_info_model, allow_null=True)
content_model = get_or_create_model("DatasetContent", content_fields_copy)

dataset_query_detail_fields_copy = dataset_query_detail_fields.copy()
dataset_query_detail_fields_copy["queries"] = fields.Nested(content_model)
dataset_query_detail_model = get_or_create_model("DatasetQueryDetail", dataset_query_detail_fields_copy)

app_detail_kernel_model = get_or_create_model("AppDetailKernel", app_detail_kernel_fields)
related_app_list_copy = related_app_list.copy()
related_app_list_copy["data"] = fields.List(fields.Nested(app_detail_kernel_model))
related_app_list_model = _get_or_create_model("RelatedAppList", related_app_list_copy)
related_app_list_model = get_or_create_model("RelatedAppList", related_app_list_copy)


def _validate_indexing_technique(value: str | None) -> str | None:
@@ -177,7 +179,18 @@ class IndexingEstimatePayload(BaseModel):
        return result


register_schema_models(console_ns, DatasetCreatePayload, DatasetUpdatePayload, IndexingEstimatePayload)
class ConsoleDatasetListQuery(BaseModel):
    page: int = Field(default=1, description="Page number")
    limit: int = Field(default=20, description="Number of items per page")
    keyword: str | None = Field(default=None, description="Search keyword")
    include_all: bool = Field(default=False, description="Include all datasets")
    ids: list[str] = Field(default_factory=list, description="Filter by dataset IDs")
    tag_ids: list[str] = Field(default_factory=list, description="Filter by tag IDs")


register_schema_models(
    console_ns, DatasetCreatePayload, DatasetUpdatePayload, IndexingEstimatePayload, ConsoleDatasetListQuery
)


def _get_retrieval_methods_by_vector_type(vector_type: str | None, is_mock: bool = False) -> dict[str, list[str]]:
@@ -276,18 +289,19 @@ class DatasetListApi(Resource):
    @enterprise_license_required
    def get(self):
        current_user, current_tenant_id = current_account_with_tenant()
        page = request.args.get("page", default=1, type=int)
        limit = request.args.get("limit", default=20, type=int)
        ids = request.args.getlist("ids")
        query = ConsoleDatasetListQuery.model_validate(request.args.to_dict())
        # provider = request.args.get("provider", default="vendor")
        search = request.args.get("keyword", default=None, type=str)
        tag_ids = request.args.getlist("tag_ids")
        include_all = request.args.get("include_all", default="false").lower() == "true"
        if ids:
            datasets, total = DatasetService.get_datasets_by_ids(ids, current_tenant_id)
        if query.ids:
            datasets, total = DatasetService.get_datasets_by_ids(query.ids, current_tenant_id)
        else:
            datasets, total = DatasetService.get_datasets(
                page, limit, current_tenant_id, current_user, search, tag_ids, include_all
                query.page,
                query.limit,
                current_tenant_id,
                current_user,
                query.keyword,
                query.tag_ids,
                query.include_all,
            )

        # check embedding setting
@@ -319,7 +333,13 @@ class DatasetListApi(Resource):
            else:
                item.update({"partial_member_list": []})

        response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page}
        response = {
            "data": data,
            "has_more": len(datasets) == query.limit,
            "limit": query.limit,
            "total": total,
            "page": query.page,
        }
        return response, 200

    @console_ns.doc("create_dataset")
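The ConsoleDatasetListQuery change above is the query-handling template this commit repeats for several endpoints: instead of pulling parameters out of request.args one get() at a time, the flat dict is validated through a Pydantic model that owns the defaults and type coercion. A reduced sketch of the mechanics, simulating the Flask request with a plain dict of strings:

# Pydantic coerces the string values a query string produces ("2", "true")
# into typed fields with defaults; ListQuery stands in for ConsoleDatasetListQuery.
from pydantic import BaseModel, Field


class ListQuery(BaseModel):
    page: int = Field(default=1)
    limit: int = Field(default=20)
    keyword: str | None = Field(default=None)
    include_all: bool = Field(default=False)


query = ListQuery.model_validate({"page": "2", "include_all": "true"})
assert query.page == 2 and query.limit == 20
assert query.include_all is True and query.keyword is None

One subtlety: request.args.to_dict() keeps only the first value of a repeated key, so list-typed fields such as ids see at most a single value, unlike the request.args.getlist() calls they replace.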
@@ -14,7 +14,7 @@ from sqlalchemy import asc, desc, select
from werkzeug.exceptions import Forbidden, NotFound

import services
from controllers.common.schema import register_schema_models
from controllers.common.schema import get_or_create_model, register_schema_models
from controllers.console import console_ns
from core.errors.error import (
    LLMBadRequestError,
@@ -73,34 +73,27 @@ logger = logging.getLogger(__name__)
DOCUMENT_BATCH_DOWNLOAD_ZIP_MAX_DOCS = 100


def _get_or_create_model(model_name: str, field_def):
    existing = console_ns.models.get(model_name)
    if existing is None:
        existing = console_ns.model(model_name, field_def)
    return existing


# Register models for flask_restx to avoid dict type issues in Swagger
dataset_model = _get_or_create_model("Dataset", dataset_fields)
dataset_model = get_or_create_model("Dataset", dataset_fields)

document_metadata_model = _get_or_create_model("DocumentMetadata", document_metadata_fields)
document_metadata_model = get_or_create_model("DocumentMetadata", document_metadata_fields)

document_fields_copy = document_fields.copy()
document_fields_copy["doc_metadata"] = fields.List(
    fields.Nested(document_metadata_model), attribute="doc_metadata_details"
)
document_model = _get_or_create_model("Document", document_fields_copy)
document_model = get_or_create_model("Document", document_fields_copy)

document_with_segments_fields_copy = document_with_segments_fields.copy()
document_with_segments_fields_copy["doc_metadata"] = fields.List(
    fields.Nested(document_metadata_model), attribute="doc_metadata_details"
)
document_with_segments_model = _get_or_create_model("DocumentWithSegments", document_with_segments_fields_copy)
document_with_segments_model = get_or_create_model("DocumentWithSegments", document_with_segments_fields_copy)

dataset_and_document_fields_copy = dataset_and_document_fields.copy()
dataset_and_document_fields_copy["dataset"] = fields.Nested(dataset_model)
dataset_and_document_fields_copy["documents"] = fields.List(fields.Nested(document_model))
dataset_and_document_model = _get_or_create_model("DatasetAndDocument", dataset_and_document_fields_copy)
dataset_and_document_model = get_or_create_model("DatasetAndDocument", dataset_and_document_fields_copy)


class DocumentRetryPayload(BaseModel):
@@ -1266,7 +1259,7 @@ class DocumentRenameApi(DocumentResource):
    @setup_required
    @login_required
    @account_initialization_required
    @marshal_with(document_fields)
    @marshal_with(document_model)
    @console_ns.expect(console_ns.models[DocumentRenamePayload.__name__])
    def post(self, dataset_id, document_id):
        # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
@@ -108,6 +108,7 @@ register_schema_models(
    ChildChunkCreatePayload,
    ChildChunkUpdatePayload,
    ChildChunkBatchUpdatePayload,
    ChildChunkUpdateArgs,
)
@@ -4,7 +4,7 @@ from pydantic import BaseModel, Field
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound

import services
from controllers.common.schema import register_schema_models
from controllers.common.schema import get_or_create_model, register_schema_models
from controllers.console import console_ns
from controllers.console.datasets.error import DatasetNameDuplicateError
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
@@ -28,34 +28,27 @@ from services.hit_testing_service import HitTestingService
from services.knowledge_service import ExternalDatasetTestService


def _get_or_create_model(model_name: str, field_def):
    existing = console_ns.models.get(model_name)
    if existing is None:
        existing = console_ns.model(model_name, field_def)
    return existing


def _build_dataset_detail_model():
    keyword_setting_model = _get_or_create_model("DatasetKeywordSetting", keyword_setting_fields)
    vector_setting_model = _get_or_create_model("DatasetVectorSetting", vector_setting_fields)
    keyword_setting_model = get_or_create_model("DatasetKeywordSetting", keyword_setting_fields)
    vector_setting_model = get_or_create_model("DatasetVectorSetting", vector_setting_fields)

    weighted_score_fields_copy = weighted_score_fields.copy()
    weighted_score_fields_copy["keyword_setting"] = fields.Nested(keyword_setting_model)
    weighted_score_fields_copy["vector_setting"] = fields.Nested(vector_setting_model)
    weighted_score_model = _get_or_create_model("DatasetWeightedScore", weighted_score_fields_copy)
    weighted_score_model = get_or_create_model("DatasetWeightedScore", weighted_score_fields_copy)

    reranking_model = _get_or_create_model("DatasetRerankingModel", reranking_model_fields)
    reranking_model = get_or_create_model("DatasetRerankingModel", reranking_model_fields)

    dataset_retrieval_model_fields_copy = dataset_retrieval_model_fields.copy()
    dataset_retrieval_model_fields_copy["reranking_model"] = fields.Nested(reranking_model)
    dataset_retrieval_model_fields_copy["weights"] = fields.Nested(weighted_score_model, allow_null=True)
    dataset_retrieval_model = _get_or_create_model("DatasetRetrievalModel", dataset_retrieval_model_fields_copy)
    dataset_retrieval_model = get_or_create_model("DatasetRetrievalModel", dataset_retrieval_model_fields_copy)

    tag_model = _get_or_create_model("Tag", tag_fields)
    doc_metadata_model = _get_or_create_model("DatasetDocMetadata", doc_metadata_fields)
    external_knowledge_info_model = _get_or_create_model("ExternalKnowledgeInfo", external_knowledge_info_fields)
    external_retrieval_model = _get_or_create_model("ExternalRetrievalModel", external_retrieval_model_fields)
    icon_info_model = _get_or_create_model("DatasetIconInfo", icon_info_fields)
    tag_model = get_or_create_model("Tag", tag_fields)
    doc_metadata_model = get_or_create_model("DatasetDocMetadata", doc_metadata_fields)
    external_knowledge_info_model = get_or_create_model("ExternalKnowledgeInfo", external_knowledge_info_fields)
    external_retrieval_model = get_or_create_model("ExternalRetrievalModel", external_retrieval_model_fields)
    icon_info_model = get_or_create_model("DatasetIconInfo", icon_info_fields)

    dataset_detail_fields_copy = dataset_detail_fields.copy()
    dataset_detail_fields_copy["retrieval_model_dict"] = fields.Nested(dataset_retrieval_model)
@@ -64,7 +57,7 @@ def _build_dataset_detail_model():
    dataset_detail_fields_copy["external_retrieval_model"] = fields.Nested(external_retrieval_model, allow_null=True)
    dataset_detail_fields_copy["doc_metadata"] = fields.List(fields.Nested(doc_metadata_model))
    dataset_detail_fields_copy["icon_info"] = fields.Nested(icon_info_model)
    return _get_or_create_model("DatasetDetail", dataset_detail_fields_copy)
    return get_or_create_model("DatasetDetail", dataset_detail_fields_copy)


try:
@@ -98,12 +91,19 @@ class BedrockRetrievalPayload(BaseModel):
    knowledge_id: str


class ExternalApiTemplateListQuery(BaseModel):
    page: int = Field(default=1, description="Page number")
    limit: int = Field(default=20, description="Number of items per page")
    keyword: str | None = Field(default=None, description="Search keyword")


register_schema_models(
    console_ns,
    ExternalKnowledgeApiPayload,
    ExternalDatasetCreatePayload,
    ExternalHitTestingPayload,
    BedrockRetrievalPayload,
    ExternalApiTemplateListQuery,
)


@@ -124,19 +124,17 @@ class ExternalApiTemplateListApi(Resource):
    @account_initialization_required
    def get(self):
        _, current_tenant_id = current_account_with_tenant()
        page = request.args.get("page", default=1, type=int)
        limit = request.args.get("limit", default=20, type=int)
        search = request.args.get("keyword", default=None, type=str)
        query = ExternalApiTemplateListQuery.model_validate(request.args.to_dict())

        external_knowledge_apis, total = ExternalDatasetService.get_external_knowledge_apis(
            page, limit, current_tenant_id, search
            query.page, query.limit, current_tenant_id, query.keyword
        )
        response = {
            "data": [item.to_dict() for item in external_knowledge_apis],
            "has_more": len(external_knowledge_apis) == limit,
            "limit": limit,
            "has_more": len(external_knowledge_apis) == query.limit,
            "limit": query.limit,
            "total": total,
            "page": page,
            "page": query.page,
        }
        return response, 200
@@ -4,14 +4,16 @@ from flask_restx import Resource, marshal_with
from pydantic import BaseModel
from werkzeug.exceptions import NotFound

from controllers.common.schema import register_schema_model, register_schema_models
from controllers.common.schema import register_schema_models
from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required
from fields.dataset_fields import dataset_metadata_fields
from libs.login import current_account_with_tenant, login_required
from services.dataset_service import DatasetService
from services.entities.knowledge_entities.knowledge_entities import (
    DocumentMetadataOperation,
    MetadataArgs,
    MetadataDetail,
    MetadataOperationData,
)
from services.metadata_service import MetadataService
@@ -21,8 +23,9 @@ class MetadataUpdatePayload(BaseModel):
    name: str


register_schema_models(console_ns, MetadataArgs, MetadataOperationData)
register_schema_model(console_ns, MetadataUpdatePayload)
register_schema_models(
    console_ns, MetadataArgs, MetadataOperationData, MetadataUpdatePayload, DocumentMetadataOperation, MetadataDetail
)


@console_ns.route("/datasets/<uuid:dataset_id>/metadata")
@@ -2,7 +2,7 @@ import logging
from typing import Any, NoReturn

from flask import Response, request
from flask_restx import Resource, fields, marshal, marshal_with
from flask_restx import Resource, marshal, marshal_with
from pydantic import BaseModel, Field
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
@@ -14,7 +14,9 @@ from controllers.console.app.error import (
)
from controllers.console.app.workflow_draft_variable import (
    _WORKFLOW_DRAFT_VARIABLE_FIELDS,  # type: ignore[private-usage]
    _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS,  # type: ignore[private-usage]
    workflow_draft_variable_list_model,
    workflow_draft_variable_list_without_value_model,
    workflow_draft_variable_model,
)
from controllers.console.datasets.wraps import get_rag_pipeline
from controllers.console.wraps import account_initialization_required, setup_required
@@ -27,7 +29,6 @@ from factories.variable_factory import build_segment_with_type
from libs.login import current_user, login_required
from models import Account
from models.dataset import Pipeline
from models.workflow import WorkflowDraftVariable
from services.rag_pipeline.rag_pipeline import RagPipelineService
from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService

@@ -52,20 +53,6 @@ class WorkflowDraftVariablePatchPayload(BaseModel):
register_schema_models(console_ns, WorkflowDraftVariablePatchPayload)


def _get_items(var_list: WorkflowDraftVariableList) -> list[WorkflowDraftVariable]:
    return var_list.variables


_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS = {
    "items": fields.List(fields.Nested(_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS), attribute=_get_items),
    "total": fields.Raw(),
}

_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS = {
    "items": fields.List(fields.Nested(_WORKFLOW_DRAFT_VARIABLE_FIELDS), attribute=_get_items),
}


def _api_prerequisite(f):
    """Common prerequisites for all draft workflow variable APIs.
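The block removed above relied on a less common flask_restx feature: attribute= may be a callable rather than a field name, which is how _get_items unwrapped WorkflowDraftVariableList.variables during marshalling. The shared models now imported from workflow_draft_variable presumably encapsulate the same extraction. A stand-alone sketch of the callable form, with hypothetical types:

# attribute= receives the whole source object and returns the value to marshal.
from dataclasses import dataclass, field

from flask_restx import fields, marshal


@dataclass
class VarList:  # stand-in for WorkflowDraftVariableList
    variables: list[dict] = field(default_factory=list)


list_fields = {
    "items": fields.List(
        fields.Nested({"name": fields.String}),
        attribute=lambda obj: obj.variables,  # callable extractor
    ),
}

result = marshal(VarList(variables=[{"name": "sys.query"}]), list_fields)
assert result == {"items": [{"name": "sys.query"}]}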
@@ -92,7 +79,7 @@ def _api_prerequisite(f):
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables")
class RagPipelineVariableCollectionApi(Resource):
    @_api_prerequisite
    @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
    @marshal_with(workflow_draft_variable_list_without_value_model)
    def get(self, pipeline: Pipeline):
        """
        Get draft workflow
@@ -150,7 +137,7 @@ def validate_node_id(node_id: str) -> NoReturn | None:
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/variables")
class RagPipelineNodeVariableCollectionApi(Resource):
    @_api_prerequisite
    @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
    @marshal_with(workflow_draft_variable_list_model)
    def get(self, pipeline: Pipeline, node_id: str):
        validate_node_id(node_id)
        with Session(bind=db.engine, expire_on_commit=False) as session:
@@ -176,7 +163,7 @@ class RagPipelineVariableApi(Resource):
    _PATCH_VALUE_FIELD = "value"

    @_api_prerequisite
    @marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
    @marshal_with(workflow_draft_variable_model)
    def get(self, pipeline: Pipeline, variable_id: str):
        draft_var_srv = WorkflowDraftVariableService(
            session=db.session(),
@@ -189,7 +176,7 @@ class RagPipelineVariableApi(Resource):
        return variable

    @_api_prerequisite
    @marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
    @marshal_with(workflow_draft_variable_model)
    @console_ns.expect(console_ns.models[WorkflowDraftVariablePatchPayload.__name__])
    def patch(self, pipeline: Pipeline, variable_id: str):
        # Request payload for file types:
@@ -307,7 +294,7 @@ def _get_variable_list(pipeline: Pipeline, node_id) -> WorkflowDraftVariableList:
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/system-variables")
class RagPipelineSystemVariableCollectionApi(Resource):
    @_api_prerequisite
    @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
    @marshal_with(workflow_draft_variable_list_model)
    def get(self, pipeline: Pipeline):
        return _get_variable_list(pipeline, SYSTEM_VARIABLE_NODE_ID)
@@ -1,9 +1,9 @@
from flask import request
from flask_restx import Resource, marshal_with  # type: ignore
from flask_restx import Resource, fields, marshal_with  # type: ignore
from pydantic import BaseModel, Field
from sqlalchemy.orm import Session

from controllers.common.schema import register_schema_models
from controllers.common.schema import get_or_create_model, register_schema_models
from controllers.console import console_ns
from controllers.console.datasets.wraps import get_rag_pipeline
from controllers.console.wraps import (
@@ -12,7 +12,11 @@ from controllers.console.wraps import (
    setup_required,
)
from extensions.ext_database import db
from fields.rag_pipeline_fields import pipeline_import_check_dependencies_fields, pipeline_import_fields
from fields.rag_pipeline_fields import (
    leaked_dependency_fields,
    pipeline_import_check_dependencies_fields,
    pipeline_import_fields,
)
from libs.login import current_account_with_tenant, login_required
from models.dataset import Pipeline
from services.app_dsl_service import ImportStatus
@@ -38,13 +42,25 @@ class IncludeSecretQuery(BaseModel):
register_schema_models(console_ns, RagPipelineImportPayload, IncludeSecretQuery)


pipeline_import_model = get_or_create_model("RagPipelineImport", pipeline_import_fields)

leaked_dependency_model = get_or_create_model("RagPipelineLeakedDependency", leaked_dependency_fields)
pipeline_import_check_dependencies_fields_copy = pipeline_import_check_dependencies_fields.copy()
pipeline_import_check_dependencies_fields_copy["leaked_dependencies"] = fields.List(
    fields.Nested(leaked_dependency_model)
)
pipeline_import_check_dependencies_model = get_or_create_model(
    "RagPipelineImportCheckDependencies", pipeline_import_check_dependencies_fields_copy
)


@console_ns.route("/rag/pipelines/imports")
class RagPipelineImportApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @edit_permission_required
    @marshal_with(pipeline_import_fields)
    @marshal_with(pipeline_import_model)
    @console_ns.expect(console_ns.models[RagPipelineImportPayload.__name__])
    def post(self):
        # Check user role first
@@ -81,7 +97,7 @@ class RagPipelineImportConfirmApi(Resource):
    @login_required
    @account_initialization_required
    @edit_permission_required
    @marshal_with(pipeline_import_fields)
    @marshal_with(pipeline_import_model)
    def post(self, import_id):
        current_user, _ = current_account_with_tenant()

@@ -106,7 +122,7 @@ class RagPipelineImportCheckDependenciesApi(Resource):
    @get_rag_pipeline
    @account_initialization_required
    @edit_permission_required
    @marshal_with(pipeline_import_check_dependencies_fields)
    @marshal_with(pipeline_import_check_dependencies_model)
    def get(self, pipeline: Pipeline):
        with Session(db.engine) as session:
            import_service = RagPipelineDslService(session)
@@ -17,6 +17,13 @@ from controllers.console.app.error import (
    DraftWorkflowNotExist,
    DraftWorkflowNotSync,
)
from controllers.console.app.workflow import workflow_model, workflow_pagination_model
from controllers.console.app.workflow_run import (
    workflow_run_detail_model,
    workflow_run_node_execution_list_model,
    workflow_run_node_execution_model,
    workflow_run_pagination_model,
)
from controllers.console.datasets.wraps import get_rag_pipeline
from controllers.console.wraps import (
    account_initialization_required,
@@ -30,13 +37,6 @@ from core.app.entities.app_invoke_entities import InvokeFrom
from core.model_runtime.utils.encoders import jsonable_encoder
from extensions.ext_database import db
from factories import variable_factory
from fields.workflow_fields import workflow_fields, workflow_pagination_fields
from fields.workflow_run_fields import (
    workflow_run_detail_fields,
    workflow_run_node_execution_fields,
    workflow_run_node_execution_list_fields,
    workflow_run_pagination_fields,
)
from libs import helper
from libs.helper import TimestampField
from libs.login import current_account_with_tenant, current_user, login_required
@@ -145,7 +145,7 @@ class DraftRagPipelineApi(Resource):
    @account_initialization_required
    @get_rag_pipeline
    @edit_permission_required
    @marshal_with(workflow_fields)
    @marshal_with(workflow_model)
    def get(self, pipeline: Pipeline):
        """
        Get draft rag pipeline's workflow
@@ -521,7 +521,7 @@ class RagPipelineDraftNodeRunApi(Resource):
    @edit_permission_required
    @account_initialization_required
    @get_rag_pipeline
    @marshal_with(workflow_run_node_execution_fields)
    @marshal_with(workflow_run_node_execution_model)
    def post(self, pipeline: Pipeline, node_id: str):
        """
        Run draft workflow node
@@ -569,7 +569,7 @@ class PublishedRagPipelineApi(Resource):
    @account_initialization_required
    @edit_permission_required
    @get_rag_pipeline
    @marshal_with(workflow_fields)
    @marshal_with(workflow_model)
    def get(self, pipeline: Pipeline):
        """
        Get published pipeline
@@ -664,7 +664,7 @@ class PublishedAllRagPipelineApi(Resource):
    @account_initialization_required
    @edit_permission_required
    @get_rag_pipeline
    @marshal_with(workflow_pagination_fields)
    @marshal_with(workflow_pagination_model)
    def get(self, pipeline: Pipeline):
        """
        Get published workflows
@@ -708,7 +708,7 @@ class RagPipelineByIdApi(Resource):
    @account_initialization_required
    @edit_permission_required
    @get_rag_pipeline
    @marshal_with(workflow_fields)
    @marshal_with(workflow_model)
    def patch(self, pipeline: Pipeline, workflow_id: str):
        """
        Update workflow attributes
@@ -830,7 +830,7 @@ class RagPipelineWorkflowRunListApi(Resource):
    @login_required
    @account_initialization_required
    @get_rag_pipeline
    @marshal_with(workflow_run_pagination_fields)
    @marshal_with(workflow_run_pagination_model)
    def get(self, pipeline: Pipeline):
        """
        Get workflow run list
@@ -858,7 +858,7 @@ class RagPipelineWorkflowRunDetailApi(Resource):
    @login_required
    @account_initialization_required
    @get_rag_pipeline
    @marshal_with(workflow_run_detail_fields)
    @marshal_with(workflow_run_detail_model)
    def get(self, pipeline: Pipeline, run_id):
        """
        Get workflow run detail
@@ -877,7 +877,7 @@ class RagPipelineWorkflowRunNodeExecutionListApi(Resource):
    @login_required
    @account_initialization_required
    @get_rag_pipeline
    @marshal_with(workflow_run_node_execution_list_fields)
    @marshal_with(workflow_run_node_execution_list_model)
    def get(self, pipeline: Pipeline, run_id: str):
        """
        Get workflow run node execution list
@@ -911,7 +911,7 @@ class RagPipelineWorkflowLastRunApi(Resource):
    @login_required
    @account_initialization_required
    @get_rag_pipeline
    @marshal_with(workflow_run_node_execution_fields)
    @marshal_with(workflow_run_node_execution_model)
    def get(self, pipeline: Pipeline, node_id: str):
        rag_pipeline_service = RagPipelineService()
        workflow = rag_pipeline_service.get_draft_workflow(pipeline=pipeline)
@@ -952,7 +952,7 @@ class RagPipelineDatasourceVariableApi(Resource):
    @account_initialization_required
    @get_rag_pipeline
    @edit_permission_required
    @marshal_with(workflow_run_node_execution_fields)
    @marshal_with(workflow_run_node_execution_model)
    def post(self, pipeline: Pipeline):
        """
        Set datasource variables
@ -2,16 +2,17 @@ import logging
|
||||
from typing import Any
|
||||
|
||||
from flask import request
|
||||
from flask_restx import Resource, marshal_with
|
||||
from pydantic import BaseModel
|
||||
from flask_restx import Resource, fields, marshal_with
|
||||
from pydantic import BaseModel, Field
|
||||
from sqlalchemy import and_, select
|
||||
from werkzeug.exceptions import BadRequest, Forbidden, NotFound
|
||||
|
||||
from controllers.common.schema import get_or_create_model
|
||||
from controllers.console import console_ns
|
||||
from controllers.console.explore.wraps import InstalledAppResource
|
||||
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
|
||||
from extensions.ext_database import db
|
||||
from fields.installed_app_fields import installed_app_list_fields
|
||||
from fields.installed_app_fields import app_fields, installed_app_fields, installed_app_list_fields
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.login import current_account_with_tenant, login_required
|
||||
from models import App, InstalledApp, RecommendedApp
|
||||
@ -28,22 +29,37 @@ class InstalledAppUpdatePayload(BaseModel):
|
||||
is_pinned: bool | None = None
|
||||
|
||||
|
||||
class InstalledAppsListQuery(BaseModel):
|
||||
app_id: str | None = Field(default=None, description="App ID to filter by")
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
app_model = get_or_create_model("InstalledAppInfo", app_fields)
|
||||
|
||||
installed_app_fields_copy = installed_app_fields.copy()
|
||||
installed_app_fields_copy["app"] = fields.Nested(app_model)
|
||||
installed_app_model = get_or_create_model("InstalledApp", installed_app_fields_copy)
|
||||
|
||||
installed_app_list_fields_copy = installed_app_list_fields.copy()
|
||||
installed_app_list_fields_copy["installed_apps"] = fields.List(fields.Nested(installed_app_model))
|
||||
installed_app_list_model = get_or_create_model("InstalledAppList", installed_app_list_fields_copy)
|
||||
|
||||
|
||||
@console_ns.route("/installed-apps")
|
||||
class InstalledAppsListApi(Resource):
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@marshal_with(installed_app_list_fields)
|
||||
@marshal_with(installed_app_list_model)
|
||||
def get(self):
|
||||
app_id = request.args.get("app_id", default=None, type=str)
|
||||
query = InstalledAppsListQuery.model_validate(request.args.to_dict())
|
||||
current_user, current_tenant_id = current_account_with_tenant()
|
||||
|
||||
if app_id:
|
||||
if query.app_id:
|
||||
installed_apps = db.session.scalars(
|
||||
select(InstalledApp).where(
|
||||
and_(InstalledApp.tenant_id == current_tenant_id, InstalledApp.app_id == app_id)
|
||||
and_(InstalledApp.tenant_id == current_tenant_id, InstalledApp.app_id == query.app_id)
|
||||
)
|
||||
).all()
|
||||
else:
|
||||
|
||||
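A recurring pattern in this commit: query strings move from ad-hoc `request.args.get` calls into Pydantic models validated in one step. A minimal self-contained sketch of the idiom (model and values here are illustrative, not from the diff):

```python
from pydantic import BaseModel, Field, ValidationError

class ListQuery(BaseModel):
    # Stand-in for InstalledAppsListQuery above
    app_id: str | None = Field(default=None, description="App ID to filter by")

# request.args.to_dict() yields a plain {str: str} mapping; model_validate
# applies defaults and type coercion, and rejects malformed input in one place.
assert ListQuery.model_validate({"app_id": "a1"}).app_id == "a1"
assert ListQuery.model_validate({}).app_id is None

try:
    ListQuery.model_validate({"app_id": ["not", "a", "string"]})
except ValidationError as exc:
    print(exc.error_count(), "validation error")  # 1 validation error
```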
@ -3,6 +3,7 @@ from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field

from constants.languages import languages
from controllers.common.schema import get_or_create_model
from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required
from libs.helper import AppIconUrlField
@ -19,8 +20,10 @@ app_fields = {
"icon_background": fields.String,
}

app_model = get_or_create_model("RecommendedAppInfo", app_fields)

recommended_app_fields = {
"app": fields.Nested(app_fields, attribute="app"),
"app": fields.Nested(app_model, attribute="app"),
"app_id": fields.String,
"description": fields.String(attribute="description"),
"copyright": fields.String,
@ -32,11 +35,15 @@ recommended_app_fields = {
"can_trial": fields.Boolean,
}

recommended_app_model = get_or_create_model("RecommendedApp", recommended_app_fields)

recommended_app_list_fields = {
"recommended_apps": fields.List(fields.Nested(recommended_app_fields)),
"recommended_apps": fields.List(fields.Nested(recommended_app_model)),
"categories": fields.List(fields.String),
}

recommended_app_list_model = get_or_create_model("RecommendedAppList", recommended_app_list_fields)


class RecommendedAppsQuery(BaseModel):
language: str | None = Field(default=None)
@ -53,7 +60,7 @@ class RecommendedAppListApi(Resource):
@console_ns.expect(console_ns.models[RecommendedAppsQuery.__name__])
@login_required
@account_initialization_required
@marshal_with(recommended_app_list_fields)
@marshal_with(recommended_app_list_model)
def get(self):
# language args
args = RecommendedAppsQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore

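The swap from raw field dicts to registered models is what makes nested structures show up as named definitions in the generated Swagger document; `marshal` output itself is unchanged. A minimal sketch of the nesting (namespace and model names illustrative):

```python
from flask_restx import Namespace, fields, marshal

ns = Namespace("demo")  # stand-in for console_ns

# Raw dicts can be marshalled, but only registered models appear as
# named $ref-able definitions in the Swagger output.
app_info = ns.model("DemoRecommendedAppInfo", {"id": fields.String, "name": fields.String})
recommended = ns.model(
    "DemoRecommendedApp",
    {"app": fields.Nested(app_info), "copyright": fields.String},
)

data = {"app": {"id": "a1", "name": "Chat"}, "copyright": "2025"}
print(marshal(data, recommended))
# {'app': {'id': 'a1', 'name': 'Chat'}, 'copyright': '2025'}
```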
@ -2,13 +2,14 @@ import logging
from typing import Any, cast

from flask import request
from flask_restx import Resource, marshal, marshal_with, reqparse
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound

import services
from controllers.common.fields import Parameters as ParametersResponse
from controllers.common.fields import Site as SiteResponse
from controllers.console import api
from controllers.common.schema import get_or_create_model
from controllers.console import api, console_ns
from controllers.console.app.error import (
AppUnavailableError,
AudioTooLargeError,
@ -42,9 +43,21 @@ from core.errors.error import (
from core.model_runtime.errors.invoke import InvokeError
from core.workflow.graph_engine.manager import GraphEngineManager
from extensions.ext_database import db
from fields.app_fields import app_detail_fields_with_site
from fields.app_fields import (
app_detail_fields_with_site,
deleted_tool_fields,
model_config_fields,
site_fields,
tag_fields,
)
from fields.dataset_fields import dataset_fields
from fields.workflow_fields import workflow_fields
from fields.member_fields import build_simple_account_model
from fields.workflow_fields import (
conversation_variable_fields,
pipeline_variable_fields,
workflow_fields,
workflow_partial_fields,
)
from libs import helper
from libs.helper import uuid_value
from libs.login import current_user
@ -74,6 +87,36 @@ from services.recommended_app_service import RecommendedAppService
logger = logging.getLogger(__name__)


model_config_model = get_or_create_model("TrialAppModelConfig", model_config_fields)
workflow_partial_model = get_or_create_model("TrialWorkflowPartial", workflow_partial_fields)
deleted_tool_model = get_or_create_model("TrialDeletedTool", deleted_tool_fields)
tag_model = get_or_create_model("TrialTag", tag_fields)
site_model = get_or_create_model("TrialSite", site_fields)

app_detail_fields_with_site_copy = app_detail_fields_with_site.copy()
app_detail_fields_with_site_copy["model_config"] = fields.Nested(
model_config_model, attribute="app_model_config", allow_null=True
)
app_detail_fields_with_site_copy["workflow"] = fields.Nested(workflow_partial_model, allow_null=True)
app_detail_fields_with_site_copy["deleted_tools"] = fields.List(fields.Nested(deleted_tool_model))
app_detail_fields_with_site_copy["tags"] = fields.List(fields.Nested(tag_model))
app_detail_fields_with_site_copy["site"] = fields.Nested(site_model)
app_detail_with_site_model = get_or_create_model("TrialAppDetailWithSite", app_detail_fields_with_site_copy)

simple_account_model = build_simple_account_model(console_ns)
conversation_variable_model = get_or_create_model("TrialConversationVariable", conversation_variable_fields)
pipeline_variable_model = get_or_create_model("TrialPipelineVariable", pipeline_variable_fields)

workflow_fields_copy = workflow_fields.copy()
workflow_fields_copy["created_by"] = fields.Nested(simple_account_model, attribute="created_by_account")
workflow_fields_copy["updated_by"] = fields.Nested(
simple_account_model, attribute="updated_by_account", allow_null=True
)
workflow_fields_copy["conversation_variables"] = fields.List(fields.Nested(conversation_variable_model))
workflow_fields_copy["rag_pipeline_variables"] = fields.List(fields.Nested(pipeline_variable_model))
workflow_model = get_or_create_model("TrialWorkflow", workflow_fields_copy)


class TrialAppWorkflowRunApi(TrialAppResource):
def post(self, trial_app):
"""
@ -437,7 +480,7 @@ class TrialAppParameterApi(Resource):
class AppApi(Resource):
@trial_feature_enable
@get_app_model_with_trial
@marshal_with(app_detail_fields_with_site)
@marshal_with(app_detail_with_site_model)
def get(self, app_model):
"""Get app detail"""

@ -450,7 +493,7 @@ class AppApi(Resource):
class AppWorkflowApi(Resource):
@trial_feature_enable
@get_app_model_with_trial
@marshal_with(workflow_fields)
@marshal_with(workflow_model)
def get(self, app_model):
"""Get workflow detail"""
if not app_model.workflow_id:

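The trial-app hunk repeats one idiom throughout: copy a shared field dict, swap nested raw dicts for registered models, then register the copy under a "Trial"-prefixed name so it cannot collide with the console's own model of the same shape. A minimal sketch, with illustrative names:

```python
from flask_restx import Namespace, fields

ns = Namespace("trial-demo")  # stand-in for console_ns

simple_account = ns.model("DemoSimpleAccount", {"id": fields.String, "name": fields.String})

base_fields = {"id": fields.String, "created_by": fields.Raw}

# 1. Copy, so the shared dict stays untouched for other consumers.
trial_fields = base_fields.copy()
# 2. Swap raw entries for registered nested models.
trial_fields["created_by"] = fields.Nested(simple_account, attribute="created_by_account")
# 3. Register the copy under a prefixed name to avoid clobbering an
#    identically named model registered by another controller module.
trial_model = ns.model("DemoTrialWorkflow", trial_fields)
```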
@ -40,6 +40,7 @@ register_schema_models(
TagBasePayload,
TagBindingPayload,
TagBindingRemovePayload,
TagListQueryParam,
)



@ -171,6 +171,19 @@ reg(ChangeEmailValidityPayload)
reg(ChangeEmailResetPayload)
reg(CheckEmailUniquePayload)

integrate_fields = {
"provider": fields.String,
"created_at": TimestampField,
"is_bound": fields.Boolean,
"link": fields.String,
}

integrate_model = console_ns.model("AccountIntegrate", integrate_fields)
integrate_list_model = console_ns.model(
"AccountIntegrateList",
{"data": fields.List(fields.Nested(integrate_model))},
)


@console_ns.route("/account/init")
class AccountInitApi(Resource):
@ -336,21 +349,10 @@ class AccountPasswordApi(Resource):

@console_ns.route("/account/integrates")
class AccountIntegrateApi(Resource):
integrate_fields = {
"provider": fields.String,
"created_at": TimestampField,
"is_bound": fields.Boolean,
"link": fields.String,
}

integrate_list_fields = {
"data": fields.List(fields.Nested(integrate_fields)),
}

@setup_required
@login_required
@account_initialization_required
@marshal_with(integrate_list_fields)
@marshal_with(integrate_list_model)
def get(self):
account, _ = current_account_with_tenant()


@ -1,11 +1,12 @@
from urllib import parse

from flask import abort, request
from flask_restx import Resource, marshal_with
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field

import services
from configs import dify_config
from controllers.common.schema import get_or_create_model, register_enum_models
from controllers.console import console_ns
from controllers.console.auth.error import (
CannotTransferOwnerToSelfError,
@ -24,7 +25,7 @@ from controllers.console.wraps import (
setup_required,
)
from extensions.ext_database import db
from fields.member_fields import account_with_role_list_fields
from fields.member_fields import account_with_role_fields, account_with_role_list_fields
from libs.helper import extract_remote_ip
from libs.login import current_account_with_tenant, login_required
from models.account import Account, TenantAccountRole
@ -67,6 +68,13 @@ reg(MemberRoleUpdatePayload)
reg(OwnerTransferEmailPayload)
reg(OwnerTransferCheckPayload)
reg(OwnerTransferPayload)
register_enum_models(console_ns, TenantAccountRole)

account_with_role_model = get_or_create_model("AccountWithRole", account_with_role_fields)

account_with_role_list_fields_copy = account_with_role_list_fields.copy()
account_with_role_list_fields_copy["accounts"] = fields.List(fields.Nested(account_with_role_model))
account_with_role_list_model = get_or_create_model("AccountWithRoleList", account_with_role_list_fields_copy)


@console_ns.route("/workspaces/current/members")
@ -76,7 +84,7 @@ class MemberListApi(Resource):
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_with_role_list_fields)
@marshal_with(account_with_role_list_model)
def get(self):
current_user, _ = current_account_with_tenant()
if not current_user.current_tenant:
@ -227,7 +235,7 @@ class DatasetOperatorMemberListApi(Resource):
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_with_role_list_fields)
@marshal_with(account_with_role_list_model)
def get(self):
current_user, _ = current_account_with_tenant()
if not current_user.current_tenant:

@ -5,6 +5,7 @@ from flask import request
from flask_restx import Resource
from pydantic import BaseModel, Field, field_validator

from controllers.common.schema import register_enum_models, register_schema_models
from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, is_admin_or_owner_required, setup_required
from core.model_runtime.entities.model_entities import ModelType
@ -23,12 +24,13 @@ class ParserGetDefault(BaseModel):
model_type: ModelType


class ParserPostDefault(BaseModel):
class Inner(BaseModel):
model_type: ModelType
model: str | None = None
provider: str | None = None
class Inner(BaseModel):
model_type: ModelType
model: str | None = None
provider: str | None = None


class ParserPostDefault(BaseModel):
model_settings: list[Inner]


@ -105,19 +107,21 @@ class ParserParameter(BaseModel):
model: str


def reg(cls: type[BaseModel]):
console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
register_schema_models(
console_ns,
ParserGetDefault,
ParserPostDefault,
ParserDeleteModels,
ParserPostModels,
ParserGetCredentials,
ParserCreateCredential,
ParserUpdateCredential,
ParserDeleteCredential,
ParserParameter,
Inner,
)


reg(ParserGetDefault)
reg(ParserPostDefault)
reg(ParserDeleteModels)
reg(ParserPostModels)
reg(ParserGetCredentials)
reg(ParserCreateCredential)
reg(ParserUpdateCredential)
reg(ParserDeleteCredential)
reg(ParserParameter)
register_enum_models(console_ns, ModelType)


@console_ns.route("/workspaces/current/default-model")

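Hoisting `Inner` to module level is what lets it be passed to `register_schema_models`: each referenced model must be registered by name so Swagger `$ref`s resolve, and a class nested inside `ParserPostDefault` could not be registered (or imported) on its own. For the enum side, a minimal sketch of the schema that `register_enum_models` feeds to `Namespace.schema_model`, assuming Pydantic v2 (enum values illustrative):

```python
from enum import StrEnum

from pydantic import TypeAdapter

class ModelTypeDemo(StrEnum):  # stand-in for core's ModelType
    LLM = "llm"
    TEXT_EMBEDDING = "text-embedding"

# Uses the Swagger 2.0 "#/definitions/{model}" ref template from the diff.
schema = TypeAdapter(ModelTypeDemo).json_schema(ref_template="#/definitions/{model}")
print(schema)
# {'enum': ['llm', 'text-embedding'], 'title': 'ModelTypeDemo', 'type': 'string'}
```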
@ -8,6 +8,7 @@ from pydantic import BaseModel, Field
from werkzeug.exceptions import Forbidden

from configs import dify_config
from controllers.common.schema import register_enum_models, register_schema_models
from controllers.console import console_ns
from controllers.console.workspace import plugin_permission_required
from controllers.console.wraps import account_initialization_required, is_admin_or_owner_required, setup_required
@ -20,57 +21,12 @@ from services.plugin.plugin_parameter_service import PluginParameterService
from services.plugin.plugin_permission_service import PluginPermissionService
from services.plugin.plugin_service import PluginService

DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"


def reg(cls: type[BaseModel]):
console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))


@console_ns.route("/workspaces/current/plugin/debugging-key")
class PluginDebuggingKeyApi(Resource):
@setup_required
@login_required
@account_initialization_required
@plugin_permission_required(debug_required=True)
def get(self):
_, tenant_id = current_account_with_tenant()

try:
return {
"key": PluginService.get_debugging_key(tenant_id),
"host": dify_config.PLUGIN_REMOTE_INSTALL_HOST,
"port": dify_config.PLUGIN_REMOTE_INSTALL_PORT,
}
except PluginDaemonClientSideError as e:
raise ValueError(e)


class ParserList(BaseModel):
page: int = Field(default=1, ge=1, description="Page number")
page_size: int = Field(default=256, ge=1, le=256, description="Page size (1-256)")


reg(ParserList)


@console_ns.route("/workspaces/current/plugin/list")
class PluginListApi(Resource):
@console_ns.expect(console_ns.models[ParserList.__name__])
@setup_required
@login_required
@account_initialization_required
def get(self):
_, tenant_id = current_account_with_tenant()
args = ParserList.model_validate(request.args.to_dict(flat=True)) # type: ignore
try:
plugins_with_total = PluginService.list_with_total(tenant_id, args.page, args.page_size)
except PluginDaemonClientSideError as e:
raise ValueError(e)

return jsonable_encoder({"plugins": plugins_with_total.list, "total": plugins_with_total.total})


class ParserLatest(BaseModel):
plugin_ids: list[str]

@ -180,23 +136,73 @@ class ParserReadme(BaseModel):
language: str = Field(default="en-US")


reg(ParserLatest)
reg(ParserIcon)
reg(ParserAsset)
reg(ParserGithubUpload)
reg(ParserPluginIdentifiers)
reg(ParserGithubInstall)
reg(ParserPluginIdentifierQuery)
reg(ParserTasks)
reg(ParserMarketplaceUpgrade)
reg(ParserGithubUpgrade)
reg(ParserUninstall)
reg(ParserPermissionChange)
reg(ParserDynamicOptions)
reg(ParserDynamicOptionsWithCredentials)
reg(ParserPreferencesChange)
reg(ParserExcludePlugin)
reg(ParserReadme)
register_schema_models(
console_ns,
ParserList,
PluginAutoUpgradeSettingsPayload,
PluginPermissionSettingsPayload,
ParserLatest,
ParserIcon,
ParserAsset,
ParserGithubUpload,
ParserPluginIdentifiers,
ParserGithubInstall,
ParserPluginIdentifierQuery,
ParserTasks,
ParserMarketplaceUpgrade,
ParserGithubUpgrade,
ParserUninstall,
ParserPermissionChange,
ParserDynamicOptions,
ParserDynamicOptionsWithCredentials,
ParserPreferencesChange,
ParserExcludePlugin,
ParserReadme,
)

register_enum_models(
console_ns,
TenantPluginPermission.DebugPermission,
TenantPluginAutoUpgradeStrategy.UpgradeMode,
TenantPluginAutoUpgradeStrategy.StrategySetting,
TenantPluginPermission.InstallPermission,
)


@console_ns.route("/workspaces/current/plugin/debugging-key")
class PluginDebuggingKeyApi(Resource):
@setup_required
@login_required
@account_initialization_required
@plugin_permission_required(debug_required=True)
def get(self):
_, tenant_id = current_account_with_tenant()

try:
return {
"key": PluginService.get_debugging_key(tenant_id),
"host": dify_config.PLUGIN_REMOTE_INSTALL_HOST,
"port": dify_config.PLUGIN_REMOTE_INSTALL_PORT,
}
except PluginDaemonClientSideError as e:
raise ValueError(e)


@console_ns.route("/workspaces/current/plugin/list")
class PluginListApi(Resource):
@console_ns.expect(console_ns.models[ParserList.__name__])
@setup_required
@login_required
@account_initialization_required
def get(self):
_, tenant_id = current_account_with_tenant()
args = ParserList.model_validate(request.args.to_dict(flat=True)) # type: ignore
try:
plugins_with_total = PluginService.list_with_total(tenant_id, args.page, args.page_size)
except PluginDaemonClientSideError as e:
raise ValueError(e)

return jsonable_encoder({"plugins": plugins_with_total.list, "total": plugins_with_total.total})


@console_ns.route("/workspaces/current/plugin/list/latest-versions")

@ -2,7 +2,7 @@ from typing import Any, Literal, cast

from flask import request
from flask_restx import marshal
from pydantic import BaseModel, Field, field_validator
from pydantic import BaseModel, Field, TypeAdapter, field_validator
from werkzeug.exceptions import Forbidden, NotFound

import services
@ -26,6 +26,14 @@ from services.dataset_service import DatasetPermissionService, DatasetService, D
from services.entities.knowledge_entities.knowledge_entities import RetrievalModel
from services.tag_service import TagService

DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"


service_api_ns.schema_model(
DatasetPermissionEnum.__name__,
TypeAdapter(DatasetPermissionEnum).json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)


class DatasetCreatePayload(BaseModel):
name: str = Field(..., min_length=1, max_length=40)
@ -87,6 +95,14 @@ class TagUnbindingPayload(BaseModel):
target_id: str


class DatasetListQuery(BaseModel):
page: int = Field(default=1, description="Page number")
limit: int = Field(default=20, description="Number of items per page")
keyword: str | None = Field(default=None, description="Search keyword")
include_all: bool = Field(default=False, description="Include all datasets")
tag_ids: list[str] = Field(default_factory=list, description="Filter by tag IDs")


register_schema_models(
service_api_ns,
DatasetCreatePayload,
@ -96,6 +112,7 @@ register_schema_models(
TagDeletePayload,
TagBindingPayload,
TagUnbindingPayload,
DatasetListQuery,
)


@ -113,15 +130,11 @@ class DatasetListApi(DatasetApiResource):
)
def get(self, tenant_id):
"""Resource for getting datasets."""
page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int)
query = DatasetListQuery.model_validate(request.args.to_dict())
# provider = request.args.get("provider", default="vendor")
search = request.args.get("keyword", default=None, type=str)
tag_ids = request.args.getlist("tag_ids")
include_all = request.args.get("include_all", default="false").lower() == "true"

datasets, total = DatasetService.get_datasets(
page, limit, tenant_id, current_user, search, tag_ids, include_all
query.page, query.limit, tenant_id, current_user, query.keyword, query.tag_ids, query.include_all
)
# check embedding setting
provider_manager = ProviderManager()
@ -147,7 +160,13 @@ class DatasetListApi(DatasetApiResource):
item["embedding_available"] = False
else:
item["embedding_available"] = True
response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page}
response = {
"data": data,
"has_more": len(datasets) == query.limit,
"limit": query.limit,
"total": total,
"page": query.page,
}
return response, 200

@service_api_ns.expect(service_api_ns.models[DatasetCreatePayload.__name__])

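One subtlety in this migration worth noting: the old code read `tag_ids` with `request.args.getlist`, while the new code validates `request.args.to_dict()`, and Werkzeug's `to_dict()` keeps only the first value per key. Whether that matters depends on how clients pass `tag_ids`; a minimal sketch of the difference (illustrative, not part of the diff):

```python
from werkzeug.datastructures import MultiDict

args = MultiDict([("tag_ids", "a"), ("tag_ids", "b"), ("page", "2")])

print(args.to_dict())            # {'tag_ids': 'a', 'page': '2'} - first value only
print(args.to_dict(flat=False))  # {'tag_ids': ['a', 'b'], 'page': ['2']}
print(args.getlist("tag_ids"))   # ['a', 'b'] - what the old code consumed
```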
@ -16,6 +16,7 @@ from controllers.common.errors import (
TooManyFilesError,
UnsupportedFileTypeError,
)
from controllers.common.schema import register_enum_models, register_schema_models
from controllers.service_api import service_api_ns
from controllers.service_api.app.error import ProviderNotInitializeError
from controllers.service_api.dataset.error import (
@ -29,12 +30,20 @@ from controllers.service_api.wraps import (
cloud_edition_billing_resource_check,
)
from core.errors.error import ProviderTokenNotInitError
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from extensions.ext_database import db
from fields.document_fields import document_fields, document_status_fields
from libs.login import current_user
from models.dataset import Dataset, Document, DocumentSegment
from services.dataset_service import DatasetService, DocumentService
from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig, ProcessRule, RetrievalModel
from services.entities.knowledge_entities.knowledge_entities import (
KnowledgeConfig,
PreProcessingRule,
ProcessRule,
RetrievalModel,
Rule,
Segmentation,
)
from services.file_service import FileService


@ -69,8 +78,26 @@ class DocumentTextUpdate(BaseModel):
return self


for m in [ProcessRule, RetrievalModel, DocumentTextCreatePayload, DocumentTextUpdate]:
service_api_ns.schema_model(m.__name__, m.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)) # type: ignore
class DocumentListQuery(BaseModel):
page: int = Field(default=1, description="Page number")
limit: int = Field(default=20, description="Number of items per page")
keyword: str | None = Field(default=None, description="Search keyword")
status: str | None = Field(default=None, description="Document status filter")


register_enum_models(service_api_ns, RetrievalMethod)

register_schema_models(
service_api_ns,
ProcessRule,
RetrievalModel,
DocumentTextCreatePayload,
DocumentTextUpdate,
DocumentListQuery,
Rule,
PreProcessingRule,
Segmentation,
)


@service_api_ns.route(
@ -460,34 +487,33 @@ class DocumentListApi(DatasetApiResource):
def get(self, tenant_id, dataset_id):
dataset_id = str(dataset_id)
tenant_id = str(tenant_id)
page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int)
search = request.args.get("keyword", default=None, type=str)
status = request.args.get("status", default=None, type=str)
query_params = DocumentListQuery.model_validate(request.args.to_dict())
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
if not dataset:
raise NotFound("Dataset not found.")

query = select(Document).filter_by(dataset_id=str(dataset_id), tenant_id=tenant_id)

if status:
query = DocumentService.apply_display_status_filter(query, status)
if query_params.status:
query = DocumentService.apply_display_status_filter(query, query_params.status)

if search:
search = f"%{search}%"
if query_params.keyword:
search = f"%{query_params.keyword}%"
query = query.where(Document.name.like(search))

query = query.order_by(desc(Document.created_at), desc(Document.position))

paginated_documents = db.paginate(select=query, page=page, per_page=limit, max_per_page=100, error_out=False)
paginated_documents = db.paginate(
select=query, page=query_params.page, per_page=query_params.limit, max_per_page=100, error_out=False
)
documents = paginated_documents.items

response = {
"data": marshal(documents, document_fields),
"has_more": len(documents) == limit,
"limit": limit,
"has_more": len(documents) == query_params.limit,
"limit": query_params.limit,
"total": paginated_documents.total,
"page": page,
"page": query_params.page,
}

return response

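The `has_more` heuristic used by both list endpoints deserves a brief note: it infers another page whenever the current page is full, which over-reports by one request when the total is an exact multiple of the page size. A minimal sketch of the arithmetic (illustrative):

```python
def has_more(items_on_page: int, limit: int) -> bool:
    # Full page => assume another page exists. When total % limit == 0,
    # the final full page still reports True and the next request returns
    # an empty page. Cheap to compute, one request imprecise at the edge.
    return items_on_page == limit

assert has_more(20, 20) is True   # mid-listing
assert has_more(7, 20) is False   # final partial page
# total == 40, limit == 20: page 2 is full, so has_more is still True
```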
@ -60,6 +60,7 @@ register_schema_models(
service_api_ns,
SegmentCreatePayload,
SegmentListQuery,
SegmentUpdateArgs,
SegmentUpdatePayload,
ChildChunkCreatePayload,
ChildChunkListQuery,

@ -21,6 +21,7 @@ from core.app.entities.queue_entities import (
)
from core.app.features.annotation_reply.annotation_reply import AnnotationReplyFeature
from core.app.layers.conversation_variable_persist_layer import ConversationVariablePersistenceLayer
from core.app.workflow.layers.persistence import PersistenceWorkflowInfo, WorkflowPersistenceLayer
from core.db.session_factory import session_factory
from core.moderation.base import ModerationError
from core.moderation.input_moderation import InputModeration
@ -28,7 +29,6 @@ from core.variables.variables import Variable
from core.workflow.enums import WorkflowType
from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel
from core.workflow.graph_engine.layers.base import GraphEngineLayer
from core.workflow.graph_engine.layers.persistence import PersistenceWorkflowInfo, WorkflowPersistenceLayer
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.runtime import GraphRuntimeState, VariablePool

@ -9,12 +9,12 @@ from core.app.entities.app_invoke_entities import (
InvokeFrom,
RagPipelineGenerateEntity,
)
from core.app.workflow.layers.persistence import PersistenceWorkflowInfo, WorkflowPersistenceLayer
from core.app.workflow.node_factory import DifyNodeFactory
from core.variables.variables import RAGPipelineVariable, RAGPipelineVariableInput
from core.workflow.entities.graph_init_params import GraphInitParams
from core.workflow.enums import WorkflowType
from core.workflow.graph import Graph
from core.workflow.graph_engine.layers.persistence import PersistenceWorkflowInfo, WorkflowPersistenceLayer
from core.workflow.graph_events import GraphEngineEvent, GraphRunFailedEvent
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository

@ -7,10 +7,10 @@ from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.apps.workflow.app_config_manager import WorkflowAppConfig
from core.app.apps.workflow_app_runner import WorkflowBasedAppRunner
from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity
from core.app.workflow.layers.persistence import PersistenceWorkflowInfo, WorkflowPersistenceLayer
from core.workflow.enums import WorkflowType
from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel
from core.workflow.graph_engine.layers.base import GraphEngineLayer
from core.workflow.graph_engine.layers.persistence import PersistenceWorkflowInfo, WorkflowPersistenceLayer
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.runtime import GraphRuntimeState, VariablePool

@ -157,7 +157,7 @@ class WorkflowBasedAppRunner:
# Create initial runtime state with variable pool containing environment variables
graph_runtime_state = GraphRuntimeState(
variable_pool=VariablePool(
system_variables=SystemVariable.empty(),
system_variables=SystemVariable.default(),
user_inputs={},
environment_variables=workflow.environment_variables,
),
@ -272,7 +272,9 @@ class WorkflowBasedAppRunner:
)

# init graph
graph = Graph.init(graph_config=graph_config, node_factory=node_factory, root_node_id=node_id)
graph = Graph.init(
graph_config=graph_config, node_factory=node_factory, root_node_id=node_id, skip_validation=True
)

if not graph:
raise ValueError("graph not found in workflow")

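`skip_validation=True` here matches the single-node debug path: the runner builds a graph rooted at an arbitrary `node_id`, so whole-graph invariants are expected to fail and are deliberately bypassed. A minimal sketch of the toggle this relies on, condensed from the `Graph.init` hunk further down (validator and graph shape illustrative):

```python
class GraphValidationError(Exception):
    pass

def validate(graph: dict) -> None:
    if "start" not in graph.get("nodes", {}):
        raise GraphValidationError("graph has no start node")

def init_graph(graph: dict, *, skip_validation: bool = False) -> dict:
    if not skip_validation:
        validate(graph)
    return graph

# Single-node debugging roots the graph at an arbitrary node, so the
# start-node invariant would fail; the flag bypasses it on purpose.
sub_graph = {"nodes": {"llm-1": {}}}
init_graph(sub_graph, skip_validation=True)   # ok
# init_graph(sub_graph)                        # would raise GraphValidationError
```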
10
api/core/app/workflow/layers/__init__.py
Normal file
@ -0,0 +1,10 @@
"""Workflow-level GraphEngine layers that depend on outer infrastructure."""

from .observability import ObservabilityLayer
from .persistence import PersistenceWorkflowInfo, WorkflowPersistenceLayer

__all__ = [
"ObservabilityLayer",
"PersistenceWorkflowInfo",
"WorkflowPersistenceLayer",
]
@ -18,12 +18,15 @@ from typing_extensions import override
from configs import dify_config
from core.workflow.enums import NodeType
from core.workflow.graph_engine.layers.base import GraphEngineLayer
from core.workflow.graph_engine.layers.node_parsers import (
from core.workflow.graph_events import GraphNodeEventBase
from core.workflow.nodes.base.node import Node
from extensions.otel.parser import (
DefaultNodeOTelParser,
LLMNodeOTelParser,
NodeOTelParser,
RetrievalNodeOTelParser,
ToolNodeOTelParser,
)
from core.workflow.nodes.base.node import Node
from extensions.otel.runtime import is_instrument_flag_enabled

logger = logging.getLogger(__name__)
@ -72,6 +75,8 @@ class ObservabilityLayer(GraphEngineLayer):
"""Initialize parser registry for node types."""
self._parsers = {
NodeType.TOOL: ToolNodeOTelParser(),
NodeType.LLM: LLMNodeOTelParser(),
NodeType.KNOWLEDGE_RETRIEVAL: RetrievalNodeOTelParser(),
}

def _get_parser(self, node: Node) -> NodeOTelParser:
@ -119,7 +124,9 @@ class ObservabilityLayer(GraphEngineLayer):
logger.warning("Failed to create OpenTelemetry span for node %s: %s", node.id, e)

@override
def on_node_run_end(self, node: Node, error: Exception | None) -> None:
def on_node_run_end(
self, node: Node, error: Exception | None, result_event: GraphNodeEventBase | None = None
) -> None:
"""
Called when a node finishes execution.

@ -139,7 +146,7 @@ class ObservabilityLayer(GraphEngineLayer):
span = node_context.span
parser = self._get_parser(node)
try:
parser.parse(node=node, span=span, error=error)
parser.parse(node=node, span=span, error=error, result_event=result_event)
span.end()
finally:
token = node_context.token
@ -45,7 +45,6 @@ from core.workflow.graph_events import (
from core.workflow.node_events import NodeRunResult
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.workflow_entry import WorkflowEntry
from libs.datetime_utils import naive_utc_now


@ -316,6 +315,9 @@ class WorkflowPersistenceLayer(GraphEngineLayer):
# workflow inputs stay reusable without binding future runs to this conversation.
continue
inputs[f"sys.{field_name}"] = value
# Local import to avoid circular dependency during app bootstrapping.
from core.workflow.workflow_entry import WorkflowEntry

handled = WorkflowEntry.handle_special_values(inputs)
return handled or {}

@ -1,4 +1,4 @@
from collections.abc import Generator, Mapping
from collections.abc import Generator
from typing import Any

from core.datasource.__base.datasource_plugin import DatasourcePlugin
@ -34,7 +34,7 @@ class OnlineDocumentDatasourcePlugin(DatasourcePlugin):
def get_online_document_pages(
self,
user_id: str,
datasource_parameters: Mapping[str, Any],
datasource_parameters: dict[str, Any],
provider_type: str,
) -> Generator[OnlineDocumentPagesMessage, None, None]:
manager = PluginDatasourceManager()

@ -288,6 +288,7 @@ class Graph:
graph_config: Mapping[str, object],
node_factory: NodeFactory,
root_node_id: str | None = None,
skip_validation: bool = False,
) -> Graph:
"""
Initialize graph
@ -339,8 +340,9 @@ class Graph:
root_node=root_node,
)

# Validate the graph structure using built-in validators
get_graph_validator().validate(graph)
if not skip_validation:
# Validate the graph structure using built-in validators
get_graph_validator().validate(graph)

return graph


@ -8,11 +8,9 @@ with middleware-like components that can observe events and interact with execut
from .base import GraphEngineLayer
from .debug_logging import DebugLoggingLayer
from .execution_limits import ExecutionLimitsLayer
from .observability import ObservabilityLayer

__all__ = [
"DebugLoggingLayer",
"ExecutionLimitsLayer",
"GraphEngineLayer",
"ObservabilityLayer",
]

@ -8,7 +8,7 @@ intercept and respond to GraphEngine events.
from abc import ABC, abstractmethod

from core.workflow.graph_engine.protocols.command_channel import CommandChannel
from core.workflow.graph_events import GraphEngineEvent
from core.workflow.graph_events import GraphEngineEvent, GraphNodeEventBase
from core.workflow.nodes.base.node import Node
from core.workflow.runtime import ReadOnlyGraphRuntimeState

@ -98,7 +98,7 @@ class GraphEngineLayer(ABC):
"""
pass

def on_node_run_start(self, node: Node) -> None:  # noqa: B027
def on_node_run_start(self, node: Node) -> None:
"""
Called immediately before a node begins execution.

@ -109,9 +109,11 @@ class GraphEngineLayer(ABC):
Args:
node: The node instance about to be executed
"""
pass
return

def on_node_run_end(self, node: Node, error: Exception | None) -> None:  # noqa: B027
def on_node_run_end(
self, node: Node, error: Exception | None, result_event: GraphNodeEventBase | None = None
) -> None:
"""
Called after a node finishes execution.

@ -121,5 +123,6 @@ class GraphEngineLayer(ABC):
Args:
node: The node instance that just finished execution
error: Exception instance if the node failed, otherwise None
result_event: The final result event from node execution (succeeded/failed/paused), if any
"""
pass
return

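Passing the terminal event into `on_node_run_end` lets a layer read node outputs without re-deriving them from the event stream. A sketch of a hypothetical timing layer against the new hook signature, assuming the base initializer takes no arguments and the remaining `GraphEngineLayer` hooks keep their no-op defaults:

```python
import time

from core.workflow.graph_engine.layers.base import GraphEngineLayer
from core.workflow.graph_events import GraphNodeEventBase
from core.workflow.nodes.base.node import Node

class NodeTimingLayer(GraphEngineLayer):
    """Hypothetical layer: logs wall-clock duration per node."""

    def __init__(self) -> None:
        super().__init__()
        self._started: dict[str, float] = {}

    def on_node_run_start(self, node: Node) -> None:
        self._started[node.id] = time.monotonic()

    def on_node_run_end(
        self, node: Node, error: Exception | None, result_event: GraphNodeEventBase | None = None
    ) -> None:
        elapsed = time.monotonic() - self._started.pop(node.id, time.monotonic())
        status = "failed" if error else "ok"
        # result_event is the succeeded/failed/paused event, or None if the
        # node raised before emitting one; the worker swallows layer errors.
        print(f"{node.id}: {status} in {elapsed:.3f}s ({type(result_event).__name__})")
```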
@ -1,61 +0,0 @@
"""
Node-level OpenTelemetry parser interfaces and defaults.
"""

import json
from typing import Protocol

from opentelemetry.trace import Span
from opentelemetry.trace.status import Status, StatusCode

from core.workflow.nodes.base.node import Node
from core.workflow.nodes.tool.entities import ToolNodeData


class NodeOTelParser(Protocol):
"""Parser interface for node-specific OpenTelemetry enrichment."""

def parse(self, *, node: Node, span: "Span", error: Exception | None) -> None: ...


class DefaultNodeOTelParser:
"""Fallback parser used when no node-specific parser is registered."""

def parse(self, *, node: Node, span: "Span", error: Exception | None) -> None:
span.set_attribute("node.id", node.id)
if node.execution_id:
span.set_attribute("node.execution_id", node.execution_id)
if hasattr(node, "node_type") and node.node_type:
span.set_attribute("node.type", node.node_type.value)

if error:
span.record_exception(error)
span.set_status(Status(StatusCode.ERROR, str(error)))
else:
span.set_status(Status(StatusCode.OK))


class ToolNodeOTelParser:
"""Parser for tool nodes that captures tool-specific metadata."""

def __init__(self) -> None:
self._delegate = DefaultNodeOTelParser()

def parse(self, *, node: Node, span: "Span", error: Exception | None) -> None:
self._delegate.parse(node=node, span=span, error=error)

tool_data = getattr(node, "_node_data", None)
if not isinstance(tool_data, ToolNodeData):
return

span.set_attribute("tool.provider.id", tool_data.provider_id)
span.set_attribute("tool.provider.type", tool_data.provider_type.value)
span.set_attribute("tool.provider.name", tool_data.provider_name)
span.set_attribute("tool.name", tool_data.tool_name)
span.set_attribute("tool.label", tool_data.tool_label)
if tool_data.plugin_unique_identifier:
span.set_attribute("tool.plugin.id", tool_data.plugin_unique_identifier)
if tool_data.credential_id:
span.set_attribute("tool.credential.id", tool_data.credential_id)
if tool_data.tool_configurations:
span.set_attribute("tool.config", json.dumps(tool_data.tool_configurations, ensure_ascii=False))
@ -17,7 +17,7 @@ from typing_extensions import override
from core.workflow.context import IExecutionContext
from core.workflow.graph import Graph
from core.workflow.graph_engine.layers.base import GraphEngineLayer
from core.workflow.graph_events import GraphNodeEventBase, NodeRunFailedEvent
from core.workflow.graph_events import GraphNodeEventBase, NodeRunFailedEvent, is_node_result_event
from core.workflow.nodes.base.node import Node

from .ready_queue import ReadyQueue
@ -131,6 +131,7 @@ class Worker(threading.Thread):
node.ensure_execution_id()

error: Exception | None = None
result_event: GraphNodeEventBase | None = None

# Execute the node with preserved context if execution context is provided
if self._execution_context is not None:
@ -140,22 +141,26 @@ class Worker(threading.Thread):
node_events = node.run()
for event in node_events:
self._event_queue.put(event)
if is_node_result_event(event):
result_event = event
except Exception as exc:
error = exc
raise
finally:
self._invoke_node_run_end_hooks(node, error)
self._invoke_node_run_end_hooks(node, error, result_event)
else:
self._invoke_node_run_start_hooks(node)
try:
node_events = node.run()
for event in node_events:
self._event_queue.put(event)
if is_node_result_event(event):
result_event = event
except Exception as exc:
error = exc
raise
finally:
self._invoke_node_run_end_hooks(node, error)
self._invoke_node_run_end_hooks(node, error, result_event)

def _invoke_node_run_start_hooks(self, node: Node) -> None:
"""Invoke on_node_run_start hooks for all layers."""
@ -166,11 +171,13 @@ class Worker(threading.Thread):
# Silently ignore layer errors to prevent disrupting node execution
continue

def _invoke_node_run_end_hooks(self, node: Node, error: Exception | None) -> None:
def _invoke_node_run_end_hooks(
self, node: Node, error: Exception | None, result_event: GraphNodeEventBase | None = None
) -> None:
"""Invoke on_node_run_end hooks for all layers."""
for layer in self._layers:
try:
layer.on_node_run_end(node, error)
layer.on_node_run_end(node, error, result_event)
except Exception:
# Silently ignore layer errors to prevent disrupting node execution
continue

@ -44,6 +44,7 @@ from .node import (
NodeRunStartedEvent,
NodeRunStreamChunkEvent,
NodeRunSucceededEvent,
is_node_result_event,
)

__all__ = [
@ -73,4 +74,5 @@ __all__ = [
"NodeRunStartedEvent",
"NodeRunStreamChunkEvent",
"NodeRunSucceededEvent",
"is_node_result_event",
]

@ -56,3 +56,26 @@ class NodeRunRetryEvent(NodeRunStartedEvent):

class NodeRunPauseRequestedEvent(GraphNodeEventBase):
reason: PauseReason = Field(..., description="pause reason")


def is_node_result_event(event: GraphNodeEventBase) -> bool:
"""
Check if an event is a final result event from node execution.

A result event indicates the completion of a node execution and contains
runtime information such as inputs, outputs, or error details.

Args:
event: The event to check

Returns:
True if the event is a node result event (succeeded/failed/paused), False otherwise
"""
return isinstance(
event,
(
NodeRunSucceededEvent,
NodeRunFailedEvent,
NodeRunPauseRequestedEvent,
),
)

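Because a node's `run()` generator interleaves stream chunks with lifecycle events, the worker forwards everything but keeps only the last event matching this predicate. A minimal sketch of that filtering pattern (the event classes below are stand-ins for the real ones):

```python
# Illustrative stand-ins for the event classes named in the diff.
class Event: ...
class ChunkEvent(Event): ...
class SucceededEvent(Event): ...

def is_result(event: Event) -> bool:
    return isinstance(event, SucceededEvent)

events = [ChunkEvent(), ChunkEvent(), SucceededEvent()]

result = None
for event in events:
    # forward every event downstream...
    if is_result(event):
        result = event  # ...but remember only the terminal one

assert isinstance(result, SucceededEvent)
```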
@ -44,7 +44,7 @@ class VariablePool(BaseModel):
)
system_variables: SystemVariable = Field(
description="System variables",
default_factory=SystemVariable.empty,
default_factory=SystemVariable.default,
)
environment_variables: Sequence[Variable] = Field(
description="Environment variables.",
@ -271,4 +271,4 @@ class VariablePool(BaseModel):
@classmethod
def empty(cls) -> VariablePool:
"""Create an empty variable pool."""
return cls(system_variables=SystemVariable.empty())
return cls(system_variables=SystemVariable.default())

@ -3,6 +3,7 @@ from __future__ import annotations
from collections.abc import Mapping, Sequence
from types import MappingProxyType
from typing import Any
from uuid import uuid4

from pydantic import AliasChoices, BaseModel, ConfigDict, Field, model_validator

@ -72,8 +73,8 @@ class SystemVariable(BaseModel):
return data

@classmethod
def empty(cls) -> SystemVariable:
return cls()
def default(cls) -> SystemVariable:
return cls(workflow_execution_id=str(uuid4()))

def to_dict(self) -> dict[SystemVariableKey, Any]:
# NOTE: This method is provided for compatibility with legacy code.
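The rename from `empty()` to `default()` is behavioral, not cosmetic: every pool constructed this way now carries a fresh `workflow_execution_id`, so two default pools are no longer interchangeable. A minimal sketch of the contract (field set reduced for illustration):

```python
from uuid import uuid4

from pydantic import BaseModel

class SystemVariableDemo(BaseModel):
    # Reduced stand-in for SystemVariable in the hunk above
    workflow_execution_id: str | None = None

    @classmethod
    def default(cls) -> "SystemVariableDemo":
        # Seeding the execution id here guarantees every default pool
        # is traceable to a distinct run.
        return cls(workflow_execution_id=str(uuid4()))

a, b = SystemVariableDemo.default(), SystemVariableDemo.default()
assert a.workflow_execution_id != b.workflow_execution_id
```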
@ -7,6 +7,7 @@ from typing import Any
from configs import dify_config
from core.app.apps.exc import GenerateTaskStoppedError
from core.app.entities.app_invoke_entities import InvokeFrom
from core.app.workflow.layers.observability import ObservabilityLayer
from core.app.workflow.node_factory import DifyNodeFactory
from core.file.models import File
from core.workflow.constants import ENVIRONMENT_VARIABLE_NODE_ID
@ -15,7 +16,7 @@ from core.workflow.errors import WorkflowNodeRunFailedError
from core.workflow.graph import Graph
from core.workflow.graph_engine import GraphEngine
from core.workflow.graph_engine.command_channels import InMemoryChannel
from core.workflow.graph_engine.layers import DebugLoggingLayer, ExecutionLimitsLayer, ObservabilityLayer
from core.workflow.graph_engine.layers import DebugLoggingLayer, ExecutionLimitsLayer
from core.workflow.graph_engine.protocols.command_channel import CommandChannel
from core.workflow.graph_events import GraphEngineEvent, GraphNodeEventBase, GraphRunFailedEvent
from core.workflow.nodes import NodeType
@ -276,7 +277,7 @@ class WorkflowEntry:

# init variable pool
variable_pool = VariablePool(
system_variables=SystemVariable.empty(),
system_variables=SystemVariable.default(),
user_inputs={},
environment_variables=[],
)

@ -36,7 +36,7 @@ def init_app(app: DifyApp) -> None:
router.include_router(console_router, prefix="/console/api")
CORS(
app,
resources={r"/console/api/*": {"origins": dify_config.CONSOLE_CORS_ALLOW_ORIGINS}},
resources={r"/console/api/.*": {"origins": dify_config.CONSOLE_CORS_ALLOW_ORIGINS}},
supports_credentials=True,
allow_headers=list(AUTHENTICATED_HEADERS),
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],

20
api/extensions/otel/parser/__init__.py
Normal file
@ -0,0 +1,20 @@
"""
OpenTelemetry node parsers for workflow nodes.

This module provides parsers that extract node-specific metadata and set
OpenTelemetry span attributes according to semantic conventions.
"""

from extensions.otel.parser.base import DefaultNodeOTelParser, NodeOTelParser, safe_json_dumps
from extensions.otel.parser.llm import LLMNodeOTelParser
from extensions.otel.parser.retrieval import RetrievalNodeOTelParser
from extensions.otel.parser.tool import ToolNodeOTelParser

__all__ = [
"DefaultNodeOTelParser",
"LLMNodeOTelParser",
"NodeOTelParser",
"RetrievalNodeOTelParser",
"ToolNodeOTelParser",
"safe_json_dumps",
]
117
api/extensions/otel/parser/base.py
Normal file
@ -0,0 +1,117 @@
"""
Base parser interface and utilities for OpenTelemetry node parsers.
"""

import json
from typing import Any, Protocol

from opentelemetry.trace import Span
from opentelemetry.trace.status import Status, StatusCode
from pydantic import BaseModel

from core.file.models import File
from core.variables import Segment
from core.workflow.enums import NodeType
from core.workflow.graph_events import GraphNodeEventBase
from core.workflow.nodes.base.node import Node
from extensions.otel.semconv.gen_ai import ChainAttributes, GenAIAttributes


def safe_json_dumps(obj: Any, ensure_ascii: bool = False) -> str:
"""
Safely serialize objects to JSON, handling non-serializable types.

Handles:
- Segment types (ArrayFileSegment, FileSegment, etc.) - converts to their value
- File objects - converts to dict using to_dict()
- BaseModel objects - converts using model_dump()
- Other types - falls back to str() representation

Args:
obj: Object to serialize
ensure_ascii: Whether to ensure ASCII encoding

Returns:
JSON string representation of the object
"""

def _convert_value(value: Any) -> Any:
"""Recursively convert non-serializable values."""
if value is None:
return None
if isinstance(value, (bool, int, float, str)):
return value
if isinstance(value, Segment):
# Convert Segment to its underlying value
return _convert_value(value.value)
if isinstance(value, File):
# Convert File to dict
return value.to_dict()
if isinstance(value, BaseModel):
# Convert Pydantic model to dict
return _convert_value(value.model_dump(mode="json"))
if isinstance(value, dict):
return {k: _convert_value(v) for k, v in value.items()}
if isinstance(value, (list, tuple)):
return [_convert_value(item) for item in value]
# Fallback to string representation for unknown types
return str(value)

try:
converted = _convert_value(obj)
return json.dumps(converted, ensure_ascii=ensure_ascii)
except (TypeError, ValueError) as e:
# If conversion still fails, return error message as string
return json.dumps(
{"error": f"Failed to serialize: {type(obj).__name__}", "message": str(e)}, ensure_ascii=ensure_ascii
)


class NodeOTelParser(Protocol):
"""Parser interface for node-specific OpenTelemetry enrichment."""

def parse(
self, *, node: Node, span: "Span", error: Exception | None, result_event: GraphNodeEventBase | None = None
) -> None: ...


class DefaultNodeOTelParser:
"""Fallback parser used when no node-specific parser is registered."""

def parse(
self, *, node: Node, span: "Span", error: Exception | None, result_event: GraphNodeEventBase | None = None
) -> None:
span.set_attribute("node.id", node.id)
if node.execution_id:
span.set_attribute("node.execution_id", node.execution_id)
if hasattr(node, "node_type") and node.node_type:
span.set_attribute("node.type", node.node_type.value)

span.set_attribute(GenAIAttributes.FRAMEWORK, "dify")

node_type = getattr(node, "node_type", None)
if isinstance(node_type, NodeType):
if node_type == NodeType.LLM:
span.set_attribute(GenAIAttributes.SPAN_KIND, "LLM")
elif node_type == NodeType.KNOWLEDGE_RETRIEVAL:
span.set_attribute(GenAIAttributes.SPAN_KIND, "RETRIEVER")
elif node_type == NodeType.TOOL:
span.set_attribute(GenAIAttributes.SPAN_KIND, "TOOL")
else:
span.set_attribute(GenAIAttributes.SPAN_KIND, "TASK")
else:
span.set_attribute(GenAIAttributes.SPAN_KIND, "TASK")

# Extract inputs and outputs from result_event
if result_event and result_event.node_run_result:
node_run_result = result_event.node_run_result
if node_run_result.inputs:
span.set_attribute(ChainAttributes.INPUT_VALUE, safe_json_dumps(node_run_result.inputs))
if node_run_result.outputs:
span.set_attribute(ChainAttributes.OUTPUT_VALUE, safe_json_dumps(node_run_result.outputs))

if error:
span.record_exception(error)
span.set_status(Status(StatusCode.ERROR, str(error)))
else:
span.set_status(Status(StatusCode.OK))
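The recursive converter makes span attributes total: anything reachable from node inputs/outputs either serializes cleanly or degrades to `str()`. A minimal usage sketch of the new helper, assuming the module imports resolve in a dify checkout (`Usage` and the payload are illustrative; Segment and File behave analogously per the docstring):

```python
from pydantic import BaseModel

from extensions.otel.parser import safe_json_dumps

class Usage(BaseModel):
    prompt_tokens: int = 12

payload = {
    "usage": Usage(),    # BaseModel -> model_dump(mode="json")
    "tags": ("a", "b"),  # tuple -> list
    "handle": object(),  # unknown type -> str() fallback
}

print(safe_json_dumps(payload))
# {"usage": {"prompt_tokens": 12}, "tags": ["a", "b"], "handle": "<object object at 0x...>"}
```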
155
api/extensions/otel/parser/llm.py
Normal file
155
api/extensions/otel/parser/llm.py
Normal file
@ -0,0 +1,155 @@
"""
Parser for LLM nodes that captures LLM-specific metadata.
"""

import logging
from collections.abc import Mapping
from typing import Any

from opentelemetry.trace import Span

from core.workflow.graph_events import GraphNodeEventBase
from core.workflow.nodes.base.node import Node
from extensions.otel.parser.base import DefaultNodeOTelParser, safe_json_dumps
from extensions.otel.semconv.gen_ai import LLMAttributes

logger = logging.getLogger(__name__)


def _format_input_messages(process_data: Mapping[str, Any]) -> str:
    """
    Format input messages from process_data for LLM spans.

    Args:
        process_data: Process data containing prompts

    Returns:
        JSON string of formatted input messages
    """
    try:
        if not isinstance(process_data, dict):
            return safe_json_dumps([])

        prompts = process_data.get("prompts", [])
        if not prompts:
            return safe_json_dumps([])

        valid_roles = {"system", "user", "assistant", "tool"}
        input_messages = []
        for prompt in prompts:
            if not isinstance(prompt, dict):
                continue

            role = prompt.get("role", "")
            text = prompt.get("text", "")

            if not role or role not in valid_roles:
                continue

            if text:
                message = {"role": role, "parts": [{"type": "text", "content": text}]}
                input_messages.append(message)

        return safe_json_dumps(input_messages)
    except Exception as e:
        logger.warning("Failed to format input messages: %s", e, exc_info=True)
        return safe_json_dumps([])


def _format_output_messages(outputs: Mapping[str, Any]) -> str:
    """
    Format output messages from outputs for LLM spans.

    Args:
        outputs: Output data containing text and finish_reason

    Returns:
        JSON string of formatted output messages
    """
    try:
        if not isinstance(outputs, dict):
            return safe_json_dumps([])

        text = outputs.get("text", "")
        finish_reason = outputs.get("finish_reason", "")

        if not text:
            return safe_json_dumps([])

        valid_finish_reasons = {"stop", "length", "content_filter", "tool_call", "error"}
        if finish_reason not in valid_finish_reasons:
            finish_reason = "stop"

        output_message = {
            "role": "assistant",
            "parts": [{"type": "text", "content": text}],
            "finish_reason": finish_reason,
        }

        return safe_json_dumps([output_message])
    except Exception as e:
        logger.warning("Failed to format output messages: %s", e, exc_info=True)
        return safe_json_dumps([])


class LLMNodeOTelParser:
    """Parser for LLM nodes that captures LLM-specific metadata."""

    def __init__(self) -> None:
        self._delegate = DefaultNodeOTelParser()

    def parse(
        self, *, node: Node, span: "Span", error: Exception | None, result_event: GraphNodeEventBase | None = None
    ) -> None:
        self._delegate.parse(node=node, span=span, error=error, result_event=result_event)

        if not result_event or not result_event.node_run_result:
            return

        node_run_result = result_event.node_run_result
        process_data = node_run_result.process_data or {}
        outputs = node_run_result.outputs or {}

        # Extract usage data (from process_data or outputs)
        usage_data = process_data.get("usage") or outputs.get("usage") or {}

        # Model and provider information
        model_name = process_data.get("model_name") or ""
        model_provider = process_data.get("model_provider") or ""

        if model_name:
            span.set_attribute(LLMAttributes.REQUEST_MODEL, model_name)
        if model_provider:
            span.set_attribute(LLMAttributes.PROVIDER_NAME, model_provider)

        # Token usage
        if usage_data:
            prompt_tokens = usage_data.get("prompt_tokens", 0)
            completion_tokens = usage_data.get("completion_tokens", 0)
            total_tokens = usage_data.get("total_tokens", 0)

            span.set_attribute(LLMAttributes.USAGE_INPUT_TOKENS, prompt_tokens)
            span.set_attribute(LLMAttributes.USAGE_OUTPUT_TOKENS, completion_tokens)
            span.set_attribute(LLMAttributes.USAGE_TOTAL_TOKENS, total_tokens)

        # Prompts and completion
        prompts = process_data.get("prompts", [])
        if prompts:
            prompts_json = safe_json_dumps(prompts)
            span.set_attribute(LLMAttributes.PROMPT, prompts_json)

        text_output = str(outputs.get("text", ""))
        if text_output:
            span.set_attribute(LLMAttributes.COMPLETION, text_output)

        # Finish reason
        finish_reason = outputs.get("finish_reason") or ""
        if finish_reason:
            span.set_attribute(LLMAttributes.RESPONSE_FINISH_REASON, finish_reason)

        # Structured input/output messages
        gen_ai_input_message = _format_input_messages(process_data)
        gen_ai_output_message = _format_output_messages(outputs)

        span.set_attribute(LLMAttributes.INPUT_MESSAGE, gen_ai_input_message)
        span.set_attribute(LLMAttributes.OUTPUT_MESSAGE, gen_ai_output_message)
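A quick illustration of the message shaping above (not part of the diff; it assumes safe_json_dumps serializes like json.dumps): _format_input_messages keeps only prompts with a recognized role and non-empty text, and wraps each one in the structured parts format.

process_data = {"prompts": [{"role": "user", "text": "Hi"}, {"role": "narrator", "text": "ignored"}]}
print(_format_input_messages(process_data))
# -> [{"role": "user", "parts": [{"type": "text", "content": "Hi"}]}]
# The "narrator" prompt is dropped because its role is not in valid_roles.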
api/extensions/otel/parser/retrieval.py (new file, 105 lines)
@@ -0,0 +1,105 @@
"""
Parser for knowledge retrieval nodes that captures retrieval-specific metadata.
"""

import logging
from collections.abc import Sequence
from typing import Any

from opentelemetry.trace import Span

from core.variables import Segment
from core.workflow.graph_events import GraphNodeEventBase
from core.workflow.nodes.base.node import Node
from extensions.otel.parser.base import DefaultNodeOTelParser, safe_json_dumps
from extensions.otel.semconv.gen_ai import RetrieverAttributes

logger = logging.getLogger(__name__)


def _format_retrieval_documents(retrieval_documents: list[Any]) -> list:
    """
    Format retrieval documents for semantic conventions.

    Args:
        retrieval_documents: List of retrieval document dictionaries

    Returns:
        List of formatted semantic documents
    """
    try:
        if not isinstance(retrieval_documents, list):
            return []

        semantic_documents = []
        for doc in retrieval_documents:
            if not isinstance(doc, dict):
                continue

            metadata = doc.get("metadata", {})
            content = doc.get("content", "")
            title = doc.get("title", "")
            score = metadata.get("score", 0.0)
            document_id = metadata.get("document_id", "")

            semantic_metadata = {}
            if title:
                semantic_metadata["title"] = title
            if metadata.get("source"):
                semantic_metadata["source"] = metadata["source"]
            elif metadata.get("_source"):
                semantic_metadata["source"] = metadata["_source"]
            if metadata.get("doc_metadata"):
                doc_metadata = metadata["doc_metadata"]
                if isinstance(doc_metadata, dict):
                    semantic_metadata.update(doc_metadata)

            semantic_doc = {
                "document": {"content": content, "metadata": semantic_metadata, "score": score, "id": document_id}
            }
            semantic_documents.append(semantic_doc)

        return semantic_documents
    except Exception as e:
        logger.warning("Failed to format retrieval documents: %s", e, exc_info=True)
        return []


class RetrievalNodeOTelParser:
    """Parser for knowledge retrieval nodes that captures retrieval-specific metadata."""

    def __init__(self) -> None:
        self._delegate = DefaultNodeOTelParser()

    def parse(
        self, *, node: Node, span: "Span", error: Exception | None, result_event: GraphNodeEventBase | None = None
    ) -> None:
        self._delegate.parse(node=node, span=span, error=error, result_event=result_event)

        if not result_event or not result_event.node_run_result:
            return

        node_run_result = result_event.node_run_result
        inputs = node_run_result.inputs or {}
        outputs = node_run_result.outputs or {}

        # Extract query from inputs
        query = str(inputs.get("query", "")) if inputs else ""
        if query:
            span.set_attribute(RetrieverAttributes.QUERY, query)

        # Extract and format retrieval documents from outputs
        result_value = outputs.get("result") if outputs else None
        retrieval_documents: list[Any] = []
        if result_value:
            value_to_check = result_value
            if isinstance(result_value, Segment):
                value_to_check = result_value.value

            if isinstance(value_to_check, (list, Sequence)):
                retrieval_documents = list(value_to_check)

        if retrieval_documents:
            semantic_retrieval_documents = _format_retrieval_documents(retrieval_documents)
            semantic_retrieval_documents_json = safe_json_dumps(semantic_retrieval_documents)
            span.set_attribute(RetrieverAttributes.DOCUMENT, semantic_retrieval_documents_json)
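For reference, a sketch of what _format_retrieval_documents produces for a single document (illustrative input only; score and document_id fall back to 0.0 and "" when absent from metadata):

doc = {"content": "Dify docs", "title": "Intro", "metadata": {"score": 0.87, "document_id": "doc-1", "source": "kb"}}
print(_format_retrieval_documents([doc]))
# -> [{"document": {"content": "Dify docs",
#                   "metadata": {"title": "Intro", "source": "kb"},
#                   "score": 0.87, "id": "doc-1"}}]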
api/extensions/otel/parser/tool.py (new file, 47 lines)
@@ -0,0 +1,47 @@
"""
Parser for tool nodes that captures tool-specific metadata.
"""

from opentelemetry.trace import Span

from core.workflow.enums import WorkflowNodeExecutionMetadataKey
from core.workflow.graph_events import GraphNodeEventBase
from core.workflow.nodes.base.node import Node
from core.workflow.nodes.tool.entities import ToolNodeData
from extensions.otel.parser.base import DefaultNodeOTelParser, safe_json_dumps
from extensions.otel.semconv.gen_ai import ToolAttributes


class ToolNodeOTelParser:
    """Parser for tool nodes that captures tool-specific metadata."""

    def __init__(self) -> None:
        self._delegate = DefaultNodeOTelParser()

    def parse(
        self, *, node: Node, span: "Span", error: Exception | None, result_event: GraphNodeEventBase | None = None
    ) -> None:
        self._delegate.parse(node=node, span=span, error=error, result_event=result_event)

        tool_data = getattr(node, "_node_data", None)
        if not isinstance(tool_data, ToolNodeData):
            return

        span.set_attribute(ToolAttributes.TOOL_NAME, node.title)
        span.set_attribute(ToolAttributes.TOOL_TYPE, tool_data.provider_type.value)

        # Extract tool info from metadata (consistent with aliyun_trace)
        tool_info = {}
        if result_event and result_event.node_run_result:
            node_run_result = result_event.node_run_result
            if node_run_result.metadata:
                tool_info = node_run_result.metadata.get(WorkflowNodeExecutionMetadataKey.TOOL_INFO, {})

        if tool_info:
            span.set_attribute(ToolAttributes.TOOL_DESCRIPTION, safe_json_dumps(tool_info))

        if result_event and result_event.node_run_result and result_event.node_run_result.inputs:
            span.set_attribute(ToolAttributes.TOOL_CALL_ARGUMENTS, safe_json_dumps(result_event.node_run_result.inputs))

        if result_event and result_event.node_run_result and result_event.node_run_result.outputs:
            span.set_attribute(ToolAttributes.TOOL_CALL_RESULT, safe_json_dumps(result_event.node_run_result.outputs))
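All three parsers share the same shape: delegate to DefaultNodeOTelParser for the common node attributes, then layer type-specific attributes on top. The initialization test later in this diff asserts NodeType.TOOL in layer._parsers, so the layer keeps some mapping like this; the sketch below is a hypothetical reconstruction of that dispatch, not the actual ObservabilityLayer internals:

_PARSERS = {
    NodeType.LLM: LLMNodeOTelParser(),
    NodeType.KNOWLEDGE_RETRIEVAL: RetrievalNodeOTelParser(),
    NodeType.TOOL: ToolNodeOTelParser(),
}

def parser_for(node_type: NodeType):
    # Fall back to the default parser for node types without a specialized one.
    return _PARSERS.get(node_type, DefaultNodeOTelParser())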
@@ -1,6 +1,13 @@
 """Semantic convention shortcuts for Dify-specific spans."""

 from .dify import DifySpanAttributes
-from .gen_ai import GenAIAttributes
+from .gen_ai import ChainAttributes, GenAIAttributes, LLMAttributes, RetrieverAttributes, ToolAttributes

-__all__ = ["DifySpanAttributes", "GenAIAttributes"]
+__all__ = [
+    "ChainAttributes",
+    "DifySpanAttributes",
+    "GenAIAttributes",
+    "LLMAttributes",
+    "RetrieverAttributes",
+    "ToolAttributes",
+]
@@ -62,3 +62,37 @@ class ToolAttributes:

     TOOL_CALL_RESULT = "gen_ai.tool.call.result"
     """Tool invocation result."""
+
+
+class LLMAttributes:
+    """LLM operation attribute keys."""
+
+    REQUEST_MODEL = "gen_ai.request.model"
+    """Model identifier."""
+
+    PROVIDER_NAME = "gen_ai.provider.name"
+    """Provider name."""
+
+    USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens"
+    """Number of input tokens."""
+
+    USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens"
+    """Number of output tokens."""
+
+    USAGE_TOTAL_TOKENS = "gen_ai.usage.total_tokens"
+    """Total number of tokens."""
+
+    PROMPT = "gen_ai.prompt"
+    """Prompt text."""
+
+    COMPLETION = "gen_ai.completion"
+    """Completion text."""
+
+    RESPONSE_FINISH_REASON = "gen_ai.response.finish_reason"
+    """Finish reason for the response."""
+
+    INPUT_MESSAGE = "gen_ai.input.messages"
+    """Input messages in structured format."""
+
+    OUTPUT_MESSAGE = "gen_ai.output.messages"
+    """Output messages in structured format."""
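A minimal sketch of how these constants are meant to be attached to a span via the standard OpenTelemetry API (the tracer setup here is illustrative; in this diff the parsers receive an already-open span):

from opentelemetry import trace

tracer = trace.get_tracer(__name__)
with tracer.start_as_current_span("llm-node") as span:
    span.set_attribute(LLMAttributes.REQUEST_MODEL, "gpt-4")
    span.set_attribute(LLMAttributes.USAGE_INPUT_TOKENS, 10)
    span.set_attribute(LLMAttributes.USAGE_OUTPUT_TOKENS, 20)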
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
 from collections.abc import Callable
 from functools import wraps
-from typing import Any
+from typing import TYPE_CHECKING, Any

 from flask import current_app, g, has_request_context, request
 from flask_login.config import EXEMPT_METHODS
@@ -9,7 +11,9 @@ from werkzeug.local import LocalProxy
 from configs import dify_config
 from libs.token import check_csrf_token
 from models import Account
-from models.model import EndUser
+
+if TYPE_CHECKING:
+    from models.model import EndUser


 def current_account_with_tenant():
@@ -64,7 +64,7 @@ dependencies = [
     "pandas[excel,output-formatting,performance]~=2.2.2",
     "psycogreen~=1.0.2",
     "psycopg2-binary~=2.9.6",
-    "pycryptodome==3.19.1",
+    "pycryptodome==3.23.0",
     "pydantic~=2.11.4",
     "pydantic-extra-types~=2.10.3",
     "pydantic-settings~=2.11.0",
@@ -428,10 +428,10 @@ class AppDslService:

         # Set icon type
         icon_type_value = icon_type or app_data.get("icon_type")
-        if icon_type_value in [IconType.EMOJI.value, IconType.IMAGE.value, IconType.LINK.value]:
+        if icon_type_value in [IconType.EMOJI, IconType.IMAGE, IconType.LINK]:
             icon_type = icon_type_value
         else:
-            icon_type = IconType.EMOJI.value
+            icon_type = IconType.EMOJI
         icon = icon or str(app_data.get("icon", ""))

         if app:
@@ -131,7 +131,7 @@ class BillingService:
         headers = {"Content-Type": "application/json", "Billing-Api-Secret-Key": cls.secret_key}

         url = f"{cls.base_url}{endpoint}"
-        response = httpx.request(method, url, json=json, params=params, headers=headers)
+        response = httpx.request(method, url, json=json, params=params, headers=headers, follow_redirects=True)
         if method == "GET" and response.status_code != httpx.codes.OK:
             raise ValueError("Unable to retrieve billing information. Please try again later or contact support.")
         if method == "PUT":
@@ -143,6 +143,9 @@ class BillingService:
             raise ValueError("Invalid arguments.")
         if method == "POST" and response.status_code != httpx.codes.OK:
             raise ValueError(f"Unable to send request to {url}. Please try again later or contact support.")
+        if method == "DELETE" and response.status_code != httpx.codes.OK:
+            logger.error("billing_service: DELETE response: %s %s", response.status_code, response.text)
+            raise ValueError(f"Unable to process delete request {url}. Please try again later or contact support.")
         return response.json()

     @staticmethod
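Context on the follow_redirects change, as a sketch (the URL below is hypothetical): httpx does not follow redirects by default, so a server-side redirect from a trailing-slash alias would previously come back as the final response and trip the status-code checks above.

import httpx

# Hypothetical billing endpoint that redirects "/account/" to "/account".
# Without follow_redirects=True, `response` would be the redirect itself,
# and response.json() on its empty body would fail.
response = httpx.request(
    "DELETE",
    "https://billing.example.com/account/",
    params={"account_id": "abc"},
    follow_redirects=True,
)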
@@ -165,7 +168,7 @@ class BillingService:
     def delete_account(cls, account_id: str):
         """Delete account."""
         params = {"account_id": account_id}
-        return cls._send_request("DELETE", "/account/", params=params)
+        return cls._send_request("DELETE", "/account", params=params)

     @classmethod
     def is_email_in_freeze(cls, email: str) -> bool:
@@ -436,7 +436,7 @@ class RagPipelineService:
             user_inputs=user_inputs,
             user_id=account.id,
             variable_pool=VariablePool(
-                system_variables=SystemVariable.empty(),
+                system_variables=SystemVariable.default(),
                 user_inputs=user_inputs,
                 environment_variables=[],
                 conversation_variables=[],
@@ -675,7 +675,7 @@ class WorkflowService:

         else:
             variable_pool = VariablePool(
-                system_variables=SystemVariable.empty(),
+                system_variables=SystemVariable.default(),
                 user_inputs=user_inputs,
                 environment_variables=draft_workflow.environment_variables,
                 conversation_variables=[],
@@ -1063,7 +1063,7 @@ def _setup_variable_pool(
         system_variable.conversation_id = conversation_id
         system_variable.dialogue_count = 1
     else:
-        system_variable = SystemVariable.empty()
+        system_variable = SystemVariable.default()

     # init variable pool
     variable_pool = VariablePool(
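The recurring edit in these hunks, and in the test hunks that follow, is mechanical: every VariablePool seed moves from SystemVariable.empty() to SystemVariable.default(). A minimal sketch of the new call shape, mirroring the call sites above (what default() populates beyond empty() is not shown in this diff):

variable_pool = VariablePool(
    system_variables=SystemVariable.default(),  # previously SystemVariable.empty()
    user_inputs={},
    environment_variables=[],
    conversation_variables=[],
)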
@@ -0,0 +1,107 @@
from __future__ import annotations

from typing import Any
from unittest.mock import MagicMock, patch

import pytest

from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.apps.workflow.app_runner import WorkflowAppRunner
from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity
from core.workflow.runtime import GraphRuntimeState, VariablePool
from core.workflow.system_variable import SystemVariable
from models.workflow import Workflow


def _make_graph_state():
    variable_pool = VariablePool(
        system_variables=SystemVariable.default(),
        user_inputs={},
        environment_variables=[],
        conversation_variables=[],
    )
    return MagicMock(), variable_pool, GraphRuntimeState(variable_pool=variable_pool, start_at=0.0)


@pytest.mark.parametrize(
    ("single_iteration_run", "single_loop_run"),
    [
        (WorkflowAppGenerateEntity.SingleIterationRunEntity(node_id="iter", inputs={}), None),
        (None, WorkflowAppGenerateEntity.SingleLoopRunEntity(node_id="loop", inputs={})),
    ],
)
def test_run_uses_single_node_execution_branch(
    single_iteration_run: Any,
    single_loop_run: Any,
) -> None:
    app_config = MagicMock()
    app_config.app_id = "app"
    app_config.tenant_id = "tenant"
    app_config.workflow_id = "workflow"

    app_generate_entity = MagicMock(spec=WorkflowAppGenerateEntity)
    app_generate_entity.app_config = app_config
    app_generate_entity.inputs = {}
    app_generate_entity.files = []
    app_generate_entity.user_id = "user"
    app_generate_entity.invoke_from = InvokeFrom.SERVICE_API
    app_generate_entity.workflow_execution_id = "execution-id"
    app_generate_entity.task_id = "task-id"
    app_generate_entity.call_depth = 0
    app_generate_entity.trace_manager = None
    app_generate_entity.single_iteration_run = single_iteration_run
    app_generate_entity.single_loop_run = single_loop_run

    workflow = MagicMock(spec=Workflow)
    workflow.tenant_id = "tenant"
    workflow.app_id = "app"
    workflow.id = "workflow"
    workflow.type = "workflow"
    workflow.version = "v1"
    workflow.graph_dict = {"nodes": [], "edges": []}
    workflow.environment_variables = []

    runner = WorkflowAppRunner(
        application_generate_entity=app_generate_entity,
        queue_manager=MagicMock(spec=AppQueueManager),
        variable_loader=MagicMock(),
        workflow=workflow,
        system_user_id="system-user",
        workflow_execution_repository=MagicMock(),
        workflow_node_execution_repository=MagicMock(),
    )

    graph, variable_pool, graph_runtime_state = _make_graph_state()
    mock_workflow_entry = MagicMock()
    mock_workflow_entry.graph_engine = MagicMock()
    mock_workflow_entry.graph_engine.layer = MagicMock()
    mock_workflow_entry.run.return_value = iter([])

    with (
        patch("core.app.apps.workflow.app_runner.RedisChannel"),
        patch("core.app.apps.workflow.app_runner.redis_client"),
        patch("core.app.apps.workflow.app_runner.WorkflowEntry", return_value=mock_workflow_entry) as entry_class,
        patch.object(
            runner,
            "_prepare_single_node_execution",
            return_value=(
                graph,
                variable_pool,
                graph_runtime_state,
            ),
        ) as prepare_single,
        patch.object(runner, "_init_graph") as init_graph,
    ):
        runner.run()

    prepare_single.assert_called_once_with(
        workflow=workflow,
        single_iteration_run=single_iteration_run,
        single_loop_run=single_loop_run,
    )
    init_graph.assert_not_called()

    entry_kwargs = entry_class.call_args.kwargs
    assert entry_kwargs["invoke_from"] == InvokeFrom.DEBUGGER
    assert entry_kwargs["variable_pool"] is variable_pool
    assert entry_kwargs["graph_runtime_state"] is graph_runtime_state
@@ -0,0 +1,120 @@
from __future__ import annotations

from typing import Any

import pytest

from core.app.entities.app_invoke_entities import InvokeFrom
from core.app.workflow.node_factory import DifyNodeFactory
from core.workflow.entities import GraphInitParams
from core.workflow.graph import Graph
from core.workflow.graph.validation import GraphValidationError
from core.workflow.nodes import NodeType
from core.workflow.runtime import GraphRuntimeState, VariablePool
from core.workflow.system_variable import SystemVariable
from models.enums import UserFrom


def _build_iteration_graph(node_id: str) -> dict[str, Any]:
    return {
        "nodes": [
            {
                "id": node_id,
                "data": {
                    "type": "iteration",
                    "title": "Iteration",
                    "iterator_selector": ["start", "items"],
                    "output_selector": [node_id, "output"],
                },
            }
        ],
        "edges": [],
    }


def _build_loop_graph(node_id: str) -> dict[str, Any]:
    return {
        "nodes": [
            {
                "id": node_id,
                "data": {
                    "type": "loop",
                    "title": "Loop",
                    "loop_count": 1,
                    "break_conditions": [],
                    "logical_operator": "and",
                    "loop_variables": [],
                    "outputs": {},
                },
            }
        ],
        "edges": [],
    }


def _make_factory(graph_config: dict[str, Any]) -> DifyNodeFactory:
    graph_init_params = GraphInitParams(
        tenant_id="tenant",
        app_id="app",
        workflow_id="workflow",
        graph_config=graph_config,
        user_id="user",
        user_from=UserFrom.ACCOUNT,
        invoke_from=InvokeFrom.DEBUGGER,
        call_depth=0,
    )
    graph_runtime_state = GraphRuntimeState(
        variable_pool=VariablePool(
            system_variables=SystemVariable.default(),
            user_inputs={},
            environment_variables=[],
        ),
        start_at=0.0,
    )
    return DifyNodeFactory(graph_init_params=graph_init_params, graph_runtime_state=graph_runtime_state)


def test_iteration_root_requires_skip_validation():
    node_id = "iteration-node"
    graph_config = _build_iteration_graph(node_id)
    node_factory = _make_factory(graph_config)

    with pytest.raises(GraphValidationError):
        Graph.init(
            graph_config=graph_config,
            node_factory=node_factory,
            root_node_id=node_id,
        )

    graph = Graph.init(
        graph_config=graph_config,
        node_factory=node_factory,
        root_node_id=node_id,
        skip_validation=True,
    )

    assert graph.root_node.id == node_id
    assert graph.root_node.node_type == NodeType.ITERATION


def test_loop_root_requires_skip_validation():
    node_id = "loop-node"
    graph_config = _build_loop_graph(node_id)
    node_factory = _make_factory(graph_config)

    with pytest.raises(GraphValidationError):
        Graph.init(
            graph_config=graph_config,
            node_factory=node_factory,
            root_node_id=node_id,
        )

    graph = Graph.init(
        graph_config=graph_config,
        node_factory=node_factory,
        root_node_id=node_id,
        skip_validation=True,
    )

    assert graph.root_node.id == node_id
    assert graph.root_node.node_type == NodeType.LOOP
@@ -90,12 +90,47 @@ def mock_tool_node():
 @pytest.fixture
 def mock_is_instrument_flag_enabled_false():
     """Mock is_instrument_flag_enabled to return False."""
-    with patch("core.workflow.graph_engine.layers.observability.is_instrument_flag_enabled", return_value=False):
+    with patch("core.app.workflow.layers.observability.is_instrument_flag_enabled", return_value=False):
         yield


 @pytest.fixture
 def mock_is_instrument_flag_enabled_true():
     """Mock is_instrument_flag_enabled to return True."""
-    with patch("core.workflow.graph_engine.layers.observability.is_instrument_flag_enabled", return_value=True):
+    with patch("core.app.workflow.layers.observability.is_instrument_flag_enabled", return_value=True):
         yield
+
+
+@pytest.fixture
+def mock_retrieval_node():
+    """Create a mock Knowledge Retrieval Node."""
+    node = MagicMock()
+    node.id = "test-retrieval-node-id"
+    node.title = "Retrieval Node"
+    node.execution_id = "test-retrieval-execution-id"
+    node.node_type = NodeType.KNOWLEDGE_RETRIEVAL
+    return node
+
+
+@pytest.fixture
+def mock_result_event():
+    """Create a mock result event with NodeRunResult."""
+    from datetime import datetime
+
+    from core.workflow.graph_events.node import NodeRunSucceededEvent
+    from core.workflow.node_events.base import NodeRunResult
+
+    node_run_result = NodeRunResult(
+        inputs={"query": "test query"},
+        outputs={"result": [{"content": "test content", "metadata": {}}]},
+        process_data={},
+        metadata={},
+    )
+
+    return NodeRunSucceededEvent(
+        id="test-execution-id",
+        node_id="test-node-id",
+        node_type=NodeType.LLM,
+        start_at=datetime.now(),
+        node_run_result=node_run_result,
+    )
@@ -4,7 +4,8 @@ Tests for ObservabilityLayer.
 Test coverage:
 - Initialization and enable/disable logic
 - Node span lifecycle (start, end, error handling)
-- Parser integration (default and tool-specific)
+- Parser integration (default, tool, LLM, and retrieval parsers)
+- Result event parameter extraction (inputs/outputs)
 - Graph lifecycle management
 - Disabled mode behavior
 """
@@ -14,14 +15,14 @@ from unittest.mock import patch
 import pytest
 from opentelemetry.trace import StatusCode

+from core.app.workflow.layers.observability import ObservabilityLayer
 from core.workflow.enums import NodeType
-from core.workflow.graph_engine.layers.observability import ObservabilityLayer


 class TestObservabilityLayerInitialization:
     """Test ObservabilityLayer initialization logic."""

-    @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True)
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", True)
     @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false")
     def test_initialization_when_otel_enabled(self, tracer_provider_with_memory_exporter):
         """Test that layer initializes correctly when OTel is enabled."""
@@ -31,7 +32,7 @@ class TestObservabilityLayerInitialization:
         assert NodeType.TOOL in layer._parsers
         assert layer._default_parser is not None

-    @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", False)
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", False)
     @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_true")
     def test_initialization_when_instrument_flag_enabled(self, tracer_provider_with_memory_exporter):
         """Test that layer enables when instrument flag is enabled."""
@@ -45,7 +46,7 @@
 class TestObservabilityLayerNodeSpanLifecycle:
     """Test node span creation and lifecycle management."""

-    @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True)
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", True)
     @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false")
     def test_node_span_created_and_ended(
         self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_llm_node
@@ -62,7 +63,7 @@ class TestObservabilityLayerNodeSpanLifecycle:
         assert spans[0].name == mock_llm_node.title
         assert spans[0].status.status_code == StatusCode.OK

-    @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True)
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", True)
     @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false")
     def test_node_error_recorded_in_span(
         self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_llm_node
@@ -81,7 +82,7 @@ class TestObservabilityLayerNodeSpanLifecycle:
         assert len(spans[0].events) > 0
         assert any("exception" in event.name.lower() for event in spans[0].events)

-    @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True)
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", True)
     @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false")
     def test_node_end_without_start_handled_gracefully(
         self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_llm_node
@@ -99,7 +100,7 @@
 class TestObservabilityLayerParserIntegration:
     """Test parser integration for different node types."""

-    @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True)
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", True)
    @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false")
     def test_default_parser_used_for_regular_node(
         self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_start_node
@@ -118,7 +119,7 @@ class TestObservabilityLayerParserIntegration:
         assert attrs["node.execution_id"] == mock_start_node.execution_id
         assert attrs["node.type"] == mock_start_node.node_type.value

-    @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True)
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", True)
     @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false")
     def test_tool_parser_used_for_tool_node(
         self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_tool_node
@@ -134,15 +135,107 @@ class TestObservabilityLayerParserIntegration:
         assert len(spans) == 1
         attrs = spans[0].attributes
         assert attrs["node.id"] == mock_tool_node.id
-        assert attrs["tool.provider.id"] == mock_tool_node._node_data.provider_id
-        assert attrs["tool.provider.type"] == mock_tool_node._node_data.provider_type.value
-        assert attrs["tool.name"] == mock_tool_node._node_data.tool_name
+        assert attrs["gen_ai.tool.name"] == mock_tool_node.title
+        assert attrs["gen_ai.tool.type"] == mock_tool_node._node_data.provider_type.value
+
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", True)
+    @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false")
+    def test_llm_parser_used_for_llm_node(
+        self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_llm_node, mock_result_event
+    ):
+        """Test that LLM parser is used for LLM nodes and extracts LLM-specific attributes."""
+        from core.workflow.node_events.base import NodeRunResult
+
+        mock_result_event.node_run_result = NodeRunResult(
+            inputs={},
+            outputs={"text": "test completion", "finish_reason": "stop"},
+            process_data={
+                "model_name": "gpt-4",
+                "model_provider": "openai",
+                "usage": {"prompt_tokens": 10, "completion_tokens": 20, "total_tokens": 30},
+                "prompts": [{"role": "user", "text": "test prompt"}],
+            },
+            metadata={},
+        )
+
+        layer = ObservabilityLayer()
+        layer.on_graph_start()
+
+        layer.on_node_run_start(mock_llm_node)
+        layer.on_node_run_end(mock_llm_node, None, mock_result_event)
+
+        spans = memory_span_exporter.get_finished_spans()
+        assert len(spans) == 1
+        attrs = spans[0].attributes
+        assert attrs["node.id"] == mock_llm_node.id
+        assert attrs["gen_ai.request.model"] == "gpt-4"
+        assert attrs["gen_ai.provider.name"] == "openai"
+        assert attrs["gen_ai.usage.input_tokens"] == 10
+        assert attrs["gen_ai.usage.output_tokens"] == 20
+        assert attrs["gen_ai.usage.total_tokens"] == 30
+        assert attrs["gen_ai.completion"] == "test completion"
+        assert attrs["gen_ai.response.finish_reason"] == "stop"
+
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", True)
+    @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false")
+    def test_retrieval_parser_used_for_retrieval_node(
+        self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_retrieval_node, mock_result_event
+    ):
+        """Test that retrieval parser is used for retrieval nodes and extracts retrieval-specific attributes."""
+        from core.workflow.node_events.base import NodeRunResult
+
+        mock_result_event.node_run_result = NodeRunResult(
+            inputs={"query": "test query"},
+            outputs={"result": [{"content": "test content", "metadata": {"score": 0.9, "document_id": "doc1"}}]},
+            process_data={},
+            metadata={},
+        )
+
+        layer = ObservabilityLayer()
+        layer.on_graph_start()
+
+        layer.on_node_run_start(mock_retrieval_node)
+        layer.on_node_run_end(mock_retrieval_node, None, mock_result_event)
+
+        spans = memory_span_exporter.get_finished_spans()
+        assert len(spans) == 1
+        attrs = spans[0].attributes
+        assert attrs["node.id"] == mock_retrieval_node.id
+        assert attrs["retrieval.query"] == "test query"
+        assert "retrieval.document" in attrs
+
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", True)
+    @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false")
+    def test_result_event_extracts_inputs_and_outputs(
+        self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_start_node, mock_result_event
+    ):
+        """Test that result_event parameter allows parsers to extract inputs and outputs."""
+        from core.workflow.node_events.base import NodeRunResult
+
+        mock_result_event.node_run_result = NodeRunResult(
+            inputs={"input_key": "input_value"},
+            outputs={"output_key": "output_value"},
+            process_data={},
+            metadata={},
+        )
+
+        layer = ObservabilityLayer()
+        layer.on_graph_start()
+
+        layer.on_node_run_start(mock_start_node)
+        layer.on_node_run_end(mock_start_node, None, mock_result_event)
+
+        spans = memory_span_exporter.get_finished_spans()
+        assert len(spans) == 1
+        attrs = spans[0].attributes
+        assert "input.value" in attrs
+        assert "output.value" in attrs


 class TestObservabilityLayerGraphLifecycle:
     """Test graph lifecycle management."""

-    @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True)
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", True)
     @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false")
     def test_on_graph_start_clears_contexts(self, tracer_provider_with_memory_exporter, mock_llm_node):
         """Test that on_graph_start clears node contexts."""
@@ -155,7 +248,7 @@ class TestObservabilityLayerGraphLifecycle:
         layer.on_graph_start()
         assert len(layer._node_contexts) == 0

-    @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True)
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", True)
     @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false")
     def test_on_graph_end_with_no_unfinished_spans(
         self, tracer_provider_with_memory_exporter, memory_span_exporter, mock_llm_node
@@ -171,7 +264,7 @@ class TestObservabilityLayerGraphLifecycle:
         spans = memory_span_exporter.get_finished_spans()
         assert len(spans) == 1

-    @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", True)
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", True)
     @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false")
     def test_on_graph_end_with_unfinished_spans_logs_warning(
         self, tracer_provider_with_memory_exporter, mock_llm_node, caplog
@@ -192,7 +285,7 @@
 class TestObservabilityLayerDisabledMode:
     """Test behavior when layer is disabled."""

-    @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", False)
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", False)
     @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false")
     def test_disabled_mode_skips_node_start(self, memory_span_exporter, mock_start_node):
         """Test that disabled layer doesn't create spans on node start."""
@@ -206,7 +299,7 @@ class TestObservabilityLayerDisabledMode:
         spans = memory_span_exporter.get_finished_spans()
         assert len(spans) == 0

-    @patch("core.workflow.graph_engine.layers.observability.dify_config.ENABLE_OTEL", False)
+    @patch("core.app.workflow.layers.observability.dify_config.ENABLE_OTEL", False)
     @pytest.mark.usefixtures("mock_is_instrument_flag_enabled_false")
     def test_disabled_mode_skips_node_end(self, memory_span_exporter, mock_llm_node):
         """Test that disabled layer doesn't process node end."""
|
||||
def test_executor_with_json_body_and_number_variable():
|
||||
# Prepare the variable pool
|
||||
variable_pool = VariablePool(
|
||||
system_variables=SystemVariable.empty(),
|
||||
system_variables=SystemVariable.default(),
|
||||
user_inputs={},
|
||||
)
|
||||
variable_pool.add(["pre_node_id", "number"], 42)
|
||||
@ -69,7 +69,7 @@ def test_executor_with_json_body_and_number_variable():
|
||||
def test_executor_with_json_body_and_object_variable():
|
||||
# Prepare the variable pool
|
||||
variable_pool = VariablePool(
|
||||
system_variables=SystemVariable.empty(),
|
||||
system_variables=SystemVariable.default(),
|
||||
user_inputs={},
|
||||
)
|
||||
variable_pool.add(["pre_node_id", "object"], {"name": "John Doe", "age": 30, "email": "john@example.com"})
|
||||
@ -124,7 +124,7 @@ def test_executor_with_json_body_and_object_variable():
|
||||
def test_executor_with_json_body_and_nested_object_variable():
|
||||
# Prepare the variable pool
|
||||
variable_pool = VariablePool(
|
||||
system_variables=SystemVariable.empty(),
|
||||
system_variables=SystemVariable.default(),
|
||||
user_inputs={},
|
||||
)
|
||||
variable_pool.add(["pre_node_id", "object"], {"name": "John Doe", "age": 30, "email": "john@example.com"})
|
||||
@ -178,7 +178,7 @@ def test_executor_with_json_body_and_nested_object_variable():
|
||||
|
||||
|
||||
def test_extract_selectors_from_template_with_newline():
|
||||
variable_pool = VariablePool(system_variables=SystemVariable.empty())
|
||||
variable_pool = VariablePool(system_variables=SystemVariable.default())
|
||||
variable_pool.add(("node_id", "custom_query"), "line1\nline2")
|
||||
node_data = HttpRequestNodeData(
|
||||
title="Test JSON Body with Nested Object Variable",
|
||||
@ -205,7 +205,7 @@ def test_extract_selectors_from_template_with_newline():
|
||||
def test_executor_with_form_data():
|
||||
# Prepare the variable pool
|
||||
variable_pool = VariablePool(
|
||||
system_variables=SystemVariable.empty(),
|
||||
system_variables=SystemVariable.default(),
|
||||
user_inputs={},
|
||||
)
|
||||
variable_pool.add(["pre_node_id", "text_field"], "Hello, World!")
|
||||
@ -290,7 +290,7 @@ def test_init_headers():
|
||||
return Executor(
|
||||
node_data=node_data,
|
||||
timeout=timeout,
|
||||
variable_pool=VariablePool(system_variables=SystemVariable.empty()),
|
||||
variable_pool=VariablePool(system_variables=SystemVariable.default()),
|
||||
)
|
||||
|
||||
executor = create_executor("aa\n cc:")
|
||||
@ -324,7 +324,7 @@ def test_init_params():
|
||||
return Executor(
|
||||
node_data=node_data,
|
||||
timeout=timeout,
|
||||
variable_pool=VariablePool(system_variables=SystemVariable.empty()),
|
||||
variable_pool=VariablePool(system_variables=SystemVariable.default()),
|
||||
)
|
||||
|
||||
# Test basic key-value pairs
|
||||
@ -355,7 +355,7 @@ def test_init_params():
|
||||
|
||||
def test_empty_api_key_raises_error_bearer():
|
||||
"""Test that empty API key raises AuthorizationConfigError for bearer auth."""
|
||||
variable_pool = VariablePool(system_variables=SystemVariable.empty())
|
||||
variable_pool = VariablePool(system_variables=SystemVariable.default())
|
||||
node_data = HttpRequestNodeData(
|
||||
title="test",
|
||||
method="get",
|
||||
@ -379,7 +379,7 @@ def test_empty_api_key_raises_error_bearer():
|
||||
|
||||
def test_empty_api_key_raises_error_basic():
|
||||
"""Test that empty API key raises AuthorizationConfigError for basic auth."""
|
||||
variable_pool = VariablePool(system_variables=SystemVariable.empty())
|
||||
variable_pool = VariablePool(system_variables=SystemVariable.default())
|
||||
node_data = HttpRequestNodeData(
|
||||
title="test",
|
||||
method="get",
|
||||
@ -403,7 +403,7 @@ def test_empty_api_key_raises_error_basic():
|
||||
|
||||
def test_empty_api_key_raises_error_custom():
|
||||
"""Test that empty API key raises AuthorizationConfigError for custom auth."""
|
||||
variable_pool = VariablePool(system_variables=SystemVariable.empty())
|
||||
variable_pool = VariablePool(system_variables=SystemVariable.default())
|
||||
node_data = HttpRequestNodeData(
|
||||
title="test",
|
||||
method="get",
|
||||
@ -427,7 +427,7 @@ def test_empty_api_key_raises_error_custom():
|
||||
|
||||
def test_whitespace_only_api_key_raises_error():
|
||||
"""Test that whitespace-only API key raises AuthorizationConfigError."""
|
||||
variable_pool = VariablePool(system_variables=SystemVariable.empty())
|
||||
variable_pool = VariablePool(system_variables=SystemVariable.default())
|
||||
node_data = HttpRequestNodeData(
|
||||
title="test",
|
||||
method="get",
|
||||
@ -451,7 +451,7 @@ def test_whitespace_only_api_key_raises_error():
|
||||
|
||||
def test_valid_api_key_works():
|
||||
"""Test that valid API key works correctly for bearer auth."""
|
||||
variable_pool = VariablePool(system_variables=SystemVariable.empty())
|
||||
variable_pool = VariablePool(system_variables=SystemVariable.default())
|
||||
node_data = HttpRequestNodeData(
|
||||
title="test",
|
||||
method="get",
|
||||
@ -487,7 +487,7 @@ def test_executor_with_json_body_and_unquoted_uuid_variable():
|
||||
test_uuid = "57eeeeb1-450b-482c-81b9-4be77e95dee2"
|
||||
|
||||
variable_pool = VariablePool(
|
||||
system_variables=SystemVariable.empty(),
|
||||
system_variables=SystemVariable.default(),
|
||||
user_inputs={},
|
||||
)
|
||||
variable_pool.add(["pre_node_id", "uuid"], test_uuid)
|
||||
@ -531,7 +531,7 @@ def test_executor_with_json_body_and_unquoted_uuid_with_newlines():
|
||||
test_uuid = "57eeeeb1-450b-482c-81b9-4be77e95dee2"
|
||||
|
||||
variable_pool = VariablePool(
|
||||
system_variables=SystemVariable.empty(),
|
||||
system_variables=SystemVariable.default(),
|
||||
user_inputs={},
|
||||
)
|
||||
variable_pool.add(["pre_node_id", "uuid"], test_uuid)
|
||||
@ -569,7 +569,7 @@ def test_executor_with_json_body_and_unquoted_uuid_with_newlines():
|
||||
def test_executor_with_json_body_preserves_numbers_and_strings():
|
||||
"""Test that numbers are preserved and string values are properly quoted."""
|
||||
variable_pool = VariablePool(
|
||||
system_variables=SystemVariable.empty(),
|
||||
system_variables=SystemVariable.default(),
|
||||
user_inputs={},
|
||||
)
|
||||
variable_pool.add(["node", "count"], 42)
|
||||
|
||||
@@ -86,7 +86,7 @@ def graph_init_params() -> GraphInitParams:
 @pytest.fixture
 def graph_runtime_state() -> GraphRuntimeState:
     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={},
     )
     return GraphRuntimeState(
@@ -111,7 +111,7 @@ def test_webhook_node_file_conversion_to_file_variable():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {
                 "headers": {},
@@ -184,7 +184,7 @@ def test_webhook_node_file_conversion_with_missing_files():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {
                 "headers": {},
@@ -219,7 +219,7 @@ def test_webhook_node_file_conversion_with_none_file():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {
                 "headers": {},
@@ -256,7 +256,7 @@ def test_webhook_node_file_conversion_with_non_dict_file():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {
                 "headers": {},
@@ -300,7 +300,7 @@ def test_webhook_node_file_conversion_mixed_parameters():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {
                 "headers": {},
@@ -370,7 +370,7 @@ def test_webhook_node_different_file_types():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {
                 "headers": {},
@@ -430,7 +430,7 @@ def test_webhook_node_file_conversion_with_non_dict_wrapper():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {
                 "headers": {},
@@ -75,7 +75,7 @@ def test_webhook_node_basic_initialization():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={},
     )

@@ -118,7 +118,7 @@ def test_webhook_node_run_with_headers():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {
                 "headers": {
@@ -154,7 +154,7 @@ def test_webhook_node_run_with_query_params():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {
                 "headers": {},
@@ -190,7 +190,7 @@ def test_webhook_node_run_with_body_params():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {
                 "headers": {},
@@ -249,7 +249,7 @@ def test_webhook_node_run_with_file_params():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {
                 "headers": {},
@@ -302,7 +302,7 @@ def test_webhook_node_run_mixed_parameters():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {
                 "headers": {"Authorization": "Bearer token"},
@@ -342,7 +342,7 @@ def test_webhook_node_run_empty_webhook_data():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={},  # No webhook_data
     )

@@ -368,7 +368,7 @@ def test_webhook_node_run_case_insensitive_headers():
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {
                 "headers": {
@@ -398,7 +398,7 @@ def test_webhook_node_variable_pool_user_inputs():

     # Add some additional variables to the pool
     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {"headers": {}, "query_params": {}, "body": {}, "files": {}},
             "other_var": "should_be_included",
@@ -429,7 +429,7 @@ def test_webhook_node_different_methods(method):
     )

     variable_pool = VariablePool(
-        system_variables=SystemVariable.empty(),
+        system_variables=SystemVariable.default(),
         user_inputs={
             "webhook_data": {
                 "headers": {},
@@ -127,7 +127,7 @@ class TestWorkflowEntry:
             return node_config

         workflow = StubWorkflow()
-        variable_pool = VariablePool(system_variables=SystemVariable.empty(), user_inputs={})
+        variable_pool = VariablePool(system_variables=SystemVariable.default(), user_inputs={})
         expected_limits = CodeNodeLimits(
             max_string_length=dify_config.CODE_MAX_STRING_LENGTH,
             max_number=dify_config.CODE_MAX_NUMBER,
@@ -157,7 +157,7 @@ class TestWorkflowEntry:
         # Initialize variable pool with environment variables
         env_var = StringVariable(name="API_KEY", value="existing_key")
         variable_pool = VariablePool(
-            system_variables=SystemVariable.empty(),
+            system_variables=SystemVariable.default(),
             environment_variables=[env_var],
             user_inputs={},
         )
@@ -198,7 +198,7 @@ class TestWorkflowEntry:
         # Initialize variable pool with conversation variables
         conv_var = StringVariable(name="last_message", value="Hello")
         variable_pool = VariablePool(
-            system_variables=SystemVariable.empty(),
+            system_variables=SystemVariable.default(),
             conversation_variables=[conv_var],
             user_inputs={},
         )
@@ -239,7 +239,7 @@ class TestWorkflowEntry:
         """Test mapping regular node variables from user inputs to variable pool."""
         # Initialize empty variable pool
         variable_pool = VariablePool(
-            system_variables=SystemVariable.empty(),
+            system_variables=SystemVariable.default(),
             user_inputs={},
         )

@@ -281,7 +281,7 @@ class TestWorkflowEntry:
     def test_mapping_user_inputs_with_file_handling(self):
         """Test mapping file inputs from user inputs to variable pool."""
         variable_pool = VariablePool(
-            system_variables=SystemVariable.empty(),
+            system_variables=SystemVariable.default(),
             user_inputs={},
         )

@@ -340,7 +340,7 @@ class TestWorkflowEntry:
     def test_mapping_user_inputs_missing_variable_error(self):
         """Test that mapping raises error when required variable is missing."""
         variable_pool = VariablePool(
-            system_variables=SystemVariable.empty(),
+            system_variables=SystemVariable.default(),
             user_inputs={},
         )

@@ -366,7 +366,7 @@ class TestWorkflowEntry:
     def test_mapping_user_inputs_with_alternative_key_format(self):
         """Test mapping with alternative key format (without node prefix)."""
         variable_pool = VariablePool(
-            system_variables=SystemVariable.empty(),
+            system_variables=SystemVariable.default(),
             user_inputs={},
         )

@@ -396,7 +396,7 @@ class TestWorkflowEntry:
     def test_mapping_user_inputs_with_complex_selectors(self):
         """Test mapping with complex node variable keys."""
         variable_pool = VariablePool(
-            system_variables=SystemVariable.empty(),
+            system_variables=SystemVariable.default(),
             user_inputs={},
         )

@@ -432,7 +432,7 @@ class TestWorkflowEntry:
     def test_mapping_user_inputs_invalid_node_variable(self):
         """Test that mapping handles invalid node variable format."""
         variable_pool = VariablePool(
-            system_variables=SystemVariable.empty(),
+            system_variables=SystemVariable.default(),
             user_inputs={},
         )
@@ -171,22 +171,26 @@ class TestBillingServiceSendRequest:
         "status_code", [httpx.codes.BAD_REQUEST, httpx.codes.INTERNAL_SERVER_ERROR, httpx.codes.NOT_FOUND]
     )
     def test_delete_request_non_200_with_valid_json(self, mock_httpx_request, mock_billing_config, status_code):
-        """Test DELETE request with non-200 status code but valid JSON response.
+        """Test DELETE request with non-200 status code raises ValueError.

-        DELETE doesn't check status code, so it returns the error JSON.
+        DELETE now checks status code and raises ValueError for non-200 responses.
         """
         # Arrange
         error_response = {"detail": "Error message"}
         mock_response = MagicMock()
         mock_response.status_code = status_code
+        mock_response.text = "Error message"
        mock_response.json.return_value = error_response
         mock_httpx_request.return_value = mock_response

-        # Act
-        result = BillingService._send_request("DELETE", "/test", json={"key": "value"})
-
-        # Assert
-        assert result == error_response
+        # Act & Assert
+        with patch("services.billing_service.logger") as mock_logger:
+            with pytest.raises(ValueError) as exc_info:
+                BillingService._send_request("DELETE", "/test", json={"key": "value"})
+            assert "Unable to process delete request" in str(exc_info.value)
+            # Verify error logging
+            mock_logger.error.assert_called_once()
+            assert "DELETE response" in str(mock_logger.error.call_args)

     @pytest.mark.parametrize(
         "status_code", [httpx.codes.BAD_REQUEST, httpx.codes.INTERNAL_SERVER_ERROR, httpx.codes.NOT_FOUND]
@@ -210,9 +214,9 @@ class TestBillingServiceSendRequest:
         "status_code", [httpx.codes.BAD_REQUEST, httpx.codes.INTERNAL_SERVER_ERROR, httpx.codes.NOT_FOUND]
     )
     def test_delete_request_non_200_with_invalid_json(self, mock_httpx_request, mock_billing_config, status_code):
-        """Test DELETE request with non-200 status code and invalid JSON response raises exception.
+        """Test DELETE request with non-200 status code raises ValueError before JSON parsing.

-        DELETE doesn't check status code, so it calls response.json() which raises JSONDecodeError
+        DELETE now checks status code before calling response.json(), so ValueError is raised
         when the response cannot be parsed as JSON (e.g., empty response).
         """
         # Arrange
@@ -223,8 +227,13 @@ class TestBillingServiceSendRequest:
         mock_httpx_request.return_value = mock_response

         # Act & Assert
-        with pytest.raises(json.JSONDecodeError):
-            BillingService._send_request("DELETE", "/test", json={"key": "value"})
+        with patch("services.billing_service.logger") as mock_logger:
+            with pytest.raises(ValueError) as exc_info:
+                BillingService._send_request("DELETE", "/test", json={"key": "value"})
+            assert "Unable to process delete request" in str(exc_info.value)
+            # Verify error logging
+            mock_logger.error.assert_called_once()
+            assert "DELETE response" in str(mock_logger.error.call_args)

     def test_retry_on_request_error(self, mock_httpx_request, mock_billing_config):
         """Test that _send_request retries on httpx.RequestError."""
@@ -789,7 +798,7 @@ class TestBillingServiceAccountManagement:

         # Assert
         assert result == expected_response
-        mock_send_request.assert_called_once_with("DELETE", "/account/", params={"account_id": account_id})
+        mock_send_request.assert_called_once_with("DELETE", "/account", params={"account_id": account_id})

     def test_is_email_in_freeze_true(self, mock_send_request):
         """Test checking if email is frozen (returns True)."""
api/uv.lock (generated, 33 lines)
@ -1633,7 +1633,7 @@ requires-dist = [
|
||||
{ name = "pandas", extras = ["excel", "output-formatting", "performance"], specifier = "~=2.2.2" },
|
||||
{ name = "psycogreen", specifier = "~=1.0.2" },
|
||||
{ name = "psycopg2-binary", specifier = "~=2.9.6" },
|
||||
{ name = "pycryptodome", specifier = "==3.19.1" },
|
||||
{ name = "pycryptodome", specifier = "==3.23.0" },
|
||||
{ name = "pydantic", specifier = "~=2.11.4" },
|
||||
{ name = "pydantic-extra-types", specifier = "~=2.10.3" },
|
||||
{ name = "pydantic-settings", specifier = "~=2.11.0" },
|
||||
@ -4796,20 +4796,21 @@ wheels = [

[[package]]
name = "pycryptodome"
version = "3.19.1"
version = "3.23.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b1/38/42a8855ff1bf568c61ca6557e2203f318fb7afeadaf2eb8ecfdbde107151/pycryptodome-3.19.1.tar.gz", hash = "sha256:8ae0dd1bcfada451c35f9e29a3e5db385caabc190f98e4a80ad02a61098fb776", size = 4782144, upload-time = "2023-12-28T06:52:40.741Z" }
sdist = { url = "https://files.pythonhosted.org/packages/8e/a6/8452177684d5e906854776276ddd34eca30d1b1e15aa1ee9cefc289a33f5/pycryptodome-3.23.0.tar.gz", hash = "sha256:447700a657182d60338bab09fdb27518f8856aecd80ae4c6bdddb67ff5da44ef", size = 4921276, upload-time = "2025-05-17T17:21:45.242Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/a8/ef/4931bc30674f0de0ca0e827b58c8b0c17313a8eae2754976c610b866118b/pycryptodome-3.19.1-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:67939a3adbe637281c611596e44500ff309d547e932c449337649921b17b6297", size = 2417027, upload-time = "2023-12-28T06:51:50.138Z" },
    { url = "https://files.pythonhosted.org/packages/67/e6/238c53267fd8d223029c0a0d3730cb1b6594d60f62e40c4184703dc490b1/pycryptodome-3.19.1-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:11ddf6c9b52116b62223b6a9f4741bc4f62bb265392a4463282f7f34bb287180", size = 1579728, upload-time = "2023-12-28T06:51:52.385Z" },
    { url = "https://files.pythonhosted.org/packages/7c/87/7181c42c8d5ba89822a4b824830506d0aeec02959bb893614767e3279846/pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3e6f89480616781d2a7f981472d0cdb09b9da9e8196f43c1234eff45c915766", size = 2051440, upload-time = "2023-12-28T06:51:55.751Z" },
    { url = "https://files.pythonhosted.org/packages/34/dd/332c4c0055527d17dac317ed9f9c864fc047b627d82f4b9a56c110afc6fc/pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e1efcb68993b7ce5d1d047a46a601d41281bba9f1971e6be4aa27c69ab8065", size = 2125379, upload-time = "2023-12-28T06:51:58.567Z" },
    { url = "https://files.pythonhosted.org/packages/24/9e/320b885ea336c218ff54ec2b276cd70ba6904e4f5a14a771ed39a2c47d59/pycryptodome-3.19.1-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c6273ca5a03b672e504995529b8bae56da0ebb691d8ef141c4aa68f60765700", size = 2153951, upload-time = "2023-12-28T06:52:01.699Z" },
    { url = "https://files.pythonhosted.org/packages/f4/54/8ae0c43d1257b41bc9d3277c3f875174fd8ad86b9567f0b8609b99c938ee/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b0bfe61506795877ff974f994397f0c862d037f6f1c0bfc3572195fc00833b96", size = 2044041, upload-time = "2023-12-28T06:52:03.737Z" },
    { url = "https://files.pythonhosted.org/packages/45/93/f8450a92cc38541c3ba1f4cb4e267e15ae6d6678ca617476d52c3a3764d4/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:f34976c5c8eb79e14c7d970fb097482835be8d410a4220f86260695ede4c3e17", size = 2182446, upload-time = "2023-12-28T06:52:05.588Z" },
    { url = "https://files.pythonhosted.org/packages/af/cd/ed6e429fb0792ce368f66e83246264dd3a7a045b0b1e63043ed22a063ce5/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7c9e222d0976f68d0cf6409cfea896676ddc1d98485d601e9508f90f60e2b0a2", size = 2144914, upload-time = "2023-12-28T06:52:07.44Z" },
    { url = "https://files.pythonhosted.org/packages/f6/23/b064bd4cfbf2cc5f25afcde0e7c880df5b20798172793137ba4b62d82e72/pycryptodome-3.19.1-cp35-abi3-win32.whl", hash = "sha256:4805e053571140cb37cf153b5c72cd324bb1e3e837cbe590a19f69b6cf85fd03", size = 1713105, upload-time = "2023-12-28T06:52:09.585Z" },
    { url = "https://files.pythonhosted.org/packages/7d/e0/ded1968a5257ab34216a0f8db7433897a2337d59e6d03be113713b346ea2/pycryptodome-3.19.1-cp35-abi3-win_amd64.whl", hash = "sha256:a470237ee71a1efd63f9becebc0ad84b88ec28e6784a2047684b693f458f41b7", size = 1749222, upload-time = "2023-12-28T06:52:11.534Z" },
    { url = "https://files.pythonhosted.org/packages/db/6c/a1f71542c969912bb0e106f64f60a56cc1f0fabecf9396f45accbe63fa68/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:187058ab80b3281b1de11c2e6842a357a1f71b42cb1e15bce373f3d238135c27", size = 2495627, upload-time = "2025-05-17T17:20:47.139Z" },
    { url = "https://files.pythonhosted.org/packages/6e/4e/a066527e079fc5002390c8acdd3aca431e6ea0a50ffd7201551175b47323/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:cfb5cd445280c5b0a4e6187a7ce8de5a07b5f3f897f235caa11f1f435f182843", size = 1640362, upload-time = "2025-05-17T17:20:50.392Z" },
    { url = "https://files.pythonhosted.org/packages/50/52/adaf4c8c100a8c49d2bd058e5b551f73dfd8cb89eb4911e25a0c469b6b4e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67bd81fcbe34f43ad9422ee8fd4843c8e7198dd88dd3d40e6de42ee65fbe1490", size = 2182625, upload-time = "2025-05-17T17:20:52.866Z" },
    { url = "https://files.pythonhosted.org/packages/5f/e9/a09476d436d0ff1402ac3867d933c61805ec2326c6ea557aeeac3825604e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8987bd3307a39bc03df5c8e0e3d8be0c4c3518b7f044b0f4c15d1aa78f52575", size = 2268954, upload-time = "2025-05-17T17:20:55.027Z" },
    { url = "https://files.pythonhosted.org/packages/f9/c5/ffe6474e0c551d54cab931918127c46d70cab8f114e0c2b5a3c071c2f484/pycryptodome-3.23.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa0698f65e5b570426fc31b8162ed4603b0c2841cbb9088e2b01641e3065915b", size = 2308534, upload-time = "2025-05-17T17:20:57.279Z" },
    { url = "https://files.pythonhosted.org/packages/18/28/e199677fc15ecf43010f2463fde4c1a53015d1fe95fb03bca2890836603a/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:53ecbafc2b55353edcebd64bf5da94a2a2cdf5090a6915bcca6eca6cc452585a", size = 2181853, upload-time = "2025-05-17T17:20:59.322Z" },
    { url = "https://files.pythonhosted.org/packages/ce/ea/4fdb09f2165ce1365c9eaefef36625583371ee514db58dc9b65d3a255c4c/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:156df9667ad9f2ad26255926524e1c136d6664b741547deb0a86a9acf5ea631f", size = 2342465, upload-time = "2025-05-17T17:21:03.83Z" },
    { url = "https://files.pythonhosted.org/packages/22/82/6edc3fc42fe9284aead511394bac167693fb2b0e0395b28b8bedaa07ef04/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:dea827b4d55ee390dc89b2afe5927d4308a8b538ae91d9c6f7a5090f397af1aa", size = 2267414, upload-time = "2025-05-17T17:21:06.72Z" },
    { url = "https://files.pythonhosted.org/packages/59/fe/aae679b64363eb78326c7fdc9d06ec3de18bac68be4b612fc1fe8902693c/pycryptodome-3.23.0-cp37-abi3-win32.whl", hash = "sha256:507dbead45474b62b2bbe318eb1c4c8ee641077532067fec9c1aa82c31f84886", size = 1768484, upload-time = "2025-05-17T17:21:08.535Z" },
    { url = "https://files.pythonhosted.org/packages/54/2f/e97a1b8294db0daaa87012c24a7bb714147c7ade7656973fd6c736b484ff/pycryptodome-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:c75b52aacc6c0c260f204cbdd834f76edc9fb0d8e0da9fbf8352ef58202564e2", size = 1799636, upload-time = "2025-05-17T17:21:10.393Z" },
    { url = "https://files.pythonhosted.org/packages/18/3d/f9441a0d798bf2b1e645adc3265e55706aead1255ccdad3856dbdcffec14/pycryptodome-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:11eeeb6917903876f134b56ba11abe95c0b0fd5e3330def218083c7d98bbcb3c", size = 1703675, upload-time = "2025-05-17T17:21:13.146Z" },
]

[[package]]
@ -5003,11 +5004,11 @@ wheels = [

[[package]]
name = "pypdf"
version = "6.6.0"
version = "6.6.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d8/f4/801632a8b62a805378b6af2b5a3fcbfd8923abf647e0ed1af846a83433b2/pypdf-6.6.0.tar.gz", hash = "sha256:4c887ef2ea38d86faded61141995a3c7d068c9d6ae8477be7ae5de8a8e16592f", size = 5281063, upload-time = "2026-01-09T11:20:11.786Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b8/bb/a44bab1ac3c54dbcf653d7b8bcdee93dddb2d3bf025a3912cacb8149a2f2/pypdf-6.6.2.tar.gz", hash = "sha256:0a3ea3b3303982333404e22d8f75d7b3144f9cf4b2970b96856391a516f9f016", size = 5281850, upload-time = "2026-01-26T11:57:55.964Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/b2/ba/96f99276194f720e74ed99905a080f6e77810558874e8935e580331b46de/pypdf-6.6.0-py3-none-any.whl", hash = "sha256:bca9091ef6de36c7b1a81e09327c554b7ce51e88dad68f5890c2b4a4417f1fd7", size = 328963, upload-time = "2026-01-09T11:20:09.278Z" },
    { url = "https://files.pythonhosted.org/packages/7d/be/549aaf1dfa4ab4aed29b09703d2fb02c4366fc1f05e880948c296c5764b9/pypdf-6.6.2-py3-none-any.whl", hash = "sha256:44c0c9811cfb3b83b28f1c3d054531d5b8b81abaedee0d8cb403650d023832ba", size = 329132, upload-time = "2026-01-26T11:57:54.099Z" },
]

[[package]]