Merge commit '9c339239' into sandboxed-agent-rebase

Made-with: Cursor

# Conflicts:
#	api/README.md
#	api/controllers/console/app/workflow_draft_variable.py
#	api/core/agent/cot_agent_runner.py
#	api/core/agent/fc_agent_runner.py
#	api/core/app/apps/advanced_chat/app_runner.py
#	api/core/plugin/backwards_invocation/model.py
#	api/core/prompt/advanced_prompt_transform.py
#	api/core/workflow/nodes/base/node.py
#	api/core/workflow/nodes/llm/llm_utils.py
#	api/core/workflow/nodes/llm/node.py
#	api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py
#	api/core/workflow/nodes/question_classifier/question_classifier_node.py
#	api/core/workflow/runtime/graph_runtime_state.py
#	api/extensions/storage/base_storage.py
#	api/factories/variable_factory.py
#	api/pyproject.toml
#	api/services/variable_truncator.py
#	api/uv.lock
#	web/app/account/oauth/authorize/page.tsx
#	web/app/components/app/configuration/config-var/config-modal/field.tsx
#	web/app/components/base/alert.tsx
#	web/app/components/base/chat/chat/answer/human-input-content/executed-action.tsx
#	web/app/components/base/chat/chat/answer/more.tsx
#	web/app/components/base/chat/chat/answer/operation.tsx
#	web/app/components/base/chat/chat/answer/workflow-process.tsx
#	web/app/components/base/chat/chat/citation/index.tsx
#	web/app/components/base/chat/chat/citation/popup.tsx
#	web/app/components/base/chat/chat/citation/progress-tooltip.tsx
#	web/app/components/base/chat/chat/citation/tooltip.tsx
#	web/app/components/base/chat/chat/question.tsx
#	web/app/components/base/chat/embedded-chatbot/inputs-form/index.tsx
#	web/app/components/base/chat/embedded-chatbot/inputs-form/view-form-dropdown.tsx
#	web/app/components/base/markdown-blocks/form.tsx
#	web/app/components/base/prompt-editor/plugins/hitl-input-block/component-ui.tsx
#	web/app/components/base/tag-management/panel.tsx
#	web/app/components/base/tag-management/trigger.tsx
#	web/app/components/header/account-setting/index.tsx
#	web/app/components/header/account-setting/members-page/transfer-ownership-modal/index.tsx
#	web/app/components/header/account-setting/model-provider-page/provider-added-card/index.tsx
#	web/app/signin/utils/post-login-redirect.ts
#	web/eslint-suppressions.json
#	web/package.json
#	web/pnpm-lock.yaml
Novice committed on 2026-03-23 09:00:45 +08:00

1009 changed files with 76072 additions and 18166 deletions

View File

@@ -289,6 +289,12 @@ class AccountService:
         TenantService.create_owner_tenant_if_not_exist(account=account)
+
+        # Enterprise-only: best-effort add the account to the default workspace (does not switch current workspace).
+        if dify_config.ENTERPRISE_ENABLED:
+            from services.enterprise.enterprise_service import try_join_default_workspace
+
+            try_join_default_workspace(str(account.id))
         return account

     @staticmethod
@@ -1407,6 +1413,12 @@ class RegisterService:
             tenant_was_created.send(tenant)
             db.session.commit()
+
+            # Enterprise-only: best-effort add the account to the default workspace (does not switch current workspace).
+            if dify_config.ENTERPRISE_ENABLED:
+                from services.enterprise.enterprise_service import try_join_default_workspace
+
+                try_join_default_workspace(str(account.id))
         except WorkSpaceNotAllowedCreateError:
             db.session.rollback()
             logger.exception("Register failed")

View File

@@ -131,33 +131,54 @@ class AppGenerateService:
         elif app_model.mode == AppMode.ADVANCED_CHAT:
             workflow_id = args.get("workflow_id")
             workflow = cls._get_workflow(app_model, invoke_from, workflow_id)
-            with rate_limit_context(rate_limit, request_id):
-                payload = AppExecutionParams.new(
-                    app_model=app_model,
-                    workflow=workflow,
-                    user=user,
-                    args=args,
-                    invoke_from=invoke_from,
-                    streaming=streaming,
-                    call_depth=0,
-                )
-                payload_json = payload.model_dump_json()
-
-                def on_subscribe():
-                    workflow_based_app_execution_task.delay(payload_json)
-
-                on_subscribe = cls._build_streaming_task_on_subscribe(on_subscribe)
-                generator = AdvancedChatAppGenerator()
-                return rate_limit.generate(
-                    generator.convert_to_event_stream(
-                        generator.retrieve_events(
-                            AppMode.ADVANCED_CHAT,
-                            payload.workflow_run_id,
-                            on_subscribe=on_subscribe,
-                        ),
-                    ),
-                    request_id=request_id,
-                )
+            if streaming:
+                # Streaming mode: subscribe to SSE and enqueue the execution on first subscriber
+                with rate_limit_context(rate_limit, request_id):
+                    payload = AppExecutionParams.new(
+                        app_model=app_model,
+                        workflow=workflow,
+                        user=user,
+                        args=args,
+                        invoke_from=invoke_from,
+                        streaming=True,
+                        call_depth=0,
+                    )
+                    payload_json = payload.model_dump_json()
+
+                    def on_subscribe():
+                        workflow_based_app_execution_task.delay(payload_json)
+
+                    on_subscribe = cls._build_streaming_task_on_subscribe(on_subscribe)
+                    generator = AdvancedChatAppGenerator()
+                    return rate_limit.generate(
+                        generator.convert_to_event_stream(
+                            generator.retrieve_events(
+                                AppMode.ADVANCED_CHAT,
+                                payload.workflow_run_id,
+                                on_subscribe=on_subscribe,
+                            ),
+                        ),
+                        request_id=request_id,
+                    )
+            else:
+                # Blocking mode: run synchronously and return JSON instead of SSE
+                # Keep behaviour consistent with WORKFLOW blocking branch.
+                advanced_generator = AdvancedChatAppGenerator()
+                return rate_limit.generate(
+                    advanced_generator.convert_to_event_stream(
+                        advanced_generator.generate(
+                            app_model=app_model,
+                            workflow=workflow,
+                            user=user,
+                            args=args,
+                            invoke_from=invoke_from,
+                            workflow_run_id=str(uuid.uuid4()),
+                            streaming=False,
+                        )
+                    ),
+                    request_id=request_id,
+                )
         elif app_model.mode == AppMode.WORKFLOW:
             workflow_id = args.get("workflow_id")
             workflow = cls._get_workflow(app_model, invoke_from, workflow_id)

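Taken together, the new branch makes the transport decision explicit: streaming requests enqueue the Celery task lazily (only when the first SSE subscriber attaches), while blocking requests execute in-process and collapse to a single JSON response. A schematic sketch of the dispatch, with simplified names that are not the actual Dify API:

    import uuid

    def dispatch_advanced_chat(generator, rate_limit, payload, request_id: str, streaming: bool):
        if streaming:
            # SSE path: the execution task is enqueued only when the first
            # subscriber attaches, so abandoned requests do no work.
            events = generator.retrieve_events(payload.workflow_run_id, on_subscribe=payload.enqueue)
        else:
            # Blocking path: run synchronously; the caller receives one JSON payload.
            events = generator.generate(payload, workflow_run_id=str(uuid.uuid4()), streaming=False)
        return rate_limit.generate(generator.convert_to_event_stream(events), request_id=request_id)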
View File

@@ -107,19 +107,19 @@ class AppService:
         if model_instance:
             if (
-                model_instance.model == default_model_config["model"]["name"]
+                model_instance.model_name == default_model_config["model"]["name"]
                 and model_instance.provider == default_model_config["model"]["provider"]
             ):
                 default_model_dict = default_model_config["model"]
             else:
                 llm_model = cast(LargeLanguageModel, model_instance.model_type_instance)
-                model_schema = llm_model.get_model_schema(model_instance.model, model_instance.credentials)
+                model_schema = llm_model.get_model_schema(model_instance.model_name, model_instance.credentials)
                 if model_schema is None:
-                    raise ValueError(f"model schema not found for model {model_instance.model}")
+                    raise ValueError(f"model schema not found for model {model_instance.model_name}")

                 default_model_dict = {
                     "provider": model_instance.provider,
-                    "name": model_instance.model,
+                    "name": model_instance.model_name,
                     "mode": model_schema.model_properties.get(ModelPropertyKey.MODE),
                     "completion_params": {},
                 }

View File

@@ -8,6 +8,7 @@ new GraphEngine command channel mechanism.
 from core.app.apps.base_app_queue_manager import AppQueueManager
 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.workflow.graph_engine.manager import GraphEngineManager
+from extensions.ext_redis import redis_client
 from models.model import AppMode

@@ -42,4 +43,4 @@ class AppTaskService:
         # New mechanism: Send stop command via GraphEngine for workflow-based apps
         # This ensures proper workflow status recording in the persistence layer
         if app_mode in (AppMode.ADVANCED_CHAT, AppMode.WORKFLOW):
-            GraphEngineManager.send_stop_command(task_id)
+            GraphEngineManager(redis_client).send_stop_command(task_id)

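The stop path is now instance-based: the manager is constructed with the shared Redis client instead of being invoked as a classmethod. A minimal call-site sketch, assuming only the signatures visible in this hunk:

    from core.workflow.graph_engine.manager import GraphEngineManager
    from extensions.ext_redis import redis_client

    def stop_workflow_app_task(task_id: str) -> None:
        # The stop command travels over the engine's Redis-backed command
        # channel, so the running workflow can record its final status.
        GraphEngineManager(redis_client).send_stop_command(task_id)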
View File

@@ -10,7 +10,7 @@ from configs import dify_config
 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.db.session_factory import session_factory
 from core.llm_generator.llm_generator import LLMGenerator
-from core.variables.types import SegmentType
+from core.workflow.variables.types import SegmentType
 from extensions.ext_database import db
 from factories import variable_factory
 from libs.datetime_utils import naive_utc_now

@@ -180,6 +180,14 @@ class ConversationService:
     @classmethod
     def delete(cls, app_model: App, conversation_id: str, user: Union[Account, EndUser] | None):
+        """
+        Delete a conversation only if it belongs to the given user and app context.
+
+        Raises:
+            ConversationNotExistsError: When the conversation is not visible to the current user.
+        """
+        conversation = cls.get_conversation(app_model, conversation_id, user)
+
         try:
             logger.info(
                 "Initiating conversation deletion for app_name %s, conversation_id: %s",

@@ -187,10 +195,10 @@
                 conversation_id,
             )
-            db.session.query(Conversation).where(Conversation.id == conversation_id).delete(synchronize_session=False)
+            db.session.delete(conversation)
             db.session.commit()
-            delete_conversation_related_data.delay(conversation_id)
+            delete_conversation_related_data.delay(conversation.id)
         except Exception as e:
             db.session.rollback()

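Because delete() now resolves the conversation through get_conversation() first, callers get the visibility check for free. A caller sketch, assuming the ConversationNotExistsError described in the docstring (the import path here is hypothetical):

    from services.conversation_service import ConversationService
    from services.errors.conversation import ConversationNotExistsError  # hypothetical path

    def try_delete_conversation(app_model, conversation_id: str, user) -> bool:
        try:
            ConversationService.delete(app_model, conversation_id, user)
            return True
        except ConversationNotExistsError:
            # Not visible to this user in this app context (or already gone).
            return False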
View File

@@ -1,7 +1,7 @@
 from sqlalchemy import select
 from sqlalchemy.orm import Session, sessionmaker

-from core.variables.variables import VariableBase
+from core.workflow.variables.variables import VariableBase
 from models import ConversationVariable

View File

@@ -252,7 +252,7 @@ class DatasetService:
         dataset.updated_by = account.id
         dataset.tenant_id = tenant_id
         dataset.embedding_model_provider = embedding_model.provider if embedding_model else None
-        dataset.embedding_model = embedding_model.model if embedding_model else None
+        dataset.embedding_model = embedding_model.model_name if embedding_model else None
         dataset.retrieval_model = retrieval_model.model_dump() if retrieval_model else None
         dataset.permission = permission or DatasetPermissionEnum.ONLY_ME
         dataset.provider = provider

@@ -384,7 +384,7 @@
             model=model,
         )
         text_embedding_model = cast(TextEmbeddingModel, model_instance.model_type_instance)
-        model_schema = text_embedding_model.get_model_schema(model_instance.model, model_instance.credentials)
+        model_schema = text_embedding_model.get_model_schema(model_instance.model_name, model_instance.credentials)
         if not model_schema:
             raise ValueError("Model schema not found")
         if model_schema.features and ModelFeature.VISION in model_schema.features:

@@ -743,10 +743,12 @@
                     model_type=ModelType.TEXT_EMBEDDING,
                     model=data["embedding_model"],
                 )
-                filtered_data["embedding_model"] = embedding_model.model
+                embedding_model_name = embedding_model.model_name
+                filtered_data["embedding_model"] = embedding_model_name
                 filtered_data["embedding_model_provider"] = embedding_model.provider
                 dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding(
-                    embedding_model.provider, embedding_model.model
+                    embedding_model.provider,
+                    embedding_model_name,
                 )
                 filtered_data["collection_binding_id"] = dataset_collection_binding.id
             except LLMBadRequestError:

@@ -876,10 +878,12 @@
                 return

             # Apply new embedding model settings
-            filtered_data["embedding_model"] = embedding_model.model
+            embedding_model_name = embedding_model.model_name
+            filtered_data["embedding_model"] = embedding_model_name
             filtered_data["embedding_model_provider"] = embedding_model.provider
             dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding(
-                embedding_model.provider, embedding_model.model
+                embedding_model.provider,
+                embedding_model_name,
             )
             filtered_data["collection_binding_id"] = dataset_collection_binding.id

@@ -955,10 +959,12 @@
                     knowledge_configuration.embedding_model,
                 )
                 dataset.is_multimodal = is_multimodal
-                dataset.embedding_model = embedding_model.model
+                embedding_model_name = embedding_model.model_name
+                dataset.embedding_model = embedding_model_name
                 dataset.embedding_model_provider = embedding_model.provider
                 dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding(
-                    embedding_model.provider, embedding_model.model
+                    embedding_model.provider,
+                    embedding_model_name,
                 )
                 dataset.collection_binding_id = dataset_collection_binding.id
             elif knowledge_configuration.indexing_technique == "economy":

@@ -989,10 +995,12 @@
                     model_type=ModelType.TEXT_EMBEDDING,
                     model=knowledge_configuration.embedding_model,
                 )
-                dataset.embedding_model = embedding_model.model
+                embedding_model_name = embedding_model.model_name
+                dataset.embedding_model = embedding_model_name
                 dataset.embedding_model_provider = embedding_model.provider
                 dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding(
-                    embedding_model.provider, embedding_model.model
+                    embedding_model.provider,
+                    embedding_model_name,
                 )
                 is_multimodal = DatasetService.check_is_multimodal_model(
                     current_user.current_tenant_id,

@@ -1049,11 +1057,13 @@
                         skip_embedding_update = True
                 if not skip_embedding_update:
                     if embedding_model:
-                        dataset.embedding_model = embedding_model.model
+                        embedding_model_name = embedding_model.model_name
+                        dataset.embedding_model = embedding_model_name
                         dataset.embedding_model_provider = embedding_model.provider
                         dataset_collection_binding = (
                             DatasetCollectionBindingService.get_dataset_collection_binding(
-                                embedding_model.provider, embedding_model.model
+                                embedding_model.provider,
+                                embedding_model_name,
                             )
                         )
                         dataset.collection_binding_id = dataset_collection_binding.id

@@ -1884,7 +1894,7 @@ class DocumentService:
             embedding_model = model_manager.get_default_model_instance(
                 tenant_id=current_user.current_tenant_id, model_type=ModelType.TEXT_EMBEDDING
             )
-            dataset_embedding_model = embedding_model.model
+            dataset_embedding_model = embedding_model.model_name
             dataset_embedding_model_provider = embedding_model.provider
             dataset.embedding_model = dataset_embedding_model
             dataset.embedding_model_provider = dataset_embedding_model_provider

View File

@@ -39,6 +39,9 @@ class BaseRequest:
         endpoint: str,
         json: Any | None = None,
         params: Mapping[str, Any] | None = None,
+        *,
+        timeout: float | httpx.Timeout | None = None,
+        raise_for_status: bool = False,
     ) -> Any:
         headers = {"Content-Type": "application/json", cls.secret_key_header: cls.secret_key}
         url = f"{cls.base_url}{endpoint}"

@@ -53,7 +56,16 @@
                 logger.debug("Failed to generate traceparent header", exc_info=True)

         with httpx.Client(mounts=mounts) as client:
-            response = client.request(method, url, json=json, params=params, headers=headers)
+            # IMPORTANT:
+            # - In httpx, passing timeout=None disables timeouts (infinite) and overrides the library default.
+            # - To preserve httpx's default timeout behavior for existing call sites, only pass the kwarg when set.
+            request_kwargs: dict[str, Any] = {"json": json, "params": params, "headers": headers}
+            if timeout is not None:
+                request_kwargs["timeout"] = timeout
+            response = client.request(method, url, **request_kwargs)
+            if raise_for_status:
+                response.raise_for_status()
             return response.json()

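The comment above captures a real httpx footgun: an explicit timeout=None means "no timeout at all", while omitting the argument keeps the client default (5 seconds unless configured otherwise). A standalone sketch of the forwarding pattern, independent of the enterprise client:

    from typing import Any

    import httpx

    def fetch_json(url: str, timeout: float | httpx.Timeout | None = None) -> Any:
        kwargs: dict[str, Any] = {}
        if timeout is not None:
            # Forward only when the caller set a value; passing timeout=None
            # through would disable timeouts instead of using the default.
            kwargs["timeout"] = timeout
        with httpx.Client() as client:
            return client.get(url, **kwargs).json()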
View File

@@ -1,9 +1,16 @@
+import logging
+import uuid
 from datetime import datetime

-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field, model_validator

+from configs import dify_config
 from services.enterprise.base import EnterpriseRequest

+logger = logging.getLogger(__name__)
+
+DEFAULT_WORKSPACE_JOIN_TIMEOUT_SECONDS = 1.0
+

 class WebAppSettings(BaseModel):
     access_mode: str = Field(

@@ -30,6 +37,55 @@ class WorkspacePermission(BaseModel):
     )


+class DefaultWorkspaceJoinResult(BaseModel):
+    """
+    Result of ensuring an account is a member of the enterprise default workspace.
+
+    - joined=True is idempotent (already being a member also returns True)
+    - joined=False means the enterprise default workspace is not configured, or is invalid/archived
+    """
+
+    workspace_id: str = Field(default="", alias="workspaceId")
+    joined: bool
+    message: str
+
+    model_config = ConfigDict(extra="forbid", populate_by_name=True)
+
+    @model_validator(mode="after")
+    def _check_workspace_id_when_joined(self) -> "DefaultWorkspaceJoinResult":
+        if self.joined and not self.workspace_id:
+            raise ValueError("workspace_id must be non-empty when joined is True")
+        return self
+
+
+def try_join_default_workspace(account_id: str) -> None:
+    """
+    Enterprise-only side effect: ensure the account is a member of the default workspace.
+
+    This is a best-effort integration. Failures must not block user registration.
+    """
+    if not dify_config.ENTERPRISE_ENABLED:
+        return
+    try:
+        result = EnterpriseService.join_default_workspace(account_id=account_id)
+        if result.joined:
+            logger.info(
+                "Joined enterprise default workspace for account %s (workspace_id=%s)",
+                account_id,
+                result.workspace_id,
+            )
+        else:
+            logger.info(
+                "Skipped joining enterprise default workspace for account %s (message=%s)",
+                account_id,
+                result.message,
+            )
+    except Exception:
+        logger.warning("Failed to join enterprise default workspace for account %s", account_id, exc_info=True)
+
+
 class EnterpriseService:
     @classmethod
     def get_info(cls):

@@ -39,6 +95,34 @@ class EnterpriseService:
     def get_workspace_info(cls, tenant_id: str):
         return EnterpriseRequest.send_request("GET", f"/workspace/{tenant_id}/info")

+    @classmethod
+    def join_default_workspace(cls, *, account_id: str) -> DefaultWorkspaceJoinResult:
+        """
+        Call the enterprise inner API to add an account to the default workspace.
+
+        NOTE: EnterpriseRequest.base_url is expected to already include the `/inner/api` prefix,
+        so the endpoint here is `/default-workspace/members`.
+        """
+        # Ensure we are sending a UUID-shaped string (the enterprise side validates too).
+        try:
+            uuid.UUID(account_id)
+        except ValueError as e:
+            raise ValueError(f"account_id must be a valid UUID: {account_id}") from e
+
+        data = EnterpriseRequest.send_request(
+            "POST",
+            "/default-workspace/members",
+            json={"account_id": account_id},
+            timeout=DEFAULT_WORKSPACE_JOIN_TIMEOUT_SECONDS,
+            raise_for_status=True,
+        )
+        if not isinstance(data, dict):
+            raise ValueError("Invalid response format from enterprise default workspace API")
+        if "joined" not in data or "message" not in data:
+            raise ValueError("Invalid response payload from enterprise default workspace API")
+        return DefaultWorkspaceJoinResult.model_validate(data)
+
     @classmethod
     def get_app_sso_settings_last_update_time(cls) -> datetime:
         data = EnterpriseRequest.send_request("GET", "/sso/app/last-update-time")

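The model validator enforces the joined/workspace_id invariant at parse time; a minimal sketch using only the DefaultWorkspaceJoinResult model defined above (the workspace id value is a placeholder):

    from pydantic import ValidationError

    # Accepted: the "workspaceId" alias is honored (populate_by_name also allows workspace_id).
    ok = DefaultWorkspaceJoinResult.model_validate(
        {"workspaceId": "0b8e4c2e-9f0a-4d6e-8a55-1c2d3e4f5a6b", "joined": True, "message": "ok"}
    )

    # Rejected: joined=True with an empty workspace id fails the model validator.
    try:
        DefaultWorkspaceJoinResult.model_validate({"joined": True, "message": "ok"})
    except ValidationError:
        pass  # "workspace_id must be non-empty when joined is True"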
View File

@@ -36,7 +36,6 @@ from core.rag.entities.event import (
 )
 from core.repositories.factory import DifyCoreRepositoryFactory
 from core.repositories.sqlalchemy_workflow_node_execution_repository import SQLAlchemyWorkflowNodeExecutionRepository
-from core.variables.variables import VariableBase
 from core.workflow.entities.workflow_node_execution import (
     WorkflowNodeExecution,
     WorkflowNodeExecutionStatus,

@@ -47,10 +46,12 @@ from core.workflow.graph_events import NodeRunFailedEvent, NodeRunSucceededEvent
 from core.workflow.graph_events.base import GraphNodeEventBase
 from core.workflow.node_events.base import NodeRunResult
 from core.workflow.nodes.base.node import Node
+from core.workflow.nodes.http_request import HTTP_REQUEST_CONFIG_FILTER_KEY, build_http_request_config
 from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING
 from core.workflow.repositories.workflow_node_execution_repository import OrderConfig
 from core.workflow.runtime import VariablePool
 from core.workflow.system_variable import SystemVariable
+from core.workflow.variables.variables import VariableBase
 from core.workflow.workflow_entry import WorkflowEntry
 from extensions.ext_database import db
 from libs.infinite_scroll_pagination import InfiniteScrollPagination

@@ -380,9 +381,22 @@ class RagPipelineService:
         """
         # return default block config
         default_block_configs: list[dict[str, Any]] = []
-        for node_class_mapping in NODE_TYPE_CLASSES_MAPPING.values():
+        for node_type, node_class_mapping in NODE_TYPE_CLASSES_MAPPING.items():
             node_class = node_class_mapping[LATEST_VERSION]
-            default_config = node_class.get_default_config()
+            filters = None
+            if node_type is NodeType.HTTP_REQUEST:
+                filters = {
+                    HTTP_REQUEST_CONFIG_FILTER_KEY: build_http_request_config(
+                        max_connect_timeout=dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT,
+                        max_read_timeout=dify_config.HTTP_REQUEST_MAX_READ_TIMEOUT,
+                        max_write_timeout=dify_config.HTTP_REQUEST_MAX_WRITE_TIMEOUT,
+                        max_binary_size=dify_config.HTTP_REQUEST_NODE_MAX_BINARY_SIZE,
+                        max_text_size=dify_config.HTTP_REQUEST_NODE_MAX_TEXT_SIZE,
+                        ssl_verify=dify_config.HTTP_REQUEST_NODE_SSL_VERIFY,
+                        ssrf_default_max_retries=dify_config.SSRF_DEFAULT_MAX_RETRIES,
+                    )
+                }
+            default_config = node_class.get_default_config(filters=filters)
             if default_config:
                 default_block_configs.append(dict(default_config))

@@ -402,7 +416,18 @@
             return None

         node_class = NODE_TYPE_CLASSES_MAPPING[node_type_enum][LATEST_VERSION]
-        default_config = node_class.get_default_config(filters=filters)
+        final_filters = dict(filters) if filters else {}
+        if node_type_enum is NodeType.HTTP_REQUEST and HTTP_REQUEST_CONFIG_FILTER_KEY not in final_filters:
+            final_filters[HTTP_REQUEST_CONFIG_FILTER_KEY] = build_http_request_config(
+                max_connect_timeout=dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT,
+                max_read_timeout=dify_config.HTTP_REQUEST_MAX_READ_TIMEOUT,
+                max_write_timeout=dify_config.HTTP_REQUEST_MAX_WRITE_TIMEOUT,
+                max_binary_size=dify_config.HTTP_REQUEST_NODE_MAX_BINARY_SIZE,
+                max_text_size=dify_config.HTTP_REQUEST_NODE_MAX_TEXT_SIZE,
+                ssl_verify=dify_config.HTTP_REQUEST_NODE_SSL_VERIFY,
+                ssrf_default_max_retries=dify_config.SSRF_DEFAULT_MAX_RETRIES,
+            )
+        default_config = node_class.get_default_config(filters=final_filters or None)
         if not default_config:
             return None

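The same filter literal is built in four places in this commit (two hunks here, two in WorkflowService below). A small helper would keep the env-derived HTTP limits in one spot; a sketch using the names this diff imports (the helper function name is hypothetical):

    from configs import dify_config
    from core.workflow.nodes.http_request import HTTP_REQUEST_CONFIG_FILTER_KEY, build_http_request_config

    def default_http_request_filters() -> dict:
        # Bundles the env-derived limits the HTTP request node's default
        # config depends on: timeouts, payload sizes, SSL verify, retries.
        return {
            HTTP_REQUEST_CONFIG_FILTER_KEY: build_http_request_config(
                max_connect_timeout=dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT,
                max_read_timeout=dify_config.HTTP_REQUEST_MAX_READ_TIMEOUT,
                max_write_timeout=dify_config.HTTP_REQUEST_MAX_WRITE_TIMEOUT,
                max_binary_size=dify_config.HTTP_REQUEST_NODE_MAX_BINARY_SIZE,
                max_text_size=dify_config.HTTP_REQUEST_NODE_MAX_TEXT_SIZE,
                ssl_verify=dify_config.HTTP_REQUEST_NODE_SSL_VERIFY,
                ssrf_default_max_retries=dify_config.SSRF_DEFAULT_MAX_RETRIES,
            )
        }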
View File

@@ -16,9 +16,9 @@ from werkzeug.exceptions import RequestEntityTooLarge
 from configs import dify_config
 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.tools.tool_file_manager import ToolFileManager
-from core.variables.types import SegmentType
 from core.workflow.enums import NodeType
 from core.workflow.file.models import FileTransferMethod
+from core.workflow.variables.types import SegmentType
 from enums.quota_type import QuotaType
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client

View File

@@ -7,7 +7,9 @@ from typing import Any, Generic, TypeAlias, TypeVar, overload

 from configs import dify_config
 from core.model_runtime.entities import PromptMessage
-from core.variables.segments import (
+from core.workflow.file.models import File
+from core.workflow.nodes.variable_assigner.common.helpers import UpdatedVariable
+from core.workflow.variables.segments import (
     ArrayFileSegment,
     ArraySegment,
     BooleanSegment,

@@ -19,9 +21,7 @@ from core.variables.segments import (
     Segment,
     StringSegment,
 )
-from core.variables.utils import dumps_with_segments
-from core.workflow.file.models import File
-from core.workflow.nodes.variable_assigner.common.helpers import UpdatedVariable
+from core.workflow.variables.utils import dumps_with_segments

 _MAX_DEPTH = 100

View File

@@ -8,7 +8,6 @@ from core.app.app_config.entities import (
     ExternalDataVariableEntity,
     ModelConfigEntity,
     PromptTemplateEntity,
-    VariableEntity,
 )
 from core.app.apps.agent_chat.app_config_manager import AgentChatAppConfigManager
 from core.app.apps.chat.app_config_manager import ChatAppConfigManager

@@ -20,6 +19,7 @@ from core.prompt.simple_prompt_transform import SimplePromptTransform
 from core.prompt.utils.prompt_template_parser import PromptTemplateParser
 from core.workflow.file.models import FileUploadConfig
 from core.workflow.nodes import NodeType
+from core.workflow.variables.input_entities import VariableEntity
 from events.app_event import app_was_created
 from extensions.ext_database import db
 from models import Account

View File

@@ -14,20 +14,20 @@ from sqlalchemy.sql.expression import and_, or_

 from configs import dify_config
 from core.app.entities.app_invoke_entities import InvokeFrom
-from core.variables import Segment, StringSegment, VariableBase
-from core.variables.consts import SELECTORS_LENGTH
-from core.variables.segments import (
-    ArrayFileSegment,
-    FileSegment,
-)
-from core.variables.types import SegmentType
-from core.variables.utils import dumps_with_segments
 from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
 from core.workflow.enums import SystemVariableKey
 from core.workflow.file.models import File
 from core.workflow.nodes import NodeType
 from core.workflow.nodes.variable_assigner.common.helpers import get_updated_variables
 from core.workflow.variable_loader import VariableLoader
+from core.workflow.variables import Segment, StringSegment, VariableBase
+from core.workflow.variables.consts import SELECTORS_LENGTH
+from core.workflow.variables.segments import (
+    ArrayFileSegment,
+    FileSegment,
+)
+from core.workflow.variables.types import SegmentType
+from core.workflow.variables.utils import dumps_with_segments
 from extensions.ext_storage import storage
 from factories.file_factory import StorageKeyLoader
 from factories.variable_factory import build_segment, segment_to_variable

View File

@@ -9,14 +9,11 @@ from sqlalchemy import exists, select
 from sqlalchemy.orm import Session, sessionmaker

 from configs import dify_config
-from core.app.app_config.entities import VariableEntityType
 from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
 from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.repositories import DifyCoreRepositoryFactory
 from core.repositories.human_input_repository import HumanInputFormRepositoryImpl
-from core.variables import VariableBase
-from core.variables.variables import Variable
 from core.workflow.entities import GraphInitParams, WorkflowNodeExecution
 from core.workflow.entities.pause_reason import HumanInputRequired
 from core.workflow.enums import ErrorStrategy, WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus

@@ -26,6 +23,7 @@ from core.workflow.graph_events import GraphNodeEventBase, NodeRunFailedEvent, N
 from core.workflow.node_events import NodeRunResult
 from core.workflow.nodes import NodeType
 from core.workflow.nodes.base.node import Node
+from core.workflow.nodes.http_request import HTTP_REQUEST_CONFIG_FILTER_KEY, build_http_request_config
 from core.workflow.nodes.human_input.entities import (
     DeliveryChannelConfig,
     HumanInputNodeData,

@@ -40,6 +38,9 @@ from core.workflow.repositories.human_input_form_repository import FormCreatePar
 from core.workflow.runtime import GraphRuntimeState, VariablePool
 from core.workflow.system_variable import SystemVariable
 from core.workflow.variable_loader import load_into_variable_pool
+from core.workflow.variables import VariableBase
+from core.workflow.variables.input_entities import VariableEntityType
+from core.workflow.variables.variables import Variable
 from core.workflow.workflow_entry import WorkflowEntry
 from enums.cloud_plan import CloudPlan
 from events.app_event import app_draft_workflow_was_synced, app_published_workflow_was_updated

@@ -695,9 +696,22 @@ class WorkflowService:
         """
         # return default block config
         default_block_configs: list[Mapping[str, object]] = []
-        for node_class_mapping in NODE_TYPE_CLASSES_MAPPING.values():
+        for node_type, node_class_mapping in NODE_TYPE_CLASSES_MAPPING.items():
             node_class = node_class_mapping[LATEST_VERSION]
-            default_config = node_class.get_default_config()
+            filters = None
+            if node_type is NodeType.HTTP_REQUEST:
+                filters = {
+                    HTTP_REQUEST_CONFIG_FILTER_KEY: build_http_request_config(
+                        max_connect_timeout=dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT,
+                        max_read_timeout=dify_config.HTTP_REQUEST_MAX_READ_TIMEOUT,
+                        max_write_timeout=dify_config.HTTP_REQUEST_MAX_WRITE_TIMEOUT,
+                        max_binary_size=dify_config.HTTP_REQUEST_NODE_MAX_BINARY_SIZE,
+                        max_text_size=dify_config.HTTP_REQUEST_NODE_MAX_TEXT_SIZE,
+                        ssl_verify=dify_config.HTTP_REQUEST_NODE_SSL_VERIFY,
+                        ssrf_default_max_retries=dify_config.SSRF_DEFAULT_MAX_RETRIES,
+                    )
+                }
+            default_config = node_class.get_default_config(filters=filters)
             if default_config:
                 default_block_configs.append(default_config)

@@ -719,7 +733,18 @@
             return {}

         node_class = NODE_TYPE_CLASSES_MAPPING[node_type_enum][LATEST_VERSION]
-        default_config = node_class.get_default_config(filters=filters)
+        resolved_filters = dict(filters) if filters else {}
+        if node_type_enum is NodeType.HTTP_REQUEST and HTTP_REQUEST_CONFIG_FILTER_KEY not in resolved_filters:
+            resolved_filters[HTTP_REQUEST_CONFIG_FILTER_KEY] = build_http_request_config(
+                max_connect_timeout=dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT,
+                max_read_timeout=dify_config.HTTP_REQUEST_MAX_READ_TIMEOUT,
+                max_write_timeout=dify_config.HTTP_REQUEST_MAX_WRITE_TIMEOUT,
+                max_binary_size=dify_config.HTTP_REQUEST_NODE_MAX_BINARY_SIZE,
+                max_text_size=dify_config.HTTP_REQUEST_NODE_MAX_TEXT_SIZE,
+                ssl_verify=dify_config.HTTP_REQUEST_NODE_SSL_VERIFY,
+                ssrf_default_max_retries=dify_config.SSRF_DEFAULT_MAX_RETRIES,
+            )
+        default_config = node_class.get_default_config(filters=resolved_filters or None)
         if not default_config:
             return {}