fix: resolve import migration issues and test failures after segment 3 merge

- Migrate core.model_runtime -> dify_graph.model_runtime across 20+ files
- Migrate core.workflow.file -> dify_graph.file across 15+ files
- Migrate core.workflow.enums -> dify_graph.enums in service files
- Fix SandboxContext phantom import in dify_graph/context/__init__.py
- Fix core.app.workflow.node_factory -> core.workflow.node_factory
- Fix toast import paths (useToastContext from toast/context)
- Fix app-info.tsx import paths for relocated app-operations
- Fix 15 frontend test files for API changes, missing QueryClientProvider,
  i18n key renames, and component behavior changes

Made-with: Cursor
This commit is contained in:
Novice
2026-03-23 10:31:11 +08:00
parent 94b01f6821
commit 6b75188ddc
58 changed files with 242 additions and 172 deletions

View File

@@ -17,15 +17,12 @@ from dify_graph.context.execution_context (
register_context_capturer,
reset_context_provider,
)
from dify_graph.context.models import SandboxContext
__all__ = [
"AppContext",
"ContextProviderNotFoundError",
"ExecutionContext",
"IExecutionContext",
"NullAppContext",
"SandboxContext",
"capture_current_context",
"read_context",
"register_context",

View File

@@ -517,7 +517,7 @@ class AgentNode(Node[AgentNodeData]):
Fetch memory instance for saving node memory.
This is a simplified version that doesn't require model_instance.
"""
from core.model_runtime.entities.model_entities import ModelType
from dify_graph.model_runtime.entities.model_entities import ModelType
from core.model_manager import ModelManager

View File

@@ -378,7 +378,7 @@ class Node(Generic[NodeDataT]):
Nested nodes are nodes with parent_node_id == self._node_id.
They are executed before the main node to extract values from list[PromptMessage].
"""
from core.app.workflow.node_factory import DifyNodeFactory
from core.workflow.node_factory import DifyNodeFactory
extractor_configs = self._find_extractor_node_configs()
logger.debug("[NestedNode] Found %d nested nodes for parent '%s'", len(extractor_configs), self._node_id)
@@ -689,7 +689,7 @@ class Node(Generic[NodeDataT]):
@_dispatch.register
def _(self, event: StreamChunkEvent) -> NodeRunStreamChunkEvent:
from core.workflow.graph_events import ChunkType
from dify_graph.graph_events import ChunkType
return NodeRunStreamChunkEvent(
id=self.execution_id,
@@ -711,7 +711,7 @@ class Node(Generic[NodeDataT]):
@_dispatch.register
def _(self, event: ToolCallChunkEvent) -> NodeRunStreamChunkEvent:
from core.workflow.graph_events import ChunkType
from dify_graph.graph_events import ChunkType
return NodeRunStreamChunkEvent(
id=self._node_execution_id,
@@ -726,8 +726,8 @@ class Node(Generic[NodeDataT]):
@_dispatch.register
def _(self, event: ToolResultChunkEvent) -> NodeRunStreamChunkEvent:
from core.workflow.entities import ToolResult, ToolResultStatus
from core.workflow.graph_events import ChunkType
from dify_graph.entities import ToolResult, ToolResultStatus
from dify_graph.graph_events import ChunkType
tool_result = event.tool_result or ToolResult()
status: ToolResultStatus = tool_result.status or ToolResultStatus.SUCCESS
@@ -748,7 +748,7 @@ class Node(Generic[NodeDataT]):
@_dispatch.register
def _(self, event: ThoughtChunkEvent) -> NodeRunStreamChunkEvent:
from core.workflow.graph_events import ChunkType
from dify_graph.graph_events import ChunkType
return NodeRunStreamChunkEvent(
id=self._node_execution_id,

View File

@@ -199,7 +199,7 @@ def _build_messages_from_trace(
assistant_response: str,
file_suffix: str = "",
) -> list[PromptMessage]:
from core.workflow.nodes.llm.entities import ModelTraceSegment, ToolTraceSegment
from dify_graph.nodes.llm.entities import ModelTraceSegment, ToolTraceSegment
messages: list[PromptMessage] = []
covered_text_len = 0
@@ -266,12 +266,12 @@ def _truncate_multimodal_content(message: PromptMessage) -> PromptMessage:
def restore_multimodal_content_in_messages(messages: Sequence[PromptMessage]) -> list[PromptMessage]:
from core.workflow.file import file_manager
return [_restore_message_content(msg, file_manager) for msg in messages]
return [_restore_message_content(msg) for msg in messages]
def _restore_message_content(message: PromptMessage, file_manager) -> PromptMessage:
def _restore_message_content(message: PromptMessage) -> PromptMessage:
from dify_graph.file.file_manager import restore_multimodal_content
content = message.content
if content is None or isinstance(content, str):
return message
@@ -279,7 +279,7 @@ def _restore_message_content(message: PromptMessage, file_manager) -> PromptMess
restored_content: list[PromptMessageContentUnionTypes] = []
for item in content:
if isinstance(item, MultiModalPromptMessageContent):
restored_item = file_manager.restore_multimodal_content(item)
restored_item = restore_multimodal_content(item)
restored_content.append(cast(PromptMessageContentUnionTypes, restored_item))
else:
restored_content.append(item)

View File

@@ -2201,7 +2201,7 @@ class LLMNode(Node[LLMNodeData]):
def _extract_prompt_files(self, variable_pool: VariablePool) -> list[File]:
"""Extract files from prompt template variables."""
from core.workflow.variables import ArrayFileVariable, FileVariable
from dify_graph.variables import ArrayFileVariable, FileVariable
files: list[File] = []

View File

@@ -568,7 +568,7 @@ class ToolNode(Node[ToolNodeData]):
:param parent_node_id: the parent node id to find nested nodes for
:return: mapping of variable key to variable selector
"""
from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING
from dify_graph.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING
result: dict[str, Sequence[str]] = {}
nodes = graph_config.get("nodes", [])

View File

@@ -3,7 +3,7 @@ from typing import Any
import orjson
from core.model_runtime.entities import PromptMessage
from dify_graph.model_runtime.entities import PromptMessage
from .segment_group import SegmentGroup
from .segments import ArrayFileSegment, ArrayPromptMessageSegment, FileSegment, Segment