feat(skill): skill support

This commit is contained in:
Harry
2026-01-20 03:02:25 +08:00
parent bc9ce23fdc
commit f5a34e9ee8
32 changed files with 608 additions and 345 deletions

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -1,7 +1,7 @@
from flask_restx import Resource
from controllers.cli_api import cli_api_ns
from controllers.cli_api.plugin.wraps import get_user_tenant, plugin_data
from controllers.cli_api.plugin.wraps import get_cli_user_tenant, plugin_data
from controllers.cli_api.wraps import cli_api_only
from controllers.console.wraps import setup_required
from core.file.helpers import get_signed_file_url_for_plugin
@ -23,7 +23,7 @@ from models.model import EndUser
@cli_api_ns.route("/invoke/llm")
class CliInvokeLLMApi(Resource):
@get_user_tenant
@get_cli_user_tenant
@setup_required
@cli_api_only
@plugin_data(payload_type=RequestInvokeLLM)
@ -37,7 +37,7 @@ class CliInvokeLLMApi(Resource):
@cli_api_ns.route("/invoke/tool")
class CliInvokeToolApi(Resource):
@get_user_tenant
@get_cli_user_tenant
@setup_required
@cli_api_only
@plugin_data(payload_type=RequestInvokeTool)
@ -60,7 +60,7 @@ class CliInvokeToolApi(Resource):
@cli_api_ns.route("/invoke/app")
class CliInvokeAppApi(Resource):
@get_user_tenant
@get_cli_user_tenant
@setup_required
@cli_api_only
@plugin_data(payload_type=RequestInvokeApp)
@ -81,7 +81,7 @@ class CliInvokeAppApi(Resource):
@cli_api_ns.route("/upload/file/request")
class CliUploadFileRequestApi(Resource):
@get_user_tenant
@get_cli_user_tenant
@setup_required
@cli_api_only
@plugin_data(payload_type=RequestRequestUploadFile)
@ -98,7 +98,7 @@ class CliUploadFileRequestApi(Resource):
@cli_api_ns.route("/fetch/tools/list")
class CliFetchToolsListApi(Resource):
@get_user_tenant
@get_cli_user_tenant
@setup_required
@cli_api_only
def post(self, user_model: Account | EndUser, tenant_model: Tenant):

View File

@ -72,7 +72,7 @@ def get_user(tenant_id: str, user_id: str | None) -> EndUser:
return user_model
def get_user_tenant(view_func: Callable[P, R]):
def get_cli_user_tenant(view_func: Callable[P, R]):
@wraps(view_func)
def decorated_view(*args: P.args, **kwargs: P.kwargs):
session_id = request.headers.get("X-Cli-Api-Session-Id")
@ -83,6 +83,7 @@ def get_user_tenant(view_func: Callable[P, R]):
raise ValueError("session not found")
user_id = session.user_id
tenant_id = session.tenant_id
else:
payload = TenantUserPayload.model_validate(request.get_json(silent=True) or {})
user_id = payload.user_id

View File

@ -523,6 +523,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
SandboxLayer(
tenant_id=application_generate_entity.app_config.tenant_id,
app_id=application_generate_entity.app_config.app_id,
user_id=application_generate_entity.user_id,
workflow_version=workflow.version,
sandbox_id=application_generate_entity.workflow_run_id,
sandbox_storage=ArchiveSandboxStorage(

View File

@ -497,6 +497,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
SandboxLayer(
tenant_id=application_generate_entity.app_config.tenant_id,
app_id=application_generate_entity.app_config.app_id,
user_id=application_generate_entity.user_id,
workflow_version=workflow.version,
sandbox_id=application_generate_entity.workflow_execution_id,
sandbox_storage=ArchiveSandboxStorage(

View File

@ -109,6 +109,64 @@ class AppAssetFileTree(BaseModel):
current = self.get(current.parent_id) if current.parent_id else None
return "/" + "/".join(reversed(parts))
def relative_path(self, a: AppAssetNode, b: AppAssetNode) -> str:
    """
    Calculate relative path from node a to node b for Markdown references.

    Path is computed from a's parent directory (where the file resides).

    Examples:
        /foo/a.md -> /foo/b.md => ./b.md
        /foo/a.md -> /foo/sub/b.md => ./sub/b.md
        /foo/sub/a.md -> /foo/b.md => ../b.md
        /foo/sub/deep/a.md -> /foo/b.md => ../../b.md
    """

    def get_ancestor_ids(node_id: str | None) -> list[str]:
        # Walk parent links upward, returning [start, parent, grandparent, ...]
        # (nearest-first). Stops if a node id cannot be resolved by self.get.
        chain: list[str] = []
        current_id = node_id
        while current_id:
            chain.append(current_id)
            node = self.get(current_id)
            current_id = node.parent_id if node else None
        return chain

    a_dir_ancestors = get_ancestor_ids(a.parent_id)
    b_ancestors = [b.id] + get_ancestor_ids(b.parent_id)
    a_dir_set = set(a_dir_ancestors)

    # Lowest common ancestor: the first of b's ancestors (nearest-first) that
    # is also an ancestor of a's directory.
    # NOTE: the original code also tested `b_ancestors[idx:] == []` here, but a
    # slice starting at a valid enumerate index is never empty, so that clause
    # was dead and has been removed — behavior is unchanged.
    lca_id: str | None = None
    lca_index_in_b = -1
    for idx, ancestor_id in enumerate(b_ancestors):
        if ancestor_id in a_dir_set:
            lca_id = ancestor_id
            lca_index_in_b = idx
            break

    if a.parent_id is None:
        # a resides at the root: no climbing, descend through b's whole chain.
        steps_up = 0
        lca_index_in_b = len(b_ancestors)
    elif lca_id is None:
        # No common ancestor found: climb to the root, then descend fully.
        steps_up = len(a_dir_ancestors)
        lca_index_in_b = len(b_ancestors)
    else:
        # Count how many directories to climb from a's directory to the LCA.
        steps_up = 0
        for ancestor_id in a_dir_ancestors:
            if ancestor_id == lca_id:
                break
            steps_up += 1

    # Collect path components from just below the LCA down to b itself.
    path_down: list[str] = []
    for i in range(lca_index_in_b - 1, -1, -1):
        node = self.get(b_ancestors[i])
        if node:
            path_down.append(node.name)

    if steps_up == 0:
        return "./" + "/".join(path_down)
    parts: list[str] = [".."] * steps_up + path_down
    return "/".join(parts)
def get_descendant_ids(self, node_id: str) -> list[str]:
result: list[str] = []
stack = [node_id]

View File

@ -6,6 +6,7 @@ from core.virtual_environment.__base.virtual_environment import VirtualEnvironme
from core.workflow.graph_engine.layers.base import GraphEngineLayer
from core.workflow.graph_events.base import GraphEngineEvent
from core.workflow.graph_events.graph import GraphRunPausedEvent
from core.workflow.nodes.base.node import Node
from models.workflow import Workflow
from services.app_asset_service import AppAssetService
from services.sandbox.sandbox_provider_service import SandboxProviderService
@ -22,6 +23,7 @@ class SandboxLayer(GraphEngineLayer):
self,
tenant_id: str,
app_id: str,
user_id: str,
workflow_version: str,
sandbox_id: str,
sandbox_storage: SandboxStorage,
@ -29,6 +31,7 @@ class SandboxLayer(GraphEngineLayer):
super().__init__()
self._tenant_id = tenant_id
self._app_id = app_id
self._user_id = user_id
self._workflow_version = workflow_version
self._sandbox_id = sandbox_id
self._sandbox_storage = sandbox_storage
@ -48,6 +51,8 @@ class SandboxLayer(GraphEngineLayer):
raise ValueError(
f"No assets found for tid={self._tenant_id}, app_id={self._app_id}, wf={self._workflow_version}"
)
self._assets_id = assets.id
if is_draft:
logger.info(
"Building draft assets for tenant_id=%s, app_id=%s, workflow_version=%s, assets_id=%s",
@ -69,7 +74,7 @@ class SandboxLayer(GraphEngineLayer):
builder = (
SandboxProviderService.create_sandbox_builder(self._tenant_id)
.initializer(AppAssetsInitializer(self._tenant_id, self._app_id, assets.id))
.initializer(DifyCliInitializer(self._tenant_id, self._app_id, assets.id))
.initializer(DifyCliInitializer(self._tenant_id, self._user_id, self._app_id, assets.id))
)
sandbox = builder.build()
@ -88,6 +93,10 @@ class SandboxLayer(GraphEngineLayer):
logger.exception("Failed to initialize sandbox")
raise SandboxInitializationError(f"Failed to initialize sandbox: {e}") from e
def on_node_run_start(self, node: Node) -> None:
# FIXME(Mairuis): should read from workflow run context...
node.assets_id = self._assets_id
def on_event(self, event: GraphEngineEvent) -> None:
# TODO: handle graph run paused event
if not isinstance(event, GraphRunPausedEvent):

View File

@ -5,10 +5,8 @@ from .entities import (
SkillAsset,
SkillMetadata,
ToolConfiguration,
ToolDefinition,
ToolFieldConfig,
ToolReference,
ToolType,
)
from .packager import AssetPackager, ZipPackager
from .parser import AssetItemParser, AssetParser, FileAssetParser, SkillAssetParser
@ -27,9 +25,7 @@ __all__ = [
"SkillAssetParser",
"SkillMetadata",
"ToolConfiguration",
"ToolDefinition",
"ToolFieldConfig",
"ToolReference",
"ToolType",
"ZipPackager",
]

View File

@ -4,10 +4,8 @@ from .skill import (
SkillAsset,
SkillMetadata,
ToolConfiguration,
ToolDefinition,
ToolFieldConfig,
ToolReference,
ToolType,
)
__all__ = [
@ -17,8 +15,6 @@ __all__ = [
"SkillAsset",
"SkillMetadata",
"ToolConfiguration",
"ToolDefinition",
"ToolFieldConfig",
"ToolReference",
"ToolType",
]

View File

@ -1,17 +1,13 @@
from dataclasses import dataclass, field
from enum import StrEnum
from dataclasses import dataclass
from typing import Any
from pydantic import BaseModel, ConfigDict, Field
from core.tools.entities.tool_entities import ToolProviderType
from .assets import AssetItem
class ToolType(StrEnum):
MCP = "mcp"
BUILTIN = "builtin"
class ToolFieldConfig(BaseModel):
model_config = ConfigDict(extra="forbid")
@ -25,44 +21,39 @@ class ToolConfiguration(BaseModel):
fields: list[ToolFieldConfig] = Field(default_factory=list)
class ToolDefinition(BaseModel):
model_config = ConfigDict(extra="forbid")
type: ToolType
credential_id: str | None = None
configuration: ToolConfiguration = Field(default_factory=ToolConfiguration)
def default_values(self) -> dict[str, Any]:
return {field.id: field.value for field in self.fields if field.value is not None}
class ToolReference(BaseModel):
model_config = ConfigDict(extra="forbid")
provider: str
tool_name: str
uuid: str
raw: str
uuid: str = Field(description="Unique identifier for this tool reference")
type: ToolProviderType = Field(description="Tool provider type")
provider: str = Field(description="Tool provider")
tool_name: str = Field(description="Tool name")
credential_id: str | None = Field(default=None, description="Credential ID")
configuration: ToolConfiguration | None = Field(default=None, description="Tool configuration")
class FileReference(BaseModel):
model_config = ConfigDict(extra="forbid")
source: str
uuid: str
raw: str
source: str = Field(description="Source location or identifier of the file")
uuid: str = Field(description="Unique identifier for this file reference")
class SkillMetadata(BaseModel):
model_config = ConfigDict(extra="allow")
tools: dict[str, ToolDefinition] = Field(default_factory=dict)
tools: dict[str, ToolReference] = Field(default_factory=dict, description="Map of tool references by UUID")
files: list[FileReference] = Field(default_factory=list, description="List of file references")
@dataclass
class SkillAsset(AssetItem):
storage_key: str
metadata: SkillMetadata
tool_references: list[ToolReference] = field(default_factory=list)
file_references: list[FileReference] = field(default_factory=list)
def get_storage_key(self) -> str:
return self.storage_key

View File

@ -3,13 +3,14 @@ import logging
import re
from typing import Any
from core.app.entities.app_asset_entities import AppAssetFileTree, AppAssetNode
from core.app_assets.entities import (
FileReference,
SkillAsset,
SkillMetadata,
ToolReference,
)
from core.app_assets.entities.skill import FileReference, ToolConfiguration, ToolReference
from core.app_assets.paths import AssetPaths
from core.tools.entities.tool_entities import ToolProviderType
from extensions.ext_storage import storage
from .base import AssetItemParser
@ -26,10 +27,12 @@ class SkillAssetParser(AssetItemParser):
tenant_id: str,
app_id: str,
assets_id: str,
tree: AppAssetFileTree,
) -> None:
self._tenant_id = tenant_id
self._app_id = app_id
self._assets_id = assets_id
self._tree = tree
def parse(
self,
@ -42,7 +45,7 @@ class SkillAssetParser(AssetItemParser):
try:
return self._parse_skill_asset(node_id, path, file_name, extension, storage_key)
except Exception:
logger.exception("Failed to parse skill asset %s: %s", node_id)
logger.exception("Failed to parse skill asset %s", node_id)
# handle as plain text
return SkillAsset(
node_id=node_id,
@ -51,8 +54,6 @@ class SkillAssetParser(AssetItemParser):
extension=extension,
storage_key=storage_key,
metadata=SkillMetadata(),
tool_references=[],
file_references=[],
)
def _parse_skill_asset(
@ -69,8 +70,6 @@ class SkillAssetParser(AssetItemParser):
extension=extension,
storage_key=storage_key,
metadata=SkillMetadata(),
tool_references=[],
file_references=[],
)
if not isinstance(data, dict):
@ -83,14 +82,13 @@ class SkillAssetParser(AssetItemParser):
if not isinstance(content, str):
raise ValueError(f"Skill document {node_id} 'content' must be a string")
metadata = SkillMetadata.model_validate(metadata_raw)
tool_references: list[ToolReference] = self._parse_tool_references(content)
file_references: list[FileReference] = self._parse_file_references(content)
resolved_content = self._resolve_content(content, tool_references, file_references)
resolved_key = AssetPaths.build_resolved_file(self._tenant_id, self._app_id, self._assets_id, node_id)
storage.save(resolved_key, resolved_content.encode("utf-8"))
current_file = self._tree.get(node_id)
if current_file is None:
raise ValueError(f"File not found for id={node_id}")
metadata = self._resolve_metadata(content, metadata_raw)
storage.save(resolved_key, self._resolve_content(current_file, content, metadata).encode("utf-8"))
return SkillAsset(
node_id=node_id,
@ -99,48 +97,65 @@ class SkillAssetParser(AssetItemParser):
extension=extension,
storage_key=resolved_key,
metadata=metadata,
tool_references=tool_references,
file_references=file_references,
)
def _resolve_content(
self,
content: str,
tool_references: list[ToolReference],
file_references: list[FileReference],
) -> str:
for ref in tool_references:
replacement = f"{ref.tool_name}"
content = content.replace(ref.raw, replacement)
for ref in file_references:
replacement = f"[file:{ref.uuid}]"
content = content.replace(ref.raw, replacement)
def _resolve_content(self, current_file: AppAssetNode, content: str, metadata: SkillMetadata) -> str:
for match in FILE_REFERENCE_PATTERN.finditer(content):
# replace with file relative path
file_id = match.group(2)
file = self._tree.get(file_id)
if file is None:
logger.warning("File not found for id=%s, skipping", file_id)
# replace with file not found placeholder
content = content.replace(match.group(0), "[File not found]")
continue
content = content.replace(match.group(0), self._tree.relative_path(current_file, file))
for match in TOOL_REFERENCE_PATTERN.finditer(content):
tool_id = match.group(3)
tool = metadata.tools.get(tool_id)
if tool is None:
logger.warning("Tool not found for id=%s, skipping", tool_id)
# replace with tool not found placeholder
content = content.replace(match.group(0), f"[Tool not found: {tool_id}]")
continue
content = content.replace(match.group(0), f"[Bash Command: {tool.tool_name}_{tool_id}]")
return content
def _parse_tool_references(self, content: str) -> list[ToolReference]:
tool_references: list[ToolReference] = []
for match in TOOL_REFERENCE_PATTERN.finditer(content):
tool_references.append(
ToolReference(
provider=match.group(1),
tool_name=match.group(2),
uuid=match.group(3),
raw=match.group(0),
)
)
return tool_references
def _parse_file_references(self, content: str) -> list[FileReference]:
def _resolve_file_references(self, content: str) -> list[FileReference]:
file_references: list[FileReference] = []
for match in FILE_REFERENCE_PATTERN.finditer(content):
file_references.append(
FileReference(
source=match.group(1),
uuid=match.group(2),
raw=match.group(0),
)
)
return file_references
def _resolve_tool_references(self, content: str, tools: dict[str, Any]) -> dict[str, ToolReference]:
tool_references: dict[str, ToolReference] = {}
for match in TOOL_REFERENCE_PATTERN.finditer(content):
tool_id = match.group(3)
tool_name = match.group(2)
tool_provider = match.group(1)
metadata = tools.get(tool_id)
if metadata is None:
raise ValueError(f"Tool metadata for {tool_id} not found")
configuration = ToolConfiguration.model_validate(metadata.get("configuration", {}))
tool_references[tool_id] = ToolReference(
uuid=tool_id,
type=ToolProviderType.value_of(metadata.get("type", None)),
provider=tool_provider,
tool_name=tool_name,
credential_id=metadata.get("credential_id", None),
configuration=configuration,
)
return tool_references
def _resolve_metadata(self, content: str, metadata: dict[str, Any]) -> SkillMetadata:
return SkillMetadata(
files=self._resolve_file_references(content=content),
tools=self._resolve_tool_references(content=content, tools=metadata.get("tools", {})),
)

View File

@ -7,12 +7,12 @@ class AssetPaths:
@staticmethod
def build_zip(tenant_id: str, app_id: str, assets_id: str) -> str:
return f"{AssetPaths._BASE}/{tenant_id}/{app_id}/build/{assets_id}.zip"
return f"{AssetPaths._BASE}/{tenant_id}/{app_id}/artifacts/{assets_id}.zip"
@staticmethod
def build_resolved_file(tenant_id: str, app_id: str, assets_id: str, node_id: str) -> str:
return f"{AssetPaths._BASE}/{tenant_id}/{app_id}/build/{assets_id}/resolved/{node_id}"
return f"{AssetPaths._BASE}/{tenant_id}/{app_id}/artifacts/{assets_id}/resolved/{node_id}"
@staticmethod
def build_tool_manifest(tenant_id: str, app_id: str, assets_id: str) -> str:
return f"{AssetPaths._BASE}/{tenant_id}/{app_id}/build/{assets_id}/tools.json"
def build_tool_artifact(tenant_id: str, app_id: str, assets_id: str) -> str:
return f"{AssetPaths._BASE}/{tenant_id}/{app_id}/artifacts/{assets_id}/tool_artifact.json"

View File

@ -5,6 +5,7 @@ from .bash.dify_cli import (
DifyCliLocator,
DifyCliToolConfig,
)
from .bash.session import SandboxBashSession
from .constants import (
APP_ASSETS_PATH,
APP_ASSETS_ZIP_PATH,
@ -17,7 +18,6 @@ from .constants import (
)
from .initializer import AppAssetsInitializer, DifyCliInitializer, SandboxInitializer
from .manager import SandboxManager
from .session import SandboxSession
from .storage import ArchiveSandboxStorage, SandboxStorage
from .utils.debug import sandbox_debug
from .utils.encryption import create_sandbox_config_encrypter, masked_config
@ -40,10 +40,10 @@ __all__ = [
"DifyCliInitializer",
"DifyCliLocator",
"DifyCliToolConfig",
"SandboxBashSession",
"SandboxBuilder",
"SandboxInitializer",
"SandboxManager",
"SandboxSession",
"SandboxStorage",
"SandboxType",
"VMConfig",

View File

@ -0,0 +1 @@
# refactor the package import paths

View File

@ -1,6 +1,7 @@
from collections.abc import Generator
from typing import Any
from core.sandbox.constants import DIFY_CLI_CONFIG_FILENAME
from core.tools.__base.tool import Tool
from core.tools.__base.tool_runtime import ToolRuntime
from core.tools.entities.common_entities import I18nObject
@ -21,7 +22,7 @@ COMMAND_TIMEOUT_SECONDS = 60
class SandboxBashTool(Tool):
def __init__(self, sandbox: VirtualEnvironment, tenant_id: str, tools_path: str) -> None:
def __init__(self, sandbox: VirtualEnvironment, tenant_id: str, tools_path: str | None = None) -> None:
self._sandbox = sandbox
self._tools_path = tools_path
@ -72,10 +73,20 @@ class SandboxBashTool(Tool):
try:
with with_connection(self._sandbox) as conn:
cmd_list = ["bash", "-c", command]
env_vars = {"PATH": f"{self._tools_path}:/usr/local/bin:/usr/bin:/bin"}
sandbox_debug("bash_tool", "cmd_list", cmd_list)
future = submit_command(self._sandbox, conn, cmd_list, environments=env_vars)
environments: dict[str, str] | None = None
if self._tools_path:
environments = {
"PATH": f"{self._tools_path}:/usr/local/bin:/usr/bin:/bin",
"DIFY_CLI_CONFIG": self._tools_path + f"/{DIFY_CLI_CONFIG_FILENAME}",
}
future = submit_command(
self._sandbox,
conn,
cmd_list,
environments=environments,
)
timeout = COMMAND_TIMEOUT_SECONDS if COMMAND_TIMEOUT_SECONDS > 0 else None
result = future.result(timeout=timeout)

View File

@ -5,9 +5,13 @@ from typing import TYPE_CHECKING, Any
from pydantic import BaseModel, Field
from core.app.entities.app_invoke_entities import InvokeFrom
from core.app_assets.entities import ToolReference
from core.model_runtime.utils.encoders import jsonable_encoder
from core.session.cli_api import CliApiSession
from core.skill.entities import ToolArtifact
from core.tools.entities.tool_entities import ToolParameter, ToolProviderType
from core.tools.tool_manager import ToolManager
from core.virtual_environment.__base.entities import Arch, OperatingSystem
from ..constants import DIFY_CLI_PATH_PATTERN
@ -100,16 +104,51 @@ class DifyCliToolConfig(BaseModel):
return transformed_parameter
class DifyCliToolReference(BaseModel):
id: str
tool_name: str
tool_provider: str
credential_id: str | None = None
default_value: dict[str, Any] | None = None
@classmethod
def create_from_tool_reference(cls, reference: ToolReference) -> DifyCliToolReference:
return cls(
id=reference.uuid,
tool_name=reference.tool_name,
tool_provider=reference.provider,
credential_id=reference.credential_id,
default_value=reference.configuration.default_values() if reference.configuration else None,
)
class DifyCliConfig(BaseModel):
env: DifyCliEnvConfig
tool_references: list[DifyCliToolReference]
tools: list[DifyCliToolConfig]
@classmethod
def create(cls, session: CliApiSession, tools: list[Tool]) -> DifyCliConfig:
def create(
cls,
session: CliApiSession,
tenant_id: str,
artifact: ToolArtifact,
) -> DifyCliConfig:
from configs import dify_config
cli_api_url = dify_config.CLI_API_URL
tools: list[Tool] = []
for dependency in artifact.dependencies:
tool = ToolManager.get_tool_runtime(
tenant_id=tenant_id,
provider_type=dependency.type,
provider_id=dependency.provider,
tool_name=dependency.tool_name,
invoke_from=InvokeFrom.AGENT,
)
tools.append(tool)
return cls(
env=DifyCliEnvConfig(
files_url=dify_config.FILES_URL,
@ -117,6 +156,7 @@ class DifyCliConfig(BaseModel):
cli_api_session_id=session.id,
cli_api_secret=session.secret,
),
tool_references=[DifyCliToolReference.create_from_tool_reference(ref) for ref in artifact.references],
tools=[DifyCliToolConfig.create_from_tool(tool) for tool in tools],
)
@ -131,4 +171,5 @@ __all__ = [
"DifyCliEnvConfig",
"DifyCliLocator",
"DifyCliToolConfig",
"DifyCliToolReference",
]

View File

@ -0,0 +1,158 @@
from __future__ import annotations
import json
import logging
from io import BytesIO
from types import TracebackType
from core.session.cli_api import CliApiSessionManager
from core.skill.entities.tool_artifact import ToolArtifact
from core.skill.skill_manager import SkillManager
from core.virtual_environment.__base.helpers import execute, with_connection
from core.virtual_environment.__base.virtual_environment import VirtualEnvironment
from ..bash.dify_cli import DifyCliConfig
from ..constants import (
DIFY_CLI_CONFIG_FILENAME,
DIFY_CLI_GLOBAL_TOOLS_PATH,
DIFY_CLI_PATH,
DIFY_CLI_TOOLS_ROOT,
)
from ..manager import SandboxManager
from .bash_tool import SandboxBashTool
logger = logging.getLogger(__name__)
class SandboxBashSession:
    """
    Context manager binding a bash tool to an already-running sandbox.

    On enter, looks up the sandbox by workflow execution id, optionally
    provisions a per-node tools directory (uploads the Dify CLI config and
    runs `dify init`), and exposes a ready :class:`SandboxBashTool`.
    On exit, deletes the CLI API session it created, if any.
    """

    def __init__(
        self,
        *,
        workflow_execution_id: str,
        tenant_id: str,
        user_id: str,
        node_id: str,
        app_id: str,
        assets_id: str,
        allow_tools: list[tuple[str, str]] | None,
    ) -> None:
        self._workflow_execution_id = workflow_execution_id
        self._tenant_id = tenant_id
        self._user_id = user_id
        self._node_id = node_id
        self._app_id = app_id
        # FIXME(Mairuis): should read from workflow run context...
        self._assets_id = assets_id
        # (provider, tool_name) pairs the node is allowed to use;
        # None means "use the global tools directory".
        self._allow_tools = allow_tools
        self._sandbox = None
        self._bash_tool = None
        # Id of the CLI API session created on demand; cleanup() guards on it.
        self._session_id = None

    def __enter__(self) -> SandboxBashSession:
        sandbox = SandboxManager.get(self._workflow_execution_id)
        if sandbox is None:
            raise RuntimeError(f"Sandbox not found for workflow_execution_id={self._workflow_execution_id}")
        self._sandbox = sandbox
        if self._allow_tools is not None:
            if self._node_id is None:
                raise ValueError("node_id is required when allow_tools is specified")
            # May return None when the artifact has no (allowed) tools, in
            # which case the bash tool runs without a tools PATH entry.
            tools_path = self._setup_node_tools_directory(sandbox, self._node_id, self._allow_tools)
        else:
            tools_path = DIFY_CLI_GLOBAL_TOOLS_PATH
        self._bash_tool = SandboxBashTool(sandbox=sandbox, tenant_id=self._tenant_id, tools_path=tools_path)
        return self

    def _setup_node_tools_directory(
        self,
        sandbox: VirtualEnvironment,
        node_id: str,
        allow_tools: list[tuple[str, str]],
    ) -> str | None:
        """
        Provision `{DIFY_CLI_TOOLS_ROOT}/{node_id}` inside the sandbox with a
        Dify CLI config restricted to *allow_tools*, and return that path.
        Returns None (no tools directory) when the artifact is empty.
        """
        with with_connection(sandbox) as conn:
            artifact: ToolArtifact | None = SkillManager.load_tool_artifact(
                self._tenant_id,
                self._app_id,
                self._assets_id,
            )
            if artifact is None or artifact.is_empty():
                logger.info("No tools found in artifact for assets_id=%s", self._assets_id)
                return None
            # Keep only the tools this node is explicitly allowed to use.
            artifact = artifact.filter(allow_tools)
            if artifact.is_empty():
                logger.info("No tools found in artifact for assets_id=%s", self._assets_id)
                return None
            self._cli_api_session = CliApiSessionManager().create(tenant_id=self._tenant_id, user_id=self._user_id)
            # Record the session id so cleanup()/__exit__ can delete it.
            # Without this, the CLI API session created here is never released
            # (cleanup() guards on self._session_id, which stayed None).
            self._session_id = self._cli_api_session.id
            execute(
                sandbox,
                ["mkdir", "-p", DIFY_CLI_GLOBAL_TOOLS_PATH],
                connection=conn,
                error_message="Failed to create Dify CLI global tools directory",
            )
            execute(
                sandbox,
                ["mkdir", "-p", f"{DIFY_CLI_TOOLS_ROOT}/{node_id}"],
                connection=conn,
                error_message="Failed to create Dify CLI node tools directory",
            )
            config_json = json.dumps(
                DifyCliConfig.create(
                    session=self._cli_api_session, tenant_id=self._tenant_id, artifact=artifact
                ).model_dump(mode="json"),
                ensure_ascii=False,
            )
            sandbox.upload_file(
                f"{DIFY_CLI_TOOLS_ROOT}/{node_id}/{DIFY_CLI_CONFIG_FILENAME}", BytesIO(config_json.encode("utf-8"))
            )
            execute(
                sandbox,
                [DIFY_CLI_PATH, "init"],
                connection=conn,
                cwd=f"{DIFY_CLI_TOOLS_ROOT}/{node_id}",
                error_message="Failed to initialize Dify CLI",
            )
            logger.info(
                "Node %s tools initialized, path=%s, tool_count=%d",
                node_id,
                f"{DIFY_CLI_TOOLS_ROOT}/{node_id}",
                len(artifact.references),
            )
            return f"{DIFY_CLI_TOOLS_ROOT}/{node_id}"

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        tb: TracebackType | None,
    ) -> bool:
        try:
            self.cleanup()
        except Exception:
            logger.exception("Failed to cleanup SandboxSession")
        # Never suppress the caller's exception.
        return False

    @property
    def bash_tool(self) -> SandboxBashTool:
        if self._bash_tool is None:
            raise RuntimeError("SandboxSession is not initialized")
        return self._bash_tool

    def cleanup(self) -> None:
        """Delete the CLI API session created for this sandbox session, if any."""
        if self._session_id is None:
            return
        CliApiSessionManager().delete(self._session_id)
        logger.debug("Cleaned up SandboxSession session_id=%s", self._session_id)
        self._session_id = None

View File

@ -12,5 +12,5 @@ DIFY_CLI_TOOLS_ROOT: Final[str] = "/tmp/.dify/tools"
DIFY_CLI_GLOBAL_TOOLS_PATH: Final[str] = "/tmp/.dify/tools/global"
# App Assets (relative path - stays in sandbox workdir)
APP_ASSETS_PATH: Final[str] = "assets"
APP_ASSETS_PATH: Final[str] = "skills"
APP_ASSETS_ZIP_PATH: Final[str] = "/tmp/assets.zip"

View File

@ -5,14 +5,8 @@ import logging
from io import BytesIO
from pathlib import Path
from core.app.entities.app_invoke_entities import InvokeFrom
from core.app_assets.entities import ToolType
from core.session.cli_api import CliApiSessionManager
from core.skill.entities import ToolManifest
from core.skill.skill_manager import SkillManager
from core.tools.__base.tool import Tool
from core.tools.entities.tool_entities import ToolProviderType
from core.tools.tool_manager import ToolManager
from core.virtual_environment.__base.helpers import execute, with_connection
from core.virtual_environment.__base.virtual_environment import VirtualEnvironment
@ -32,12 +26,14 @@ class DifyCliInitializer(SandboxInitializer):
def __init__(
self,
tenant_id: str,
user_id: str,
app_id: str,
assets_id: str,
cli_root: str | Path | None = None,
) -> None:
self._tenant_id = tenant_id
self._app_id = app_id
self._user_id = user_id
self._assets_id = assets_id
self._locator = DifyCliLocator(root=cli_root)
@ -52,7 +48,6 @@ class DifyCliInitializer(SandboxInitializer):
env,
["mkdir", "-p", f"{DIFY_CLI_ROOT}/bin"],
connection=conn,
timeout=10,
error_message="Failed to create dify CLI directory",
)
@ -62,36 +57,33 @@ class DifyCliInitializer(SandboxInitializer):
env,
["chmod", "+x", DIFY_CLI_PATH],
connection=conn,
timeout=10,
error_message="Failed to mark dify CLI as executable",
)
logger.info("Dify CLI uploaded to sandbox, path=%s", DIFY_CLI_PATH)
manifest = SkillManager.load_tool_manifest(
artifact = SkillManager.load_tool_artifact(
self._tenant_id,
self._app_id,
self._assets_id,
)
if manifest is None or not manifest.tools:
logger.info("No tools found in manifest for assets_id=%s", self._assets_id)
if artifact is None or not artifact.references:
logger.info("No tools found in artifact for assets_id=%s", self._assets_id)
return
self._tools = self._resolve_tools_from_manifest(manifest)
self._cli_api_session = CliApiSessionManager().create(tenant_id=self._tenant_id, user_id="system")
# FIXME(Mairuis): store it in workflow context
self._cli_api_session = CliApiSessionManager().create(tenant_id=self._tenant_id, user_id=self._user_id)
execute(
env,
["mkdir", "-p", DIFY_CLI_GLOBAL_TOOLS_PATH],
connection=conn,
timeout=10,
error_message="Failed to create global tools directory",
error_message="Failed to create Dify CLI global tools directory",
)
config_json = json.dumps(
DifyCliConfig.create(self._cli_api_session, self._tools).model_dump(mode="json"), ensure_ascii=False
)
config = DifyCliConfig.create(self._cli_api_session, self._tenant_id, artifact)
config_json = json.dumps(config.model_dump(mode="json"), ensure_ascii=False)
env.upload_file(
f"{DIFY_CLI_GLOBAL_TOOLS_PATH}/{DIFY_CLI_CONFIG_FILENAME}", BytesIO(config_json.encode("utf-8"))
)
@ -100,7 +92,6 @@ class DifyCliInitializer(SandboxInitializer):
env,
[DIFY_CLI_PATH, "init"],
connection=conn,
timeout=30,
cwd=DIFY_CLI_GLOBAL_TOOLS_PATH,
error_message="Failed to initialize Dify CLI",
)
@ -110,38 +101,3 @@ class DifyCliInitializer(SandboxInitializer):
DIFY_CLI_GLOBAL_TOOLS_PATH,
len(self._tools),
)
def _resolve_tools_from_manifest(self, manifest: ToolManifest) -> list[Tool]:
tools: list[Tool] = []
for entry in manifest.tools.values():
if entry.provider is None or entry.tool_name is None:
logger.warning("Skipping tool entry with missing provider or tool_name: %s", entry.uuid)
continue
try:
provider_type = self._convert_tool_type(entry.type)
tool = ToolManager.get_tool_runtime(
tenant_id=self._tenant_id,
provider_type=provider_type,
provider_id=entry.provider,
tool_name=entry.tool_name,
invoke_from=InvokeFrom.AGENT,
credential_id=entry.credential_id,
)
tools.append(tool)
except Exception as e:
logger.warning("Failed to resolve tool %s/%s: %s", entry.provider, entry.tool_name, e)
continue
return tools
@staticmethod
def _convert_tool_type(tool_type: ToolType) -> ToolProviderType:
match tool_type:
case ToolType.BUILTIN:
return ToolProviderType.BUILT_IN
case ToolType.MCP:
return ToolProviderType.MCP
case _:
raise ValueError(f"Unsupported tool type: {tool_type}")

View File

@ -1,112 +0,0 @@
from __future__ import annotations
import logging
from types import TracebackType
from typing import TYPE_CHECKING
from core.session.cli_api import CliApiSessionManager
from core.virtual_environment.__base.virtual_environment import VirtualEnvironment
from .constants import (
DIFY_CLI_GLOBAL_TOOLS_PATH,
)
from .manager import SandboxManager
if TYPE_CHECKING:
from .bash.bash_tool import SandboxBashTool
logger = logging.getLogger(__name__)
class SandboxSession:
    """Context-managed handle to a sandbox VM for one workflow execution.

    On ``__enter__`` this looks up the sandbox previously registered under
    ``workflow_execution_id`` (it does NOT create one) and wires up a bash
    tool bound to a tools directory inside the sandbox. On ``__exit__`` it
    performs best-effort cleanup of the CLI API session, never suppressing
    the caller's exception.
    """

    # Declared instance attributes (assigned in __init__ / __enter__).
    _workflow_execution_id: str
    _tenant_id: str
    _user_id: str
    _node_id: str | None
    _allow_tools: list[str] | None  # tool names to expose; None => global tools
    _sandbox: VirtualEnvironment | None
    _bash_tool: SandboxBashTool | None
    _session_id: str | None  # CLI API session id; only set once a session exists
    _tools_path: str  # path inside the sandbox where CLI tools are mounted

    def __init__(
        self,
        *,
        workflow_execution_id: str,
        tenant_id: str,
        user_id: str,
        node_id: str | None = None,
        allow_tools: list[str] | None = None,
    ) -> None:
        """Record session parameters; no sandbox interaction happens here.

        :param workflow_execution_id: key used to find the pre-created sandbox
        :param tenant_id: tenant owning the execution
        :param user_id: user on whose behalf tools are invoked
        :param node_id: workflow node id; required when allow_tools is given
        :param allow_tools: restrict the sandbox to these tools (None = all)
        """
        self._workflow_execution_id = workflow_execution_id
        self._tenant_id = tenant_id
        self._user_id = user_id
        self._node_id = node_id
        self._allow_tools = allow_tools
        self._sandbox = None
        self._bash_tool = None
        self._session_id = None
        self._tools_path = DIFY_CLI_GLOBAL_TOOLS_PATH

    def __enter__(self) -> SandboxSession:
        """Attach to the registered sandbox and build the bash tool.

        :raises RuntimeError: if no sandbox exists for this execution id
        :raises ValueError: if allow_tools is set without a node_id
        """
        sandbox = SandboxManager.get(self._workflow_execution_id)
        if sandbox is None:
            raise RuntimeError(f"Sandbox not found for workflow_execution_id={self._workflow_execution_id}")
        self._sandbox = sandbox
        if self._allow_tools is not None:
            # TODO: Implement node tools directory setup
            if self._node_id is None:
                raise ValueError("node_id is required when allow_tools is specified")
            # NOTE(review): until the per-node setup below is implemented, a
            # tool-restricted session still falls through with the global
            # tools path assigned in __init__.
            # self._tools_path = self._setup_node_tools_directory(sandbox, self._node_id, self._allow_tools)
        else:
            self._tools_path = DIFY_CLI_GLOBAL_TOOLS_PATH
        # Imported lazily to avoid a circular import at module load time.
        from .bash.bash_tool import SandboxBashTool

        self._bash_tool = SandboxBashTool(sandbox=sandbox, tenant_id=self._tenant_id, tools_path=self._tools_path)
        return self

    def _setup_node_tools_directory(
        self,
        sandbox: VirtualEnvironment,
        node_id: str,
        allow_tools: list[str],
    ) -> None:
        # Stub: will materialize a per-node tools directory restricted to
        # allow_tools. The commented-out caller in __enter__ expects a path
        # back, so the -> None annotation will need updating when implemented.
        pass

    @staticmethod
    def _get_tool_name_from_config(tool_config: dict) -> str:
        """Build the canonical "<provider>__<name>" key from a tool config's identity section."""
        identity = tool_config.get("identity", {})
        provider = identity.get("provider", "")
        name = identity.get("name", "")
        return f"{provider}__{name}"

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        tb: TracebackType | None,
    ) -> bool:
        """Best-effort cleanup; always returns False so exceptions propagate."""
        try:
            self.cleanup()
        except Exception:
            # Cleanup failures must not mask the original exception.
            logger.exception("Failed to cleanup SandboxSession")
        return False

    @property
    def bash_tool(self) -> SandboxBashTool:
        """The bash tool bound to this session.

        :raises RuntimeError: if accessed outside the ``with`` block
        """
        if self._bash_tool is None:
            raise RuntimeError("SandboxSession is not initialized")
        return self._bash_tool

    def cleanup(self) -> None:
        """Delete the CLI API session, if any. Safe to call repeatedly."""
        if self._session_id is None:
            return
        CliApiSessionManager().delete(self._session_id)
        logger.debug("Cleaned up SandboxSession session_id=%s", self._session_id)
        self._session_id = None

View File

@ -1,8 +1,11 @@
from .entities import ToolManifest, ToolManifestEntry
from core.app_assets.entities import ToolReference
from .entities import ToolArtifact, ToolDependency
from .skill_manager import SkillManager
__all__ = [
"SkillManager",
"ToolManifest",
"ToolManifestEntry",
"ToolArtifact",
"ToolDependency",
"ToolReference",
]

View File

@ -1,6 +1,6 @@
from .tool_manifest import ToolManifest, ToolManifestEntry
from .tool_artifact import ToolArtifact, ToolDependency
__all__ = [
"ToolManifest",
"ToolManifestEntry",
"ToolArtifact",
"ToolDependency",
]

View File

@ -0,0 +1,45 @@
from pydantic import BaseModel, ConfigDict, Field
from core.app_assets.entities import ToolReference
from core.tools.entities.tool_entities import ToolProviderType
class ToolDependency(BaseModel):
    """A single tool a skill depends on, identified by provider and tool name."""

    model_config = ConfigDict(extra="forbid")

    # Provider category of the tool (values defined by ToolProviderType).
    type: ToolProviderType
    provider: str
    tool_name: str
class ToolArtifact(BaseModel):
    """Aggregated tool requirements for a skill bundle.

    ``dependencies`` lists the distinct tools the bundle needs;
    ``references`` lists every usage site (one per referencing asset entry).
    """

    model_config = ConfigDict(extra="forbid")

    dependencies: list[ToolDependency] = Field(default_factory=list, description="List of tool dependencies")
    references: list[ToolReference] = Field(default_factory=list, description="List of tool references")

    def is_empty(self) -> bool:
        """Return True when the artifact has neither dependencies nor references."""
        return not self.dependencies and not self.references

    def filter(self, tools: list[tuple[str, str]]) -> "ToolArtifact":
        """
        Filter the tool artifact to only include the given tools

        :param tools: Tuple of (provider, tool_name)
        :return: Filtered tool artifact
        """
        # Keys use the same "provider.tool_name" shape as the entries below.
        tool_names = {f"{provider}.{tool_name}" for provider, tool_name in tools}
        return ToolArtifact(
            dependencies=[
                dependency
                for dependency in self.dependencies
                if f"{dependency.provider}.{dependency.tool_name}" in tool_names
            ],
            references=[
                reference
                for reference in self.references
                if f"{reference.provider}.{reference.tool_name}" in tool_names
            ],
        )

View File

@ -1,23 +0,0 @@
from typing import Any
from pydantic import BaseModel, ConfigDict, Field
from core.app_assets.entities import ToolType
class ToolManifestEntry(BaseModel):
    """One tool entry in a skill's manifest, keyed by its asset-local uuid."""

    model_config = ConfigDict(extra="forbid")

    uuid: str
    # Tool kind (values defined by the app-assets ToolType enum).
    type: ToolType
    # Provider / tool name are optional: unresolved references leave them None.
    provider: str | None = None
    tool_name: str | None = None
    credential_id: str | None = None
    configuration: dict[str, Any] | None = None
class ToolManifest(BaseModel):
    """Collection of tool entries plus the raw reference strings that produced them."""

    model_config = ConfigDict(extra="forbid")

    # uuid -> entry; empty by default.
    tools: dict[str, ToolManifestEntry] = Field(default_factory=dict)
    references: list[str] = Field(default_factory=list)

View File

@ -1,64 +1,57 @@
from core.app_assets.entities import SkillAsset
from core.app_assets.entities.skill import ToolReference
from core.app_assets.paths import AssetPaths
from core.skill.entities.tool_artifact import ToolDependency
from extensions.ext_storage import storage
from .entities import ToolManifest, ToolManifestEntry
from .entities import ToolArtifact
class SkillManager:
@staticmethod
def generate_tool_manifest(assets: list[SkillAsset]) -> ToolManifest:
tools: dict[str, ToolManifestEntry] = {}
references: list[str] = []
def generate_tool_artifact(assets: list[SkillAsset]) -> ToolArtifact:
# provider + tool_name -> ToolDependency
dependencies: dict[str, ToolDependency] = {}
references: list[ToolReference] = []
for asset in assets:
manifest = SkillManager._collect_asset_manifest(asset)
tools.update(manifest.tools)
references.extend(manifest.references)
for id, tool in asset.metadata.tools.items():
dependencies[f"{tool.provider}.{tool.tool_name}"] = ToolDependency(
type=tool.type,
provider=tool.provider,
tool_name=tool.tool_name,
)
return ToolManifest(tools=tools, references=references)
references.append(
ToolReference(
uuid=id,
type=tool.type,
provider=tool.provider,
tool_name=tool.tool_name,
)
)
return ToolArtifact(dependencies=list(dependencies.values()), references=references)
@staticmethod
def save_tool_manifest(
def save_tool_artifact(
tenant_id: str,
app_id: str,
assets_id: str,
manifest: ToolManifest,
artifact: ToolArtifact,
) -> None:
if not manifest.tools:
return
key = AssetPaths.build_tool_manifest(tenant_id, app_id, assets_id)
storage.save(key, manifest.model_dump_json(indent=2).encode("utf-8"))
key = AssetPaths.build_tool_artifact(tenant_id, app_id, assets_id)
storage.save(key, artifact.model_dump_json(indent=2).encode("utf-8"))
@staticmethod
def load_tool_manifest(
def load_tool_artifact(
tenant_id: str,
app_id: str,
assets_id: str,
) -> ToolManifest | None:
key = AssetPaths.build_tool_manifest(tenant_id, app_id, assets_id)
) -> ToolArtifact | None:
key = AssetPaths.build_tool_artifact(tenant_id, app_id, assets_id)
try:
data = storage.load_once(key)
return ToolManifest.model_validate_json(data)
return ToolArtifact.model_validate_json(data)
except Exception:
return None
@staticmethod
def _collect_asset_manifest(asset: SkillAsset) -> ToolManifest:
tools: dict[str, ToolManifestEntry] = {}
for uuid, tool_def in asset.metadata.tools.items():
ref = next((r for r in asset.tool_references if r.uuid == uuid), None)
tools[uuid] = ToolManifestEntry(
uuid=uuid,
type=tool_def.type,
provider=ref.provider if ref else None,
tool_name=ref.tool_name if ref else None,
credential_id=tool_def.credential_id,
configuration=tool_def.configuration.model_dump() if tool_def.configuration.fields else None,
)
references = [ref.raw for ref in asset.tool_references]
return ToolManifest(tools=tools, references=references)

View File

@ -50,7 +50,7 @@ from core.model_runtime.utils.encoders import jsonable_encoder
from core.prompt.entities.advanced_prompt_entities import CompletionModelPromptTemplate, MemoryConfig
from core.prompt.utils.prompt_message_util import PromptMessageUtil
from core.rag.entities.citation_metadata import RetrievalSourceMetadata
from core.sandbox import SandboxManager, SandboxSession
from core.sandbox import SandboxBashSession, SandboxManager
from core.tools.__base.tool import Tool
from core.tools.signature import sign_upload_file
from core.tools.tool_manager import ToolManager
@ -1580,16 +1580,15 @@ class LLMNode(Node[LLMNodeData]):
result = yield from self._process_tool_outputs(outputs)
return result
def _get_allow_tools_list(self) -> list[str] | None:
def _get_allow_tools_list(self) -> list[tuple[str, str]] | None:
if not self._node_data.tools:
return None
allow_tools = []
for tool in self._node_data.tools:
if tool.enabled:
tool_name = f"{tool.tool_name}"
allow_tools.append(tool_name)
if not tool.enabled:
continue
allow_tools.append((tool.provider_name, tool.tool_name))
return allow_tools or None
def _invoke_llm_with_sandbox(
@ -1607,11 +1606,14 @@ class LLMNode(Node[LLMNodeData]):
result: LLMGenerationData | None = None
with SandboxSession(
with SandboxBashSession(
workflow_execution_id=workflow_execution_id,
tenant_id=self.tenant_id,
user_id=self.user_id,
node_id=self.id,
app_id=self.app_id,
# FIXME(Mairuis): should read from workflow run context...
assets_id=getattr(self, "assets_id", ""),
allow_tools=allow_tools,
) as sandbox_session:
prompt_files = self._extract_prompt_files(variable_pool)

View File

@ -312,15 +312,15 @@ class AppAssetService:
)
assets = parser.parse()
manifest = SkillManager.generate_tool_manifest(
artifact = SkillManager.generate_tool_artifact(
assets=[asset for asset in assets if isinstance(asset, SkillAsset)]
)
SkillManager.save_tool_manifest(
SkillManager.save_tool_artifact(
tenant_id,
app_id,
publish_id,
manifest,
artifact,
)
# TODO: use VM zip packager and make this process async
@ -341,19 +341,19 @@ class AppAssetService:
parser = AssetParser(tree, tenant_id, app_id)
parser.register(
"md",
SkillAssetParser(tenant_id, app_id, assets.id),
SkillAssetParser(tenant_id, app_id, assets.id, tree),
)
parsed_assets = parser.parse()
manifest = SkillManager.generate_tool_manifest(
artifact = SkillManager.generate_tool_artifact(
assets=[asset for asset in parsed_assets if isinstance(asset, SkillAsset)]
)
SkillManager.save_tool_manifest(
SkillManager.save_tool_artifact(
tenant_id,
app_id,
assets.id,
manifest,
artifact,
)
packager = ZipPackager(storage)

View File

@ -702,11 +702,19 @@ class WorkflowService:
single_step_execution_id: str | None = None
if draft_workflow.get_feature(WorkflowFeatures.SANDBOX).enabled:
from core.sandbox import AppAssetsInitializer, DifyCliInitializer
from services.app_asset_service import AppAssetService
assets = AppAssetService.get_assets(draft_workflow.tenant_id, app_model.id, is_draft=True)
if not assets:
raise ValueError(f"No assets found for tid={draft_workflow.tenant_id}, app_id={app_model.id}")
# FIXME(Mairuis): single step execution
AppAssetService.build_assets(draft_workflow.tenant_id, app_model.id, assets)
sandbox = (
SandboxProviderService.create_sandbox_builder(draft_workflow.tenant_id)
.initializer(DifyCliInitializer())
.initializer(AppAssetsInitializer(draft_workflow.tenant_id, app_model.id))
.initializer(DifyCliInitializer(draft_workflow.tenant_id, account.id, app_model.id, assets.id))
.initializer(AppAssetsInitializer(draft_workflow.tenant_id, app_model.id, assets.id))
.build()
)
single_step_execution_id = f"single-step-{uuid.uuid4()}"

View File

@ -0,0 +1,112 @@
import pytest
from core.app.entities.app_asset_entities import AppAssetFileTree, AppAssetNode
class TestAppAssetFileTreeRelativePath:
    """Relative-path resolution between nodes of a nested asset tree."""

    @pytest.fixture
    def tree(self) -> AppAssetFileTree:
        # Layout: root/{root.md, docs/{a.md, b.md, sub/{c.md, deep/{d.md}}}}
        built = AppAssetFileTree()
        built.add(AppAssetNode.create_folder("root", "root"))
        for folder_id, folder_name, parent_id in (
            ("docs", "docs", "root"),
            ("sub", "sub", "docs"),
            ("deep", "deep", "sub"),
        ):
            built.add(AppAssetNode.create_folder(folder_id, folder_name, parent_id))
        for file_id, file_name, parent_id in (
            ("a_md", "a.md", "docs"),
            ("b_md", "b.md", "docs"),
            ("c_md", "c.md", "sub"),
            ("d_md", "d.md", "deep"),
            ("root_md", "root.md", "root"),
        ):
            built.add(AppAssetNode.create_file(file_id, file_name, parent_id))
        return built

    def _resolve(self, tree: AppAssetFileTree, src_id: str, dst_id: str) -> str:
        # Fetch both endpoints (asserting they exist) and compute the path.
        src = tree.get(src_id)
        dst = tree.get(dst_id)
        assert src
        assert dst
        return tree.relative_path(src, dst)

    def test_same_directory_siblings(self, tree: AppAssetFileTree):
        assert self._resolve(tree, "a_md", "b_md") == "./b.md"

    def test_same_file(self, tree: AppAssetFileTree):
        assert self._resolve(tree, "a_md", "a_md") == "./a.md"

    def test_child_directory(self, tree: AppAssetFileTree):
        assert self._resolve(tree, "a_md", "c_md") == "./sub/c.md"

    def test_parent_directory(self, tree: AppAssetFileTree):
        assert self._resolve(tree, "c_md", "a_md") == "../a.md"

    def test_two_levels_up(self, tree: AppAssetFileTree):
        assert self._resolve(tree, "d_md", "a_md") == "../../a.md"

    def test_cousin_same_level(self, tree: AppAssetFileTree):
        assert self._resolve(tree, "c_md", "b_md") == "../b.md"

    def test_deep_to_shallow(self, tree: AppAssetFileTree):
        assert self._resolve(tree, "d_md", "root_md") == "../../../root.md"

    def test_shallow_to_deep(self, tree: AppAssetFileTree):
        assert self._resolve(tree, "root_md", "d_md") == "./docs/sub/deep/d.md"

    def test_reference_to_folder(self, tree: AppAssetFileTree):
        # Target may be a folder node, not only a file.
        assert self._resolve(tree, "a_md", "sub") == "./sub"
class TestAppAssetFileTreeRelativePathRootLevel:
    """Relative paths when files live directly at the tree root."""

    @pytest.fixture
    def flat_tree(self) -> AppAssetFileTree:
        # Layout: README.md, LICENSE, src/main.py
        built = AppAssetFileTree()
        built.add(AppAssetNode.create_file("readme", "README.md"))
        built.add(AppAssetNode.create_file("license", "LICENSE"))
        built.add(AppAssetNode.create_folder("src", "src"))
        built.add(AppAssetNode.create_file("main", "main.py", "src"))
        return built

    def test_root_level_siblings(self, flat_tree: AppAssetFileTree):
        src, dst = flat_tree.get("readme"), flat_tree.get("license")
        assert src
        assert dst
        assert flat_tree.relative_path(src, dst) == "./LICENSE"

    def test_root_to_nested(self, flat_tree: AppAssetFileTree):
        src, dst = flat_tree.get("readme"), flat_tree.get("main")
        assert src
        assert dst
        assert flat_tree.relative_path(src, dst) == "./src/main.py"

    def test_nested_to_root(self, flat_tree: AppAssetFileTree):
        src, dst = flat_tree.get("main"), flat_tree.get("readme")
        assert src
        assert dst
        assert flat_tree.relative_path(src, dst) == "../README.md"