Merge branch 'main' into feat/rag-2

This commit is contained in:
twwu
2025-08-25 15:30:18 +08:00
301 changed files with 6189 additions and 2763 deletions

View File

@@ -13,8 +13,9 @@ from core.agent.strategy.plugin import PluginAgentStrategy
from core.file import File, FileTransferMethod
from core.memory.token_buffer_memory import TokenBufferMemory
from core.model_manager import ModelInstance, ModelManager
from core.model_runtime.entities.llm_entities import LLMUsage
from core.model_runtime.entities.llm_entities import LLMUsage, LLMUsageMetadata
from core.model_runtime.entities.model_entities import AIModelEntity, ModelType
from core.model_runtime.utils.encoders import jsonable_encoder
from core.plugin.entities.request import InvokeCredentials
from core.plugin.impl.exc import PluginDaemonClientSideError
from core.plugin.impl.plugin import PluginInstaller
@@ -558,7 +559,7 @@ class AgentNode(BaseNode):
assert isinstance(message.message, ToolInvokeMessage.JsonMessage)
if node_type == NodeType.AGENT:
msg_metadata: dict[str, Any] = message.message.json_object.pop("execution_metadata", {})
llm_usage = LLMUsage.from_metadata(msg_metadata)
llm_usage = LLMUsage.from_metadata(cast(LLMUsageMetadata, msg_metadata))
agent_execution_metadata = {
WorkflowNodeExecutionMetadataKey(key): value
for key, value in msg_metadata.items()
@@ -692,7 +693,13 @@ class AgentNode(BaseNode):
yield RunCompletedEvent(
run_result=NodeRunResult(
status=WorkflowNodeExecutionStatus.SUCCEEDED,
outputs={"text": text, "files": ArrayFileSegment(value=files), "json": json_output, **variables},
outputs={
"text": text,
"usage": jsonable_encoder(llm_usage),
"files": ArrayFileSegment(value=files),
"json": json_output,
**variables,
},
metadata={
**agent_execution_metadata,
WorkflowNodeExecutionMetadataKey.TOOL_INFO: tool_info,

View File

@@ -12,6 +12,7 @@ from json_repair import repair_json
from configs import dify_config
from core.file import file_manager
from core.file.enums import FileTransferMethod
from core.helper import ssrf_proxy
from core.variables.segments import ArrayFileSegment, FileSegment
from core.workflow.entities.variable_pool import VariablePool
@@ -228,7 +229,9 @@ class Executor:
files: dict[str, list[tuple[str | None, bytes, str]]] = {}
for key, files_in_segment in files_list:
for file in files_in_segment:
if file.related_id is not None:
if file.related_id is not None or (
file.transfer_method == FileTransferMethod.REMOTE_URL and file.remote_url is not None
):
file_tuple = (
file.filename,
file_manager.download(file),

View File

@@ -4,7 +4,7 @@ import time
import uuid
from collections.abc import Generator, Mapping, Sequence
from concurrent.futures import Future, wait
from datetime import UTC, datetime
from datetime import datetime
from queue import Empty, Queue
from typing import TYPE_CHECKING, Any, Optional, cast
@@ -41,6 +41,7 @@ from core.workflow.nodes.enums import ErrorStrategy, NodeType
from core.workflow.nodes.event import NodeEvent, RunCompletedEvent
from core.workflow.nodes.iteration.entities import ErrorHandleMode, IterationNodeData
from factories.variable_factory import build_segment
from libs.datetime_utils import naive_utc_now
from libs.flask_utils import preserve_flask_contexts
from .exc import (
@@ -179,7 +180,7 @@ class IterationNode(BaseNode):
thread_pool_id=self.thread_pool_id,
)
start_at = datetime.now(UTC).replace(tzinfo=None)
start_at = naive_utc_now()
yield IterationRunStartedEvent(
iteration_id=self.id,
@@ -428,7 +429,7 @@
"""
run single iteration
"""
iter_start_at = datetime.now(UTC).replace(tzinfo=None)
iter_start_at = naive_utc_now()
try:
rst = graph_engine.run()
@@ -505,7 +506,7 @@
variable_pool.add([self.node_id, "index"], next_index)
if next_index < len(iterator_list_value):
variable_pool.add([self.node_id, "item"], iterator_list_value[next_index])
duration = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds()
duration = (naive_utc_now() - iter_start_at).total_seconds()
iter_run_map[iteration_run_id] = duration
yield IterationRunNextEvent(
iteration_id=self.id,
@@ -526,7 +527,7 @@
if next_index < len(iterator_list_value):
variable_pool.add([self.node_id, "item"], iterator_list_value[next_index])
duration = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds()
duration = (naive_utc_now() - iter_start_at).total_seconds()
iter_run_map[iteration_run_id] = duration
yield IterationRunNextEvent(
iteration_id=self.id,
@@ -602,7 +603,7 @@
if next_index < len(iterator_list_value):
variable_pool.add([self.node_id, "item"], iterator_list_value[next_index])
duration = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds()
duration = (naive_utc_now() - iter_start_at).total_seconds()
iter_run_map[iteration_run_id] = duration
yield IterationRunNextEvent(
iteration_id=self.id,

View File

@@ -1,5 +1,4 @@
from collections.abc import Sequence
from datetime import UTC, datetime
from typing import Optional, cast
from sqlalchemy import select, update
@@ -20,6 +19,7 @@ from core.variables.segments import ArrayAnySegment, ArrayFileSegment, FileSegme
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.enums import SystemVariableKey
from core.workflow.nodes.llm.entities import ModelConfig
from libs.datetime_utils import naive_utc_now
from models import db
from models.model import Conversation
from models.provider import Provider, ProviderType
@@ -149,7 +149,7 @@ def deduct_llm_quota(tenant_id: str, model_instance: ModelInstance, usage: LLMUs
)
.values(
quota_used=Provider.quota_used + used_quota,
last_used=datetime.now(tz=UTC).replace(tzinfo=None),
last_used=naive_utc_now(),
)
)
session.execute(stmt)

View File

@@ -2,7 +2,7 @@ import json
import logging
import time
from collections.abc import Generator, Mapping, Sequence
from datetime import UTC, datetime
from datetime import datetime
from typing import TYPE_CHECKING, Any, Literal, Optional, cast
from configs import dify_config
@@ -36,6 +36,7 @@ from core.workflow.nodes.event import NodeEvent, RunCompletedEvent
from core.workflow.nodes.loop.entities import LoopNodeData
from core.workflow.utils.condition.processor import ConditionProcessor
from factories.variable_factory import TypeMismatchError, build_segment_with_type
from libs.datetime_utils import naive_utc_now
if TYPE_CHECKING:
from core.workflow.entities.variable_pool import VariablePool
@@ -143,7 +144,7 @@ class LoopNode(BaseNode):
thread_pool_id=self.thread_pool_id,
)
start_at = datetime.now(UTC).replace(tzinfo=None)
start_at = naive_utc_now()
condition_processor = ConditionProcessor()
# Start Loop event
@ -171,7 +172,7 @@ class LoopNode(BaseNode):
try:
check_break_result = False
for i in range(loop_count):
loop_start_time = datetime.now(UTC).replace(tzinfo=None)
loop_start_time = naive_utc_now()
# run single loop
loop_result = yield from self._run_single_loop(
graph_engine=graph_engine,
@ -185,7 +186,7 @@ class LoopNode(BaseNode):
start_at=start_at,
inputs=inputs,
)
loop_end_time = datetime.now(UTC).replace(tzinfo=None)
loop_end_time = naive_utc_now()
single_loop_variable = {}
for key, selector in loop_variable_selectors.items():