Merge branch 'refs/heads/main' into feat/workflow-parallel-support

# Conflicts:
#	api/core/workflow/nodes/llm/llm_node.py
#	api/core/workflow/nodes/question_classifier/question_classifier_node.py
takatost committed 2024-08-23 00:32:28 +08:00
48 changed files with 381 additions and 543 deletions

api/core/workflow/nodes/code/code_node.py

@@ -67,7 +67,6 @@ class CodeNode(BaseNode):
                 language=code_language,
                 code=code,
                 inputs=variables,
-                dependencies=node_data.dependencies
             )
 
             # Transform result

api/core/workflow/nodes/code/entities.py

@@ -3,7 +3,6 @@ from typing import Literal, Optional
 from pydantic import BaseModel
 
 from core.helper.code_executor.code_executor import CodeLanguage
-from core.helper.code_executor.entities import CodeDependency
 from core.workflow.entities.base_node_data_entities import BaseNodeData
 from core.workflow.entities.variable_entities import VariableSelector
@@ -16,8 +15,12 @@ class CodeNodeData(BaseNodeData):
         type: Literal['string', 'number', 'object', 'array[string]', 'array[number]', 'array[object]']
         children: Optional[dict[str, 'Output']] = None
 
+    class Dependency(BaseModel):
+        name: str
+        version: str
+
     variables: list[VariableSelector]
     code_language: Literal[CodeLanguage.PYTHON3, CodeLanguage.JAVASCRIPT]
     code: str
     outputs: dict[str, Output]
-    dependencies: Optional[list[CodeDependency]] = None
+    dependencies: Optional[list[Dependency]] = None

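The two hunks above replace the imported CodeDependency entity with a Dependency model nested in CodeNodeData. A minimal runnable sketch of the reshaped schema, assuming only pydantic; CodeNodeDataSketch is a trimmed, hypothetical stand-in for the real CodeNodeData (Output, variables, and the workflow base class are omitted):

    from typing import Optional

    from pydantic import BaseModel


    class Dependency(BaseModel):
        name: str
        version: str


    class CodeNodeDataSketch(BaseModel):  # trimmed stand-in for CodeNodeData
        code: str
        dependencies: Optional[list[Dependency]] = None


    data = CodeNodeDataSketch(
        code='def main():\n    return {}',
        dependencies=[Dependency(name='requests', version='2.31.0')],
    )
    assert data.dependencies[0].name == 'requests'
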
api/core/workflow/nodes/llm/llm_node.py

@@ -138,12 +138,14 @@ class LLMNode(BaseNode):
             result_text = ''
             usage = LLMUsage.empty_usage()
+            finish_reason = None
             for event in generator:
                 if isinstance(event, RunStreamChunkEvent):
                     yield event
                 elif isinstance(event, ModelInvokeCompleted):
                     result_text = event.text
                     usage = event.usage
+                    finish_reason = event.finish_reason
                     break
         except Exception as e:
             yield RunCompletedEvent(
@@ -158,7 +160,8 @@ class LLMNode(BaseNode):
 
         outputs = {
             'text': result_text,
-            'usage': jsonable_encoder(usage)
+            'usage': jsonable_encoder(usage),
+            'finish_reason': finish_reason
         }
 
         yield RunCompletedEvent(
@@ -227,6 +230,7 @@ class LLMNode(BaseNode):
         prompt_messages: list[PromptMessage] = []
         full_text = ''
         usage = None
+        finish_reason = None
         for result in invoke_result:
             text = result.delta.message.content
             full_text += text
@@ -245,12 +249,16 @@ class LLMNode(BaseNode):
             if not usage and result.delta.usage:
                 usage = result.delta.usage
 
+            if not finish_reason and result.delta.finish_reason:
+                finish_reason = result.delta.finish_reason
+
         if not usage:
             usage = LLMUsage.empty_usage()
 
         yield ModelInvokeCompleted(
             text=full_text,
-            usage=usage
+            usage=usage,
+            finish_reason=finish_reason
         )
 
     def _transform_chat_messages(self,

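The hunks above thread the provider's finish reason from each streamed delta into the node outputs: captured once in the consuming loop, carried on ModelInvokeCompleted, then serialized next to usage. A self-contained toy of that plumbing; Delta and ModelInvokeCompleted here are simplified stand-ins for Dify's chunk and event classes:

    from dataclasses import dataclass
    from typing import Iterator, Optional


    @dataclass
    class Delta:  # stand-in for a streamed LLM result chunk delta
        content: str = ''
        usage: Optional[dict] = None
        finish_reason: Optional[str] = None


    @dataclass
    class ModelInvokeCompleted:  # mirrors the fields used in the diff
        text: str
        usage: dict
        finish_reason: Optional[str] = None


    def handle_stream(deltas: list[Delta]) -> Iterator[ModelInvokeCompleted]:
        full_text, usage, finish_reason = '', None, None
        for delta in deltas:
            full_text += delta.content
            if not usage and delta.usage:
                usage = delta.usage
            # keep the first finish_reason the provider reports
            if not finish_reason and delta.finish_reason:
                finish_reason = delta.finish_reason
        yield ModelInvokeCompleted(text=full_text, usage=usage or {},
                                   finish_reason=finish_reason)


    completed = next(handle_stream([
        Delta(content='Hello'),
        Delta(finish_reason='stop', usage={'total_tokens': 2}),
    ]))
    assert completed.finish_reason == 'stop'
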
api/core/workflow/nodes/question_classifier/question_classifier_node.py

@@ -74,10 +74,12 @@ class QuestionClassifierNode(LLMNode):
         result_text = ''
         usage = LLMUsage.empty_usage()
+        finish_reason = None
 
         for event in generator:
             if isinstance(event, ModelInvokeCompleted):
                 result_text = event.text
                 usage = event.usage
+                finish_reason = event.finish_reason
                 break
 
         category_name = node_data.classes[0].name
@@ -104,6 +106,7 @@ class QuestionClassifierNode(LLMNode):
                 prompt_messages=prompt_messages
             ),
             'usage': jsonable_encoder(usage),
+            'finish_reason': finish_reason
         }
         outputs = {
             'class_name': category_name
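
With this, the classifier's process_data records the same field as LLMNode while outputs still expose only the selected class. A hypothetical resulting payload (keys mirror the diff; values are invented for illustration):

    process_data = {
        'usage': {'prompt_tokens': 51, 'completion_tokens': 4, 'total_tokens': 55},
        'finish_reason': 'stop',
    }
    outputs = {
        'class_name': 'refund_request',
    }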