chore: fix ruff lint check errors

This commit is contained in:
stream
2025-07-08 15:00:43 +08:00
committed by Stream
parent 689490142d
commit a3e1a15ef6
3 changed files with 4 additions and 4 deletions

View File

@ -27,7 +27,7 @@ from core.llm_generator.llm_generator import LLMGenerator
 from extensions.ext_database import db
 from factories import file_factory, variable_factory
 from fields.workflow_fields import workflow_fields, workflow_pagination_fields
-from fields.workflow_run_fields import workflow_run_node_execution_fields, workflow_node_ai_modify_fields
+from fields.workflow_run_fields import workflow_node_ai_modify_fields, workflow_run_node_execution_fields
 from libs import helper
 from libs.helper import TimestampField, uuid_value
 from libs.login import current_user, login_required

View File

@ -4,7 +4,6 @@ import re
 from typing import Optional, cast
 import json_repair
-from pydantic import BaseModel
 from core.llm_generator.output_parser.rule_config_generator import RuleConfigGeneratorOutputParser
 from core.llm_generator.output_parser.suggested_questions_after_answer import SuggestedQuestionsAfterAnswerOutputParser
@ -12,9 +11,10 @@ from core.llm_generator.prompts import (
     CONVERSATION_TITLE_PROMPT,
     GENERATOR_QA_PROMPT,
     JAVASCRIPT_CODE_GENERATOR_PROMPT_TEMPLATE,
+    PROMPT_OPTIMIZATION_METAPROMPT_SYSTEM,
     PYTHON_CODE_GENERATOR_PROMPT_TEMPLATE,
     SYSTEM_STRUCTURED_OUTPUT_GENERATE,
-    WORKFLOW_RULE_CONFIG_PROMPT_GENERATE_TEMPLATE, PROMPT_OPTIMIZATION_METAPROMPT_SYSTEM,
+    WORKFLOW_RULE_CONFIG_PROMPT_GENERATE_TEMPLATE,
 )
from core.model_manager import ModelManager from core.model_manager import ModelManager
from core.model_runtime.entities.llm_entities import LLMResult from core.model_runtime.entities.llm_entities import LLMResult

View File

@ -361,4 +361,4 @@ Both your input and output should be in JSON format.
 ! Above is the schema for output content !
 Your output must strictly follow the schema format, do not output any content outside of the JSON body.
-"""
+"""  # noqa: E501