chore: add ast-grep rule to convert Optional[T] to T | None (#25560)

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
This commit is contained in:
-LAN-
2025-09-15 13:06:33 +08:00
committed by GitHub
parent 2e44ebe98d
commit bab4975809
394 changed files with 2555 additions and 2792 deletions

View File

@@ -1,5 +1,5 @@
from collections.abc import Mapping, Sequence
from typing import Optional, cast
from typing import cast
from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity
from core.file import file_manager
@@ -41,11 +41,11 @@ class AdvancedPromptTransform(PromptTransform):
inputs: Mapping[str, str],
query: str,
files: Sequence[File],
context: Optional[str],
memory_config: Optional[MemoryConfig],
memory: Optional[TokenBufferMemory],
context: str | None,
memory_config: MemoryConfig | None,
memory: TokenBufferMemory | None,
model_config: ModelConfigWithCredentialsEntity,
image_detail_config: Optional[ImagePromptMessageContent.DETAIL] = None,
image_detail_config: ImagePromptMessageContent.DETAIL | None = None,
) -> list[PromptMessage]:
prompt_messages = []
@@ -80,13 +80,13 @@ class AdvancedPromptTransform(PromptTransform):
self,
prompt_template: CompletionModelPromptTemplate,
inputs: Mapping[str, str],
query: Optional[str],
query: str | None,
files: Sequence[File],
context: Optional[str],
memory_config: Optional[MemoryConfig],
memory: Optional[TokenBufferMemory],
context: str | None,
memory_config: MemoryConfig | None,
memory: TokenBufferMemory | None,
model_config: ModelConfigWithCredentialsEntity,
image_detail_config: Optional[ImagePromptMessageContent.DETAIL] = None,
image_detail_config: ImagePromptMessageContent.DETAIL | None = None,
) -> list[PromptMessage]:
"""
Get completion model prompt messages.
@@ -141,13 +141,13 @@ class AdvancedPromptTransform(PromptTransform):
self,
prompt_template: list[ChatModelMessage],
inputs: Mapping[str, str],
query: Optional[str],
query: str | None,
files: Sequence[File],
context: Optional[str],
memory_config: Optional[MemoryConfig],
memory: Optional[TokenBufferMemory],
context: str | None,
memory_config: MemoryConfig | None,
memory: TokenBufferMemory | None,
model_config: ModelConfigWithCredentialsEntity,
image_detail_config: Optional[ImagePromptMessageContent.DETAIL] = None,
image_detail_config: ImagePromptMessageContent.DETAIL | None = None,
) -> list[PromptMessage]:
"""
Get chat model prompt messages.

View File

@@ -1,4 +1,4 @@
from typing import Optional, cast
from typing import cast
from core.app.entities.app_invoke_entities import (
ModelConfigWithCredentialsEntity,
@@ -23,7 +23,7 @@ class AgentHistoryPromptTransform(PromptTransform):
model_config: ModelConfigWithCredentialsEntity,
prompt_messages: list[PromptMessage],
history_messages: list[PromptMessage],
memory: Optional[TokenBufferMemory] = None,
memory: TokenBufferMemory | None = None,
):
self.model_config = model_config
self.prompt_messages = prompt_messages

View File

@@ -1,4 +1,4 @@
from typing import Literal, Optional
from typing import Literal
from pydantic import BaseModel
@@ -12,7 +12,7 @@ class ChatModelMessage(BaseModel):
text: str
role: PromptMessageRole
edition_type: Optional[Literal["basic", "jinja2"]] = None
edition_type: Literal["basic", "jinja2"] | None = None
class CompletionModelPromptTemplate(BaseModel):
@@ -21,7 +21,7 @@ class CompletionModelPromptTemplate(BaseModel):
"""
text: str
edition_type: Optional[Literal["basic", "jinja2"]] = None
edition_type: Literal["basic", "jinja2"] | None = None
class MemoryConfig(BaseModel):
@@ -43,8 +43,8 @@ class MemoryConfig(BaseModel):
"""
enabled: bool
size: Optional[int] = None
size: int | None = None
role_prefix: Optional[RolePrefix] = None
role_prefix: RolePrefix | None = None
window: WindowConfig
query_prompt_template: Optional[str] = None
query_prompt_template: str | None = None

View File

@@ -1,4 +1,4 @@
from typing import Any, Optional
from typing import Any
from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity
from core.memory.token_buffer_memory import TokenBufferMemory
@@ -55,8 +55,8 @@ class PromptTransform:
memory: TokenBufferMemory,
memory_config: MemoryConfig,
max_token_limit: int,
human_prefix: Optional[str] = None,
ai_prefix: Optional[str] = None,
human_prefix: str | None = None,
ai_prefix: str | None = None,
) -> str:
"""Get memory messages."""
kwargs: dict[str, Any] = {"max_token_limit": max_token_limit}

View File

@@ -2,7 +2,7 @@ import json
import os
from collections.abc import Mapping, Sequence
from enum import StrEnum, auto
from typing import TYPE_CHECKING, Any, Optional, cast
from typing import TYPE_CHECKING, Any, cast
from core.app.app_config.entities import PromptTemplateEntity
from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity
@@ -45,11 +45,11 @@ class SimplePromptTransform(PromptTransform):
inputs: Mapping[str, str],
query: str,
files: Sequence["File"],
context: Optional[str],
memory: Optional[TokenBufferMemory],
context: str | None,
memory: TokenBufferMemory | None,
model_config: ModelConfigWithCredentialsEntity,
image_detail_config: Optional[ImagePromptMessageContent.DETAIL] = None,
) -> tuple[list[PromptMessage], Optional[list[str]]]:
image_detail_config: ImagePromptMessageContent.DETAIL | None = None,
) -> tuple[list[PromptMessage], list[str] | None]:
inputs = {key: str(value) for key, value in inputs.items()}
model_mode = ModelMode(model_config.mode)
@@ -86,9 +86,9 @@ class SimplePromptTransform(PromptTransform):
model_config: ModelConfigWithCredentialsEntity,
pre_prompt: str,
inputs: dict,
query: Optional[str] = None,
context: Optional[str] = None,
histories: Optional[str] = None,
query: str | None = None,
context: str | None = None,
histories: str | None = None,
) -> tuple[str, dict]:
# get prompt template
prompt_template_config = self.get_prompt_template(
@@ -182,12 +182,12 @@ class SimplePromptTransform(PromptTransform):
pre_prompt: str,
inputs: dict,
query: str,
context: Optional[str],
context: str | None,
files: Sequence["File"],
memory: Optional[TokenBufferMemory],
memory: TokenBufferMemory | None,
model_config: ModelConfigWithCredentialsEntity,
image_detail_config: Optional[ImagePromptMessageContent.DETAIL] = None,
) -> tuple[list[PromptMessage], Optional[list[str]]]:
image_detail_config: ImagePromptMessageContent.DETAIL | None = None,
) -> tuple[list[PromptMessage], list[str] | None]:
prompt_messages: list[PromptMessage] = []
# get prompt
@@ -228,12 +228,12 @@ class SimplePromptTransform(PromptTransform):
pre_prompt: str,
inputs: dict,
query: str,
context: Optional[str],
context: str | None,
files: Sequence["File"],
memory: Optional[TokenBufferMemory],
memory: TokenBufferMemory | None,
model_config: ModelConfigWithCredentialsEntity,
image_detail_config: Optional[ImagePromptMessageContent.DETAIL] = None,
) -> tuple[list[PromptMessage], Optional[list[str]]]:
image_detail_config: ImagePromptMessageContent.DETAIL | None = None,
) -> tuple[list[PromptMessage], list[str] | None]:
# get prompt
prompt, prompt_rules = self._get_prompt_str_and_rules(
app_mode=app_mode,
@@ -281,7 +281,7 @@ class SimplePromptTransform(PromptTransform):
self,
prompt: str,
files: Sequence["File"],
image_detail_config: Optional[ImagePromptMessageContent.DETAIL] = None,
image_detail_config: ImagePromptMessageContent.DETAIL | None = None,
) -> UserPromptMessage:
if files:
prompt_message_contents: list[PromptMessageContentUnionTypes] = []