chore: add ast-grep rule to convert Optional[T] to T | None (#25560)

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
This commit is contained in:
-LAN-
2025-09-15 13:06:33 +08:00
committed by GitHub
parent 2e44ebe98d
commit bab4975809
394 changed files with 2555 additions and 2792 deletions

View File

@@ -9,7 +9,6 @@ from dataclasses import dataclass
 from typing import (
     Any,
     Literal,
-    Optional,
     TypeVar,
     Union,
 )
@@ -71,7 +70,7 @@ class TextSplitter(BaseDocumentTransformer, ABC):
     def split_text(self, text: str) -> list[str]:
         """Split text into multiple components."""

-    def create_documents(self, texts: list[str], metadatas: Optional[list[dict]] = None) -> list[Document]:
+    def create_documents(self, texts: list[str], metadatas: list[dict] | None = None) -> list[Document]:
         """Create documents from a list of texts."""
         _metadatas = metadatas or [{}] * len(texts)
         documents = []
@@ -94,7 +93,7 @@ class TextSplitter(BaseDocumentTransformer, ABC):
             metadatas.append(doc.metadata or {})
         return self.create_documents(texts, metadatas=metadatas)

-    def _join_docs(self, docs: list[str], separator: str) -> Optional[str]:
+    def _join_docs(self, docs: list[str], separator: str) -> str | None:
         text = separator.join(docs)
         text = text.strip()
         if text == "":
@@ -194,7 +193,7 @@ class TokenTextSplitter(TextSplitter):
     def __init__(
         self,
         encoding_name: str = "gpt2",
-        model_name: Optional[str] = None,
+        model_name: str | None = None,
         allowed_special: Union[Literal["all"], Set[str]] = set(),
         disallowed_special: Union[Literal["all"], Collection[str]] = "all",
         **kwargs: Any,
@@ -245,7 +244,7 @@ class RecursiveCharacterTextSplitter(TextSplitter):

     def __init__(
         self,
-        separators: Optional[list[str]] = None,
+        separators: list[str] | None = None,
         keep_separator: bool = True,
         **kwargs: Any,
     ):