Update `Optional[x]` to `x | None` and `Union[x, y]` to `x | y` (#26633)

Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
This commit is contained in:
Harry Mellor
2025-10-12 17:51:31 +01:00
committed by GitHub
parent 9bb38130cb
commit 8fcaaf6a16
944 changed files with 9490 additions and 10121 deletions

View File

@ -9,7 +9,7 @@ using the chat template defined by the model.
import os
from argparse import Namespace
from dataclasses import asdict
from typing import NamedTuple, Optional
from typing import NamedTuple
from huggingface_hub import snapshot_download
from PIL.Image import Image
@ -41,9 +41,9 @@ class ModelRequestData(NamedTuple):
engine_args: EngineArgs
prompt: str
image_data: list[Image]
stop_token_ids: Optional[list[int]] = None
chat_template: Optional[str] = None
lora_requests: Optional[list[LoRARequest]] = None
stop_token_ids: list[int] | None = None
chat_template: str | None = None
lora_requests: list[LoRARequest] | None = None
# NOTE: The default `max_num_seqs` and `max_model_len` may result in OOM on
@ -1251,7 +1251,7 @@ model_example_map = {
}
def run_generate(model, question: str, image_urls: list[str], seed: Optional[int]):
def run_generate(model, question: str, image_urls: list[str], seed: int | None):
req_data = model_example_map[model](question, image_urls)
engine_args = asdict(req_data.engine_args) | {"seed": args.seed}
@ -1277,7 +1277,7 @@ def run_generate(model, question: str, image_urls: list[str], seed: Optional[int
print("-" * 50)
def run_chat(model: str, question: str, image_urls: list[str], seed: Optional[int]):
def run_chat(model: str, question: str, image_urls: list[str], seed: int | None):
req_data = model_example_map[model](question, image_urls)
# Disable other modalities to save memory