[CORE] Prompt Embeddings Support for v1 Engine (#24278)

Signed-off-by: Andrew Sansom <andrew@protopia.ai>
Signed-off-by: Andrew Sansom <qthequartermasterman@gmail.com>
Co-authored-by: Cyrus Leung <cyrus.tl.leung@gmail.com>
This commit is contained in:
Andrew Sansom
2025-09-18 19:03:09 -05:00
committed by GitHub
parent 9fac6aa30b
commit 9a4600e4dc
20 changed files with 305 additions and 76 deletions

View File

@@ -76,11 +76,6 @@ def test_models(
model_executor: str,
enable_prompt_embeds: bool,
) -> None:
if enable_prompt_embeds and envs.is_set(
"VLLM_USE_V1") and envs.VLLM_USE_V1:
pytest.skip("enable_prompt_embeds is not supported in v1.")
if not envs.VLLM_USE_V1:
if async_scheduling:
pytest.skip("async_scheduling only supported in v1.")
@@ -164,11 +159,6 @@ def test_models_distributed(
extra_env: dict[str, str],
enable_prompt_embeds: bool,
) -> None:
if enable_prompt_embeds and envs.is_set(
"VLLM_USE_V1") and envs.VLLM_USE_V1:
pytest.skip("enable_prompt_embeds is not supported in v1.")
if test_suite != TARGET_TEST_SUITE:
pytest.skip(f"Skip test for {test_suite}")