From 1a03dd496b6daff8d58b41c8feaba4b520adfe1c Mon Sep 17 00:00:00 2001
From: Cyrus Leung
Date: Wed, 2 Jul 2025 14:31:26 +0800
Subject: [PATCH] [Bugfix] Fix dynamic rotary embedding (#20343)

Signed-off-by: DarkLight1337
---
 vllm/model_executor/layers/rotary_embedding.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/vllm/model_executor/layers/rotary_embedding.py b/vllm/model_executor/layers/rotary_embedding.py
index 12b204bb98..a4615132a5 100644
--- a/vllm/model_executor/layers/rotary_embedding.py
+++ b/vllm/model_executor/layers/rotary_embedding.py
@@ -1963,16 +1963,19 @@ def get_rope(
                                                      scaling_factor, dtype,
                                                      mixed_b)
         elif scaling_type == "dynamic":
-            scaling_factor = rope_scaling["factor"]
-            scaling_alpha = rope_scaling["alpha"]
-            if scaling_alpha:
+            if "alpha" in rope_scaling:
+                scaling_alpha = rope_scaling["alpha"]
                 rotary_emb = DynamicNTKAlphaRotaryEmbedding(
                     head_size, rotary_dim, max_position, base, is_neox_style,
                     scaling_alpha, dtype)
-            else:
+            elif "factor" in rope_scaling:
+                scaling_factor = rope_scaling["factor"]
                 rotary_emb = DynamicNTKScalingRotaryEmbedding(
                     head_size, rotary_dim, max_position, base, is_neox_style,
                     scaling_factor, dtype)
+            else:
+                raise ValueError("Dynamic rope scaling must contain either "
+                                 "'alpha' or 'factor' field")
         elif scaling_type == "yarn":
             scaling_factor = rope_scaling["factor"]
             original_max_position = rope_scaling[