Raise exception for pin_lora (#19809)

Signed-off-by: Andy Xie <andy.xning@gmail.com>
This commit is contained in:
Ning Xie
2025-06-19 13:57:35 +08:00
committed by GitHub
parent aa20d10a91
commit c7b370c603

View File

@ -202,8 +202,7 @@ class LoRANotSupportedWorkerBase(WorkerBase):
raise ValueError(f"{type(self)} does not support LoRA")
def pin_lora(self, lora_id: int) -> bool:
    """Pin a LoRA adapter in this worker.

    This worker type does not support LoRA, so the call never succeeds.

    Args:
        lora_id: Identifier of the LoRA adapter to pin (ignored).

    Raises:
        ValueError: always — LoRA is not supported by this worker type.
    """
    # Previously this *returned* the ValueError instance instead of
    # raising it, so callers silently received an exception object in
    # place of a bool. Raise it, matching the sibling LoRA methods.
    raise ValueError(f"{type(self)} does not support LoRA")
def list_loras(self) -> Set[int]:
    """Return the IDs of loaded LoRA adapters.

    Raises:
        ValueError: always — LoRA is not supported by this worker type.
    """
    message = f"{type(self)} does not support LoRA"
    raise ValueError(message)
@ -398,7 +397,7 @@ class LocalOrDistributedWorkerBase(WorkerBase):
model_input, worker_input, kwargs = inputs
num_steps = worker_input.num_steps
if (execute_model_req is not None and execute_model_req.spec_step_idx):
if execute_model_req is not None and execute_model_req.spec_step_idx:
kwargs["spec_step_idx"] = execute_model_req.spec_step_idx
self.execute_worker(worker_input)