[Bugfix] Fix Apertus HF repo name (#24447)
Signed-off-by: DarkLight1337 <tlleungac@connect.ust.hk>
This commit is contained in:
@@ -93,7 +93,7 @@ AITER_MODEL_LIST = [
         "allenai/OLMoE-1B-7B-0924-Instruct",
         marks=[pytest.mark.cpu_model],
     ),
-    pytest.param("swiss-ai/Apertus-8B"),  # apertus
+    pytest.param("swiss-ai/Apertus-8B-2509"),  # apertus
 ])
 @pytest.mark.parametrize("max_tokens", [32])
 @pytest.mark.parametrize("num_logprobs", [5])
||||
@@ -158,7 +158,7 @@ class _HfExamplesInfo:
 # yapf: disable
 _TEXT_GENERATION_EXAMPLE_MODELS = {
     # [Decoder-only]
-    "ApertusForCausalLM": _HfExamplesInfo("swiss-ai/Apertus-8B",
+    "ApertusForCausalLM": _HfExamplesInfo("swiss-ai/Apertus-8B-2509",
                                           min_transformers_version="4.56.0",
                                           trust_remote_code=True),
     "AquilaModel": _HfExamplesInfo("BAAI/AquilaChat-7B",
Reference in New Issue
Block a user