[Misc] Set default value of seed to None (#14274)

Signed-off-by: மனோஜ்குமார் பழனிச்சாமி <smartmanoj42857@gmail.com>
This commit is contained in:
மனோஜ்குமார் பழனிச்சாமி
2025-03-07 16:10:01 +05:30
committed by GitHub
parent 05fb6718f0
commit cc10281498
9 changed files with 15 additions and 7 deletions

View File

@@ -34,7 +34,8 @@ def llm():
max_num_batched_tokens=32768,
tensor_parallel_size=1,
gpu_memory_utilization=0.75,
enforce_eager=True)
enforce_eager=True,
seed=0)
with llm.deprecate_legacy_api():
yield weakref.proxy(llm)

View File

@@ -21,7 +21,7 @@ GUIDED_DECODING_BACKENDS = ["outlines", "lm-format-enforcer", "xgrammar"]
def llm():
# pytest caches the fixture so we use weakref.proxy to
# enable garbage collection
llm = LLM(model=MODEL_NAME, max_model_len=1024)
llm = LLM(model=MODEL_NAME, max_model_len=1024, seed=0)
with llm.deprecate_legacy_api():
yield weakref.proxy(llm)

View File

@@ -24,6 +24,8 @@ def server():
"4080",
"--chat-template",
DUMMY_CHAT_TEMPLATE,
"--seed",
"0",
]
with RemoteOpenAIServer(MODEL_NAME, args) as remote_server:

View File

@@ -47,6 +47,8 @@ def default_server_args():
"--enforce-eager",
"--max-num-seqs",
"128",
"--seed",
"0",
]

View File

@@ -30,6 +30,8 @@ def server():
"/" + ROOT_PATH,
"--chat-template",
DUMMY_CHAT_TEMPLATE,
"--seed",
"0",
]
envs = os.environ.copy()