[CI/Build] Use a fixed seed to avoid flaky tests (#14480)

Signed-off-by: DarkLight1337 <tlleungac@connect.ust.hk>
This commit is contained in:
Cyrus Leung 2025-03-08 19:30:09 +08:00 committed by GitHub
parent cfd0ae8234
commit 33f227e16b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 7 additions and 6 deletions

View File

@@ -24,8 +24,6 @@ def server():
"4080",
"--chat-template",
DUMMY_CHAT_TEMPLATE,
"--seed",
"0",
]
with RemoteOpenAIServer(MODEL_NAME, args) as remote_server:

View File

@@ -47,8 +47,6 @@ def default_server_args():
"--enforce-eager",
"--max-num-seqs",
"128",
"--seed",
"0",
]

View File

@@ -30,8 +30,6 @@ def server():
"/" + ROOT_PATH,
"--chat-template",
DUMMY_CHAT_TEMPLATE,
"--seed",
"0",
]
envs = os.environ.copy()

View File

@@ -76,6 +76,7 @@ class RemoteOpenAIServer:
vllm_serve_args: list[str],
*,
env_dict: Optional[dict[str, str]] = None,
seed: Optional[int] = 0,
auto_port: bool = True,
max_wait_seconds: Optional[float] = None) -> None:
if auto_port:
@@ -87,6 +88,12 @@
vllm_serve_args = vllm_serve_args + [
"--port", str(get_open_port())
]
if seed is not None:
if "--seed" in vllm_serve_args:
raise ValueError("You have manually specified the seed "
f"when `seed={seed}`.")
vllm_serve_args = vllm_serve_args + ["--seed", str(seed)]
parser = FlexibleArgumentParser(
description="vLLM's remote OpenAI server.")