[ci/build] fix broken tests in LLM.collective_rpc (#15350)

Signed-off-by: youkaichao <youkaichao@gmail.com>
This commit is contained in:
youkaichao 2025-03-23 14:49:48 +08:00 committed by GitHub
parent 09b6a95551
commit f68cce8e64
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 3 additions and 12 deletions

View File

@@ -515,7 +515,7 @@ steps:
   - vllm/worker/model_runner.py
   - entrypoints/llm/test_collective_rpc.py
   commands:
-  - pytest -v -s entrypoints/llm/test_collective_rpc.py
+  - VLLM_ENABLE_V1_MULTIPROCESSING=0 pytest -v -s entrypoints/llm/test_collective_rpc.py
   - pytest -v -s ./compile/test_basic_correctness.py
   - pytest -v -s ./compile/test_wrapper.py
   - VLLM_TEST_SAME_HOST=1 torchrun --nproc-per-node=4 distributed/test_same_node.py | grep 'Same node test passed'

View File

@@ -21,18 +21,9 @@ def test_collective_rpc(tp_size, backend):
     def echo_rank(self):
         return self.rank

-    from vllm.worker.worker import Worker
-
-    class MyWorker(Worker):
-
-        def echo_rank(self):
-            return self.rank
-
     llm = LLM(model="meta-llama/Llama-3.2-1B-Instruct",
               enforce_eager=True,
               load_format="dummy",
               tensor_parallel_size=tp_size,
-              distributed_executor_backend=backend,
-              worker_cls=MyWorker)
-
-    for method in ["echo_rank", echo_rank]:
-        assert llm.collective_rpc(method) == list(range(tp_size))
+              distributed_executor_backend=backend)
+    assert llm.collective_rpc(echo_rank) == list(range(tp_size))