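# Standalone script that checks whether all ranks in the default process group
# are running on the same node. It expects the usual torch.distributed
# environment variables (MASTER_ADDR, MASTER_PORT, RANK, WORLD_SIZE), e.g. as
# set by a launcher such as torchrun.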
import os
import torch.distributed as dist
from vllm.distributed.parallel_state import in_the_same_node_as
if __name__ == "__main__":
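    # The "gloo" backend runs on CPU, so this check needs no GPUs or NCCL.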
    dist.init_process_group(backend="gloo")
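    # in_the_same_node_as() returns one boolean per rank, indicating whether
    # that rank lives on the same node as source_rank; all() collapses them
    # into a single verdict for this process.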
    test_result = all(in_the_same_node_as(dist.group.WORLD, source_rank=0))
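    # VLLM_TEST_SAME_HOST selects the expected outcome: "1" (the default) when
    # all ranks are launched on one host, "0" when they span multiple hosts.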
    expected = os.environ.get("VLLM_TEST_SAME_HOST", "1") == "1"
    assert test_result == expected, f"Expected {expected}, got {test_result}"
    print("Same node test passed!")