[Bugfix] fix use_ep condition so expert parallelism is enabled when tp_size * dp_size > 1 (#16161)

commit ad971af8c7 (parent f2ebb6f541)
Author: zxfan-cpu
Date: 2025-04-08 11:48:47 +08:00
Committed by: GitHub

@@ -437,7 +437,7 @@ class FusedMoE(torch.nn.Module):
         # Use expert parallelism instead of tensor parallelism?
         vllm_config = get_current_vllm_config()
         use_ep = (vllm_config.parallel_config.enable_expert_parallel
-                  and self.tp_size > 1)
+                  and self.tp_size * self.dp_size > 1)
         # For smuggling this layer into the fused moe custom op
         self.use_direct_call = self.dp_size == 1
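
Before this fix, a data-parallel-only deployment (tp_size == 1, dp_size > 1) could request expert parallelism yet never actually enable it, because the check consulted only the tensor-parallel size. The standalone Python sketch below (hypothetical helper names, not vLLM code; only enable_expert_parallel, tp_size, and dp_size mirror the fields read in the diff) contrasts the two conditions:

# A minimal sketch contrasting the pre-fix and post-fix conditions.
# Helper names are hypothetical; not part of vLLM.

def use_ep_old(enable_expert_parallel: bool, tp_size: int, dp_size: int) -> bool:
    # Pre-fix: only the tensor-parallel size is consulted, so a
    # data-parallel-only deployment can never turn EP on.
    return enable_expert_parallel and tp_size > 1

def use_ep_new(enable_expert_parallel: bool, tp_size: int, dp_size: int) -> bool:
    # Post-fix: EP engages whenever more than one rank exists in total,
    # whether the ranks come from TP, DP, or both.
    return enable_expert_parallel and tp_size * dp_size > 1

# DP-only deployment: tp_size=1, dp_size=2, expert parallelism requested.
assert use_ep_old(True, 1, 2) is False  # bug: EP silently stays off
assert use_ep_new(True, 1, 2) is True   # fix: EP is enabled

# Pure TP deployments behave the same before and after the fix.
assert use_ep_old(True, 2, 1) is use_ep_new(True, 2, 1) is True

Testing the product tp_size * dp_size is the natural condition here: it counts the total ranks across both parallel dimensions, consistent with treating every TP and DP rank as a potential expert shard once EP is switched on.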