[Bugfix] Fix use-ep bug to enable EP when dp/tp size > 1 (#16161)
commit ad971af8c7
parent f2ebb6f541
@@ -437,7 +437,7 @@ class FusedMoE(torch.nn.Module):
         # Use expert parallelism instead of tensor parallelism?
         vllm_config = get_current_vllm_config()
         use_ep = (vllm_config.parallel_config.enable_expert_parallel
-                  and self.tp_size > 1)
+                  and self.tp_size * self.dp_size > 1)
 
         # For smuggling this layer into the fused moe custom op
         self.use_direct_call = self.dp_size == 1
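For context, a minimal sketch of the condition change, written as standalone helper functions rather than the FusedMoE layer itself; the parameter names mirror the fields read in the diff (enable_expert_parallel, tp_size, dp_size), but the functions are illustrative, not vLLM code:

def use_ep_before(enable_expert_parallel: bool, tp_size: int) -> bool:
    # Old check: expert parallelism was only enabled when tensor
    # parallelism was in use, ignoring data parallelism entirely.
    return enable_expert_parallel and tp_size > 1

def use_ep_after(enable_expert_parallel: bool,
                 tp_size: int, dp_size: int) -> bool:
    # Fixed check: any combined tp/dp world size greater than 1 enables EP.
    return enable_expert_parallel and tp_size * dp_size > 1

# A pure data-parallel deployment (tp_size=1, dp_size=2) with EP requested:
# the old check silently left expert parallelism off; the fix enables it.
assert use_ep_before(True, tp_size=1) is False
assert use_ep_after(True, tp_size=1, dp_size=2) is True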