[Bugfix] Remove num_tokens_across_dp (#14302)

Signed-off-by: Tyler Michael Smith <tyler@neuralmagic.com>
Tyler Michael Smith 2025-03-05 18:55:55 -05:00 committed by GitHub
parent 1e3e76b6cc
commit a7ea35aa67

@@ -27,7 +27,6 @@ batchsize_forward_time: defaultdict = defaultdict(list)
 @dataclass
 class DPMetadata:
-    num_tokens_across_dp: list[int]
     cu_tokens_across_dp_cpu: torch.Tensor
@@ -89,7 +88,7 @@ def set_forward_context(attn_metadata: Any,
         from vllm.distributed.parallel_state import get_dp_group
         dist.all_reduce(num_tokens_tensor, group=get_dp_group().cpu_group)
         cu_tokens_across_dp_cpu = torch.cumsum(num_tokens_tensor, dim=0)
-        dp_metadata = DPMetadata(num_tokens_across_dp, cu_tokens_across_dp_cpu)
+        dp_metadata = DPMetadata(cu_tokens_across_dp_cpu)
 
     global _forward_context
     prev_context = _forward_context
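
For context on what the surviving field holds, here is a minimal standalone sketch (not part of the commit; the per-rank token counts are made-up example values standing in for num_tokens_tensor after the all_reduce above). It shows the cumulative tensor produced by torch.cumsum, and how per-rank counts like the removed num_tokens_across_dp field can still be recovered from it by differencing if a caller needs them.

import torch

# Made-up per-rank token counts for a data-parallel group of 4 ranks,
# standing in for num_tokens_tensor after the all_reduce in the diff above.
num_tokens_tensor = torch.tensor([8, 16, 4, 12])

# The field DPMetadata keeps: cumulative token counts across DP ranks.
cu_tokens_across_dp_cpu = torch.cumsum(num_tokens_tensor, dim=0)
print(cu_tokens_across_dp_cpu)        # tensor([ 8, 24, 28, 40])

# Per-rank counts (what the removed field carried) can be re-derived
# from the cumulative tensor by differencing with a prepended zero.
num_tokens_across_dp = torch.diff(
    cu_tokens_across_dp_cpu,
    prepend=torch.zeros(1, dtype=cu_tokens_across_dp_cpu.dtype))
print(num_tokens_across_dp.tolist())  # [8, 16, 4, 12]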