Skip to content

Commit da006cb

Browse files
weijinqian0 and weijinqian_v1 authored
[BUGFIX][v0.9.1] ep_group is not equal to world_size in some cases. (#1862)
[BUGFIX][v0.9.1] ep_group is not equal to world_size in some cases, for example: external_dp. --------- Signed-off-by: weijinqian_v1 <[email protected]> Co-authored-by: weijinqian_v1 <[email protected]>
1 parent 1b6797f commit da006cb

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

vllm_ascend/ascend_forward_context.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55

66
import torch
77
from vllm.config import VllmConfig
8-
from vllm.distributed import get_dp_group, get_tp_group
8+
from vllm.distributed import get_dp_group, get_ep_group, get_tp_group
99
from vllm.forward_context import get_forward_context, set_forward_context
1010
from vllm.platforms import current_platform
1111

@@ -63,7 +63,7 @@ def set_ascend_forward_context(
6363
):
6464
forward_context = get_forward_context()
6565
forward_context.with_prefill = with_prefill
66-
ep_size = (torch.distributed.get_world_size() if
66+
ep_size = (get_ep_group().world_size if
6767
vllm_config.parallel_config.enable_expert_parallel else 1)
6868

6969
fused_moe_state = get_fused_moe_state(ep_size, with_prefill)

0 commit comments

Comments (0)