File tree — 1 file changed: +1 −3 lines changed
lines changed Original file line number Diff line number Diff line change @@ -79,14 +79,12 @@ def initialize_dp_attention(
7979 )
8080
8181 if enable_dp_attention :
82- local_rank = tp_rank % (tp_size // dp_size )
8382 _ATTN_DP_SIZE = dp_size
8483 if moe_dense_tp_size is None :
8584 _LOCAL_ATTN_DP_SIZE = _ATTN_DP_SIZE
8685 else :
8786 _LOCAL_ATTN_DP_SIZE = max (1 , dp_size // (tp_size // moe_dense_tp_size ))
8887 else :
89- local_rank = tp_rank
9088 _ATTN_DP_SIZE = 1
9189 _LOCAL_ATTN_DP_SIZE = 1
9290
@@ -96,7 +94,7 @@ def initialize_dp_attention(
9694 list (range (head , head + _ATTN_TP_SIZE ))
9795 for head in range (0 , pp_size * tp_size , _ATTN_TP_SIZE )
9896 ],
99- local_rank ,
97+ tp_group . local_rank ,
10098 torch .distributed .get_backend (tp_group .device_group ),
10199 use_pynccl = SYNC_TOKEN_IDS_ACROSS_TP ,
102100 use_pymscclpp = False ,
You can’t perform that action at this time.
0 commit comments