[Bugfix] Fix DeepEP config for DP4TP4 (#23619)
Signed-off-by: Ming Yang <minos.future@gmail.com>
This commit is contained in:
@ -49,14 +49,14 @@ class DeepEPHTPrepareAndFinalize(mk.FusedMoEPrepareAndFinalize):
|
||||
return torch.int64
|
||||
|
||||
def _get_dispatch_config(self) -> Optional[deep_ep.Config]:
    """Return the DeepEP dispatch config for this rank layout, if one exists.

    DeepEP ships tuned configs only for certain rank counts. The lookup key
    must be the number of dispatcher ranks (``self.num_dispatchers_``), not
    the data-parallel size: with DP4TP4 the two differ, and keying on
    ``dp_size`` selected a config for the wrong rank count (bug fixed in
    #23619).

    Returns:
        The tuned ``deep_ep.Config`` for ``self.num_dispatchers_`` ranks,
        or ``None`` when no tuned config is available for that rank count
        (callers fall back to DeepEP defaults).
    """
    # NOTE(review): available_rank_configs is assumed to be the collection of
    # rank counts DeepEP has tuned configs for — set elsewhere in this class.
    if self.num_dispatchers_ not in self.available_rank_configs:
        return None
    return deep_ep.Buffer.get_dispatch_config(self.num_dispatchers_)
|
||||
|
||||
def _get_combine_config(self) -> Optional[deep_ep.Config]:
    """Return the DeepEP combine config for this rank layout, if one exists.

    Mirrors ``_get_dispatch_config``: the lookup is keyed on the number of
    dispatcher ranks (``self.num_dispatchers_``), not the data-parallel
    size, because the two differ under layouts such as DP4TP4 (bug fixed
    in #23619).

    Returns:
        The tuned ``deep_ep.Config`` for ``self.num_dispatchers_`` ranks,
        or ``None`` when no tuned config is available for that rank count
        (callers fall back to DeepEP defaults).
    """
    # NOTE(review): available_rank_configs is assumed to be the collection of
    # rank counts DeepEP has tuned configs for — set elsewhere in this class.
    if self.num_dispatchers_ not in self.available_rank_configs:
        return None
    return deep_ep.Buffer.get_combine_config(self.num_dispatchers_)
|
||||
|
||||
def _do_dispatch(
|
||||
self,
|
||||
|
||||
Reference in New Issue
Block a user