Skip to content

Commit c93ba72

Browse files
committed
fix ep error
1 parent 42deaf7 commit c93ba72

File tree

1 file changed

+2
-2
lines changed
  • fastdeploy/model_executor/layers/moe

1 file changed

+2
-2
lines changed

fastdeploy/model_executor/layers/moe/ep.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -508,7 +508,7 @@ def moe_select(self, layer: nn.Layer, gate_out: paddle.Tensor):
 508  508                 expert_in_rank_num_list=expert_in_rank_num_list,
 509  509                 tokens_per_expert_stats_list=tokens_per_expert_stats_list,
 510  510                 redundant_ep_rank_num_plus_one=layer.fd_config.eplb_config.redundant_experts_num + 1,
 511       -             topk_reduce_func=layer.topk_reduce_func,
      511  +             topk_reduce_func=getattr(layer, "topk_reduce_func", None),
 512  512             )
 513  513         else:
 514  514             topk_idx, topk_weights = fastdeploy.model_executor.ops.gpu.moe_redundant_topk_select(
@@ -534,7 +534,7 @@ def moe_select(self, layer: nn.Layer, gate_out: paddle.Tensor):
 534  534                 layer.routed_scaling_factor,
 535  535                 layer.gate_correction_bias,
 536  536                 getattr(layer, "renormalize", True),
 537       -             topk_reduce_func=layer.topk_reduce_func,
      537  +             topk_reduce_func=getattr(layer, "topk_reduce_func", None),
 538  538             )
 539  539         else:
 540  540             topk_idx, topk_weights = fastdeploy.model_executor.ops.gpu.moe_topk_select(

0 commit comments

Comments (0)