Commit be6d92f

[None][fix] Fix MoE load balancer config loading (#7150)
Signed-off-by: Enwei Zhu <[email protected]>
1 parent f61b74f
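
This commit makes two changes to MoE load balancer config handling in tensorrt_llm/llmapi/llm_args.py: the error message raised when a config file fails to load now references self.moe_config.load_balancer (the previous self.load_balancer does not exist as a field on TorchLlmArgs), and validate_load_balancer gains an elif branch that converts an inline dict into a MoeLoadBalancerConfig, so dict configs are validated instead of being passed through untouched. The json_schema_extra type hint on MoeConfig.load_balancer is updated accordingly to Union[MoeLoadBalancerConfig, dict, str].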

File tree

1 file changed: +10 -2 lines changed

tensorrt_llm/llmapi/llm_args.py

Lines changed: 10 additions & 2 deletions
@@ -181,7 +181,7 @@ class MoeConfig(StrictBaseModel):
     load_balancer: Optional[Union[object, str]] = Field(
         default=None,
         description="Configuration for MoE load balancing.",
-        json_schema_extra={"type": "Union[MoeLoadBalancerConfig, str]"})
+        json_schema_extra={"type": "Union[MoeLoadBalancerConfig, dict, str]"})
 
     disable_finalize_fusion: bool = Field(
         default=False,
@@ -2267,7 +2267,15 @@ def validate_load_balancer(self) -> 'TorchLlmArgs':
                         **moe_load_balancer_config)
                 except Exception as e:
                     raise ValueError(
-                        f"Failed to load MoE load balancer config file: {self.load_balancer}"
+                        f"Failed to load MoE load balancer config file: {self.moe_config.load_balancer}"
+                    ) from e
+            elif isinstance(self.moe_config.load_balancer, dict):
+                try:
+                    self.moe_config.load_balancer = MoeLoadBalancerConfig(
+                        **self.moe_config.load_balancer)
+                except Exception as e:
+                    raise ValueError(
+                        f"Failed to load MoE load balancer config: {self.moe_config.load_balancer}"
                     ) from e
         return self
 
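For context, a minimal usage sketch (not part of this commit) of the forms MoeConfig.load_balancer can take after this fix; the model path and the num_slots field below are illustrative placeholders.

# Illustrative sketch, not from this commit: with this fix,
# TorchLlmArgs.validate_load_balancer normalizes both a YAML file
# path and an inline dict into a MoeLoadBalancerConfig, raising
# ValueError (chained via `from e`) if either conversion fails.
from tensorrt_llm.llmapi.llm_args import MoeConfig, TorchLlmArgs

# 1) YAML file path: opened, parsed, and converted by the validator.
#    "moe_lb.yaml" is a placeholder and must exist on disk.
args_from_file = TorchLlmArgs(
    model="/path/to/model",  # placeholder model path
    moe_config=MoeConfig(load_balancer="moe_lb.yaml"),
)

# 2) Inline dict: previously passed through untouched; now converted
#    via MoeLoadBalancerConfig(**dict). The field name is a placeholder.
args_from_dict = TorchLlmArgs(
    model="/path/to/model",
    moe_config=MoeConfig(load_balancer={"num_slots": 256}),
)

# 3) A pre-built MoeLoadBalancerConfig instance is accepted unchanged.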