[Bugfix] Fix default weight loading for scalars (vllm-project#7534)
mgoin authored Aug 15, 2024
1 parent f4da5f7 · commit 21313e0
Showing 1 changed file with 11 additions and 5 deletions.
vllm/model_executor/model_loader/weight_utils.py (16 changes: 11 additions & 5 deletions)
```diff
@@ -516,11 +516,17 @@ def default_weight_loader(param: torch.Tensor,
                           loaded_weight: torch.Tensor) -> None:
     """Default weight loader."""
     try:
-        assert param.size() == loaded_weight.size(), (
-            f"Attempted to load weight ({loaded_weight.size()}) "
-            f"into parameter ({param.size()})")
-
-        param.data.copy_(loaded_weight)
+        if param.numel() == 1 and loaded_weight.numel() == 1:
+            # Sometimes scalar values aren't considered tensors with shapes
+            # so if both param and loaded_weight are a scalar,
+            # "broadcast" instead of copy
+            param.data.fill_(loaded_weight.item())
+        else:
+            assert param.size() == loaded_weight.size(), (
+                f"Attempted to load weight ({loaded_weight.size()}) "
+                f"into parameter ({param.size()})")
+
+            param.data.copy_(loaded_weight)
     except Exception:
         # NOTE: This exception is added for the purpose of setting breakpoint to
         # debug weight loading issues.
```
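For illustration, here is a minimal standalone sketch of the patched logic (the demo tensors below are hypothetical, not part of the commit). A checkpoint may store a scalar as a 0-dim tensor while the module parameter has shape (1,); both hold exactly one element, yet a strict size() comparison fails, so the fix fills the parameter from .item() instead of copying.

```python
import torch

def default_weight_loader(param: torch.Tensor,
                          loaded_weight: torch.Tensor) -> None:
    """Simplified patched loader (the debugging try/except is omitted)."""
    if param.numel() == 1 and loaded_weight.numel() == 1:
        # A 0-dim scalar and a shape-(1,) parameter each hold one value
        # but have different sizes, so "broadcast" with fill_ instead.
        param.data.fill_(loaded_weight.item())
    else:
        assert param.size() == loaded_weight.size(), (
            f"Attempted to load weight ({loaded_weight.size()}) "
            f"into parameter ({param.size()})")
        param.data.copy_(loaded_weight)

param = torch.nn.Parameter(torch.zeros(1))   # parameter of shape (1,)
scalar = torch.tensor(0.5)                   # 0-dim scalar from a checkpoint
default_weight_loader(param, scalar)         # hypothetical demo call
assert param.item() == 0.5
```

With the pre-patch loader, the same call raises AssertionError because torch.Size([1]) != torch.Size([]), which is the scalar loading failure this commit fixes.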
