fix tp lora adapter in pytorch engine (#1300)
grimoire authored Mar 18, 2024
1 parent bd29205 · commit 299d522
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions lmdeploy/pytorch/models/peft.py
@@ -198,8 +198,8 @@ def __gather_xa(xa):
         if len(lora_input.ranks) > 1:
             gathered_xa = rearange_all_gather(
                 gathered_xa,
-                q_start_loc=lora_input.q_start_loc,
-                q_seqlens=lora_input.q_seqlens,
+                b_start_loc=lora_input.q_start_loc,
+                b_seq_lens=lora_input.q_seqlens,
                 adapter_ids=lora_input.adapter_ids,
                 ranks=lora_input.ranks,
                 world_size=world_size,
@@ -230,8 +230,8 @@ def __gather_xa(xa):
         if len(lora_input.ranks) > 1:
             gathered_xa = rearange_all_gather(
                 gathered_xa,
-                q_start_loc=lora_input.q_start_loc,
-                q_seqlens=lora_input.q_seqlens,
+                b_start_loc=lora_input.q_start_loc,
+                b_seq_lens=lora_input.q_seqlens,
                 adapter_ids=lora_input.adapter_ids,
                 ranks=lora_input.ranks,
                 world_size=world_size,
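Both hunks make the same one-keyword fix: the call sites passed the batch layout as q_start_loc/q_seqlens, but rearange_all_gather declares those parameters as b_start_loc/b_seq_lens, so any tensor-parallel LoRA forward that reached this path raised a TypeError before doing any work. The values themselves (lora_input.q_start_loc, lora_input.q_seqlens) are unchanged; only the keyword names are corrected. Below is a minimal, self-contained sketch of the failure mode; the stand-in signature and dummy values are assumptions for illustration, and only the keyword names come from the patch itself.

# Stand-in for the real kernel in lmdeploy/pytorch (assumed signature;
# only the parameter names b_start_loc/b_seq_lens match the patched code).
def rearange_all_gather(x, b_start_loc, b_seq_lens, adapter_ids, ranks,
                        world_size):
    # Stand-in body; the real kernel regathers tensor-parallel LoRA
    # activations across ranks.
    return x


kwargs = dict(adapter_ids=[0], ranks=[8, 16], world_size=2)

# Old call site: the keyword names do not match the signature, so the
# call crashed whenever len(lora_input.ranks) > 1.
try:
    rearange_all_gather("xa", q_start_loc=[0], q_seqlens=[4], **kwargs)
except TypeError as err:
    print(err)  # ... got an unexpected keyword argument 'q_start_loc'

# Fixed call site: the same values, bound to the declared parameter names.
rearange_all_gather("xa", b_start_loc=[0], b_seq_lens=[4], **kwargs)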
