
Commit

fix vllm-hpu-extension dependency
Signed-off-by: Konrad Zawora <[email protected]>
kzawora-intel committed Dec 9, 2024
1 parent d1c2e15 commit 61bbf51
Showing 2 changed files with 12 additions and 1 deletion.
2 changes: 1 addition & 1 deletion requirements-hpu.txt
@@ -8,4 +8,4 @@ pandas
tabulate
setuptools>=61
setuptools-scm>=8
vllm-hpu-extension @ git+https://github.com/HabanaAI/vllm-hpu-extension.git@fd7f2e6
vllm-hpu-extension @ git+https://github.com/HabanaAI/vllm-hpu-extension.git@10deb76
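
The only change in this file is the revision pinned after the @ fragment. Pins of this form are standard pip direct references to a git revision, so bumping the hash and reinstalling the requirements file (for example with pip install -r requirements-hpu.txt) is enough to pull in the newer extension code. A generic sketch of the pattern, with a placeholder package name and revision:

# requirements sketch: pin a package to a specific git revision (placeholders, not real names)
some-package @ git+https://github.com/<org>/<repo>.git@<commit-sha-or-tag>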
11 changes: 11 additions & 0 deletions vllm/attention/backends/hpu_attn.py
@@ -111,8 +111,16 @@ def __init__(
self.matmul_qk = Matmul()
self.softmax = Softmax()
self.matmul_av = Matmul()
self.batch2block_matmul = Matmul()
self.block2batch_matmul = Matmul()
# NOTE(kzawora): Contiguous PA is off until model runner supports it
self.k_cache = VLLMKVCache()
self.k_cache.use_contiguous_pa = False
self.v_cache = VLLMKVCache()
self.v_cache.use_contiguous_pa = False
# NOTE(kzawora): Pipelined PA is off until model runner supports it
ops.pa_impl = ops.pa

self.num_kv_heads = num_heads if num_kv_heads is None else num_kv_heads
self.sliding_window = sliding_window
self.alibi_slopes = alibi_slopes
@@ -228,9 +236,12 @@ def forward(
block_mapping=attn_metadata.block_mapping,
block_bias=attn_metadata.attn_bias,
block_scales=attn_metadata.block_scales,
block_groups=None,
scale=self.scale,
matmul_qk_op=self.matmul_qk,
matmul_av_op=self.matmul_av,
batch2block_matmul_op=self.batch2block_matmul,
block2batch_matmul_op=self.block2batch_matmul,
keys_fetch_func=self.k_cache.fetch_from_cache,
values_fetch_func=self.v_cache.fetch_from_cache)
# Reshape the output tensor.
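
The new batch2block_matmul and block2batch_matmul wrappers follow the same pattern already used for matmul_qk and matmul_av: each matmul site gets its own Matmul module built in __init__ and is passed into the paged-attention call as a *_op argument, so per-site hooks (quantization, profiling) can be attached without touching the kernel. Below is a minimal sketch of that injection pattern, using hypothetical names and deliberately simplified, non-paged attention math rather than the actual vllm-hpu-extension call shown in the diff above.

import torch

class Matmul(torch.nn.Module):
    # Stand-in for the HPU extension's Matmul wrapper; each matmul site owns
    # one instance so it can be swapped or instrumented independently.
    def forward(self, a: torch.Tensor, b: torch.Tensor) -> torch.Tensor:
        return torch.matmul(a, b)

def attention_sketch(q, k, v, *, scale, matmul_qk_op, matmul_av_op):
    # Hypothetical stand-in for the extension's attention entry point: the
    # injected ops are called instead of hard-coded torch.matmul.
    scores = matmul_qk_op(q, k.transpose(-1, -2)) * scale
    probs = torch.softmax(scores, dim=-1)
    return matmul_av_op(probs, v)

# The backend builds the wrappers once (as in __init__ above) and hands them
# down on every forward call.
matmul_qk, matmul_av = Matmul(), Matmul()
q = torch.randn(1, 4, 8, 16)   # (batch, heads, query_len, head_dim)
k = torch.randn(1, 4, 32, 16)  # (batch, heads, kv_len, head_dim)
v = torch.randn(1, 4, 32, 16)
out = attention_sketch(q, k, v, scale=16 ** -0.5,
                       matmul_qk_op=matmul_qk, matmul_av_op=matmul_av)
print(out.shape)  # torch.Size([1, 4, 8, 16])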
