
Commit

Fix attn_bias calculation for alibi
itaraban committed Sep 10, 2024
1 parent 978b4d2 commit 4ddb893
Showing 1 changed file with 6 additions and 11 deletions.
17 changes: 6 additions & 11 deletions vllm/attention/backends/habana_attn.py
@@ -111,14 +111,8 @@ def __init__(
         self.position_bias = None
         self.alibi_slopes = alibi_slopes
         if alibi_slopes is not None:
-            # FIXME(kzawora): Need a general method to set max_seq_len on
-            # per-model basis.
             alibi_slopes_tensor = torch.tensor(alibi_slopes,
                                                dtype=torch.bfloat16)
-            self.position_bias = _make_alibi_bias(alibi_slopes_tensor,
-                                                  num_kv_heads,
-                                                  alibi_slopes_tensor.dtype,
-                                                  max_seq_len)
             self.alibi_slopes = alibi_slopes_tensor
         assert self.num_heads % self.num_kv_heads == 0
         self.num_queries_per_kv = self.num_heads // self.num_kv_heads
@@ -190,11 +184,12 @@ def forward(
                 assert attn_metadata.attn_bias is not None, \
                         'attn_bias must be set before calling model.forward!'
                 attn_bias = attn_metadata.attn_bias
-                if self.alibi_slopes is not None and \
-                   self.position_bias is not None:
-                    attn_bias.add_(self.position_bias[:, :,
-                                                      -attn_bias.size(2):,
-                                                      -attn_bias.size(3):])
+                if self.alibi_slopes is not None:
+                    self.position_bias = _make_alibi_bias(
+                        self.alibi_slopes, self.num_kv_heads, attn_bias.dtype,
+                        attn_bias.shape[-1])
+                    attn_bias = attn_bias.tile((1, self.num_kv_heads, 1, 1))
+                    attn_bias.add_(self.position_bias)
             else:
                 attn_bias = None

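A toy walkthrough of the fixed forward path, assuming the plain multi-head case (query heads equal to `num_kv_heads`) and reusing the hypothetical sketch above: `attn_bias` arrives with a single head dimension, gets tiled across KV heads, and the position bias is built from the bias's actual sequence length instead of being precomputed at `max_seq_len` and sliced.

import torch

num_kv_heads, seq_len = 4, 16
alibi_slopes = torch.tensor([2.0 ** -(i + 1) for i in range(num_kv_heads)])

# Additive causal mask: 0 on and below the diagonal, -inf above it.
causal = torch.full((seq_len, seq_len), float('-inf')).triu(1)
attn_bias = causal[None, None, :, :].clone()            # (1, 1, S, S)

# Build the bias from the mask's actual shape (the point of this fix),
# then replicate the mask per KV head and add the bias in place.
position_bias = _make_alibi_bias_sketch(alibi_slopes, num_kv_heads,
                                        attn_bias.dtype, attn_bias.shape[-1])
attn_bias = attn_bias.tile((1, num_kv_heads, 1, 1))     # (1, 4, S, S)
attn_bias.add_(position_bias)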

