Attn MetaData dtype should be same as model dtype (HabanaAI#271)
The attention metadata dtype was hardcoded to bfloat16, leading to a runtime
error when a model is instantiated in float32.
hlahkar authored and zhouyu5 committed Sep 20, 2024
1 parent aa5f80e commit 029658d
Showing 1 changed file with 4 additions and 2 deletions.
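
As a hedged illustration of the class of error the commit message describes (the real failure occurs inside the HPU attention path, not in this exact call), mixing a bfloat16 metadata tensor with float32 activations also fails at runtime in plain PyTorch:

import torch

# Illustration only: a float32 model combined with attention metadata that
# was hardcoded to bfloat16. Kernels that require matching dtypes raise a
# RuntimeError about mismatched scalar types.
scores = torch.randn(8, 8, dtype=torch.float32)      # float32 activations
attn_bias = torch.zeros(8, 8, dtype=torch.bfloat16)  # hardcoded metadata dtype

torch.matmul(scores, attn_bias)  # RuntimeError: mismatched scalar types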
vllm/worker/habana_model_runner.py (4 additions, 2 deletions)
@@ -238,11 +238,12 @@ def pad_list(list, k, v):
 
 class HpuModelAdapter():
 
-    def __init__(self, model, block_size, enforce_eager):
+    def __init__(self, model, block_size, dtype, enforce_eager):
         self.model = model
         self.prefill_use_fusedsdpa = os.getenv('VLLM_PROMPT_USE_FUSEDSDPA',
                                                '0').lower() in ['1', 'true']
         self.block_size = block_size
+        self.dtype = dtype
         if not htorch.utils.internal.is_lazy() and not enforce_eager:
             self.model = torch.compile(self.model,
                                        backend='hpu_backend',
@@ -304,7 +305,7 @@ def forward(self, *args, **kwargs):
         input_ids = kwargs['input_ids']
         kwargs['attn_metadata'] = self._update_metadata(
             kwargs['attn_metadata'], input_ids.size(0), input_ids.size(1),
-            input_ids.device, torch.bfloat16)
+            input_ids.device, self.dtype)
         LoraMask.setLoraMask(kwargs.pop('lora_mask'))
         hidden_states = self.model(*args, **kwargs)
         hidden_states = hidden_states.view(-1, hidden_states.shape[-1])
@@ -600,6 +601,7 @@ def load_model(self) -> None:
             self.model = _maybe_wrap_in_hpu_graph(
                 self.model,
                 self.block_size,
+                dtype=self.model_config.dtype,
                 enforce_eager=self.enforce_eager)
             msg = f"Wrapping in HPU Graph took {m_wrap.get_summary_string()}"
             logger.info(msg)
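
The shape of the fix, as a minimal self-contained sketch (HpuAdapterSketch and make_attn_bias are hypothetical names, not the vLLM API): the wrapper receives the model's dtype at construction time and uses it wherever metadata tensors are created, instead of hardcoding torch.bfloat16.

import torch

class HpuAdapterSketch:
    # Hypothetical stand-in for HpuModelAdapter: the dtype is injected at
    # construction time rather than assumed to be bfloat16.
    def __init__(self, model, block_size, dtype):
        self.model = model
        self.block_size = block_size
        self.dtype = dtype

    def make_attn_bias(self, batch_size, seq_len, device):
        # Metadata tensors follow the model's dtype, so a float32 model gets
        # float32 attention bias and no dtype mismatch occurs downstream.
        return torch.zeros(batch_size, seq_len, seq_len,
                           device=device, dtype=self.dtype)

model = torch.nn.Linear(8, 8)  # float32 by default
adapter = HpuAdapterSketch(model, block_size=128,
                           dtype=next(model.parameters()).dtype)
bias = adapter.make_attn_bias(1, 8, device="cpu")
assert bias.dtype == torch.float32

Passing dtype=self.model_config.dtype at the load_model call site keeps the model configuration as the single source of truth for runtime precision.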