From 15e308632d8f215b905631bdcb425a4ae7ba6d82 Mon Sep 17 00:00:00 2001
From: Yishuo Wang
Date: Wed, 8 Jan 2025 10:33:22 +0800
Subject: [PATCH] small fix and add comment

---
 python/llm/src/ipex_llm/transformers/low_bit_linear.py | 2 +-
 python/llm/src/ipex_llm/transformers/utils.py          | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/python/llm/src/ipex_llm/transformers/low_bit_linear.py b/python/llm/src/ipex_llm/transformers/low_bit_linear.py
index 317a103bf59..848ffe179fe 100644
--- a/python/llm/src/ipex_llm/transformers/low_bit_linear.py
+++ b/python/llm/src/ipex_llm/transformers/low_bit_linear.py
@@ -286,7 +286,7 @@ def use_batch_forward(x: torch.Tensor, qtype: int, output_len: int):
         or (
             qtype in [SYM_INT8, FP4, FP6, Q4_K, Q6_K]
             and batch_size <= 48
-            and device_name in ["arc", "pvc", "mtl", "lnl", "arl"]
+            and device_name in ["arc", "pvc", "mtl", "arl"]
             and x.shape[1] % 256 == 0
             and output_len % 32 == 0
         )
diff --git a/python/llm/src/ipex_llm/transformers/utils.py b/python/llm/src/ipex_llm/transformers/utils.py
index e86215e1671..056e2455be8 100644
--- a/python/llm/src/ipex_llm/transformers/utils.py
+++ b/python/llm/src/ipex_llm/transformers/utils.py
@@ -172,6 +172,8 @@ def get_xpu_device_name(device: torch.device):
     if device.type != "xpu":
         return device.type
     else:
+        # possible device names:
+        # ["arc", "pvc", "mtl", "lnl", "bmg", "arl", "legacy", "unknown"]
         import xe_linear
         return xe_linear.get_xpu_device_name(device)

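Context for the change: the first hunk narrows the device gate in use_batch_forward so that "lnl" no longer qualifies for the batched path with these qtypes, and the second hunk only documents the names that xe_linear.get_xpu_device_name can return. Below is a minimal, standalone sketch of the affected branch after this patch, for illustration only: the qtype constants and the batch_kernel_eligible helper are hypothetical stand-ins (the real check is use_batch_forward, which also contains other branches not shown here), and device_name is assumed to be one of the values listed in the new comment.

    import torch

    # Hypothetical stand-in values for the qtype constants defined in
    # low_bit_linear.py; the real values are internal to ipex_llm.
    SYM_INT8, FP4, FP6, Q4_K, Q6_K = 3, 4, 5, 6, 7

    def batch_kernel_eligible(x: torch.Tensor, qtype: int, output_len: int,
                              device_name: str) -> bool:
        # Reproduces only the branch touched by this patch: after the change,
        # "lnl" is no longer in the allowed device list for this path.
        batch_size = x.shape[0]
        return (
            qtype in [SYM_INT8, FP4, FP6, Q4_K, Q6_K]
            and batch_size <= 48
            and device_name in ["arc", "pvc", "mtl", "arl"]
            and x.shape[1] % 256 == 0
            and output_len % 32 == 0
        )

    if __name__ == "__main__":
        x = torch.empty(16, 4096)
        print(batch_kernel_eligible(x, SYM_INT8, output_len=4096, device_name="arc"))  # True
        print(batch_kernel_eligible(x, SYM_INT8, output_len=4096, device_name="lnl"))  # False after this patch

The practical effect, presumably, is that "lnl" devices now fall back to the non-batched forward path for these quantization types, while the other listed devices keep using the batched kernel when the batch-size and alignment conditions hold.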