
Commit

fix
plusbang committed Jul 5, 2024
1 parent eeab011 commit 0a74495
Showing 1 changed file with 1 addition and 1 deletion.
python/llm/src/ipex_llm/transformers/pipeline_parallel.py (2 changes: 1 addition & 1 deletion)
@@ -107,7 +107,7 @@ def init_pipeline_parallel():
     dist.init_process_group('ccl')
 
 
-def _check_quantize_kv_cache(model, idx, batch_size) -> bool:
+def _check_quantize_kv_cache(model, idx, batch_size):
     # align use_quantize_kv_cache setting for different GPU in pipeline parallel
     pp_quantize_kv_cache = (os.environ.get("BIGDL_QUANTIZE_KV_CACHE", None) == "1") or \
         (os.environ.get("IPEX_LLM_QUANTIZE_KV_CACHE", None) == "1") or \
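The hunk above gates kv-cache quantization on environment variables, so every rank in the pipeline-parallel group reads the same environment and resolves the same use_quantize_kv_cache setting. A minimal standalone sketch of that pattern follows; the function name, the batch-size fallback, and its threshold are invented for illustration and are not the elided remainder of the ipex_llm condition.

import os

def _toy_should_quantize_kv_cache(batch_size):
    # Explicit opt-in via either env var; because all pipeline-parallel
    # ranks see the same environment, they stay aligned on this choice.
    env_on = (os.environ.get("BIGDL_QUANTIZE_KV_CACHE", None) == "1") or \
             (os.environ.get("IPEX_LLM_QUANTIZE_KV_CACHE", None) == "1")
    # Hypothetical fallback heuristic: quantize only for small batches
    # when no env var is set (the real condition lies outside this hunk).
    return env_on or batch_size <= 8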
