Migrate harness to ipexllm (#10703)
* migrate to ipexlm

* fix workflow

* fix run_multi

* fix precision map

* rename ipexlm to ipexllm

* rename bigdl to ipex in comments

Chen, Zhentao authored Apr 9, 2024
1 parent 8cf26d8 commit d59e0cc
Showing 5 changed files with 9 additions and 8 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/llm-harness-evaluation.yml
```diff
@@ -189,7 +189,7 @@ jobs:
           fi
           python run_llb.py \
-            --model bigdl-llm \
+            --model ipex-llm \
             --pretrained ${MODEL_PATH} \
             --precision ${{ matrix.precision }} \
             --device ${{ matrix.device }} \
```
4 changes: 2 additions & 2 deletions python/llm/dev/benchmark/harness/harness_to_leaderboard.py
```diff
@@ -48,7 +48,7 @@
     drop='f1'
 )
 
-def parse_precision(precision, model="bigdl-llm"):
+def parse_precision(precision, model="ipex-llm"):
     result = match(r"([a-zA-Z_]+)(\d+)([a-zA-Z_\d]*)", precision)
     datatype = result.group(1)
     bit = int(result.group(2))
@@ -62,6 +62,6 @@ def parse_precision(precision, model="bigdl-llm"):
     else:
         if model == "hf-causal":
             return f"bnb_type={precision}"
-        if model == "bigdl-llm":
+        if model == "ipex-llm":
             return f"load_in_low_bit={precision}"
    raise RuntimeError(f"invald precision {precision}")
```
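For reference, here is a minimal sketch of how the renamed default plays out, based only on the branches visible in this hunk; the lines elided around line 62 of the file may handle further precisions, and the function and example values below are illustrative assumptions, not the repository code:

```python
# Minimal sketch, not the repository file: mirrors only the branches visible
# in the hunk above.
from re import match

def parse_precision_sketch(precision, model="ipex-llm"):
    result = match(r"([a-zA-Z_]+)(\d+)([a-zA-Z_\d]*)", precision)
    datatype = result.group(1)   # e.g. "sym_int"
    bit = int(result.group(2))   # e.g. 4
    # ...elided branches for fp16/bf16-style precisions...
    if model == "hf-causal":
        return f"bnb_type={precision}"
    if model == "ipex-llm":
        return f"load_in_low_bit={precision}"
    raise RuntimeError(f"invalid precision {precision}")

print(parse_precision_sketch("sym_int4"))                # load_in_low_bit=sym_int4
print(parse_precision_sketch("nf4", model="hf-causal"))  # bnb_type=nf4
```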
```diff
@@ -35,7 +35,7 @@ def _collate(x):
 utils.Reorderer = force_decrease_order(utils.Reorderer)
 
 
-class BigDLLM(AutoCausalLM):
+class IPEXLLM(AutoCausalLM):
     AUTO_MODEL_CLASS = AutoModelForCausalLM
     AutoCausalLM_ARGS = inspect.getfullargspec(AutoCausalLM.__init__).args
     def __init__(self, *args, **kwargs):
```
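The __init__ body is hidden behind the fold, but the class attributes hint at the usual wrapper pattern: let the harness base class handle the arguments it knows about and route low-bit options to ipex-llm's loader. A hedged sketch, where the class name, import paths, and the kwargs split are assumptions rather than the file's contents:

```python
# Sketch of the wrapper pattern the renamed class appears to follow; the real
# __init__ body is elided from the diff, so this is an assumption, not the
# repository code. Import paths below are also assumptions.
import inspect

from lm_eval.models.huggingface import AutoCausalLM     # harness base class (assumed path)
from ipex_llm.transformers import AutoModelForCausalLM   # low-bit loader (assumed path)

class IPEXLLMSketch(AutoCausalLM):
    AUTO_MODEL_CLASS = AutoModelForCausalLM
    AutoCausalLM_ARGS = inspect.getfullargspec(AutoCausalLM.__init__).args

    def __init__(self, *args, **kwargs):
        # Likely intent of AutoCausalLM_ARGS: forward only the keyword
        # arguments the upstream harness class accepts, and keep the rest
        # (e.g. load_in_low_bit) for the ipex-llm from_pretrained call.
        base_kwargs = {k: v for k, v in kwargs.items() if k in self.AutoCausalLM_ARGS}
        self.extra_model_kwargs = {k: v for k, v in kwargs.items()
                                   if k not in self.AutoCausalLM_ARGS}
        super().__init__(*args, **base_kwargs)
```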
4 changes: 2 additions & 2 deletions python/llm/dev/benchmark/harness/run_llb.py
```diff
@@ -20,8 +20,8 @@
 from harness_to_leaderboard import *
 from lm_eval import tasks, evaluator, utils, models
 
-from bigdl_llm import BigDLLM
-models.MODEL_REGISTRY['bigdl-llm'] = BigDLLM # patch bigdl-llm to harness
+from ipexllm import IPEXLLM
+models.MODEL_REGISTRY['ipex-llm'] = IPEXLLM # patch ipex-llm to harness
 
 logging.getLogger("openai").setLevel(logging.WARNING)
```
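With the registry patched, the rest of run_llb.py can refer to the backend purely by the new name. A rough sketch of what that enables, assuming the lm_eval 0.3-era simple_evaluate API that run_llb.py appears to build on; the model path, precision, and task below are placeholders, not the script's exact call:

```python
# Hedged sketch, not run_llb.py itself. Assumes lm_eval's dict-style
# MODEL_REGISTRY and simple_evaluate API.
from lm_eval import evaluator, models
from ipexllm import IPEXLLM

models.MODEL_REGISTRY['ipex-llm'] = IPEXLLM  # same patch as above

results = evaluator.simple_evaluate(
    model="ipex-llm",                                   # resolved via MODEL_REGISTRY
    model_args="pretrained=/path/to/model,load_in_low_bit=sym_int4",
    tasks=["hellaswag"],
)
print(results["results"])
```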
5 changes: 3 additions & 2 deletions python/llm/dev/benchmark/harness/run_multi_llb.py
```diff
@@ -22,8 +22,9 @@
 from multiprocessing import Queue, Process
 import multiprocessing as mp
 from contextlib import redirect_stdout, redirect_stderr
-from bigdl_llm import BigDLLM
-models.MODEL_REGISTRY['bigdl-llm'] = BigDLLM # patch bigdl-llm to harness
+
+from ipexllm import IPEXLLM
+models.MODEL_REGISTRY['ipex-llm'] = IPEXLLM # patch ipex-llm to harness
 
 logging.getLogger("openai").setLevel(logging.WARNING)
```
