Skip to content

Commit

Permalink
delete
Browse files Browse the repository at this point in the history
  • Loading branch information
jenniew committed May 8, 2024
1 parent 8638cea commit a533ae8
Show file tree
Hide file tree
Showing 7 changed files with 4 additions and 198 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/llm-c-evaluation.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ on:
branches: [main]
paths:
- ".github/workflows/llm-c-evaluation.yml"
## Allows you to run this workflow manually from the Actions tab
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
inputs:
model_name:
Expand Down
6 changes: 0 additions & 6 deletions .github/workflows/llm-harness-evaluation.yml
Original file line number Diff line number Diff line change
Expand Up @@ -164,12 +164,6 @@ jobs:
shell: bash
run: |
pip install --upgrade datasets==2.14.6
#if [ "${{ matrix.model_name }}" = "Mistral-7B-v0.1" ]; then
# pip install --upgrade transformers==4.36
#else
# pip install --upgrade transformers==4.31
#fi
- name: Run harness
shell: bash
Expand Down
7 changes: 1 addition & 6 deletions .github/workflows/llm-ppl-evaluation.yml
Original file line number Diff line number Diff line change
Expand Up @@ -148,12 +148,7 @@ jobs:
- name: Upgrade packages
shell: bash
run: |
pip install --upgrade datasets==2.14.6
#if [ "${{ matrix.model_name }}" = "Mistral-7B-v0.1" ]; then
# pip install --upgrade transformers==4.36
#else
# pip install --upgrade transformers==4.31
#fi
pip install --upgrade datasets==2.14.6
- name: Run perplexity
shell: bash
Expand Down
137 changes: 1 addition & 136 deletions .github/workflows/llm_performance_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ jobs:
source /opt/intel/oneapi/setvars.sh
bash python/llm/test/run-llm-install-tests.sh
- name: Test on xpu(transformers==4.36.0)
- name: Test on xpu(transformers==4.36.2)
shell: bash
run: |
date_for_test_version=$(date -d yesterday +%Y-%m-%d)
Expand All @@ -103,20 +103,6 @@ jobs:
sed -i 's/{today}/{today}_test1/g' run.py
python run.py
#- name: Test on xpu(transformers==4.34.0)
# shell: bash
# run: |
# source /opt/intel/oneapi/setvars.sh
# export USE_XETLA=OFF
# export SYCL_PI_LEVEL_ZERO_USE_IMMEDIATE_COMMANDLISTS=1
# upgrade transformers for model Mistral-7B-v0.1
# python -m pip install transformers==4.34.0
# cp python/llm/test/benchmark/arc-perf-transformers-434.yaml python/llm/dev/benchmark/all-in-one/config.yaml
# cd python/llm/dev/benchmark/all-in-one
# change csv name
# sed -i 's/test1/test2/g' run.py
# python run.py

- name: Test on xpu(transformers==4.37.0)
shell: bash
run: |
Expand Down Expand Up @@ -151,7 +137,6 @@ jobs:
cd python/llm/dev/benchmark/all-in-one
python ../../../test/benchmark/check_results.py -c test1 -y ../../../test/benchmark/arc-perf-test.yaml
python ../../../test/benchmark/check_results.py -c test2 -y ../../../test/benchmark/arc-perf-transformers-437.yaml
# python ../../../test/benchmark/check_results.py -c test2 -y ../../../test/benchmark/arc-perf-transformers-434.yaml
find . -name "*test*.csv" -delete
if [ ${{ github.event_name }} == "schedule" ] || [ ${{ github.event_name }} == "workflow_dispatch" ]; then
curl -T ./*.csv ${LLM_FTP_URL}/llm/nightly_perf/gpu/
Expand Down Expand Up @@ -429,30 +414,6 @@ jobs:
call conda deactivate
#- name: Prepare igpu perf test for Mistral (32-32)
# shell: bash
# run: |
# sed -i 's/{today}_test1/{today}_test2/g' python/llm/dev/benchmark/all-in-one/run.py
# sed -i "s/path to your local model hub/$MODEL_HUB_DIR/g" python/llm/test/benchmark/igpu-perf/32-32_434.yaml

#- name: Test on igpu for Mistral (32-32)
# shell: cmd
# run: |
# call conda activate igpu-perf
# pip install transformers==4.34.0

# call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat"
# set SYCL_CACHE_PERSISTENT=1
# set BIGDL_LLM_XMX_DISABLED=1

# cd python\llm\dev\benchmark\all-in-one
# move ..\..\..\test\benchmark\igpu-perf\32-32_434.yaml config.yaml
# set PYTHONIOENCODING=utf-8
# python run.py >> %CSV_SAVE_PATH%\32-32\log\%LOG_FILE% 2>&1
# if %ERRORLEVEL% neq 0 (exit /b 1)

# call conda deactivate

- name: Prepare igpu perf test for Qwen1.5 (32-32)
shell: bash
run: |
Expand Down Expand Up @@ -523,30 +484,6 @@ jobs:
call conda deactivate
#- name: Prepare igpu perf test for Mistral (1024-128)
# shell: bash
# run: |
# sed -i 's/{today}_test1/{today}_test2/g' python/llm/dev/benchmark/all-in-one/run.py
# sed -i "s/path to your local model hub/$MODEL_HUB_DIR/g" python/llm/test/benchmark/igpu-perf/1024-128_434.yaml

#- name: Test on igpu for Mistral (1024-128)
# shell: cmd
# run: |
# call conda activate igpu-perf
# pip install transformers==4.34.0

# call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat"
# set SYCL_CACHE_PERSISTENT=1
# set BIGDL_LLM_XMX_DISABLED=1

# cd python\llm\dev\benchmark\all-in-one
# move ..\..\..\test\benchmark\igpu-perf\1024-128_434.yaml config.yaml
# set PYTHONIOENCODING=utf-8
# python run.py >> %CSV_SAVE_PATH%\1024-128\log\%LOG_FILE% 2>&1
# if %ERRORLEVEL% neq 0 (exit /b 1)

# call conda deactivate

- name: Prepare igpu perf test for Qwen 1.5 (1024-128)
shell: bash
run: |
Expand Down Expand Up @@ -616,30 +553,6 @@ jobs:
call conda deactivate
#- name: Prepare igpu perf test for Mistral (2048-256)
# shell: bash
# run: |
# sed -i 's/{today}_test1/{today}_test2/g' python/llm/dev/benchmark/all-in-one/run.py
# sed -i "s/path to your local model hub/$MODEL_HUB_DIR/g" python/llm/test/benchmark/igpu-perf/2048-256_434.yaml

#- name: Test on igpu for Mistral (2048-256)
# shell: cmd
# run: |
# call conda activate igpu-perf
# pip install transformers==4.34.0

# call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat"
# set SYCL_CACHE_PERSISTENT=1
# set BIGDL_LLM_XMX_DISABLED=1

# cd python\llm\dev\benchmark\all-in-one
# move ..\..\..\test\benchmark\igpu-perf\2048-256_434.yaml config.yaml
# set PYTHONIOENCODING=utf-8
# python run.py >> %CSV_SAVE_PATH%\2048-256\log\%LOG_FILE% 2>&1
# if %ERRORLEVEL% neq 0 (exit /b 1)

# call conda deactivate

- name: Prepare igpu perf test for Qwen 1.5 (2048-256)
shell: bash
run: |
Expand Down Expand Up @@ -709,30 +622,6 @@ jobs:
call conda deactivate
#- name: Prepare igpu perf test for Mistral (load_low_bit 1024-128)
# shell: bash
# run: |
# sed -i 's/{today}_test1/{today}_test2/g' python/llm/dev/benchmark/all-in-one/run.py
# sed -i "s/path to your local model hub/$MODEL_HUB_DIR/g" python/llm/test/benchmark/igpu-perf/1024-128_loadlowbit_434.yaml

#- name: Test on igpu for Mistral (load_low_bit 1024-128)
# shell: cmd
# run: |
# call conda activate igpu-perf
# pip install transformers==4.34.0

# call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat"
# set SYCL_CACHE_PERSISTENT=1
# set BIGDL_LLM_XMX_DISABLED=1

# cd python\llm\dev\benchmark\all-in-one
# move ..\..\..\test\benchmark\igpu-perf\1024-128_loadlowbit_434.yaml config.yaml
# set PYTHONIOENCODING=utf-8
# python run.py >> %CSV_SAVE_PATH%\1024-128_loadlowbit\log\%LOG_FILE% 2>&1
# if %ERRORLEVEL% neq 0 (exit /b 1)

# call conda deactivate

- name: Prepare igpu perf test for Qwen 1.5 (load_low_bit 1024-128)
shell: bash
run: |
Expand Down Expand Up @@ -800,30 +689,6 @@ jobs:
call conda deactivate
#- name: Prepare igpu perf test for Mistral (int4+fp16 1024-128)
# shell: bash
# run: |
# sed -i 's/{today}_test1/{today}_test2/g' python/llm/dev/benchmark/all-in-one/run.py
# sed -i "s/path to your local model hub/$MODEL_HUB_DIR/g" python/llm/test/benchmark/igpu-perf/1024-128_int4_fp16_434.yaml

#- name: Test on igpu for Mistral (int4+fp16 1024-128)
# shell: cmd
# run: |
# call conda activate igpu-perf
# pip install transformers==4.34.0

# call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat"
# set SYCL_CACHE_PERSISTENT=1
# set BIGDL_LLM_XMX_DISABLED=1

# cd python\llm\dev\benchmark\all-in-one
# move ..\..\..\test\benchmark\igpu-perf\1024-128_int4_fp16_434.yaml config.yaml
# set PYTHONIOENCODING=utf-8
# python run.py >> %CSV_SAVE_PATH%\1024-128_int4_fp16\log\%LOG_FILE% 2>&1
# if %ERRORLEVEL% neq 0 (exit /b 1)

# call conda deactivate

- name: Prepare igpu perf test for Qwen 1.5 (int4+fp16 1024-128)
shell: bash
run: |
Expand Down
4 changes: 1 addition & 3 deletions .github/workflows/llm_unit_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -367,9 +367,7 @@ jobs:
source /home/arda/intel/oneapi/setvars.sh
fi
python -m pip install datasets librosa soundfile einops tiktoken transformers_stream_generator
bash python/llm/test/run-llm-inference-tests-gpu.sh
# python -m pip install transformers==4.34.0
# bash python/llm/test/run-llm-inference-tests-gpu-434.sh
bash python/llm/test/run-llm-inference-tests-gpu.sh
- name: Run LLM example tests
shell: bash
Expand Down
16 changes: 0 additions & 16 deletions python/llm/test/benchmark/arc-perf-transformers-434.yaml

This file was deleted.

30 changes: 0 additions & 30 deletions python/llm/test/run-llm-inference-tests-gpu-434.sh

This file was deleted.

0 comments on commit a533ae8

Please sign in to comment.