Commit
Merge branch 'main' of https://github.com/intel-analytics/BigDL into fix_gptj
Showing 46 changed files with 1,984 additions and 192 deletions.
.github/workflows/llm_tests_for_stable_version_on_spr.yml
129 changes: 129 additions & 0 deletions
@@ -0,0 +1,129 @@
name: SPR LLM Test for Stable Version

# Cancel previous runs in the PR when you push new commits
concurrency:
  group: ${{ github.workflow }}-llm-performance-tests-${{ github.event.pull_request.number || github.run_id }}
  cancel-in-progress: true

# Controls when the action will run.
on:
  # pull_request:
  #   branches: [main]
  #   paths:
  #     - ".github/workflows/llm_performance_tests.yml"
  #     - "python/llm/test/benchmark/**"
  #     - "python/llm/dev/benchmark/all-in-one/**"
  workflow_dispatch:
  workflow_call:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  llm-cpp-build:
    uses: ./.github/workflows/llm-binary-build.yml

  llm-perf-regression-test-on-spr:
    needs: llm-cpp-build
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.9"]
    runs-on: [self-hosted, llm, spr01-perf]
    env:
      OMP_NUM_THREADS: 16
      THREAD_NUM: 16
      ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies
        shell: bash
        run: |
          python -m pip install --upgrade pip
          python -m pip install --upgrade wheel
          python -m pip install --upgrade omegaconf
          python -m pip install --upgrade pandas
          python -m pip install --upgrade einops
          python -m pip install --upgrade tiktoken
          python -m pip install --upgrade transformers_stream_generator
      - name: Download llm binary
        uses: ./.github/actions/llm/download-llm-binary

      - name: Run LLM install (all) test
        uses: ./.github/actions/llm/setup-llm-env

      - name: Test on cpu
        shell: bash
        run: |
          mv python/llm/test/benchmark/stable-version-cpu-perf-test.yaml python/llm/dev/benchmark/all-in-one/config.yaml
          cd python/llm/dev/benchmark/all-in-one
          export http_proxy=${HTTP_PROXY}
          export https_proxy=${HTTPS_PROXY}
          source bigdl-llm-init -t
          export OMP_NUM_THREADS=48
          # hide time info
          sed -i 's/str(end - st)/"xxxxxx"/g' run.py
          python run.py
          cp ./*.csv /models/stable_version_perf_regression_test_cpu/
          cd ../../../test/benchmark
          python -m pip install pandas==1.5.3
          python csv_to_html.py -f /models/stable_version_perf_regression_test_cpu/ -b /models/stable_version_perf_regression_test_cpu/transformer_int4-results-1baseline.csv -t 5.0

  llm-stress-test-on-spr:
    needs: llm-perf-regression-test-on-spr
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.9"]
    runs-on: [self-hosted, llm, spr01-perf]
    env:
      OMP_NUM_THREADS: 16
      THREAD_NUM: 16
      ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies
        shell: bash
        run: |
          python -m pip install --upgrade pip
          python -m pip install --upgrade wheel
          python -m pip install --upgrade omegaconf
          python -m pip install --upgrade pandas
          python -m pip install --upgrade einops
          python -m pip install --upgrade tiktoken
          python -m pip install --upgrade transformers_stream_generator
      - name: Download llm binary
        uses: ./.github/actions/llm/download-llm-binary

      - name: Run LLM install (all) test
        uses: ./.github/actions/llm/setup-llm-env

      - name: Test on cpu
        shell: bash
        run: |
          mv python/llm/test/benchmark/stable-version-cpu-stress-test.yaml python/llm/dev/benchmark/all-in-one/config.yaml
          cd python/llm/dev/benchmark/all-in-one
          export http_proxy=${HTTP_PROXY}
          export https_proxy=${HTTPS_PROXY}
          source bigdl-llm-init -t
          export OMP_NUM_THREADS=48
          # hide time info
          sed -i 's/str(end - st)/"xxxxxx"/g' run-stress-test.py
          python run-stress-test.py
          cp ./*.csv /models/stable_version_stress_test_cpu/
          cd ../../../test/benchmark
          python -m pip install pandas==1.5.3
          python csv_to_html.py -f /models/stable_version_stress_test_cpu/
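Since the new workflow is only wired to workflow_dispatch and workflow_call (the pull_request trigger is commented out), it can be started by hand, for example with the GitHub CLI; the --ref value below is only an illustration:

gh workflow run llm_tests_for_stable_version_on_spr.yml --ref main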
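The perf regression job compares the freshly generated CSVs against a stored baseline via csv_to_html.py -b ... -t 5.0. As a rough, hypothetical sketch only (not the repository's csv_to_html.py, and with placeholder file and column names, since the real CSV schema is produced by run.py), a 5% regression check of that kind could look like this:

# Hypothetical sketch of a threshold-based regression check; all names below are placeholders.
import pandas as pd

THRESHOLD_PCT = 5.0  # mirrors the "-t 5.0" argument passed in the workflow

baseline = pd.read_csv("transformer_int4-results-1baseline.csv")   # baseline file name from the workflow
current = pd.read_csv("transformer_int4-results-latest.csv")       # placeholder name for the new results

# Join baseline and current results per model and compute the latency change in percent.
merged = current.merge(baseline, on="model", suffixes=("_new", "_base"))
merged["regression_pct"] = (merged["latency_new"] - merged["latency_base"]) / merged["latency_base"] * 100

# Rows whose latency grew by more than the threshold are flagged as regressions.
print(merged[merged["regression_pct"] > THRESHOLD_PCT])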