restore original model_index.json after save_pretrained call #4860
Workflow file for this run
# This workflow will install Python dependencies and run the OpenVINO tests across a matrix of Python and Transformers versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: OpenVINO - Test

on:
  push:
    branches:
      - main
      - v*-release
  pull_request:
    branches:
      - main

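# Cancel any in-progress run for the same workflow and pull request when new commits are pushed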
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  build:
    strategy:
      fail-fast: false
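      # Cover the lowest and highest Python and Transformers versions exercised by this job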
      matrix:
        python-version: ["3.8", "3.12"]
        transformers-version: ["4.36.0", "4.45.*"]
        os: [ubuntu-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
      - name: Setup Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          # Install the CPU-only PyTorch build to avoid pulling CUDA packages onto GitHub runners, which have no GPU
          pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu
          pip install .[openvino,openvino-tokenizers,tests,diffusers] onnxruntime
          pip install transformers==${{ matrix.transformers-version }}
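      # For the older Transformers pin, keep accelerate on the 0.x series (assumed compatibility constraint)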
      - if: ${{ matrix.transformers-version == '4.36.0' }}
        run: pip install accelerate==0.*
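      # HF_HUB_READ_TOKEN: read-only token, presumably used by tests that pull models from the Hugging Face Hub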
      - name: Test with Pytest
        env:
          HF_HUB_READ_TOKEN: ${{ secrets.HF_HUB_READ_TOKEN }}
        run: |
          pytest tests/openvino/ --ignore tests/openvino/test_modeling_basic.py --durations=0
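      # Run the basic modeling tests with nncf uninstalled, so the package is exercised without its optional compression dependency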
      - name: Test basic
        run: |
          pip uninstall -y nncf
          pytest tests/openvino/test_modeling_basic.py
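      # Smoke-test model export against the nightly OpenVINO wheels, through both the Python API and the CLI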
      - name: Test openvino-nightly
        run: |
          pip install -U --pre openvino openvino-tokenizers --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly
          python -c "from optimum.intel import OVModelForCausalLM; OVModelForCausalLM.from_pretrained('hf-internal-testing/tiny-random-gpt2', export=True, compile=False)"
          optimum-cli export openvino -m hf-internal-testing/tiny-random-gpt2 gpt2-ov
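
For local debugging, the nightly smoke test above can be reproduced outside CI as a standalone script. The sketch below is a minimal example, assuming optimum-intel is installed with its OpenVINO extras as in the "Install dependencies" step; the final save_pretrained call and the gpt2-ov output directory only mirror what the optimum-cli command writes and are not part of the workflow itself.

# Minimal local reproduction of the "Test openvino-nightly" smoke test.
from optimum.intel import OVModelForCausalLM

# Export the tiny test model to OpenVINO IR without compiling it,
# mirroring the python -c one-liner in the workflow.
model = OVModelForCausalLM.from_pretrained(
    "hf-internal-testing/tiny-random-gpt2",
    export=True,
    compile=False,
)

# Illustrative only: persist the exported model locally, comparable to what
# `optimum-cli export openvino -m hf-internal-testing/tiny-random-gpt2 gpt2-ov` produces.
model.save_pretrained("gpt2-ov")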