Skip to content

Commit

Permalink
workaround issue with weights loading in 8bit in optimum notebooks
Browse files Browse the repository at this point in the history
  • Loading branch information
eaidova committed Dec 26, 2023
1 parent c6a92b2 commit 37bf611
Show file tree
Hide file tree
Showing 19 changed files with 55 additions and 933 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@
"outputs": [],
"source": [
"%pip install -q \"openvino>=2023.1.0\"\n",
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\" datasets onnx transformers>=4.33.0 --extra-index-url https://download.pytorch.org/whl/cpu"
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" datasets onnx transformers>=4.33.0 --extra-index-url https://download.pytorch.org/whl/cpu"
]
},
{
Expand Down
38 changes: 3 additions & 35 deletions notebooks/124-hugging-face-hub/124-hugging-face-hub.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -335,7 +335,7 @@
}
],
"source": [
"%pip install -q \"optimum-intel\"@git+https://github.com/huggingface/optimum-intel.git onnx"
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" onnx"
]
},
{
Expand Down Expand Up @@ -428,7 +428,6 @@
"source": [
"### Convert model using Optimum CLI interface\n",
"[back to top ⬆️](#Table-of-contents:)\n",
"",
"\n",
"Alternatively, you can use the Optimum CLI interface for converting models (supported starting from optimum-intel version 1.12).\n",
"General command format:\n",
Expand Down Expand Up @@ -665,43 +664,12 @@
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"state": {
"076e75b32a964983a4a6df36c1c3d1e0": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "DropdownModel",
"state": {
"_options_labels": [
"CPU",
"GPU",
"AUTO"
],
"description": "Device:",
"index": 2,
"layout": "IPY_MODEL_6b2f876d11c646609ac313f511c02e54",
"style": "IPY_MODEL_afbbe0593d5f41fb8538ae616adaf924"
}
},
"6b2f876d11c646609ac313f511c02e54": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "2.0.0",
"model_name": "LayoutModel",
"state": {}
},
"afbbe0593d5f41fb8538ae616adaf924": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "DescriptionStyleModel",
"state": {
"description_width": ""
}
}
},
"state": {},
"version_major": 2,
"version_minor": 0
}
}
},
"nbformat": 4,
"nbformat_minor": 4
}
}
4 changes: 2 additions & 2 deletions notebooks/214-grammar-correction/214-grammar-correction.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@
}
],
"source": [
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\" \"openvino>=2023.1.0\" onnx gradio \"transformers>=4.33.0\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" \"openvino>=2023.1.0\" onnx gradio \"transformers>=4.33.0\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q \"nncf>=2.7.0\" datasets jiwer"
]
},
Expand Down Expand Up @@ -284,7 +284,7 @@
"if grammar_checker_dir.exists():\n",
" grammar_checker_model = OVModelForSequenceClassification.from_pretrained(grammar_checker_dir, device=device.value)\n",
"else:\n",
" grammar_checker_model = OVModelForSequenceClassification.from_pretrained(grammar_checker_model_id, export=True, device=device.value)\n",
" grammar_checker_model = OVModelForSequenceClassification.from_pretrained(grammar_checker_model_id, export=True, device=device.value, load_in_8bit=False)\n",
" grammar_checker_model.save_pretrained(grammar_checker_dir)"
]
},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install -q \"optimum-intel[openvino,diffusers]@git+https://github.com/huggingface/optimum-intel.git\" \"ipywidgets\" \"transformers>=4.33.0\" --extra-index-url https://download.pytorch.org/whl/cpu"
"%pip install -q \"optimum-intel[openvino,diffusers]@git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" \"ipywidgets\" \"transformers>=4.33.0\" --extra-index-url https://download.pytorch.org/whl/cpu"
]
},
{
Expand Down Expand Up @@ -412,4 +412,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@
}
],
"source": [
"%pip install -q \"optimum-intel[openvino,diffusers]@git+https://github.com/huggingface/optimum-intel.git\" \"ipywidgets\" \"transformers>=4.33\" --extra-index-url https://download.pytorch.org/whl/cpu"
"%pip install -q \"optimum-intel[openvino,diffusers]@git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" \"ipywidgets\" \"transformers>=4.33\" --extra-index-url https://download.pytorch.org/whl/cpu"
]
},
{
Expand Down Expand Up @@ -345,4 +345,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install -q \"diffusers>=0.16.1\" \"transformers>=4.33.0\" \"openvino>=2023.2.0\" \"nncf>=2.6.0\" datasets onnx gradio --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q \"diffusers>=0.16.1\" \"transformers>=4.33.0\" \"openvino>=2023.2.0\" \"nncf>=2.6.0\" onnx gradio --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q --upgrade \"git+https://github.com/huggingface/optimum-intel.git\" "
]
},
Expand Down Expand Up @@ -716,64 +716,7 @@
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"state": {
"2103f879d27c4e3398d099b0e053104f": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "DescriptionStyleModel",
"state": {
"description_width": ""
}
},
"21ad65086ed34572ab6917206e71df50": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "CheckboxModel",
"state": {
"description": "INT8 Compression",
"disabled": false,
"layout": "IPY_MODEL_c9a3e76301244505947a6863da7adb73",
"style": "IPY_MODEL_cb768e1a46314d8e9cb45d0c96280edc",
"value": true
}
},
"8bf68d1542064c918ed7f29ce76f442e": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "DropdownModel",
"state": {
"_options_labels": [
"CPU",
"GPU",
"AUTO"
],
"description": "Device:",
"index": 0,
"layout": "IPY_MODEL_ccc2b51fc350449d9e8abe13ccd03ed6",
"style": "IPY_MODEL_2103f879d27c4e3398d099b0e053104f"
}
},
"c9a3e76301244505947a6863da7adb73": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "2.0.0",
"model_name": "LayoutModel",
"state": {}
},
"cb768e1a46314d8e9cb45d0c96280edc": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "CheckboxStyleModel",
"state": {
"description_width": ""
}
},
"ccc2b51fc350449d9e8abe13ccd03ed6": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "2.0.0",
"model_name": "LayoutModel",
"state": {}
}
},
"state": {},
"version_major": 2,
"version_minor": 0
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@
"outputs": [],
"source": [
"%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu torch torchaudio \"diffusers>=0.16.1\" \"transformers>=4.33.0\"\n",
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\" onnx \"gradio>=3.34.0\" \"openvino>=2023.1.0\""
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" onnx \"gradio>=3.34.0\" \"openvino>=2023.1.0\""
]
},
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@
"outputs": [],
"source": [
"%pip install -q \"diffusers>=0.17.1\" \"openvino>=2023.1.0\" \"nncf>=2.5.0\" \"gradio\" \"onnx>=1.11.0\" \"transformers>=4.33.0\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\""
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\""
]
},
{
Expand Down Expand Up @@ -217,7 +217,6 @@
"source": [
"## Compare the Original and Quantized Models\n",
"[back to top ⬆️](#Table-of-contents:)\n",
"",
"\n",
"Compare the original [`distilbert-base-cased-finetuned-conll03-english`](https://huggingface.co/elastic/distilbert-base-cased-finetuned-conll03-english) model with quantized and converted to OpenVINO IR format models to see the difference."
]
Expand All @@ -228,7 +227,6 @@
"source": [
"### Compare performance\n",
"[back to top ⬆️](#Table-of-contents:)\n",
"",
"\n",
"As the Optimum Inference models are API compatible with Hugging Face Transformers models, we can just use `pipeline()` from [Hugging Face Transformers API](https://huggingface.co/docs/transformers/index) for inference."
]
Expand Down Expand Up @@ -324,7 +322,6 @@
"source": [
"## Prepare demo for Named Entity Recognition OpenVINO Runtime\n",
"[back to top ⬆️](#Table-of-contents:)\n",
"",
"\n",
"Now, you can try the NER model on your own text. Put your sentence in the input text box, click the Submit button, and the model will label the recognized entities in the text."
]
Expand Down Expand Up @@ -401,4 +398,4 @@
},
"nbformat": 4,
"nbformat_minor": 4
}
}
2 changes: 1 addition & 1 deletion notebooks/245-typo-detector/245-typo-detector.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@
"outputs": [],
"source": [
"%pip install -q \"diffusers>=0.17.1\" \"openvino>=2023.1.0\" \"nncf>=2.5.0\" \"gradio\" \"onnx>=1.11.0\" \"transformers>=4.33.0\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\""
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\""
]
},
{
Expand Down
2 changes: 1 addition & 1 deletion notebooks/247-code-language-id/247-code-language-id.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@
"outputs": [],
"source": [
"%pip install -q \"diffusers>=0.17.1\" \"openvino>=2023.1.0\" \"nncf>=2.5.0\" \"gradio\" \"onnx>=1.11.0\" \"transformers>=4.33.0\" \"evaluate\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\""
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\""
]
},
{
Expand Down
2 changes: 1 addition & 1 deletion notebooks/248-stable-diffusion-xl/248-segmind-vegart.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
"source": [
"%pip uninstall -q -y openvino-dev openvino openvino-nightly\n",
"%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu\\\n",
"torch transformers diffusers \"git+https://github.com/huggingface/optimum-intel.git\" gradio openvino-nightly"
"torch transformers diffusers \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" gradio openvino-nightly"
]
},
{
Expand Down
4 changes: 2 additions & 2 deletions notebooks/248-stable-diffusion-xl/248-ssd-b1.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@
},
"outputs": [],
"source": [
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\"\n",
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\"\n",
"%pip install -q \"openvino>=2023.1.0\"\n",
"%pip install -q --upgrade-strategy eager \"invisible-watermark>=0.2.0\" \"transformers>=4.33\" \"accelerate\" \"onnx\" \"onnxruntime\" safetensors \"diffusers>=0.22.0\"\n",
"%pip install -q gradio"
Expand Down Expand Up @@ -210,7 +210,7 @@
"\n",
"\n",
"if not model_dir.exists():\n",
" text2image_pipe = OVStableDiffusionXLPipeline.from_pretrained(model_id, compile=False, device=device.value, export=True)\n",
" text2image_pipe = OVStableDiffusionXLPipeline.from_pretrained(model_id, compile=False, device=device.value, export=True, load_in_8bit=False)\n",
" text2image_pipe.half()\n",
" text2image_pipe.save_pretrained(model_dir)\n",
" text2image_pipe.compile()\n",
Expand Down
Loading

0 comments on commit 37bf611

Please sign in to comment.