
Commit

Work around an issue with loading weights in 8-bit in the Optimum notebooks
eaidova committed Dec 26, 2023
1 parent c6a92b2 commit ffd83bc
Showing 19 changed files with 69 additions and 934 deletions.
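The changes below follow one pattern: every `%pip install` line pins optimum-intel to a fixed commit, and model exports pass `load_in_8bit=False` so the problematic 8-bit weight-compression path is avoided. A minimal sketch of that pattern, assuming a placeholder model id and output directory (not taken from any specific notebook):

```python
# Hedged sketch of the workaround applied across these notebooks.
# The model id and output directory below are illustrative placeholders.
from pathlib import Path

from optimum.intel.openvino import OVModelForSequenceClassification

model_id = "distilbert-base-uncased-finetuned-sst-2-english"  # assumption: any HF classification model
model_dir = Path("ov_model")

if model_dir.exists():
    ov_model = OVModelForSequenceClassification.from_pretrained(model_dir)
else:
    # load_in_8bit=False keeps the exporter from applying 8-bit weight compression,
    # which is the behaviour this commit works around.
    ov_model = OVModelForSequenceClassification.from_pretrained(model_id, export=True, load_in_8bit=False)
    ov_model.save_pretrained(model_dir)
```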
@@ -52,7 +52,7 @@
"outputs": [],
"source": [
"%pip install -q \"openvino>=2023.1.0\"\n",
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\" datasets onnx transformers>=4.33.0 --extra-index-url https://download.pytorch.org/whl/cpu"
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" datasets onnx transformers>=4.33.0 --extra-index-url https://download.pytorch.org/whl/cpu"
]
},
{
38 changes: 3 additions & 35 deletions notebooks/124-hugging-face-hub/124-hugging-face-hub.ipynb
@@ -335,7 +335,7 @@
}
],
"source": [
"%pip install -q \"optimum-intel\"@git+https://github.com/huggingface/optimum-intel.git onnx"
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" onnx"
]
},
{
@@ -428,7 +428,6 @@
"source": [
"### Convert model using Optimum CLI interface\n",
"[back to top ⬆️](#Table-of-contents:)\n",
"",
"\n",
"Alternatively, you can use the Optimum CLI interface for converting models (supported starting optimum-intel 1.12 version).\n",
"General command format:\n",
@@ -665,43 +664,12 @@
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"state": {
"076e75b32a964983a4a6df36c1c3d1e0": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "DropdownModel",
"state": {
"_options_labels": [
"CPU",
"GPU",
"AUTO"
],
"description": "Device:",
"index": 2,
"layout": "IPY_MODEL_6b2f876d11c646609ac313f511c02e54",
"style": "IPY_MODEL_afbbe0593d5f41fb8538ae616adaf924"
}
},
"6b2f876d11c646609ac313f511c02e54": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "2.0.0",
"model_name": "LayoutModel",
"state": {}
},
"afbbe0593d5f41fb8538ae616adaf924": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "DescriptionStyleModel",
"state": {
"description_width": ""
}
}
},
"state": {},
"version_major": 2,
"version_minor": 0
}
}
},
"nbformat": 4,
"nbformat_minor": 4
}
}
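The 124-hugging-face-hub notebook above also mentions conversion through the Optimum CLI (available since optimum-intel 1.12). A minimal sketch of that route, assuming a placeholder model id and output directory:

```python
# Hedged sketch of the Optimum CLI conversion path referenced above.
# General form: optimum-cli export openvino --model <model_id> <output_dir>
# The model id and output directory here are illustrative placeholders.
import subprocess

subprocess.run(
    [
        "optimum-cli", "export", "openvino",
        "--model", "distilbert-base-uncased-finetuned-sst-2-english",
        "ov_model_cli",
    ],
    check=True,
)
```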
4 changes: 2 additions & 2 deletions notebooks/214-grammar-correction/214-grammar-correction.ipynb
@@ -98,7 +98,7 @@
}
],
"source": [
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\" \"openvino>=2023.1.0\" onnx gradio \"transformers>=4.33.0\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" \"openvino>=2023.1.0\" onnx gradio \"transformers>=4.33.0\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q \"nncf>=2.7.0\" datasets jiwer"
]
},
@@ -284,7 +284,7 @@
"if grammar_checker_dir.exists():\n",
" grammar_checker_model = OVModelForSequenceClassification.from_pretrained(grammar_checker_dir, device=device.value)\n",
"else:\n",
" grammar_checker_model = OVModelForSequenceClassification.from_pretrained(grammar_checker_model_id, export=True, device=device.value)\n",
" grammar_checker_model = OVModelForSequenceClassification.from_pretrained(grammar_checker_model_id, export=True, device=device.value, load_in_8bit=False)\n",
" grammar_checker_model.save_pretrained(grammar_checker_dir)"
]
},
@@ -35,7 +35,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install -q \"optimum-intel[openvino,diffusers]@git+https://github.com/huggingface/optimum-intel.git\" \"ipywidgets\" \"transformers>=4.33.0\" --extra-index-url https://download.pytorch.org/whl/cpu"
"%pip install -q \"optimum-intel[openvino,diffusers]@git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" \"ipywidgets\" \"transformers>=4.33.0\" --extra-index-url https://download.pytorch.org/whl/cpu"
]
},
{
@@ -412,4 +412,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}
@@ -41,7 +41,7 @@
}
],
"source": [
"%pip install -q \"optimum-intel[openvino,diffusers]@git+https://github.com/huggingface/optimum-intel.git\" \"ipywidgets\" \"transformers>=4.33\" --extra-index-url https://download.pytorch.org/whl/cpu"
"%pip install -q \"optimum-intel[openvino,diffusers]@git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" \"ipywidgets\" \"transformers>=4.33\" --extra-index-url https://download.pytorch.org/whl/cpu"
]
},
{
@@ -345,4 +345,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}
@@ -1,6 +1,7 @@
{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"id": "ef2ed242-3561-464c-8d1c-cc3862e23702",
"metadata": {},
@@ -32,6 +33,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"id": "f97c435a",
"metadata": {},
@@ -51,6 +53,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"id": "08aa16b1-d2f6-4a3a-abfb-5ec278133c80",
"metadata": {},
@@ -69,11 +72,12 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install -q \"diffusers>=0.16.1\" \"transformers>=4.33.0\" \"openvino>=2023.2.0\" \"nncf>=2.6.0\" datasets onnx gradio --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q --upgrade \"git+https://github.com/huggingface/optimum-intel.git\" "
"%pip install -q \"diffusers>=0.16.1\" \"transformers>=4.33.0\" \"openvino>=2023.2.0\" \"nncf>=2.6.0\" onnx gradio --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q --upgrade \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" "
]
},
{
"attachments": {},
"cell_type": "markdown",
"id": "367f84f8-33e8-4ad6-bd40-e6fd41d2d703",
"metadata": {},
@@ -123,6 +127,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"id": "93fec698-344d-48aa-8899-6821bf3e16bf",
"metadata": {},
@@ -199,6 +204,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"id": "5b1238c8-dcc9-4495-aeff-1ecbd8bd5082",
"metadata": {},
@@ -296,6 +302,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"id": "b6d9c4a5-ef75-4076-9f1c-f45a2259ec46",
"metadata": {},
@@ -338,6 +345,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"id": "b9b5da4d-d2fd-440b-b204-7fbc6966dd1f",
"metadata": {},
@@ -362,6 +370,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"id": "c58611d6-0a91-4efd-976e-4221acbb43cd",
"metadata": {},
@@ -403,6 +412,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"id": "27a01739-1363-42ef-927f-6a340bdbe7ba",
"metadata": {},
@@ -454,6 +464,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"id": "583202d2-6d29-4729-af2e-232d3ee0bc2c",
"metadata": {},
@@ -524,6 +535,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"id": "562f2dcf-75ef-4554-85e3-e04f486776cc",
"metadata": {},
@@ -600,6 +612,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"id": "50d918a9-1cbe-49a5-85ad-5e370c8af7f5",
"metadata": {},
@@ -716,64 +729,7 @@
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"state": {
"2103f879d27c4e3398d099b0e053104f": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "DescriptionStyleModel",
"state": {
"description_width": ""
}
},
"21ad65086ed34572ab6917206e71df50": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "CheckboxModel",
"state": {
"description": "INT8 Compression",
"disabled": false,
"layout": "IPY_MODEL_c9a3e76301244505947a6863da7adb73",
"style": "IPY_MODEL_cb768e1a46314d8e9cb45d0c96280edc",
"value": true
}
},
"8bf68d1542064c918ed7f29ce76f442e": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "DropdownModel",
"state": {
"_options_labels": [
"CPU",
"GPU",
"AUTO"
],
"description": "Device:",
"index": 0,
"layout": "IPY_MODEL_ccc2b51fc350449d9e8abe13ccd03ed6",
"style": "IPY_MODEL_2103f879d27c4e3398d099b0e053104f"
}
},
"c9a3e76301244505947a6863da7adb73": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "2.0.0",
"model_name": "LayoutModel",
"state": {}
},
"cb768e1a46314d8e9cb45d0c96280edc": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "CheckboxStyleModel",
"state": {
"description_width": ""
}
},
"ccc2b51fc350449d9e8abe13ccd03ed6": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "2.0.0",
"model_name": "LayoutModel",
"state": {}
}
},
"state": {},
"version_major": 2,
"version_minor": 0
}
@@ -68,7 +68,7 @@
"outputs": [],
"source": [
"%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu torch torchaudio \"diffusers>=0.16.1\" \"transformers>=4.33.0\"\n",
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\" onnx \"gradio>=3.34.0\" \"openvino>=2023.1.0\""
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" onnx \"gradio>=3.34.0\" \"openvino>=2023.1.0\""
]
},
{
@@ -41,7 +41,7 @@
"outputs": [],
"source": [
"%pip install -q \"diffusers>=0.17.1\" \"openvino>=2023.1.0\" \"nncf>=2.5.0\" \"gradio\" \"onnx>=1.11.0\" \"transformers>=4.33.0\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\""
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\""
]
},
{
@@ -217,7 +217,6 @@
"source": [
"## Compare the Original and Quantized Models\n",
"[back to top ⬆️](#Table-of-contents:)\n",
"",
"\n",
"Compare the original [`distilbert-base-cased-finetuned-conll03-english`](https://huggingface.co/elastic/distilbert-base-cased-finetuned-conll03-english) model with quantized and converted to OpenVINO IR format models to see the difference."
]
@@ -228,7 +227,6 @@
"source": [
"### Compare performance\n",
"[back to top ⬆️](#Table-of-contents:)\n",
"",
"\n",
"As the Optimum Inference models are API compatible with Hugging Face Transformers models, we can just use `pipleine()` from [Hugging Face Transformers API](https://huggingface.co/docs/transformers/index) for inference."
]
@@ -324,7 +322,6 @@
"source": [
"## Prepare demo for Named Entity Recognition OpenVINO Runtime\n",
"[back to top ⬆️](#Table-of-contents:)\n",
"",
"\n",
"Now, you can try NER model on own text. Put your sentence to input text box, click Submit button, the model label the recognized entities in the text."
]
@@ -401,4 +398,4 @@
},
"nbformat": 4,
"nbformat_minor": 4
}
}
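As the markdown in the NER notebook above notes, Optimum Inference models are API compatible with Transformers, so the usual pipeline() helper works unchanged. A minimal sketch, assuming a token-classification model already exported and saved to a local directory:

```python
# Hedged sketch: running a transformers pipeline on an OpenVINO model exported with
# optimum-intel. The local directory name is a placeholder for a previously saved model.
from transformers import AutoTokenizer, pipeline

from optimum.intel.openvino import OVModelForTokenClassification

model_dir = "ner_ov_model"  # assumption: produced earlier by save_pretrained
ov_model = OVModelForTokenClassification.from_pretrained(model_dir)
tokenizer = AutoTokenizer.from_pretrained(model_dir)

ner = pipeline("token-classification", model=ov_model, tokenizer=tokenizer)
print(ner("My name is Sarah and I live in London"))
```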
2 changes: 1 addition & 1 deletion notebooks/245-typo-detector/245-typo-detector.ipynb
@@ -51,7 +51,7 @@
"outputs": [],
"source": [
"%pip install -q \"diffusers>=0.17.1\" \"openvino>=2023.1.0\" \"nncf>=2.5.0\" \"gradio\" \"onnx>=1.11.0\" \"transformers>=4.33.0\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\""
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\""
]
},
{
2 changes: 1 addition & 1 deletion notebooks/247-code-language-id/247-code-language-id.ipynb
@@ -104,7 +104,7 @@
"outputs": [],
"source": [
"%pip install -q \"diffusers>=0.17.1\" \"openvino>=2023.1.0\" \"nncf>=2.5.0\" \"gradio\" \"onnx>=1.11.0\" \"transformers>=4.33.0\" \"evaluate\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\""
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\""
]
},
{
2 changes: 1 addition & 1 deletion notebooks/248-stable-diffusion-xl/248-segmind-vegart.ipynb
@@ -35,7 +35,7 @@
"source": [
"%pip uninstall -q -y openvino-dev openvino openvino-nightly\n",
"%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu\\\n",
"torch transformers diffusers \"git+https://github.com/huggingface/optimum-intel.git\" gradio openvino-nightly"
"torch transformers diffusers \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\" gradio openvino-nightly"
]
},
{
4 changes: 2 additions & 2 deletions notebooks/248-stable-diffusion-xl/248-ssd-b1.ipynb
@@ -73,7 +73,7 @@
},
"outputs": [],
"source": [
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\"\n",
"%pip install -q \"git+https://github.com/huggingface/optimum-intel.git@478ad6985647fd581712aafc0f948da56dbf0f94\"\n",
"%pip install -q \"openvino>=2023.1.0\"\n",
"%pip install -q --upgrade-strategy eager \"invisible-watermark>=0.2.0\" \"transformers>=4.33\" \"accelerate\" \"onnx\" \"onnxruntime\" safetensors \"diffusers>=0.22.0\"\n",
"%pip install -q gradio"
@@ -210,7 +210,7 @@
"\n",
"\n",
"if not model_dir.exists():\n",
" text2image_pipe = OVStableDiffusionXLPipeline.from_pretrained(model_id, compile=False, device=device.value, export=True)\n",
" text2image_pipe = OVStableDiffusionXLPipeline.from_pretrained(model_id, compile=False, device=device.value, export=True, load_in_8bit=False)\n",
" text2image_pipe.half()\n",
" text2image_pipe.save_pretrained(model_dir)\n",
" text2image_pipe.compile()\n",
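The 248-ssd-b1 hunk above exports the SDXL pipeline with load_in_8bit=False, saves it, and compiles it. A hedged usage sketch for the saved pipeline, with a placeholder directory name and prompt:

```python
# Hedged sketch: generating an image with a previously saved OpenVINO SDXL pipeline.
# The directory name, prompt, and step count are illustrative only.
from optimum.intel.openvino import OVStableDiffusionXLPipeline

model_dir = "openvino-ssd-1b"  # assumption: directory written by save_pretrained above
pipe = OVStableDiffusionXLPipeline.from_pretrained(model_dir)

image = pipe(prompt="a close-up photo of a red fox in the snow", num_inference_steps=25).images[0]
image.save("result.png")
```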