-
Notifications
You must be signed in to change notification settings - Fork 124
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
f32 precision for compare-with-transformers tests #508
Changes from 1 commit
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -25,6 +25,7 @@ | |
import torch | ||
from datasets import load_dataset | ||
from evaluate import evaluator | ||
from openvino.runtime import get_version | ||
from parameterized import parameterized | ||
from PIL import Image | ||
from transformers import ( | ||
|
@@ -125,7 +126,10 @@ def test_load_from_hub_and_save_model(self): | |
loaded_model = OVModelForSequenceClassification.from_pretrained(self.OV_MODEL_ID, ov_config=ov_config) | ||
self.assertTrue(manual_openvino_cache_dir.is_dir()) | ||
self.assertGreaterEqual(len(list(manual_openvino_cache_dir.glob("*.blob"))), 1) | ||
self.assertEqual(loaded_model.request.get_property("PERFORMANCE_HINT").name, "THROUGHPUT") | ||
if get_version() < "2023.3": | ||
self.assertEqual(loaded_model.request.get_property("PERFORMANCE_HINT").name, "THROUGHPUT") | ||
else: | ||
self.assertEqual(loaded_model.request.get_property("PERFORMANCE_HINT"), "THROUGHPUT") | ||
|
||
with tempfile.TemporaryDirectory() as tmpdirname: | ||
loaded_model.save_pretrained(tmpdirname) | ||
|
@@ -247,7 +251,9 @@ class OVModelForSequenceClassificationIntegrationTest(unittest.TestCase): | |
def test_compare_to_transformers(self, model_arch): | ||
model_id = MODEL_NAMES[model_arch] | ||
set_seed(SEED) | ||
ov_model = OVModelForSequenceClassification.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForSequenceClassification.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
) | ||
self.assertIsInstance(ov_model.config, PretrainedConfig) | ||
transformers_model = AutoModelForSequenceClassification.from_pretrained(model_id) | ||
tokenizer = AutoTokenizer.from_pretrained(model_id) | ||
|
@@ -313,7 +319,9 @@ class OVModelForQuestionAnsweringIntegrationTest(unittest.TestCase): | |
def test_compare_to_transformers(self, model_arch): | ||
model_id = MODEL_NAMES[model_arch] | ||
set_seed(SEED) | ||
ov_model = OVModelForQuestionAnswering.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForQuestionAnswering.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
) | ||
self.assertIsInstance(ov_model.config, PretrainedConfig) | ||
transformers_model = AutoModelForQuestionAnswering.from_pretrained(model_id) | ||
tokenizer = AutoTokenizer.from_pretrained(model_id) | ||
|
@@ -386,7 +394,9 @@ class OVModelForTokenClassificationIntegrationTest(unittest.TestCase): | |
def test_compare_to_transformers(self, model_arch): | ||
model_id = MODEL_NAMES[model_arch] | ||
set_seed(SEED) | ||
ov_model = OVModelForTokenClassification.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForTokenClassification.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
) | ||
self.assertIsInstance(ov_model.config, PretrainedConfig) | ||
transformers_model = AutoModelForTokenClassification.from_pretrained(model_id) | ||
tokenizer = AutoTokenizer.from_pretrained(model_id) | ||
|
@@ -430,7 +440,9 @@ class OVModelForFeatureExtractionIntegrationTest(unittest.TestCase): | |
def test_compare_to_transformers(self, model_arch): | ||
model_id = MODEL_NAMES[model_arch] | ||
set_seed(SEED) | ||
ov_model = OVModelForFeatureExtraction.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForFeatureExtraction.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I suggest using OV_CONFIG const in utils and loading it via: There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. also setting NUN_STREAMS = 1 could be useful There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
I like being explicit in tests, especially when we deviate from defaults. But I can add an F32_CONFIG at the top of this file.
Agreed that this can be useful, but we should run some tests and, assuming no issues, set this by default in optimum-intel, not just for the tests. There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
Done There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. The same will also be needed in the stable diffusion tests. There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. |
||
) | ||
self.assertIsInstance(ov_model.config, PretrainedConfig) | ||
transformers_model = AutoModel.from_pretrained(model_id) | ||
tokenizer = AutoTokenizer.from_pretrained(model_id) | ||
|
@@ -492,7 +504,9 @@ class OVModelForCausalLMIntegrationTest(unittest.TestCase): | |
def test_compare_to_transformers(self, model_arch): | ||
model_id = MODEL_NAMES[model_arch] | ||
set_seed(SEED) | ||
ov_model = OVModelForCausalLM.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForCausalLM.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
) | ||
self.assertIsInstance(ov_model.config, PretrainedConfig) | ||
transformers_model = AutoModelForCausalLM.from_pretrained(model_id) | ||
tokenizer = AutoTokenizer.from_pretrained(model_id) | ||
|
@@ -637,7 +651,9 @@ class OVModelForMaskedLMIntegrationTest(unittest.TestCase): | |
def test_compare_to_transformers(self, model_arch): | ||
model_id = MODEL_NAMES[model_arch] | ||
set_seed(SEED) | ||
ov_model = OVModelForMaskedLM.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForMaskedLM.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
) | ||
self.assertIsInstance(ov_model.config, PretrainedConfig) | ||
transformers_model = AutoModelForMaskedLM.from_pretrained(model_id) | ||
tokenizer = AutoTokenizer.from_pretrained(model_id) | ||
|
@@ -693,7 +709,9 @@ class OVModelForImageClassificationIntegrationTest(unittest.TestCase): | |
def test_compare_to_transformers(self, model_arch): | ||
model_id = MODEL_NAMES[model_arch] | ||
set_seed(SEED) | ||
ov_model = OVModelForImageClassification.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForImageClassification.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
) | ||
self.assertIsInstance(ov_model.config, PretrainedConfig) | ||
transformers_model = AutoModelForImageClassification.from_pretrained(model_id) | ||
preprocessor = AutoFeatureExtractor.from_pretrained(model_id) | ||
|
@@ -729,7 +747,9 @@ def test_pipeline(self, model_arch): | |
|
||
@parameterized.expand(TIMM_MODELS) | ||
def test_compare_to_timm(self, model_id): | ||
ov_model = OVModelForImageClassification.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForImageClassification.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
) | ||
self.assertIsInstance(ov_model.config, PretrainedConfig) | ||
timm_model = timm.create_model(model_id, pretrained=True) | ||
preprocessor = TimmImageProcessor.from_pretrained(model_id) | ||
|
@@ -781,7 +801,9 @@ class OVModelForSeq2SeqLMIntegrationTest(unittest.TestCase): | |
def test_compare_to_transformers(self, model_arch): | ||
model_id = MODEL_NAMES[model_arch] | ||
set_seed(SEED) | ||
ov_model = OVModelForSeq2SeqLM.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForSeq2SeqLM.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
) | ||
|
||
self.assertIsInstance(ov_model.encoder, OVEncoder) | ||
self.assertIsInstance(ov_model.decoder, OVDecoder) | ||
|
@@ -920,7 +942,9 @@ def _generate_random_audio_data(self): | |
def test_compare_to_transformers(self, model_arch): | ||
model_id = MODEL_NAMES[model_arch] | ||
set_seed(SEED) | ||
ov_model = OVModelForAudioClassification.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForAudioClassification.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
) | ||
self.assertIsInstance(ov_model.config, PretrainedConfig) | ||
transformers_model = AutoModelForAudioClassification.from_pretrained(model_id) | ||
preprocessor = AutoFeatureExtractor.from_pretrained(model_id) | ||
|
@@ -985,7 +1009,9 @@ def test_load_vanilla_transformers_which_is_not_supported(self): | |
def test_compare_to_transformers(self, model_arch): | ||
model_id = MODEL_NAMES[model_arch] | ||
set_seed(SEED) | ||
ov_model = OVModelForCTC.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForCTC.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
) | ||
self.assertIsInstance(ov_model.config, PretrainedConfig) | ||
|
||
set_seed(SEED) | ||
|
@@ -1037,7 +1063,9 @@ def test_load_vanilla_transformers_which_is_not_supported(self): | |
def test_compare_to_transformers(self, model_arch): | ||
model_id = MODEL_NAMES[model_arch] | ||
set_seed(SEED) | ||
ov_model = OVModelForAudioXVector.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForAudioXVector.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
) | ||
self.assertIsInstance(ov_model.config, PretrainedConfig) | ||
|
||
set_seed(SEED) | ||
|
@@ -1091,7 +1119,9 @@ def test_load_vanilla_transformers_which_is_not_supported(self): | |
def test_compare_to_transformers(self, model_arch): | ||
model_id = MODEL_NAMES[model_arch] | ||
set_seed(SEED) | ||
ov_model = OVModelForAudioFrameClassification.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForAudioFrameClassification.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
) | ||
self.assertIsInstance(ov_model.config, PretrainedConfig) | ||
|
||
set_seed(SEED) | ||
|
@@ -1134,7 +1164,9 @@ class OVModelForPix2StructIntegrationTest(unittest.TestCase): | |
def test_compare_to_transformers(self, model_arch): | ||
model_id = MODEL_NAMES[model_arch] | ||
set_seed(SEED) | ||
ov_model = OVModelForPix2Struct.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForPix2Struct.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
) | ||
|
||
self.assertIsInstance(ov_model.encoder, OVEncoder) | ||
self.assertIsInstance(ov_model.decoder, OVDecoder) | ||
|
@@ -1223,7 +1255,9 @@ def _generate_random_audio_data(self): | |
def test_compare_to_transformers(self, model_arch): | ||
model_id = MODEL_NAMES[model_arch] | ||
set_seed(SEED) | ||
ov_model = OVModelForSpeechSeq2Seq.from_pretrained(model_id, export=True) | ||
ov_model = OVModelForSpeechSeq2Seq.from_pretrained( | ||
model_id, export=True, ov_config={"CACHE_DIR": "", "INFERENCE_PRECISION_HINT": "f32"} | ||
) | ||
self.assertIsInstance(ov_model.config, PretrainedConfig) | ||
transformers_model = AutoModelForSpeechSeq2Seq.from_pretrained(model_id) | ||
processor = get_preprocessor(model_id) | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
minor comment : it could be replaced with
is_openvino_version
for clarity. There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Thanks! Changed.