From 95b223caf544b9d270233e2677487f9048fb623d Mon Sep 17 00:00:00 2001
From: Ella Charlaix <ella@huggingface.co>
Date: Thu, 28 Sep 2023 16:56:06 +0200
Subject: [PATCH] format

---
 optimum/intel/openvino/utils.py      |  2 +-
 tests/openvino/test_exporters_cli.py | 13 +++++++------
 2 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/optimum/intel/openvino/utils.py b/optimum/intel/openvino/utils.py
index b1a473cdcc..8d65eae759 100644
--- a/optimum/intel/openvino/utils.py
+++ b/optimum/intel/openvino/utils.py
@@ -78,7 +78,7 @@
 
 
 _HEAD_TO_AUTOMODELS = {
-    "feature-extraction" : "OVModelForFeatureExtraction",
+    "feature-extraction": "OVModelForFeatureExtraction",
     "fill-mask": "OVModelForMaskedLM",
     "text-generation": "OVModelForCausalLM",
     "text2text-generation": "OVModelForSeq2SeqLM",
diff --git a/tests/openvino/test_exporters_cli.py b/tests/openvino/test_exporters_cli.py
index 4587274774..9983f690f7 100644
--- a/tests/openvino/test_exporters_cli.py
+++ b/tests/openvino/test_exporters_cli.py
@@ -19,8 +19,7 @@
 from utils_tests import MODEL_NAMES
 
 from optimum.exporters.openvino.__main__ import main_export
-from optimum.intel.openvino.utils import _HEAD_TO_AUTOMODELS
-from optimum.intel import (
+from optimum.intel import (  # noqa
     OVModelForAudioClassification,
     OVModelForCausalLM,
     OVModelForFeatureExtraction,
@@ -33,6 +32,8 @@
     OVStableDiffusionPipeline,
     OVStableDiffusionXLPipeline,
 )
+from optimum.intel.openvino.utils import _HEAD_TO_AUTOMODELS
+
 
 class OVCLIExportTestCase(unittest.TestCase):
     """
@@ -66,11 +67,11 @@ def test_export(self, task: str, model_type: str):
 
     @parameterized.expand(SUPPORTED_ARCHITECTURES)
     def test_exporters_cli(self, task: str, model_type: str):
-        with TemporaryDirectory() as tmpdirname:
+        with TemporaryDirectory() as tmpdir:
             subprocess.run(
-                f"optimum-cli export openvino --model {MODEL_NAMES[model_type]} --task {task} {tmpdirname}",
+                f"optimum-cli export openvino --model {MODEL_NAMES[model_type]} --task {task} {tmpdir}",
                 shell=True,
                 check=True,
             )
-            model_kwargs = {"use_cache" : task.endswith("with-past")} if "generation" in task else {}
-            ov_model = eval(_HEAD_TO_AUTOMODELS[task.replace("-with-past", "")]).from_pretrained(tmpdirname, **model_kwargs)
+            model_kwargs = {"use_cache": task.endswith("with-past")} if "generation" in task else {}
+            eval(_HEAD_TO_AUTOMODELS[task.replace("-with-past", "")]).from_pretrained(tmpdir, **model_kwargs)