diff --git a/optimum/intel/openvino/modeling_diffusion.py b/optimum/intel/openvino/modeling_diffusion.py
index 4443381cd6..0f11985ef5 100644
--- a/optimum/intel/openvino/modeling_diffusion.py
+++ b/optimum/intel/openvino/modeling_diffusion.py
@@ -55,7 +55,7 @@
 from ...exporters.openvino import main_export
 from .loaders import OVTextualInversionLoaderMixin
 from .modeling_base import OVBaseModel
-from .utils import ONNX_WEIGHTS_NAME, OV_TO_NP_TYPE, OV_XML_FILE_NAME
+from .utils import ONNX_WEIGHTS_NAME, OV_TO_NP_TYPE, OV_XML_FILE_NAME, print_compiled_model_properties
 
 
 core = Core()
@@ -540,6 +540,10 @@ def _compile(self):
         if self.request is None:
             logger.info(f"Compiling the {self._model_name} to {self.device} ...")
             self.request = core.compile_model(self.model, self.device, self.ov_config)
+            # OPENVINO_LOG_LEVEL can be found in https://docs.openvino.ai/2023.2/openvino_docs_OV_UG_supported_plugins_AUTO_debugging.html
+            if "OPENVINO_LOG_LEVEL" in os.environ and int(os.environ["OPENVINO_LOG_LEVEL"]) > 2:
+                logger.info(f"{self.device} SUPPORTED_PROPERTIES:")
+                print_compiled_model_properties(self.request)
 
     @property
     def device(self):
diff --git a/optimum/intel/openvino/modeling_seq2seq.py b/optimum/intel/openvino/modeling_seq2seq.py
index 6b759054d0..a5df33b658 100644
--- a/optimum/intel/openvino/modeling_seq2seq.py
+++ b/optimum/intel/openvino/modeling_seq2seq.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import logging
+import os
 from pathlib import Path
 from tempfile import gettempdir
 from typing import Dict, Optional, Tuple
@@ -26,6 +27,7 @@
 from transformers.file_utils import add_start_docstrings, add_start_docstrings_to_model_forward
 from transformers.modeling_outputs import BaseModelOutput, Seq2SeqLMOutput
 
+from .utils import print_compiled_model_properties
 from ..utils.import_utils import is_transformers_version
 from .modeling_base_seq2seq import OVBaseModelForSeq2SeqLM
 
@@ -407,6 +409,10 @@ def _compile(self):
         if self.request is None:
             logger.info(f"Compiling the encoder to {self._device} ...")
             self.request = core.compile_model(self.model, self._device, self.ov_config)
+            # OPENVINO_LOG_LEVEL can be found in https://docs.openvino.ai/2023.2/openvino_docs_OV_UG_supported_plugins_AUTO_debugging.html
+            if "OPENVINO_LOG_LEVEL" in os.environ and int(os.environ["OPENVINO_LOG_LEVEL"]) > 2:
+                logger.info(f"{self._device} SUPPORTED_PROPERTIES:")
+                print_compiled_model_properties(self.request)
 
 
 class OVDecoder:
@@ -505,7 +511,12 @@ def __call__(self, *args, **kwargs):
     def _compile(self):
         if self.request is None:
             logger.info(f"Compiling the decoder to {self._device} ...")
-            self.request = core.compile_model(self.model, self._device, self.ov_config).create_infer_request()
+            compiled_model = core.compile_model(self.model, self._device, self.ov_config)
+            self.request = compiled_model.create_infer_request()
+            # OPENVINO_LOG_LEVEL can be found in https://docs.openvino.ai/2023.2/openvino_docs_OV_UG_supported_plugins_AUTO_debugging.html
+            if "OPENVINO_LOG_LEVEL" in os.environ and int(os.environ["OPENVINO_LOG_LEVEL"]) > 2:
+                logger.info(f"{self._device} SUPPORTED_PROPERTIES:")
+                print_compiled_model_properties(compiled_model)
 
 
 @add_start_docstrings(
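
For context, a minimal usage sketch of the debug path added above. The pipeline class and checkpoint are illustrative choices and not part of this change; the only requirement introduced by the diff is that OPENVINO_LOG_LEVEL is set to a value above 2 before the submodels are compiled.

import os

# Assumption: variable name and threshold follow the check added in the diff above.
os.environ["OPENVINO_LOG_LEVEL"] = "3"  # any value above 2 enables the property dump

from optimum.intel import OVStableDiffusionPipeline

# Export the checkpoint to OpenVINO IR and run it; whenever a submodel
# (text encoder, UNet, VAE) is compiled, the log now also includes
# "<device> SUPPORTED_PROPERTIES:" followed by the compiled model's properties.
pipe = OVStableDiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5", export=True)
image = pipe("sailing ship in storm by Rembrandt").images[0]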