diff --git a/README.md b/README.md
index 2e75443c..67838483 100644
--- a/README.md
+++ b/README.md
@@ -360,7 +360,7 @@ In the example above for options B.1 and B.2 we provided a dataset containing a
 
 ```
 >>> # FOR EACH OPTION
->>> from nebullvm import optimize_torch_model
+>>> from nebullvm import optimize_onnx_model
 >>> import numpy as np
 >>> model_path = "path-to-onnx-model"
 >>> save_dir = "."
diff --git a/nebullvm/inference_learners/openvino.py b/nebullvm/inference_learners/openvino.py
index 01f4060e..5c17d2f9 100644
--- a/nebullvm/inference_learners/openvino.py
+++ b/nebullvm/inference_learners/openvino.py
@@ -22,8 +22,7 @@
 from nebullvm.transformations.base import MultiStageTransformation
 
 try:
-    import openvino
-    from openvino.runtime import Core
+    from openvino.runtime import Core, Model, CompiledModel, InferRequest
 except ImportError:
     if "intel" in cpuinfo.get_cpu_info()["brand_raw"].lower():
         warnings.warn(
@@ -33,13 +32,13 @@
         from nebullvm.installers.installers import install_openvino
 
         install_openvino(with_optimization=True)
-        import openvino
-        from openvino.runtime import Core
+        from openvino.runtime import Core, Model, CompiledModel, InferRequest
     else:
         warnings.warn(
             "No Openvino library detected. "
             "The Openvino Inference learner should not be used."
         )
+        Model = CompiledModel = InferRequest = object
 
 
 class OpenVinoInferenceLearner(BaseInferenceLearner, ABC):
@@ -62,9 +61,9 @@ class OpenVinoInferenceLearner(BaseInferenceLearner, ABC):
 
     def __init__(
         self,
-        model: openvino.runtime.Model,
-        compiled_model: openvino.runtime.CompiledModel,
-        infer_request: openvino.runtime.InferRequest,
+        model: Model,
+        compiled_model: CompiledModel,
+        infer_request: InferRequest,
         input_keys: List,
         output_keys: List,
         description_file: str,
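
The openvino.py hunks above apply a common optional-dependency pattern: import the runtime classes when OpenVINO is available, and otherwise bind the same names to `object` placeholders so the type annotations on `OpenVinoInferenceLearner.__init__` still resolve at import time. Below is a minimal standalone sketch of that pattern, not nebullvm's actual code; `somelib`, `Widget`, and `WidgetRunner` are hypothetical names used only for illustration.

# Minimal sketch of the optional-import fallback shown in the diff above.
# "somelib", "Widget", and "WidgetRunner" are hypothetical illustration names.
import warnings

try:
    from somelib import Widget  # real class when the optional dependency is installed
except ImportError:
    warnings.warn(
        "somelib is not installed; WidgetRunner should not be used."
    )
    Widget = object  # placeholder so the annotation below still resolves


class WidgetRunner:
    def __init__(self, widget: Widget):
        # With the placeholder, the module still imports cleanly even though
        # the runner cannot do real work without somelib installed.
        self.widget = widget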