fix imports in openvino cli command (#1020)
eaidova authored Nov 22, 2024
1 parent cf5b951 commit ee96c82
Showing 3 changed files with 27 additions and 25 deletions.
2 changes: 1 addition & 1 deletion optimum/commands/export/openvino.py
@@ -21,7 +21,6 @@
 from huggingface_hub.constants import HUGGINGFACE_HUB_CACHE
 
 from ...exporters import TasksManager
-from ...exporters.openvino.convert import save_preprocessors
 from ...intel.utils.import_utils import DIFFUSERS_IMPORT_ERROR, is_diffusers_available
 from ...intel.utils.modeling_utils import _infer_library_from_model_name_or_path
 from ...utils.save_utils import maybe_load_preprocessors
@@ -244,6 +243,7 @@ def parse_args(parser: "ArgumentParser"):
 
     def run(self):
         from ...exporters.openvino.__main__ import infer_task, main_export, maybe_convert_tokenizers
+        from ...exporters.openvino.utils import save_preprocessors
         from ...intel.openvino.configuration import _DEFAULT_4BIT_CONFIG, OVConfig, get_default_int4_config
 
         if self.args.library is None:
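
The corrected line imports save_preprocessors from its new home in optimum.exporters.openvino.utils. A minimal sketch of the deferred-import pattern visible in the hunk above, where the import happens inside run(), presumably to keep CLI startup light (the class name and body here are simplified assumptions, not the exact code from optimum/commands/export/openvino.py):

# Sketch only: the import is deferred into run() rather than done at module
# import time, matching the structure shown in the diff above.
class OVExportCommandSketch:
    def run(self):
        from optimum.exporters.openvino.utils import save_preprocessors  # new location after this commit

        # ... the rest of the export logic would call save_preprocessors here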
25 changes: 1 addition & 24 deletions optimum/exporters/openvino/convert.py
@@ -21,7 +21,6 @@
 from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
 
 import onnx
-from transformers import PretrainedConfig
 from transformers.generation import GenerationMixin
 from transformers.utils import is_tf_available, is_torch_available
 
@@ -54,7 +53,6 @@
     is_transformers_version,
 )
 from optimum.utils import DEFAULT_DUMMY_SHAPES, is_diffusers_available
-from optimum.utils.save_utils import maybe_save_preprocessors
 
 from ...intel.utils.import_utils import is_nncf_available
 from ...intel.utils.modeling_utils import _infer_library_from_model_or_model_class
@@ -73,6 +71,7 @@
     clear_class_registry,
     remove_none_from_dummy_inputs,
     save_config,
+    save_preprocessors,
 )
 
 
@@ -827,28 +826,6 @@ def export_tokenizer(
         save_model(model, output / file_name.format(suffix))
 
 
-def save_preprocessors(
-    preprocessors: List, config: PretrainedConfig, output: Union[str, Path], trust_remote_code: bool
-):
-    model_name_or_path = config._name_or_path
-    if hasattr(config, "export_model_type"):
-        model_type = config.export_model_type.replace("_", "-")
-    else:
-        model_type = config.model_type.replace("_", "-")
-    if preprocessors is not None:
-        # the phi3-vision processor has no chat_template attribute, which breaks saving the processor to disk
-        if is_transformers_version(">=", "4.45") and model_type == "phi3-v" and len(preprocessors) > 1:
-            if not hasattr(preprocessors[1], "chat_template"):
-                preprocessors[1].chat_template = getattr(preprocessors[0], "chat_template", None)
-        for processor in preprocessors:
-            try:
-                processor.save_pretrained(output)
-            except Exception as ex:
-                logger.error(f"Saving {type(processor)} failed with {ex}")
-    else:
-        maybe_save_preprocessors(model_name_or_path, output, trust_remote_code=trust_remote_code)
-
-
 def _add_runtime_options_to_rt_info(model: Model, options: Dict):
     """
     Add runtime options
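
A side effect of the relocation (an observation about the diff, not a statement from the commit): any downstream code that imported the helper from optimum.exporters.openvino.convert must switch to the utils module. A hedged compatibility shim for code targeting both old and new versions could look like this:

# Illustrative fallback, an assumption rather than an official pattern.
try:
    from optimum.exporters.openvino.utils import save_preprocessors  # new location
except ImportError:
    from optimum.exporters.openvino.convert import save_preprocessors  # old location, pre-ee96c82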
25 changes: 25 additions & 0 deletions optimum/exporters/openvino/utils.py
@@ -18,13 +18,16 @@
 from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional, Tuple, Union
 
+from transformers import PretrainedConfig
 from transformers.utils import is_torch_available
 
 from openvino.runtime import Dimension, PartialShape, Symbol
 from openvino.runtime.utils.types import get_element_type
 from optimum.exporters import TasksManager
 from optimum.exporters.onnx.base import OnnxConfig
+from optimum.intel.utils import is_transformers_version
 from optimum.utils import is_diffusers_available
+from optimum.utils.save_utils import maybe_save_preprocessors
 
 
 logger = logging.getLogger(__name__)
@@ -227,3 +230,25 @@ def save_config(config, save_dir):
     save_dir.mkdir(exist_ok=True, parents=True)
     output_config_file = Path(save_dir / "config.json")
     config.to_json_file(output_config_file, use_diff=True)
+
+
+def save_preprocessors(
+    preprocessors: List, config: PretrainedConfig, output: Union[str, Path], trust_remote_code: bool
+):
+    model_name_or_path = config._name_or_path
+    if hasattr(config, "export_model_type"):
+        model_type = config.export_model_type.replace("_", "-")
+    else:
+        model_type = config.model_type.replace("_", "-")
+    if preprocessors is not None:
+        # the phi3-vision processor has no chat_template attribute, which breaks saving the processor to disk
+        if is_transformers_version(">=", "4.45") and model_type == "phi3-v" and len(preprocessors) > 1:
+            if not hasattr(preprocessors[1], "chat_template"):
+                preprocessors[1].chat_template = getattr(preprocessors[0], "chat_template", None)
+        for processor in preprocessors:
+            try:
+                processor.save_pretrained(output)
+            except Exception as ex:
+                logger.error(f"Saving {type(processor)} failed with {ex}")
+    else:
+        maybe_save_preprocessors(model_name_or_path, output, trust_remote_code=trust_remote_code)
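
After this commit, save_preprocessors lives in optimum.exporters.openvino.utils alongside save_config. A minimal usage sketch under assumed inputs (the model ID and output path are illustrative, not taken from the commit; maybe_load_preprocessors comes from optimum.utils.save_utils, as in the CLI command above):

from pathlib import Path

from transformers import AutoConfig

from optimum.exporters.openvino.utils import save_preprocessors
from optimum.utils.save_utils import maybe_load_preprocessors

model_id = "microsoft/Phi-3-vision-128k-instruct"  # example model, an assumption
output = Path("ov_export")
output.mkdir(exist_ok=True)

config = AutoConfig.from_pretrained(model_id, trust_remote_code=True)
# Load whichever tokenizer/processor/feature-extractor objects exist for the model.
preprocessors = maybe_load_preprocessors(model_id, trust_remote_code=True)

# Saves each loaded preprocessor to `output`; if preprocessors were None, the
# function would fall back to maybe_save_preprocessors(model_name_or_path, ...).
save_preprocessors(preprocessors, config, output, trust_remote_code=True)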
