Skip to content

Commit

Permalink
fix input names
Browse files · Browse the repository at this point in the history
  • Loading branch information
echarlaix committed Oct 23, 2023
1 parent f8ba216 commit f52960b
Show file tree
Hide file tree
Showing 2 changed files with 12 additions and 1 deletion.
8 changes: 7 additions & 1 deletion optimum/intel/openvino/modeling_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,13 @@ def __init__(
height = -1 if self.export_feature == "image-classification" else None
width = -1 if self.export_feature == "image-classification" else None
model = self._reshape(model, -1, -1, height, width)
self.input_names = {key.get_any_name(): idx for idx, key in enumerate(model.inputs)}

input_names = {}
for idx, key in enumerate(model.inputs):
names = tuple(key.get_names())
input_names[next((name for name in names if "/" not in name), names[0])] = idx
self.input_names = input_names

self.model = model
self.request = None
if enable_compilation:
Expand Down
5 changes: 5 additions & 0 deletions optimum/intel/utils/modeling_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,9 @@
from transformers.modeling_utils import PreTrainedModel


# from ...utils.modeling_utils import _prepare_decoder_sliding_window_attention_mask


MULTI_QUERY_ATTN_MODELS = {"falcon", "gpt_bigcode"}


Expand Down · Expand Up @@ -109,6 +112,8 @@ def patch_decoder_attention_mask(model: "PreTrainedModel"):
model.transformer._prepare_attn_mask = _prepare_attn_mask
elif model.config.model_type == "llama":
model.model._prepare_decoder_attention_mask = _prepare_decoder_attention_mask
# elif model.config.model_type == "mistral":
# model.model._prepare_decoder_attention_mask = _prepare_decoder_sliding_window_attention_mask
elif model.config.model_type in {"blenderbot-small", "blenderbot", "opt", "pegasus", "bart"}:
model.model.decoder._prepare_decoder_attention_mask = _prepare_decoder_attention_mask
return model

0 comments on commit f52960b

Please sign in to comment.