diff --git a/optimum/intel/openvino/modeling_decoder.py b/optimum/intel/openvino/modeling_decoder.py
index 22a48ec4ae..577aa7e81a 100644
--- a/optimum/intel/openvino/modeling_decoder.py
+++ b/optimum/intel/openvino/modeling_decoder.py
@@ -431,7 +431,7 @@ def forward(
         inputs['beam_idx'] = self.next_beam_idx
 
         # Run inference
-        self.request.start_async(inputs, shared_inputs=True)
+        self.request.start_async(inputs, share_inputs=True)
         self.request.wait()
 
         logits = torch.from_numpy(self.request.get_tensor("logits").data).to(self.device)
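
For context, the renamed keyword matches the OpenVINO Python API: `InferRequest.start_async()` accepts `share_inputs`, not `shared_inputs`, so the old spelling would raise a `TypeError`. Below is a minimal standalone sketch of the corrected call, assuming an OpenVINO release new enough to expose the `share_inputs` keyword; the model path and input names are placeholders, and the real `forward()` in `modeling_decoder.py` builds `inputs` (including `beam_idx`) from the generation loop.

```python
# Sketch only: "model.xml" and the input dict are illustrative placeholders.
import numpy as np
import openvino as ov

core = ov.Core()
compiled = core.compile_model(core.read_model("model.xml"), "CPU")
request = compiled.create_infer_request()

inputs = {"input_ids": np.array([[1, 2, 3]], dtype=np.int64)}

# share_inputs=True lets the runtime reuse the caller's numpy buffers
# instead of copying them into the request's tensors.
request.start_async(inputs, share_inputs=True)
request.wait()
logits = request.get_tensor("logits").data
```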