
fix when attention_mask=None
Signed-off-by: jiqing-feng <[email protected]>
jiqing-feng committed Dec 12, 2024
1 parent bb51139 commit ba910ed
Showing 1 changed file with 2 additions and 0 deletions.
optimum/intel/ipex/modeling_base.py
@@ -276,6 +276,8 @@ def forward(
         attention_mask: Optional[torch.FloatTensor] = None,
         **kwargs,
     ) -> CausalLMOutputWithPast:
+        if self.add_patch and input_ids is not None and attention_mask is None:
+            attention_mask = torch.ones_like(input_ids)
         return self.model(input_ids=input_ids, attention_mask=attention_mask, **kwargs)

     def _prepare_generation_config(
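
For context, a minimal standalone sketch of what the added lines do (the helper name and token IDs below are illustrative, not from the repository): when the caller passes no attention mask, an all-ones mask marks every position as a real token, which is what the wrapped model expects for unpadded input.

    import torch

    # Hypothetical stand-in for the patched branch of forward(): default to
    # attending over every input token when no mask is supplied.
    def make_default_attention_mask(input_ids: torch.LongTensor) -> torch.LongTensor:
        # ones_like copies the shape, dtype, and device of input_ids, so the
        # mask lines up with the batch without extra bookkeeping.
        return torch.ones_like(input_ids)

    input_ids = torch.tensor([[101, 2023, 2003, 102]])  # illustrative token IDs
    mask = make_default_attention_mask(input_ids)
    print(mask)  # tensor([[1, 1, 1, 1]]) -> every position is attended

Note the guard in the actual patch only fires when input_ids is present, since ones_like needs a tensor to copy the shape from.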
