diff --git a/tests/ipex/test_modeling.py b/tests/ipex/test_modeling.py index 68ca27534c..679783741b 100644 --- a/tests/ipex/test_modeling.py +++ b/tests/ipex/test_modeling.py @@ -47,7 +47,7 @@ IPEXSentenceTransformer, ) from optimum.utils.testing_utils import grid_parameters, require_sentence_transformers -from optimum.intel.utils.import_utils import is_sentence_transformers_available +from optimum.intel.utils.import_utils import is_sentence_transformers_available, is_torch_version if is_sentence_transformers_available(): from sentence_transformers import SentenceTransformer @@ -331,6 +331,9 @@ def test_ipex_beam_search(self, test_name, model_arch, use_cache): model = IPEXModelForCausalLM.from_pretrained( model_id, use_cache=use_cache, torch_dtype=dtype, device_map=DEVICE ) + # It will be removed when torch 2.6 released + if model_arch == "opt" and not use_cache and model.compiled and is_torch_version("<", "2.6.0"): + return if use_cache and model_arch in self.IPEX_PATCHED_SUPPORTED_ARCHITECTURES: self.assertTrue(model.add_patch) transformers_model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=dtype, device_map=DEVICE)