From 734ea9a0143a6789c6eced3adf30b7e08d9e1909 Mon Sep 17 00:00:00 2001
From: Benjamin Bossan
Date: Wed, 11 Sep 2024 12:13:24 +0200
Subject: [PATCH] TST Make X-LoRA tests faster (#2059)

After some recent optimizations, the X-LoRA tests are now the slowest ones.
Part of that is that the lora adapters are re-created for each test. By
changing the fixture scope, they're now only created once. I think this
should be safe, as these files are not modified in the tests.

I also enabled test_scalings_logging_methods with the latest transformers
to ensure that this test also passes.
---
 tests/test_xlora.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/tests/test_xlora.py b/tests/test_xlora.py
index d6c8730bf0..b84635e6ec 100644
--- a/tests/test_xlora.py
+++ b/tests/test_xlora.py
@@ -36,15 +36,15 @@ class TestXlora:
     model_id = "facebook/opt-125m"
     num_loras = 4

-    @pytest.fixture(scope="function")
+    @pytest.fixture(scope="class")
     def lora_dir(self, tmp_path_factory):
         return tmp_path_factory.mktemp("lora")

-    @pytest.fixture(scope="function")
+    @pytest.fixture(scope="class")
     def lora_embedding_dir(self, tmp_path_factory):
         return tmp_path_factory.mktemp("lora_embedding")

-    @pytest.fixture(scope="function")
+    @pytest.fixture(scope="class")
     def saved_lora_adapters(self, lora_dir):
         file_names = []
         for i in range(1, self.num_loras + 1):
@@ -57,7 +57,7 @@ def saved_lora_adapters(self, lora_dir):
             file_names.append(file_name)
         return file_names

-    @pytest.fixture(scope="function")
+    @pytest.fixture(scope="class")
     def saved_lora_embedding_adapters(self, lora_embedding_dir):
         file_names = []
         for i in range(1, self.num_loras + 1):
@@ -70,7 +70,7 @@ def saved_lora_embedding_adapters(self, lora_embedding_dir):
             file_names.append(file_name)
         return file_names

-    @pytest.fixture(scope="function")
+    @pytest.fixture(scope="class")
     def tokenizer(self):
         tokenizer = AutoTokenizer.from_pretrained(self.model_id, trust_remote_code=True, device_map=self.torch_device)
         return tokenizer
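
Below is a minimal, self-contained sketch (not part of the patch; the test class
and fixture names are made up) of the pytest fixture-scope behavior the change
relies on: a scope="class" fixture is built once and reused by every test in the
class, while a scope="function" fixture is rebuilt for each test.

import pytest

# Counters to observe how often each fixture body runs.
CALLS = {"function": 0, "class": 0}


class TestFixtureScopes:
    @pytest.fixture(scope="function")
    def per_test(self):
        # Re-created for every test method that requests it.
        CALLS["function"] += 1
        return CALLS["function"]

    @pytest.fixture(scope="class")
    def per_class(self):
        # Created once, then cached for all tests in this class.
        CALLS["class"] += 1
        return CALLS["class"]

    def test_first(self, per_test, per_class):
        assert per_test == 1
        assert per_class == 1

    def test_second(self, per_test, per_class):
        assert per_test == 2   # function-scoped fixture ran again
        assert per_class == 1  # class-scoped fixture was reused

Applied to the patch above, the class-scoped saved_lora_adapters and
saved_lora_embedding_adapters fixtures write the adapter checkpoints to disk
only once for the whole TestXlora class, which is safe because, per the commit
message, the tests only read those files.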