From 8fcb1951a51c3415ecebae4a8b0a62c770d55454 Mon Sep 17 00:00:00 2001
From: Benjamin Bossan
Date: Thu, 22 Aug 2024 15:23:23 +0200
Subject: [PATCH] MAINT: Update ruff version to ~0.6.1 (#1965)

Moving to ruff ~0.6.1. Changes:

- type comparisons now require `is` instead of `==`, e.g. `type(x) is str`
- remove overridden class attribute active_adapter
- remove secondary import of fbd_cuda

Omit Jupyter notebooks for now. We can think about adding them in a
separate PR.
---
 .pre-commit-config.yaml         | 4 ++--
 pyproject.toml                  | 1 +
 setup.py                        | 2 +-
 src/peft/tuners/boft/layer.py   | 1 -
 src/peft/tuners/lora/model.py   | 4 ++--
 src/peft/tuners/tuners_utils.py | 2 --
 tests/test_mixed.py             | 6 +++---
 tests/test_tuners_utils.py      | 2 +-
 8 files changed, 10 insertions(+), 12 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1563cffda8..2a801ed82c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,13 +1,13 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.2.1
+    rev: v0.6.1
     hooks:
       - id: ruff
         args:
           - --fix
       - id: ruff-format
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v4.6.0
     hooks:
       - id: check-merge-conflict
       - id: check-yaml
diff --git a/pyproject.toml b/pyproject.toml
index c920f4f9d2..465d029e52 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,6 +6,7 @@ target-version = ['py38']
 [tool.ruff]
 target-version = "py38"
 line-length = 119
+extend-exclude = ["*.ipynb"]
 
 [tool.ruff.lint]
 extend-select = [
diff --git a/setup.py b/setup.py
index 847367af08..15ecd1d25a 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@
 extras["quality"] = [
     "black",  # doc-builder has an implicit dependency on Black, see huggingface/doc-builder#434
     "hf-doc-builder",
-    "ruff~=0.4.8",
+    "ruff~=0.6.1",
 ]
 extras["docs_specific"] = [
     "black",  # doc-builder has an implicit dependency on Black, see huggingface/doc-builder#434
diff --git a/src/peft/tuners/boft/layer.py b/src/peft/tuners/boft/layer.py
index f773167b92..05e145b798 100644
--- a/src/peft/tuners/boft/layer.py
+++ b/src/peft/tuners/boft/layer.py
@@ -91,7 +91,6 @@ def get_fbd_cuda():
             # build_directory='/tmp/'  # for debugging
         )
         # extra_cuda_cflags = ['-std=c++14', '-ccbin=$$(which gcc-7)'])  # cuda10.2 is not compatible with gcc9. Specify gcc 7
-        import fbd_cuda
     except Exception as e:
         warnings.warn(f"Failed to load the CUDA extension: {e}, check if ninja is available.")
         warnings.warn("Setting boft_n_butterfly_factor to 1 to speed up the finetuning process.")
diff --git a/src/peft/tuners/lora/model.py b/src/peft/tuners/lora/model.py
index 9d3f3bf62f..5f1b0a6b49 100644
--- a/src/peft/tuners/lora/model.py
+++ b/src/peft/tuners/lora/model.py
@@ -553,9 +553,9 @@ def _check_add_weighted_adapter(
                 "Combining adapters with `target_modules` type being a mix of list/set and string is not supported."
             )
 
-        if target_module_types[0] == str:
+        if target_module_types[0] is str:
             new_target_modules = "|".join(f"({self.peft_config[adapter].target_modules})" for adapter in adapters)
-        elif target_module_types[0] == set:
+        elif target_module_types[0] is set:
             new_target_modules = reduce(
                 operator.or_, (self.peft_config[adapter].target_modules for adapter in adapters)
             )
diff --git a/src/peft/tuners/tuners_utils.py b/src/peft/tuners/tuners_utils.py
index 97e5cfbe90..45617fd543 100644
--- a/src/peft/tuners/tuners_utils.py
+++ b/src/peft/tuners/tuners_utils.py
@@ -506,8 +506,6 @@ class BaseTunerLayer(ABC):
         The name of the active adapter.
""" - active_adapter = None - # All names of layers that may contain adapter (trainable) weights adapter_layer_names: tuple[str, ...] = () # All names of other parameters that may contain adapter-related parameters diff --git a/tests/test_mixed.py b/tests/test_mixed.py index a8dd177f4e..41e9aceae0 100644 --- a/tests/test_mixed.py +++ b/tests/test_mixed.py @@ -122,7 +122,7 @@ def _check_mixed_outputs(self, model_cls, config0, config1, input, *, is_commuta # check the number of tuner layer types tuner_layers = [mod for mod in peft_model_01.modules() if isinstance(mod, BaseTunerLayer)] tuner_types = {type(tuner_layer) for tuner_layer in tuner_layers} - if type(config0) == type(config1): + if type(config0) is type(config1): assert len(tuner_types) == 1 else: assert len(tuner_types) == 2 @@ -147,7 +147,7 @@ def _check_mixed_outputs(self, model_cls, config0, config1, input, *, is_commuta # check the number of tuner layer types tuner_layers = [mod for mod in peft_model_10.modules() if isinstance(mod, BaseTunerLayer)] tuner_types = {type(tuner_layer) for tuner_layer in tuner_layers} - if type(config0) == type(config1): + if type(config0) is type(config1): assert len(tuner_types) == 1 else: assert len(tuner_types) == 2 @@ -166,7 +166,7 @@ def _check_mixed_outputs(self, model_cls, config0, config1, input, *, is_commuta # check the number of tuner layer types tuner_layers = [mod for mod in peft_model_10.modules() if isinstance(mod, BaseTunerLayer)] tuner_types = {type(tuner_layer) for tuner_layer in tuner_layers} - if type(config0) == type(config1): + if type(config0) is type(config1): assert len(tuner_types) == 1 else: assert len(tuner_types) == 2 diff --git a/tests/test_tuners_utils.py b/tests/test_tuners_utils.py index aa8758d22a..c93c1b9267 100644 --- a/tests/test_tuners_utils.py +++ b/tests/test_tuners_utils.py @@ -291,7 +291,7 @@ def _check_match_with_expected_target_modules( # compare the two models and assert that all layers are of the same type for name, actual_module in actual_model.named_modules(): expected_module = expected_model_module_dict[name] - assert type(actual_module) == type(expected_module) + assert type(actual_module) is type(expected_module) def test_maybe_include_all_linear_layers_ia3_loha(self): model_id, initial_target_modules, expected_target_modules = (