MAINT: Update ruff version to ~0.6.1 (huggingface#1965)
Moving to ruff ~0.6.1. Changes:

- type comparisons now require is instead of ==, e.g. type(x) is str rather than type(x) == str (see the sketch below)
- remove overridden class attribute active_adapter
- remove secondary import of fbd_cuda

Omit jupyter notebooks for now. We can think about adding that in a
separate PR.
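As a minimal sketch (not part of this commit), the pattern behind the first bullet: ruff's type-comparison rule (E721) flags equality comparisons between types and asks for an identity check instead. The variable x below is a placeholder, not taken from the PEFT code.

# Flagged by ruff (E721): comparing types with ==
# if type(x) == str:
#     ...

x = "hello"
# Preferred: identity check when the exact type matters,
if type(x) is str:
    print("x is exactly a str")
# or isinstance() when subclasses should also count.
if isinstance(x, str):
    print("x is a str (or a subclass)")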
BenjaminBossan authored Aug 22, 2024
1 parent fa218e1 commit 8fcb195
Showing 8 changed files with 10 additions and 12 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -1,13 +1,13 @@
 repos:
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.2.1
+  rev: v0.6.1
   hooks:
   - id: ruff
     args:
     - --fix
   - id: ruff-format
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.5.0
+  rev: v0.6.1
   hooks:
   - id: check-merge-conflict
   - id: check-yaml
1 change: 1 addition & 0 deletions pyproject.toml
@@ -6,6 +6,7 @@ target-version = ['py38']
 [tool.ruff]
 target-version = "py38"
 line-length = 119
+extend-exclude = ["*.ipynb"]
 
 [tool.ruff.lint]
 extend-select = [
2 changes: 1 addition & 1 deletion setup.py
@@ -21,7 +21,7 @@
 extras["quality"] = [
     "black", # doc-builder has an implicit dependency on Black, see huggingface/doc-builder#434
     "hf-doc-builder",
-    "ruff~=0.4.8",
+    "ruff~=0.6.1",
 ]
 extras["docs_specific"] = [
     "black", # doc-builder has an implicit dependency on Black, see huggingface/doc-builder#434
1 change: 0 additions & 1 deletion src/peft/tuners/boft/layer.py
@@ -91,7 +91,6 @@ def get_fbd_cuda():
             # build_directory='/tmp/' # for debugging
         )
         # extra_cuda_cflags = ['-std=c++14', '-ccbin=$$(which gcc-7)']) # cuda10.2 is not compatible with gcc9. Specify gcc 7
-        import fbd_cuda
     except Exception as e:
         warnings.warn(f"Failed to load the CUDA extension: {e}, check if ninja is available.")
         warnings.warn("Setting boft_n_butterfly_factor to 1 to speed up the finetuning process.")
4 changes: 2 additions & 2 deletions src/peft/tuners/lora/model.py
@@ -553,9 +553,9 @@ def _check_add_weighted_adapter(
                 "Combining adapters with `target_modules` type being a mix of list/set and string is not supported."
             )
 
-        if target_module_types[0] == str:
+        if target_module_types[0] is str:
             new_target_modules = "|".join(f"({self.peft_config[adapter].target_modules})" for adapter in adapters)
-        elif target_module_types[0] == set:
+        elif target_module_types[0] is set:
             new_target_modules = reduce(
                 operator.or_, (self.peft_config[adapter].target_modules for adapter in adapters)
             )
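For context, here is a standalone sketch of what the two branches above compute; the adapter target_modules values (q_proj, v_proj, k_proj) are made up for illustration and do not come from the diff. When every adapter stores target_modules as a string, the strings are joined into one regex alternation; when every adapter stores a set, the sets are unioned.

import operator
from functools import reduce

# Hypothetical target_modules of two adapters, once as regex strings, once as sets.
string_targets = ["q_proj|v_proj", "k_proj"]
set_targets = [{"q_proj", "v_proj"}, {"k_proj"}]

# String case: wrap each pattern in parentheses and join with "|".
new_target_modules_str = "|".join(f"({t})" for t in string_targets)
print(new_target_modules_str)  # (q_proj|v_proj)|(k_proj)

# Set case: union of all sets via reduce with operator.or_.
new_target_modules_set = reduce(operator.or_, set_targets)
print(new_target_modules_set)  # {'q_proj', 'v_proj', 'k_proj'} (order may vary)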
2 changes: 0 additions & 2 deletions src/peft/tuners/tuners_utils.py
@@ -506,8 +506,6 @@ class BaseTunerLayer(ABC):
     The name of the active adapter.
     """
 
-    active_adapter = None
-
     # All names of layers that may contain adapter (trainable) weights
     adapter_layer_names: tuple[str, ...] = ()
     # All names of other parameters that may contain adapter-related parameters
6 changes: 3 additions & 3 deletions tests/test_mixed.py
@@ -122,7 +122,7 @@ def _check_mixed_outputs(self, model_cls, config0, config1, input, *, is_commuta
         # check the number of tuner layer types
         tuner_layers = [mod for mod in peft_model_01.modules() if isinstance(mod, BaseTunerLayer)]
         tuner_types = {type(tuner_layer) for tuner_layer in tuner_layers}
-        if type(config0) == type(config1):
+        if type(config0) is type(config1):
             assert len(tuner_types) == 1
         else:
             assert len(tuner_types) == 2
@@ -147,7 +147,7 @@ def _check_mixed_outputs(self, model_cls, config0, config1, input, *, is_commuta
         # check the number of tuner layer types
         tuner_layers = [mod for mod in peft_model_10.modules() if isinstance(mod, BaseTunerLayer)]
         tuner_types = {type(tuner_layer) for tuner_layer in tuner_layers}
-        if type(config0) == type(config1):
+        if type(config0) is type(config1):
             assert len(tuner_types) == 1
         else:
             assert len(tuner_types) == 2
@@ -166,7 +166,7 @@ def _check_mixed_outputs(self, model_cls, config0, config1, input, *, is_commuta
         # check the number of tuner layer types
         tuner_layers = [mod for mod in peft_model_10.modules() if isinstance(mod, BaseTunerLayer)]
         tuner_types = {type(tuner_layer) for tuner_layer in tuner_layers}
-        if type(config0) == type(config1):
+        if type(config0) is type(config1):
             assert len(tuner_types) == 1
         else:
             assert len(tuner_types) == 2
2 changes: 1 addition & 1 deletion tests/test_tuners_utils.py
@@ -291,7 +291,7 @@ def _check_match_with_expected_target_modules(
         # compare the two models and assert that all layers are of the same type
         for name, actual_module in actual_model.named_modules():
             expected_module = expected_model_module_dict[name]
-            assert type(actual_module) == type(expected_module)
+            assert type(actual_module) is type(expected_module)
 
     def test_maybe_include_all_linear_layers_ia3_loha(self):
         model_id, initial_target_modules, expected_target_modules = (
