tweak auto_attach_loras so debugging is easier when it fails
catwell committed Mar 26, 2024
1 parent 2345f01 commit 404a15a
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions src/refiners/fluxion/adapters/lora.py
@@ -505,6 +505,8 @@ def auto_attach_loras(
     loras_copy = {key: Lora.from_weights(lora.name, lora.down.weight, lora.up.weight) for key, lora in loras.items()}
     debug_map_1: list[tuple[str, str]] = []
     failed_keys_1 = _auto_attach_loras(loras, target, include=include, exclude=exclude, debug_map=debug_map_1)
+    if debug_map is not None:
+        debug_map += debug_map_1
     if len(debug_map_1) != len(loras) or failed_keys_1:
         raise ValueError(
             f"sanity check failed: {len(debug_map_1)} / {len(loras)} LoRA layers attached, {len(failed_keys_1)} failed"
@@ -518,6 +520,4 @@ def auto_attach_loras(
             f"sanity check failed: {len(debug_map_2)} / {len(loras)} LoRA layers attached twice, {len(failed_keys_2)} skipped"
         )
 
-    if debug_map is not None:
-        debug_map += debug_map_1
     return failed_keys_1
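
What the move buys you: the caller-provided debug_map is now extended before the first sanity check, so the mapping collected so far is still available when the ValueError is raised, instead of only being filled on the successful return. Below is a minimal sketch of how a caller might take advantage of that. The helper attach_with_debug, the loras dict and the target model are hypothetical, and the meaning of each (str, str) entry is assumed; only the debug_map keyword, its list[tuple[str, str]] type and the ValueError behaviour come from the diff above.

from refiners.fluxion.adapters.lora import auto_attach_loras

def attach_with_debug(loras, target):
    # Hypothetical helper: collect the str pairs that auto_attach_loras
    # records while attaching (assumed to relate LoRA keys to the layers
    # they were attached to).
    debug_map: list[tuple[str, str]] = []
    try:
        failed_keys = auto_attach_loras(loras, target, debug_map=debug_map)
    except ValueError as err:
        # With this commit, the entries gathered before the sanity check
        # raised are already in debug_map, so they can be inspected here.
        print(err)
        for entry in debug_map:
            print(entry)
        raise
    return failed_keys, debug_map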
