Skip to content

Commit

Permalink
Merge pull request #463 from Kosinkadink/calculate_weight_change
Browse files Browse the repository at this point in the history
calculate_weight ComfyUI update: use comfy.lora.calculate_weight when available, falling back to ModelPatcher.calculate_weight on older ComfyUI versions; bump version 1.2.0 → 1.2.1
  • Loading branch information
Kosinkadink authored Aug 23, 2024
2 parents c5c2780 + 12a5dd5 commit 675241c
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 2 deletions.
8 changes: 7 additions & 1 deletion animatediff/model_injection.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,12 @@ def __init__(self, m: ModelPatcher):
self.motion_injection_params: InjectionParams = InjectionParams()
self.sample_settings: SampleSettings = SampleSettings()
self.motion_models: MotionModelGroup = None
# backwards-compatible calculate_weight
if hasattr(comfy.lora, "calculate_weight"):
self.do_calculate_weight = comfy.lora.calculate_weight
else:
self.do_calculate_weight = self.calculate_weight


def clone(self, hooks_only=False):
cloned = ModelPatcherAndInjector(self)
Expand Down Expand Up @@ -379,7 +385,7 @@ def patch_hooked_weight_to_device(self, lora_hooks: LoraHookGroup, combined_patc

# TODO: handle model_params_lowvram stuff if necessary
temp_weight = comfy.model_management.cast_to_device(weight, weight.device, torch.float32, copy=True)
out_weight = self.calculate_weight(combined_patches[key], temp_weight, key).to(weight.dtype)
out_weight = self.do_calculate_weight(combined_patches[key], temp_weight, key).to(weight.dtype)
if self.lora_hook_mode == LoraHookMode.MAX_SPEED:
self.cached_hooked_patches.setdefault(lora_hooks, {})
self.cached_hooked_patches[lora_hooks][key] = out_weight
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
[project]
name = "comfyui-animatediff-evolved"
description = "Improved AnimateDiff integration for ComfyUI."
version = "1.2.0"
version = "1.2.1"
license = { file = "LICENSE" }
dependencies = []

Expand Down

0 comments on commit 675241c

Please sign in to comment.