modify ip_adapter's ImageCrossAttention scale getter and setter
this new version makes it robust in case multiple Multiply layers are inside the Chain (e.g. if the Linear layers are LoRA-ified)
Laurent2916 authored and deltheil committed Mar 26, 2024
1 parent 7e64ba4 commit a071580
Showing 1 changed file with 8 additions and 6 deletions.
src/refiners/foundationals/latent_diffusion/image_prompt.py (8 additions, 6 deletions)
```diff
@@ -235,7 +235,7 @@ def init_context(self) -> Contexts:
 
 class ImageCrossAttention(fl.Chain):
     def __init__(self, text_cross_attention: fl.Attention, scale: float = 1.0) -> None:
-        self._scale = scale
+        self._multiply = [fl.Multiply(scale)]
         super().__init__(
             fl.Distribute(
                 fl.Identity(),
@@ -263,17 +263,20 @@ def __init__(self, text_cross_attention: fl.Attention, scale: float = 1.0) -> None:
             ScaledDotProductAttention(
                 num_heads=text_cross_attention.num_heads, is_causal=text_cross_attention.is_causal
             ),
-            fl.Multiply(self.scale),
+            self.multiply,
         )
 
+    @property
+    def multiply(self) -> fl.Multiply:
+        return self._multiply[0]
+
     @property
     def scale(self) -> float:
-        return self._scale
+        return self.multiply.scale
 
     @scale.setter
     def scale(self, value: float) -> None:
-        self._scale = value
-        self.ensure_find(fl.Multiply).scale = value
+        self.multiply.scale = value
 
 
 class CrossAttentionAdapter(fl.Chain, Adapter[fl.Attention]):
@@ -335,7 +338,6 @@ def scale(self) -> float:
 
     @scale.setter
     def scale(self, value: float) -> None:
-        self._scale = value
         self.image_cross_attention.scale = value
 
     def load_weights(self, key_tensor: Tensor, value_tensor: Tensor) -> None:
```
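For context, here is a minimal sketch of the pattern this commit adopts, using a hypothetical `Scaled` chain rather than the real `ImageCrossAttention`; the second `Multiply` stands in for one introduced by a LoRA-ified `Linear`. `fl.Chain`, `fl.Multiply`, and `ensure_find` are the fluxion APIs already used in the diff, and the final assertion assumes `ensure_find` returns the first match in chain order, which is exactly the fragility the commit message describes; the rest is illustrative.

```python
# Sketch only: `Scaled` is a toy stand-in for ImageCrossAttention.
import refiners.fluxion.layers as fl


class Scaled(fl.Chain):
    def __init__(self, scale: float = 1.0) -> None:
        # Held in a plain list so nn.Module.__setattr__ does not register
        # the layer as a submodule here; it is registered exactly once,
        # when passed to the Chain constructor below.
        self._multiply = [fl.Multiply(scale)]
        super().__init__(
            fl.Multiply(2.0),  # stand-in for a Multiply added by LoRA
            self.multiply,
        )

    @property
    def multiply(self) -> fl.Multiply:
        return self._multiply[0]

    @property
    def scale(self) -> float:
        # Read the live value from the layer; no cached _scale to go stale.
        return self.multiply.scale

    @scale.setter
    def scale(self, value: float) -> None:
        # The old setter, `self.ensure_find(fl.Multiply).scale = value`,
        # matches the first Multiply in the Chain -- here the LoRA
        # stand-in -- and would update the wrong layer.
        self.multiply.scale = value


chain = Scaled()
chain.scale = 0.5
assert chain.scale == 0.5
assert chain.ensure_find(fl.Multiply) is not chain.multiply  # first match is the LoRA one
```

Keeping a direct reference also lets the getter reflect the layer's actual state instead of a separately cached `_scale`, which is presumably why the commit can drop the `self._scale = value` lines in both classes.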
