This repository has been archived by the owner on Aug 16, 2024. It is now read-only.

Commit 6cd8dc2
[bugfix] remove useless codes
mikecovlee committed Jul 30, 2024
1 parent 4bd6362 commit 6cd8dc2
Showing 1 changed file with 3 additions and 4 deletions.
mlora/models/modeling_phi3.py (7 changes: 3 additions & 4 deletions)
@@ -305,12 +305,11 @@ def _mixlora_forward(
 
         lora_name = f"moe.{moe_name}.experts.{expert_idx}"
         if lora_name in self.gate_up_proj_.loras_:
-            lora_data = _mixtral_slice_tensor(hidden_states, top_x, input_dtype)
             gate_up_states = self.gate_up_proj_.loras_[lora_name].forward(
-                _mixtral_slice_tensor(common_gate_up, top_x, input_dtype), lora_data
+                _mixtral_slice_tensor(common_gate_up, top_x, input_dtype),
+                _mixtral_slice_tensor(hidden_states, top_x, input_dtype),
             )
         else:
-            lora_data = None
             gate_up_states = _mixtral_slice_tensor(
                 common_gate_up, top_x, input_dtype
             )
@@ -320,7 +319,7 @@ def _mixlora_forward(
 
         if lora_name in self.down_proj_.loras_:
             final_expert_states.append(
-                self.down_proj_.loras_[lora_name].forward(  # LoRA a,b
+                self.down_proj_.loras_[lora_name].forward(
                     self.down_proj_.base_layer_.forward(act_result),
                     act_result,
                 )
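
Note on the change: it is behavior-preserving. The lora_data temporary existed only to feed the adapter's two-argument forward call, so it is inlined at the call site, and the lora_data = None assignment in the else branch was dead code, since nothing read lora_data afterwards. The following is a minimal sketch of the pattern the call sites imply; the names slice_tokens and LoraAdapterSketch, the gather-and-cast helper body, and the residual + scaling * B(A(x)) form are assumptions inferred from the call shape in this diff, not mLoRA's actual implementation.

import torch

# Hedged stand-ins for the names in the diff above. These are illustrative
# assumptions based on the call sites, not copies of mLoRA's real code.

def slice_tokens(data: torch.Tensor, top_x: torch.Tensor, dtype: torch.dtype) -> torch.Tensor:
    # Plays the role of _mixtral_slice_tensor: gather the token rows routed
    # to the current expert (indices in top_x) and cast to the compute dtype.
    return data[top_x].to(dtype)

class LoraAdapterSketch(torch.nn.Module):
    # Stands in for self.gate_up_proj_.loras_[lora_name]. Its two-argument
    # forward(residual, x) returns residual + scaling * B(A(x)), matching how
    # the diff passes the sliced base projection and the sliced inputs.
    def __init__(self, in_dim: int, out_dim: int, rank: int = 8, scaling: float = 2.0):
        super().__init__()
        self.lora_a_ = torch.nn.Linear(in_dim, rank, bias=False)   # down-projection A
        self.lora_b_ = torch.nn.Linear(rank, out_dim, bias=False)  # up-projection B
        self.scaling_ = scaling

    def forward(self, residual: torch.Tensor, hidden_states: torch.Tensor) -> torch.Tensor:
        return residual + self.scaling_ * self.lora_b_(self.lora_a_(hidden_states))

Under these stand-ins, the old and new call sites compute the same tensor: lora_data merely cached the sliced hidden states before the call, and the else branch never used it, which is why removing both assignments changes nothing observable.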
