diff --git a/use_case_examples/lora_finetune/gpt2_finetune_hybrid.ipynb b/use_case_examples/lora_finetune/gpt2_finetune_hybrid.ipynb
index 3ee688f37c..a51c6ce6cd 100644
--- a/use_case_examples/lora_finetune/gpt2_finetune_hybrid.ipynb
+++ b/use_case_examples/lora_finetune/gpt2_finetune_hybrid.ipynb
@@ -244,7 +244,7 @@
     "    # parameters. Side note: \"lm_head\" does not appear in model.parameters() because the weights\n",
     "    # are directly tied to the embedding ones, but we still need to remove both modules in\n",
     "    # order to get rid of the weights\n",
-    "    if isinstance(module, (Conv1D, Embedding)) or \"lm_head\" in name:\n",
+    "    if isinstance(module, Conv1D):\n",
     "        remote_names.append(name)\n",
     "\n",
     "    elif isinstance(module, CustomConv1D):\n",
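
For context, the loop this hunk modifies walks the model's named modules and collects the names of the sub-modules that should run remotely; with the patch, only the GPT-2 Conv1D projections are kept, and the Embedding/lm_head handling described in the comment above is no longer applied. The sketch below (not part of the patch) shows one way such a remote_names list is typically consumed; the use of HybridFHEModel and its module_names argument is an assumption based on Concrete ML's hybrid-model API, and "gpt2" is a placeholder checkpoint name.

# Minimal sketch, assuming Concrete ML's HybridFHEModel accepts the collected
# module names; this is illustrative and not taken from the notebook diff.
from transformers import GPT2LMHeadModel
from transformers.pytorch_utils import Conv1D

from concrete.ml.torch.hybrid_model import HybridFHEModel

model = GPT2LMHeadModel.from_pretrained("gpt2")

# Mirror the patched condition: keep only the Conv1D linear projections
# as remote modules, leaving embeddings and lm_head on the client side.
remote_names = [
    name for name, module in model.named_modules() if isinstance(module, Conv1D)
]

# Only the listed sub-modules are executed remotely; the rest of GPT-2
# stays local. (Assumed constructor signature: HybridFHEModel(model, module_names).)
hybrid_model = HybridFHEModel(model, module_names=remote_names)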