From 3537f2e3a999da84a539a6b5b00b7c1bfc6f148a Mon Sep 17 00:00:00 2001
From: Roman Bredehoft
Date: Wed, 7 Aug 2024 10:34:36 +0200
Subject: [PATCH] chore: temporarily remove embedding/lm_head from remote

---
 use_case_examples/lora_finetune/gpt2_finetune_hybrid.ipynb | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/use_case_examples/lora_finetune/gpt2_finetune_hybrid.ipynb b/use_case_examples/lora_finetune/gpt2_finetune_hybrid.ipynb
index 3ee688f37c..a51c6ce6cd 100644
--- a/use_case_examples/lora_finetune/gpt2_finetune_hybrid.ipynb
+++ b/use_case_examples/lora_finetune/gpt2_finetune_hybrid.ipynb
@@ -244,7 +244,8 @@
     "    # parameters. Side note: \"lm_head\" does not appear in model.parameters() because the weights\n",
     "    # are directly tied to the embedding ones, but we still need to remove both modules in\n",
     "    # order to get rid of the weights\n",
-    "    if isinstance(module, (Conv1D, Embedding)) or \"lm_head\" in name:\n",
+    "    if isinstance(module, (Conv1D)):\n",
+    "    # if isinstance(module, (Conv1D, Embedding)) or \"lm_head\" in name:\n",
     "        remote_names.append(name)\n",
     "\n",
     "    elif isinstance(module, CustomConv1D):\n",
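
For reference (not part of the patch itself): a minimal sketch of the module-selection loop the changed condition lives in, assuming the notebook iterates model.named_modules() and collects names into a remote_names list, with Conv1D imported from transformers.pytorch_utils and Embedding from torch.nn. The `model` variable and the imports are assumptions based on the diff context; the CustomConv1D branch visible in the context lines is omitted here.

    # Sketch only; `model`, `remote_names`, and the imports are assumptions from the diff context.
    from torch.nn import Embedding  # only needed by the commented-out branch
    from transformers.pytorch_utils import Conv1D

    remote_names = []
    for name, module in model.named_modules():
        # "lm_head" does not appear in model.parameters() because its weights are
        # tied to the embedding ones; both modules must be removed to drop the weights.
        if isinstance(module, Conv1D):
            # if isinstance(module, (Conv1D, Embedding)) or "lm_head" in name:
            remote_names.append(name)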