Merge branch 'main' into add-s3-uploader
eliebak authored Sep 3, 2024
2 parents 2651a17 + 4a2ddca commit 8c15d3b
Showing 2 changed files with 2 additions and 3 deletions.
src/nanotron/models/llama.py (2 changes: 1 addition & 1 deletion)
@@ -165,7 +165,7 @@ def __init__(
             bias=False,
             async_communication=tp_linear_async_communication and tp_mode is TensorParallelLinearMode.REDUCE_SCATTER,
         )
-        self.split_silu_mul = torch.compile(GLUActivation(config.hidden_act))
+        self.split_silu_mul = GLUActivation(config.hidden_act)

     def forward(self, hidden_states): # [seq_length, batch_size, hidden_dim]
         merged_states = self.gate_up_proj(hidden_states)
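For context, here is a minimal eager-mode sketch of the pattern this hunk touches. Only the dropped torch.compile wrapper comes from the diff; the GLUActivation body below is an assumption modeled on the usual silu-gated MLP (split the merged gate_up projection, apply the activation to the gate half, multiply by the up half), not code copied from nanotron.

import torch
import torch.nn as nn
import torch.nn.functional as F

class GLUActivation(nn.Module):
    # Assumed behavior: chunk the merged gate/up states and return act(gate) * up.
    def __init__(self, act_fn_name: str):
        super().__init__()
        self.act = getattr(F, act_fn_name)  # e.g. "silu" -> F.silu

    def forward(self, merged_states: torch.Tensor) -> torch.Tensor:
        gate, up = torch.chunk(merged_states, chunks=2, dim=-1)
        return self.act(gate) * up

# Before this commit the module was wrapped for potential kernel fusion:
#   self.split_silu_mul = torch.compile(GLUActivation(config.hidden_act))
# After the merge it runs eagerly:
split_silu_mul = GLUActivation("silu")
print(split_silu_mul(torch.randn(4, 8)).shape)  # torch.Size([4, 4])

The commit message does not say why the wrapper was removed; the usual trade-off is giving up a possible fused-kernel speedup in exchange for skipping torch.compile's warm-up cost and any graph breaks it hits.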
src/nanotron/trainer.py (3 changes: 1 addition & 2 deletions)
@@ -723,8 +723,7 @@ def _load_model_checkpoint(self, model: NanotronModel) -> NanotronModel:
             )
             reloaded_from_checkpoint=True
         if not reloaded_from_checkpoint:
-            # TODO @eliebak add s3 support also here
-            log_rank("No checkpoint path provided.", logger=logger, level=logging.INFO)
+            log_rank("No checkpoint path provided.", logger=logger, level=logging.INFO, rank=0)
             if isinstance(self.config.model.init_method, ExistingCheckpointInit):
                 # Initialize model from an pretrained model checkpoint (without optimizer, lr_scheduler...)
                 self.param_shard_metadata = load_weights(
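The one behavioral change in this hunk is the added rank=0 argument. A hypothetical stand-in for nanotron's log_rank helper (the real signature may differ) illustrates the effect: with rank=None every distributed worker would print the message, while rank=0 makes only the first process emit it, so the line appears once per job.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("nanotron")

def log_rank(msg, logger, level, rank=None, current_rank=0):
    # Hypothetical sketch, not nanotron's implementation: current_rank stands
    # in for this process's distributed rank (e.g. dist.get_rank()).
    # rank=None -> every process logs; rank=k -> only rank k logs.
    if rank is None or rank == current_rank:
        logger.log(level, msg)

log_rank("No checkpoint path provided.", logger=logger, level=logging.INFO, rank=0)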
