Commit 36d5819

work

kylematoba committed Nov 26, 2024
1 parent c4effa9
Showing 5 changed files with 11 additions and 368 deletions.
Deleted files:

- examples/custom-dataloader/README.md (39 changes: 0 additions & 39 deletions)
- examples/custom-dataloader/config_custom_dl.yaml (103 changes: 0 additions & 103 deletions)
- examples/custom-dataloader/run_train.py (222 changes: 0 additions & 222 deletions)

4 changes: 2 additions & 2 deletions src/nanotron/serialize/optimizer.py
```diff
@@ -282,7 +282,7 @@ def get_checkpoint_state_metadata(param_name: str, pp_rank: int, tp_rank: int) -
     state_dict = torch.load(
         root_folder
         / optimizer_filename(parallel_context, is_zero=optimizer.inherit_from(optim.ZeroDistributedOptimizer)),
-        map_location=map_location,
+        map_location=map_location, weights_only=True
     )

     if isinstance(optimizer, ZeroDistributedOptimizer):
```
```diff
@@ -315,5 +315,5 @@ def load_lr_scheduler(
 ):
     root_folder = root_folder / "lr_scheduler"

-    state_dict = torch.load(root_folder / lr_scheduler_filename(parallel_context))
+    state_dict = torch.load(root_folder / lr_scheduler_filename(parallel_context), weights_only=True)
     lr_scheduler.load_state_dict(state_dict)
```
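Both hunks make the same change: `torch.load` is now called with `weights_only=True`, which swaps in a restricted unpickler that only admits tensors, primitive types, and standard containers instead of executing arbitrary pickled Python objects from the checkpoint file. A minimal sketch of the behavior (the file name and payload below are illustrative, not from nanotron):

```python
import torch

# A checkpoint that is a plain dict of tensors round-trips cleanly.
state = {"last_epoch": torch.tensor(315), "base_lr": torch.tensor(1e-4)}
torch.save(state, "sched.pt")

# weights_only=True rejects anything the restricted unpickler does not
# allow-list, so a tampered pickle cannot run code at load time.
loaded = torch.load("sched.pt", weights_only=True)
print(loaded["last_epoch"].item())  # -> 315
```

Since optimizer and LR-scheduler states are plain tensor/primitive dictionaries, the restricted loader is sufficient here and removes an unnecessary code-execution surface when loading checkpoints.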
