Remove useless comments
Co-authored-by: kylematoba <kyle.matoba@epfl.ch>
AleHD and kylematoba committed Dec 2, 2024
1 parent f229c9d commit 4d3149e
Showing 1 changed file with 0 additions and 7 deletions.
src/nanotron/trainer.py (0 additions, 7 deletions)
@@ -209,13 +209,6 @@ def __init__(
                 parallel_context=self.parallel_context,
                 root_folder=self.init_checkpoint_path,
             )
-            # Update optimizer learning rate because otherwise it is set to zero in the first iteration.
-            #param_groups = self.optimizer.get_base_optimizer().param_groups
-            #last_lrs = self.lr_scheduler.get_last_lr()
-            #assert len(param_groups) == len(last_lrs)
-            #for group, last_lr in zip(param_groups, last_lrs):
-            #    assert "lr" in group
-            #    group["lr"] = last_lr
 
         # Define iteration start state
         if self.init_checkpoint_path is not None:
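For reference, the deleted block was a commented-out workaround that copied the scheduler's last learning rates back into the optimizer's parameter groups after restoring a checkpoint, so the first resumed iteration would not run with lr set to zero. A minimal runnable sketch of that pattern, using plain torch objects rather than nanotron's optimizer wrapper (the deleted lines reach the underlying torch optimizer via get_base_optimizer()):

import torch

# Toy model, optimizer, and scheduler standing in for the trainer's state.
model = torch.nn.Linear(4, 4)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10)

# The pattern from the deleted comments: copy the scheduler's last computed
# lr into each param group (nanotron would use
# self.optimizer.get_base_optimizer().param_groups here).
param_groups = optimizer.param_groups
last_lrs = lr_scheduler.get_last_lr()  # one lr per param group
assert len(param_groups) == len(last_lrs)
for group, last_lr in zip(param_groups, last_lrs):
    assert "lr" in group
    group["lr"] = last_lr

Whether this sync is still needed presumably depends on how load_optimizer and the scheduler restore state; this commit drops the dead comments rather than reviving the code.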
