fix lr_scheduler for BertAdam optimizer
hlh981029 committed Jul 2, 2022
1 parent bf17be3 commit 169bf0a
Showing 3 changed files with 12 additions and 1 deletion.
easynlp/core/optimizers.py (1 addition, 1 deletion)

@@ -495,7 +495,7 @@ def get_optimizer(
             optimizer_grouped_parameters.append({
                 'params': [p],
                 'weight_decay':
-                0.0 if any(nd in n for nd in no_decay) else weight_decay
+                0.0 if len(p.shape) == 1 or any(nd in n for nd in no_decay) else weight_decay
             })
     if optimizer_type == 'BertAdam':
         optimizer = BertAdam(
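
Context for the hunk above: the new `len(p.shape) == 1` test excludes every 1-D parameter tensor (biases, LayerNorm weights) from weight decay even when its name is not matched by the `no_decay` list. A minimal sketch of the grouping loop this line sits in, with an assumed `no_decay` list and loop shape (not copied from the repository):

    # Sketch: per-parameter weight-decay grouping (assumed surrounding code).
    no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']  # assumed list
    optimizer_grouped_parameters = []
    for n, p in model.named_parameters():
        optimizer_grouped_parameters.append({
            'params': [p],
            # 1-D tensors (biases, norm weights) now skip weight decay too
            'weight_decay':
                0.0 if len(p.shape) == 1 or any(nd in n for nd in no_decay)
                else weight_decay,
        })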
easynlp/core/trainer.py (1 addition, 0 deletions)

@@ -107,6 +107,7 @@ def set_model_and_optimizer(self, model, args):
         # Build Optimizer
         self._optimizer, self._lr_scheduler = get_optimizer(
             optimizer_type=self.optimizer_type,
+            schedule=args.lr_scheduler,
             learning_rate=args.learning_rate,
             warmup_proportion=args.warmup_proportion,
             max_grad_norm=self.max_grad_norm,
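
This one-line addition is the core of the fix: `get_optimizer` accepts a `schedule` argument, but the trainer previously never forwarded the user's choice, so BertAdam always ran with its default schedule regardless of the flag. The schedule names introduced in arguments.py below correspond to warmup curves of roughly this shape (a sketch in the style of the original pytorch-pretrained-bert BertAdam, not EasyNLP's exact definitions):

    import math

    # Sketch of the schedules behind the names; `warmup` is the warmup
    # proportion and `x` the fraction of total training steps completed.
    def warmup_constant(x, warmup=0.002):
        return x / warmup if x < warmup else 1.0

    def warmup_linear(x, warmup=0.002):
        if x < warmup:
            return x / warmup
        return max((1.0 - x) / (1.0 - warmup), 0.0)

    def warmup_cosine(x, warmup=0.002):
        if x < warmup:
            return x / warmup
        progress = (x - warmup) / (1.0 - warmup)
        return 0.5 * (1.0 + math.cos(math.pi * progress))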
easynlp/utils/arguments.py (10 additions, 0 deletions)

@@ -296,6 +296,16 @@ def _add_easynlp_args(parser: argparse.ArgumentParser):
         ],
         help='name of the optimizer')
 
+    group.add_argument('--lr_scheduler',
+                       '--schedule',
+                       default='warmup_linear',
+                       type=str,
+                       choices=[
+                           'warmup_linear', 'warmup_cosine',
+                           'warmup_constant', 'none',
+                       ],
+                       help='name of the learning rate scheduler')
+
     group.add_argument(
         '--warmup_proportion',
         default=0.1,
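
Because '--lr_scheduler' comes first in the option strings, argparse derives the destination from it, so both spellings land in `args.lr_scheduler`, the attribute the trainer change above reads. A standalone demo of just this flag (the rest of the EasyNLP parser is omitted):

    import argparse

    # Mirrors the argument added in arguments.py; everything else omitted.
    parser = argparse.ArgumentParser()
    parser.add_argument('--lr_scheduler', '--schedule',
                        default='warmup_linear', type=str,
                        choices=['warmup_linear', 'warmup_cosine',
                                 'warmup_constant', 'none'],
                        help='name of the learning rate scheduler')

    # The '--schedule' alias stores into the same destination.
    args = parser.parse_args(['--schedule', 'warmup_cosine'])
    print(args.lr_scheduler)  # prints: warmup_cosine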
