diff --git a/deepmd/pt/train/training.py b/deepmd/pt/train/training.py
index 72e84d577a..8ca510492c 100644
--- a/deepmd/pt/train/training.py
+++ b/deepmd/pt/train/training.py
@@ -579,7 +579,10 @@ def warm_up_linear(step, warmup_steps):
         # author: iProzd
         if self.opt_type == "Adam":
             self.optimizer = torch.optim.Adam(
-                self.wrapper.parameters(), lr=self.lr_exp.start_lr
+                self.wrapper.parameters(),
+                lr=self.lr_exp.start_lr,
+                # NOTE(review): fused Adam kernels require CUDA params; unconditional
+                # fused=True crashes CPU training. Assumes the model lives on CUDA
+                # whenever it is available -- TODO confirm against DEVICE handling.
+                fused=torch.cuda.is_available(),
             )
             if optimizer_state_dict is not None and self.restart_training:
                 self.optimizer.load_state_dict(optimizer_state_dict)