From 1c8ccf6f7d7f8aa25fc9375e255701e373d5333b Mon Sep 17 00:00:00 2001
From: Chun Cai
Date: Mon, 9 Dec 2024 12:10:34 +0800
Subject: [PATCH] Perf: use fused Adam optimizer

---
 deepmd/pt/train/training.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/deepmd/pt/train/training.py b/deepmd/pt/train/training.py
index 72e84d577a..8ca510492c 100644
--- a/deepmd/pt/train/training.py
+++ b/deepmd/pt/train/training.py
@@ -579,7 +579,7 @@ def warm_up_linear(step, warmup_steps):
         # author: iProzd
         if self.opt_type == "Adam":
             self.optimizer = torch.optim.Adam(
-                self.wrapper.parameters(), lr=self.lr_exp.start_lr
+                self.wrapper.parameters(), lr=self.lr_exp.start_lr, fused=True
             )
             if optimizer_state_dict is not None and self.restart_training:
                 self.optimizer.load_state_dict(optimizer_state_dict)
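
For context, below is a minimal standalone sketch (not part of the patch) of what `fused=True` does for `torch.optim.Adam`: it runs the parameter update as fused device kernels instead of a per-parameter Python loop, which typically reduces optimizer-step overhead. The fused path requires parameters on a supported device (CUDA in most builds), so this sketch gates it on CUDA availability; the model, learning rate, and input here are placeholder stand-ins for `self.wrapper` and `self.lr_exp.start_lr` in the actual training code.

```python
import torch

model = torch.nn.Linear(16, 1)          # stand-in for self.wrapper
use_fused = torch.cuda.is_available()   # fused Adam needs CUDA tensors
if use_fused:
    model = model.cuda()

optimizer = torch.optim.Adam(
    model.parameters(),
    lr=1e-3,             # stand-in for self.lr_exp.start_lr
    fused=use_fused,     # use the default (unfused) implementation on CPU
)

# One toy optimization step to show the optimizer is used exactly as before;
# only the construction argument changes.
x = torch.randn(4, 16, device="cuda" if use_fused else "cpu")
loss = model(x).sum()
loss.backward()
optimizer.step()
optimizer.zero_grad()
```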