
Commit

update code 2
lijialin03 committed Dec 17, 2024
1 parent fd71d37 commit 16c17a3
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions deepxde/optimizers/paddle/optimizers.py
@@ -25,10 +25,10 @@ def get(params, optimizer, learning_rate=None, decay=None, weight_decay=None):
         return optimizer
 
     if optimizer in ["L-BFGS", "L-BFGS-B"]:
-        if weight_decay is not None:
-            raise ValueError("L-BFGS optimizer doesn't support weight_decay")
         if learning_rate is not None or decay is not None:
             print("Warning: learning rate is ignored for {}".format(optimizer))
+        if weight_decay is not None:
+            raise ValueError("L-BFGS optimizer doesn't support weight_decay")
         optim = paddle.optimizer.LBFGS(
             learning_rate=1,
             max_iter=LBFGS_options["iter_per_step"],
@@ -67,7 +67,7 @@ def get(params, optimizer, learning_rate=None, decay=None, weight_decay=None):
             or weight_decay._coeff == 0
         ):
             raise ValueError(
-                "AdamW optimizer requires L2 regularizer and non-zero weight decay"
+                "AdamW optimizer requires non-zero L2 regularizer"
             )
         return paddle.optimizer.AdamW(
             learning_rate=learning_rate,
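A minimal sketch (not part of the commit) of how the changed checks in get() behave. The import path follows the file shown in the diff; the Linear layer is only a stand-in for a real network, and paddle.regularizer.L2Decay is used as an example weight_decay value.

    import paddle
    from deepxde.optimizers.paddle.optimizers import get

    net = paddle.nn.Linear(2, 1)      # placeholder for a real network
    params = net.parameters()

    # L-BFGS: learning_rate/decay are only warned about and ignored;
    # after this commit the warning is printed before the weight_decay check.
    optim = get(params, "L-BFGS", learning_rate=1e-3)  # prints warning, returns LBFGS

    # Passing any weight_decay with L-BFGS raises the ValueError from the diff.
    try:
        get(params, "L-BFGS", weight_decay=paddle.regularizer.L2Decay(1e-4))
    except ValueError as e:
        print(e)  # "L-BFGS optimizer doesn't support weight_decay"

The AdamW branch shown in the second hunk similarly rejects a missing or zero-coefficient L2 regularizer; the exact optimizer name string for that branch is not visible in this diff.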
