
Commit

update code with black
lijialin03 committed Dec 17, 2024
1 parent eba9b55 commit fd71d37
Showing 2 changed files with 8 additions and 4 deletions.
deepxde/model.py (3 changes: 1 addition & 2 deletions)
@@ -517,13 +517,12 @@ def outputs_losses_test(inputs, targets, auxiliary_vars):
             trainable_variables = (
                 list(self.net.parameters()) + self.external_trainable_variables
             )
-            weight_decay = getattr(self.net, "regularizer", None)
             self.opt = optimizers.get(
                 trainable_variables,
                 self.opt_name,
                 learning_rate=lr,
                 decay=decay,
-                weight_decay=weight_decay,
+                weight_decay=self.net.regularizer,
             )
 
         def train_step(inputs, targets, auxiliary_vars):
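
For readers following this first hunk: after the commit, the Paddle compile path reads the net's `regularizer` attribute directly (the `getattr` fallback is gone) and forwards it to the optimizer factory as `weight_decay`. The sketch below is not part of the commit; it illustrates that contract with a hypothetical `TinyNet` standing in for a deepxde network, assuming only that the net exposes a `regularizer` attribute as deepxde's Paddle networks do.

```python
import paddle


class TinyNet(paddle.nn.Layer):
    """Hypothetical stand-in for a deepxde network: the compile path above
    only relies on the net exposing a `regularizer` attribute."""

    def __init__(self, regularizer=None):
        super().__init__()
        self.linear = paddle.nn.Linear(2, 1)
        # None means no weight decay; deepxde's Paddle nets set this
        # attribute from the user's regularization settings.
        self.regularizer = regularizer


net = TinyNet(regularizer=paddle.regularizer.L2Decay(coeff=1e-4))

# Mirrors the updated call in Model.compile: the net's regularizer is passed
# straight through as `weight_decay` (no getattr fallback).
opt = paddle.optimizer.Adam(
    learning_rate=1e-3,
    parameters=net.parameters(),
    weight_decay=net.regularizer,
)
```

Note that with this change a net lacking the attribute raises `AttributeError` rather than silently defaulting to `None`.
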
deepxde/optimizers/paddle/optimizers.py (9 changes: 7 additions & 2 deletions)
@@ -62,8 +62,13 @@ def get(params, optimizer, learning_rate=None, decay=None, weight_decay=None):
             weight_decay=weight_decay,
         )
     if optimizer == "adamw":
-        if not isinstance(weight_decay, paddle.regularizer.L2Decay) or weight_decay._coeff == 0:
-            raise ValueError("AdamW optimizer requires L2 regularizer and non-zero weight decay")
+        if (
+            not isinstance(weight_decay, paddle.regularizer.L2Decay)
+            or weight_decay._coeff == 0
+        ):
+            raise ValueError(
+                "AdamW optimizer requires L2 regularizer and non-zero weight decay"
+            )
         return paddle.optimizer.AdamW(
             learning_rate=learning_rate,
             parameters=params,
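
This second hunk is the Black reformatting of the AdamW guard in `optimizers.get`; behavior is unchanged. As a standalone illustration (not deepxde API), the sketch below reproduces that guard and builds a Paddle AdamW optimizer. The hunk is truncated before the `weight_decay` argument of the AdamW constructor, so reading the coefficient via `_coeff` here is an assumption; `make_adamw` and `layer` are hypothetical names.

```python
import paddle


def make_adamw(params, learning_rate, weight_decay):
    # Same guard as in the hunk above: AdamW is only allowed with an
    # L2Decay regularizer whose coefficient is non-zero.
    if (
        not isinstance(weight_decay, paddle.regularizer.L2Decay)
        or weight_decay._coeff == 0
    ):
        raise ValueError(
            "AdamW optimizer requires L2 regularizer and non-zero weight decay"
        )
    # Paddle's AdamW takes the decay coefficient as a float, so the value is
    # read off the regularizer object (an assumption about how the original
    # function continues after the guard).
    return paddle.optimizer.AdamW(
        learning_rate=learning_rate,
        parameters=params,
        weight_decay=weight_decay._coeff,
    )


layer = paddle.nn.Linear(2, 1)
opt = make_adamw(layer.parameters(), 1e-3, paddle.regularizer.L2Decay(coeff=1e-4))
# make_adamw(layer.parameters(), 1e-3, None)  # would raise ValueError
```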
