Commit
update code and add regularizer for nn
lijialin03 committed Dec 17, 2024
1 parent 63d6fcd commit eba9b55
Showing 2 changed files with 8 additions and 9 deletions.
1 change: 1 addition & 0 deletions deepxde/nn/paddle/nn.py
@@ -6,6 +6,7 @@ class NN(paddle.nn.Layer):
 
     def __init__(self):
         super().__init__()
+        self.regularizer = None
        self._input_transform = None
        self._output_transform = None
 
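For context, a minimal sketch of how the new regularizer attribute can be populated by a concrete network. The TinyNet subclass and its regularizer constructor argument are illustrative assumptions, not part of this commit; only the attribute itself and paddle.regularizer.L2Decay are real.

import paddle
from deepxde.nn.paddle.nn import NN

class TinyNet(NN):
    # Hypothetical network for illustration only; DeepXDE's real networks
    # would populate the new attribute in a similar way.
    def __init__(self, regularizer=None):
        super().__init__()  # now also initializes self.regularizer = None
        if regularizer is not None:
            self.regularizer = regularizer  # e.g. paddle.regularizer.L2Decay(coeff=1e-4)
        self.linear = paddle.nn.Linear(2, 1)

    def forward(self, x):
        return self.linear(x)

net = TinyNet(regularizer=paddle.regularizer.L2Decay(coeff=1e-4))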
16 changes: 7 additions & 9 deletions deepxde/optimizers/paddle/optimizers.py
@@ -62,13 +62,11 @@ def get(params, optimizer, learning_rate=None, decay=None, weight_decay=None):
             weight_decay=weight_decay,
         )
     if optimizer == "adamw":
-        if isinstance(weight_decay, paddle.regularizer.L2Decay):
-            if weight_decay._coeff == 0:
-                raise ValueError("AdamW optimizer requires non-zero weight decay")
-            return paddle.optimizer.AdamW(
-                learning_rate=learning_rate,
-                parameters=params,
-                weight_decay=weight_decay._coeff,
-            )
-        raise ValueError("AdamW optimizer requires l2 regularizer")
+        if not isinstance(weight_decay, paddle.regularizer.L2Decay) or weight_decay._coeff == 0:
+            raise ValueError("AdamW optimizer requires L2 regularizer and non-zero weight decay")
+        return paddle.optimizer.AdamW(
+            learning_rate=learning_rate,
+            parameters=params,
+            weight_decay=weight_decay._coeff,
+        )
     raise NotImplementedError(f"{optimizer} to be implemented for backend Paddle.")
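A quick usage sketch of the tightened validation. The get call below follows the signature shown in the hunk header and the import path mirrors the file changed here, but treat the snippet as an illustration under those assumptions rather than documented API.

import paddle
from deepxde.optimizers.paddle.optimizers import get

params = paddle.nn.Linear(2, 1).parameters()

# Accepted: an L2 regularizer with a non-zero coefficient; get() unwraps
# the coefficient and passes it to paddle.optimizer.AdamW as a float.
opt = get(params, "adamw", learning_rate=1e-3,
          weight_decay=paddle.regularizer.L2Decay(coeff=1e-4))

# Rejected: anything that is not a non-zero L2Decay now fails early with
# the single combined error message introduced by this commit.
try:
    get(params, "adamw", learning_rate=1e-3, weight_decay=0.0)
except ValueError as err:
    print(err)  # AdamW optimizer requires L2 regularizer and non-zero weight decay

Collapsing the nested checks into one guard clause leaves the function with a single failure path, so the successful return is no longer buried inside an inner if block.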
