Commit

Refactor files using black
lijialin03 committed Dec 11, 2024
1 parent 8910cde commit b9ab3a9
Showing 2 changed files with 33 additions and 25 deletions.
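
The diff below is a formatting-only change: black rewrites statements that exceed its line-length limit into wrapped, parenthesized forms without changing behavior. To reproduce or verify the formatting locally, black is normally run on the changed files directly; the exact commands below are an assumption about the workflow, not part of the commit:

    pip install black
    black deepxde/nn/paddle/fnn.py deepxde/nn/paddle/mfnn.py
    # --check only reports files that would be reformatted, without rewriting them
    black --check deepxde/nn/paddle/
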
32 changes: 18 additions & 14 deletions deepxde/nn/paddle/fnn.py
@@ -29,7 +29,9 @@ def __init__(
         initializer = initializers.get(kernel_initializer)
         initializer_zero = initializers.get("zeros")
         self.regularizer = regularizers.get(regularization)
-        self.regularizer_value = regularization[1:] if regularization is not None else None
+        self.regularizer_value = (
+            regularization[1:] if regularization is not None else None
+        )
         self.dropout_rate = dropout_rate

         self.linears = paddle.nn.LayerList()
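
As an aside, the wrapped statement above is a pure layout change. DeepXDE's regularization argument is a list whose first entry names the penalty and whose remaining entries are its weights, so regularization[1:] keeps just the weights; a small illustration with a made-up value (not taken from this commit):

    # Hypothetical value following the ["name", weight] convention.
    regularization = ["l2", 1e-4]

    regularizer_value = (
        regularization[1:] if regularization is not None else None
    )
    print(regularizer_value)  # [0.0001]
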
@@ -87,7 +89,6 @@ def __init__(self, layer_sizes, activation, kernel_initializer):

         n_output = layer_sizes[-1]

-
         def make_linear(n_input, n_output):
             linear = paddle.nn.Linear(n_input, n_output)
             initializer(linear.weight)
@@ -106,18 +107,22 @@ def make_linear(n_input, n_output):
                 if isinstance(prev_layer_size, (list, tuple)):
                     # e.g. [8, 8, 8] -> [16, 16, 16]
                     self.layers.append(
-                        paddle.nn.LayerList([
-                            make_linear(prev_layer_size[j], curr_layer_size[j])
-                            for j in range(n_output)
-                        ])
+                        paddle.nn.LayerList(
+                            [
+                                make_linear(prev_layer_size[j], curr_layer_size[j])
+                                for j in range(n_output)
+                            ]
+                        )
                     )
                 else:
                     # e.g. 64 -> [8, 8, 8]
                     self.layers.append(
-                        paddle.nn.LayerList([
-                            make_linear(prev_layer_size, curr_layer_size[j])
-                            for j in range(n_output)
-                        ])
+                        paddle.nn.LayerList(
+                            [
+                                make_linear(prev_layer_size, curr_layer_size[j])
+                                for j in range(n_output)
+                            ]
+                        )
                     )
             else:  # e.g. 64 -> 64
                 if not isinstance(prev_layer_size, int):
@@ -129,10 +134,9 @@ def make_linear(n_input, n_output):
         # output layers
         if isinstance(layer_sizes[-2], (list, tuple)):  # e.g. [3, 3, 3] -> 3
             self.layers.append(
-                paddle.nn.LayerList([
-                    make_linear(layer_sizes[-2][j], 1)
-                    for j in range(n_output)
-                ])
+                paddle.nn.LayerList(
+                    [make_linear(layer_sizes[-2][j], 1) for j in range(n_output)]
+                )
             )
         else:
             self.layers.append(make_linear(layer_sizes[-2], n_output))
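
For readers skimming the reformatted block above: each entry appended to self.layers is either a single Linear (a shared trunk layer) or a paddle.nn.LayerList holding one Linear per output (parallel branches). A minimal, self-contained sketch of the parallel case, with made-up sizes:

    import paddle

    # Hypothetical sizes: a 64-unit shared layer feeding 3 parallel 8-unit branches.
    prev_layer_size, branch_size, n_output = 64, 8, 3

    # One Linear per output branch, grouped in a LayerList so the parameters
    # are registered with the parent Layer.
    branches = paddle.nn.LayerList(
        [paddle.nn.Linear(prev_layer_size, branch_size) for _ in range(n_output)]
    )

    x = paddle.randn([5, prev_layer_size])       # batch of 5 inputs
    outs = [linear(x) for linear in branches]    # 3 tensors of shape [5, 8]
    print([o.shape for o in outs])
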
26 changes: 15 additions & 11 deletions deepxde/nn/paddle/mfnn.py
@@ -33,11 +33,13 @@ def __init__(
         self.trainable_hi = trainable_high_fidelity
         self.residue = residue
         self.regularizer = regularizers.get(regularization)
-        self.regularizer_value = regularization[1:] if regularization is not None else None
+        self.regularizer_value = (
+            regularization[1:] if regularization is not None else None
+        )

         # low fidelity
         self.linears_lo = self.init_dense(self.layer_size_lo, self.trainable_lo)

         # high fidelity
         # linear part
         self.linears_hi_l = paddle.nn.Linear(
@@ -50,18 +52,20 @@
         for param in self.linears_hi_l.parameters():
             param.stop_gradient = False
         # nonlinear part
-        self.layer_size_hi = [self.layer_size_lo[0] + self.layer_size_lo[-1]] + self.layer_size_hi
+        self.layer_size_hi = [
+            self.layer_size_lo[0] + self.layer_size_lo[-1]
+        ] + self.layer_size_hi
         self.linears_hi = self.init_dense(self.layer_size_hi, self.trainable_hi)
         # linear + nonlinear
         if not self.residue:
             alpha = self.init_alpha(0.0, self.trainable_hi)
-            self.add_parameter("alpha",alpha)
+            self.add_parameter("alpha", alpha)
         else:
             alpha1 = self.init_alpha(0.0, self.trainable_hi)
             alpha2 = self.init_alpha(0.0, self.trainable_hi)
-            self.add_parameter("alpha1",alpha1)
-            self.add_parameter("alpha2",alpha2)
+            self.add_parameter("alpha1", alpha1)
+            self.add_parameter("alpha2", alpha2)

     def init_dense(self, layer_size, trainable):
         linears = paddle.nn.LayerList()
         for i in range(len(layer_size) - 1):
@@ -79,11 +83,11 @@ def init_dense(self, layer_size, trainable):

     def init_alpha(self, value, trainable):
         alpha = paddle.create_parameter(
-            shape=[1],
-            dtype=config.real(paddle),
+            shape=[1],
+            dtype=config.real(paddle),
             default_initializer=paddle.nn.initializer.Constant(value),
         )
-        alpha.stop_gradient=not trainable
+        alpha.stop_gradient = not trainable
         return alpha

     def forward(self, inputs):
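
The init_alpha hunk above builds a single learnable scalar; stop_gradient = not trainable is how Paddle freezes or unfreezes a parameter. A standalone sketch of the same pattern, assuming float32 in place of DeepXDE's config.real(paddle) helper:

    import paddle

    # Hedged sketch of the init_alpha pattern (not the project's code verbatim).
    def init_alpha(value, trainable):
        alpha = paddle.create_parameter(
            shape=[1],
            dtype="float32",
            default_initializer=paddle.nn.initializer.Constant(value),
        )
        # stop_gradient=True excludes the parameter from gradient updates.
        alpha.stop_gradient = not trainable
        return alpha

    alpha = init_alpha(0.0, trainable=True)
    print(alpha)  # 1-element parameter initialized to 0.0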
