Skip to content

Commit

Permalink
Fix regularization loss
Browse files Browse the repository at this point in the history
  • Loading branch information
vl-dud committed Oct 23, 2024
1 parent fba2305 commit afe33ee
Showing 1 changed file with 40 additions and 14 deletions.
54 changes: 40 additions & 14 deletions deepxde/nn/tensorflow_compat_v1/deeponet.py
Original file line number Diff line number Diff line change
Expand Up @@ -422,14 +422,18 @@ def _dense(
regularizer=None,
trainable=True,
):
return tf.keras.layers.Dense(
dense = tf.keras.layers.Dense(
units,
activation=activation,
use_bias=use_bias,
kernel_initializer=self.kernel_initializer,
kernel_regularizer=regularizer,
trainable=trainable,
)(inputs)
)
out = dense(inputs)
if regularizer:
self.regularization_loss += tf.math.add_n(dense.losses)
return out

def _stacked_dense(
self, inputs, units, stack_size, activation=None, use_bias=True, trainable=True
Expand Down Expand Up @@ -636,23 +640,23 @@ def build_branch_net(self):
else:
# Fully connected network
for i in range(1, len(self.layer_size_func) - 1):
y_func = tf.keras.layers.Dense(
y_func = self._dense(
y_func,
self.layer_size_func[i],
activation=self.activation_branch,
kernel_initializer=self.kernel_initializer,
kernel_regularizer=self.regularizer,
)(y_func)
regularizer=self.regularizer,
)
if self.dropout_rate_branch[i - 1] > 0:
y_func = tf.layers.dropout(
y_func,
rate=self.dropout_rate_branch[i - 1],
training=self.training,
)
y_func = tf.keras.layers.Dense(
y_func = self._dense(
y_func,
self.layer_size_func[-1],
kernel_initializer=self.kernel_initializer,
kernel_regularizer=self.regularizer,
)(y_func)
regularizer=self.regularizer,
)
return y_func

def build_trunk_net(self):
Expand All @@ -661,12 +665,12 @@ def build_trunk_net(self):
if self._input_transform is not None:
y_loc = self._input_transform(y_loc)
for i in range(1, len(self.layer_size_loc)):
y_loc = tf.keras.layers.Dense(
y_loc = self._dense(
y_loc,
self.layer_size_loc[i],
activation=self.activation_trunk,
kernel_initializer=self.kernel_initializer,
kernel_regularizer=self.regularizer,
)(y_loc)
regularizer=self.regularizer,
)
if self.dropout_rate_trunk[i - 1] > 0:
y_loc = tf.layers.dropout(
y_loc, rate=self.dropout_rate_trunk[i - 1], training=self.training
Expand All @@ -683,3 +687,25 @@ def merge_branch_trunk(self, branch, trunk):
@staticmethod
def concatenate_outputs(ys):
    """Combine the per-output tensors into one tensor.

    Stacks the list of output tensors along a new third axis (axis=2),
    so each element of ``ys`` becomes one slice of the result.
    """
    stacked = tf.stack(ys, axis=2)
    return stacked

def _dense(
    self,
    inputs,
    units,
    activation=None,
    use_bias=True,
    regularizer=None,
    trainable=True,
):
    """Apply a fully connected layer to ``inputs`` and return its output.

    The layer object is kept in a local so that, when a kernel
    regularizer is supplied, its penalty terms (``layer.losses``) can be
    summed and accumulated into ``self.regularization_loss``.
    """
    layer = tf.keras.layers.Dense(
        units,
        activation=activation,
        use_bias=use_bias,
        kernel_initializer=self.kernel_initializer,
        kernel_regularizer=regularizer,
        trainable=trainable,
    )
    outputs = layer(inputs)
    if regularizer:
        # Fold this layer's regularization penalties into the running total.
        self.regularization_loss += tf.math.add_n(layer.losses)
    return outputs

0 comments on commit afe33ee

Please sign in to comment.