Backend Tensorflow 1.x: replace tf.layers.dense with tf.keras.layers.Dense (#1859)

vl-dud authored Oct 27, 2024
1 parent 04bbe7f commit 3ec0bc7
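tf.layers.dense is deprecated in TensorFlow 1.x and gone in 2.x, which is what motivates this commit. A minimal sketch of the replacement pattern, assuming a TF1-style graph built through tf.compat.v1 (the placeholder shapes and sizes are illustrative, not from DeepXDE):

```python
import tensorflow.compat.v1 as tf

tf.disable_v2_behavior()

inputs = tf.placeholder(tf.float32, shape=[None, 8])

# Old, deprecated TF1 functional API (what the commit removes):
# y = tf.layers.dense(inputs, 16, activation=tf.nn.relu)

# Keras replacement: construct the layer object, then apply it.
dense = tf.keras.layers.Dense(16, activation=tf.nn.relu)
y = dense(inputs)  # shape [None, 16]
```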
Showing 1 changed file with 34 additions and 12 deletions.
46 changes: 34 additions & 12 deletions deepxde/nn/tensorflow_compat_v1/deeponet.py
@@ -321,7 +321,7 @@ def build_branch_net(self):
        if callable(self.layer_size_func[1]):
            # User-defined network
            return self.layer_size_func[1](self.X_func)

        if self.stacked:
            # Stacked fully connected network
            return self._build_stacked_branch_net()
@@ -422,15 +422,18 @@ def _dense(
        regularizer=None,
        trainable=True,
    ):
-        return tf.layers.dense(
-            inputs,
+        dense = tf.keras.layers.Dense(
            units,
            activation=activation,
            use_bias=use_bias,
            kernel_initializer=self.kernel_initializer,
            kernel_regularizer=regularizer,
            trainable=trainable,
        )
+        out = dense(inputs)
+        if regularizer:
+            self.regularization_loss += tf.math.add_n(dense.losses)
+        return out

    def _stacked_dense(
        self, inputs, units, stack_size, activation=None, use_bias=True, trainable=True
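The new helper does more than swap the call. tf.layers.dense registered its kernel-regularizer penalty in the graph collection tf.GraphKeys.REGULARIZATION_LOSSES, while a plain Keras layer keeps the penalty on the layer object in layer.losses, so the diff accumulates it by hand into self.regularization_loss. A sketch of that difference, with illustrative tensors (only dense.losses and tf.math.add_n come from the diff):

```python
import tensorflow.compat.v1 as tf

tf.disable_v2_behavior()

x = tf.placeholder(tf.float32, shape=[None, 4])
l2 = tf.keras.regularizers.l2(1e-3)

dense = tf.keras.layers.Dense(8, kernel_regularizer=l2)
y = dense(x)

# The Keras layer stores the L2 penalty on the layer itself ...
penalty = tf.math.add_n(dense.losses)  # scalar tensor

# ... rather than in tf.GraphKeys.REGULARIZATION_LOSSES, which is where
# tf.layers.dense used to register it; hence the manual accumulation
# into self.regularization_loss in the new _dense helper.
```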
@@ -637,24 +640,22 @@ def build_branch_net(self):
        else:
            # Fully connected network
            for i in range(1, len(self.layer_size_func) - 1):
-                y_func = tf.layers.dense(
+                y_func = self._dense(
                    y_func,
                    self.layer_size_func[i],
                    activation=self.activation_branch,
-                    kernel_initializer=self.kernel_initializer,
-                    kernel_regularizer=self.regularizer,
+                    regularizer=self.regularizer,
                )
                if self.dropout_rate_branch[i - 1] > 0:
                    y_func = tf.layers.dropout(
                        y_func,
                        rate=self.dropout_rate_branch[i - 1],
                        training=self.training,
                    )
-            y_func = tf.layers.dense(
+            y_func = self._dense(
                y_func,
                self.layer_size_func[-1],
-                kernel_initializer=self.kernel_initializer,
-                kernel_regularizer=self.regularizer,
+                regularizer=self.regularizer,
            )
        return y_func

@@ -664,12 +665,11 @@ def build_trunk_net(self):
        if self._input_transform is not None:
            y_loc = self._input_transform(y_loc)
        for i in range(1, len(self.layer_size_loc)):
-            y_loc = tf.layers.dense(
+            y_loc = self._dense(
                y_loc,
                self.layer_size_loc[i],
                activation=self.activation_trunk,
-                kernel_initializer=self.kernel_initializer,
-                kernel_regularizer=self.regularizer,
+                regularizer=self.regularizer,
            )
            if self.dropout_rate_trunk[i - 1] > 0:
                y_loc = tf.layers.dropout(
@@ -687,3 +687,25 @@ def merge_branch_trunk(self, branch, trunk):
    @staticmethod
    def concatenate_outputs(ys):
        return tf.stack(ys, axis=2)
+
+    def _dense(
+        self,
+        inputs,
+        units,
+        activation=None,
+        use_bias=True,
+        regularizer=None,
+        trainable=True,
+    ):
+        dense = tf.keras.layers.Dense(
+            units,
+            activation=activation,
+            use_bias=use_bias,
+            kernel_initializer=self.kernel_initializer,
+            kernel_regularizer=regularizer,
+            trainable=trainable,
+        )
+        out = dense(inputs)
+        if regularizer:
+            self.regularization_loss += tf.math.add_n(dense.losses)
+        return out
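For completeness, a sketch of how such an accumulated penalty is typically folded into the training objective. The exact wiring inside DeepXDE is outside this diff, so the surrounding graph below is hypothetical; only the dense.losses / tf.math.add_n pattern mirrors the commit:

```python
import tensorflow.compat.v1 as tf

tf.disable_v2_behavior()

x = tf.placeholder(tf.float32, shape=[None, 4])
targets = tf.placeholder(tf.float32, shape=[None, 1])

dense = tf.keras.layers.Dense(
    1, kernel_regularizer=tf.keras.regularizers.l2(1e-3)
)
outputs = dense(x)

# Collect the layer's penalty by hand, as the new _dense helper does.
regularization_loss = tf.math.add_n(dense.losses)

data_loss = tf.reduce_mean(tf.square(targets - outputs))
total_loss = data_loss + regularization_loss
train_op = tf.train.AdamOptimizer(1e-3).minimize(total_loss)
```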
