Skip to content

Commit

Permalink
Coupling flow working but not performing well with current clamping
Browse files Browse the repository at this point in the history
  • Loading branch information
stefanradev93 committed May 16, 2024
1 parent 3edb1e6 commit 51cb6a7
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@

class AffineTransform(Transform):

def __init__(self, clamp_factor=5.0, **kwargs):
    """Initialize an affine coupling transform with a clamped scale.

    Parameters
    ----------
    clamp_factor : float, optional
        Soft clamping constant applied when constraining the raw scale
        parameters (see ``constrain_parameters``). Default is 5.0.
    **kwargs
        Forwarded unchanged to the base ``Transform`` constructor.
    """
    super().__init__(**kwargs)
    self.clamp_factor = clamp_factor
Expand All @@ -19,8 +19,10 @@ def split_parameters(self, parameters: Tensor) -> dict[str, Tensor]:
return {"scale": scale, "shift": shift}

def constrain_parameters(self, parameters: dict[str, Tensor]) -> dict[str, Tensor]:
    """Map unconstrained network outputs to valid affine parameters.

    The raw ``"scale"`` entry is passed through a sigmoid (guaranteeing a
    positive factor) and multiplied by ``sqrt(1 + |s + clamp_factor|)``,
    which grows only sublinearly in the raw value — presumably to avoid
    the hard saturation of a pure sigmoid clamp (TODO confirm: the commit
    message notes this clamping is still not performing well).

    Parameters
    ----------
    parameters : dict[str, Tensor]
        Parameter dictionary containing at least a ``"scale"`` entry.
        Modified in place.

    Returns
    -------
    dict[str, Tensor]
        The same dictionary with ``"scale"`` replaced by its constrained
        (strictly positive) version.
    """
    s = parameters["scale"]
    # ops.sigmoid(s) is the numerically stable form of 1 / (1 + exp(-s)).
    parameters["scale"] = ops.sigmoid(s) * ops.sqrt(1 + ops.abs(s + self.clamp_factor))

    return parameters

Expand Down
2 changes: 1 addition & 1 deletion bayesflow/experimental/networks/resnet/hidden_block.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ class ConfigurableHiddenBlock(keras.layers.Layer):
def __init__(
self,
num_units,
activation="gelu",
activation="relu",
residual=True,
dropout_rate=0.05,
spectral_norm=False,
Expand Down

0 comments on commit 51cb6a7

Please sign in to comment.