diff --git a/bayesflow/adapters/transforms/as_set.py b/bayesflow/adapters/transforms/as_set.py index cf6306316..6ac73c315 100644 --- a/bayesflow/adapters/transforms/as_set.py +++ b/bayesflow/adapters/transforms/as_set.py @@ -4,6 +4,21 @@ class AsSet(ElementwiseTransform): + """ + The `.as_set(["x", "y"])` transform indicates that both `x` and `y` are treated as sets. + That is, their values will be treated as *exchangeable* such that they will imply + the same inference regardless of the values' order. + This is useful, for example, in a linear regression context where we can index + the observations in arbitrary order and always get the same regression line. + + Usage: + + adapter = ( + bf.Adapter() + .as_set(["x", "y"]) + ) + """ + def forward(self, data: np.ndarray, **kwargs) -> np.ndarray: return np.atleast_3d(data) diff --git a/bayesflow/adapters/transforms/concatenate.py b/bayesflow/adapters/transforms/concatenate.py index a09e9b699..19d4cedac 100644 --- a/bayesflow/adapters/transforms/concatenate.py +++ b/bayesflow/adapters/transforms/concatenate.py @@ -12,7 +12,14 @@ @serializable(package="bayesflow.adapters") class Concatenate(Transform): - """Concatenate multiple arrays into a new key.""" + """Concatenate multiple arrays into a new key. + Parameters: + + keys: Sequence of keys naming the arrays to concatenate. + + into: Name of the new key holding the concatenated array. + + """ def __init__(self, keys: Sequence[str], *, into: str, axis: int = -1): self.keys = keys diff --git a/bayesflow/adapters/transforms/constrain.py b/bayesflow/adapters/transforms/constrain.py index c0ac36692..904492765 100644 --- a/bayesflow/adapters/transforms/constrain.py +++ b/bayesflow/adapters/transforms/constrain.py @@ -15,6 +15,42 @@ @serializable(package="bayesflow.adapters") class Constrain(ElementwiseTransform): + """ + Constrains neural network predictions of a data variable to specified bounds. + + Parameters: + String containing the name of the data variable to be transformed e.g. "sigma". See examples below. 
+ + Named Parameters: + lower: Lower bound for named data variable. + upper: Upper bound for named data variable. + method: Method by which to shrink the network predictions space to specified bounds. Choose from + - Double bounded methods: sigmoid, expit, (default = sigmoid) + - Lower bound only methods: softplus, exp, (default = softplus) + - Upper bound only methods: softplus, exp, (default = softplus) + + + + Examples: + Let sigma be the standard deviation of a normal distribution, + then sigma should always be greater than zero. + + Usage: + adapter = ( + bf.Adapter() + .constrain("sigma", lower=0) + ) + + Suppose p is the parameter for a binomial distribution where p must be in [0,1] + then we would constrain the neural network to estimate p in the following way. + + Usage: + adapter = ( + bf.Adapter() + .constrain("p", lower=0, upper=1, method="sigmoid") + ) + """ + def __init__( self, *, lower: int | float | np.ndarray = None, upper: int | float | np.ndarray = None, method: str = "default" ): diff --git a/bayesflow/adapters/transforms/convert_dtype.py b/bayesflow/adapters/transforms/convert_dtype.py index be3059a67..e392cf17f 100644 --- a/bayesflow/adapters/transforms/convert_dtype.py +++ b/bayesflow/adapters/transforms/convert_dtype.py @@ -10,6 +10,10 @@ @serializable(package="bayesflow.adapters") class ConvertDType(ElementwiseTransform): + """ + Default transform used to convert all floats from float64 to float32 to be in line with the Keras framework. + """ + def __init__(self, from_dtype: str, to_dtype: str): super().__init__() diff --git a/bayesflow/adapters/transforms/keep.py b/bayesflow/adapters/transforms/keep.py index 164616b62..165a0f887 100644 --- a/bayesflow/adapters/transforms/keep.py +++ b/bayesflow/adapters/transforms/keep.py @@ -11,6 +11,36 @@ @serializable(package="bayesflow.adapters") class Keep(Transform): + """ + Name the data parameters that should be kept for further calculation. 
+ + Parameters: + + keys: tuple containing the names of kept data variables as strings. + + Usage: + + Two moons simulator generates data for priors alpha, r and theta as well as observation data x. + We are interested only in theta and x, to keep only theta and x we should use the following: + + adapter = ( + bf.adapters.Adapter() + # only keep theta and x + .keep(("theta", "x")) + ) + + Example: + >>> a = [1, 2, 3, 4] + >>> b = [[1, 2], [3, 4]] + >>> c = [[5, 6, 7, 8]] + >>> dat = dict(a=a, b=b, c=c) + # Here we want to only keep elements b and c + >>> keeper = bf.adapters.transforms.Keep(("b", "c")) + >>> keeper.forward(dat) + {'b': [[1, 2], [3, 4]], 'c': [[5, 6, 7, 8]]} + + """ + def __init__(self, keys: Sequence[str]): self.keys = keys diff --git a/bayesflow/adapters/transforms/to_array.py b/bayesflow/adapters/transforms/to_array.py index 7c62af9a0..14abb9b3c 100644 --- a/bayesflow/adapters/transforms/to_array.py +++ b/bayesflow/adapters/transforms/to_array.py @@ -11,6 +11,21 @@ @serializable(package="bayesflow.adapters") class ToArray(ElementwiseTransform): + """ + Checks provided data for any non-arrays and converts them to numpy arrays. + This ensures all data is in a format suitable for training. + + Example: + >>> ta = bf.adapters.transforms.ToArray() + >>> a = [1, 2, 3, 4] + >>> ta.forward(a) + array([1, 2, 3, 4]) + >>> b = [[1, 2], [3, 4]] + >>> ta.forward(b) + array([[1, 2], + [3, 4]]) + """ + def __init__(self): super().__init__() self.original_type = None