Add jitter_scale parameter for initial point generation #7643

Open · wants to merge 2 commits into base: main
22 changes: 16 additions & 6 deletions pymc/initial_point.py
@@ -66,12 +66,12 @@ def make_initial_point_fns_per_chain(
model,
overrides: StartDict | Sequence[StartDict | None] | None,
jitter_rvs: set[TensorVariable] | None = None,
jitter_scale: float = 1.0,
Member:

Need to update the docstrings of the functions that now accept jitter_scale.

chains: int,
) -> list[Callable]:
"""Create an initial point function for each chain, as defined by initvals.

- If a single initval dictionary is passed, the function is replicated for each
- chain, otherwise a unique function is compiled for each entry in the dictionary.
+ If a single initval dictionary is passed, the function is replicated for each chain, otherwise a unique function is compiled for each entry in the dictionary.

Parameters
----------
@@ -81,6 +81,8 @@ def make_initial_point_fns_per_chain(
jitter_rvs : set, optional
Random variable tensors for which U(-1, 1) jitter shall be applied.
(To the transformed space if applicable.)
jitter_scale : float, optional
Scale of the U(-jitter_scale, jitter_scale) jitter applied to the variables in jitter_rvs. Defaults to 1.0.

Raises
------
@@ -96,6 +98,7 @@ def make_initial_point_fns_per_chain(
model=model,
overrides=overrides,
jitter_rvs=jitter_rvs,
jitter_scale=jitter_scale,
return_transformed=True,
)
] * chains
@@ -104,6 +107,7 @@ def make_initial_point_fns_per_chain(
make_initial_point_fn(
model=model,
jitter_rvs=jitter_rvs,
jitter_scale=jitter_scale,
overrides=chain_overrides,
return_transformed=True,
)
@@ -122,6 +126,7 @@ def make_initial_point_fn(
model,
overrides: StartDict | None = None,
jitter_rvs: set[TensorVariable] | None = None,
jitter_scale: float = 1.0,
default_strategy: str = "support_point",
return_transformed: bool = True,
) -> Callable:
@@ -130,8 +135,9 @@ def make_initial_point_fn(
Parameters
----------
jitter_rvs : set
- The set (or list or tuple) of random variables for which a U(-1, +1) jitter should be
- added to the initial value. Only available for variables that have a transform or real-valued support.
+ The set (or list or tuple) of random variables for which a U(-1, +1) jitter should be added to the initial value. Only available for variables that have a transform or real-valued support.
jitter_scale : float, optional
Scale of the U(-jitter_scale, jitter_scale) jitter applied to the variables in jitter_rvs. Defaults to 1.0.
default_strategy : str
Which of { "support_point", "prior" } to prefer if the initval setting for an RV is None.
overrides : dict
@@ -150,6 +156,7 @@ def make_initial_point_fn(
rvs_to_transforms=model.rvs_to_transforms,
initval_strategies=initval_strats,
jitter_rvs=jitter_rvs,
jitter_scale=jitter_scale,
default_strategy=default_strategy,
return_transformed=return_transformed,
)
@@ -188,6 +195,7 @@ def make_initial_point_expression(
rvs_to_transforms: dict[TensorVariable, Transform],
initval_strategies: dict[TensorVariable, np.ndarray | Variable | str | None],
jitter_rvs: set[TensorVariable] | None = None,
jitter_scale: float = 1.0,
default_strategy: str = "support_point",
return_transformed: bool = False,
) -> list[TensorVariable]:
@@ -203,8 +211,10 @@ def make_initial_point_expression(
Mapping of free random variable tensors to initial value strategies.
For example the `Model.initial_values` dictionary.
jitter_rvs : set
- The set (or list or tuple) of random variables for which a U(-1, +1) jitter should be
+ The set (or list or tuple) of random variables for which a U(-1, 1) jitter should be
added to the initial value. Only available for variables that have a transform or real-valued support.
jitter_scale : float, optional
Scale of the U(-jitter_scale, jitter_scale) jitter applied to the variables in jitter_rvs. Defaults to 1.0.
default_strategy : str
Which of { "support_point", "prior" } to prefer if the initval strategy setting for an RV is None.
return_transformed : bool
@@ -265,7 +275,7 @@ def make_initial_point_expression(
value = transform.forward(value, *variable.owner.inputs)

if variable in jitter_rvs:
- jitter = pt.random.uniform(-1, 1, size=value.shape)
+ jitter = pt.random.uniform(-jitter_scale, jitter_scale, size=value.shape)
jitter.name = f"{variable.name}_jitter"
value = value + jitter

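For orientation, a minimal usage sketch of the keyword this file adds. It is not part of the PR and only runs with this branch applied; the toy Normal model and the 5.0 scale are illustrative choices, and the call pattern mirrors the test added further down.

import pymc as pm
from pymc.initial_point import make_initial_point_fn

# Illustrative toy model; any model with free RVs would do.
with pm.Model() as model:
    x = pm.Normal("x", 0.0, 1.0)

# Current behaviour: U(-1, 1) jitter around each variable's support point.
fn_default = make_initial_point_fn(
    model=model,
    jitter_rvs=set(model.free_RVs),
    return_transformed=True,
)

# With this PR: U(-5, 5) jitter instead (5.0 is an arbitrary example value).
fn_wide = make_initial_point_fn(
    model=model,
    jitter_rvs=set(model.free_RVs),
    jitter_scale=5.0,
    return_transformed=True,
)

# Each returned function maps an integer seed to a dict of initial values.
print(fn_default(0), fn_wide(0))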
6 changes: 5 additions & 1 deletion pymc/sampling/mcmc.py
@@ -1423,10 +1423,11 @@ def _init_jitter(
initvals: StartDict | Sequence[StartDict | None] | None,
seeds: Sequence[int] | np.ndarray,
jitter: bool,
jitter_scale: float,
jitter_max_retries: int,
logp_dlogp_func=None,
) -> list[PointType]:
"""Apply a uniform jitter in [-1, 1] to the test value as starting point in each chain.
"""Apply a uniform jitter in [-jitter_scale, jitter_scale] to the test value as starting point in each chain.

``model.check_start_vals`` is used to test whether the jittered starting
values produce a finite log probability. Invalid values are resampled
Expand All @@ -1437,6 +1438,8 @@ def _init_jitter(
----------
jitter: bool
Whether to apply jitter or not.
jitter_scale : float, optional
Scale of the U(-jitter_scale, jitter_scale) jitter applied to the model's free RVs. Defaults to 1.0.
jitter_max_retries : int
Maximum number of repeated attempts at initializing values (per chain).

Expand All @@ -1449,6 +1452,7 @@ def _init_jitter(
model=model,
overrides=initvals,
jitter_rvs=set(model.free_RVs) if jitter else set(),
jitter_scale=jitter_scale if jitter else 1.0,
chains=len(seeds),
)

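As context for the docstring above, a rough standalone sketch of the retry idea it describes, not the actual _init_jitter implementation: draw a jittered start in [-jitter_scale, jitter_scale], keep it only if the log-probability is finite, and stop after jitter_max_retries attempts. The helper name jittered_start and the scalar setup are assumptions made purely for illustration.

import numpy as np

def jittered_start(support_point, logp, jitter_scale=1.0, jitter_max_retries=10, seed=None):
    """Toy, scalar version of the jitter-and-retry loop described in the docstring above."""
    rng = np.random.default_rng(seed)
    point = support_point
    for _ in range(jitter_max_retries):
        point = support_point + rng.uniform(-jitter_scale, jitter_scale)
        if np.isfinite(logp(point)):
            return point
    # pymc would let model.check_start_vals raise if the point is still invalid;
    # here we simply return the last draw.
    return point

# Example: a log-density that is finite only for positive values.
print(jittered_start(0.5, lambda x: np.log(x) if x > 0 else -np.inf, jitter_scale=1.0, seed=123))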
19 changes: 19 additions & 0 deletions tests/test_initial_point.py
@@ -152,6 +152,25 @@ def test_adds_jitter(self):
assert fn(0) == fn(0)
assert fn(0) != fn(1)

def test_jitter_scale(self):
Member:

Can you simplify this test? If you test the default 1.0 jitter and a very large amount like 1000.0, a single draw should be enough to test the change.

Author:

Would I need to test whether the large jitters are close to ±1000? If so, what if the single draw for the large jitter is ±80? I'm not sure how to test that the jitter_scale is correct without making many draws.

Member (@ricardoV94, Jan 13, 2025):

The large jitter should be >> 10 in magnitude with p = 1 - (10/1000) = 0.99, whereas the default jitter should be smaller than 1 with p = 1.

with pm.Model() as pmodel:
A = pm.HalfFlat("A", initval="support_point")

fn_default = make_initial_point_fn(
model=pmodel,
jitter_rvs=set(pmodel.free_RVs),
return_transformed=True,
)

fn_large = make_initial_point_fn(
model=pmodel,
jitter_rvs=set(pmodel.free_RVs),
jitter_scale=1000.0,
return_transformed=True,
)

assert fn_large(0)["A_log__"] > fn_default(0)["A_log__"]
Member:

Suggested change:
- assert fn_large(0)["A_log__"] > fn_default(0)["A_log__"]
+ assert fn_large(0)["A_log__"] > 10
+ assert fn_default(0)["A_log__"] < 1

Member:

Or if 10 is too large, something smaller.

(A consolidated sketch of the simplified test follows the end of this diff.)


def test_respects_overrides(self):
with pm.Model() as pmodel:
A = pm.Flat("A", initval="support_point")
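Pulling the review suggestion above together, one possible consolidated form of the simplified test. This is a sketch rather than the committed change: it keeps the reviewer's thresholds of 10 and 1 but wraps the draws in abs() to match the "magnitude" argument, and it assumes HalfFlat's support point is 1, i.e. 0 on the log scale.

def test_jitter_scale(self):
    with pm.Model() as pmodel:
        A = pm.HalfFlat("A", initval="support_point")

    fn_default = make_initial_point_fn(
        model=pmodel,
        jitter_rvs=set(pmodel.free_RVs),
        return_transformed=True,
    )
    fn_large = make_initial_point_fn(
        model=pmodel,
        jitter_rvs=set(pmodel.free_RVs),
        jitter_scale=1000.0,
        return_transformed=True,
    )

    # If the transformed support point is 0, the draw is just the jitter:
    # default U(-1, 1) has magnitude < 1 with probability 1, while
    # jitter_scale=1000 gives magnitude > 10 with probability 0.99.
    assert abs(fn_default(0)["A_log__"]) < 1
    assert abs(fn_large(0)["A_log__"]) > 10

With a fixed seed the single draw is deterministic, so the 0.99 probability only matters if the seed ever changes; the absolute value keeps the large-scale assertion tied to the jitter's magnitude rather than the sign of one particular draw.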