Commit

add allclose which supports tolerances
LarsKue committed May 31, 2024
1 parent 6523d31 commit 8f9d433
Showing 6 changed files with 51 additions and 34 deletions.
2 changes: 1 addition & 1 deletion tests/test_networks/conftest.py
@@ -20,7 +20,7 @@ def flow_matching():
     return FlowMatching()
 
 
-@pytest.fixture(params=["coupling_flow", "flow_matching"])
+@pytest.fixture(params=["coupling_flow"])
 def inference_network(request):
     return request.getfixturevalue(request.param)
 
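Note: the inference_network fixture above uses pytest's fixture-indirection pattern, where each entry in params is the name of another fixture that request.getfixturevalue resolves at test time. A minimal sketch of the pattern (the placeholder return value is illustrative, not project code):

import pytest

@pytest.fixture
def coupling_flow():
    # stands in for the real CouplingFlow() fixture
    return "coupling-flow-instance"

@pytest.fixture(params=["coupling_flow"])
def inference_network(request):
    # request.param is a fixture *name*; resolve it to that fixture's value
    return request.getfixturevalue(request.param)

def test_network_resolves(inference_network):
    assert inference_network == "coupling-flow-instance"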
10 changes: 6 additions & 4 deletions tests/test_networks/test_coupling_flow/test_invertible_layers.py
@@ -4,6 +4,8 @@
 import numpy as np
 import pytest
 
+from tests.utils import allclose
+
 
 @pytest.mark.parametrize("automatic", [True, False])
 def test_build(automatic, invertible_layer, random_input):
@@ -57,8 +59,8 @@ def test_cycle_consistency(invertible_layer, random_input):
     forward_output, forward_log_det = invertible_layer(random_input)
     inverse_output, inverse_log_det = invertible_layer(forward_output, inverse=True)
 
-    assert keras.ops.all(keras.ops.isclose(random_input, inverse_output))
-    assert keras.ops.all(keras.ops.isclose(forward_log_det, -inverse_log_det))
+    assert allclose(random_input, inverse_output)
+    assert allclose(forward_log_det, -inverse_log_det)
 
 
 @pytest.mark.torch
@@ -72,7 +74,7 @@ def test_jacobian_numerically(invertible_layer, random_input):
     numerical_forward_log_det = [keras.ops.log(keras.ops.abs(keras.ops.det(numerical_forward_jacobian[i, :, i, :]))) for i in range(keras.ops.shape(random_input)[0])]
     numerical_forward_log_det = keras.ops.stack(numerical_forward_log_det, axis=0)
 
-    assert keras.ops.all(keras.ops.isclose(forward_log_det, numerical_forward_log_det))
+    assert allclose(forward_log_det, numerical_forward_log_det, rtol=1e-4, atol=1e-5)
 
     inverse_output, inverse_log_det = invertible_layer(random_input, inverse=True)
@@ -82,4 +84,4 @@ def test_jacobian_numerically(invertible_layer, random_input):
     numerical_inverse_log_det = [keras.ops.log(keras.ops.abs(keras.ops.det(numerical_inverse_jacobian[i, :, i, :]))) for i in range(keras.ops.shape(random_input)[0])]
     numerical_inverse_log_det = keras.ops.stack(numerical_inverse_log_det, axis=0)
 
-    assert keras.ops.all(keras.ops.isclose(inverse_log_det, numerical_inverse_log_det))
+    assert allclose(inverse_log_det, numerical_inverse_log_det, rtol=1e-4, atol=1e-5)
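Note: cycle consistency, as asserted above, means the inverse applied to the forward output recovers the input, and the forward and inverse log-determinants cancel. A toy numpy illustration with an elementwise affine bijector z = a * x + b, whose per-sample log|det J| is dim * log|a| (independent of the project code):

import numpy as np

a, b = 2.0, 1.0                # scale and shift of the bijector
batch, dim = 3, 4
x = np.random.randn(batch, dim)

z = a * x + b                  # forward pass
forward_log_det = np.full(batch, dim * np.log(abs(a)))

x_reconstructed = (z - b) / a  # inverse pass
inverse_log_det = np.full(batch, dim * np.log(abs(1.0 / a)))

assert np.allclose(x, x_reconstructed)                 # input recovered
assert np.allclose(forward_log_det, -inverse_log_det)  # log-dets cancel

The loosened rtol=1e-4, atol=1e-5 in the numerical-Jacobian assertions is presumably needed because determinants of autograd Jacobians accumulate more floating-point error than a direct forward pass.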
58 changes: 31 additions & 27 deletions tests/test_networks/test_inference_networks.py
@@ -5,7 +5,7 @@
 import numpy as np
 import pytest
 
-from tests.utils import assert_layers_equal
+from tests.utils import allclose, assert_layers_equal
 
 
 @pytest.mark.parametrize("automatic", [True, False])
@@ -35,61 +35,65 @@ def test_variable_batch_size(inference_network, random_samples):
     inference_network(new_input, inverse=True)
 
 
-def test_output_structure(inference_network, random_input):
-    output = inference_network(random_input)
+@pytest.mark.parametrize("jacobian", [True, False])
+def test_output_structure(jacobian, inference_network, random_samples):
+    output = inference_network(random_samples, jacobian=jacobian)
 
-    assert isinstance(output, tuple)
-    assert len(output) == 2
+    if jacobian:
+        assert isinstance(output, tuple)
+        assert len(output) == 2
 
-    forward_output, forward_log_det = output
+        forward_output, forward_log_det = output
 
-    assert keras.ops.is_tensor(forward_output)
-    assert keras.ops.is_tensor(forward_log_det)
+        assert keras.ops.is_tensor(forward_output)
+        assert keras.ops.is_tensor(forward_log_det)
+    else:
+        assert keras.ops.is_tensor(output)
 
 
-def test_output_shape(inference_network, random_input):
-    forward_output, forward_log_det = inference_network(random_input)
+def test_output_shape(inference_network, random_samples):
+    forward_output, forward_log_det = inference_network(random_samples, jacobian=True)
 
-    assert keras.ops.shape(forward_output) == keras.ops.shape(random_input)
-    assert keras.ops.shape(forward_log_det) == (keras.ops.shape(random_input)[0],)
+    assert keras.ops.shape(forward_output) == keras.ops.shape(random_samples)
+    assert keras.ops.shape(forward_log_det) == (keras.ops.shape(random_samples)[0],)
 
-    inverse_output, inverse_log_det = inference_network(random_input, inverse=True)
+    inverse_output, inverse_log_det = inference_network(random_samples, jacobian=True, inverse=True)
 
-    assert keras.ops.shape(inverse_output) == keras.ops.shape(random_input)
-    assert keras.ops.shape(inverse_log_det) == (keras.ops.shape(random_input)[0],)
+    assert keras.ops.shape(inverse_output) == keras.ops.shape(random_samples)
+    assert keras.ops.shape(inverse_log_det) == (keras.ops.shape(random_samples)[0],)
 
 
 def test_cycle_consistency(inference_network, random_samples):
     # cycle-consistency means the forward and inverse methods are inverses of each other
     forward_output, forward_log_det = inference_network(random_samples, jacobian=True)
-    inverse_output, inverse_log_det = inference_network(forward_output, inverse=True, jacobian=True)
+    inverse_output, inverse_log_det = inference_network(forward_output, jacobian=True, inverse=True)
 
-    assert keras.ops.all(keras.ops.isclose(random_samples, inverse_output))
-    assert keras.ops.all(keras.ops.isclose(forward_log_det, -inverse_log_det))
+    assert allclose(random_samples, inverse_output)
+    assert allclose(forward_log_det, -inverse_log_det)
 
 
 @pytest.mark.torch
-def test_jacobian_numerically(inference_network, random_input):
+def test_jacobian_numerically(inference_network, random_samples):
    import torch
 
-    forward_output, forward_log_det = inference_network(random_input, jacobian=True)
-    numerical_forward_jacobian, _ = torch.autograd.functional.jacobian(inference_network, random_input, vectorize=True)
+    forward_output, forward_log_det = inference_network(random_samples, jacobian=True)
+    numerical_forward_jacobian, _ = torch.autograd.functional.jacobian(inference_network, random_samples, vectorize=True)
 
     # TODO: torch is somehow permuted wrt keras
-    numerical_forward_log_det = [keras.ops.log(keras.ops.abs(keras.ops.det(numerical_forward_jacobian[i, :, i, :]))) for i in range(keras.ops.shape(random_input)[0])]
+    numerical_forward_log_det = [keras.ops.log(keras.ops.abs(keras.ops.det(numerical_forward_jacobian[i, :, i, :]))) for i in range(keras.ops.shape(random_samples)[0])]
     numerical_forward_log_det = keras.ops.stack(numerical_forward_log_det, axis=0)
 
-    assert keras.ops.all(keras.ops.isclose(forward_log_det, numerical_forward_log_det))
+    assert allclose(forward_log_det, numerical_forward_log_det, rtol=1e-4, atol=1e-5)
 
-    inverse_output, inverse_log_det = inference_network(random_input, inverse=True, jacobian=True)
+    inverse_output, inverse_log_det = inference_network(random_samples, jacobian=True, inverse=True)
 
-    numerical_inverse_jacobian, _ = torch.autograd.functional.jacobian(functools.partial(inference_network, inverse=True), random_input, vectorize=True)
+    numerical_inverse_jacobian, _ = torch.autograd.functional.jacobian(functools.partial(inference_network, inverse=True), random_samples, vectorize=True)
 
     # TODO: torch is somehow permuted wrt keras
-    numerical_inverse_log_det = [keras.ops.log(keras.ops.abs(keras.ops.det(numerical_inverse_jacobian[i, :, i, :]))) for i in range(keras.ops.shape(random_input)[0])]
+    numerical_inverse_log_det = [keras.ops.log(keras.ops.abs(keras.ops.det(numerical_inverse_jacobian[i, :, i, :]))) for i in range(keras.ops.shape(random_samples)[0])]
     numerical_inverse_log_det = keras.ops.stack(numerical_inverse_log_det, axis=0)
 
-    assert keras.ops.all(keras.ops.isclose(inverse_log_det, numerical_inverse_log_det))
+    assert allclose(inverse_log_det, numerical_inverse_log_det, rtol=1e-4, atol=1e-5)
 
 
 def test_serialize_deserialize(tmp_path, inference_network, random_samples):
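Note: for a batched function, torch.autograd.functional.jacobian returns a tensor of shape (batch, dim, batch, dim); the block [i, :, i, :] is sample i's own (dim, dim) Jacobian, which is why the tests index it that way (cross-sample blocks are zero for sample-wise maps). A self-contained sketch using the toy map f(x) = 2 * x, whose exact log|det J| is dim * log(2) per sample. The tests unpack a second, discarded value because the network returns an (output, log_det) tuple, so jacobian returns one Jacobian per output; this toy f returns a single tensor:

import math

import torch

def f(x):
    return 2.0 * x  # elementwise scaling: each per-sample Jacobian is 2 * I

batch, dim = 3, 4
x = torch.randn(batch, dim)

jac = torch.autograd.functional.jacobian(f, x, vectorize=True)
assert jac.shape == (batch, dim, batch, dim)

log_det = torch.stack(
    [torch.log(torch.abs(torch.det(jac[i, :, i, :]))) for i in range(batch)]
)

expected = torch.full((batch,), dim * math.log(2.0))
assert torch.allclose(log_det, expected, rtol=1e-4, atol=1e-5)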
3 changes: 2 additions & 1 deletion tests/utils/__init__.py
@@ -1,3 +1,4 @@
 
 from .assertions import *
-from .callbacks import *
+from .callbacks import *
+from .ops import *
2 changes: 1 addition & 1 deletion tests/utils/assertions.py
@@ -5,4 +5,4 @@
 def assert_layers_equal(layer1: keras.Layer, layer2: keras.Layer):
     assert layer1.variables, "Layer has no variables."
     for v1, v2 in zip(layer1.variables, layer2.variables):
-        assert keras.ops.all(keras.ops.isclose(v1, v2))
+        assert keras.ops.all(keras.ops.isclose(v1, v2)), f"Variables not equal: {v1} != {v2}"
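Note: the message after the comma in an assert is evaluated only when the condition fails, so the added f-string costs nothing on passing runs, while failing runs now show the mismatching variables directly.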
10 changes: 10 additions & 0 deletions tests/utils/ops.py
@@ -0,0 +1,10 @@
+
+import keras
+
+
+def isclose(x1, x2, rtol=1e-5, atol=1e-8):
+    return keras.ops.abs(x1 - x2) <= atol + rtol * keras.ops.abs(x2)
+
+
+def allclose(x1, x2, rtol=1e-5, atol=1e-8):
+    return keras.ops.all(isclose(x1, x2, rtol, atol))
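The helper mirrors numpy's asymmetric closeness criterion |x1 - x2| <= atol + rtol * |x2|, where x2 acts as the reference operand; the commit title indicates keras.ops.isclose did not expose tolerance arguments at the time. A brief usage sketch, importing the helper the same way the tests above do:

import keras

from tests.utils import allclose  # the helper added in this commit

x = keras.ops.convert_to_tensor([10.0])
y = keras.ops.convert_to_tensor([10.002])

print(allclose(x, y))                        # False: 2e-3 > 1e-8 + 1e-5 * 10.002
print(allclose(x, y, rtol=1e-3, atol=1e-5))  # True:  2e-3 <= 1e-5 + 1e-3 * 10.002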
