From 3f222a79f6cfd7e3f9eaf76ab501626111e2332f Mon Sep 17 00:00:00 2001
From: Giuseppe Franco
Date: Tue, 26 Mar 2024 14:57:20 +0000
Subject: [PATCH] More tests fix

---
 tests/brevitas/core/test_clamp.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/tests/brevitas/core/test_clamp.py b/tests/brevitas/core/test_clamp.py
index 9cd9d94f6..a424b49ba 100644
--- a/tests/brevitas/core/test_clamp.py
+++ b/tests/brevitas/core/test_clamp.py
@@ -26,9 +26,9 @@ def test_max_value(minifloat, expected_max_val):
     nan_values = minifloat.float_clamp_impl.nan_values
     saturating = minifloat.float_clamp_impl.saturating
     max_val = max_float(
-        minifloat.exponent_bit_width,
-        minifloat.mantissa_bit_width,
-        minifloat.exponent_bias,
+        torch.tensor(minifloat.exponent_bit_width, dtype=torch.float32),
+        torch.tensor(minifloat.mantissa_bit_width, dtype=torch.float32),
+        torch.tensor(minifloat.exponent_bias, dtype=torch.float32),
         nan_values,
         inf_values,
         saturating)
@@ -42,9 +42,9 @@ def test_float_clamp(inp, fp8_clamp):
     nan_values = fp8_clamp.float_clamp_impl.nan_values
     saturating = fp8_clamp.float_clamp_impl.saturating
    max_val = max_float(
-        fp8_clamp.exponent_bit_width,
-        fp8_clamp.mantissa_bit_width,
-        fp8_clamp.exponent_bias,
+        torch.tensor(fp8_clamp.exponent_bit_width, dtype=torch.float32),
+        torch.tensor(fp8_clamp.mantissa_bit_width, dtype=torch.float32),
+        torch.tensor(fp8_clamp.exponent_bias, dtype=torch.float32),
         nan_values,
         inf_values,
         saturating)
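
Note: the snippet below is a minimal standalone sketch, not the Brevitas implementation of max_float. It only illustrates the pattern the patch adopts in both tests: wrapping the integer exponent/mantissa bit widths and the exponent bias in float32 tensors before calling a helper that computes the largest representable minifloat value with torch arithmetic. The toy_max_float name, its simplified formula (no inf/NaN encodings reserved), and the e4m3-style parameters are assumptions for illustration only.

import torch

def toy_max_float(exponent_bit_width: torch.Tensor,
                  mantissa_bit_width: torch.Tensor,
                  exponent_bias: torch.Tensor) -> torch.Tensor:
    # Largest normal value of a minifloat format, ignoring inf/NaN reservations:
    # (2 - 2**-M) * 2**(2**E - 1 - bias)
    max_exponent = torch.pow(torch.tensor(2.0), exponent_bit_width) - 1.0 - exponent_bias
    max_mantissa = 2.0 - torch.pow(torch.tensor(2.0), -mantissa_bit_width)
    return max_mantissa * torch.pow(torch.tensor(2.0), max_exponent)

# Mirrors the pattern used in the patched tests: pass the integer attributes
# as float32 tensors rather than plain Python ints.
max_val = toy_max_float(
    torch.tensor(4, dtype=torch.float32),  # exponent bit width (e4m3-style, assumed)
    torch.tensor(3, dtype=torch.float32),  # mantissa bit width (assumed)
    torch.tensor(7, dtype=torch.float32))  # exponent bias (assumed)
print(max_val)  # tensor(480.) under the simplified formula above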