Skip to content

Commit

Permalink
[TFLite][Frontend] Fix test failures caused by div-by-zero (#15844)
Browse files Browse the repository at this point in the history
* [TFLite][Frontend] Support quantized floor_mod

* [TVM][Frontend] Fix zero-point issues in quantized div/floor_div

* [TVM][Frontend] Fix zero-point issues in quantized div/floor_div
Loading branch information
p3achyjr authored Oct 10, 2023
1 parent a79f632 commit eb2a4bc
Showing 1 changed file with 14 additions and 1 deletion.
15 changes: 14 additions & 1 deletion tests/python/frontend/tflite/test_forward.py
Original file line number Diff line number Diff line change
Expand Up @@ -2452,6 +2452,7 @@ def _test_elemwise(
qnn_op=None,
same_qnn_params=False,
comparison_op=False,
exclude_zero_point=False,
):
"""One iteration of elemwise"""

Expand Down Expand Up @@ -2480,6 +2481,16 @@ def __test_elemwise(in_data):
inq0_min, inq0_max = (out_min, out_max)
inq1_min, inq1_max = (out_min, out_max)

if exclude_zero_point:
if inq1_max == inq1_min:
raise ZeroDivisionError("Input range is 0.")

# only compute for rhs.
quant_scale = 255 / (inq1_max - inq1_min)
zero_point = int(round(-inq1_min * quant_scale))
data[1][data[1] == zero_point] += 1
data[1][data[1] == 0] += 1

# fake_quant will keep the tensors in float32 until the conversion in the session
inq_data = [
tf.quantization.fake_quant_with_min_max_args(
Expand Down Expand Up @@ -2619,6 +2630,7 @@ def _test_div(data, fused_activation_function=None, quantized=False, qnn_op=None
quantized,
qnn_op,
same_qnn_params=True,
exclude_zero_point=True,
)


Expand Down Expand Up @@ -2802,6 +2814,7 @@ def _test_floor_divide(data, fused_activation_function=None, quantized=False, qn
quantized,
qnn_op,
same_qnn_params=True,
exclude_zero_point=True,
)


Expand Down Expand Up @@ -2882,7 +2895,7 @@ def _test_elemwise_qnn_out_range(qnn_op):


def test_all_elemwise():
"""All_elewise"""
"""All_elemwise"""
_test_forward_elemwise(_test_add)
_test_forward_elemwise_quantized(_test_add)
_test_forward_elemwise(partial(_test_add, fused_activation_function="RELU"))
Expand Down

0 comments on commit eb2a4bc

Please sign in to comment.