Skip to content

Commit

Permalink
fix import
Browse files Browse the repository at this point in the history
  • Loading branch information
Giuseppe5 committed Sep 24, 2024
1 parent 7992bec commit 9bb9d5f
Show file tree
Hide file tree
Showing 7 changed files with 13 additions and 13 deletions.
2 changes: 1 addition & 1 deletion src/brevitas/core/function_wrapper/clamp.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from brevitas.core.utils import StatelessBuffer
from brevitas.function import tensor_clamp
from brevitas.function.ops import max_float
from brevitas.utils.quant_utils import MAX_MANTISSA_DICT
from brevitas.utils.torch_utils import MAX_MANTISSA_DICT


class TensorClamp(brevitas.jit.ScriptModule):
Expand Down
2 changes: 1 addition & 1 deletion src/brevitas/core/scaling/float_scaling.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
import brevitas
from brevitas.core.utils import StatelessBuffer
from brevitas.function.ops import max_float
from brevitas.utils.quant_utils import MAX_MANTISSA_DICT
from brevitas.utils.torch_utils import MAX_MANTISSA_DICT


class FloatScaling(brevitas.jit.ScriptModule):
Expand Down
2 changes: 1 addition & 1 deletion src/brevitas/export/inference/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@
from brevitas.proxy.parameter_quant import BiasQuantProxyFromInjector
from brevitas.proxy.parameter_quant import WeightQuantProxyFromInjector
from brevitas.proxy.runtime_quant import ActQuantProxyFromInjector
from brevitas.utils.quant_utils import MAX_MANTISSA_DICT
from brevitas.utils.torch_utils import float_internal_scale
from brevitas.utils.torch_utils import MAX_MANTISSA_DICT


class InferenceHandler(torch.nn.Module, ABC):
Expand Down
8 changes: 0 additions & 8 deletions src/brevitas/utils/quant_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,11 +221,3 @@ def float_to_int_impl_to_enum(module):
return FloatToIntImplType.STOCHASTIC_ROUND
else:
return None


def max_mantissa_func(val):
    """Return the maximum mantissa value for *val* mantissa bits as a 0-dim tensor.

    Computes the geometric sum 2**0 + 2**-1 + ... + 2**-val
    (i.e. 1.implicit-bit plus `val` fractional mantissa bits all set).
    """
    # Local import, presumably to avoid a circular import at module load
    # time — TODO confirm against the package's import graph.
    import torch
    # arange(0, -val-1, -1) yields exponents [0, -1, ..., -val].
    return torch.sum((2. ** torch.arange(0, -1. * val - 1., -1.)))


# Precomputed table for mantissa widths 0..15, so callers avoid
# re-running the tensor reduction on every lookup.
MAX_MANTISSA_DICT = {x: max_mantissa_func(x) for x in range(0, 16)}
8 changes: 8 additions & 0 deletions src/brevitas/utils/torch_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,3 +113,11 @@ def padding(x: torch.Tensor, group_size: int, group_dim: int) -> List[int]:
padding[2 * group_dim] = group_size - size[group_dim] % group_size
padding = list(reversed(padding))
return padding


def max_mantissa_func(val):
    """Return the maximum mantissa value for *val* mantissa bits as a 0-dim tensor.

    Evaluates the geometric series 2**0 + 2**-1 + ... + 2**-val, i.e. the
    largest value a mantissa with `val` fractional bits can represent
    (implicit leading one included).
    """
    # Local import, presumably to dodge a circular import at module
    # load time — TODO confirm against the package's import graph.
    import torch
    # Exponents run [0, -1, ..., -val]; summing 2**e over them gives the series.
    exponents = torch.arange(0, -1. * val - 1., -1.)
    return (2. ** exponents).sum()


# Lookup table of the series for mantissa widths 0..15 so that hot paths
# read a precomputed value instead of recomputing the reduction.
MAX_MANTISSA_DICT = {x: max_mantissa_func(x) for x in range(0, 16)}
2 changes: 1 addition & 1 deletion tests/brevitas/core/test_clamp.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from brevitas.quant.experimental.float_quant_ocp import Fp8e5m2OCPWeight
from brevitas.utils.float_quant_utils import get_max_available_float
from brevitas.utils.float_quant_utils import get_min_available_float
from brevitas.utils.quant_utils import MAX_MANTISSA_DICT
from brevitas.utils.torch_utils import MAX_MANTISSA_DICT
from tests.brevitas.hyp_helper import float_tensor_random_shape_st

from .minifloat_fixtures import *
Expand Down
2 changes: 1 addition & 1 deletion tests/brevitas/core/test_float_quant.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@
from brevitas.core.scaling import ConstScaling
from brevitas.core.scaling import FloatScaling
from brevitas.function.ops import max_float
from brevitas.utils.quant_utils import MAX_MANTISSA_DICT
from brevitas.utils.torch_utils import float_internal_scale
from brevitas.utils.torch_utils import MAX_MANTISSA_DICT
from tests.brevitas.hyp_helper import float_st
from tests.brevitas.hyp_helper import float_tensor_random_shape_st
from tests.brevitas.hyp_helper import random_minifloat_format
Expand Down

0 comments on commit 9bb9d5f

Please sign in to comment.