fix issue
wenhuach21 committed Jun 4, 2024
1 parent db9ac04 commit fe64795
Showing 1 changed file with 2 additions and 2 deletions.
auto_round/quantizer.py (2 additions, 2 deletions)
@@ -85,8 +85,8 @@ def quant_weight_sym(weight, num_bits=4, v=0, min_scale=1.0, max_scale=1.0, scal
     """
     maxq = torch.tensor(2 ** num_bits - 1)
     if isinstance(min_scale, torch.Tensor):
-        wmin_tmp = torch.clamp(weight.min(1, keepdim=True)[0], max=0)
-        wmax_tmp = torch.clamp(weight.max(1, keepdim=True)[0], min=0)
+        wmin_tmp = torch.clamp(weight.min(1,)[0], max=0)
+        wmax_tmp = torch.clamp(weight.max(1)[0], min=0)
         wmin_tmp *= min_scale
         wmax_tmp *= max_scale
         wmax = torch.maximum(wmax_tmp, wmin_tmp)
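
The functional change is dropping keepdim=True from the per-row min/max reductions. A minimal sketch of the shape difference (not part of the commit; it assumes weight has shape (out_features, in_features) and that min_scale / max_scale are per-row tensors of shape (out_features,), which is what the isinstance check suggests; the sizes 8 and 16 are hypothetical):

import torch

weight = torch.randn(8, 16)        # hypothetical (out_features, in_features) weight
min_scale = torch.full((8,), 0.9)  # hypothetical per-row scale tensor

with_keepdim = torch.clamp(weight.min(1, keepdim=True)[0], max=0)  # shape (8, 1)
no_keepdim = torch.clamp(weight.min(1)[0], max=0)                  # shape (8,)

print((with_keepdim * min_scale).shape)  # torch.Size([8, 8]) -- unintended broadcast
print((no_keepdim * min_scale).shape)    # torch.Size([8])    -- elementwise, as intended

With keepdim=True the (8, 1) result broadcasts against the (8,) scale into an (8, 8) matrix, whereas the keepdim-free version scales each row's min/max elementwise, so the change reads as a shape fix for the tensor-scale branch.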
