Commit 984c8ab

Fixing default weight_quant
i-colbert committed Jul 12, 2023
1 parent 2cdf3d4 commit 984c8ab
Showing 1 changed file with 2 additions and 1 deletion.
src/brevitas_examples/bnn_pynq/models/resnet.py (2 additions, 1 deletion)
@@ -8,6 +8,7 @@
 
 import brevitas.nn as qnn
 from brevitas.quant import Int8WeightPerChannelFloat
+from brevitas.quant import Int8WeightPerTensorFloat
 from brevitas.quant import TruncTo8bit
 from brevitas.quant_tensor import QuantTensor
 
@@ -121,7 +122,7 @@ def __init__(
             round_average_pool=False,
             weight_quant=Int8WeightPerChannelFloat,
             first_layer_weight_quant=Int8WeightPerChannelFloat,
-            last_layer_weight_quant=Int8WeightPerChannelFloat):
+            last_layer_weight_quant=Int8WeightPerTensorFloat):
         super(QuantResNet, self).__init__()
         self.in_planes = 64
         self.conv1 = make_quant_conv2d(
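
For readers skimming this change, the sketch below (not part of the commit) illustrates what the two quantizers do in practice: Int8WeightPerChannelFloat learns one floating-point scale per output channel, while Int8WeightPerTensorFloat shares a single scale across the whole weight tensor, which is the new default for the last layer. The standalone QuantConv2d layer and its channel sizes are arbitrary choices for illustration; only the two quantizer classes come from the diff above.

# Illustrative sketch, not part of the commit: compare the two weight quantizers
# on a standalone Brevitas layer. The layer shapes here are arbitrary assumptions.
import brevitas.nn as qnn
from brevitas.quant import Int8WeightPerChannelFloat
from brevitas.quant import Int8WeightPerTensorFloat

# Per-channel: one scale per output channel (still the default for most layers).
conv_per_channel = qnn.QuantConv2d(
    in_channels=64, out_channels=128, kernel_size=3,
    weight_quant=Int8WeightPerChannelFloat)

# Per-tensor: a single scale shared by the whole weight tensor
# (the new default for last_layer_weight_quant after this commit).
conv_per_tensor = qnn.QuantConv2d(
    in_channels=64, out_channels=128, kernel_size=3,
    weight_quant=Int8WeightPerTensorFloat)

# Inspect the resulting scale shapes: one entry per output channel vs. a single value.
print(conv_per_channel.quant_weight().scale.shape)
print(conv_per_tensor.quant_weight().scale.shape)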
