
Commit

Fix (GPFQ): change default value for p to 1
fabianandresgrob authored and Giuseppe5 committed Nov 15, 2023
1 parent e0d991c commit 6695e8d
Showing 2 changed files with 5 additions and 5 deletions.
8 changes: 4 additions & 4 deletions src/brevitas/graph/gpfq.py
@@ -43,7 +43,7 @@ def __init__(
            inplace: bool = True,
            create_weight_orig: bool = True,
            use_quant_activations: bool = True,
-           p: int = 0.25,
+           p: float = 1.0,
            return_forward_output: bool = False,
            act_order: bool = False) -> None:
        if not inplace:
@@ -117,10 +117,10 @@ def __init__(
            act_order,
            len_parallel_layers=1,
            create_weight_orig=True,
-           p=0.25) -> None:
+           p=1.0) -> None:

        super().__init__(layer, name, act_order, len_parallel_layers, create_weight_orig)

        self.float_input = None
        self.quantized_input = None
        self.index_computed = False
2 changes: 1 addition & 1 deletion in the second changed file (an example script's argument parser)
@@ -167,7 +167,7 @@
    default=True,
    help='Narrow range for weight quantization (default: enabled)')
parser.add_argument(
-    '--gpfq-p', default=0.25, type=float, help='P parameter for GPFQ (default: 0.25)')
+    '--gpfq-p', default=1.0, type=float, help='P parameter for GPFQ (default: 1.0)')
parser.add_argument(
    '--quant-format',
    default='int',

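For context on how the new default is picked up in practice, below is a minimal sketch of applying GPFQ through the gpfq_mode context manager whose constructor is changed above. It assumes the calibration-loop pattern used elsewhere in the Brevitas PTQ examples; the helper name apply_gpfq and the quant_model / calib_loader arguments are illustrative placeholders, not part of this commit.

import torch
from brevitas.graph.gpfq import gpfq_mode

def apply_gpfq(calib_loader, quant_model):
    # Sketch only: quant_model is an already-quantized model and calib_loader a
    # calibration DataLoader; both are placeholders, not defined by this commit.
    quant_model.eval()
    with torch.no_grad():
        # p now defaults to 1.0; pass p=0.25 explicitly to restore the previous behaviour.
        with gpfq_mode(quant_model, use_quant_activations=True) as gpfq:
            gpfq_model = gpfq.model
            for _ in range(gpfq.num_layers):
                for images, _ in calib_loader:
                    gpfq_model(images)
                gpfq.update()
    return quant_model

On the example-script side, the old value can still be requested from the command line, e.g. --gpfq-p 0.25.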