Skip to content

Commit

Permalink
Fix for act eq
Browse files Browse the repository at this point in the history
  • Loading branch information
Giuseppe5 committed Jul 6, 2023
1 parent 408439e commit a454afc
Showing 1 changed file with 4 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@
'bias_corr': [True], # Bias Correction
'graph_eq_iterations': [0, 20], # Graph Equalization
'graph_eq_merge_bias': [False, True], # Merge bias for Graph Equalization
'act_eq': ['fx', 'layerwise', None], # Perform Activation Equalization (Smoothquant)
'act_equalization': ['fx', 'layerwise', None], # Perform Activation Equalization (Smoothquant)
'learned_round': [False, True], # Enable/Disable Learned Round
'gptq': [False, True], # Enable/Disable GPTQ
'gptq_act_order': [False, True], # Use act_order heuristics for GPTQ
Expand All @@ -73,7 +73,7 @@
'bias_corr': [True], # Bias Correction
'graph_eq_iterations': [20], # Graph Equalization
'graph_eq_merge_bias': [True], # Merge bias for Graph Equalization
'act_eq': ['fx'], # Perform Activation Equalization (Smoothquant)
'act_equalization': ['fx'], # Perform Activation Equalization (Smoothquant)
'learned_round': [False], # Enable/Disable Learned Round
'gptq': [True], # Enable/Disable GPTQ
'gptq_act_order': [False], # Use act_order heuristics for GPTQ
Expand Down Expand Up @@ -204,7 +204,8 @@ def ptq_torchvision_models(df, args):

if config_namespace.act_equalization is not None:
print("Applying activation equalization:")
apply_act_equalization(model, calib_loader, layerwise=args.act_equalization == 'layerwise')
apply_act_equalization(
model, calib_loader, layerwise=config_namespace.act_equalization == 'layerwise')

# Define the quantized model
quant_model = quantize_model(
Expand Down

0 comments on commit a454afc

Please sign in to comment.