Merge pull request #19 from WizardOfMenlo/wiz/clippy
Clippy lints
WizardOfMenlo authored Nov 25, 2024
2 parents cb5acff + 7a55e97 commit 02608a9
Showing 19 changed files with 73 additions and 78 deletions.
2 changes: 1 addition & 1 deletion src/bin/benchmark.rs
@@ -339,7 +339,7 @@ fn run_whir<F, MerkleConfig>(
.collect();
let evaluations = points
.iter()
- .map(|point| polynomial.evaluate_at_extension(&point))
+ .map(|point| polynomial.evaluate_at_extension(point))
.collect();
let statement = Statement {
points,
2 changes: 1 addition & 1 deletion src/bin/main.rs
@@ -356,7 +356,7 @@ fn run_whir_pcs<F, MerkleConfig>(
.collect();
let evaluations = points
.iter()
- .map(|point| polynomial.evaluate_at_extension(&point))
+ .map(|point| polynomial.evaluate_at_extension(point))
.collect();

let statement = Statement {
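In both binaries, `points.iter()` already yields references, so the closure's `point` is a reference and the extra `&` in `evaluate_at_extension(&point)` only builds a `&&_` that the compiler immediately dereferences again. This is the pattern behind clippy's needless-borrow family of lints (the lint name is inferred from the diff, not stated in the commit). A minimal, self-contained sketch of the same pattern with plain integers rather than the WHIR polynomial types:

    fn square(x: &i64) -> i64 {
        x * x
    }

    fn main() {
        let points = vec![1i64, 2, 3];
        // `p` is already `&i64` because `.iter()` yields references, so
        // `square(&p)` would pass `&&i64` and trip clippy's needless_borrow.
        let evaluations: Vec<i64> = points.iter().map(|p| square(p) + 1).collect();
        assert_eq!(evaluations, vec![2, 5, 10]);
    }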
6 changes: 3 additions & 3 deletions src/crypto/merkle_tree/blake3.rs
@@ -122,8 +122,8 @@ pub fn default_config<F: CanonicalSerialize + Send>(
<LeafH<F> as CRHScheme>::Parameters,
<CompressH as TwoToOneCRHScheme>::Parameters,
) {
- let leaf_hash_params = <LeafH<F> as CRHScheme>::setup(rng).unwrap();
- let two_to_one_params = <CompressH as TwoToOneCRHScheme>::setup(rng).unwrap();
+ <LeafH<F> as CRHScheme>::setup(rng).unwrap();
+ <CompressH as TwoToOneCRHScheme>::setup(rng).unwrap();

- (leaf_hash_params, two_to_one_params)
+ ((), ())
}
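Here the associated `Parameters` types of the Blake3 leaf hash and compression hash are apparently `()`, so binding the `setup` results and returning them as a tuple is the same as returning `((), ())` directly; clippy's let_unit_value lint flags bindings of the unit type (lint name inferred). A stripped-down sketch using a stand-in trait instead of the ark-crypto-primitives API:

    // Hypothetical stand-in for a hash scheme whose Parameters type is `()`.
    trait Scheme {
        type Parameters;
        fn setup() -> Self::Parameters;
    }

    struct UnitScheme;

    impl Scheme for UnitScheme {
        type Parameters = ();
        fn setup() -> Self::Parameters {}
    }

    // Before: `let params = UnitScheme::setup();` binds `()` (clippy: let_unit_value).
    // After: call setup for its effects (none here) and return the units directly.
    fn default_config() -> ((), ()) {
        UnitScheme::setup();
        UnitScheme::setup();
        ((), ())
    }

    fn main() {
        assert_eq!(default_config(), ((), ()));
    }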
10 changes: 5 additions & 5 deletions src/crypto/merkle_tree/keccak.rs
@@ -83,8 +83,8 @@ impl TwoToOneCRHScheme for KeccakTwoToOneCRHScheme {
right_input: T,
) -> Result<Self::Output, ark_crypto_primitives::Error> {
let mut h = sha3::Keccak256::new();
- h.update(&left_input.borrow().0);
- h.update(&right_input.borrow().0);
+ h.update(left_input.borrow().0);
+ h.update(right_input.borrow().0);
let mut output = [0; 32];
output.copy_from_slice(&h.finalize()[..]);
HashCounter::add();
@@ -123,8 +123,8 @@ pub fn default_config<F: CanonicalSerialize + Send>(
<LeafH<F> as CRHScheme>::Parameters,
<CompressH as TwoToOneCRHScheme>::Parameters,
) {
- let leaf_hash_params = <LeafH<F> as CRHScheme>::setup(rng).unwrap();
- let two_to_one_params = <CompressH as TwoToOneCRHScheme>::setup(rng).unwrap();
+ <LeafH<F> as CRHScheme>::setup(rng).unwrap();
+ <CompressH as TwoToOneCRHScheme>::setup(rng).unwrap();

- (leaf_hash_params, two_to_one_params)
+ ((), ())
}
12 changes: 7 additions & 5 deletions src/crypto/merkle_tree/mock.rs
@@ -58,10 +58,12 @@ pub fn default_config<F: CanonicalSerialize + Send>(
<LeafH<F> as CRHScheme>::Parameters,
<CompressH as TwoToOneCRHScheme>::Parameters,
) {
- let leaf_hash_params = <LeafH<F> as CRHScheme>::setup(rng).unwrap();
- let two_to_one_params = <CompressH as TwoToOneCRHScheme>::setup(rng)
- .unwrap()
- .clone();
+ <LeafH<F> as CRHScheme>::setup(rng).unwrap();
+ {
+ <CompressH as TwoToOneCRHScheme>::setup(rng)
+ .unwrap();
+
+ };

- (leaf_hash_params, two_to_one_params)
+ ((), ())
}
12 changes: 6 additions & 6 deletions src/ntt/transpose.rs
@@ -54,9 +54,9 @@ fn transpose_copy<F: Sized + Copy + Send>(src: MatrixMut<F>, dst: MatrixMut<F>)

/// Sets `dst` to the transpose of `src`. This will panic if the sizes of `src` and `dst` are not compatible.
#[cfg(feature = "parallel")]
- fn transpose_copy_parallel<'a, 'b, F: Sized + Copy + Send>(
- src: MatrixMut<'a, F>,
- mut dst: MatrixMut<'b, F>,
+ fn transpose_copy_parallel<F: Sized + Copy + Send>(
+ src: MatrixMut<'_, F>,
+ mut dst: MatrixMut<'_, F>,
) {
assert_eq!(src.rows(), dst.cols());
assert_eq!(src.cols(), dst.rows());
@@ -85,9 +85,9 @@ fn transpose_copy_parallel<'a, 'b, F: Sized + Copy + Send>(

/// Sets `dst` to the transpose of `src`. This will panic if the sizes of `src` and `dst` are not compatible.
/// This is the non-parallel version
- fn transpose_copy_not_parallel<'a, 'b, F: Sized + Copy>(
- src: MatrixMut<'a, F>,
- mut dst: MatrixMut<'b, F>,
+ fn transpose_copy_not_parallel<F: Sized + Copy>(
+ src: MatrixMut<'_, F>,
+ mut dst: MatrixMut<'_, F>,
) {
assert_eq!(src.rows(), dst.cols());
assert_eq!(src.cols(), dst.rows());
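Both transpose helpers named lifetimes 'a and 'b that never relate the two arguments to each other or to a return value, so they can be elided with `'_`; this matches clippy's needless_lifetimes lint (inferred). A small sketch with a hypothetical `View` wrapper standing in for `MatrixMut`:

    // Hypothetical mutable view over a slice, standing in for MatrixMut<'a, F>.
    struct View<'a, T>(&'a mut [T]);

    // Before (flagged): fn fill<'a, T: Copy>(dst: View<'a, T>, value: T)
    // After: the named lifetime adds no information, so `'_` is enough.
    fn fill<T: Copy>(dst: View<'_, T>, value: T) {
        for slot in dst.0.iter_mut() {
            *slot = value;
        }
    }

    fn main() {
        let mut data = [0u8; 4];
        fill(View(&mut data), 7);
        assert_eq!(data, [7, 7, 7, 7]);
    }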
1 change: 0 additions & 1 deletion src/ntt/utils.rs
@@ -142,7 +142,6 @@ mod tests {
);
let should_not_work = std::panic::catch_unwind(|| {
as_chunks_exact_mut::<_, 2>(&mut [1, 2, 3]);
- return;
});
assert!(should_not_work.is_err())
}
6 changes: 3 additions & 3 deletions src/poly_utils/fold.rs
@@ -179,7 +179,7 @@ mod tests {

// Evaluate the polynomial on the domain
let domain_evaluations: Vec<_> = (0..domain_size)
- .map(|w| root_of_unity.pow([w as u64]))
+ .map(|w| root_of_unity.pow([w]))
.map(|point| {
poly.evaluate(&MultilinearPoint::expand_from_univariate(
point,
@@ -199,10 +199,10 @@
);

let num = domain_size / folding_factor_exp;
- let coset_gen_inv = root_of_unity_inv.pow(&[num]);
+ let coset_gen_inv = root_of_unity_inv.pow([num]);

for index in 0..num {
- let offset_inv = root_of_unity_inv.pow(&[index]);
+ let offset_inv = root_of_unity_inv.pow([index]);
let span =
(index * folding_factor_exp) as usize..((index + 1) * folding_factor_exp) as usize;

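`Field::pow` in ark-ff takes its exponent as a generic `S: AsRef<[u64]>`, so both `[w]` and `&[w]` are accepted; the borrow adds nothing and is what clippy's needless_borrows_for_generic_args lint reports (lint name inferred), and the `as u64` disappears because `w` already has the right type. A hedged sketch with a plain modular-exponentiation helper in place of the field operation:

    // Stand-in with the same calling shape as ark-ff's `pow<S: AsRef<[u64]>>(exp: S)`.
    fn pow_mod(base: u64, exp: impl AsRef<[u64]>, modulus: u64) -> u64 {
        let mut acc = 1u64;
        for &limb in exp.as_ref() {
            for _ in 0..limb {
                acc = acc * base % modulus;
            }
        }
        acc
    }

    fn main() {
        // Both calls compile because `[u64; 1]` and `&[u64; 1]` implement AsRef<[u64]>,
        // but the borrow in the second one is unnecessary.
        assert_eq!(pow_mod(2, [10u64], 1_000_003), 1024);
        assert_eq!(pow_mod(2, &[10u64], 1_000_003), 1024);
    }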
3 changes: 2 additions & 1 deletion src/poly_utils/sequential_lag_poly.rs
@@ -6,6 +6,7 @@ use super::{hypercube::BinaryHypercubePoint, MultilinearPoint};

/// There is an alternative (possibly more efficient) implementation that iterates over the x in Gray code ordering.
+ ///
/// LagrangePolynomialIterator for a given multilinear n-dimensional `point` iterates over pairs (x, y)
/// where x ranges over all possible {0,1}^n
/// and y equals the product y_1 * ... * y_n where
@@ -60,7 +61,7 @@ impl<F: Field> Iterator for LagrangePolynomialIterator<F> {
// Iterator implementation for the struct
fn next(&mut self) -> Option<Self::Item> {
// a) Check if this is the first iteration
- if self.last_position == None {
+ if self.last_position.is_none() {
// Initialize last position
self.last_position = Some(0);
// Return the top of the stack
2 changes: 1 addition & 1 deletion src/poly_utils/streaming_evaluation_helper.rs
@@ -37,7 +37,7 @@ impl<F: Field> Iterator for TermPolynomialIterator<F> {
// Iterator implementation for the struct
fn next(&mut self) -> Option<Self::Item> {
// a) Check if this is the first iteration
- if self.last_position == None {
+ if self.last_position.is_none() {
// Initialize last position
self.last_position = Some(0);
// Return the top of the stack
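Both iterators test for the first iteration by comparing an `Option` against `None` with `==`; `Option::is_none` states the intent directly and does not rely on the inner type being `PartialEq`. Clippy reports this as partialeq_to_none (inferred). Minimal sketch:

    fn main() {
        let last_position: Option<usize> = None;

        // Before: `if last_position == None { ... }`  (clippy: partialeq_to_none)
        // After: ask the Option itself.
        if last_position.is_none() {
            println!("first iteration");
        }
    }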
10 changes: 5 additions & 5 deletions src/sumcheck/mod.rs
@@ -99,7 +99,7 @@ mod tests {
// First, check that is sums to the right value over the hypercube
assert_eq!(poly_1.sum_over_hypercube(), claimed_value);

- let combination_randomness = vec![F::from(293), F::from(42)];
+ let combination_randomness = [F::from(293), F::from(42)];
let folding_randomness = MultilinearPoint(vec![F::from(335), F::from(222)]);

let new_eval_point = MultilinearPoint(vec![F::from(32); num_variables - folding_factor]);
@@ -146,7 +146,7 @@
let [epsilon_1, epsilon_2] = [F::from(15), F::from(32)];
let folding_randomness_1 = MultilinearPoint(vec![F::from(11), F::from(31)]);
let fold_point = MultilinearPoint(vec![F::from(31), F::from(15)]);
- let combination_randomness = vec![F::from(31), F::from(4999)];
+ let combination_randomness = [F::from(31), F::from(4999)];
let folding_randomness_2 = MultilinearPoint(vec![F::from(97), F::from(36)]);

let mut prover = SumcheckCore::new(
@@ -184,7 +184,7 @@
);

let full_folding =
- MultilinearPoint(vec![folding_randomness_2.0.clone(), folding_randomness_1.0].concat());
+ MultilinearPoint([folding_randomness_2.0.clone(), folding_randomness_1.0].concat());
let eval_coeff = folded_poly_1.fold(&folding_randomness_2).coeffs()[0];
assert_eq!(
sumcheck_poly_2.evaluate_at_point(&folding_randomness_2),
@@ -217,8 +217,8 @@
let fold_point_12 =
MultilinearPoint(vec![F::from(1231), F::from(15), F::from(4231), F::from(15)]);
let fold_point_2 = MultilinearPoint(vec![F::from(311), F::from(115)]);
- let combination_randomness_1 = vec![F::from(1289), F::from(3281), F::from(10921)];
- let combination_randomness_2 = vec![F::from(3281), F::from(3232)];
+ let combination_randomness_1 = [F::from(1289), F::from(3281), F::from(10921)];
+ let combination_randomness_2 = [F::from(3281), F::from(3232)];

let mut prover = SumcheckCore::new(
polynomial.clone(),
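The test-only `combination_randomness` values are only ever read through a slice, so heap-allocating them with `vec![...]` buys nothing over a fixed-size array; clippy's useless_vec lint suggests the array literal (inferred). The related change to `full_folding` swaps `vec![...]` for an array literal before `.concat()` for the same reason. Sketch:

    fn combine(randomness: &[u64]) -> u64 {
        randomness.iter().sum()
    }

    fn main() {
        // Before: `let combination_randomness = vec![293, 42];`  (clippy: useless_vec)
        // After: an array coerces to `&[u64]` at the call site just as well.
        let combination_randomness = [293u64, 42];
        assert_eq!(combine(&combination_randomness), 335);
    }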
2 changes: 1 addition & 1 deletion src/sumcheck/proof.rs
@@ -90,7 +90,7 @@ mod tests {
let num_evaluation_points = 3_usize.pow(num_variables as u32);
let evaluations = (0..num_evaluation_points as u64).map(F::from).collect();

- let poly = SumcheckPolynomial::new(evaluations, num_variables as usize);
+ let poly = SumcheckPolynomial::new(evaluations, num_variables);

for i in 0..num_evaluation_points {
let decomp = base_decomposition(i, 3, num_variables);
10 changes: 5 additions & 5 deletions src/utils.rs
@@ -121,11 +121,11 @@ mod tests {

#[test]
fn test_is_power_of_two() {
- assert_eq!(is_power_of_two(0), false);
- assert_eq!(is_power_of_two(1), true);
- assert_eq!(is_power_of_two(2), true);
- assert_eq!(is_power_of_two(3), false);
- assert_eq!(is_power_of_two(usize::MAX), false);
+ assert!(!is_power_of_two(0));
+ assert!(is_power_of_two(1));
+ assert!(is_power_of_two(2));
+ assert!(!is_power_of_two(3));
+ assert!(!is_power_of_two(usize::MAX));
}

#[test]
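Comparing a boolean to `true` or `false` inside `assert_eq!` is what clippy's bool_assert_comparison lint flags (inferred); plain `assert!` / `assert!(!...)` expresses the same check with a clearer failure message. A sketch with a stand-in power-of-two test (the crate's own helper in src/utils.rs is assumed to behave the same way):

    // Stand-in implementation, not the crate's actual function body.
    fn is_power_of_two(n: usize) -> bool {
        n != 0 && n & (n - 1) == 0
    }

    fn main() {
        // Before: assert_eq!(is_power_of_two(0), false);  (clippy: bool_assert_comparison)
        assert!(!is_power_of_two(0));
        assert!(is_power_of_two(1));
        assert!(is_power_of_two(2));
        assert!(!is_power_of_two(3));
        assert!(!is_power_of_two(usize::MAX));
    }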
4 changes: 0 additions & 4 deletions src/whir/fs_utils.rs
@@ -1,8 +1,4 @@
- use crate::domain::Domain;
use crate::utils::dedup;
- use crate::whir::parameters::{RoundConfig, WhirConfig};
- use ark_crypto_primitives::merkle_tree;
- use ark_ff::FftField;
use nimue::{ByteChallenges, ProofResult};

pub fn get_challenge_stir_queries<T>(
28 changes: 14 additions & 14 deletions src/whir/parameters.rs
@@ -198,7 +198,7 @@ where
final_pow_bits,
final_sumcheck_rounds,
final_folding_pow_bits,
- pow_strategy: PhantomData::default(),
+ pow_strategy: PhantomData,
fold_optimisation: whir_parameters.fold_optimisation,
final_log_inv_rate: log_inv_rate,
leaf_hash_params: whir_parameters.leaf_hash_params,
@@ -241,13 +241,13 @@ where
) -> f64 {
match soundness_type {
SoundnessType::ConjectureList => {
- let result = (num_variables + log_inv_rate) as f64 - log_eta;
- result
+
+ (num_variables + log_inv_rate) as f64 - log_eta
}
SoundnessType::ProvableList => {
let log_inv_sqrt_rate: f64 = log_inv_rate as f64 / 2.;
- let result = log_inv_sqrt_rate - (1. + log_eta);
- result
+
+ log_inv_sqrt_rate - (1. + log_eta)
}
SoundnessType::UniqueDecoding => 0.0,
}
@@ -385,7 +385,9 @@ where
num_queries: usize,
) -> f64 {
let num_queries = num_queries as f64;
- let bits_of_sec_queries = match soundness_type {
+
+
+ match soundness_type {
SoundnessType::UniqueDecoding => {
let rate = 1. / ((1 << log_inv_rate) as f64);
let denom = -(0.5 * (1. + rate)).log2();
@@ -394,9 +396,7 @@
}
SoundnessType::ProvableList => num_queries * 0.5 * log_inv_rate as f64,
SoundnessType::ConjectureList => num_queries * log_inv_rate as f64,
- };
-
- bits_of_sec_queries
+ }
}

pub fn rbr_soundness_queries_combination(
@@ -488,7 +488,7 @@ where
writeln!(
f,
"{:.1} bits -- (x{}) prox gaps: {:.1}, sumcheck: {:.1}, pow: {:.1}",
- prox_gaps_error.min(sumcheck_error) + self.starting_folding_pow_bits as f64,
+ prox_gaps_error.min(sumcheck_error) + self.starting_folding_pow_bits,
self.folding_factor,
prox_gaps_error,
sumcheck_error,
@@ -529,7 +529,7 @@ where
writeln!(
f,
"{:.1} bits -- query error: {:.1}, combination: {:.1}, pow: {:.1}",
- query_error.min(combination_error) + r.pow_bits as f64,
+ query_error.min(combination_error) + r.pow_bits,
query_error,
combination_error,
r.pow_bits,
@@ -553,7 +553,7 @@ where
writeln!(
f,
"{:.1} bits -- (x{}) prox gaps: {:.1}, sumcheck: {:.1}, pow: {:.1}",
- prox_gaps_error.min(sumcheck_error) + r.folding_pow_bits as f64,
+ prox_gaps_error.min(sumcheck_error) + r.folding_pow_bits,
self.folding_factor,
prox_gaps_error,
sumcheck_error,
@@ -571,7 +571,7 @@ where
writeln!(
f,
"{:.1} bits -- query error: {:.1}, pow: {:.1}",
- query_error + self.final_pow_bits as f64,
+ query_error + self.final_pow_bits,
query_error,
self.final_pow_bits,
)?;
@@ -581,7 +581,7 @@ where
writeln!(
f,
"{:.1} bits -- (x{}) combination: {:.1}, pow: {:.1}",
- combination_error + self.final_pow_bits as f64,
+ combination_error + self.final_pow_bits,
self.final_sumcheck_rounds,
combination_error,
self.final_folding_pow_bits,
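This file collects several lints: `PhantomData::default()` on a unit-like marker becomes plain `PhantomData` (default_constructed_unit_structs), `let result = ...; result` collapses into the trailing expression (let_and_return, also behind the bits_of_sec_queries change), and `as f64` casts disappear where the value is already an f64, such as the pow-bits fields (unnecessary_cast); all lint names are inferred from the diff. A compact sketch of the three patterns together, with made-up field and function names:

    use std::marker::PhantomData;

    struct Config<P> {
        pow_bits: f64, // already f64, so no `as f64` is needed at use sites
        pow_strategy: PhantomData<P>,
    }

    fn list_error_bits(num_variables: usize, log_inv_rate: usize, log_eta: f64) -> f64 {
        // Before: `let result = ...; result`  (clippy: let_and_return)
        (num_variables + log_inv_rate) as f64 - log_eta
    }

    fn main() {
        let cfg: Config<()> = Config {
            pow_bits: 16.0,
            // Before: `pow_strategy: PhantomData::default()`
            // (clippy: default_constructed_unit_structs)
            pow_strategy: PhantomData,
        };
        assert_eq!(list_error_bits(10, 2, 1.0) + cfg.pow_bits, 27.0);
    }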
4 changes: 1 addition & 3 deletions src/whir/prover.rs
@@ -15,11 +15,9 @@ use ark_crypto_primitives::merkle_tree::{Config, MerkleTree, MultiPath};
use ark_ff::FftField;
use ark_poly::EvaluationDomain;
use nimue::{
- plugins::ark::{FieldChallenges, FieldWriter},
- ByteChallenges, ByteWriter, Merlin, ProofResult,
+ plugins::ark::{FieldChallenges, FieldWriter}, ByteWriter, Merlin, ProofResult,
};
use nimue_pow::{self, PoWChallenge};
- use rand::{Rng, SeedableRng};

use crate::whir::fs_utils::get_challenge_stir_queries;
#[cfg(feature = "parallel")]
7 changes: 3 additions & 4 deletions src/whir/verifier.rs
@@ -5,18 +5,17 @@ use ark_ff::FftField;
use ark_poly::EvaluationDomain;
use nimue::{
plugins::ark::{FieldChallenges, FieldReader},
- Arthur, ByteChallenges, ByteReader, ProofError, ProofResult,
+ Arthur, ByteReader, ProofError, ProofResult,
};
use nimue_pow::{self, PoWChallenge};
- use rand::{Rng, SeedableRng};

use super::{parameters::WhirConfig, Statement, WhirProof};
use crate::whir::fs_utils::get_challenge_stir_queries;
use crate::{
parameters::FoldType,
poly_utils::{coeffs::CoefficientList, eq_poly_outside, fold::compute_fold, MultilinearPoint},
sumcheck::proof::SumcheckPolynomial,
- utils::{self, expand_randomness},
+ utils::{expand_randomness},
};

pub struct Verifier<F, MerkleConfig, PowStrategy>
@@ -335,7 +334,7 @@ where
.map(|(point, rand)| point * rand)
.sum();

- value = value + sum_of_claims;
+ value += sum_of_claims;
}

value
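Besides dropping now-unused imports (ByteChallenges, rand, the `self` in `utils::{...}`), the verifier change rewrites `value = value + sum_of_claims` as `value += sum_of_claims`, which is clippy's assign_op_pattern lint (inferred) and needs only an `AddAssign` impl on the value's type. Sketch:

    fn main() {
        let claimed: [u64; 3] = [3, 4, 5];
        let mut value: u64 = 10;
        let sum_of_claims: u64 = claimed.iter().sum();

        // Before: `value = value + sum_of_claims;`  (clippy: assign_op_pattern)
        value += sum_of_claims;
        assert_eq!(value, 22);
    }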
[Diffs for the remaining 2 of the 19 changed files were not loaded on this page.]
