Skip to content

Commit

Permalink
parallelize commitments and prep
Browse files Browse the repository at this point in the history
  • Loading branch information
alexander-camuto committed Aug 13, 2024
1 parent 65205fa commit 0a0dc34
Show file tree
Hide file tree
Showing 3 changed files with 119 additions and 99 deletions.
53 changes: 25 additions & 28 deletions halo2_proofs/src/plonk/evaluation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -604,7 +604,7 @@ impl<C: CurveAffine> Evaluator<C> {
#[cfg(feature = "mv-lookup")]
log::trace!(" - Lookups inv sum: {:?}", start.elapsed());

#[cfg(all(feature = "mv-lookup", feature = "precompute-coset"))]
#[cfg(all(feature = "mv-lookup"))]
let mut cosets: Vec<_> = {
let domain = &pk.vk.domain;
lookups
Expand All @@ -622,31 +622,25 @@ impl<C: CurveAffine> Evaluator<C> {
let start = std::time::Instant::now();
// Lookups
#[cfg(feature = "mv-lookup")]
for (n, lookup) in lookups.iter().enumerate() {
// Polynomials required for this lookup.
// Calculated here so these only have to be kept in memory for the short time
// they are actually needed.
let start = std::time::Instant::now();

#[cfg(feature = "precompute-coset")]
let (phi_coset, m_coset) = &cosets.remove(0);

#[cfg(not(feature = "precompute-coset"))]
let phi_coset = pk.vk.domain.coeff_to_extended(lookup.phi_poly.clone());
#[cfg(not(feature = "precompute-coset"))]
let m_coset = pk.vk.domain.coeff_to_extended(lookup.m_poly.clone());
parallelize(&mut values, |values, start| {
for (n, _lookup) in lookups.iter().enumerate() {
// Polynomials required for this lookup.
// Calculated here so these only have to be kept in memory for the short time
// they are actually needed.

#[cfg(feature = "precompute-coset")]
let (phi_coset, m_coset) = &cosets[n];

// Lookup constraints
/*
φ_i(X) = f_i(X) + α
τ(X) = t(X) + α
LHS = τ(X) * Π(φ_i(X)) * (ϕ(gX) - ϕ(X))
RHS = τ(X) * Π(φ_i(X)) * (∑ 1/(φ_i(X)) - m(X) / τ(X))
= (τ(X) * Π(φ_i(X)) * ∑ 1/(φ_i(X))) - Π(φ_i(X)) * m(X)
= Π(φ_i(X)) * (τ(X) * ∑ 1/(φ_i(X)) - m(X))
*/

// Lookup constraints
/*
φ_i(X) = f_i(X) + α
τ(X) = t(X) + α
LHS = τ(X) * Π(φ_i(X)) * (ϕ(gX) - ϕ(X))
RHS = τ(X) * Π(φ_i(X)) * (∑ 1/(φ_i(X)) - m(X) / τ(X))
= (τ(X) * Π(φ_i(X)) * ∑ 1/(φ_i(X))) - Π(φ_i(X)) * m(X)
= Π(φ_i(X)) * (τ(X) * ∑ 1/(φ_i(X)) - m(X))
*/
let start = std::time::Instant::now();
parallelize(&mut values, |values, start| {
let (inputs_lookup_evaluator, table_lookup_evaluator) = &self.lookups[n];
let mut inputs_eval_data: Vec<_> = inputs_lookup_evaluator
.iter()
Expand Down Expand Up @@ -732,9 +726,12 @@ impl<C: CurveAffine> Evaluator<C> {
// q(X) = LHS - RHS mod zH(X)
*value = *value * y + (lhs - rhs) * l_active_row[idx];
}
});
log::trace!(" - Lookups constraints: {:?}", start.elapsed());
}
}
});

// delete the cosets
#[cfg(feature = "mv-lookup")]
cosets.clear();

#[cfg(all(not(feature = "mv-lookup"), feature = "precompute-coset"))]
let mut cosets: Vec<_> = {
Expand Down
123 changes: 64 additions & 59 deletions halo2_proofs/src/plonk/mv_lookup/prover.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use super::super::{
circuit::Expression, ChallengeBeta, ChallengeTheta, ChallengeX, Error, ProvingKey,
circuit::Expression, ChallengeBeta, ChallengeTheta, ChallengeX, Error, ProvingKey, VerifyingKey,
};
use super::Argument;
use crate::helpers::SerdeCurveAffine;
Expand Down Expand Up @@ -34,6 +34,7 @@ pub(in crate::plonk) struct Prepared<C: CurveAffine> {
compressed_inputs_expressions: Vec<Polynomial<C::Scalar, LagrangeCoeff>>,
compressed_table_expression: Polynomial<C::Scalar, LagrangeCoeff>,
m_values: Polynomial<C::Scalar, LagrangeCoeff>,
pub(in crate::plonk) commitment: C,
}

#[derive(Debug)]
Expand Down Expand Up @@ -78,39 +79,30 @@ pub(in crate::plonk) struct Evaluated<C: CurveAffine> {
}

impl<F: WithSmallOrderMulGroup<3>> Argument<F> {
pub(in crate::plonk) fn prepare<
'a,
'params: 'a,
C,
P: Params<'params, C>,
E: EncodedChallenge<C>,
R: RngCore,
T: TranscriptWrite<C, E>,
>(
pub(in crate::plonk) fn prepare<'a, 'params: 'a, C, P: Params<'params, C>>(
&self,
pk: &ProvingKey<C>,
vk: &VerifyingKey<C>,
params: &P,
domain: &EvaluationDomain<C::Scalar>,
theta: ChallengeTheta<C>,
advice_values: &'a [Polynomial<C::Scalar, LagrangeCoeff>],
fixed_values: &'a [Polynomial<C::Scalar, LagrangeCoeff>],
instance_values: &'a [Polynomial<C::Scalar, LagrangeCoeff>],
challenges: &'a [C::Scalar],
_rng: R, // in case we want to blind (do we actually need zk?)
transcript: &mut T,
) -> Result<Prepared<C>, Error>
where
C: CurveAffine<ScalarExt = F>,
C::Curve: Mul<F, Output = C::Curve> + MulAssign<F>,
{
let n = params.n() as usize;
// Closure to get values of expressions and compress them
let compress_expressions = |expressions: &[Expression<C::Scalar>]| {
let compressed_expression = expressions
.iter()
.map(|expression| {
pk.vk.domain.lagrange_from_vec(evaluate(
vk.domain.lagrange_from_vec(evaluate(
expression,
params.n() as usize,
n,
1,
fixed_values,
advice_values,
Expand All @@ -128,7 +120,7 @@ impl<F: WithSmallOrderMulGroup<3>> Argument<F> {
// Get values of input expressions involved in the lookup and compress them
let compressed_inputs_expressions: Vec<_> = self
.inputs_expressions
.iter()
.par_iter()
.map(|input_expressions| compress_expressions(input_expressions))
.collect();
log::trace!("compressed_inputs_expressions {:?}", start.elapsed());
Expand All @@ -138,13 +130,15 @@ impl<F: WithSmallOrderMulGroup<3>> Argument<F> {
let compressed_table_expression = compress_expressions(&self.table_expressions);
log::trace!("compressed_table_expression {:?}", start.elapsed());

let blinding_factors = pk.vk.cs.blinding_factors();
let blinding_factors = vk.cs.blinding_factors();

let chunk_size = n - blinding_factors - 1;

// compute m(X)
let start = std::time::Instant::now();
let table_index_value_mapping: HashMap<Vec<u8>, usize> = compressed_table_expression
.iter()
.take(params.n() as usize - blinding_factors - 1)
.par_iter()
.take(chunk_size)
.enumerate()
.map(|(i, &x)| (x.to_repr().as_ref().to_owned(), i))
.collect();
Expand All @@ -155,34 +149,33 @@ impl<F: WithSmallOrderMulGroup<3>> Argument<F> {
use std::sync::atomic::{AtomicU64, Ordering};
let m_values: Vec<AtomicU64> = (0..params.n()).map(|_| AtomicU64::new(0)).collect();

for compressed_input_expression in compressed_inputs_expressions.iter() {
let res: Result<(), Error> = compressed_input_expression
.par_iter()
.take(params.n() as usize - blinding_factors - 1)
.map(|fi| {
let index = match table_index_value_mapping
.get(&fi.to_repr().as_ref().to_owned())
{
Some(value) => value,
None => {
log::error!("value is OOR of lookup");
return Err(Error::Synthesis);
}
};
m_values[*index].fetch_add(1, Ordering::Relaxed);
Ok(())
})
.collect();
res?
}
compressed_inputs_expressions
.par_iter()
.for_each(|compressed_input_expression| {
compressed_input_expression
.iter()
.take(chunk_size)
.for_each(|fi| {
let index = match table_index_value_mapping
.get(&fi.to_repr().as_ref().to_owned())
{
Some(value) => value,
None => {
log::error!("value is OOR of lookup");
return;
}
};
m_values[*index].fetch_add(1, Ordering::Relaxed);
});
});

m_values
.par_iter()
.map(|mi| F::from(mi.load(Ordering::Relaxed)))
.collect()
};
log::trace!("m_values {:?}", start.elapsed());
let m_values = pk.vk.domain.lagrange_from_vec(m_values);
let m_values = vk.domain.lagrange_from_vec(m_values);

#[cfg(feature = "sanity-checks")]
{
Expand Down Expand Up @@ -232,30 +225,23 @@ impl<F: WithSmallOrderMulGroup<3>> Argument<F> {
log::trace!("m_commitment {:?}", start.elapsed());

// write commitment of m(X) to transcript
transcript.write_point(m_commitment)?;
// transcript.write_point(m_commitment)?;

Ok(Prepared {
compressed_inputs_expressions,
compressed_table_expression,
m_values,
commitment: m_commitment,
})
}
}

impl<C: CurveAffine> Prepared<C> {
pub(in crate::plonk) fn commit_grand_sum<
'params,
P: Params<'params, C>,
E: EncodedChallenge<C>,
R: RngCore,
T: TranscriptWrite<C, E>,
>(
pub(in crate::plonk) fn commit_grand_sum<'params, P: Params<'params, C>>(
self,
pk: &ProvingKey<C>,
vk: &VerifyingKey<C>,
params: &P,
beta: ChallengeBeta<C>,
mut rng: R,
transcript: &mut T,
) -> Result<Committed<C>, Error> {
/*
φ_i(X) = f_i(X) + α
Expand All @@ -264,6 +250,7 @@ impl<C: CurveAffine> Prepared<C> {
RHS = τ(X) * Π(φ_i(X)) * (∑ 1/(φ_i(X)) - m(X) / τ(X))
*/

let start = std::time::Instant::now();
// ∑ 1/(φ_i(X))
let mut inputs_log_derivatives = vec![C::Scalar::ZERO; params.n() as usize];
for compressed_input_expression in self.compressed_inputs_expressions.iter() {
Expand All @@ -288,6 +275,9 @@ impl<C: CurveAffine> Prepared<C> {
}
}

log::trace!(" - inputs_log_derivatives {:?}", start.elapsed());

let start = std::time::Instant::now();
// 1 / τ(X)
let mut table_log_derivatives = vec![C::Scalar::ZERO; params.n() as usize];
parallelize(
Expand All @@ -302,8 +292,16 @@ impl<C: CurveAffine> Prepared<C> {
},
);

log::trace!(" - table_log_derivatives {:?}", start.elapsed());

let start = std::time::Instant::now();
table_log_derivatives.iter_mut().batch_invert();
log::trace!(
" - table_log_derivatives batch_invert {:?}",
start.elapsed()
);

let start = std::time::Instant::now();
// (Σ 1/(φ_i(X)) - m(X) / τ(X))
let mut log_derivatives_diff = vec![C::Scalar::ZERO; params.n() as usize];
parallelize(&mut log_derivatives_diff, |log_derivatives_diff, start| {
Expand All @@ -318,9 +316,12 @@ impl<C: CurveAffine> Prepared<C> {
}
});

log::trace!(" - log_derivatives_diff {:?}", start.elapsed());

let start = std::time::Instant::now();
// Compute the evaluations of the lookup grand sum polynomial
// over our domain, starting with phi[0] = 0
let blinding_factors = pk.vk.cs.blinding_factors();
let blinding_factors = vk.cs.blinding_factors();
let phi = iter::once(C::Scalar::ZERO)
.chain(log_derivatives_diff)
.scan(C::Scalar::ZERO, |state, cur| {
Expand All @@ -331,10 +332,12 @@ impl<C: CurveAffine> Prepared<C> {
// be a 0
.take(params.n() as usize - blinding_factors)
// Chain random blinding factors.
.chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng)))
.chain((0..blinding_factors).map(|_| C::Scalar::ZERO))
.collect::<Vec<_>>();
assert_eq!(phi.len(), params.n() as usize);
let phi = pk.vk.domain.lagrange_from_vec(phi);
let phi = vk.domain.lagrange_from_vec(phi);

log::trace!(" - phi {:?}", start.elapsed());

#[cfg(feature = "sanity-checks")]
// This test works only with intermediate representations in this method.
Expand Down Expand Up @@ -396,14 +399,16 @@ impl<C: CurveAffine> Prepared<C> {
}

let grand_sum_blind = Blind(C::Scalar::ZERO);
let start = std::time::Instant::now();
let phi_commitment = params.commit_lagrange(&phi, grand_sum_blind).to_affine();
log::trace!(" - phi_commitment {:?}", start.elapsed());

// Hash grand sum commitment
transcript.write_point(phi_commitment)?;
// transcript.write_point(phi_commitment)?;

Ok(Committed {
m_poly: pk.vk.domain.lagrange_to_coeff(self.m_values),
phi_poly: pk.vk.domain.lagrange_to_coeff(phi),
m_poly: vk.domain.lagrange_to_coeff(self.m_values),
phi_poly: vk.domain.lagrange_to_coeff(phi),
commitment: phi_commitment,
})
}
Expand All @@ -412,11 +417,11 @@ impl<C: CurveAffine> Prepared<C> {
impl<C: CurveAffine> Committed<C> {
pub(in crate::plonk) fn evaluate<E: EncodedChallenge<C>, T: TranscriptWrite<C, E>>(
self,
pk: &ProvingKey<C>,
vk: &VerifyingKey<C>,
x: ChallengeX<C>,
transcript: &mut T,
) -> Result<Evaluated<C>, Error> {
let domain = &pk.vk.domain;
let domain = &vk.domain;
let x_next = domain.rotate_omega(*x, Rotation::next());

let phi_eval = eval_polynomial(&self.phi_poly, *x);
Expand Down
Loading

0 comments on commit 0a0dc34

Please sign in to comment.