Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add STARK batching #388

Draft
wants to merge 24 commits into
base: feat/continuations
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
292 changes: 139 additions & 153 deletions Cargo.lock

Large diffs are not rendered by default.

8 changes: 4 additions & 4 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -113,10 +113,10 @@ rpc = { path = "zero_bin/rpc" }
zero_bin_common = { path = "zero_bin/common" }

# plonky2-related dependencies
plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "dc77c77f2b06500e16ad4d7f1c2b057903602eed" }
plonky2_maybe_rayon = "0.2.0"
plonky2_util = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "dc77c77f2b06500e16ad4d7f1c2b057903602eed" }
starky = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "dc77c77f2b06500e16ad4d7f1c2b057903602eed" }
# NOTE(review): local path dependencies only build inside a checkout that has
# `plonky2` cloned as a sibling directory — revert to the pinned git revisions
# before merging. TODO confirm these are intended for local development only.
plonky2 = { path = "../plonky2/plonky2"}
plonky2_maybe_rayon = { path = "../plonky2/maybe_rayon"}
plonky2_util = { path = "../plonky2/util"}
starky = { path = "../plonky2/starky"}

# proc macro related dependencies
proc-macro2 = "1.0"
Expand Down
102 changes: 102 additions & 0 deletions evm_arithmetization/src/all_stark.rs
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,26 @@ impl Deref for Table {
/// Number of STARK tables.
pub(crate) const NUM_TABLES: usize = Table::MemAfter as usize + 1;

/// Per-table trace size, indexed by `Table` discriminant order (see
/// `Table::all()`).
/// NOTE(review): these look like *log2* sizes, not raw degrees — traces are
/// padded to `1 << ALL_DEGREE_LOGS[..]` downstream. TODO confirm and consider
/// renaming to `TABLE_DEGREE_LOGS`.
pub(crate) const TABLE_DEGREES: [usize; NUM_TABLES] = [
18, // Arithmetic
18, // BytePacking,
18, // Cpu,
14, // Keccak,
14, // KeccakSponge,
18, // Logic,
18, // Memory,
18, // MemBefore,
15, // MemAfter,
];

/// All tables, in descending order of padded trace degree.
pub(crate) const ALL_SORTED_TABLES: [Table; NUM_TABLES] = Table::all_sorted();

/// Maps a `Table` discriminant to its position in `ALL_SORTED_TABLES`
/// (the inverse permutation of `Table::all_sorted()`).
pub(crate) const TABLE_TO_SORTED_INDEX: [usize; NUM_TABLES] = Table::table_to_sorted_index();

/// `(group, offset)` position of each sorted table in the batched Merkle
/// tree; tables with equal degree share a group.
pub(crate) const SORTED_INDEX_PAIR: [(usize, usize); NUM_TABLES] = Table::sorted_index_pair();

/// Degree logs of all tables, in the same (descending) order as
/// `ALL_SORTED_TABLES`.
pub(crate) const ALL_DEGREE_LOGS: [usize; NUM_TABLES] = Table::all_degree_logs();

impl Table {
/// Returns all STARK table indices.
pub(crate) const fn all() -> [Self; NUM_TABLES] {
Expand All @@ -120,6 +140,88 @@ impl Table {
Self::MemAfter,
]
}

/// Returns all STARK table indices in descending order of their padded
/// trace degrees.
/// Returns all STARK table indices in descending order of their padded
/// trace degrees. The sort is stable: tables with equal degree keep their
/// `Table::all()` order, which downstream indexing relies on.
const fn all_sorted() -> [Self; NUM_TABLES] {
    // Pair each table with its degree so we can sort by degree only.
    let mut pairs = [(0, Table::Arithmetic); NUM_TABLES];
    let mut idx = 0;
    while idx < NUM_TABLES {
        pairs[idx] = (TABLE_DEGREES[idx], Self::all()[idx]);
        idx += 1;
    }

    // Stable insertion sort (descending). Const-fn friendly: no iterators,
    // only `while` loops and `Copy` moves. Strict `<` keeps equal keys in
    // their original relative order.
    let mut i = 1;
    while i < NUM_TABLES {
        let current = pairs[i];
        let mut j = i;
        while j > 0 && pairs[j - 1].0 < current.0 {
            pairs[j] = pairs[j - 1];
            j -= 1;
        }
        pairs[j] = current;
        i += 1;
    }

    // Strip the degrees, keeping only the table order.
    let mut tables = [Table::Arithmetic; NUM_TABLES];
    let mut k = 0;
    while k < NUM_TABLES {
        tables[k] = pairs[k].1;
        k += 1;
    }

    tables
}

/// Returns the ordered position of the tables. This is the inverse of
/// `all_sorted()`.
/// Returns the ordered position of the tables. This is the inverse
/// permutation of `all_sorted()`.
const fn table_to_sorted_index() -> [usize; NUM_TABLES] {
    // Hoist the sorted order out of the loop; `all_sorted()` is a pure
    // const fn, so this is purely a compile-time saving.
    let sorted = Self::all_sorted();
    let mut positions = [0; NUM_TABLES];
    let mut pos = 0;
    while pos < NUM_TABLES {
        positions[sorted[pos] as usize] = pos;
        pos += 1;
    }

    positions
}

/// Returns the ordered position of the tables in a batch Merkle tree. Each
/// entry is a couple to account for duplicate sizes.
/// Returns the ordered position of the tables in a batch Merkle tree.
/// Each entry is a `(group, offset)` pair: a new group starts whenever the
/// degree log strictly decreases, so tables of equal size share a group.
const fn sorted_index_pair() -> [(usize, usize); NUM_TABLES] {
    let degree_logs = Self::all_degree_logs();
    let mut res = [(0, 0); NUM_TABLES];

    let mut group = 0;
    let mut offset = 0;
    let mut k = 1;
    while k < NUM_TABLES {
        if degree_logs[k] < degree_logs[k - 1] {
            // Strictly smaller degree: open a new group.
            group += 1;
            offset = 0;
        } else {
            // Same degree as the previous table: advance within the group.
            offset += 1;
        }
        res[k] = (group, offset);
        k += 1;
    }

    res
}

/// Returns all STARK padded trace degrees in descending order.
/// Returns the padded trace degree logs of all tables, in the same
/// descending order as `all_sorted()`.
const fn all_degree_logs() -> [usize; NUM_TABLES] {
    let sorted = Self::all_sorted();
    let mut logs = [0; NUM_TABLES];
    let mut k = 0;
    while k < NUM_TABLES {
        // `TABLE_DEGREES` is indexed by table discriminant, so map the k-th
        // sorted table back to its discriminant to look up its size.
        logs[k] = TABLE_DEGREES[sorted[k] as usize];
        k += 1;
    }

    logs
}
}

/// Returns all the `CrossTableLookups` used for proving the EVM.
Expand Down
10 changes: 8 additions & 2 deletions evm_arithmetization/src/arithmetic/arithmetic_stark.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ use static_assertions::const_assert;

use super::columns::{op_flags, NUM_ARITH_COLUMNS};
use super::shift;
use crate::all_stark::{EvmStarkFrame, Table};
use crate::all_stark::{EvmStarkFrame, Table, ALL_DEGREE_LOGS, TABLE_TO_SORTED_INDEX};
use crate::arithmetic::columns::{NUM_SHARED_COLS, RANGE_COUNTER, RC_FREQUENCIES, SHARED_COLS};
use crate::arithmetic::{addcy, byte, columns, divmod, modular, mul, Operation};

Expand Down Expand Up @@ -178,7 +178,13 @@ impl<F: RichField, const D: usize> ArithmeticStark<F, D> {
// Pad the trace with zero rows if it doesn't have enough rows
// to accommodate the range check columns. Also make sure the
// trace length is a power of two.
let padded_len = trace_rows.len().next_power_of_two();
let padded_len = 1 << ALL_DEGREE_LOGS[TABLE_TO_SORTED_INDEX[*Table::Arithmetic]];
assert!(
padded_len >= trace_rows.len(),
"Padded length {:?} is smaller than actual trace length {:?}",
padded_len,
trace_rows.len()
);
for _ in trace_rows.len()..std::cmp::max(padded_len, RANGE_MAX) {
trace_rows.push(vec![F::ZERO; columns::NUM_ARITH_COLUMNS]);
}
Expand Down
101 changes: 101 additions & 0 deletions evm_arithmetization/src/batch_proof.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
use ethereum_types::{Address, H256, U256};
use plonky2::field::extension::Extendable;
use plonky2::fri::proof::FriProof;
use plonky2::hash::hash_types::{HashOutTarget, MerkleCapTarget, RichField, NUM_HASH_OUT_ELTS};
use plonky2::iop::challenger::Challenger;
use plonky2::iop::target::{BoolTarget, Target};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::GenericConfig;
use plonky2::util::serialization::{Buffer, IoResult, Read, Write};
use serde::{Deserialize, Serialize};
use starky::batch_proof::BatchStarkProof;
use starky::config::StarkConfig;
use starky::lookup::{get_grand_product_challenge_set, GrandProductChallengeSet};
use starky::proof::{MultiProof, StarkProofChallenges};

use crate::all_stark::NUM_TABLES;
use crate::get_challenges::observe_public_values;
use crate::proof::PublicValues;
use crate::util::{get_h160, get_h256, get_u256, h2u};
use crate::witness::errors::ProgramError;
use crate::witness::state::RegistersState;

/// A batched STARK proof for all tables, plus some metadata used to create
/// recursive wrapper proof.
#[derive(Debug, Clone)]
pub struct EvmProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
    /// A multi-proof containing all proofs for the different STARK modules and
    /// their cross-table lookup challenges, batched into a single
    /// `BatchStarkProof` over `NUM_TABLES` tables.
    pub batch_proof: BatchStarkProof<F, C, D, NUM_TABLES>,
    /// Public memory values used for the recursive proofs.
    pub public_values: PublicValues,
}

impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> EvmProof<F, C, D> {
    /// Returns the degree (in bits) of the batched STARK proof, recovered
    /// from its FRI opening proof.
    pub fn degree_bits(&self, config: &StarkConfig) -> usize {
        self.batch_proof.recover_degree_bits(config)
    }

    /// Computes all Fiat-Shamir challenges used in the STARK proof.
    ///
    /// The challenger observes, in order: the trace cap, the public values,
    /// the auxiliary polynomials cap and the quotient polynomials cap, then
    /// every opening set, deriving the CTL/lookup challenges, the `alpha`s,
    /// `zeta` and the FRI challenges along the way. This must mirror the
    /// prover's transcript exactly.
    ///
    /// # Errors
    /// Returns an error if the public values cannot be converted for
    /// observation by the challenger.
    pub(crate) fn get_challenges(
        &self,
        config: &StarkConfig,
    ) -> Result<StarkProofChallenges<F, D>, anyhow::Error> {
        let mut challenger = Challenger::<F, C::Hasher>::new();

        challenger.observe_cap(&self.batch_proof.trace_cap);
        observe_public_values::<F, C, D>(&mut challenger, &self.public_values)
            .map_err(|_| anyhow::Error::msg("Invalid conversion of public values."))?;

        // Cross-table lookup challenges are drawn right after the public
        // values have been observed.
        let ctl_challenges =
            get_grand_product_challenge_set(&mut challenger, config.num_challenges);

        // `as_ref().expect(..)` already yields `&MerkleCap`; the extra `&`
        // the original had was a needless double borrow.
        challenger.observe_cap(
            self.batch_proof
                .auxiliary_polys_cap
                .as_ref()
                .expect("No auxiliary cap?"),
        );
        let stark_alphas = challenger.get_n_challenges(config.num_challenges);

        challenger.observe_cap(
            self.batch_proof
                .quotient_polys_cap
                .as_ref()
                .expect("No quotient cap?"),
        );
        let stark_zeta = challenger.get_extension_challenge::<D>();

        for opening in &self.batch_proof.openings {
            challenger.observe_openings(&opening.to_fri_openings());
        }

        let FriProof {
            commit_phase_merkle_caps,
            final_poly,
            pow_witness,
            ..
        } = &self.batch_proof.opening_proof;
        let degree_bits = self.degree_bits(config);

        let fri_challenges = challenger.fri_challenges::<C, D>(
            commit_phase_merkle_caps,
            final_poly,
            *pow_witness,
            degree_bits,
            &config.fri_config,
        );

        Ok(StarkProofChallenges {
            // The CTL challenge set also contains the lookup challenges.
            lookup_challenge_set: Some(ctl_challenges),
            stark_alphas,
            stark_zeta,
            fri_challenges,
        })
    }
}
Loading