diff --git a/Cargo.lock b/Cargo.lock index b4daeaaad..0896f872a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3573,8 +3573,6 @@ checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" [[package]] name = "plonky2" version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85f26b090b989aebdeaf6a4eed748c1fbcabf67e7273a22e4e0c877b63846d0f" dependencies = [ "ahash", "anyhow", @@ -3598,8 +3596,6 @@ dependencies = [ [[package]] name = "plonky2_field" version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a1dca60ad900d81b1fe2df3d0b88d43345988e2935e6709176e96573f4bcf5d" dependencies = [ "anyhow", "itertools 0.11.0", @@ -3614,8 +3610,6 @@ dependencies = [ [[package]] name = "plonky2_maybe_rayon" version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92ff44a90aaca13e10e7ddf8fab815ba1b404c3f7c3ca82aaf11c46beabaa923" dependencies = [ "rayon", ] @@ -3623,8 +3617,6 @@ dependencies = [ [[package]] name = "plonky2_util" version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16136f5f3019c1e83035af76cccddd56d789a5e2933306270185c3f99f12259" [[package]] name = "plotters" @@ -4651,8 +4643,6 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "starky" version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a8480ca5b8eedf83ad070a780783b4e21a56c6ef66b4c0d1b7520b72bdfda1b" dependencies = [ "ahash", "anyhow", diff --git a/Cargo.toml b/Cargo.toml index 1ff61b81d..40ca36dbe 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -112,10 +112,10 @@ rpc = { path = "zero_bin/rpc" } zero_bin_common = { path = "zero_bin/common" } # plonky2-related dependencies -plonky2 = "0.2.2" -plonky2_maybe_rayon = "0.2.0" -plonky2_util = "0.2.0" -starky = "0.4.0" +plonky2 = { path = "../plonky2/plonky2"} +plonky2_maybe_rayon = { path = 
"../plonky2/maybe_rayon"} +plonky2_util = { path = "../plonky2/util"} +starky = { path = "../plonky2/starky"} # proc macro related dependencies proc-macro2 = "1.0" diff --git a/evm_arithmetization/src/all_stark.rs b/evm_arithmetization/src/all_stark.rs index f8422b300..e56fe6589 100644 --- a/evm_arithmetization/src/all_stark.rs +++ b/evm_arithmetization/src/all_stark.rs @@ -108,6 +108,31 @@ impl Table { Self::Memory, ] } + + /// Returns all STARK table indices in descending order of their padded + /// trace degrees. + pub(crate) const fn all_sorted() -> [Self; NUM_TABLES] { + [ + Self::Memory, + Self::Cpu, + Self::Arithmetic, + Self::BytePacking, + Self::Keccak, + Self::Logic, + Self::KeccakSponge, + ] + } + + /// Returns the ordered position of the tables. This is the inverse of + /// `all_sorted()`. + pub(crate) const fn table_to_sorted_index() -> [usize; NUM_TABLES] { + [2, 3, 1, 4, 6, 5, 0] + } + + /// Returns all STARK padded trace degrees in descending order. + pub(crate) const fn all_degree_logs() -> [usize; NUM_TABLES] { + [27, 24, 22, 20, 19, 17, 14] + } } /// Returns all the `CrossTableLookups` used for proving the EVM. 
diff --git a/evm_arithmetization/src/cpu/kernel/tests/bignum/mod.rs b/evm_arithmetization/src/cpu/kernel/tests/bignum/mod.rs index c18ad5f76..0bd6ec14f 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/bignum/mod.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/bignum/mod.rs @@ -9,7 +9,6 @@ use itertools::Itertools; use num::{BigUint, One, Zero}; use num_bigint::RandBigInt; use plonky2::field::goldilocks_field::GoldilocksField as F; -use plonky2_util::ceil_div_usize; use rand::Rng; use crate::cpu::kernel::aggregator::KERNEL; @@ -90,7 +89,7 @@ fn max_bignum(bit_size: usize) -> BigUint { } fn bignum_len(a: &BigUint) -> usize { - ceil_div_usize(a.bits() as usize, BIGNUM_LIMB_BITS) + (a.bits() as usize).div_ceil(BIGNUM_LIMB_BITS) } fn run_test(fn_label: &str, memory: Vec, stack: Vec) -> Result<(Vec, Vec)> { diff --git a/evm_arithmetization/src/cpu/kernel/utils.rs b/evm_arithmetization/src/cpu/kernel/utils.rs index adda086e8..082086d17 100644 --- a/evm_arithmetization/src/cpu/kernel/utils.rs +++ b/evm_arithmetization/src/cpu/kernel/utils.rs @@ -1,7 +1,6 @@ use core::fmt::Debug; use ethereum_types::U256; -use plonky2_util::ceil_div_usize; /// Enumerate the length `W` windows of `vec`, and run `maybe_replace` on each /// one. @@ -28,7 +27,7 @@ where } pub(crate) fn u256_to_trimmed_be_bytes(u256: &U256) -> Vec { - let num_bytes = ceil_div_usize(u256.bits(), 8); + let num_bytes = u256.bits().div_ceil(8); // `byte` is little-endian, so we manually reverse it. (0..num_bytes).rev().map(|i| u256.byte(i)).collect() } diff --git a/evm_arithmetization/src/fixed_recursive_verifier.rs b/evm_arithmetization/src/fixed_recursive_verifier.rs index ed62014f5..cac2da046 100644 --- a/evm_arithmetization/src/fixed_recursive_verifier.rs +++ b/evm_arithmetization/src/fixed_recursive_verifier.rs @@ -587,26 +587,28 @@ where // Extra sums to add to the looked last value. // Only necessary for the Memory values. 
- let mut extra_looking_sums = - vec![vec![builder.zero(); stark_config.num_challenges]; NUM_TABLES]; + let mut extra_looking_sums = HashMap::new(); // Memory - extra_looking_sums[*Table::Memory] = (0..stark_config.num_challenges) - .map(|c| { - get_memory_extra_looking_sum_circuit( - &mut builder, - &public_values, - ctl_challenges.challenges[c], - ) - }) - .collect_vec(); + extra_looking_sums.insert( + Table::Memory as usize, + (0..stark_config.num_challenges) + .map(|c| { + get_memory_extra_looking_sum_circuit( + &mut builder, + &public_values, + ctl_challenges.challenges[c], + ) + }) + .collect_vec(), + ); // Verify the CTL checks. verify_cross_table_lookups_circuit::( &mut builder, all_cross_table_lookups(), pis.map(|p| p.ctl_zs_first), - Some(&extra_looking_sums), + &extra_looking_sums, stark_config, ); diff --git a/evm_arithmetization/src/keccak_sponge/keccak_sponge_stark.rs b/evm_arithmetization/src/keccak_sponge/keccak_sponge_stark.rs index 131e69560..27c1ebef4 100644 --- a/evm_arithmetization/src/keccak_sponge/keccak_sponge_stark.rs +++ b/evm_arithmetization/src/keccak_sponge/keccak_sponge_stark.rs @@ -4,6 +4,7 @@ use core::marker::PhantomData; use core::mem::size_of; use itertools::Itertools; +use num::integer::div_ceil; use plonky2::field::extension::{Extendable, FieldExtension}; use plonky2::field::packed::PackedField; use plonky2::field::polynomial::PolynomialValues; @@ -13,7 +14,7 @@ use plonky2::iop::ext_target::ExtensionTarget; use plonky2::timed; use plonky2::util::timing::TimingTree; use plonky2::util::transpose; -use plonky2_util::ceil_div_usize; +// use plonky2_util::ceil_div_usize; use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use starky::evaluation_frame::StarkEvaluationFrame; use starky::lookup::{Column, Filter, Lookup}; @@ -137,7 +138,7 @@ pub(crate) fn ctl_looking_memory(i: usize) -> Vec> { /// Returns the number of `KeccakSponge` tables looking into the `LogicStark`. 
pub(crate) const fn num_logic_ctls() -> usize { const U8S_PER_CTL: usize = 32; - ceil_div_usize(KECCAK_RATE_BYTES, U8S_PER_CTL) + KECCAK_RATE_BYTES.div_ceil(U8S_PER_CTL) } /// Creates the vector of `Columns` required to perform the `i`th logic CTL. diff --git a/evm_arithmetization/src/logic.rs b/evm_arithmetization/src/logic.rs index d411a4482..eae7c972b 100644 --- a/evm_arithmetization/src/logic.rs +++ b/evm_arithmetization/src/logic.rs @@ -11,7 +11,6 @@ use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::timed; use plonky2::util::timing::TimingTree; -use plonky2_util::ceil_div_usize; use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use starky::evaluation_frame::StarkEvaluationFrame; use starky::lookup::{Column, Filter}; @@ -29,7 +28,7 @@ const VAL_BITS: usize = 256; pub(crate) const PACKED_LIMB_BITS: usize = 32; /// Number of field elements needed to store each input/output at the specified /// packing. -const PACKED_LEN: usize = ceil_div_usize(VAL_BITS, PACKED_LIMB_BITS); +const PACKED_LEN: usize = VAL_BITS.div_ceil(PACKED_LIMB_BITS); /// `LogicStark` columns. 
pub(crate) mod columns { diff --git a/evm_arithmetization/src/prover.rs b/evm_arithmetization/src/prover.rs index f6c40cf89..d96e4571c 100644 --- a/evm_arithmetization/src/prover.rs +++ b/evm_arithmetization/src/prover.rs @@ -1,30 +1,64 @@ +use std::iter::once; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; -use anyhow::{anyhow, Result}; +use anyhow::{anyhow, ensure, Result}; use itertools::Itertools; use once_cell::sync::Lazy; +use plonky2::batch_fri::oracle::BatchFriOracle; use plonky2::field::extension::Extendable; -use plonky2::field::polynomial::PolynomialValues; +use plonky2::field::packable::Packable; +use plonky2::field::packed::PackedField; +use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues}; +use plonky2::field::types::Field; use plonky2::fri::oracle::PolynomialBatch; +use plonky2::fri::reduction_strategies::FriReductionStrategy; +use plonky2::fri::structure::FriInstanceInfo; +use plonky2::fri::FriConfig; use plonky2::hash::hash_types::RichField; use plonky2::iop::challenger::Challenger; use plonky2::plonk::config::GenericConfig; +use plonky2::plonk::proof::ProofWithPublicInputs; use plonky2::timed; use plonky2::util::timing::TimingTree; use starky::config::StarkConfig; -use starky::cross_table_lookup::{get_ctl_data, CtlData}; -use starky::lookup::GrandProductChallengeSet; -use starky::proof::{MultiProof, StarkProofWithMetadata}; -use starky::prover::prove_with_commitment; +use starky::cross_table_lookup::{get_ctl_auxiliary_polys, get_ctl_data, CtlData}; +use starky::lookup::{lookup_helper_columns, GrandProductChallengeSet}; +use starky::proof::{ + MultiProof, StarkOpeningSet, StarkProof, StarkProofWithMetadata, StarkProofWithPublicInputs, +}; +use starky::prover::{compute_quotient_polys, prove_with_commitment}; use starky::stark::Stark; -use crate::all_stark::{AllStark, Table, NUM_TABLES}; +use crate::all_stark::{all_cross_table_lookups, AllStark, Table, NUM_TABLES}; +use 
crate::arithmetic::arithmetic_stark::ArithmeticStark; +use crate::byte_packing::byte_packing_stark::BytePackingStark; +use crate::cpu::cpu_stark::CpuStark; use crate::cpu::kernel::aggregator::KERNEL; use crate::generation::{generate_traces, GenerationInputs}; use crate::get_challenges::observe_public_values; +use crate::keccak::keccak_stark::KeccakStark; +use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeStark; +use crate::logic::LogicStark; +use crate::memory::memory_stark::MemoryStark; use crate::proof::{AllProof, PublicValues}; +pub(crate) fn zkevm_fast_config() -> StarkConfig { + StarkConfig { + security_bits: 100, + num_challenges: 2, + fri_config: FriConfig { + rate_bits: 1, + cap_height: 4, + proof_of_work_bits: 16, + // This strategy allows us to hit all intermediary STARK leaves while going through the + // batched Field Merkle Trees. + reduction_strategy: FriReductionStrategy::Fixed(vec![3, 2, 2, 1, 2, 3, 4, 4, 2]), + num_query_rounds: 84, + }, + } +} + /// Generate traces, then create all STARK proofs. pub fn prove( all_stark: &AllStark, @@ -170,6 +204,757 @@ where }) } +/// Compute all STARK proofs. STARK-batching version. +pub(crate) fn prove_with_traces_batch( + all_stark: &AllStark, + config: &StarkConfig, + trace_poly_values: [Vec>; NUM_TABLES], + public_values: PublicValues, + timing: &mut TimingTree, + abort_signal: Option>, +) -> Result> +where + F: RichField + Extendable, + P: PackedField, + C: GenericConfig, +{ + let rate_bits = config.fri_config.rate_bits; + let cap_height = config.fri_config.cap_height; + + let trace_poly_values_sorted: [_; NUM_TABLES] = Table::all_sorted() + .iter() + .map(|&table| trace_poly_values[*table].clone()) + .collect::>() + .try_into() + .unwrap(); + + // We compute the Field Merkle Tree of all STARK traces. 
+ let trace_polys_values_sorted_flat = trace_poly_values_sorted + .clone() + .into_iter() + .flatten() + .collect(); + let trace_commitment = timed!( + timing, + "compute trace commitments", + BatchFriOracle::::from_values( + trace_polys_values_sorted_flat, + rate_bits, + false, + cap_height, + timing, + &[None; NUM_TABLES], + ) + ); + + let mut challenger = Challenger::::new(); + challenger.observe_cap(&trace_commitment.batch_merkle_tree.cap); + + observe_public_values::(&mut challenger, &public_values) + .map_err(|_| anyhow::Error::msg("Invalid conversion of public values."))?; + + // For each STARK, compute its cross-table lookup Z polynomials and get the + // associated `CtlData`. + let (ctl_challenges, ctl_data_per_table) = timed!( + timing, + "compute CTL data", + get_ctl_data::( + config, + &trace_poly_values, + &all_stark.cross_table_lookups, + &mut challenger, + all_stark.arithmetic_stark.constraint_degree() + ) + ); + + check_abort_signal(abort_signal)?; + let lookup_challenges = ctl_challenges + .challenges + .iter() + .map(|ch| ch.beta) + .collect::>(); + + let auxiliary_columns = all_auxiliary_columns::( + all_stark, + config, + &trace_poly_values, + &ctl_data_per_table, + &ctl_challenges, + ); + + // We compute the Field Merkle Tree of all auxiliary columns. + let auxiliary_columns_sorted: Vec<_> = Table::all_sorted() + .iter() + .map(|&table| auxiliary_columns[*table].clone()) + .collect(); + let auxiliary_columns_sorted_flat = auxiliary_columns_sorted + .clone() + .into_iter() + .flatten() + .collect(); + let auxiliary_commitment = timed!( + timing, + "compute auxiliary commitments", + BatchFriOracle::::from_values( + auxiliary_columns_sorted_flat, + rate_bits, + false, + cap_height, + timing, + &[None; NUM_TABLES], + ) + ); + challenger.observe_cap(&auxiliary_commitment.batch_merkle_tree.cap); + + // Quotient polynomials. 
+ let alphas = challenger.get_n_challenges(config.num_challenges); + let quotient_polys = all_quotient_polys::<F, P, C, D>( + all_stark, + &trace_commitment, + &auxiliary_commitment, + &auxiliary_columns, + None, + &ctl_data_per_table, + alphas.clone(), + config, + ); + + // We compute the Field Merkle Tree of all quotient polynomials. + let quotient_polys_sorted: Vec<_> = Table::all_sorted() + .iter() + .map(|&table| quotient_polys[*table].clone()) + .collect(); + let quotient_polys_sorted_flat = quotient_polys_sorted + .clone() + .into_iter() + .flatten() + .collect(); + let quotient_commitment = timed!( + timing, + "compute quotient commitments", + BatchFriOracle::<F, C, D>::from_coeffs( + quotient_polys_sorted_flat, + rate_bits, + false, + cap_height, + timing, + &[None; NUM_TABLES], + ) + ); + challenger.observe_cap(&quotient_commitment.batch_merkle_tree.cap); + + let zeta = challenger.get_extension_challenge::<D>(); + + // To avoid leaking witness data, we want to ensure that our opening locations, + // `zeta` and `g * zeta`, are not in our subgroup `H`. It suffices to check + // `zeta` only, since `(g * zeta)^n = zeta^n`, where `n` is the order of + // `g`. + let degree_bits = trace_commitment.degree_bits[0]; + let g = F::primitive_root_of_unity(degree_bits); + ensure!( + zeta.exp_power_of_2(degree_bits) != F::Extension::ONE, + "Opening point is in the subgroup." + ); + + let mut all_fri_instances = all_fri_instance_info( + all_stark, + &trace_commitment, + &auxiliary_commitment, + &ctl_data_per_table, + alphas, + zeta, + config, + ); + + // Get the FRI openings and observe them. + // Compute all openings: evaluate all committed polynomials at `zeta` and, when + // necessary, at `g * zeta`. + // TODO: Need batched openings. 
+ let openings = StarkOpeningSet { + local_values: Vec::new(), + next_values: Vec::new(), + auxiliary_polys: None, + auxiliary_polys_next: None, + ctl_zs_first: None, + quotient_polys: None, + }; + + challenger.observe_openings(&openings.to_fri_openings()); + + let initial_merkle_trees = [ + &trace_commitment, + &auxiliary_commitment, + &quotient_commitment, + ]; + + let opening_proof = BatchFriOracle::prove_openings( + &Table::all_degree_logs(), + &all_fri_instances, + &initial_merkle_trees, + &mut challenger, + &config.fri_params(degree_bits), + timing, + ); + + // This is an expensive check, hence is only run when `debug_assertions` are + // enabled. + #[cfg(debug_assertions)] + { + use hashbrown::HashMap; + use starky::cross_table_lookup::debug_utils::check_ctls; + + use crate::verifier::debug_utils::get_memory_extra_looking_values; + + let mut extra_values = HashMap::new(); + extra_values.insert( + *Table::Memory, + get_memory_extra_looking_values(&public_values), + ); + check_ctls( + &trace_poly_values_sorted, + &all_stark.cross_table_lookups, + &extra_values, + ); + } + + let stark_proof = StarkProof { + trace_cap: trace_commitment.batch_merkle_tree.cap.clone(), + auxiliary_polys_cap: Some(auxiliary_commitment.batch_merkle_tree.cap), + quotient_polys_cap: Some(quotient_commitment.batch_merkle_tree.cap), + openings, + opening_proof, + }; + + Ok(StarkProofWithPublicInputs { + proof: stark_proof, + public_inputs: vec![], + }) +} + +/// Generates all auxiliary columns. +fn all_auxiliary_columns<F, C, const D: usize>( + all_stark: &AllStark<F, D>, + config: &StarkConfig, + trace_poly_values: &[Vec<PolynomialValues<F>>; NUM_TABLES], + ctl_data_per_table: &[CtlData<F>; NUM_TABLES], + ctl_challenges: &GrandProductChallengeSet<F>, +) -> Vec<Vec<PolynomialValues<F>>> +where + F: RichField + Extendable<D>, + C: GenericConfig<D, F = F>, +{ + let mut res = Vec::new(); + + // Arithmetic. 
+ res.push(auxiliary_columns_single_stark::< + F, + C, + ArithmeticStark, + D, + >( + all_stark.arithmetic_stark, + config, + &trace_poly_values[*Table::Arithmetic], + &ctl_data_per_table[*Table::Arithmetic], + ctl_challenges, + )); + + // BytePacking. + res.push(auxiliary_columns_single_stark::< + F, + C, + BytePackingStark, + D, + >( + all_stark.byte_packing_stark, + config, + &trace_poly_values[*Table::BytePacking], + &ctl_data_per_table[*Table::BytePacking], + ctl_challenges, + )); + + // Cpu. + res.push(auxiliary_columns_single_stark::, D>( + all_stark.cpu_stark, + config, + &trace_poly_values[*Table::Cpu], + &ctl_data_per_table[*Table::Cpu], + ctl_challenges, + )); + + // Keccak. + res.push( + auxiliary_columns_single_stark::, D>( + all_stark.keccak_stark, + config, + &trace_poly_values[*Table::Keccak], + &ctl_data_per_table[*Table::Keccak], + ctl_challenges, + ), + ); + + // KeccakSponge. + res.push(auxiliary_columns_single_stark::< + F, + C, + KeccakSpongeStark, + D, + >( + all_stark.keccak_sponge_stark, + config, + &trace_poly_values[*Table::KeccakSponge], + &ctl_data_per_table[*Table::KeccakSponge], + ctl_challenges, + )); + + // Logic. + res.push(auxiliary_columns_single_stark::, D>( + all_stark.logic_stark, + config, + &trace_poly_values[*Table::Logic], + &ctl_data_per_table[*Table::Logic], + ctl_challenges, + )); + + // Memory. 
+ res.push( + auxiliary_columns_single_stark::, D>( + all_stark.memory_stark, + config, + &trace_poly_values[*Table::Memory], + &ctl_data_per_table[*Table::Memory], + ctl_challenges, + ), + ); + + res +} + +fn auxiliary_columns_single_stark( + stark: S, + config: &StarkConfig, + trace_poly_values: &[PolynomialValues], + ctl_data: &CtlData, + ctl_challenges: &GrandProductChallengeSet, +) -> Vec> +where + F: RichField + Extendable, + C: GenericConfig, + S: Stark, +{ + let rate_bits = config.fri_config.rate_bits; + let constraint_degree = stark.constraint_degree(); + assert!( + constraint_degree <= (1 << rate_bits) + 1, + "The degree of the Stark constraints must be <= blowup_factor + 1" + ); + + let lookup_challenges: Vec<_> = ctl_challenges.challenges.iter().map(|ch| ch.beta).collect(); + // Add lookup columns. + let lookups = stark.lookups(); + let mut res = { + let mut columns = Vec::new(); + for lookup in &lookups { + for &challenge in lookup_challenges.iter() { + columns.extend(lookup_helper_columns( + lookup, + trace_poly_values, + challenge, + constraint_degree, + )); + } + } + columns + }; + let num_lookup_columns = res.len(); + + // Add CTL columns. + if let Some(p) = get_ctl_auxiliary_polys(Some(ctl_data)) { + res.extend(p); + } + + debug_assert!( + (stark.uses_lookups() || stark.requires_ctls()) || get_ctl_auxiliary_polys(Some(ctl_data)).is_none(), + "There should be auxiliary polynomials if and only if we have either lookups or require cross-table lookups." + ); + + res +} + +/// Generates all quotient polynomials. 
+fn all_quotient_polys( + all_stark: &AllStark, + trace_commitment: &BatchFriOracle, + auxiliary_commitment: &BatchFriOracle, + all_auxiliary_columns: &Vec>>, + lookup_challenges: Option<&Vec>, + ctl_data_per_table: &[CtlData; NUM_TABLES], + alphas: Vec, + config: &StarkConfig, +) -> Vec>> +where + F: RichField + Extendable, + P: PackedField, + C: GenericConfig, +{ + let mut res = Vec::new(); + + // This method assumes that all STARKs have distinct degrees. + // TODO: Relax this. + assert!(Table::all_degree_logs() + .windows(2) + .all(|pair| { pair[0] > pair[1] })); + + // Arithmetic. + { + let trace_leave_len = trace_commitment.batch_merkle_tree.leaves + [Table::table_to_sorted_index()[*Table::Arithmetic]][0] + .len(); + let get_trace_packed = |index, step| { + trace_commitment.get_lde_values_packed::
<P>
(0, index, step, 0, trace_leave_len) + }; + let aux_leave_len = trace_commitment.batch_merkle_tree.leaves + [Table::table_to_sorted_index()[*Table::Arithmetic]][0] + .len(); + let get_aux_packed = |index, step| { + auxiliary_commitment.get_lde_values_packed(0, index, step, 0, aux_leave_len) + }; + let num_lookup_columns = all_auxiliary_columns[*Table::Arithmetic].len(); + res.push( + compute_quotient_polys::, D>( + &all_stark.arithmetic_stark, + &get_trace_packed, + &get_aux_packed, + lookup_challenges, + Some(&ctl_data_per_table[*Table::Arithmetic]), + &vec![], + alphas.clone(), + Table::all_degree_logs()[Table::table_to_sorted_index()[*Table::Arithmetic]], + num_lookup_columns, + config, + ) + .expect("Couldn't compute quotient polys."), + ); + } + + // Bytepacking. + { + let trace_leave_len = trace_commitment.batch_merkle_tree.leaves + [Table::table_to_sorted_index()[*Table::BytePacking]][0] + .len(); + let get_trace_packed = |index, step| { + trace_commitment.get_lde_values_packed::
<P>
(0, index, step, 0, trace_leave_len) + }; + let aux_leave_len = trace_commitment.batch_merkle_tree.leaves + [Table::table_to_sorted_index()[*Table::BytePacking]][0] + .len(); + let get_aux_packed = |index, step| { + auxiliary_commitment.get_lde_values_packed(0, index, step, 0, aux_leave_len) + }; + let num_lookup_columns = all_auxiliary_columns[*Table::BytePacking].len(); + res.push( + compute_quotient_polys::, D>( + &all_stark.byte_packing_stark, + &get_trace_packed, + &get_aux_packed, + lookup_challenges, + Some(&ctl_data_per_table[*Table::BytePacking]), + &vec![], + alphas.clone(), + Table::all_degree_logs()[Table::table_to_sorted_index()[*Table::BytePacking]], + num_lookup_columns, + config, + ) + .expect("Couldn't compute quotient polys."), + ); + } + + // Cpu. + { + let trace_leave_len = trace_commitment.batch_merkle_tree.leaves + [Table::table_to_sorted_index()[*Table::Cpu]][0] + .len(); + let get_trace_packed = |index, step| { + trace_commitment.get_lde_values_packed::
<P>
(0, index, step, 0, trace_leave_len) + }; + let aux_leave_len = trace_commitment.batch_merkle_tree.leaves + [Table::table_to_sorted_index()[*Table::Cpu]][0] + .len(); + let get_aux_packed = |index, step| { + auxiliary_commitment.get_lde_values_packed(0, index, step, 0, aux_leave_len) + }; + let num_lookup_columns = all_auxiliary_columns[*Table::Cpu].len(); + res.push( + compute_quotient_polys::, D>( + &all_stark.cpu_stark, + &get_trace_packed, + &get_aux_packed, + lookup_challenges, + Some(&ctl_data_per_table[*Table::Cpu]), + &vec![], + alphas.clone(), + Table::all_degree_logs()[Table::table_to_sorted_index()[*Table::Cpu]], + num_lookup_columns, + config, + ) + .expect("Couldn't compute quotient polys."), + ); + } + + // Keccak. + { + let trace_leave_len = trace_commitment.batch_merkle_tree.leaves + [Table::table_to_sorted_index()[*Table::Keccak]][0] + .len(); + let get_trace_packed = |index, step| { + trace_commitment.get_lde_values_packed::
<P>
(0, index, step, 0, trace_leave_len) + }; + let aux_leave_len = trace_commitment.batch_merkle_tree.leaves + [Table::table_to_sorted_index()[*Table::Keccak]][0] + .len(); + let get_aux_packed = |index, step| { + auxiliary_commitment.get_lde_values_packed(0, index, step, 0, aux_leave_len) + }; + let num_lookup_columns = all_auxiliary_columns[*Table::Keccak].len(); + res.push( + compute_quotient_polys::, D>( + &all_stark.keccak_stark, + &get_trace_packed, + &get_aux_packed, + lookup_challenges, + Some(&ctl_data_per_table[*Table::Keccak]), + &vec![], + alphas.clone(), + Table::all_degree_logs()[Table::table_to_sorted_index()[*Table::Keccak]], + num_lookup_columns, + config, + ) + .expect("Couldn't compute quotient polys."), + ); + } + + // KeccakSponge. + { + let trace_leave_len = trace_commitment.batch_merkle_tree.leaves + [Table::table_to_sorted_index()[*Table::KeccakSponge]][0] + .len(); + let get_trace_packed = |index, step| { + trace_commitment.get_lde_values_packed::
<P>
(0, index, step, 0, trace_leave_len) + }; + let aux_leave_len = trace_commitment.batch_merkle_tree.leaves + [Table::table_to_sorted_index()[*Table::KeccakSponge]][0] + .len(); + let get_aux_packed = |index, step| { + auxiliary_commitment.get_lde_values_packed(0, index, step, 0, aux_leave_len) + }; + let num_lookup_columns = all_auxiliary_columns[*Table::KeccakSponge].len(); + res.push( + compute_quotient_polys::, D>( + &all_stark.keccak_sponge_stark, + &get_trace_packed, + &get_aux_packed, + lookup_challenges, + Some(&ctl_data_per_table[*Table::KeccakSponge]), + &vec![], + alphas.clone(), + Table::all_degree_logs()[Table::table_to_sorted_index()[*Table::KeccakSponge]], + num_lookup_columns, + config, + ) + .expect("Couldn't compute quotient polys."), + ); + } + + // Logic. + { + let trace_leave_len = trace_commitment.batch_merkle_tree.leaves + [Table::table_to_sorted_index()[*Table::Logic]][0] + .len(); + let get_trace_packed = |index, step| { + trace_commitment.get_lde_values_packed::
<P>
(0, index, step, 0, trace_leave_len) + }; + let aux_leave_len = trace_commitment.batch_merkle_tree.leaves + [Table::table_to_sorted_index()[*Table::Logic]][0] + .len(); + let get_aux_packed = |index, step| { + auxiliary_commitment.get_lde_values_packed(0, index, step, 0, aux_leave_len) + }; + let num_lookup_columns = all_auxiliary_columns[*Table::Logic].len(); + res.push( + compute_quotient_polys::, D>( + &all_stark.logic_stark, + &get_trace_packed, + &get_aux_packed, + lookup_challenges, + Some(&ctl_data_per_table[*Table::Logic]), + &vec![], + alphas.clone(), + Table::all_degree_logs()[Table::table_to_sorted_index()[*Table::Logic]], + num_lookup_columns, + config, + ) + .expect("Couldn't compute quotient polys."), + ); + } + + // Memory. + { + let trace_leave_len = trace_commitment.batch_merkle_tree.leaves + [Table::table_to_sorted_index()[*Table::Memory]][0] + .len(); + let get_trace_packed = |index, step| { + trace_commitment.get_lde_values_packed::
<P>
(0, index, step, 0, trace_leave_len) + }; + let aux_leave_len = trace_commitment.batch_merkle_tree.leaves + [Table::table_to_sorted_index()[*Table::Memory]][0] + .len(); + let get_aux_packed = |index, step| { + auxiliary_commitment.get_lde_values_packed(0, index, step, 0, aux_leave_len) + }; + let num_lookup_columns = all_auxiliary_columns[*Table::Memory].len(); + res.push( + compute_quotient_polys::, D>( + &all_stark.memory_stark, + &get_trace_packed, + &get_aux_packed, + lookup_challenges, + Some(&ctl_data_per_table[*Table::Memory]), + &vec![], + alphas.clone(), + Table::all_degree_logs()[Table::table_to_sorted_index()[*Table::Memory]], + num_lookup_columns, + config, + ) + .expect("Couldn't compute quotient polys."), + ); + } + + res +} + +/// Generates all FRI instances. They are sorted by decreasing degree. +fn all_fri_instance_info( + all_stark: &AllStark, + trace_commitment: &BatchFriOracle, + auxiliary_commitment: &BatchFriOracle, + ctl_data_per_table: &[CtlData; NUM_TABLES], + alphas: Vec, + zeta: F::Extension, + config: &StarkConfig, + // ctl_data_per_table: &[CtlData; NUM_TABLES], + // ctl_challenges: &GrandProductChallengeSet, +) -> Vec> +where + F: RichField + Extendable, + C: GenericConfig, +{ + let degree_bits = Table::all_degree_logs(); + let mut res = Vec::new(); + + // Arithmetic. + { + let g = F::primitive_root_of_unity( + degree_bits[Table::table_to_sorted_index()[*Table::Arithmetic]], + ); + let num_ctl_helper_polys = ctl_data_per_table[*Table::Arithmetic].num_ctl_helper_polys(); + res.push(all_stark.arithmetic_stark.fri_instance( + zeta, + g, + num_ctl_helper_polys.iter().sum(), + num_ctl_helper_polys, + config, + )); + } + + // BytePacking. 
+ { + let g = F::primitive_root_of_unity( + degree_bits[Table::table_to_sorted_index()[*Table::BytePacking]], + ); + let num_ctl_helper_polys = ctl_data_per_table[*Table::BytePacking].num_ctl_helper_polys(); + res.push(all_stark.byte_packing_stark.fri_instance( + zeta, + g, + num_ctl_helper_polys.iter().sum(), + num_ctl_helper_polys, + config, + )); + } + + // Cpu. + { + let g = + F::primitive_root_of_unity(degree_bits[Table::table_to_sorted_index()[*Table::Cpu]]); + let num_ctl_helper_polys = ctl_data_per_table[*Table::Cpu].num_ctl_helper_polys(); + res.push(all_stark.cpu_stark.fri_instance( + zeta, + g, + num_ctl_helper_polys.iter().sum(), + num_ctl_helper_polys, + config, + )); + } + + // Keccak. + { + let g = + F::primitive_root_of_unity(degree_bits[Table::table_to_sorted_index()[*Table::Keccak]]); + let num_ctl_helper_polys = ctl_data_per_table[*Table::Keccak].num_ctl_helper_polys(); + res.push(all_stark.keccak_stark.fri_instance( + zeta, + g, + num_ctl_helper_polys.iter().sum(), + num_ctl_helper_polys, + config, + )); + } + + // KeccakSponge. + { + let g = F::primitive_root_of_unity( + degree_bits[Table::table_to_sorted_index()[*Table::KeccakSponge]], + ); + let num_ctl_helper_polys = ctl_data_per_table[*Table::KeccakSponge].num_ctl_helper_polys(); + res.push(all_stark.keccak_sponge_stark.fri_instance( + zeta, + g, + num_ctl_helper_polys.iter().sum(), + num_ctl_helper_polys, + config, + )); + } + + // Logic. + { + let g = + F::primitive_root_of_unity(degree_bits[Table::table_to_sorted_index()[*Table::Logic]]); + let num_ctl_helper_polys = ctl_data_per_table[*Table::Logic].num_ctl_helper_polys(); + res.push(all_stark.logic_stark.fri_instance( + zeta, + g, + num_ctl_helper_polys.iter().sum(), + num_ctl_helper_polys, + config, + )); + } + + // Memory. 
+ { + let g = + F::primitive_root_of_unity(degree_bits[Table::table_to_sorted_index()[*Table::Memory]]); + let num_ctl_helper_polys = ctl_data_per_table[*Table::Memory].num_ctl_helper_polys(); + res.push(all_stark.memory_stark.fri_instance( + zeta, + g, + num_ctl_helper_polys.iter().sum(), + num_ctl_helper_polys, + config, + )); + } + + res +} + /// Generates a proof for each STARK. /// At this stage, we have computed the trace polynomials commitments for the /// various STARKs, and we have the cross-table lookup data for each table, diff --git a/evm_arithmetization/src/verifier.rs b/evm_arithmetization/src/verifier.rs index 52fa5304f..032290bde 100644 --- a/evm_arithmetization/src/verifier.rs +++ b/evm_arithmetization/src/verifier.rs @@ -1,5 +1,6 @@ use anyhow::Result; use ethereum_types::{BigEndianHash, U256}; +use hashbrown::HashMap; use itertools::Itertools; use plonky2::field::extension::Extendable; use plonky2::hash::hash_types::RichField; @@ -117,12 +118,15 @@ where // Extra sums to add to the looked last value. // Only necessary for the Memory values. - let mut extra_looking_sums = vec![vec![F::ZERO; config.num_challenges]; NUM_TABLES]; + let mut extra_looking_sums = HashMap::new(); // Memory - extra_looking_sums[Table::Memory as usize] = (0..config.num_challenges) - .map(|i| get_memory_extra_looking_sum(&public_values, ctl_challenges.challenges[i])) - .collect_vec(); + extra_looking_sums.insert( + Table::Memory as usize, + (0..config.num_challenges) + .map(|i| get_memory_extra_looking_sum(&public_values, ctl_challenges.challenges[i])) + .collect_vec(), + ); verify_cross_table_lookups::( cross_table_lookups, @@ -130,7 +134,7 @@ where .multi_proof .stark_proofs .map(|p| p.proof.openings.ctl_zs_first.unwrap()), - Some(&extra_looking_sums), + &extra_looking_sums, config, ) }