Add STARK batching #366

Status: Closed. Wants to merge 1 commit.
10 changes: 0 additions & 10 deletions Cargo.lock

(The Cargo.lock diff is not rendered: generated files are hidden by default.)

8 changes: 4 additions & 4 deletions Cargo.toml
@@ -112,10 +112,10 @@ rpc = { path = "zero_bin/rpc" }
 zero_bin_common = { path = "zero_bin/common" }
 
 # plonky2-related dependencies
-plonky2 = "0.2.2"
-plonky2_maybe_rayon = "0.2.0"
-plonky2_util = "0.2.0"
-starky = "0.4.0"
+plonky2 = { path = "../plonky2/plonky2"}
+plonky2_maybe_rayon = { path = "../plonky2/maybe_rayon"}
+plonky2_util = { path = "../plonky2/util"}
+starky = { path = "../plonky2/starky"}
 
 # proc macro related dependencies
 proc-macro2 = "1.0"
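
These path dependencies assume a `plonky2` checkout sitting in a sibling directory of this repository. For CI, or for anyone without that local layout, the same override is commonly expressed as a git dependency instead; a hedged sketch (the branch name is a placeholder, not taken from this PR):

```toml
# Hypothetical alternative to the local path overrides above: pull the
# batched-STARK plonky2 workspace members straight from git.
# The branch name is a placeholder.
plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", branch = "batch-stark" }
plonky2_maybe_rayon = { git = "https://github.com/0xPolygonZero/plonky2.git", branch = "batch-stark" }
plonky2_util = { git = "https://github.com/0xPolygonZero/plonky2.git", branch = "batch-stark" }
starky = { git = "https://github.com/0xPolygonZero/plonky2.git", branch = "batch-stark" }
```
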
25 changes: 25 additions & 0 deletions evm_arithmetization/src/all_stark.rs
@@ -108,6 +108,31 @@ impl Table {
             Self::Memory,
         ]
     }
+
+    /// Returns all STARK table indices in descending order of their padded
+    /// trace degrees.
+    pub(crate) const fn all_sorted() -> [Self; NUM_TABLES] {
+        [
+            Self::Memory,
+            Self::Cpu,
+            Self::Arithmetic,
+            Self::BytePacking,
+            Self::Keccak,
+            Self::Logic,
+            Self::KeccakSponge,
+        ]
+    }
+
+    /// Returns the ordered position of the tables. This is the inverse of
+    /// `all_sorted()`.
+    pub(crate) const fn table_to_sorted_index() -> [usize; NUM_TABLES] {
+        [2, 3, 1, 4, 6, 5, 0]
+    }
+
+    /// Returns all STARK padded trace degrees in descending order.
+    pub(crate) const fn all_degree_logs() -> [usize; NUM_TABLES] {
+        [27, 24, 22, 20, 19, 17, 14]
+    }
 }
 
 /// Returns all the `CrossTableLookups` used for proving the EVM.
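
Since `table_to_sorted_index()` is documented as the inverse of `all_sorted()`, a small unit test makes the relationship concrete. This is a sketch, not part of the PR; it assumes the `Table` enum converts to `usize` via `as`, as it does elsewhere in this diff (`Table::Memory as usize`):

```rust
#[cfg(test)]
mod sorted_order_tests {
    use super::*;

    // Walking every entry of `all_sorted()` back through
    // `table_to_sorted_index()` must return its position in the sorted
    // array, i.e. the two constant arrays are inverse permutations.
    #[test]
    fn table_to_sorted_index_inverts_all_sorted() {
        let sorted = Table::all_sorted();
        let to_sorted = Table::table_to_sorted_index();
        for (pos, table) in sorted.iter().enumerate() {
            assert_eq!(to_sorted[*table as usize], pos);
        }
        // The degree logs are listed in the same (descending) order as
        // `all_sorted()`.
        let degrees = Table::all_degree_logs();
        assert!(degrees.windows(2).all(|w| w[0] >= w[1]));
    }
}
```
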
3 changes: 1 addition & 2 deletions evm_arithmetization/src/cpu/kernel/tests/bignum/mod.rs
@@ -9,7 +9,6 @@ use itertools::Itertools;
 use num::{BigUint, One, Zero};
 use num_bigint::RandBigInt;
 use plonky2::field::goldilocks_field::GoldilocksField as F;
-use plonky2_util::ceil_div_usize;
 use rand::Rng;
 
 use crate::cpu::kernel::aggregator::KERNEL;
@@ -90,7 +89,7 @@ fn max_bignum(bit_size: usize) -> BigUint {
 }
 
 fn bignum_len(a: &BigUint) -> usize {
-    ceil_div_usize(a.bits() as usize, BIGNUM_LIMB_BITS)
+    (a.bits() as usize).div_ceil(BIGNUM_LIMB_BITS)
 }
 
 fn run_test(fn_label: &str, memory: Vec<U256>, stack: Vec<U256>) -> Result<(Vec<U256>, Vec<U256>)> {
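
Throughout this PR, the removed `plonky2_util::ceil_div_usize(a, b)` helper is replaced by the standard library's `usize::div_ceil`, stabilized in Rust 1.73. A minimal sketch of the equivalence (the closed form `(a + b - 1) / b` is how such a helper is conventionally written, stated here as an assumption rather than quoted from plonky2):

```rust
fn main() {
    // Ceiling division: the smallest n such that n * b >= a, for b > 0.
    for (a, b) in [(0usize, 8usize), (1, 8), (8, 8), (9, 8), (255, 32)] {
        assert_eq!(a.div_ceil(b), (a + b - 1) / b);
    }
}
```
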
3 changes: 1 addition & 2 deletions evm_arithmetization/src/cpu/kernel/utils.rs
@@ -1,7 +1,6 @@
 use core::fmt::Debug;
 
 use ethereum_types::U256;
-use plonky2_util::ceil_div_usize;
 
 /// Enumerate the length `W` windows of `vec`, and run `maybe_replace` on each
 /// one.
@@ -28,7 +27,7 @@
 }
 
 pub(crate) fn u256_to_trimmed_be_bytes(u256: &U256) -> Vec<u8> {
-    let num_bytes = ceil_div_usize(u256.bits(), 8);
+    let num_bytes = u256.bits().div_ceil(8);
     // `byte` is little-endian, so we manually reverse it.
     (0..num_bytes).rev().map(|i| u256.byte(i)).collect()
 }
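
As a concrete check of the trimming behaviour (a sketch using the `ethereum_types::U256` type already imported above; not part of the PR):

```rust
use ethereum_types::U256;

fn main() {
    // 0x1234 has 13 significant bits, so 13.div_ceil(8) = 2 bytes survive;
    // the 30 leading zero bytes of the 256-bit word are trimmed away.
    let x = U256::from(0x1234u64);
    assert_eq!(x.bits(), 13);
    assert_eq!(x.bits().div_ceil(8), 2);
    // `u256_to_trimmed_be_bytes(&x)` is therefore expected to yield [0x12, 0x34].
}
```
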
26 changes: 14 additions & 12 deletions evm_arithmetization/src/fixed_recursive_verifier.rs
@@ -587,26 +587,28 @@
 
         // Extra sums to add to the looked last value.
         // Only necessary for the Memory values.
-        let mut extra_looking_sums =
-            vec![vec![builder.zero(); stark_config.num_challenges]; NUM_TABLES];
+        let mut extra_looking_sums = HashMap::new();
 
         // Memory
-        extra_looking_sums[*Table::Memory] = (0..stark_config.num_challenges)
-            .map(|c| {
-                get_memory_extra_looking_sum_circuit(
-                    &mut builder,
-                    &public_values,
-                    ctl_challenges.challenges[c],
-                )
-            })
-            .collect_vec();
+        extra_looking_sums.insert(
+            Table::Memory as usize,
+            (0..stark_config.num_challenges)
+                .map(|c| {
+                    get_memory_extra_looking_sum_circuit(
+                        &mut builder,
+                        &public_values,
+                        ctl_challenges.challenges[c],
+                    )
+                })
+                .collect_vec(),
+        );
 
         // Verify the CTL checks.
         verify_cross_table_lookups_circuit::<F, D, NUM_TABLES>(
             &mut builder,
             all_cross_table_lookups(),
             pis.map(|p| p.ctl_zs_first),
-            Some(&extra_looking_sums),
+            &extra_looking_sums,
             stark_config,
         );
 
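
The change above moves `extra_looking_sums` from a dense `Vec` indexed by every table to a `HashMap` keyed by table index, so only tables that actually contribute an extra looking sum (here, only `Memory`) carry an entry, and the updated `verify_cross_table_lookups_circuit` takes the map directly instead of an `Option` around a dense vector. A self-contained sketch of that dense-to-sparse pattern (all names and types below are simplified stand-ins, not the starky API):

```rust
use std::collections::HashMap;

const NUM_TABLES: usize = 7;
const MEMORY: usize = 6; // hypothetical index of the Memory table

fn main() {
    // Old shape: one Vec per table, even though most tables contribute nothing.
    let dense: Vec<Vec<u64>> = vec![vec![0; 2]; NUM_TABLES];
    assert_eq!(dense.len(), NUM_TABLES);

    // New shape: an entry only for tables with a non-trivial extra sum.
    let mut sparse: HashMap<usize, Vec<u64>> = HashMap::new();
    sparse.insert(MEMORY, vec![11, 22]);

    // A consumer treats an absent entry as "no extra looking sum".
    for table in 0..NUM_TABLES {
        let extra: &[u64] = sparse.get(&table).map(Vec::as_slice).unwrap_or(&[]);
        assert_eq!(extra.is_empty(), table != MEMORY);
    }
}
```
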
(File name not rendered in this view; the hunks below touch the KeccakSponge STARK's CTL helpers.)
@@ -4,6 +4,7 @@ use core::marker::PhantomData;
 use core::mem::size_of;
 
 use itertools::Itertools;
+use num::integer::div_ceil;
 use plonky2::field::extension::{Extendable, FieldExtension};
 use plonky2::field::packed::PackedField;
 use plonky2::field::polynomial::PolynomialValues;
@@ -13,7 +14,7 @@ use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::timed;
 use plonky2::util::timing::TimingTree;
 use plonky2::util::transpose;
-use plonky2_util::ceil_div_usize;
+// use plonky2_util::ceil_div_usize;
 use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use starky::evaluation_frame::StarkEvaluationFrame;
 use starky::lookup::{Column, Filter, Lookup};
@@ -137,7 +138,7 @@ pub(crate) fn ctl_looking_memory<F: Field>(i: usize) -> Vec<Column<F>> {
 /// Returns the number of `KeccakSponge` tables looking into the `LogicStark`.
 pub(crate) const fn num_logic_ctls() -> usize {
     const U8S_PER_CTL: usize = 32;
-    ceil_div_usize(KECCAK_RATE_BYTES, U8S_PER_CTL)
+    KECCAK_RATE_BYTES.div_ceil(U8S_PER_CTL)
 }
 
 /// Creates the vector of `Columns` required to perform the `i`th logic CTL.
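
Assuming the usual Keccak-256 rate of 136 bytes for `KECCAK_RATE_BYTES`, and with 32 bytes handled per CTL, the expression evaluates to 5 logic CTLs; a tiny sketch restating the two constants for illustration only:

```rust
// Values restated here for illustration; they are assumed to mirror the
// constants referenced above.
const KECCAK_RATE_BYTES: usize = 136;
const U8S_PER_CTL: usize = 32;

fn main() {
    // ceil(136 / 32) = 5 `KeccakSponge` lookups into `LogicStark`.
    assert_eq!(KECCAK_RATE_BYTES.div_ceil(U8S_PER_CTL), 5);
}
```
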
3 changes: 1 addition & 2 deletions evm_arithmetization/src/logic.rs
@@ -11,7 +11,6 @@ use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::timed;
 use plonky2::util::timing::TimingTree;
-use plonky2_util::ceil_div_usize;
 use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use starky::evaluation_frame::StarkEvaluationFrame;
 use starky::lookup::{Column, Filter};
@@ -29,7 +28,7 @@ const VAL_BITS: usize = 256;
 pub(crate) const PACKED_LIMB_BITS: usize = 32;
 /// Number of field elements needed to store each input/output at the specified
 /// packing.
-const PACKED_LEN: usize = ceil_div_usize(VAL_BITS, PACKED_LIMB_BITS);
+const PACKED_LEN: usize = VAL_BITS.div_ceil(PACKED_LIMB_BITS);
 
 /// `LogicStark` columns.
 pub(crate) mod columns {
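
For concreteness, with `VAL_BITS = 256` and `PACKED_LIMB_BITS = 32` the new const expression still evaluates to 8 limbs per input/output, and it relies on `usize::div_ceil` being usable in const context; a sketch:

```rust
// Constants restated from logic.rs for illustration.
const VAL_BITS: usize = 256;
const PACKED_LIMB_BITS: usize = 32;
const PACKED_LEN: usize = VAL_BITS.div_ceil(PACKED_LIMB_BITS);

fn main() {
    // 256 bits packed into 32-bit limbs: ceil(256 / 32) = 8 field elements.
    assert_eq!(PACKED_LEN, 8);
}
```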