diff --git a/math/src/field/fields/fft_friendly/quartic_babybear.rs b/math/src/field/fields/fft_friendly/quartic_babybear.rs
index 361de0e0b..23cc0227e 100644
--- a/math/src/field/fields/fft_friendly/quartic_babybear.rs
+++ b/math/src/field/fields/fft_friendly/quartic_babybear.rs
@@ -8,6 +8,9 @@ use crate::field::{
 #[cfg(feature = "lambdaworks-serde-binary")]
 use crate::traits::ByteConversion;
 
+#[cfg(all(feature = "lambdaworks-serde-binary", feature = "alloc"))]
+use crate::traits::AsBytes;
+
 /// We are implementing the extension of Baby Bear of degree 4 using the irreducible polynomial x^4 + 11.
 /// BETA = 11 and -BETA = -11 is the non-residue.
 pub const BETA: FieldElement<Babybear31PrimeField> =
@@ -262,6 +265,59 @@ impl ByteConversion for [FieldElement<Babybear31PrimeField>; 4] {
     }
 }
 
+#[cfg(feature = "lambdaworks-serde-binary")]
+impl ByteConversion for FieldElement<Degree4BabyBearExtensionField> {
+    fn to_bytes_be(&self) -> alloc::vec::Vec<u8> {
+        let mut byte_slice = ByteConversion::to_bytes_be(&self.value()[0]);
+        byte_slice.extend(ByteConversion::to_bytes_be(&self.value()[1]));
+        byte_slice.extend(ByteConversion::to_bytes_be(&self.value()[2]));
+        byte_slice.extend(ByteConversion::to_bytes_be(&self.value()[3]));
+        byte_slice
+    }
+
+    fn to_bytes_le(&self) -> alloc::vec::Vec<u8> {
+        let mut byte_slice = ByteConversion::to_bytes_le(&self.value()[0]);
+        byte_slice.extend(ByteConversion::to_bytes_le(&self.value()[1]));
+        byte_slice.extend(ByteConversion::to_bytes_le(&self.value()[2]));
+        byte_slice.extend(ByteConversion::to_bytes_le(&self.value()[3]));
+        byte_slice
+    }
+
+    fn from_bytes_be(bytes: &[u8]) -> Result<Self, ByteConversionError>
+    where
+        Self: Sized,
+    {
+        const BYTES_PER_FIELD: usize = 8;
+        let x0 = FieldElement::from_bytes_be(&bytes[0..BYTES_PER_FIELD])?;
+        let x1 = FieldElement::from_bytes_be(&bytes[BYTES_PER_FIELD..BYTES_PER_FIELD * 2])?;
+        let x2 = FieldElement::from_bytes_be(&bytes[BYTES_PER_FIELD * 2..BYTES_PER_FIELD * 3])?;
+        let x3 = FieldElement::from_bytes_be(&bytes[BYTES_PER_FIELD * 3..BYTES_PER_FIELD * 4])?;
+
+        Ok(Self::new([x0, x1, x2, x3]))
+    }
+
+    fn from_bytes_le(bytes: &[u8]) -> Result<Self, ByteConversionError>
+    where
+        Self: Sized,
+    {
+        const BYTES_PER_FIELD: usize = 8;
+        let x0 = FieldElement::from_bytes_le(&bytes[0..BYTES_PER_FIELD])?;
+        let x1 = FieldElement::from_bytes_le(&bytes[BYTES_PER_FIELD..BYTES_PER_FIELD * 2])?;
+        let x2 = FieldElement::from_bytes_le(&bytes[BYTES_PER_FIELD * 2..BYTES_PER_FIELD * 3])?;
+        let x3 = FieldElement::from_bytes_le(&bytes[BYTES_PER_FIELD * 3..BYTES_PER_FIELD * 4])?;
+
+        Ok(Self::new([x0, x1, x2, x3]))
+    }
+}
+
+#[cfg(feature = "lambdaworks-serde-binary")]
+#[cfg(feature = "alloc")]
+impl AsBytes for FieldElement<Degree4BabyBearExtensionField> {
+    fn as_bytes(&self) -> alloc::vec::Vec<u8> {
+        self.to_bytes_be()
+    }
+}
+
 impl IsFFTField for Degree4BabyBearExtensionField {
     const TWO_ADICITY: u64 = 29;
     const TWO_ADIC_PRIMITVE_ROOT_OF_UNITY: Self::BaseType = [
diff --git a/provers/stark/src/examples/mod.rs b/provers/stark/src/examples/mod.rs
index ba4f6586e..f21b62568 100644
--- a/provers/stark/src/examples/mod.rs
+++ b/provers/stark/src/examples/mod.rs
@@ -5,5 +5,6 @@ pub mod fibonacci_2_columns;
 pub mod fibonacci_rap;
 pub mod quadratic_air;
 pub mod read_only_memory;
+pub mod read_only_memory_logup;
 pub mod simple_fibonacci;
 pub mod simple_periodic_cols;
diff --git a/provers/stark/src/examples/read_only_memory_logup.rs b/provers/stark/src/examples/read_only_memory_logup.rs
new file mode 100644
index 000000000..6a45b20be
--- /dev/null
+++ b/provers/stark/src/examples/read_only_memory_logup.rs
@@ -0,0 +1,704 @@
+//! Implementation of a LogUp Lookup Argument example.
+//! See our blog post at blog.lambdaclass.com for a detailed explanation.
+
+use std::marker::PhantomData;
+
+use crate::{
+    constraints::{
+        boundary::{BoundaryConstraint, BoundaryConstraints},
+        transition::TransitionConstraint,
+    },
+    context::AirContext,
+    proof::options::ProofOptions,
+    trace::TraceTable,
+    traits::{TransitionEvaluationContext, AIR},
+};
+use itertools::Itertools;
+use lambdaworks_crypto::fiat_shamir::is_transcript::IsTranscript;
+use lambdaworks_math::{
+    field::{
+        element::FieldElement,
+        traits::{IsFFTField, IsField, IsPrimeField, IsSubFieldOf},
+    },
+    traits::ByteConversion,
+};
+
+/// Transition constraint that ensures the continuity of the sorted address column of a memory.
+#[derive(Clone)]
+struct ContinuityConstraint<F: IsSubFieldOf<E> + IsFFTField + Send + Sync, E: IsField + Send + Sync>
+{
+    phantom_f: PhantomData<F>,
+    phantom_e: PhantomData<E>,
+}
+
+impl<F, E> ContinuityConstraint<F, E>
+where
+    F: IsSubFieldOf<E> + IsFFTField + Send + Sync,
+    E: IsField + Send + Sync,
+{
+    pub fn new() -> Self {
+        Self {
+            phantom_f: PhantomData::<F>,
+            phantom_e: PhantomData::<E>,
+        }
+    }
+}
+
+impl<F, E> TransitionConstraint<F, E> for ContinuityConstraint<F, E>
+where
+    F: IsFFTField + IsSubFieldOf<E> + Send + Sync,
+    E: IsField + Send + Sync,
+{
+    fn degree(&self) -> usize {
+        2
+    }
+
+    fn constraint_idx(&self) -> usize {
+        0
+    }
+
+    fn end_exemptions(&self) -> usize {
+        // NOTE: We are assuming that the trace length is a power of 2.
+        1
+    }
+
+    fn evaluate(
+        &self,
+        evaluation_context: &TransitionEvaluationContext<F, E>,
+        transition_evaluations: &mut [FieldElement<E>],
+    ) {
+        // In both evaluation contexts, Prover and Verifier will evaluate the transition polynomial in the same way.
+        // The only difference is that the Prover's Frame has base field and field extension elements,
+        // while the Verifier's Frame has only field extension elements.
+        match evaluation_context {
+            TransitionEvaluationContext::Prover {
+                frame,
+                periodic_values: _periodic_values,
+                rap_challenges: _rap_challenges,
+            } => {
+                let first_step = frame.get_evaluation_step(0);
+                let second_step = frame.get_evaluation_step(1);
+
+                let a_sorted_0 = first_step.get_main_evaluation_element(0, 2);
+                let a_sorted_1 = second_step.get_main_evaluation_element(0, 2);
+                // (a'_{i+1} - a'_i)(a'_{i+1} - a'_i - 1) = 0 where a' is the sorted address
+                let res = (a_sorted_1 - a_sorted_0)
+                    * (a_sorted_1 - a_sorted_0 - FieldElement::<F>::one());
+
+                // The eval always exists, except if the constraint idx were incorrectly defined.
+                if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) {
+                    *eval = res.to_extension();
+                }
+            }
+
+            TransitionEvaluationContext::Verifier {
+                frame,
+                periodic_values: _periodic_values,
+                rap_challenges: _rap_challenges,
+            } => {
+                let first_step = frame.get_evaluation_step(0);
+                let second_step = frame.get_evaluation_step(1);
+
+                let a_sorted_0 = first_step.get_main_evaluation_element(0, 2);
+                let a_sorted_1 = second_step.get_main_evaluation_element(0, 2);
+                // (a'_{i+1} - a'_i)(a'_{i+1} - a'_i - 1) = 0 where a' is the sorted address
+                let res = (a_sorted_1 - a_sorted_0)
+                    * (a_sorted_1 - a_sorted_0 - FieldElement::<E>::one());
+
+                // The eval always exists, except if the constraint idx were incorrectly defined.
+                if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) {
+                    *eval = res;
+                }
+            }
+        }
+    }
+}
+/// Transition constraint that ensures that equal addresses have equal values, making the sorted memory read-only.
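+/// Concretely, it checks (v'_{i+1} - v'_i) * (a'_{i+1} - a'_i - 1) = 0: combined with the
+/// continuity constraint, consecutive sorted addresses differ by 0 or 1, and whenever an
+/// address repeats, its value must repeat as well.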
+#[derive(Clone)]
+struct SingleValueConstraint<
+    F: IsSubFieldOf<E> + IsFFTField + Send + Sync,
+    E: IsField + Send + Sync,
+> {
+    phantom_f: PhantomData<F>,
+    phantom_e: PhantomData<E>,
+}
+
+impl<F, E> SingleValueConstraint<F, E>
+where
+    F: IsSubFieldOf<E> + IsFFTField + Send + Sync,
+    E: IsField + Send + Sync,
+{
+    pub fn new() -> Self {
+        Self {
+            phantom_f: PhantomData::<F>,
+            phantom_e: PhantomData::<E>,
+        }
+    }
+}
+
+impl<F, E> TransitionConstraint<F, E> for SingleValueConstraint<F, E>
+where
+    F: IsFFTField + IsSubFieldOf<E> + Send + Sync,
+    E: IsField + Send + Sync,
+{
+    fn degree(&self) -> usize {
+        2
+    }
+
+    fn constraint_idx(&self) -> usize {
+        1
+    }
+
+    fn end_exemptions(&self) -> usize {
+        // NOTE: We are assuming that the trace length is a power of 2.
+        1
+    }
+
+    fn evaluate(
+        &self,
+        evaluation_context: &TransitionEvaluationContext<F, E>,
+        transition_evaluations: &mut [FieldElement<E>],
+    ) {
+        // In both evaluation contexts, Prover and Verifier will evaluate the transition polynomial in the same way.
+        // The only difference is that the Prover's Frame has base field and field extension elements,
+        // while the Verifier's Frame has only field extension elements.
+        match evaluation_context {
+            TransitionEvaluationContext::Prover {
+                frame,
+                periodic_values: _periodic_values,
+                rap_challenges: _rap_challenges,
+            } => {
+                let first_step = frame.get_evaluation_step(0);
+                let second_step = frame.get_evaluation_step(1);
+
+                let a_sorted_0 = first_step.get_main_evaluation_element(0, 2);
+                let a_sorted_1 = second_step.get_main_evaluation_element(0, 2);
+                let v_sorted_0 = first_step.get_main_evaluation_element(0, 3);
+                let v_sorted_1 = second_step.get_main_evaluation_element(0, 3);
+                // (v'_{i+1} - v'_i) * (a'_{i+1} - a'_i - 1) = 0
+                let res = (v_sorted_1 - v_sorted_0)
+                    * (a_sorted_1 - a_sorted_0 - FieldElement::<F>::one());
+
+                // The eval always exists, except if the constraint idx were incorrectly defined.
+                if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) {
+                    *eval = res.to_extension();
+                }
+            }
+
+            TransitionEvaluationContext::Verifier {
+                frame,
+                periodic_values: _periodic_values,
+                rap_challenges: _rap_challenges,
+            } => {
+                let first_step = frame.get_evaluation_step(0);
+                let second_step = frame.get_evaluation_step(1);
+
+                let a_sorted_0 = first_step.get_main_evaluation_element(0, 2);
+                let a_sorted_1 = second_step.get_main_evaluation_element(0, 2);
+                let v_sorted_0 = first_step.get_main_evaluation_element(0, 3);
+                let v_sorted_1 = second_step.get_main_evaluation_element(0, 3);
+                // (v'_{i+1} - v'_i) * (a'_{i+1} - a'_i - 1) = 0
+                let res = (v_sorted_1 - v_sorted_0)
+                    * (a_sorted_1 - a_sorted_0 - FieldElement::<E>::one());
+
+                // The eval always exists, except if the constraint idx were incorrectly defined.
+                if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) {
+                    *eval = res;
+                }
+            }
+        }
+    }
+}
+/// Transition constraint that ensures that the sorted columns are a permutation of the original ones.
+/// We are using the LogUp construction described in:
+/// <https://eprint.iacr.org/2022/1530>.
+/// See also our post on the LogUp argument at blog.lambdaclass.com.
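+/// For challenges z and α sampled after the main trace is committed, the sorted columns are a
+/// permutation (with multiplicities m) of the original ones exactly when
+/// Σ 1/(z - (a_i + α·v_i)) = Σ m_i/(z - (a'_i + α·v'_i)),
+/// which is checked through the cumulative sum column s of the auxiliary trace.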
+#[derive(Clone)]
+struct PermutationConstraint<
+    F: IsSubFieldOf<E> + IsFFTField + Send + Sync,
+    E: IsField + Send + Sync,
+> {
+    phantom_f: PhantomData<F>,
+    phantom_e: PhantomData<E>,
+}
+
+impl<F, E> PermutationConstraint<F, E>
+where
+    F: IsSubFieldOf<E> + IsFFTField + Send + Sync,
+    E: IsField + Send + Sync,
+{
+    pub fn new() -> Self {
+        Self {
+            phantom_f: PhantomData::<F>,
+            phantom_e: PhantomData::<E>,
+        }
+    }
+}
+
+impl<F, E> TransitionConstraint<F, E> for PermutationConstraint<F, E>
+where
+    F: IsSubFieldOf<E> + IsFFTField + Send + Sync,
+    E: IsField + Send + Sync,
+{
+    fn degree(&self) -> usize {
+        3
+    }
+
+    fn constraint_idx(&self) -> usize {
+        2
+    }
+
+    fn end_exemptions(&self) -> usize {
+        1
+    }
+
+    fn evaluate(
+        &self,
+        evaluation_context: &TransitionEvaluationContext<F, E>,
+        transition_evaluations: &mut [FieldElement<E>],
+    ) {
+        // In both evaluation contexts, Prover and Verifier will evaluate the transition polynomial in the same way.
+        // The only difference is that the Prover's Frame has base field and field extension elements,
+        // while the Verifier's Frame has only field extension elements.
+        match evaluation_context {
+            TransitionEvaluationContext::Prover {
+                frame,
+                periodic_values: _periodic_values,
+                rap_challenges,
+            } => {
+                let first_step = frame.get_evaluation_step(0);
+                let second_step = frame.get_evaluation_step(1);
+
+                // Auxiliary frame elements
+                let s0 = first_step.get_aux_evaluation_element(0, 0);
+                let s1 = second_step.get_aux_evaluation_element(0, 0);
+
+                // Challenges
+                let z = &rap_challenges[0];
+                let alpha = &rap_challenges[1];
+
+                // Main frame elements
+                let a1 = second_step.get_main_evaluation_element(0, 0);
+                let v1 = second_step.get_main_evaluation_element(0, 1);
+                let a_sorted_1 = second_step.get_main_evaluation_element(0, 2);
+                let v_sorted_1 = second_step.get_main_evaluation_element(0, 3);
+                let m = second_step.get_main_evaluation_element(0, 4);
+
+                let unsorted_term = -(a1 + v1 * alpha) + z;
+                let sorted_term = -(a_sorted_1 + v_sorted_1 * alpha) + z;
+
+                // We are using the following LogUp equation:
+                // s1 = s0 + m / sorted_term - 1 / unsorted_term.
+                // Since constraints must be expressed without division, we multiply each term by sorted_term * unsorted_term:
+                let res = s0 * &unsorted_term * &sorted_term + m * &unsorted_term
+                    - &sorted_term
+                    - s1 * unsorted_term * sorted_term;
+
+                // The eval always exists, except if the constraint idx were incorrectly defined.
+                if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) {
+                    *eval = res;
+                }
+            }
+
+            TransitionEvaluationContext::Verifier {
+                frame,
+                periodic_values: _periodic_values,
+                rap_challenges,
+            } => {
+                let first_step = frame.get_evaluation_step(0);
+                let second_step = frame.get_evaluation_step(1);
+
+                // Auxiliary frame elements
+                let s0 = first_step.get_aux_evaluation_element(0, 0);
+                let s1 = second_step.get_aux_evaluation_element(0, 0);
+
+                // Challenges
+                let z = &rap_challenges[0];
+                let alpha = &rap_challenges[1];
+
+                // Main frame elements
+                let a1 = second_step.get_main_evaluation_element(0, 0);
+                let v1 = second_step.get_main_evaluation_element(0, 1);
+                let a_sorted_1 = second_step.get_main_evaluation_element(0, 2);
+                let v_sorted_1 = second_step.get_main_evaluation_element(0, 3);
+                let m = second_step.get_main_evaluation_element(0, 4);
+
+                let unsorted_term = z - (a1 + alpha * v1);
+                let sorted_term = z - (a_sorted_1 + alpha * v_sorted_1);
+
+                // We are using the following LogUp equation:
+                // s1 = s0 + m / sorted_term - 1 / unsorted_term.
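+                // Summed over the whole trace, the m / sorted_term fractions cancel the
+                // 1 / unsorted_term ones exactly when the sorted columns are a permutation
+                // (with multiplicities m) of the original ones, so the cumulative sum ends at zero.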
+                // Since constraints must be expressed without division, we multiply each term by sorted_term * unsorted_term:
+                let res = s0 * &unsorted_term * &sorted_term + m * &unsorted_term
+                    - &sorted_term
+                    - s1 * unsorted_term * sorted_term;
+
+                // The eval always exists, except if the constraint idx were incorrectly defined.
+                if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) {
+                    *eval = res;
+                }
+            }
+        }
+    }
+}
+
+/// AIR for a continuous read-only memory using the LogUp Lookup Argument.
+/// To accompany the understanding of this code you can read the corresponding post at blog.lambdaclass.com.
+pub struct LogReadOnlyRAP<F, E>
+where
+    F: IsFFTField + IsSubFieldOf<E> + Send + Sync,
+    E: IsField + Send + Sync,
+{
+    context: AirContext,
+    trace_length: usize,
+    pub_inputs: LogReadOnlyPublicInputs<F>,
+    transition_constraints: Vec<Box<dyn TransitionConstraint<F, E>>>,
+}
+
+#[derive(Clone, Debug)]
+pub struct LogReadOnlyPublicInputs<F>
+where
+    F: IsFFTField + Send + Sync,
+{
+    pub a0: FieldElement<F>,
+    pub v0: FieldElement<F>,
+    pub a_sorted_0: FieldElement<F>,
+    pub v_sorted_0: FieldElement<F>,
+    // The multiplicity of (a_sorted_0, v_sorted_0)
+    pub m0: FieldElement<F>,
+}
+
+impl<F, E> AIR for LogReadOnlyRAP<F, E>
+where
+    F: IsFFTField + IsSubFieldOf<E> + Send + Sync + 'static,
+    E: IsField + Send + Sync + 'static,
+    FieldElement<F>: ByteConversion,
+{
+    type Field = F;
+    type FieldExtension = E;
+    type PublicInputs = LogReadOnlyPublicInputs<F>;
+
+    const STEP_SIZE: usize = 1;
+
+    fn new(
+        trace_length: usize,
+        pub_inputs: &Self::PublicInputs,
+        proof_options: &ProofOptions,
+    ) -> Self {
+        let transition_constraints: Vec<
+            Box<dyn TransitionConstraint<Self::Field, Self::FieldExtension>>,
+        > = vec![
+            Box::new(ContinuityConstraint::new()),
+            Box::new(SingleValueConstraint::new()),
+            Box::new(PermutationConstraint::new()),
+        ];
+
+        let context = AirContext {
+            proof_options: proof_options.clone(),
+            trace_columns: 6,
+            transition_offsets: vec![0, 1],
+            num_transition_constraints: transition_constraints.len(),
+        };
+
+        Self {
+            context,
+            trace_length,
+            pub_inputs: pub_inputs.clone(),
+            transition_constraints,
+        }
+    }
+
+    fn build_auxiliary_trace(
+        &self,
+        trace: &mut TraceTable<Self::Field, Self::FieldExtension>,
+        challenges: &[FieldElement<E>],
+    ) where
+        Self::FieldExtension: IsFFTField,
+    {
+        // Main table
+        let main_segment_cols = trace.columns_main();
+        let a = &main_segment_cols[0];
+        let v = &main_segment_cols[1];
+        let a_sorted = &main_segment_cols[2];
+        let v_sorted = &main_segment_cols[3];
+        let m = &main_segment_cols[4];
+
+        // Challenges
+        let z = &challenges[0];
+        let alpha = &challenges[1];
+
+        let trace_len = trace.num_rows();
+        let mut aux_col = Vec::new();
+
+        // s_0 = m_0 / (z - (a'_0 + α * v'_0)) - 1 / (z - (a_0 + α * v_0))
+        let unsorted_term = (-(&a[0] + &v[0] * alpha) + z).inv().unwrap();
+        let sorted_term = (-(&a_sorted[0] + &v_sorted[0] * alpha) + z).inv().unwrap();
+        aux_col.push(&m[0] * sorted_term - unsorted_term);
+
+        // Apply the same equation given in the permutation transition constraint to the rest of the trace.
+        // s_{i+1} = s_i + m_{i+1} / (z - (a'_{i+1} + α * v'_{i+1})) - 1 / (z - (a_{i+1} + α * v_{i+1}))
+        for i in 0..trace_len - 1 {
+            let unsorted_term = (-(&a[i + 1] + &v[i + 1] * alpha) + z).inv().unwrap();
+            let sorted_term = (-(&a_sorted[i + 1] + &v_sorted[i + 1] * alpha) + z)
+                .inv()
+                .unwrap();
+            aux_col.push(&aux_col[i] + &m[i + 1] * sorted_term - unsorted_term);
+        }
+
+        for (i, aux_elem) in aux_col.iter().enumerate().take(trace.num_rows()) {
+            trace.set_aux(i, 0, aux_elem.clone())
+        }
+    }
+
+    fn build_rap_challenges(
+        &self,
+        transcript: &mut impl IsTranscript<E>,
+    ) -> Vec<FieldElement<E>> {
+        vec![
+            transcript.sample_field_element(),
+            transcript.sample_field_element(),
+        ]
+    }
+
+    fn trace_layout(&self) -> (usize, usize) {
+        (5, 1)
+    }
+
+    fn boundary_constraints(
+        &self,
+        rap_challenges: &[FieldElement<E>],
+    ) -> BoundaryConstraints<E> {
+        let a0 = &self.pub_inputs.a0;
+        let v0 = &self.pub_inputs.v0;
+        let a_sorted_0 = &self.pub_inputs.a_sorted_0;
+        let v_sorted_0 = &self.pub_inputs.v_sorted_0;
+        let m0 = &self.pub_inputs.m0;
+        let z = &rap_challenges[0];
+        let alpha = &rap_challenges[1];
+
+        // Main boundary constraints
+        let c1 = BoundaryConstraint::new_main(0, 0, a0.clone().to_extension());
+        let c2 = BoundaryConstraint::new_main(1, 0, v0.clone().to_extension());
+        let c3 = BoundaryConstraint::new_main(2, 0, a_sorted_0.clone().to_extension());
+        let c4 = BoundaryConstraint::new_main(3, 0, v_sorted_0.clone().to_extension());
+        let c5 = BoundaryConstraint::new_main(4, 0, m0.clone().to_extension());
+
+        // Auxiliary boundary constraints
+        let unsorted_term = (-(a0 + v0 * alpha) + z).inv().unwrap();
+        let sorted_term = (-(a_sorted_0 + v_sorted_0 * alpha) + z).inv().unwrap();
+        let p0_value = m0 * sorted_term - unsorted_term;
+
+        let c_aux1 = BoundaryConstraint::new_aux(0, 0, p0_value);
+        let c_aux2 = BoundaryConstraint::new_aux(
+            0,
+            self.trace_length - 1,
+            FieldElement::<E>::zero(),
+        );
+
+        BoundaryConstraints::from_constraints(vec![c1, c2, c3, c4, c5, c_aux1, c_aux2])
+    }
+
+    fn transition_constraints(
+        &self,
+    ) -> &Vec<Box<dyn TransitionConstraint<Self::Field, Self::FieldExtension>>> {
+        &self.transition_constraints
+    }
+
+    fn context(&self) -> &AirContext {
+        &self.context
+    }
+
+    // The prover uses this function to define the number of parts of the composition polynomial.
+    // The number of parts will be: composition_poly_degree_bound() / trace_length().
+    // Since we have a transition constraint of degree 3, we need the bound to be two times the trace length.
+    fn composition_poly_degree_bound(&self) -> usize {
+        self.trace_length() * 2
+    }
+
+    fn trace_length(&self) -> usize {
+        self.trace_length
+    }
+
+    fn pub_inputs(&self) -> &Self::PublicInputs {
+        &self.pub_inputs
+    }
+}
+
+/// Returns a trace table with an auxiliary column full of zeros (that will later be replaced
+/// with the correct values by the AIR) and the following five main columns:
+/// the original addresses and values, the sorted addresses and values without duplicates, and
+/// the multiplicities of each sorted address and value in the original ones (i.e. how many times
+/// they appear in the original address and value columns).
+pub fn read_only_logup_trace<
+    F: IsPrimeField + IsFFTField + IsSubFieldOf<E> + Send + Sync,
+    E: IsField + Send + Sync,
+>(
+    addresses: Vec<FieldElement<F>>,
+    values: Vec<FieldElement<F>>,
+) -> TraceTable<F, E> {
+    let mut address_value_pairs: Vec<_> = addresses.iter().zip(values.iter()).collect();
+    address_value_pairs.sort_by_key(|(addr, _)| addr.representative());
+
+    let mut multiplicities = Vec::new();
+    let mut sorted_addresses = Vec::new();
+    let mut sorted_values = Vec::new();
+
+    for (key, group) in &address_value_pairs.into_iter().group_by(|&(a, v)| (a, v)) {
+        let group_vec: Vec<_> = group.collect();
+        multiplicities.push(FieldElement::<F>::from(group_vec.len() as u64));
+        sorted_addresses.push(key.0.clone());
+        sorted_values.push(key.1.clone());
+    }
+
+    // We resize the sorted addresses and values with the last value of each one so they have the
+    // same number of rows as the original addresses and values. However, their multiplicity should be zero.
+    sorted_addresses.resize(addresses.len(), sorted_addresses.last().unwrap().clone());
+    sorted_values.resize(addresses.len(), sorted_values.last().unwrap().clone());
+    multiplicities.resize(addresses.len(), FieldElement::<F>::zero());
+
+    let main_columns = vec![
+        addresses.clone(),
+        values.clone(),
+        sorted_addresses,
+        sorted_values,
+        multiplicities,
+    ];
+
+    // Create a vector of zeros with the same length as the main columns.
+    let zero_vec = vec![FieldElement::<E>::zero(); main_columns[0].len()];
+    TraceTable::from_columns(main_columns, vec![zero_vec], 1)
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use lambdaworks_math::field::fields::{
+        fft_friendly::{
+            babybear::Babybear31PrimeField, quartic_babybear::Degree4BabyBearExtensionField,
+        },
+        u64_prime_field::{F17, FE17},
+    };
+
+    #[test]
+    fn test_logup_trace_construction() {
+        let address_col = vec![
+            FE17::from(3),
+            FE17::from(7),
+            FE17::from(2),
+            FE17::from(8),
+            FE17::from(4),
+            FE17::from(5),
+            FE17::from(1),
+            FE17::from(6),
+        ];
+        let value_col = vec![
+            FE17::from(30),
+            FE17::from(70),
+            FE17::from(20),
+            FE17::from(80),
+            FE17::from(40),
+            FE17::from(50),
+            FE17::from(10),
+            FE17::from(60),
+        ];
+
+        let logup_trace: TraceTable<F17, F17> = read_only_logup_trace(address_col, value_col);
+
+        let expected_sorted_addresses = vec![
+            FE17::from(1),
+            FE17::from(2),
+            FE17::from(3),
+            FE17::from(4),
+            FE17::from(5),
+            FE17::from(6),
+            FE17::from(7),
+            FE17::from(8),
+        ];
+        let expected_sorted_values = vec![
+            FE17::from(10),
+            FE17::from(20),
+            FE17::from(30),
+            FE17::from(40),
+            FE17::from(50),
+            FE17::from(60),
+            FE17::from(70),
+            FE17::from(80),
+        ];
+        let expected_multiplicities = vec![
+            FE17::one(),
+            FE17::one(),
+            FE17::one(),
+            FE17::one(),
+            FE17::one(),
+            FE17::one(),
+            FE17::one(),
+            FE17::one(),
+        ];
+        assert_eq!(logup_trace.columns_main()[2], expected_sorted_addresses);
+        assert_eq!(logup_trace.columns_main()[3], expected_sorted_values);
+        assert_eq!(logup_trace.columns_main()[4], expected_multiplicities);
+    }
+
+    #[test]
+    fn test_logup_trace_construction_2() {
+        let address_col = vec![
+            FieldElement::<Babybear31PrimeField>::from(3), // a0
+            FieldElement::<Babybear31PrimeField>::from(2), // a1
+            FieldElement::<Babybear31PrimeField>::from(2), // a2
+            FieldElement::<Babybear31PrimeField>::from(3), // a3
+            FieldElement::<Babybear31PrimeField>::from(4), // a4
+            FieldElement::<Babybear31PrimeField>::from(5), // a5
+            FieldElement::<Babybear31PrimeField>::from(1), // a6
+            FieldElement::<Babybear31PrimeField>::from(3), // a7
+        ];
+        let value_col = vec![
+            FieldElement::<Babybear31PrimeField>::from(30), // v0
+            FieldElement::<Babybear31PrimeField>::from(20), // v1
+            FieldElement::<Babybear31PrimeField>::from(20), // v2
+            FieldElement::<Babybear31PrimeField>::from(30), // v3
+            FieldElement::<Babybear31PrimeField>::from(40), // v4
+            FieldElement::<Babybear31PrimeField>::from(50), // v5
+            FieldElement::<Babybear31PrimeField>::from(10), // v6
+            FieldElement::<Babybear31PrimeField>::from(30), // v7
+        ];
+
+        let sorted_address_col = vec![
+            FieldElement::<Babybear31PrimeField>::from(1), // a0
+            FieldElement::<Babybear31PrimeField>::from(2), // a1
+            FieldElement::<Babybear31PrimeField>::from(3), // a2
+            FieldElement::<Babybear31PrimeField>::from(4), // a3
+            FieldElement::<Babybear31PrimeField>::from(5), // a4
+            FieldElement::<Babybear31PrimeField>::from(5), // a5
+            FieldElement::<Babybear31PrimeField>::from(5), // a6
+            FieldElement::<Babybear31PrimeField>::from(5), // a7
+        ];
+        let sorted_value_col = vec![
+            FieldElement::<Babybear31PrimeField>::from(10), // v0
+            FieldElement::<Babybear31PrimeField>::from(20), // v1
+            FieldElement::<Babybear31PrimeField>::from(30), // v2
+            FieldElement::<Babybear31PrimeField>::from(40), // v3
+            FieldElement::<Babybear31PrimeField>::from(50), // v4
+            FieldElement::<Babybear31PrimeField>::from(50), // v5
+            FieldElement::<Babybear31PrimeField>::from(50), // v6
+            FieldElement::<Babybear31PrimeField>::from(50), // v7
+        ];
+
+        let multiplicity_col = vec![
+            FieldElement::<Babybear31PrimeField>::from(1), // m0
+            FieldElement::<Babybear31PrimeField>::from(2), // m1
+            FieldElement::<Babybear31PrimeField>::from(3), // m2
+            FieldElement::<Babybear31PrimeField>::from(1), // m3
+            FieldElement::<Babybear31PrimeField>::from(1), // m4
+            FieldElement::<Babybear31PrimeField>::from(0), // m5
+            FieldElement::<Babybear31PrimeField>::from(0), // m6
+            FieldElement::<Babybear31PrimeField>::from(0), // m7
+        ];
+        let logup_trace: TraceTable<Babybear31PrimeField, Degree4BabyBearExtensionField> =
+            read_only_logup_trace(address_col, value_col);
+
+        assert_eq!(logup_trace.columns_main()[2], sorted_address_col);
+        assert_eq!(logup_trace.columns_main()[3], sorted_value_col);
+        assert_eq!(logup_trace.columns_main()[4], multiplicity_col);
+    }
+}
diff --git a/provers/stark/src/tests/integration_tests.rs b/provers/stark/src/tests/integration_tests.rs
index 7513caad0..c16e8d6ff 100644
--- a/provers/stark/src/tests/integration_tests.rs
+++ b/provers/stark/src/tests/integration_tests.rs
@@ -1,7 +1,14 @@
+#[cfg(not(feature = "metal"))]
+use lambdaworks_crypto::fiat_shamir::default_transcript::DefaultTranscript;
 use lambdaworks_math::field::{
     element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
 };
 
+#[cfg(not(feature = "metal"))]
+use lambdaworks_math::field::fields::fft_friendly::{
+    babybear::Babybear31PrimeField, quartic_babybear::Degree4BabyBearExtensionField,
+};
+
 use crate::{
     examples::{
         bit_flags::{self, BitFlagsAIR},
@@ -21,6 +28,11 @@ use crate::{
     Felt252,
 };
 
+#[cfg(not(feature = "metal"))]
+use crate::examples::read_only_memory_logup::{
+    read_only_logup_trace, LogReadOnlyPublicInputs, LogReadOnlyRAP,
+};
+
 #[test_log::test]
 fn test_prove_fib() {
     let mut trace = simple_fibonacci::fibonacci_trace([Felt252::from(1), Felt252::from(1)], 8);
@@ -294,3 +306,54 @@ fn test_prove_read_only_memory() {
         StoneProverTranscript::new(&[])
     ));
 }
+
+#[cfg(not(feature = "metal"))]
+#[test_log::test]
+fn test_prove_log_read_only_memory() {
+    let address_col = vec![
+        FieldElement::<Babybear31PrimeField>::from(3), // a0
+        FieldElement::<Babybear31PrimeField>::from(2), // a1
+        FieldElement::<Babybear31PrimeField>::from(2), // a2
+        FieldElement::<Babybear31PrimeField>::from(3), // a3
+        FieldElement::<Babybear31PrimeField>::from(4), // a4
+        FieldElement::<Babybear31PrimeField>::from(5), // a5
+        FieldElement::<Babybear31PrimeField>::from(1), // a6
+        FieldElement::<Babybear31PrimeField>::from(3), // a7
+    ];
+    let value_col = vec![
+        FieldElement::<Babybear31PrimeField>::from(30), // v0
+        FieldElement::<Babybear31PrimeField>::from(20), // v1
+        FieldElement::<Babybear31PrimeField>::from(20), // v2
+        FieldElement::<Babybear31PrimeField>::from(30), // v3
+        FieldElement::<Babybear31PrimeField>::from(40), // v4
+        FieldElement::<Babybear31PrimeField>::from(50), // v5
+        FieldElement::<Babybear31PrimeField>::from(10), // v6
+        FieldElement::<Babybear31PrimeField>::from(30), // v7
+    ];
+
+    let pub_inputs = LogReadOnlyPublicInputs {
+        a0: FieldElement::<Babybear31PrimeField>::from(3),
+        v0: FieldElement::<Babybear31PrimeField>::from(30),
+        a_sorted_0: FieldElement::<Babybear31PrimeField>::from(1),
+        v_sorted_0: FieldElement::<Babybear31PrimeField>::from(10),
+        m0: FieldElement::<Babybear31PrimeField>::from(1),
+    };
+    let mut trace = read_only_logup_trace(address_col, value_col);
+    let proof_options = ProofOptions::default_test_options();
+    let proof =
+        Prover::<LogReadOnlyRAP<Babybear31PrimeField, Degree4BabyBearExtensionField>>::prove(
+            &mut trace,
+            &pub_inputs,
+            &proof_options,
+            DefaultTranscript::<Degree4BabyBearExtensionField>::new(&[]),
+        )
+        .unwrap();
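+    // The verifier replays the Fiat-Shamir interaction with an identically seeded transcript.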
+    assert!(Verifier::<
+        LogReadOnlyRAP<Babybear31PrimeField, Degree4BabyBearExtensionField>,
+    >::verify(
+        &proof,
+        &pub_inputs,
+        &proof_options,
+        DefaultTranscript::<Degree4BabyBearExtensionField>::new(&[]),
+    ));
+}