diff --git a/crypto/src/merkle_tree/backends/field_element.rs b/crypto/src/merkle_tree/backends/field_element.rs
index c6d00245a..f5c034b97 100644
--- a/crypto/src/merkle_tree/backends/field_element.rs
+++ b/crypto/src/merkle_tree/backends/field_element.rs
@@ -1,7 +1,7 @@
 use crate::merkle_tree::traits::IsMerkleTreeBackend;
 use lambdaworks_math::{
     field::{element::FieldElement, traits::IsField},
-    traits::ByteConversion,
+    traits::Serializable,
 };
 use sha3::{
     digest::{generic_array::GenericArray, OutputSizeUser},
@@ -28,7 +28,7 @@ impl<F, D: Digest, const NUM_BYTES: usize> IsMerkleTreeBackend for FieldElementBackend<F, D, NUM_BYTES>
 where
     F: IsField,
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
     [u8; NUM_BYTES]: From<GenericArray<u8, <D as OutputSizeUser>::OutputSize>>,
 {
     type Node = [u8; NUM_BYTES];
 
@@ -36,7 +36,7 @@ where
     fn hash_data(&self, input: &FieldElement<F>) -> [u8; NUM_BYTES] {
         let mut hasher = D::new();
-        hasher.update(input.to_bytes_be());
+        hasher.update(input.serialize());
         hasher.finalize().into()
     }
diff --git a/crypto/src/merkle_tree/backends/field_element_vector.rs b/crypto/src/merkle_tree/backends/field_element_vector.rs
index c22ce6318..7d748f945 100644
--- a/crypto/src/merkle_tree/backends/field_element_vector.rs
+++ b/crypto/src/merkle_tree/backends/field_element_vector.rs
@@ -3,7 +3,7 @@ use std::marker::PhantomData;
 use crate::merkle_tree::traits::IsMerkleTreeBackend;
 use lambdaworks_math::{
     field::{element::FieldElement, traits::IsField},
-    traits::ByteConversion,
+    traits::Serializable,
 };
 use sha3::{
     digest::{generic_array::GenericArray, OutputSizeUser},
@@ -29,7 +29,7 @@ impl<F, D: Digest, const NUM_BYTES: usize> IsMerkleTreeBackend for FieldElementVectorBackend<F, D, NUM_BYTES>
 where
     F: IsField,
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
     [u8; NUM_BYTES]: From<GenericArray<u8, <D as OutputSizeUser>::OutputSize>>,
 {
     type Node = [u8; NUM_BYTES];
 
@@ -38,7 +38,7 @@ where
     fn hash_data(&self, input: &Vec<FieldElement<F>>) -> [u8; NUM_BYTES] {
         let mut hasher = D::new();
         for element in input.iter() {
-            hasher.update(element.to_bytes_be());
+            hasher.update(element.serialize());
         }
         let mut result_hash = [0_u8; NUM_BYTES];
         result_hash.copy_from_slice(&hasher.finalize());
diff --git a/math/src/field/fields/montgomery_backed_prime_fields.rs b/math/src/field/fields/montgomery_backed_prime_fields.rs
index 674914991..91d5600e6 100644
--- a/math/src/field/fields/montgomery_backed_prime_fields.rs
+++ b/math/src/field/fields/montgomery_backed_prime_fields.rs
@@ -1,7 +1,7 @@
 use crate::field::element::FieldElement;
 use crate::field::errors::FieldError;
 use crate::field::traits::IsPrimeField;
-use crate::traits::ByteConversion;
+use crate::traits::{ByteConversion, Serializable};
 use crate::{
     field::traits::IsField, unsigned_integer::element::UnsignedInteger,
     unsigned_integer::montgomery::MontgomeryAlgorithms,
@@ -350,6 +350,16 @@ where
     }
 }
 
+impl<M, const NUM_LIMBS: usize> Serializable
+    for FieldElement<MontgomeryBackendPrimeField<M, NUM_LIMBS>>
+where
+    M: IsModulus<UnsignedInteger<NUM_LIMBS>> + Clone + Debug,
+{
+    #[cfg(feature = "std")]
+    fn serialize(&self) -> Vec<u8> {
+        self.value().to_bytes_be()
+    }
+}
 #[cfg(test)]
 mod tests_u384_prime_fields {
     use crate::field::element::FieldElement;
diff --git a/math/src/traits.rs b/math/src/traits.rs
index e97704f5a..b5ece2d19 100644
--- a/math/src/traits.rs
+++ b/math/src/traits.rs
@@ -29,9 +29,9 @@ pub trait ByteConversion {
 
 /// Serialize function without args
 /// Used for serialization when formatting options are not relevant
-#[cfg(feature = "std")]
 pub trait Serializable {
     /// Default serialize without args
+    #[cfg(feature = "std")]
     fn serialize(&self) -> Vec<u8>;
 }
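Note on the four hunks above: they swap the `ByteConversion` bound for the narrower `Serializable` trait wherever the code only needs a byte encoding and never decodes. A minimal sketch of how the two traits relate, using a hypothetical `Wrapper` type as a stand-in for lambdaworks' Montgomery-backed `FieldElement` (the trait bodies are simplified, not the full API):

```rust
/// Simplified stand-in: the real `ByteConversion` also declares
/// `from_bytes_be`, `to_bytes_le`, and friends.
pub trait ByteConversion {
    fn to_bytes_be(&self) -> Vec<u8>;
}

/// Mirrors the patched trait in `math/src/traits.rs`: serialization
/// with no formatting arguments.
pub trait Serializable {
    fn serialize(&self) -> Vec<u8>;
}

/// Hypothetical element type, standing in for
/// `FieldElement<MontgomeryBackendPrimeField<M, NUM_LIMBS>>`.
pub struct Wrapper(u64);

impl ByteConversion for Wrapper {
    fn to_bytes_be(&self) -> Vec<u8> {
        self.0.to_be_bytes().to_vec()
    }
}

/// As in the new impl in `montgomery_backed_prime_fields.rs`,
/// `serialize` simply forwards to the big-endian byte encoding
/// of the canonical value.
impl Serializable for Wrapper {
    fn serialize(&self) -> Vec<u8> {
        self.to_bytes_be()
    }
}

fn main() {
    let x = Wrapper(258);
    assert_eq!(x.serialize(), x.to_bytes_be());
    assert_eq!(x.serialize(), vec![0, 0, 0, 0, 0, 0, 1, 2]);
}
```

Moving `#[cfg(feature = "std")]` from the trait onto the `serialize` method keeps the trait itself visible to no-std builds, presumably so that downstream bounds like `FieldElement<F>: Serializable` still compile without `std`.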
diff --git a/provers/stark/src/constraints/evaluator.rs b/provers/stark/src/constraints/evaluator.rs
index 18c0fc6f4..9c07823cc 100644
--- a/provers/stark/src/constraints/evaluator.rs
+++ b/provers/stark/src/constraints/evaluator.rs
@@ -3,7 +3,7 @@ use lambdaworks_math::{
     fft::cpu::roots_of_unity::get_powers_of_primitive_root_coset,
     field::{element::FieldElement, traits::IsFFTField},
     polynomial::Polynomial,
-    traits::ByteConversion,
+    traits::Serializable,
 };
 
 #[cfg(feature = "parallel")]
@@ -44,7 +44,7 @@ impl<F: IsFFTField, A: AIR<Field = F>> ConstraintEvaluator<F, A> {
         rap_challenges: &A::RAPChallenges,
     ) -> ConstraintEvaluationTable<F>
     where
-        FieldElement<F>: ByteConversion + Send + Sync,
+        FieldElement<F>: Serializable + Send + Sync,
         A: Send + Sync,
         A::RAPChallenges: Send + Sync,
     {
diff --git a/provers/stark/src/domain.rs b/provers/stark/src/domain.rs
index d35b2f60e..a287d9ad1 100644
--- a/provers/stark/src/domain.rs
+++ b/provers/stark/src/domain.rs
@@ -23,7 +23,7 @@ impl<F: IsFFTField> Domain<F> {
     {
         // Initial definitions
         let blowup_factor = air.options().blowup_factor as usize;
-        let coset_offset = FieldElement::<F>::from(air.options().coset_offset);
+        let coset_offset = FieldElement::from(air.options().coset_offset);
         let interpolation_domain_size = air.trace_length();
         let root_order = air.trace_length().trailing_zeros();
         // * Generate Coset
@@ -31,7 +31,7 @@
         let trace_roots_of_unity = get_powers_of_primitive_root_coset(
             root_order as u64,
             interpolation_domain_size,
-            &FieldElement::<F>::one(),
+            &FieldElement::one(),
         )
         .unwrap();
diff --git a/provers/stark/src/fri/fri_commitment.rs b/provers/stark/src/fri/fri_commitment.rs
index d6f974004..60df1b6e5 100644
--- a/provers/stark/src/fri/fri_commitment.rs
+++ b/provers/stark/src/fri/fri_commitment.rs
@@ -5,7 +5,7 @@ use lambdaworks_math::{
         traits::{IsFFTField, IsField},
     },
     polynomial::Polynomial,
-    traits::ByteConversion,
+    traits::Serializable,
 };
 
 use crate::config::FriMerkleTree;
@@ -14,7 +14,7 @@
 pub struct FriLayer<F>
 where
     F: IsField,
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
 {
     pub evaluation: Vec<FieldElement<F>>,
     pub merkle_tree: FriMerkleTree<F>,
@@ -25,7 +25,7 @@
 impl<F> FriLayer<F>
 where
     F: IsField + IsFFTField,
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
 {
     pub fn new(
         poly: &Polynomial<FieldElement<F>>,
diff --git a/provers/stark/src/fri/mod.rs b/provers/stark/src/fri/mod.rs
index 2feb10b95..1ee9dde90 100644
--- a/provers/stark/src/fri/mod.rs
+++ b/provers/stark/src/fri/mod.rs
@@ -3,7 +3,7 @@ pub mod fri_decommit;
 mod fri_functions;
 
 use lambdaworks_math::field::traits::{IsFFTField, IsField};
-use lambdaworks_math::traits::ByteConversion;
+use lambdaworks_math::traits::Serializable;
 pub use lambdaworks_math::{
     field::{element::FieldElement, fields::u64_prime_field::U64PrimeField},
     polynomial::Polynomial,
@@ -25,7 +25,7 @@ pub fn fri_commit_phase<F: IsField + IsFFTField, T: IsStarkTranscript<F>>(
     domain_size: usize,
 ) -> (FieldElement<F>, Vec<FriLayer<F>>)
 where
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
 {
     let mut domain_size = domain_size;
 
@@ -80,7 +80,7 @@ pub fn fri_query_phase<F, A>(
 where
     F: IsFFTField,
     A: AIR<Field = F>,
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
 {
     if !fri_layers.is_empty() {
         let number_of_queries = air.options().fri_number_of_queries;
diff --git a/provers/stark/src/prover.rs b/provers/stark/src/prover.rs
index 130a14852..d177621af 100644
--- a/provers/stark/src/prover.rs
+++ b/provers/stark/src/prover.rs
@@ -1,11 +1,13 @@
 #[cfg(feature = "instruments")]
 use std::time::Instant;
 
+use lambdaworks_crypto::merkle_tree::proof::Proof;
+use lambdaworks_math::fft::cpu::bit_reversing::in_place_bit_reverse_permute;
 use lambdaworks_math::fft::{errors::FFTError, polynomial::FFTPoly};
+use lambdaworks_math::traits::Serializable;
 use lambdaworks_math::{
     field::{element::FieldElement, traits::IsFFTField},
     polynomial::Polynomial,
-    traits::ByteConversion,
 };
 use log::info;
 
@@ -38,7 +40,7 @@
 struct Round1<F, A>
 where
     F: IsFFTField,
     A: AIR<Field = F>,
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
 {
     trace_polys: Vec<Polynomial<FieldElement<F>>>,
     lde_trace: TraceTable<F>,
@@ -50,7 +52,7 @@
 struct Round2<F>
 where
     F: IsFFTField,
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
 {
     composition_poly_even: Polynomial<FieldElement<F>>,
     lde_composition_poly_even_evaluations: Vec<FieldElement<F>>,
@@ -77,7 +79,7 @@ struct Round4 {
 fn batch_commit<F>(vectors: &[Vec<FieldElement<F>>]) -> (BatchedMerkleTree<F>, Commitment)
 where
     F: IsFFTField,
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
 {
     let tree = BatchedMerkleTree::<F>::build(vectors);
     let commitment = tree.root;
@@ -94,7 +96,6 @@ where
     F: IsFFTField,
     Polynomial<FieldElement<F>>: FFTPoly<F>,
 {
-    // Evaluate those polynomials t_j on the large domain D_LDE.
     let evaluations = p.evaluate_offset_fft(blowup_factor, Some(domain_size), offset)?;
     let step = evaluations.len() / (domain_size * blowup_factor);
     match step {
@@ -103,6 +104,47 @@
     }
 }
 
+fn apply_permutation<F: IsField>(vector: &mut Vec<FieldElement<F>>, permutation: &[usize]) {
+    assert_eq!(
+        vector.len(),
+        permutation.len(),
+        "Vector and permutation must have the same length"
+    );
+
+    let mut temp = Vec::with_capacity(vector.len());
+    for &index in permutation {
+        temp.push(vector[index].clone());
+    }
+
+    vector.clear();
+    vector.extend(temp);
+}
+
+/// Returns the permutation that converts the lambdaworks ordering of rows to the one used by the Stone prover.
+pub fn get_stone_prover_domain_permutation(domain_size: usize, blowup_factor: usize) -> Vec<usize> {
+    let mut permutation = Vec::new();
+    let n = domain_size;
+
+    let mut indices: Vec<usize> = (0..blowup_factor).collect();
+    in_place_bit_reverse_permute(&mut indices);
+
+    for i in indices.iter() {
+        for j in 0..n {
+            permutation.push(i + j * blowup_factor)
+        }
+    }
+
+    for coset_indices in permutation.chunks_mut(n) {
+        let mut temp = coset_indices.to_owned();
+        in_place_bit_reverse_permute(&mut temp);
+        for (j, elem) in coset_indices.iter_mut().enumerate() {
+            *elem = temp[j];
+        }
+    }
+
+    permutation.to_vec()
+}
+
 #[allow(clippy::type_complexity)]
 fn interpolate_and_commit<T, F>(
     trace: &TraceTable<F>,
@@ -116,15 +158,24 @@
 )
 where
     F: IsFFTField,
-    FieldElement<F>: ByteConversion + Send + Sync,
+    FieldElement<F>: Serializable + Send + Sync,
 {
     let trace_polys = trace.compute_trace_polys();
 
     // Evaluate those polynomials t_j on the large domain D_LDE.
     let lde_trace_evaluations = compute_lde_trace_evaluations(&trace_polys, domain);
 
+    let permutation =
+        get_stone_prover_domain_permutation(domain.interpolation_domain_size, domain.blowup_factor);
+
+    let mut lde_trace_permuted = lde_trace_evaluations.clone();
+
+    for col in lde_trace_permuted.iter_mut() {
+        apply_permutation(col, &permutation);
+    }
+
     // Compute commitments [t_j].
-    let lde_trace = TraceTable::new_from_cols(&lde_trace_evaluations);
+    let lde_trace = TraceTable::new_from_cols(&lde_trace_permuted);
     let (lde_trace_merkle_tree, lde_trace_merkle_root) = batch_commit(&lde_trace.rows());
 
     // >>>> Send commitments: [tⱼ]
@@ -171,7 +222,7 @@ fn round_1_randomized_air_with_preprocessing<F: IsFFTField, A: AIR<Field = F>>(
     transcript: &mut impl IsStarkTranscript<F>,
 ) -> Result<Round1<F, A>, ProvingError>
 where
-    FieldElement<F>: ByteConversion + Send + Sync,
+    FieldElement<F>: Serializable + Send + Sync,
 {
     let (mut trace_polys, mut evaluations, main_merkle_tree, main_merkle_root) =
         interpolate_and_commit(main_trace, domain, transcript);
@@ -214,7 +265,7 @@ where
     F: IsFFTField,
     A: AIR<Field = F> + Send + Sync,
     A::RAPChallenges: Send + Sync,
-    FieldElement<F>: ByteConversion + Send + Sync,
+    FieldElement<F>: Serializable + Send + Sync,
 {
     // Create evaluation table
     let evaluator = ConstraintEvaluator::new(air, &round_1_result.rap_challenges);
@@ -273,7 +324,7 @@ fn round_3_evaluate_polynomials_in_out_of_domain_element<F: IsFFTField, A: AIR<Field = F>>(
     z: &FieldElement<F>,
 ) -> Round3<F>
 where
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
 {
     let z_squared = z.square();
@@ -317,7 +368,7 @@ fn round_4_compute_and_run_fri_on_the_deep_composition_polynomial<
     transcript: &mut impl IsStarkTranscript<F>,
 ) -> Round4<F>
 where
-    FieldElement<F>: ByteConversion + Send + Sync,
+    FieldElement<F>: Serializable + Send + Sync,
 {
     let coset_offset_u64 = air.context().proof_options.coset_offset;
     let coset_offset = FieldElement::<F>::from(coset_offset_u64);
@@ -399,7 +450,7 @@ fn compute_deep_composition_poly<A, F>(
 where
     A: AIR<Field = F>,
     F: IsFFTField,
-    FieldElement<F>: ByteConversion + Send + Sync,
+    FieldElement<F>: Serializable + Send + Sync,
 {
     // Compute composition polynomial terms of the deep composition polynomial.
     let h_1 = &round_2_result.composition_poly_even;
@@ -480,7 +531,7 @@ fn compute_trace_term<F>(
 ) -> Polynomial<FieldElement<F>>
 where
     F: IsFFTField,
-    FieldElement<F>: ByteConversion + Send + Sync,
+    FieldElement<F>: Serializable + Send + Sync,
 {
     let i_times_trace_frame_evaluation = i * trace_frame_length;
     let iter_trace_gammas = trace_terms_gammas
@@ -513,8 +564,10 @@ fn open_deep_composition_poly<F: IsFFTField, A: AIR<Field = F>>(
     indexes_to_open: &[usize], // list of iotas
 ) -> Vec<DeepPolynomialOpenings<F>>
 where
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
 {
+    let permutation =
+        get_stone_prover_domain_permutation(domain.interpolation_domain_size, domain.blowup_factor);
     indexes_to_open
         .iter()
         .map(|index_to_open| {
@@ -533,18 +586,19 @@
             let lde_composition_poly_odd_evaluation =
                 round_2_result.lde_composition_poly_odd_evaluations[index].clone();
 
+            let lde_trace_evaluations = round_1_result.lde_trace.get_row(index).to_vec();
+
+            let index = permutation[index];
             // Trace polynomials openings
             #[cfg(feature = "parallel")]
             let merkle_trees_iter = round_1_result.lde_trace_merkle_trees.par_iter();
             #[cfg(not(feature = "parallel"))]
             let merkle_trees_iter = round_1_result.lde_trace_merkle_trees.iter();
 
-            let lde_trace_merkle_proofs = merkle_trees_iter
+            let lde_trace_merkle_proofs: Vec<Proof<Commitment>> = merkle_trees_iter
                 .map(|tree| tree.get_proof_by_pos(index).unwrap())
                 .collect();
 
-            let lde_trace_evaluations = round_1_result.lde_trace.get_row(index).to_vec();
-
             DeepPolynomialOpenings {
                 lde_composition_poly_proof,
                 lde_composition_poly_even_evaluation,
@@ -567,7 +621,7 @@ where
     F: IsFFTField,
     A: AIR<Field = F> + Send + Sync,
     A::RAPChallenges: Send + Sync,
-    FieldElement<F>: ByteConversion + Send + Sync,
+    FieldElement<F>: Serializable + Send + Sync,
 {
     info!("Started proof generation...");
     #[cfg(feature = "instruments")]
@@ -621,16 +675,22 @@
     #[cfg(feature = "instruments")]
     let timer2 = Instant::now();
 
-    // <<<< Receive challenges: 𝛽_j^B
-    let boundary_coefficients = batch_sample_challenges(
-        air.boundary_constraints(&round_1_result.rap_challenges)
-            .constraints
-            .len(),
-        &mut transcript,
-    );
-    // <<<< Receive challenges: 𝛽_j^T
-    let transition_coefficients =
-        batch_sample_challenges(air.context().num_transition_constraints, &mut transcript);
+    // <<<< Receive challenge: 𝛽
+    let beta = transcript.sample_field_element();
+    let num_boundary_constraints = air
+        .boundary_constraints(&round_1_result.rap_challenges)
+        .constraints
+        .len();
+
+    let num_transition_constraints = air.context().num_transition_constraints;
+
+    let mut coefficients: Vec<_> = (1..num_boundary_constraints + num_transition_constraints + 1)
+        .map(|i| beta.pow(i))
+        .collect();
+
+    let transition_coefficients: Vec<_> =
+        coefficients.drain(..num_transition_constraints).collect();
+    let boundary_coefficients = coefficients;
 
     let round_2_result = round_2_compute_composition_polynomial(
         &air,
@@ -767,9 +827,15 @@ where
 
 #[cfg(test)]
 mod tests {
+    use std::num::ParseIntError;
+
     use crate::{
-        examples::simple_fibonacci::{self, FibonacciPublicInputs},
+        examples::{
+            fibonacci_2_cols_shifted::{self, Fibonacci2ColsShifted},
+            simple_fibonacci::{self, FibonacciPublicInputs},
+        },
         proof::options::ProofOptions,
+        transcript::StoneProverTranscript,
         Felt252,
     };
@@ -878,4 +944,76 @@ mod tests {
            assert_eq!(*eval, poly.evaluate(&(offset * primitive_root.pow(i))));
        }
    }
+
+    pub fn decode_hex(s: &str) -> Result<Vec<u8>, ParseIntError> {
+        (0..s.len())
+            .step_by(2)
+            .map(|i| u8::from_str_radix(&s[i..i + 2], 16))
+            .collect()
+    }
+
+    #[test]
+    fn test_trace_commitment_is_compatible_with_stone_prover_1() {
+        let trace = fibonacci_2_cols_shifted::compute_trace(FieldElement::one(), 4);
+
+        let claimed_index = 3;
+        let claimed_value = trace.get_row(claimed_index)[0];
+        let mut proof_options = ProofOptions::default_test_options();
+        proof_options.blowup_factor = 4;
+        proof_options.coset_offset = 3;
+
+        let pub_inputs = fibonacci_2_cols_shifted::PublicInputs {
+            claimed_value,
+            claimed_index,
+        };
+
+        let transcript_init_seed = [0xca, 0xfe, 0xca, 0xfe];
+
+        let air = Fibonacci2ColsShifted::new(trace.n_rows(), &pub_inputs, &proof_options);
+        let domain = Domain::new(&air);
+
+        let (_, _, _, trace_commitment) = interpolate_and_commit(
+            &trace,
+            &domain,
+            &mut StoneProverTranscript::new(&transcript_init_seed),
+        );
+
+        assert_eq!(
+            &trace_commitment.to_vec(),
+            &decode_hex("0eb9dcc0fb1854572a01236753ce05139d392aa3aeafe72abff150fe21175594")
+                .unwrap()
+        );
+    }
+
+    #[test]
+    fn test_trace_commitment_is_compatible_with_stone_prover_2() {
+        let trace = fibonacci_2_cols_shifted::compute_trace(FieldElement::one(), 4);
+
+        let claimed_index = 3;
+        let claimed_value = trace.get_row(claimed_index)[0];
+        let mut proof_options = ProofOptions::default_test_options();
+        proof_options.blowup_factor = 64;
+        proof_options.coset_offset = 3;
+
+        let pub_inputs = fibonacci_2_cols_shifted::PublicInputs {
+            claimed_value,
+            claimed_index,
+        };
+
+        let transcript_init_seed = [0xfa, 0xfa, 0xfa, 0xee];
+
+        let air = Fibonacci2ColsShifted::new(trace.n_rows(), &pub_inputs, &proof_options);
+        let domain = Domain::new(&air);
+
+        let (_, _, _, trace_commitment) = interpolate_and_commit(
+            &trace,
+            &domain,
+            &mut StoneProverTranscript::new(&transcript_init_seed),
+        );
+
+        assert_eq!(
+            &trace_commitment.to_vec(),
+            &decode_hex("99d8d4342895c4e35a084f8ea993036be06f51e7fa965734ed9c7d41104f0848")
+                .unwrap()
+        );
+    }
 }
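A quick aside on `get_stone_prover_domain_permutation` and the permuted commitments above: lambdaworks lays out the LDE evaluations coset by coset, while Stone interleaves the cosets in bit-reversed order and then bit-reverses within each chunk of `domain_size` entries. The sketch below is a self-contained, hand-rolled re-implementation of that logic for a toy domain; `stone_permutation` and `bit_reverse` are hypothetical stand-ins mirroring the diff (the real code uses `in_place_bit_reverse_permute`), and the expected vector was worked out by hand, not taken from the repository's test suite.

```rust
/// In-place bit-reversal permutation for power-of-two lengths >= 2.
fn bit_reverse(v: &mut Vec<usize>) {
    let n = v.len();
    let bits = n.trailing_zeros();
    for i in 0..n {
        let j = (i as u64).reverse_bits().wrapping_shr(64 - bits) as usize;
        if i < j {
            v.swap(i, j);
        }
    }
}

fn stone_permutation(domain_size: usize, blowup_factor: usize) -> Vec<usize> {
    // Step 1: bit-reverse the coset order.
    let mut cosets: Vec<usize> = (0..blowup_factor).collect();
    bit_reverse(&mut cosets);

    // Step 2: interleave; point j of coset i lives at i + j * blowup_factor.
    let mut permutation = Vec::new();
    for &i in &cosets {
        for j in 0..domain_size {
            permutation.push(i + j * blowup_factor);
        }
    }

    // Step 3: bit-reverse inside each chunk of `domain_size` entries.
    for chunk in permutation.chunks_mut(domain_size) {
        let mut tmp = chunk.to_vec();
        bit_reverse(&mut tmp);
        chunk.copy_from_slice(&tmp);
    }
    permutation
}

fn main() {
    // domain_size = 4, blowup_factor = 2: an LDE domain of 8 points.
    // Step 2 yields [0, 2, 4, 6, 1, 3, 5, 7]; step 3 turns the two chunks
    // into [0, 4, 2, 6] and [1, 5, 3, 7].
    assert_eq!(stone_permutation(4, 2), vec![0, 4, 2, 6, 1, 5, 3, 7]);
}
```

This is also why `open_deep_composition_poly` and the verifier's Merkle checks query position `permutation[index]` rather than `index`: the committed leaves live at the permuted positions.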
diff --git a/provers/stark/src/transcript.rs b/provers/stark/src/transcript.rs
index bd7d81dca..b6952b70b 100644
--- a/provers/stark/src/transcript.rs
+++ b/provers/stark/src/transcript.rs
@@ -4,7 +4,7 @@ use lambdaworks_math::{
         fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
         traits::{IsFFTField, IsField},
     },
-    traits::ByteConversion,
+    traits::{ByteConversion, Serializable},
     unsigned_integer::element::U256,
 };
 use sha3::{Digest, Keccak256};
@@ -21,7 +21,7 @@ pub trait IsStarkTranscript<F: IsField> {
         trace_roots_of_unity: &[FieldElement<F>],
     ) -> FieldElement<F>
     where
-        FieldElement<F>: ByteConversion,
+        FieldElement<F>: Serializable,
     {
         loop {
             let value: FieldElement<F> = self.sample_field_element();
@@ -155,7 +155,7 @@ pub fn batch_sample_challenges<F: IsFFTField>(
     transcript: &mut impl IsStarkTranscript<F>,
 ) -> Vec<FieldElement<F>>
 where
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
 {
     (0..size)
         .map(|_| transcript.sample_field_element())
@@ -289,7 +289,7 @@ mod tests {
     // This corresponds to the following run.
     // Air: `Fibonacci2ColsShifted`
     // `trace_length`: 4
-    // `blowup_factor`: 2
+    // `blowup_factor`: 4
     // `fri_number_of_queries`: 1
     let mut transcript = StoneProverTranscript::new(&[0xca, 0xfe, 0xca, 0xfe]);
     // Send hash of trace commitment
@@ -304,7 +304,7 @@
             "86105fff7b04ed4068ecccb8dbf1ed223bd45cd26c3532d6c80a818dbd4fa7"
         )
     );
-    // Send hash of composition poly commitment H(z)
+    // Send hash of composition poly commitment H
    transcript.append_bytes(
         &decode_hex("7cdd8d5fe3bd62254a417e2e260e0fed4fccdb6c9005e828446f645879394f38")
             .unwrap(),
@@ -374,7 +374,7 @@
     // This corresponds to the following run.
     // Air: `Fibonacci2ColsShifted`
     // `trace_length`: 4
-    // `blowup_factor`: 6
+    // `blowup_factor`: 64
     // `fri_number_of_queries`: 2
     let mut transcript = StoneProverTranscript::new(&[0xfa, 0xfa, 0xfa, 0xee]);
     // Send hash of trace commitment
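For context on the `sample_z_ood` bound change above: the provided method repeatedly draws a field element from the transcript and retries until the result is usable as an out-of-domain point. A minimal sketch of that rejection-sampling pattern, with `u64` stand-ins for field elements; the closure-based `sample` argument is a hypothetical simplification, not the lambdaworks API.

```rust
// Keep sampling until z lies outside both evaluation domains, so the
// out-of-domain point never coincides with a committed evaluation.
fn sample_z_ood(
    lde_roots_of_unity_coset: &[u64],
    trace_roots_of_unity: &[u64],
    mut sample: impl FnMut() -> u64,
) -> u64 {
    loop {
        let z = sample();
        if !lde_roots_of_unity_coset.contains(&z) && !trace_roots_of_unity.contains(&z) {
            return z;
        }
    }
}

fn main() {
    // A deterministic "transcript" that emits 0, 1, 2, 3, ...
    let mut counter = 0u64;
    let z = sample_z_ood(&[0, 1], &[0, 2], move || {
        counter += 1;
        counter - 1
    });
    assert_eq!(z, 3); // 0, 1, 2 are rejected; 3 is the first admissible value.
}
```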
diff --git a/provers/stark/src/verifier.rs b/provers/stark/src/verifier.rs
index fe7afd687..38a505602 100644
--- a/provers/stark/src/verifier.rs
+++ b/provers/stark/src/verifier.rs
@@ -10,10 +10,10 @@ use lambdaworks_math::{
         element::FieldElement,
         traits::{IsFFTField, IsField},
     },
-    traits::ByteConversion,
+    traits::Serializable,
 };
 
-use crate::transcript::IsStarkTranscript;
+use crate::{prover::get_stone_prover_domain_permutation, transcript::IsStarkTranscript};
 
 use super::{
     config::{BatchedMerkleTreeBackend, FriMerkleTreeBackend},
@@ -22,7 +22,6 @@
     grinding::hash_transcript_with_int_and_get_leading_zeros,
     proof::{options::ProofOptions, stark::StarkProof},
     traits::AIR,
-    transcript::batch_sample_challenges,
 };
 
 struct Challenges<F, A>
@@ -50,7 +49,7 @@ fn step_1_replay_rounds_and_recover_challenges<F, A>(
 ) -> Challenges<F, A>
 where
     F: IsFFTField,
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
     A: AIR<Field = F>,
 {
     // ===================================
@@ -72,15 +71,18 @@
     // ==========|   Round 2   |==========
     // ===================================
 
-    // These are the challenges alpha^B_j and beta^B_j
-    // >>>> Send challenges: 𝛽_j^B
-    let boundary_coeffs = batch_sample_challenges(
-        air.boundary_constraints(&rap_challenges).constraints.len(),
-        transcript,
-    );
-    // >>>> Send challenges: 𝛽_j^T
-    let transition_coeffs =
-        batch_sample_challenges(air.context().num_transition_constraints, transcript);
+    // <<<< Receive challenge: 𝛽
+    let beta = transcript.sample_field_element();
+    let num_boundary_constraints = air.boundary_constraints(&rap_challenges).constraints.len();
+
+    let num_transition_constraints = air.context().num_transition_constraints;
+
+    let mut coefficients: Vec<_> = (1..num_boundary_constraints + num_transition_constraints + 1)
+        .map(|i| beta.pow(i))
+        .collect();
+
+    let transition_coeffs: Vec<_> = coefficients.drain(..num_transition_constraints).collect();
+    let boundary_coeffs = coefficients;
 
     // <<<< Receive commitments: [H₁], [H₂]
     transcript.append_bytes(&proof.composition_poly_root);
@@ -270,7 +272,7 @@ fn step_3_verify_fri<F, A>(
 ) -> bool
 where
     F: IsFFTField,
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
     A: AIR<Field = F>,
 {
     // verify FRI
@@ -309,8 +311,10 @@ fn step_4_verify_deep_composition_polynomial<F: IsFFTField, A: AIR<Field = F>>(
     challenges: &Challenges<F, A>,
 ) -> bool
 where
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
 {
+    let permutation =
+        get_stone_prover_domain_permutation(domain.interpolation_domain_size, domain.blowup_factor);
     let primitive_root = &F::get_primitive_root_of_unity(domain.root_order as u64).unwrap();
     let z_squared = &challenges.z.square();
     let mut denom_inv = challenges
@@ -363,7 +367,7 @@
             .fold(result, |acc, ((merkle_root, merkle_proof), evaluation)| {
                 acc & merkle_proof.verify::<BatchedMerkleTreeBackend<F>>(
                     merkle_root,
-                    *iota_n,
+                    permutation[*iota_n],
                     &evaluation,
                 )
             });
@@ -397,7 +401,7 @@ fn verify_query_and_sym_openings<F: IsField + IsFFTField>(
     two_inv: &FieldElement<F>,
 ) -> bool
 where
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
 {
     let fri_layers_merkle_roots = &proof.fri_layers_merkle_roots;
     let evaluation_point_vec: Vec<FieldElement<F>> =
@@ -512,7 +516,7 @@ pub fn verify<F, A>(
 where
     F: IsFFTField,
     A: AIR<Field = F>,
-    FieldElement<F>: ByteConversion,
+    FieldElement<F>: Serializable,
 {
     // Verify there are enough queries
     if proof.query_list.len() < proof_options.fri_number_of_queries {
@@ -602,3 +606,80 @@
 
     true
 }
+
+#[cfg(test)]
+pub mod tests {
+    use std::num::ParseIntError;
+
+    use lambdaworks_math::field::{
+        element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
+    };
+
+    use crate::{
+        domain::Domain,
+        examples::fibonacci_2_cols_shifted::{self, Fibonacci2ColsShifted},
+        proof::options::ProofOptions,
+        prover::prove,
+        traits::AIR,
+        transcript::StoneProverTranscript,
+        verifier::step_1_replay_rounds_and_recover_challenges,
+    };
+
+    pub fn decode_hex(s: &str) -> Result<Vec<u8>, ParseIntError> {
+        (0..s.len())
+            .step_by(2)
+            .map(|i| u8::from_str_radix(&s[i..i + 2], 16))
+            .collect()
+    }
+
+    #[test]
+    fn test_sharp_compatibility() {
+        let trace = fibonacci_2_cols_shifted::compute_trace(FieldElement::one(), 4);
+
+        let claimed_index = 3;
+        let claimed_value = trace.get_row(claimed_index)[0];
+        let mut proof_options = ProofOptions::default_test_options();
+        proof_options.blowup_factor = 4;
+        proof_options.coset_offset = 3;
+
+        let pub_inputs = fibonacci_2_cols_shifted::PublicInputs {
+            claimed_value,
+            claimed_index,
+        };
+
+        let transcript_init_seed = [0xca, 0xfe, 0xca, 0xfe];
+
+        let proof = prove::<Stark252PrimeField, Fibonacci2ColsShifted<_>>(
+            &trace,
+            &pub_inputs,
+            &proof_options,
+            StoneProverTranscript::new(&transcript_init_seed),
+        )
+        .unwrap();
+
+        let air = Fibonacci2ColsShifted::new(proof.trace_length, &pub_inputs, &proof_options);
+        let domain = Domain::new(&air);
+        let challenges = step_1_replay_rounds_and_recover_challenges(
+            &air,
+            &proof,
+            &domain,
+            &mut StoneProverTranscript::new(&transcript_init_seed),
+        );
+
+        assert_eq!(
+            proof.lde_trace_merkle_roots[0].to_vec(),
+            decode_hex("0eb9dcc0fb1854572a01236753ce05139d392aa3aeafe72abff150fe21175594").unwrap()
+        );
+
+        let beta = challenges.transition_coeffs[0];
+        assert_eq!(
+            beta,
+            FieldElement::from_hex_unchecked(
+                "86105fff7b04ed4068ecccb8dbf1ed223bd45cd26c3532d6c80a818dbd4fa7"
+            ),
+        );
+
+        assert_eq!(challenges.transition_coeffs[1], beta.pow(2u64));
+        assert_eq!(challenges.boundary_coeffs[0], beta.pow(3u64));
+        assert_eq!(challenges.boundary_coeffs[1], beta.pow(4u64));
+    }
+}
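The assertions in `test_sharp_compatibility` pin down the coefficient schedule that both `prover.rs` and `verifier.rs` now derive from the single challenge 𝛽 in place of batch sampling: with T transition constraints and B boundary constraints, transition coefficient j is 𝛽^(j+1) and boundary coefficient j is 𝛽^(T+j+1). A standalone check of that split, with `u128` arithmetic standing in for field elements; `powers_schedule` is a hypothetical helper mirroring the `drain`-based code in the diff.

```rust
fn powers_schedule(
    beta: u128,
    num_transition: usize,
    num_boundary: usize,
) -> (Vec<u128>, Vec<u128>) {
    // Powers beta^1 ..= beta^(T + B), exactly as in the patched rounds.
    let mut coefficients: Vec<u128> = (1..num_boundary + num_transition + 1)
        .map(|i| beta.pow(i as u32))
        .collect();
    // The first T powers go to the transition constraints...
    let transition: Vec<u128> = coefficients.drain(..num_transition).collect();
    // ...and the remaining B powers to the boundary constraints.
    (transition, coefficients)
}

fn main() {
    let (transition, boundary) = powers_schedule(3, 2, 2);
    assert_eq!(transition, vec![3, 9]); // 𝛽¹, 𝛽²
    assert_eq!(boundary, vec![27, 81]); // 𝛽³, 𝛽⁴
}
```

Deriving all coefficients from one transcript challenge keeps the Fiat-Shamir interaction identical to Stone's, which is what the hash assertions in the tests above exercise end to end.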