From cc980eacdbbca69a2429ca28e1963601c7c4f9a6 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Sat, 22 Jan 2022 17:08:46 +0000 Subject: [PATCH 1/8] modify transcript to use banderwagon --- src/transcript.rs | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/transcript.rs b/src/transcript.rs index 43b7dd4..19f8c0e 100644 --- a/src/transcript.rs +++ b/src/transcript.rs @@ -1,9 +1,9 @@ use ark_ff::PrimeField; -use bandersnatch::{EdwardsProjective, Fr}; +use banderwagon::{Element, Fr}; pub trait TranscriptProtocol { /// Compute a `label`ed challenge variable. fn challenge_scalar(&mut self, label: &'static [u8]) -> Fr; - fn append_point(&mut self, label: &'static [u8], point: &EdwardsProjective); + fn append_point(&mut self, label: &'static [u8], point: &Element); fn append_scalar(&mut self, label: &'static [u8], point: &Fr); fn domain_sep(&mut self, label: &'static [u8]); } @@ -47,7 +47,7 @@ impl TranscriptProtocol for Transcript { scalar } - fn append_point(&mut self, label: &'static [u8], point: &EdwardsProjective) { + fn append_point(&mut self, label: &'static [u8], point: &Element) { let mut bytes = [0u8; 32]; point.serialize(&mut bytes[..]).unwrap(); self.append_message(&bytes, label) @@ -71,6 +71,7 @@ mod tests { let mut tr = Transcript::new(b"simple_protocol"); let first_challenge = tr.challenge_scalar(b"simple_challenge"); let second_challenge = tr.challenge_scalar(b"simple_challenge"); + // We can never even accidentally, generate the same challenge assert_ne!(first_challenge, second_challenge) } #[test] @@ -121,13 +122,13 @@ mod tests { fn test_vector_4() { use ark_ec::ProjectiveCurve; let mut tr = Transcript::new(b"simple_protocol"); - let generator = EdwardsProjective::prime_subgroup_generator(); + let generator = Element::prime_subgroup_generator(); tr.append_point(b"generator", &generator); let challenge = tr.challenge_scalar(b"simple_challenge"); - let expected = "c3d390ff8ef3242c4ec3508d9c5f66d8c9f6aae3bde9ce7b4e1a53b9a6e9ac18"; + let expected = "8c2dafe7c0aabfa9ed542bb2cbf0568399ae794fc44fdfd7dff6cc0e6144921c"; let got = scalar_to_hex(&challenge); assert_eq!(got, expected) From bb6632a17a0df704289e89c065eebf8ba0ee4c46 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Sat, 22 Jan 2022 17:09:02 +0000 Subject: [PATCH 2/8] move crs to its own module and use Element --- src/crs.rs | 141 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 141 insertions(+) create mode 100644 src/crs.rs diff --git a/src/crs.rs b/src/crs.rs new file mode 100644 index 0000000..db4ce31 --- /dev/null +++ b/src/crs.rs @@ -0,0 +1,141 @@ +use ark_serialize::CanonicalSerialize; +use bandersnatch::{EdwardsAffine, EdwardsProjective}; +use banderwagon::Element; + +use crate::{ipa::slow_vartime_multiscalar_mul, lagrange_basis::LagrangeBasis}; + +#[derive(Debug, Clone)] +pub struct CRS { + pub n: usize, + pub G: Vec, + pub Q: Element, +} + +impl CRS { + pub fn new(n: usize, seed: &'static [u8]) -> CRS { + // TODO generate the Q value from the seed also + // TODO: this will also make assert_dedup work as expected + // TODO: since we should take in `Q` too + let G: Vec<_> = generate_random_elements(n, seed).into_iter().collect(); + let Q = Element::prime_subgroup_generator(); + + CRS::assert_dedup(&G); + + CRS { n, G, Q } + } + // Asserts that not of the points generated are the same + fn assert_dedup(points: &[Element]) { + use std::collections::HashMap; + let mut map = HashMap::new(); + for point in points { + assert!( + 
map.insert(point.to_bytes(), ()).is_none(), + "crs has duplicated points" + ) + } + } + pub fn commit_lagrange_poly(&self, polynomial: &LagrangeBasis) -> Element { + slow_vartime_multiscalar_mul(polynomial.values().iter(), self.G.iter()) + } +} + +impl std::ops::Index for CRS { + type Output = Element; + + fn index(&self, index: usize) -> &Self::Output { + &self.G[index] + } +} + +fn generate_random_elements(num_required_points: usize, seed: &'static [u8]) -> Vec { + use ark_ec::group::Group; + use ark_ff::PrimeField; + use bandersnatch::Fq; + use sha2::{Digest, Sha256}; + + let mut hasher = Sha256::new(); + let choose_largest = false; + + (0u32..) + .into_iter() + // Hash the seed + i to get a possible x value + .map(|i| { + let mut hasher = Sha256::new(); + hasher.update(seed); + hasher.update(&i.to_be_bytes()); + let bytes: Vec = hasher.finalize().to_vec(); + bytes + }) + // The from_bytes method does not reduce the bytes, it expects the + // input to be in a canonical format, so we must do the reduction ourselves + .map(|hash_bytes| bandersnatch::Fq::from_be_bytes_mod_order(&hash_bytes)) + // Using the x co-ordinate fetch a possible y co-ordinate + .map(|x_coord| EdwardsAffine::get_point_from_x(x_coord, choose_largest)) + .filter_map(|point| point) + // Double the point incase its not in the prime order subgroup + .map(|point| point.double()) + // Serialise x co-ordinate of point + .map(|point| { + let mut bytes = [0u8; 32]; + point.x.serialize(&mut bytes[..]).unwrap(); + // TODO: this reverse is hacky, and its because there is no way to specify the endianness in arkworks + // TODO So we reverse it here, to be interopable with the banderwagon specs which needs big endian bytes + bytes.reverse(); + bytes + }) + // Using banderwagon deserialise the x-cordinate to get a valid banderwagon element + .map(|bytes| Element::from_bytes(&bytes)) + .filter_map(|point| point) + .take(num_required_points) + .collect() +} + +// TODO: update hackmd as we are now using banderwagon +// TODO then redo this test +// #[test] +// fn crs_consistency() { +// // See: https://hackmd.io/1RcGSMQgT4uREaq1CCx_cg#Methodology +// use ark_serialize::CanonicalSerialize; +// use bandersnatch::Fq; +// use sha2::{Digest, Sha256}; + +// let points = generate_random_elements(256, b"eth_verkle_oct_2021"); +// for point in &points { +// let on_curve = point.is_on_curve(); +// let in_correct_subgroup = point.is_in_correct_subgroup_assuming_on_curve(); +// if !on_curve { +// panic!("generated a point which is not on the curve") +// } +// if !in_correct_subgroup { +// panic!("generated a point which is not in the prime subgroup") +// } +// } + +// let mut bytes = [0u8; 32]; +// points[0].serialize(&mut bytes[..]).unwrap(); +// assert_eq!( +// hex::encode(&bytes), +// "22ac968a98ab6c50379fc8b039abc8fd9aca259f4746a05bfbdf12c86463c208", +// "the first point is incorrect" +// ); +// let mut bytes = [0u8; 32]; +// points[255].serialize(&mut bytes[..]).unwrap(); +// assert_eq!( +// hex::encode(&bytes), +// "c8b4968a98ab6c50379fc8b039abc8fd9aca259f4746a05bfbdf12c86463c208", +// "the 256th (last) point is incorrect" +// ); + +// let mut hasher = Sha256::new(); +// for point in &points { +// let mut bytes = [0u8; 32]; +// point.serialize(&mut bytes[..]).unwrap(); +// hasher.update(&bytes); +// } +// let bytes = hasher.finalize().to_vec(); +// assert_eq!( +// hex::encode(&bytes), +// "c390cbb4bc42019685d5a01b2fb8a536d4332ea4e128934d0ae7644163089e76", +// "unexpected point encountered" +// ); +// } From 
08a4aa5233123ab3d3ce31f9ccf301b2f72ec417 Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Sat, 22 Jan 2022 17:09:49 +0000 Subject: [PATCH 3/8] modify ipa and multiproof to use banderwagon --- Cargo.toml | 8 +-- src/ipa.rs | 67 ++++++++++++------------- src/lib.rs | 3 +- src/multiproof.rs | 123 ++++------------------------------------------ 4 files changed, 48 insertions(+), 153 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index aae70c4..fa367e7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,6 +18,8 @@ rand_chacha = { version = "0.3.0", default-features = false } itertools = "0.10.1" sha2 = "0.9.8" hex = "0.4.3" -[[bench]] -name = "benchmark_main" -harness = false +banderwagon = { git = "https://github.com/crate-crypto/banderwagon" } + +# [[bench]] +# name = "benchmark_main" +# harness = false diff --git a/src/ipa.rs b/src/ipa.rs index 0549a4b..68bf111 100644 --- a/src/ipa.rs +++ b/src/ipa.rs @@ -1,6 +1,6 @@ #![allow(non_snake_case)] +use crate::crs::CRS; use crate::math_utils::inner_product; -use crate::multiproof::CRS; use crate::transcript::{Transcript, TranscriptProtocol}; use ark_ec::group::Group; use ark_ec::{AffineCurve, ProjectiveCurve}; @@ -8,10 +8,8 @@ use ark_ff::Field; use ark_ff::PrimeField; use ark_ff::{One, Zero}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -use bandersnatch::multi_scalar_mul; -use bandersnatch::EdwardsAffine; -use bandersnatch::EdwardsProjective; -use bandersnatch::Fr; + +use banderwagon::{multi_scalar_mul, Element, Fr}; use itertools::Itertools; use crate::{IOError, IOErrorKind, IOResult}; @@ -22,8 +20,8 @@ use std::iter; #[derive(Debug, Clone, PartialEq, Eq)] pub struct IPAProof { - pub(crate) L_vec: Vec, - pub(crate) R_vec: Vec, + pub(crate) L_vec: Vec, + pub(crate) R_vec: Vec, pub(crate) a: Fr, } @@ -45,16 +43,16 @@ impl IPAProof { for _ in 0..num_points { let chunk = chunks.next().unwrap(); - let point: EdwardsAffine = CanonicalDeserialize::deserialize(chunk) + let point: Element = CanonicalDeserialize::deserialize(chunk) .map_err(|_| IOError::from(IOErrorKind::InvalidData))?; - L_vec.push(point.into_projective()) + L_vec.push(point) } for _ in 0..num_points { let chunk = chunks.next().unwrap(); - let point: EdwardsAffine = CanonicalDeserialize::deserialize(chunk) + let point: Element = CanonicalDeserialize::deserialize(chunk) .map_err(|_| IOError::from(IOErrorKind::InvalidData))?; - R_vec.push(point.into_projective()) + R_vec.push(point) } let last_32_bytes = chunks.next().unwrap(); @@ -86,7 +84,7 @@ pub fn create( transcript: &mut Transcript, mut crs: CRS, mut a_vec: Vec, - a_comm: EdwardsProjective, + a_comm: Element, mut b_vec: Vec, // This is the z in f(z) input_point: Fr, @@ -114,12 +112,12 @@ pub fn create( transcript.append_scalar(b"output point", &output_point); let w = transcript.challenge_scalar(b"w"); - let Q = (&crs.Q).mul(&w); // XXX: It would not hurt to add this augmented point into the transcript + let Q = crs.Q * w; // XXX: It would not hurt to add this augmented point into the transcript let num_rounds = log2(n); - let mut L_vec: Vec = Vec::with_capacity(num_rounds as usize); - let mut R_vec: Vec = Vec::with_capacity(num_rounds as usize); + let mut L_vec: Vec = Vec::with_capacity(num_rounds as usize); + let mut R_vec: Vec = Vec::with_capacity(num_rounds as usize); for k in 0..num_rounds { let (a_L, a_R) = halve(a); @@ -149,7 +147,7 @@ pub fn create( for i in 0..a_L.len() { a_L[i] = a_L[i] + x * a_R[i]; b_L[i] = b_L[i] + x_inv * b_R[i]; - G_L[i] = G_L[i] + G_R[i].mul(x_inv.into_repr()); + G_L[i] = 
G_L[i] + G_R[i] * x_inv; } a = a_L; @@ -179,7 +177,7 @@ impl IPAProof { transcript: &mut Transcript, mut crs: CRS, mut b: Vec, - a_comm: EdwardsProjective, + a_comm: Element, input_point: Fr, output_point: Fr, ) -> bool { @@ -202,11 +200,11 @@ impl IPAProof { transcript.append_scalar(b"output point", &output_point); let w = transcript.challenge_scalar(b"w"); - let Q = (&crs.Q).mul(&w); + let Q = crs.Q * w; let num_rounds = self.L_vec.len(); - let mut a_comm = a_comm + (Q.mul(output_point.into_repr())); + let mut a_comm = a_comm + (Q * output_point); let challenges = generate_challenges(self, transcript); let mut challenges_inv = challenges.clone(); @@ -220,7 +218,7 @@ impl IPAProof { let L = self.L_vec[i]; let R = self.R_vec[i]; - a_comm = a_comm + L.mul(x.into_repr()) + R.mul(x_inv.into_repr()); + a_comm = a_comm + (L * x) + (R * x_inv); } for x_inv in challenges_inv.iter() { @@ -228,7 +226,7 @@ impl IPAProof { let (b_L, b_R) = halve(b); for i in 0..G_L.len() { - G_L[i] = G_L[i] + G_R[i].mul(x_inv.into_repr()); + G_L[i] = G_L[i] + G_R[i] * *x_inv; b_L[i] = b_L[i] + b_R[i] * x_inv; } G = G_L; @@ -237,7 +235,7 @@ impl IPAProof { assert_eq!(G.len(), 1); assert_eq!(b.len(), 1); - let exp_P = G[0].mul(self.a.into_repr()) + Q.mul((self.a * b[0]).into_repr()); + let exp_P = (G[0] * self.a) + Q * (self.a * b[0]); exp_P == a_comm } @@ -246,7 +244,7 @@ impl IPAProof { transcript: &mut Transcript, crs: &CRS, b_vec: Vec, - a_comm: EdwardsProjective, + a_comm: Element, input_point: Fr, output_point: Fr, ) -> bool { @@ -323,7 +321,7 @@ impl IPAProof { transcript: &mut Transcript, crs: &CRS, b_Vec: Vec, - a_comm: EdwardsProjective, + a_comm: Element, input_point: Fr, output_point: Fr, ) -> bool { @@ -343,9 +341,9 @@ impl IPAProof { transcript.append_scalar(b"output point", &output_point); let w = transcript.challenge_scalar(b"w"); - let Q = (&crs.Q).mul(&w); + let Q = crs.Q * w; - let a_comm = a_comm + (Q.mul(output_point.into_repr())); + let a_comm = a_comm + (Q * output_point); let challenges = generate_challenges(self, transcript); let mut challenges_inv = challenges.clone(); @@ -378,7 +376,7 @@ impl IPAProof { let b_0 = inner_product(&b_Vec, &g_i); let G_0 = slow_vartime_multiscalar_mul(g_i.iter(), crs.G.iter()); // TODO: Optimise the majority of the time is spent on this vector, precompute - let exp_P = G_0.mul(self.a.into_repr()) + Q.mul((self.a * b_0).into_repr()); + let exp_P = (G_0 * self.a) + Q * (self.a * b_0); exp_P == P } @@ -394,14 +392,11 @@ fn to_bits(n: usize, bits_needed: usize) -> impl Iterator { // TODO check performance of that versus the current method pub fn slow_vartime_multiscalar_mul<'a>( scalars: impl Iterator, - points: impl Iterator, -) -> EdwardsProjective { - use ark_ec::group::Group; - use ark_ec::msm::VariableBaseMSM; - - let scalars: Vec<_> = scalars.into_iter().map(|s| s.into_repr()).collect(); - let points: Vec<_> = points.map(|p| p.into_affine()).collect(); - VariableBaseMSM::multi_scalar_mul(&points, &scalars) + points: impl Iterator, +) -> Element { + let scalars: Vec<_> = scalars.into_iter().copied().collect(); + let points: Vec<_> = points.into_iter().copied().collect(); + multi_scalar_mul(&points, &scalars) } fn generate_challenges(proof: &IPAProof, transcript: &mut Transcript) -> Vec { @@ -421,8 +416,8 @@ fn generate_challenges(proof: &IPAProof, transcript: &mut Transcript) -> Vec #[cfg(test)] mod tests { use super::*; + use crate::crs::CRS; use crate::math_utils::{inner_product, powers_of}; - use crate::multiproof::CRS; use ark_std::rand; use 
ark_std::rand::SeedableRng; use ark_std::UniformRand; diff --git a/src/lib.rs b/src/lib.rs index cce4bb2..30621d2 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,9 +1,10 @@ +pub mod crs; pub mod ipa; // follows the BCMS20 scheme pub mod math_utils; pub mod multiproof; pub mod transcript; -pub use ipa::slow_vartime_multiscalar_mul; +pub(crate) use ipa::slow_vartime_multiscalar_mul; pub mod lagrange_basis; diff --git a/src/multiproof.rs b/src/multiproof.rs index 7be3d3b..ea19062 100644 --- a/src/multiproof.rs +++ b/src/multiproof.rs @@ -1,13 +1,11 @@ // We get given multiple polynomials evaluated at different points #![allow(non_snake_case)] -use std::collections::HashMap; - -use crate::ipa::{self, IPAProof}; +use crate::crs::CRS; +use crate::ipa::{self, slow_vartime_multiscalar_mul, IPAProof}; use crate::lagrange_basis::{LagrangeBasis, PrecomputedWeights}; use crate::math_utils::inner_product; use crate::math_utils::powers_of; -use crate::slow_vartime_multiscalar_mul; use crate::transcript::Transcript; use crate::transcript::TranscriptProtocol; use ark_ec::{AffineCurve, ProjectiveCurve}; @@ -16,67 +14,14 @@ use ark_ff::{batch_inversion, Field}; use ark_ff::{One, Zero}; use ark_poly::univariate::DensePolynomial; use ark_poly::{Polynomial, UVPolynomial}; -use bandersnatch::multi_scalar_mul; -use bandersnatch::EdwardsAffine; -use bandersnatch::EdwardsProjective; -use bandersnatch::Fr; -#[derive(Debug, Clone)] -pub struct CRS { - pub n: usize, - pub G: Vec, - pub Q: EdwardsProjective, -} - -impl CRS { - pub fn new(n: usize, seed: &'static [u8]) -> CRS { - let G: Vec<_> = generate_random_elements(n, seed) - .into_iter() - .map(|affine_point| affine_point.into_projective()) - .collect(); - let Q = EdwardsProjective::prime_subgroup_generator(); - CRS { n, G, Q } - } - - pub fn commit_lagrange_poly(&self, polynomial: &LagrangeBasis) -> EdwardsProjective { - slow_vartime_multiscalar_mul(polynomial.values().iter(), self.G.iter()) - } -} - -impl std::ops::Index for CRS { - type Output = EdwardsProjective; - - fn index(&self, index: usize) -> &Self::Output { - &self.G[index] - } -} - -fn generate_random_elements(num_required_points: usize, seed: &'static [u8]) -> Vec { - use bandersnatch::Fq; - use sha2::{Digest, Sha256}; - - let mut hasher = Sha256::new(); - - hasher.update(seed); - let bytes = hasher.finalize().to_vec(); - - let u = bandersnatch::Fq::from_be_bytes_mod_order(&bytes); - let choose_largest = false; - - (0..) 
- .into_iter() - .map(|i| Fq::from(i as u128) + u) - .map(|x| EdwardsAffine::get_point_from_x(x, choose_largest)) - .filter_map(|point| point) - .filter(|point| point.is_in_correct_subgroup_assuming_on_curve()) - .take(num_required_points) - .collect() -} +use std::collections::HashMap; +use banderwagon::{multi_scalar_mul, Element, Fr}; pub struct MultiPoint; #[derive(Clone, Debug)] pub struct ProverQuery { - pub commitment: EdwardsProjective, + pub commitment: Element, pub poly: LagrangeBasis, // TODO: Make this a reference so that upstream libraries do not need to clone // Given a function f, we use z_i to denote the input point and y_i to denote the output, ie f(z_i) = y_i pub point: usize, @@ -93,7 +38,7 @@ impl From for VerifierQuery { } } pub struct VerifierQuery { - pub commitment: EdwardsProjective, + pub commitment: Element, pub point: Fr, pub result: Fr, } @@ -223,7 +168,7 @@ impl MultiPoint { #[derive(Debug, Clone, PartialEq, Eq)] pub struct MultiPointProof { open_proof: IPAProof, - g_x_comm: EdwardsProjective, + g_x_comm: Element, } impl MultiPointProof { @@ -234,9 +179,9 @@ impl MultiPointProof { let g_x_comm_bytes = &bytes[0..32]; let ipa_bytes = &bytes[32..]; // TODO: we should return a Result here incase the user gives us bad bytes - let point: EdwardsAffine = CanonicalDeserialize::deserialize(g_x_comm_bytes) + let point: Element = CanonicalDeserialize::deserialize(g_x_comm_bytes) .map_err(|_| IOError::from(IOErrorKind::InvalidData))?; - let g_x_comm = point.into_projective(); + let g_x_comm = point; let open_proof = IPAProof::from_bytes(ipa_bytes, poly_degree)?; Ok(MultiPointProof { @@ -325,7 +270,7 @@ pub(crate) fn open_point_outside_of_domain( precomp: &PrecomputedWeights, transcript: &mut Transcript, polynomial: LagrangeBasis, - commitment: EdwardsProjective, + commitment: Element, z_i: Fr, ) -> IPAProof { let a = polynomial.values().to_vec(); @@ -553,51 +498,3 @@ fn multiproof_consistency() { let expected = "1e575ed50234769345382d64f828d8dd65052cc623c4bfe6dd1ca0a8eb6940de717d20b92f592aea4e1a649644ee92d83813e8e296c71e2d32b40532f455d8b9b56baadafbe84808d784aa920836b73af49d758bd8bb1a2690df8b2450d2112e3a48a06378bc60dffa9cd9f80c9c4da0385a388fc8edeca1a740d76b3ab1d8d3ccb0387a0c2005432d6a52e98ca46c0649a69b6b02b9832b1e108199e6977c403624cfff05715445e37586444a27d8c97f18b3bbf417b442e8c8ab16dfe3b0e96ba20178280e6192f8e4e861a21215f394c1ff3057cd5492d1a5154ed8330f3f93f7f02079042c27d51c6299904eadaf6e1e290cc94920d143112ddb34cf2488131bc321ff0349150aad44563ac765905b15b30ac71ebb01c78d7e26e4f920219d040fb50fab3a233ea349fe5e09b1c7e56b311dc8e4505c04c60e27c86d8cbb72a0fe057815972f4bf2e126684a79ba5a3932a9713e059cd51d1a8f0599efa54172d4dfae7016ce2b7b2b325ba847782a2741ba560c158e38d10362a61a11538dd3c5e6742bb96901f53291649fbad13518c79c40af9733f5b54743f7fba3cda82d56894d0265f0befbc2e8a45612411e9bde4123263b1cde7c76ede1b21d97694382416b8c8f502f2c9af06bf250095122fbbfada1b683f588aa01a654a2ddd736135729835790845b3c403cc793bbfc808dba33b7af33bb43d49e06595a095ac84290e268e41d72ef9b93d4bafd0bf537179621a1c4936a5b7f713e9dd5f0cec2779933f46e0d8f48f15a81565de89df43e727e834de5386e446ca2696a13"; assert_eq!(got, expected) } - -#[test] -fn crs_consistency() { - // See: https://hackmd.io/1RcGSMQgT4uREaq1CCx_cg#Methodology - use ark_serialize::CanonicalSerialize; - use bandersnatch::Fq; - use sha2::{Digest, Sha256}; - - let points = generate_random_elements(256, b"eth_verkle_oct_2021"); - for point in &points { - let on_curve = point.is_on_curve(); - let in_correct_subgroup = point.is_in_correct_subgroup_assuming_on_curve(); - 
if !on_curve { - panic!("generated a point which is not on the curve") - } - if !in_correct_subgroup { - panic!("generated a point which is not in the prime subgroup") - } - } - - let mut bytes = [0u8; 32]; - points[0].serialize(&mut bytes[..]).unwrap(); - assert_eq!( - hex::encode(&bytes), - "22ac968a98ab6c50379fc8b039abc8fd9aca259f4746a05bfbdf12c86463c208", - "the first point is incorrect" - ); - let mut bytes = [0u8; 32]; - points[255].serialize(&mut bytes[..]).unwrap(); - assert_eq!( - hex::encode(&bytes), - "c8b4968a98ab6c50379fc8b039abc8fd9aca259f4746a05bfbdf12c86463c208", - "the 256th (last) point is incorrect" - ); - - let mut hasher = Sha256::new(); - for point in &points { - let mut bytes = [0u8; 32]; - point.serialize(&mut bytes[..]).unwrap(); - hasher.update(&bytes); - } - let bytes = hasher.finalize().to_vec(); - assert_eq!( - hex::encode(&bytes), - "c390cbb4bc42019685d5a01b2fb8a536d4332ea4e128934d0ae7644163089e76", - "unexpected point encountered" - ); -} From 27830cd05fd566cee9862811cc4efbcf0bef32ef Mon Sep 17 00:00:00 2001 From: Kevaundray Wedderburn Date: Sat, 22 Jan 2022 18:23:19 +0000 Subject: [PATCH 4/8] [banderwagon/CRS] - Redo test vectors to match new CRS and the fact that we use banderwagon --- src/crs.rs | 101 +++++++++++++++++++--------------------------- src/multiproof.rs | 21 ++++++++-- 2 files changed, 59 insertions(+), 63 deletions(-) diff --git a/src/crs.rs b/src/crs.rs index db4ce31..5d44e7e 100644 --- a/src/crs.rs +++ b/src/crs.rs @@ -53,10 +53,9 @@ fn generate_random_elements(num_required_points: usize, seed: &'static [u8]) -> use bandersnatch::Fq; use sha2::{Digest, Sha256}; - let mut hasher = Sha256::new(); let choose_largest = false; - (0u32..) + (0u64..) .into_iter() // Hash the seed + i to get a possible x value .map(|i| { @@ -66,76 +65,60 @@ fn generate_random_elements(num_required_points: usize, seed: &'static [u8]) -> let bytes: Vec = hasher.finalize().to_vec(); bytes }) - // The from_bytes method does not reduce the bytes, it expects the + // The Element::from_bytes method does not reduce the bytes, it expects the // input to be in a canonical format, so we must do the reduction ourselves - .map(|hash_bytes| bandersnatch::Fq::from_be_bytes_mod_order(&hash_bytes)) - // Using the x co-ordinate fetch a possible y co-ordinate - .map(|x_coord| EdwardsAffine::get_point_from_x(x_coord, choose_largest)) - .filter_map(|point| point) - // Double the point incase its not in the prime order subgroup - .map(|point| point.double()) - // Serialise x co-ordinate of point - .map(|point| { + .map(|hash_bytes| Fq::from_be_bytes_mod_order(&hash_bytes)) + .map(|x_coord| { let mut bytes = [0u8; 32]; - point.x.serialize(&mut bytes[..]).unwrap(); + x_coord.serialize(&mut bytes[..]).unwrap(); // TODO: this reverse is hacky, and its because there is no way to specify the endianness in arkworks // TODO So we reverse it here, to be interopable with the banderwagon specs which needs big endian bytes bytes.reverse(); bytes }) - // Using banderwagon deserialise the x-cordinate to get a valid banderwagon element + // Deserialise the x-cordinate to get a valid banderwagon element .map(|bytes| Element::from_bytes(&bytes)) .filter_map(|point| point) .take(num_required_points) .collect() } -// TODO: update hackmd as we are now using banderwagon -// TODO then redo this test -// #[test] -// fn crs_consistency() { -// // See: https://hackmd.io/1RcGSMQgT4uREaq1CCx_cg#Methodology -// use ark_serialize::CanonicalSerialize; -// use bandersnatch::Fq; -// use sha2::{Digest, Sha256}; +#[test] 
+fn crs_consistency() { + // TODO: update hackmd as we are now using banderwagon and the point finding strategy + // TODO is a bit different + // See: https://hackmd.io/1RcGSMQgT4uREaq1CCx_cg#Methodology + use ark_serialize::CanonicalSerialize; + use bandersnatch::Fq; + use sha2::{Digest, Sha256}; -// let points = generate_random_elements(256, b"eth_verkle_oct_2021"); -// for point in &points { -// let on_curve = point.is_on_curve(); -// let in_correct_subgroup = point.is_in_correct_subgroup_assuming_on_curve(); -// if !on_curve { -// panic!("generated a point which is not on the curve") -// } -// if !in_correct_subgroup { -// panic!("generated a point which is not in the prime subgroup") -// } -// } + let points = generate_random_elements(256, b"eth_verkle_oct_2021"); -// let mut bytes = [0u8; 32]; -// points[0].serialize(&mut bytes[..]).unwrap(); -// assert_eq!( -// hex::encode(&bytes), -// "22ac968a98ab6c50379fc8b039abc8fd9aca259f4746a05bfbdf12c86463c208", -// "the first point is incorrect" -// ); -// let mut bytes = [0u8; 32]; -// points[255].serialize(&mut bytes[..]).unwrap(); -// assert_eq!( -// hex::encode(&bytes), -// "c8b4968a98ab6c50379fc8b039abc8fd9aca259f4746a05bfbdf12c86463c208", -// "the 256th (last) point is incorrect" -// ); + let mut bytes = [0u8; 32]; + points[0].serialize(&mut bytes[..]).unwrap(); + assert_eq!( + hex::encode(&bytes), + "01587ad1336675eb912550ec2a28eb8923b824b490dd2ba82e48f14590a298a0", + "the first point is incorrect" + ); + let mut bytes = [0u8; 32]; + points[255].serialize(&mut bytes[..]).unwrap(); + assert_eq!( + hex::encode(&bytes), + "3de2be346b539395b0c0de56a5ccca54a317f1b5c80107b0802af9a62276a4d8", + "the 256th (last) point is incorrect" + ); -// let mut hasher = Sha256::new(); -// for point in &points { -// let mut bytes = [0u8; 32]; -// point.serialize(&mut bytes[..]).unwrap(); -// hasher.update(&bytes); -// } -// let bytes = hasher.finalize().to_vec(); -// assert_eq!( -// hex::encode(&bytes), -// "c390cbb4bc42019685d5a01b2fb8a536d4332ea4e128934d0ae7644163089e76", -// "unexpected point encountered" -// ); -// } + let mut hasher = Sha256::new(); + for point in &points { + let mut bytes = [0u8; 32]; + point.serialize(&mut bytes[..]).unwrap(); + hasher.update(&bytes); + } + let bytes = hasher.finalize().to_vec(); + assert_eq!( + hex::encode(&bytes), + "1fcaea10bf24f750200e06fa473c76ff0468007291fa548e2d99f09ba9256fdb", + "unexpected point encountered" + ); +} diff --git a/src/multiproof.rs b/src/multiproof.rs index ea19062..97a20b1 100644 --- a/src/multiproof.rs +++ b/src/multiproof.rs @@ -265,6 +265,7 @@ impl MultiPointProof { // TODO: we could probably get rid of this method altogether and just do this in the multiproof // TODO method +// TODO: check that the point is actually not in the domain pub(crate) fn open_point_outside_of_domain( crs: CRS, precomp: &PrecomputedWeights, @@ -378,6 +379,11 @@ fn test_ipa_consistency() { let poly: Vec = (0..n).map(|i| Fr::from(((i % 32) + 1) as u128)).collect(); let polynomial = LagrangeBasis::new(poly.clone()); let commitment = crs.commit_lagrange_poly(&polynomial); + assert_eq!( + hex::encode(commitment.to_bytes()), + "1b9dff8f5ebbac250d291dfe90e36283a227c64b113c37f1bfb9e7a743cdb128" + ); + let mut prover_transcript = Transcript::new(b"test"); let proof = open_point_outside_of_domain( @@ -394,12 +400,19 @@ fn test_ipa_consistency() { p_challenge.serialize(&mut bytes[..]).unwrap(); assert_eq!( hex::encode(&bytes), - "50d7f61175ffcfefc0dd603943ec8da7568608564d509cd0d1fa71cc48dc3515" + 
"0a81881cbfd7d7197a54ebd67ed6a68b5867f3c783706675b34ece43e85e7306" ); let mut verifier_transcript = Transcript::new(b"test"); let b = LagrangeBasis::evaluate_lagrange_coefficients(&precomp, crs.n, input_point); let output_point = inner_product(&poly, &b); + let mut bytes = [0u8; 32]; + output_point.serialize(&mut bytes[..]).unwrap(); + assert_eq!( + hex::encode(bytes), + "4a353e70b03c89f161de002e8713beec0d740a5e20722fd5bd68b30540a33208" + ); + assert!(proof.verify_multiexp( &mut verifier_transcript, &crs, @@ -419,7 +432,7 @@ fn test_ipa_consistency() { // Check that serialisation is consistent with other implementations let got = hex::encode(&bytes); - let expected = "9cbba7fb5bf96ef7fd13e085f783e8b09263426dc5d17142acd0d851ff705fd0fcf15f2fad4f6578d95339e914b44ae6dce731d786bf252c92b5fc0d9c4461d310595f85da60a24822cf8aaa137f0db313069fe6bf32d9f41b4eeead08ea3b88956fc57860b5b479b8dd6d7b73c37a793b134b47197f6e9a1dfaa518cca52b29fab70bb94ed51588684776fe5da4d4e6aaee0126fff920f0f1b744f5a4dc3226eb0f8ec433351abb5cde8a53d6e4ecd86e5a00486dc41ae0feab9823137d132d288d91cf339a2e944b921fe0f886f333902a32026408f7e30b8b4193b7f9c2f128ae45c0c7cfe8cd752559b8dc191eba7f13536d173cc087de5425cbb7114f529107539160aa9f8706fd0ef56adf45ba1cce515b88fc43e8618586d207a25f1ce07ff1bbeff6dc1306c2125d21db49c9431240fd78865b010dc3132a7052bdeb23970d4af5304857423fafcd08e4e91d60a82006da73d2df57fa80588f753e3aaa12e294af01ecd06cdc2c69fb4603536355f523ae918ca24ba51aff3130dd5b3f7a962db4208154c268a83c1dfb65d8a91609403ffbb085cbe8f28c24ae3aa67a9776135e07ab675275a76ec54f8ff5355fe9e6419739d1e2f1f4951c43ce619758c8348f28e50000cb5c45915044a9e47bf9514c6eaf8ec88f31fb3cc7b52ba60e038ebd684a9f8efee1d345724764bebec999c230908759ac01cf30829cd981fff0e1fa629b4fc6702c824d7764901af6e9e0b5d36d1fc194ba2408311b0c"; + let expected = "273395a8febdaed38e94c3d874e99c911a47dd84616d54c55021d5c4131b507e46a4ec2c7e82b77ec2f533994c91ca7edaef212c666a1169b29c323eabb0cf690e0146638d0e2d543f81da4bd597bf3013e1663f340a8f87b845495598d0a3951590b6417f868edaeb3424ff174901d1185a53a3ee127fb7be0af42dda44bf992885bde279ef821a298087717ef3f2b78b2ede7f5d2ea1b60a4195de86a530eb247fd7e456012ae9a070c61635e55d1b7a340dfab8dae991d6273d099d9552815434cc1ba7bcdae341cf7928c6f25102370bdf4b26aad3af654d9dff4b3735661db3177342de5aad774a59d3e1b12754aee641d5f9cd1ecd2751471b308d2d8410add1c9fcc5a2b7371259f0538270832a98d18151f653efbc60895fab8be9650510449081626b5cd24671d1a3253487d44f589c2ff0da3557e307e520cf4e0054bbf8bdffaa24b7e4cce5092ccae5a08281ee24758374f4e65f126cacce64051905b5e2038060ad399c69ca6cb1d596d7c9cb5e161c7dcddc1a7ad62660dd4a5f69b31229b80e6b3df520714e4ea2b5896ebd48d14c7455e91c1ecf4acc5ffb36937c49413b7d1005dd6efbd526f5af5d61131ca3fcdae1218ce81c75e62b39100ec7f474b48a2bee6cef453fa1bc3db95c7c6575bc2d5927cbf7413181ac905766a4038a7b422a8ef2bf7b5059b5c546c19a33c1049482b9a9093f864913ca82290decf6e9a65bf3f66bc3ba4a8ed17b56d890a83bcbe74435a42499dec115"; assert_eq!(got, expected) } @@ -475,7 +488,7 @@ fn multiproof_consistency() { p_challenge.serialize(&mut bytes[..]).unwrap(); assert_eq!( hex::encode(&bytes), - "f9c48313d1af5e069386805b966ce53a3d95794b82da3aac6d68fd629062a31c" + "eee8a80357ff74b766eba39db90797d022e8d6dee426ded71234241be504d519" ); let mut verifier_transcript = Transcript::new(b"test"); @@ -495,6 +508,6 @@ fn multiproof_consistency() { // Check that serialisation is consistent with other implementations let got = hex::encode(bytes); - let expected = 
"1e575ed50234769345382d64f828d8dd65052cc623c4bfe6dd1ca0a8eb6940de717d20b92f592aea4e1a649644ee92d83813e8e296c71e2d32b40532f455d8b9b56baadafbe84808d784aa920836b73af49d758bd8bb1a2690df8b2450d2112e3a48a06378bc60dffa9cd9f80c9c4da0385a388fc8edeca1a740d76b3ab1d8d3ccb0387a0c2005432d6a52e98ca46c0649a69b6b02b9832b1e108199e6977c403624cfff05715445e37586444a27d8c97f18b3bbf417b442e8c8ab16dfe3b0e96ba20178280e6192f8e4e861a21215f394c1ff3057cd5492d1a5154ed8330f3f93f7f02079042c27d51c6299904eadaf6e1e290cc94920d143112ddb34cf2488131bc321ff0349150aad44563ac765905b15b30ac71ebb01c78d7e26e4f920219d040fb50fab3a233ea349fe5e09b1c7e56b311dc8e4505c04c60e27c86d8cbb72a0fe057815972f4bf2e126684a79ba5a3932a9713e059cd51d1a8f0599efa54172d4dfae7016ce2b7b2b325ba847782a2741ba560c158e38d10362a61a11538dd3c5e6742bb96901f53291649fbad13518c79c40af9733f5b54743f7fba3cda82d56894d0265f0befbc2e8a45612411e9bde4123263b1cde7c76ede1b21d97694382416b8c8f502f2c9af06bf250095122fbbfada1b683f588aa01a654a2ddd736135729835790845b3c403cc793bbfc808dba33b7af33bb43d49e06595a095ac84290e268e41d72ef9b93d4bafd0bf537179621a1c4936a5b7f713e9dd5f0cec2779933f46e0d8f48f15a81565de89df43e727e834de5386e446ca2696a13"; + let expected = "4f53588244efaf07a370ee3f9c467f933eed360d4fbf7a19dfc8bc49b67df4711bf1d0a720717cd6a8c75f1a668cb7cbdd63b48c676b89a7aee4298e71bd7f4013d7657146aa9736817da47051ed6a45fc7b5a61d00eb23e5df82a7f285cc10e67d444e91618465ca68d8ae4f2c916d1942201b7e2aae491ef0f809867d00e83468fb7f9af9b42ede76c1e90d89dd789ff22eb09e8b1d062d8a58b6f88b3cbe80136fc68331178cd45a1df9496ded092d976911b5244b85bc3de41e844ec194256b39aeee4ea55538a36139211e9910ad6b7a74e75d45b869d0a67aa4bf600930a5f760dfb8e4df9938d1f47b743d71c78ba8585e3b80aba26d24b1f50b36fa1458e79d54c05f58049245392bc3e2b5c5f9a1b99d43ed112ca82b201fb143d401741713188e47f1d6682b0bf496a5d4182836121efff0fd3b030fc6bfb5e21d6314a200963fe75cb856d444a813426b2084dfdc49dca2e649cb9da8bcb47859a4c629e97898e3547c591e39764110a224150d579c33fb74fa5eb96427036899c04154feab5344873d36a53a5baefd78c132be419f3f3a8dd8f60f72eb78dd5f43c53226f5ceb68947da3e19a750d760fb31fa8d4c7f53bfef11c4b89158aa56b1f4395430e16a3128f88e234ce1df7ef865f2d2c4975e8c82225f578310c31fd41d265fd530cbfa2b8895b228a510b806c31dff3b1fa5c08bffad443d567ed0e628febdd22775776e0cc9cebcaea9c6df9279a5d91dd0ee5e7a0434e989a160005321c97026cb559f71db23360105460d959bcdf74bee22c4ad8805a1d497507"; assert_eq!(got, expected) } From 9477c0bbb74a9c2ce31a8cffb05b521ed32e43ba Mon Sep 17 00:00:00 2001 From: Dragan Pilipovic Date: Thu, 14 Sep 2023 19:34:07 +0200 Subject: [PATCH 5/8] turn on benchmarks --- Cargo.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Cargo.toml b/Cargo.toml index fa367e7..1717f1d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,6 +3,9 @@ name = "ipa-multipoint" version = "0.1.0" edition = "2018" +[[bench]] +name = "benchmark_main" +harness = false # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] From ae151907980ed8882b9e6addf1921bfdf7bf4553 Mon Sep 17 00:00:00 2001 From: Dragan Pilipovic Date: Thu, 14 Sep 2023 20:02:32 +0200 Subject: [PATCH 6/8] fix benchmarks place --- Cargo.toml | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 1717f1d..bedee71 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,9 +3,7 @@ name = "ipa-multipoint" version = "0.1.0" edition = "2018" -[[bench]] -name = "benchmark_main" -harness = false + # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] @@ -23,6 +21,6 @@ sha2 = "0.9.8" 
hex = "0.4.3" banderwagon = { git = "https://github.com/crate-crypto/banderwagon" } -# [[bench]] -# name = "benchmark_main" -# harness = false +[[bench]] +name = "benchmark_main" +harness = false From 6b4c4b1fe8d91f710baa899704eb683fb28346ad Mon Sep 17 00:00:00 2001 From: Dragan Pilipovic Date: Thu, 14 Sep 2023 20:32:05 +0200 Subject: [PATCH 7/8] update docs, remove unnecessary dependencies --- Readme.md | 45 +++++++++++++++++++++++++++++++++++++++++-- src/crs.rs | 1 - src/ipa.rs | 7 +------ src/lagrange_basis.rs | 2 +- src/multiproof.rs | 1 - 5 files changed, 45 insertions(+), 11 deletions(-) diff --git a/Readme.md b/Readme.md index cf4e18d..24fd77c 100644 --- a/Readme.md +++ b/Readme.md @@ -2,7 +2,7 @@ A polynomial commitment scheme for opening multiple polynomials at different points using the inner product argument. -This library uses the bandersnatch curve and is described in [https://eprint.iacr.org/2021/1152.pdf]. +This library uses the banderwagon prime subgroup (https://hackmd.io/@6iQDuIePQjyYBqDChYw_jg/BJ2-L6Nzc) built on top of bandersnatch curve described in [https://eprint.iacr.org/2021/1152.pdf]. **Do not use in production.** @@ -24,6 +24,8 @@ This library uses the bandersnatch curve and is described in [https://eprint.iac ## Tentative benchmarks +Bandersnatch (old): + Machine : 2.4 GHz 8-Core Intel Core i9 - To verify the opening of a polynomial of degree 255 (256 points in lagrange basis): `11.92ms` @@ -37,4 +39,43 @@ Machine : 2.4 GHz 8-Core Intel Core i9 - To prove a multi-opening proof of 20,000 polynomials: `422.94ms` -These benchmarks are tentative because on one hand, the machine being used may not be the what the average user uses, while on the other hand, we have not optimised the verifier algorithm to remove `bH` , the pippenger algorithm does not take into consideration GLV and we are not using rayon to parallelise. 
+ +New benchmark on banderwagon subgroup: Apple M1 Pro 16GB RAM + +- ipa - prove (256): `28.700 ms` + +- ipa - verify (multi exp2 256): `2.1628 ms` + +- ipa - verify (256): `20.818 ms` + +- multipoint - verify (256)/1: `2.6983 ms` + +- multipoint - verify (256)/1000: `8.5925 ms` + +- multipoint - verify (256)/2000: `12.688 ms` + +- multipoint - verify (256)/4000: `21.726 ms` + +- multipoint - verify (256)/8000: `36.616 ms` + +- multipoint - verify (256)/16000: `69.401 ms` + +- multipoint - verify (256)/128000: `490.23 ms` + +- multiproof - prove (256)/1: `33.231 ms` + +- multiproof - prove (256)/1000: `47.764 ms` + +- multiproof - prove (256)/2000: `56.670 ms` + +- multiproof - prove (256)/4000: `74.597 ms` + +- multiproof - prove (256)/8000: `114.39 ms` + +- multiproof - prove (256)/16000: `189.94 ms` + +- multiproof - prove (256)/128000: `1.2693 s` + + + +*These benchmarks are tentative because on one hand, the machine being used may not be the what the average user uses, while on the other hand, we have not optimised the verifier algorithm to remove `bH` , the pippenger algorithm does not take into consideration GLV and we are not using rayon to parallelise.* diff --git a/src/crs.rs b/src/crs.rs index 5d44e7e..9486ab0 100644 --- a/src/crs.rs +++ b/src/crs.rs @@ -1,5 +1,4 @@ use ark_serialize::CanonicalSerialize; -use bandersnatch::{EdwardsAffine, EdwardsProjective}; use banderwagon::Element; use crate::{ipa::slow_vartime_multiscalar_mul, lagrange_basis::LagrangeBasis}; diff --git a/src/ipa.rs b/src/ipa.rs index 68bf111..a7cf168 100644 --- a/src/ipa.rs +++ b/src/ipa.rs @@ -2,20 +2,15 @@ use crate::crs::CRS; use crate::math_utils::inner_product; use crate::transcript::{Transcript, TranscriptProtocol}; -use ark_ec::group::Group; -use ark_ec::{AffineCurve, ProjectiveCurve}; use ark_ff::Field; -use ark_ff::PrimeField; -use ark_ff::{One, Zero}; +use ark_ff::One; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use banderwagon::{multi_scalar_mul, Element, Fr}; use itertools::Itertools; use crate::{IOError, IOErrorKind, IOResult}; -use std::io::{Read, Write}; -use std::borrow::Borrow; use std::iter; #[derive(Debug, Clone, PartialEq, Eq)] diff --git a/src/lagrange_basis.rs b/src/lagrange_basis.rs index aa2d569..d431f06 100644 --- a/src/lagrange_basis.rs +++ b/src/lagrange_basis.rs @@ -1,5 +1,5 @@ use ark_ff::{batch_inversion, batch_inversion_and_mul, Field, One, Zero}; -use ark_poly::{domain, univariate::DensePolynomial, Polynomial, UVPolynomial}; +use ark_poly::{univariate::DensePolynomial, Polynomial, UVPolynomial}; use bandersnatch::Fr; use std::{ convert::TryFrom, diff --git a/src/multiproof.rs b/src/multiproof.rs index 97a20b1..3c180d4 100644 --- a/src/multiproof.rs +++ b/src/multiproof.rs @@ -12,7 +12,6 @@ use ark_ec::{AffineCurve, ProjectiveCurve}; use ark_ff::PrimeField; use ark_ff::{batch_inversion, Field}; use ark_ff::{One, Zero}; -use ark_poly::univariate::DensePolynomial; use ark_poly::{Polynomial, UVPolynomial}; use std::collections::HashMap; From 85dc2b4def99883bcb459fee8babe3523326f55c Mon Sep 17 00:00:00 2001 From: Dragan Pilipovic Date: Fri, 15 Sep 2023 09:51:05 +0200 Subject: [PATCH 8/8] Update Readme.md --- Readme.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Readme.md b/Readme.md index 24fd77c..bb8a7c8 100644 --- a/Readme.md +++ b/Readme.md @@ -2,7 +2,7 @@ A polynomial commitment scheme for opening multiple polynomials at different points using the inner product argument. 
-This library uses the banderwagon prime subgroup (https://hackmd.io/@6iQDuIePQjyYBqDChYw_jg/BJ2-L6Nzc) built on top of bandersnatch curve described in [https://eprint.iacr.org/2021/1152.pdf].
+This library uses the banderwagon prime group (https://hackmd.io/@6iQDuIePQjyYBqDChYw_jg/BJ2-L6Nzc) built on top of the bandersnatch curve described in [https://eprint.iacr.org/2021/1152.pdf].

 **Do not use in production.**
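
For orientation, a minimal end-to-end sketch of the commit-and-challenge flow that the updated test vectors (e.g. `test_ipa_consistency`) pin down is shown below. It assumes the crate is consumed as `ipa_multipoint` with the public modules declared in `src/lib.rs` (`crs`, `lagrange_basis`, `transcript`) and the `hex` crate for display; the exact item paths, the visibility of `LagrangeBasis::new`, and the `main` wrapper are illustrative assumptions based on the diffs in this series, not the crate's documented API.

```rust
// Illustrative sketch only: module paths and visibilities are assumed from the
// diffs in this patch series and may differ from the published crate.
use banderwagon::Fr;
use ipa_multipoint::crs::CRS;
use ipa_multipoint::lagrange_basis::LagrangeBasis;
use ipa_multipoint::transcript::{Transcript, TranscriptProtocol};

fn main() {
    // 256 banderwagon basis points derived by hashing the seed, plus Q,
    // as generated by `generate_random_elements` in src/crs.rs.
    let crs = CRS::new(256, b"eth_verkle_oct_2021");

    // A polynomial given by its evaluations over the 256-point domain
    // (Lagrange basis), matching the shape used in the tests.
    let evaluations: Vec<Fr> = (0..256u128).map(|i| Fr::from((i % 32) + 1)).collect();
    let polynomial = LagrangeBasis::new(evaluations);

    // The commitment is a multi-scalar multiplication of the evaluations
    // against the CRS basis points G.
    let commitment = crs.commit_lagrange_poly(&polynomial);
    println!("commitment: {}", hex::encode(commitment.to_bytes()));

    // Bind the commitment into the Fiat-Shamir transcript before drawing a
    // challenge scalar, mirroring the order used in the IPA prover/verifier.
    let mut transcript = Transcript::new(b"test");
    transcript.append_point(b"commitment", &commitment);
    let challenge: Fr = transcript.challenge_scalar(b"challenge");
    println!("challenge: {:?}", challenge);
}
```

With banderwagon, both the commitment bytes and the transcript hashing go through `Element`'s canonical 32-byte serialisation, which is why the expected hex strings in the tests were regenerated in this series.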