ZKEmail using DKIM and new circuits #41

Draft: wants to merge 3 commits into main
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -28,5 +28,5 @@ phf = { version = "0.11.2", features = ["macros"] }
rsa = { version = "0.9.2" }
getrandom = { version = "0.2.10", features = ["custom"] }
p256 = {version = "0.13.2", features = ["ecdsa-core", "arithmetic", "serde"]}
cosmos-sdk-proto = {git = "https://github.com/burnt-labs/cosmos-rust.git", rev = "75e72f446629f98330e209e2f6268250d325cccb", default-features = false, features = ["std", "cosmwasm", "xion", "serde"]}
cosmos-sdk-proto = {git = "https://github.com/burnt-labs/cosmos-rust.git", rev = "2b3d0a8c2bfb5d19565faf708f97495ebccca3f2", default-features = false, features = ["std", "cosmwasm", "xion", "serde"]}
url = "2.5.2"
13 changes: 13 additions & 0 deletions contracts/account/Cargo.toml
@@ -31,3 +31,16 @@ rsa = { workspace = true }
getrandom = { workspace = true }
p256 = { workspace = true }
cosmos-sdk-proto = { workspace = true }


# zk deps
ark-crypto-primitives = { version = "=0.4.0" }
ark-ec = { version = "=0.4.2", default-features = false }
ark-ff = { version = "=0.4.2", default-features = false, features = [ "asm"] }
ark-std = { version = "=0.4.0", default-features = false }
ark-bn254 = { version = "=0.4.0" }
ark-groth16 = { version = "=0.4.0", default-features = false }
ark-relations = { version = "=0.4.0", default-features = false }
ark-serialize = { version = "=0.4.2", default-features = false }
ark-poly = { version = "=0.4.2", default-features = false }
poseidon-ark = {git = "https://github.com/arnaucube/poseidon-ark"}
50 changes: 44 additions & 6 deletions contracts/account/src/auth.rs
@@ -5,11 +5,13 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};

mod eth_crypto;
mod groth16;
pub mod jwt;
pub mod passkey;
mod secp256r1;
mod sign_arb;
pub mod util;
mod zkemail;

pub mod testing {
pub use super::sign_arb::wrap_message;
@@ -48,6 +50,13 @@ pub enum AddAuthenticator {
url: String,
credential: Binary,
},
ZKEmail {
id: u8,
vkey: Binary,
email_hash: Binary,
dkim_domain: String,
proof: Binary,
},
}

impl AddAuthenticator {
@@ -59,18 +68,38 @@ impl AddAuthenticator {
AddAuthenticator::Jwt { id, .. } => *id,
AddAuthenticator::Secp256R1 { id, .. } => *id,
AddAuthenticator::Passkey { id, .. } => *id,
AddAuthenticator::ZKEmail { id, .. } => *id,
}
}
}

#[derive(Serialize, Deserialize, Clone, JsonSchema, PartialEq, Debug)]
pub enum Authenticator {
Secp256K1 { pubkey: Binary },
Ed25519 { pubkey: Binary },
EthWallet { address: String },
Jwt { aud: String, sub: String },
Secp256R1 { pubkey: Binary },
Passkey { url: String, passkey: Binary },
Secp256K1 {
pubkey: Binary,
},
Ed25519 {
pubkey: Binary,
},
EthWallet {
address: String,
},
Jwt {
aud: String,
sub: String,
},
Secp256R1 {
pubkey: Binary,
},
Passkey {
url: String,
passkey: Binary,
},
ZKEmail {
vkey: Binary,
email_hash: Binary,
dkim_domain: String,
},
}

impl Authenticator {
@@ -138,6 +167,15 @@ impl Authenticator {

Ok(true)
}
Authenticator::ZKEmail {
vkey,
email_hash,
dkim_domain,
} => {
let verification =
zkemail::verify(deps, tx_bytes, sig_bytes, vkey, email_hash, dkim_domain)?;
Ok(verification)
}
}
}
}
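
For context while reviewing, here is a minimal client-side sketch of how the new registration message might be constructed, based only on the `AddAuthenticator::ZKEmail` variant added above; the placeholder values and the error handling are assumptions, not part of this PR:

```rust
use cosmwasm_std::Binary;

// Hypothetical placeholder values; field semantics follow the variant added in auth.rs:
// the vkey is the circuit's compressed Groth16 verifying key, email_hash is the
// commitment registered for the user's email, and proof is a compressed Groth16 proof
// produced off-chain.
let add_zkemail = AddAuthenticator::ZKEmail {
    id: 1,
    vkey: Binary::from_base64("<base64 verifying key>")?,
    email_hash: Binary::from_base64("<base64 email commitment>")?,
    dkim_domain: "example.com".to_string(),
    proof: Binary::from_base64("<base64 proof>")?,
};
```

Note that `proof` is carried in the message but currently ignored by `add_auth_method` (see the `proof: _` pattern and the `// todo` about verification in execute.rs below), so registration does not yet verify the supplied proof.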
122 changes: 122 additions & 0 deletions contracts/account/src/auth/groth16.rs
@@ -0,0 +1,122 @@
use ark_bn254::{Bn254, Config, FrConfig};
use ark_ec::bn::Bn;
use ark_ff::Fp;
use ark_ff::MontBackend;
use ark_groth16::{Groth16, Proof, VerifyingKey};
use ark_ff::PrimeField;
use ark_groth16::r1cs_to_qap::{evaluate_constraint, LibsnarkReduction, R1CSToQAP};
use ark_poly::EvaluationDomain;
use ark_relations::r1cs::{ConstraintMatrices, ConstraintSystemRef, SynthesisError};
use ark_std::{cfg_into_iter, cfg_iter, cfg_iter_mut, vec};

// Developer's note:
// This has been copied over from the ark-circom package, which focuses on
// proving and verifying in arkworks using circom. It has many dependencies on
// wasmer/ethers/js that we do not need, since we only want to verify existing proofs.

pub type GrothBnVkey = VerifyingKey<Bn254>;
pub type GrothBnProof = Proof<Bn<Config>>;
pub type GrothBn = Groth16<Bn254, CircomReduction>;
pub type GrothFp = Fp<MontBackend<FrConfig, 4>, 4>;


/// Implements the witness map used by snarkjs. The arkworks witness map calculates the
/// coefficients of H through computing (AB-C)/Z in the evaluation domain and going back to the
/// coefficient's domain. snarkjs instead precomputes the Lagrange form of the powers of tau bases
/// in a domain twice as large and the witness map is computed as the odd coefficients of (AB-C)
/// in that domain. This serves as HZ when computing the C proof element.
pub struct CircomReduction;

impl R1CSToQAP for CircomReduction {
#[allow(clippy::type_complexity)]
fn instance_map_with_evaluation<F: PrimeField, D: EvaluationDomain<F>>(
cs: ConstraintSystemRef<F>,
t: &F,
) -> Result<(Vec<F>, Vec<F>, Vec<F>, F, usize, usize), SynthesisError> {
LibsnarkReduction::instance_map_with_evaluation::<F, D>(cs, t)
}

fn witness_map_from_matrices<F: PrimeField, D: EvaluationDomain<F>>(
matrices: &ConstraintMatrices<F>,
num_inputs: usize,
num_constraints: usize,
full_assignment: &[F],
) -> Result<Vec<F>, SynthesisError> {
let zero = F::zero();
let domain =
D::new(num_constraints + num_inputs).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
let domain_size = domain.size();

let mut a = vec![zero; domain_size];
let mut b = vec![zero; domain_size];

cfg_iter_mut!(a[..num_constraints])
.zip(cfg_iter_mut!(b[..num_constraints]))
.zip(cfg_iter!(&matrices.a))
.zip(cfg_iter!(&matrices.b))
.for_each(|(((a, b), at_i), bt_i)| {
*a = evaluate_constraint(at_i, full_assignment);
*b = evaluate_constraint(bt_i, full_assignment);
});

{
let start = num_constraints;
let end = start + num_inputs;
a[start..end].clone_from_slice(&full_assignment[..num_inputs]);
}

let mut c = vec![zero; domain_size];
cfg_iter_mut!(c[..num_constraints])
.zip(&a)
.zip(&b)
.for_each(|((c_i, &a), &b)| {
*c_i = a * b;
});

domain.ifft_in_place(&mut a);
domain.ifft_in_place(&mut b);

let root_of_unity = {
let domain_size_double = 2 * domain_size;
let domain_double =
D::new(domain_size_double).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
domain_double.element(1)
};
D::distribute_powers_and_mul_by_const(&mut a, root_of_unity, F::one());
D::distribute_powers_and_mul_by_const(&mut b, root_of_unity, F::one());

domain.fft_in_place(&mut a);
domain.fft_in_place(&mut b);

let mut ab = domain.mul_polynomials_in_evaluation_domain(&a, &b);
drop(a);
drop(b);

domain.ifft_in_place(&mut c);
D::distribute_powers_and_mul_by_const(&mut c, root_of_unity, F::one());
domain.fft_in_place(&mut c);

cfg_iter_mut!(ab)
.zip(c)
.for_each(|(ab_i, c_i)| *ab_i -= &c_i);

Ok(ab)
}

fn h_query_scalars<F: PrimeField, D: EvaluationDomain<F>>(
max_power: usize,
t: F,
_: F,
delta_inverse: F,
) -> Result<Vec<F>, SynthesisError> {
// the usual H query has domain-1 powers. Z has domain powers. So HZ has 2*domain-1 powers.
let mut scalars = cfg_into_iter!(0..2 * max_power + 1)
.map(|i| delta_inverse * t.pow([i as u64]))
.collect::<Vec<_>>();
let domain_size = scalars.len();
let domain = D::new(domain_size).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
// generate the lagrange coefficients
domain.ifft_in_place(&mut scalars);
Ok(cfg_into_iter!(scalars).skip(1).step_by(2).collect())
}
}
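
A reading aid for the `CircomReduction` doc comment above (notation is mine, not part of the PR). The stock arkworks reduction computes the QAP quotient

$$h(X) = \frac{a(X)\,b(X) - c(X)}{z(X)}, \qquad z(X) = X^{n} - 1,$$

over an evaluation domain $H$ of size $n$. The snarkjs-compatible map in `witness_map_from_matrices` never divides by $z$: it shifts $a$, $b$, and $c$ onto the coset $\omega_{2n} \cdot H$ (the odd-indexed points of the size-$2n$ domain, via `distribute_powers_and_mul_by_const` with `domain_double.element(1)`) and returns the evaluations of $a\,b - c$ there, which stand in for $h \cdot z$ when the $C$ proof element is assembled against snarkjs' precomputed bases; `h_query_scalars` produces the matching odd-indexed Lagrange-basis scalars.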
103 changes: 103 additions & 0 deletions contracts/account/src/auth/zkemail.rs
@@ -0,0 +1,103 @@
use crate::auth::groth16::{GrothBn, GrothBnProof, GrothBnVkey, GrothFp};
use crate::error::ContractError::InvalidDkim;
use crate::error::ContractResult;
use ark_crypto_primitives::snark::SNARK;
use ark_ff::{PrimeField, Zero};
use ark_serialize::CanonicalDeserialize;
use base64::engine::general_purpose::STANDARD_NO_PAD;
use base64::Engine;
use cosmos_sdk_proto::prost::Message;
use cosmos_sdk_proto::traits::MessageExt;
use cosmos_sdk_proto::xion::v1::dkim::{QueryDkimPubKeysRequest, QueryDkimPubKeysResponse};
use cosmwasm_std::{Binary, Deps};

const TX_BODY_MAX_BYTES: usize = 512;

pub fn calculate_tx_body_commitment(tx: &str) -> GrothFp {
let padded_tx_bytes = pad_bytes(tx.as_bytes(), TX_BODY_MAX_BYTES);
let tx = pack_bytes_into_fields(padded_tx_bytes);
let poseidon = poseidon_ark::Poseidon::new();
let mut commitment = GrothFp::zero(); // running commitment, chained over 16-element chunks below

tx.chunks(16).enumerate().for_each(|(i, chunk)| {
let chunk_commitment = poseidon.hash(chunk.to_vec()).unwrap();
commitment = if i == 0 {
chunk_commitment
} else {
poseidon.hash(vec![commitment, chunk_commitment]).unwrap()
};
});

commitment
}

fn pack_bytes_into_fields(bytes: Vec<u8>) -> Vec<GrothFp> {
// convert each 31 bytes into one field element
let mut fields = vec![];
bytes.chunks(31).for_each(|chunk| {
fields.push(GrothFp::from_le_bytes_mod_order(&chunk));
});
fields
}

fn pad_bytes(bytes: &[u8], length: usize) -> Vec<u8> {
let mut padded = bytes.to_vec();
let padding = length - bytes.len();
for _ in 0..padding {
padded.push(0);
}
padded
}

pub fn verify(
deps: Deps,
tx_bytes: &Binary,
sig_bytes: &Binary,
vkey_bytes: &Binary,
email_hash: &Binary,
dkim_domain: &String,
) -> ContractResult<bool> {
// vkey serialization is checked on submission
let vkey = GrothBnVkey::deserialize_compressed_unchecked(vkey_bytes.as_slice())?;

let (dkim_hash_bz, proof_bz) = sig_bytes.split_at(256);

// proof submission is from the tx, we can't be sure if it was properly serialized
let proof = GrothBnProof::deserialize_compressed(proof_bz)?;

// public inputs are the tx body commitment, the email hash, and the dkim pubkey hash
let mut inputs: [GrothFp; 3] = [GrothFp::zero(); 3];

// tx body input
let tx_input = calculate_tx_body_commitment(STANDARD_NO_PAD.encode(tx_bytes).as_str());
inputs[0] = tx_input;

// email hash input, compressed at authenticator registration
let email_hash_input = GrothFp::deserialize_compressed_unchecked(email_hash.as_slice())?;
inputs[1] = email_hash_input;

// verify that domain+hash are known in chain state
let query = QueryDkimPubKeysRequest {
selector: "".to_string(),
domain: dkim_domain.to_string(),
poseidon_hash: dkim_hash_bz.to_vec(),
pagination: None,
};
let query_bz = query.to_bytes()?;
let query_response = deps.querier.query_grpc(
String::from("/xion.dkim.v1.Query/QueryDkimPubKeys"),
Binary::new(query_bz),
)?;
let query_response = QueryDkimPubKeysResponse::decode(query_response.as_slice())?;
if query_response.dkim_pub_keys.is_empty() {
return Err(InvalidDkim);
}

// verify the dkim pubkey hash in the proof output. the poseidon hash is
// from the tx, we can't be sure if it was properly formatted
inputs[2] = GrothFp::deserialize_compressed(dkim_hash_bz)?;

let verified = GrothBn::verify(&vkey, inputs.as_slice(), &proof)?;

Ok(verified)
}
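
To make the wire format explicit for reviewers: `verify` splits the signature at byte 256 and feeds the circuit exactly three public inputs in a fixed order. A hedged client-side sketch follows; the helper name and its asserts are mine, not part of the PR:

```rust
/// Hypothetical helper showing the signature layout `verify` expects:
/// bytes [0, 256)  -> Poseidon hash of the DKIM pubkey (checked against chain state
///                    and deserialized into the third public input),
/// bytes [256, ..) -> the compressed Groth16 proof.
///
/// Public input order inside `verify`:
///   [0] Poseidon commitment of the base64-encoded tx body (padded to 512 bytes),
///   [1] email hash registered with the authenticator,
///   [2] DKIM pubkey hash taken from the first 256 bytes of the signature.
fn build_zkemail_sig(dkim_pubkey_hash: &[u8], compressed_proof: &[u8]) -> Vec<u8> {
    assert_eq!(dkim_pubkey_hash.len(), 256, "verify() splits at byte 256");
    let mut sig = Vec::with_capacity(256 + compressed_proof.len());
    sig.extend_from_slice(dkim_pubkey_hash);
    sig.extend_from_slice(compressed_proof);
    sig
}
```

Also worth noting for review: data fixed at registration (`vkey`, `email_hash`) is read with `deserialize_compressed_unchecked`, while tx-supplied data (the proof and the DKIM hash) goes through the checked `deserialize_compressed`, matching the inline comments above.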
15 changes: 15 additions & 0 deletions contracts/account/src/error.rs
@@ -84,6 +84,15 @@ pub enum ContractError {

#[error(transparent)]
FromUTF8(#[from] std::string::FromUtf8Error),

#[error("r1cs synthesis error")]
R1CS(#[from] ark_relations::r1cs::SynthesisError),

#[error("{0}")]
ArkSerialization(String),

#[error("dkim invalid")]
InvalidDkim,
}

pub type ContractResult<T> = Result<T, ContractError>;
@@ -99,3 +108,9 @@ impl From<serde_json::Error> for ContractError {
Self::SerdeJSON(format!("{:?}", value))
}
}

impl From<ark_serialize::SerializationError> for ContractError {
fn from(value: ark_serialize::SerializationError) -> Self {
Self::ArkSerialization(format!("{:?}", value))
}
}
19 changes: 19 additions & 0 deletions contracts/account/src/execute.rs
@@ -70,6 +70,12 @@ pub fn before_tx(
Authenticator::Passkey { .. } => {
// todo: figure out if there are minimum checks for passkeys
}
Authenticator::ZKEmail { .. } => {
// todo: verify that this minimum is as high as possible
if sig_bytes.len() < 700 {
return Err(ContractError::ShortSignature);
}
}
}

return match authenticator.verify(deps, env, tx_bytes, sig_bytes)? {
@@ -219,6 +225,19 @@ pub fn add_auth_method(
*(credential) = passkey;
Ok(())
}
AddAuthenticator::ZKEmail {
id, vkey, email_hash, dkim_domain, proof: _
} => {
// todo: how does verification work in a situation like this?

let auth = Authenticator::ZKEmail {
vkey: vkey.clone(),
email_hash: email_hash.clone(),
dkim_domain: dkim_domain.clone(),
};
save_authenticator(deps, *id, &auth)?;
Ok(())
}
}?;
Ok(
Response::new().add_event(Event::new("add_auth_method").add_attributes(vec![