feat: add asyncAggregateWithRandomness (#159)
* feat: add asyncAggregateWithRandomness

* fix: remove unnecessary errors

* feat: run sig validation in worker thread

* fix: sigh JS... sigh... it's all async... but it's not

* docs: add comment about wonky blst error

* chore: small refactor

---------

Co-authored-by: Cayman <[email protected]>
matthewkeil and wemeetagain authored Oct 29, 2024
1 parent 60c1450 commit fa3a944
Showing 5 changed files with 160 additions and 8 deletions.
6 changes: 6 additions & 0 deletions index.d.ts
@@ -54,6 +54,12 @@ export declare function aggregateSerializedSignatures(sigs: Array<Uint8Array>, s
* Signatures are deserialized and validated with infinity and group checks before aggregation.
*/
export declare function aggregateWithRandomness(sets: Array<PkAndSerializedSig>): PkAndSig
/**
* Aggregate multiple public keys and multiple serialized signatures into a single blinded public key and blinded signature.
*
* Signatures are deserialized and validated with infinity and group checks before aggregation.
*/
export declare function asyncAggregateWithRandomness(sets: Array<PkAndSerializedSig>): Promise<PkAndSig>
/**
* Verify a signature against a message and public key.
*
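
For reference, a minimal usage sketch of the new asyncAggregateWithRandomness declaration (not part of this diff). The import path @chainsafe/blst and the SecretKey helpers (fromKeygen, toPublicKey, sign, toBytes) are assumptions drawn from the rest of the bindings; each PkAndSerializedSig set pairs a deserialized PublicKey with a serialized signature.

import {randomBytes} from "node:crypto";
import {SecretKey, aggregateWithRandomness, asyncAggregateWithRandomness, verify} from "@chainsafe/blst";

const msg = randomBytes(32);

// Build PkAndSerializedSig sets: a deserialized PublicKey plus a serialized (Uint8Array) signature.
const sets = Array.from({length: 8}, () => {
  const sk = SecretKey.fromKeygen(randomBytes(32));
  return {pk: sk.toPublicKey(), sig: sk.sign(msg).toBytes()};
});

// Same {pk, sig} result shape as the sync variant, but signature validation and
// aggregation run on a worker thread instead of blocking the JS thread.
const syncAgg = aggregateWithRandomness(sets);
asyncAggregateWithRandomness(sets).then(({pk, sig}) => {
  console.log(verify(msg, pk, sig)); // true
  console.log(verify(msg, syncAgg.pk, syncAgg.sig)); // true
});
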
3 changes: 2 additions & 1 deletion index.js
@@ -310,7 +310,7 @@ if (!nativeBinding) {
throw new Error(`Failed to load native binding`)
}

const { SECRET_KEY_LENGTH, PUBLIC_KEY_LENGTH_COMPRESSED, PUBLIC_KEY_LENGTH_UNCOMPRESSED, SIGNATURE_LENGTH_COMPRESSED, SIGNATURE_LENGTH_UNCOMPRESSED, SecretKey, PublicKey, Signature, aggregatePublicKeys, aggregateSignatures, aggregateSerializedPublicKeys, aggregateSerializedSignatures, aggregateWithRandomness, verify, aggregateVerify, fastAggregateVerify, verifyMultipleAggregateSignatures } = nativeBinding
const { SECRET_KEY_LENGTH, PUBLIC_KEY_LENGTH_COMPRESSED, PUBLIC_KEY_LENGTH_UNCOMPRESSED, SIGNATURE_LENGTH_COMPRESSED, SIGNATURE_LENGTH_UNCOMPRESSED, SecretKey, PublicKey, Signature, aggregatePublicKeys, aggregateSignatures, aggregateSerializedPublicKeys, aggregateSerializedSignatures, aggregateWithRandomness, asyncAggregateWithRandomness, verify, aggregateVerify, fastAggregateVerify, verifyMultipleAggregateSignatures } = nativeBinding

module.exports.SECRET_KEY_LENGTH = SECRET_KEY_LENGTH
module.exports.PUBLIC_KEY_LENGTH_COMPRESSED = PUBLIC_KEY_LENGTH_COMPRESSED
@@ -325,6 +325,7 @@ module.exports.aggregateSignatures = aggregateSignatures
module.exports.aggregateSerializedPublicKeys = aggregateSerializedPublicKeys
module.exports.aggregateSerializedSignatures = aggregateSerializedSignatures
module.exports.aggregateWithRandomness = aggregateWithRandomness
module.exports.asyncAggregateWithRandomness = asyncAggregateWithRandomness
module.exports.verify = verify
module.exports.aggregateVerify = aggregateVerify
module.exports.fastAggregateVerify = fastAggregateVerify
73 changes: 67 additions & 6 deletions src/lib.rs
@@ -1,7 +1,7 @@
#![deny(clippy::all)]

use blst::{blst_scalar, blst_scalar_from_uint64, min_pk, MultiPoint, BLST_ERROR};
use napi::bindgen_prelude::*;
use napi::{bindgen_prelude::*, Task};
use napi_derive::napi;
use rand::{rngs::ThreadRng, Rng};

@@ -84,6 +84,13 @@ fn from_napi_err(napi_err: Error) -> Error<ErrorStatus> {
)
}

fn blst_to_napi_err(blst_error: BLST_ERROR) -> napi::Error<napi::Status> {
napi::Error::new(
napi::Status::GenericFailure,
blst_error_to_reason(blst_error),
)
}

fn invalid_hex_err(e: hex::FromHexError) -> Error<ErrorStatus> {
Error::new(ErrorStatus::InvalidHex, format!("Invalid hex: {}", e))
}
@@ -393,7 +400,7 @@ pub fn aggregate_with_randomness(env: Env, sets: Vec<PkAndSerializedSig>) -> Res
return Err(from_blst_err(BLST_ERROR::BLST_AGGR_TYPE_MISMATCH));
}

let (pks, sigs) = unzip_and_validate_aggregation_sets(&sets)?;
let (pks, sigs) = unzip_aggregation_sets(&sets, true)?;
let rands = create_rand_slice(pks.len());
let (pk, sig) = aggregate_with(pks.as_slice(), sigs.as_slice(), rands.as_slice());

@@ -403,6 +410,52 @@ pub fn aggregate_with_randomness(env: Env, sets: Vec<PkAndSerializedSig>) -> Res
})
}

pub struct AsyncAggregateWithRandomness {
pks: Vec<min_pk::PublicKey>,
sigs: Vec<min_pk::Signature>,
}

#[napi]
impl Task for AsyncAggregateWithRandomness {
type Output = (min_pk::PublicKey, min_pk::Signature);
type JsValue = PkAndSig;

fn compute(&mut self) -> napi::Result<Self::Output> {
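        // Runs on the libuv worker thread: blind the aggregation with 64-bit random
        // scalars, validating every signature (infinity and group checks) before the
        // aggregate signature is produced.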
let scalars = create_rand_slice(self.pks.len());
let pk = self.pks.as_slice().mult(&scalars, 64).to_public_key();
for sig in &self.sigs {
if let Err(blst_error) = sig.validate(true) {
return Err(blst_to_napi_err(blst_error));
}
}
let sig = self.sigs.as_slice().mult(&scalars, 64).to_signature();

Ok((pk, sig))
}

fn resolve(&mut self, env: Env, output: Self::Output) -> napi::Result<Self::JsValue> {
Ok(PkAndSig {
pk: PublicKey::into_reference(PublicKey(output.0), env)?,
sig: Signature::into_reference(Signature(output.1), env)?,
})
}
}

#[napi]
/// Aggregate multiple public keys and multiple serialized signatures into a single blinded public key and blinded signature.
///
/// Signatures are deserialized and validated with infinity and group checks before aggregation.
pub fn async_aggregate_with_randomness(
sets: Vec<PkAndSerializedSig>,
) -> Result<AsyncTask<AsyncAggregateWithRandomness>> {
if sets.is_empty() {
return Err(from_blst_err(BLST_ERROR::BLST_AGGR_TYPE_MISMATCH));
}

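    // Deserialize signatures on the JS thread without validating them; the infinity
    // and group checks run later in `compute` on the worker thread.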
let (pks, sigs) = unzip_aggregation_sets(&sets, false)?;
Ok(AsyncTask::new(AsyncAggregateWithRandomness { pks, sigs }))
}

#[napi]
/// Verify a signature against a message and public key.
///
@@ -525,16 +578,24 @@ fn unzip_signature_sets<'a>(
}

/// Convert a list of tuples into a tuple of lists (deserializing and validating signatures along the way)
fn unzip_and_validate_aggregation_sets(
fn unzip_aggregation_sets(
sets: &[PkAndSerializedSig],
sig_validate: bool,
) -> Result<(Vec<min_pk::PublicKey>, Vec<min_pk::Signature>)> {
let len = sets.len();
let mut pks = Vec::with_capacity(len);
let mut sigs = Vec::with_capacity(len);

for set in sets {
pks.push(set.pk.0);
sigs.push(min_pk::Signature::sig_validate(set.sig.as_ref(), true).map_err(from_blst_err)?);
if sig_validate {
for set in sets {
pks.push(set.pk.0);
sigs.push(min_pk::Signature::sig_validate(set.sig.as_ref(), true).map_err(from_blst_err)?);
}
} else {
for set in sets {
pks.push(set.pk.0);
sigs.push(min_pk::Signature::from_bytes(set.sig.as_ref()).map_err(from_blst_err)?);
}
}

Ok((pks, sigs))
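
A note on the split visible above (not part of the diff itself): the synchronous aggregateWithRandomness validates signatures while unzipping the sets (sig_validate = true), whereas asyncAggregateWithRandomness only deserializes them on the JS thread and defers the infinity and group checks to compute() on the worker thread. Practically, malformed input throws when the function is called, while signature validation failures reject the returned promise. A hedged handling sketch, using the same assumed import path as above:

import {PublicKey, asyncAggregateWithRandomness} from "@chainsafe/blst";

async function tryAggregate(sets: {pk: PublicKey; sig: Uint8Array}[]) {
  try {
    // Throws synchronously for structural problems (e.g. code "BLST_AGGR_TYPE_MISMATCH"
    // for an empty array) and rejects for validation failures found on the worker
    // thread (e.g. a message containing "Public key is infinity").
    return await asyncAggregateWithRandomness(sets);
  } catch (err) {
    console.error((err as Error).message);
    return null;
  }
}
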
85 changes: 84 additions & 1 deletion test/unit/aggregateWithRandomness.test.ts
@@ -3,6 +3,7 @@ import {
aggregatePublicKeys,
aggregateSerializedSignatures,
aggregateWithRandomness,
asyncAggregateWithRandomness,
PublicKey,
Signature,
verify,
@@ -61,7 +62,8 @@ describe("Aggregate With Randomness", () => {
aggregateWithRandomness(
sets.concat({
pk: sets[0].pk,
sig: G2_POINT_AT_INFINITY, //TODO: (@matthewkeil) this throws error "Public key is infinity" not signature
            //TODO: (@matthewkeil) this throws "Public key is infinity" rather than a signature-specific error because blst only has a single infinity error
sig: G2_POINT_AT_INFINITY,
} as any)
)
).to.throw();
@@ -101,4 +103,85 @@
expect(verify(msg, pk, sig)).to.be.false;
});
});
describe("asyncAggregateWithRandomness()", () => {
it("should not accept an empty array argument", async () => {
try {
await asyncAggregateWithRandomness([]);
expect.fail("asyncAggregateWithRandomness with empty list should throw");
} catch (e) {
expect((e as any).code).to.equal("BLST_AGGR_TYPE_MISMATCH");
}
});
describe("should accept an array of {pk: PublicKey, sig: Uint8Array}", () => {
it("should handle valid case", () => {
expect(() => asyncAggregateWithRandomness([{pk: sets[0].pk, sig: sets[0].sig}])).not.to.throw();
});
it("should handle invalid publicKey property name", () => {
expect(() => asyncAggregateWithRandomness([{publicKey: sets[0].pk, sig: sets[0].sig} as any])).to.throw(
"Missing field `pk`"
);
});
it("should handle invalid publicKey property value", () => {
expect(() => asyncAggregateWithRandomness([{pk: 1 as any, sig: sets[0].sig}])).to.throw();
});
it("should handle invalid signature property name", () => {
expect(() => asyncAggregateWithRandomness([{pk: sets[0].pk, signature: sets[0].sig} as any])).to.throw(
"Missing field `sig`"
);
});
it("should handle invalid signature property value", () => {
expect(() => asyncAggregateWithRandomness([{pk: sets[0].pk, sig: "bar" as any}])).to.throw();
});
});
it("should throw for invalid serialized", async () => {
try {
await asyncAggregateWithRandomness(
sets.concat({
pk: sets[0].pk,
              //TODO: (@matthewkeil) this throws "Public key is infinity" rather than a signature-specific error because blst only has a single infinity error
sig: G2_POINT_AT_INFINITY,
} as any)
);
expect.fail("should not get here");
} catch (err) {
expect((err as Error).message).to.contain("Public key is infinity");
}
});
it("should return a {pk: PublicKey, sig: Signature} object", async () => {
const aggPromise = asyncAggregateWithRandomness(sets);
expect(aggPromise).to.be.instanceOf(Promise);
const agg = await aggPromise;
expect(agg).to.be.instanceOf(Object);

expect(agg).to.haveOwnProperty("pk");
expect(agg.pk).to.be.instanceOf(PublicKey);
expect(() => agg.pk.keyValidate()).not.to.throw();

expect(agg).to.haveOwnProperty("sig");
expect(agg.sig).to.be.instanceOf(Signature);
expect(() => agg.sig.sigValidate()).not.to.throw();
});
it("should add randomness to aggregated publicKey", async () => {
const withoutRandomness = aggregatePublicKeys(sets.map(({pk}) => pk));
const withRandomness = await asyncAggregateWithRandomness(sets);
expectNotEqualHex(withRandomness.pk, withoutRandomness);
});
it("should add randomness to aggregated signature", async () => {
const withoutRandomness = aggregateSerializedSignatures(sets.map(({sig}) => sig));
const withRandomness = await asyncAggregateWithRandomness(sets);
expectNotEqualHex(withRandomness.sig, withoutRandomness);
});
it("should produce verifiable set", async () => {
const {pk, sig} = await asyncAggregateWithRandomness(sets);
      expect(verify(msg, pk, sig)).to.be.true;
});
it("should not validate for different message", async () => {
const {pk, sig} = await asyncAggregateWithRandomness(sets);
expect(verify(randomSet.msg, pk, sig)).to.be.false;
});
it("should not validate included key/sig for different message", async () => {
const {pk, sig} = await asyncAggregateWithRandomness([...sets, {pk: randomSet.pk, sig: randomSet.sig.toBytes()}]);
expect(verify(msg, pk, sig)).to.be.false;
});
});
});
1 change: 1 addition & 0 deletions test/unit/bindings.test.ts
@@ -13,6 +13,7 @@ describe("bindings", () => {
"aggregateSerializedPublicKeys",
"aggregateSerializedSignatures",
"aggregateWithRandomness",
"asyncAggregateWithRandomness",
"verify",
"aggregateVerify",
"fastAggregateVerify",
