PeerDAS spec tests #5772

Merged (7 commits, May 14, 2024)
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

17 changes: 11 additions & 6 deletions consensus/types/src/data_column_subnet_id.rs
@@ -2,6 +2,7 @@
 use crate::data_column_sidecar::ColumnIndex;
 use crate::EthSpec;
 use ethereum_types::U256;
+use itertools::Itertools;
 use safe_arith::{ArithError, SafeArith};
 use serde::{Deserialize, Serialize};
 use smallvec::SmallVec;
@@ -60,15 +61,15 @@ impl DataColumnSubnetId {
         node_id: U256,
         custody_subnet_count: u64,
     ) -> impl Iterator<Item = DataColumnSubnetId> {
-        // NOTE: we could perform check on `custody_subnet_count` here to ensure that it is a valid
+        // TODO(das): we could perform check on `custody_subnet_count` here to ensure that it is a valid
         // value, but here we assume it is valid.

         let mut subnets = SmallVec::<[u64; 32]>::new();
-        let mut offset = 0;
+        let mut current_id = node_id;
         while (subnets.len() as u64) < custody_subnet_count {
-            let offset_node_id = node_id + U256::from(offset);
-            let offset_node_id = offset_node_id.low_u64().to_le_bytes();
-            let hash: [u8; 32] = ethereum_hashing::hash_fixed(&offset_node_id);
+            let mut node_id_bytes = [0u8; 32];
+            current_id.to_little_endian(&mut node_id_bytes);
+            let hash = ethereum_hashing::hash_fixed(&node_id_bytes);
             let hash_prefix = [
                 hash[0], hash[1], hash[2], hash[3], hash[4], hash[5], hash[6], hash[7],
             ];
@@ -79,7 +80,10 @@ impl DataColumnSubnetId {
                 subnets.push(subnet);
             }

-            offset += 1
+            if current_id == U256::MAX {
+                current_id = U256::zero()
+            }
+            current_id += U256::one()
         }
         subnets.into_iter().map(DataColumnSubnetId::new)
     }
@@ -90,6 +94,7 @@ impl DataColumnSubnetId {
     ) -> impl Iterator<Item = ColumnIndex> {
         Self::compute_custody_subnets::<E>(node_id, custody_subnet_count)
            .flat_map(|subnet| subnet.columns::<E>())
+            .sorted()
     }
 }

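For readers following the change above: the rewritten loop hashes the full 32-byte little-endian encoding of an incrementing node id (rather than only the low eight bytes of `node_id + offset`), takes the first eight bytes of the digest as a little-endian `u64`, and reduces it modulo the subnet count until `custody_subnet_count` distinct subnets are collected. Below is a condensed, self-contained sketch of that derivation; the `sha2` crate, the `u128` id type, and the `SUBNET_COUNT` value stand in for `ethereum_hashing`, `U256`, and Lighthouse's spec constant, so treat it as an illustration rather than the PR's code.

```rust
use sha2::{Digest, Sha256};

const SUBNET_COUNT: u64 = 32; // stand-in for the data column subnet count constant

/// Condensed sketch of the custody-subnet derivation exercised by this diff.
fn custody_subnets(mut current_id: u128, custody_subnet_count: u64) -> Vec<u64> {
    let mut subnets = Vec::new();
    while (subnets.len() as u64) < custody_subnet_count {
        // Hash the node id as 32 little-endian bytes (like `U256::to_little_endian`).
        let mut node_id_bytes = [0u8; 32];
        node_id_bytes[..16].copy_from_slice(&current_id.to_le_bytes());
        let hash = Sha256::digest(node_id_bytes);
        // First eight digest bytes, read as a little-endian u64, modulo the subnet count.
        let prefix = u64::from_le_bytes(hash[..8].try_into().unwrap());
        let subnet = prefix % SUBNET_COUNT;
        if !subnets.contains(&subnet) {
            subnets.push(subnet);
        }
        // The real code resets to zero at `U256::MAX` before incrementing; the sketch just wraps.
        current_id = current_id.wrapping_add(1);
    }
    subnets
}
```

Because the derivation depends only on the node id and the subnet count, it is deterministic, which is exactly what the new `get_custody_columns` vectors assert.
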
4 changes: 2 additions & 2 deletions consensus/types/src/lib.rs
@@ -134,7 +134,7 @@ pub use crate::beacon_block_body::{
 pub use crate::beacon_block_header::BeaconBlockHeader;
 pub use crate::beacon_committee::{BeaconCommittee, OwnedBeaconCommittee};
 pub use crate::beacon_state::{Error as BeaconStateError, *};
-pub use crate::blob_sidecar::{BlobSidecar, BlobSidecarList, BlobsList};
+pub use crate::blob_sidecar::{BlobIdentifier, BlobSidecar, BlobSidecarList, BlobsList};
 pub use crate::bls_to_execution_change::BlsToExecutionChange;
 pub use crate::chain_spec::{ChainSpec, Config, Domain};
 pub use crate::checkpoint::Checkpoint;
@@ -143,7 +143,7 @@ pub use crate::config_and_preset::{
 };
 pub use crate::consolidation::Consolidation;
 pub use crate::contribution_and_proof::ContributionAndProof;
-pub use crate::data_column_sidecar::{ColumnIndex, DataColumnSidecar};
+pub use crate::data_column_sidecar::{ColumnIndex, DataColumnIdentifier, DataColumnSidecar};
 pub use crate::data_column_subnet_id::DataColumnSubnetId;
 pub use crate::deposit::{Deposit, DEPOSIT_TREE_DEPTH};
 pub use crate::deposit_data::DepositData;

1 change: 1 addition & 0 deletions testing/ef_tests/Cargo.toml
@@ -40,3 +40,4 @@ store = { workspace = true }
 fork_choice = { workspace = true }
 execution_layer = { workspace = true }
 logging = { workspace = true }
+lazy_static = { workspace = true }

2 changes: 1 addition & 1 deletion testing/ef_tests/Makefile
@@ -1,4 +1,4 @@
-TESTS_TAG := v1.4.0-beta.6
+TESTS_TAG := v1.5.0-alpha.2
 TESTS = general minimal mainnet
 TARBALLS = $(patsubst %,%-$(TESTS_TAG).tar.gz,$(TESTS))

7 changes: 7 additions & 0 deletions testing/ef_tests/check_all_files_accessed.py
@@ -20,6 +20,8 @@
 # following regular expressions, we will assume they are to be ignored (i.e., we are purposefully
 # *not* running the spec tests).
 excluded_paths = [
+    # TODO(das): remove once electra tests are on unstable
+    "tests/.*/electra/",
     # Eth1Block and PowBlock
     #
     # Intentionally omitted, as per https://github.com/sigp/lighthouse/issues/1835
@@ -31,10 +33,15 @@
     "tests/.*/.*/ssz_static/LightClientStore",
     # LightClientSnapshot
     "tests/.*/.*/ssz_static/LightClientSnapshot",
+    # Unused kzg methods
+    "tests/.*/.*/kzg/compute_cells",
+    "tests/.*/.*/kzg/recover_all_cells",
+    "tests/.*/.*/kzg/verify_cell_kzg_proof",
     # One of the EF researchers likes to pack the tarballs on a Mac
     ".*\.DS_Store.*",
     # More Mac weirdness.
     "tests/mainnet/bellatrix/operations/deposit/pyspec_tests/deposit_with_previous_fork_version__valid_ineffective/._meta.yaml",
+    "tests/mainnet/eip7594/networking/get_custody_columns/pyspec_tests/get_custody_columns__short_node_id/._meta.yaml",
     # bls tests are moved to bls12-381-tests directory
     "tests/general/phase0/bls",
     # some bls tests are not included now

28 changes: 27 additions & 1 deletion testing/ef_tests/src/cases.rs
@@ -1,6 +1,6 @@
 use super::*;
 use rayon::prelude::*;
-use std::fmt::Debug;
+use std::fmt::{Debug, Display, Formatter};
 use std::path::{Path, PathBuf};
 use types::ForkName;

@@ -18,11 +18,14 @@ mod fork;
 mod fork_choice;
 mod genesis_initialization;
 mod genesis_validity;
+mod get_custody_columns;
 mod kzg_blob_to_kzg_commitment;
 mod kzg_compute_blob_kzg_proof;
+mod kzg_compute_cells_and_kzg_proofs;
 mod kzg_compute_kzg_proof;
 mod kzg_verify_blob_kzg_proof;
 mod kzg_verify_blob_kzg_proof_batch;
+mod kzg_verify_cell_kzg_proof_batch;
 mod kzg_verify_kzg_proof;
 mod merkle_proof_validity;
 mod operations;
@@ -48,11 +51,14 @@ pub use epoch_processing::*;
 pub use fork::ForkTest;
 pub use genesis_initialization::*;
 pub use genesis_validity::*;
+pub use get_custody_columns::*;
 pub use kzg_blob_to_kzg_commitment::*;
 pub use kzg_compute_blob_kzg_proof::*;
+pub use kzg_compute_cells_and_kzg_proofs::*;
 pub use kzg_compute_kzg_proof::*;
 pub use kzg_verify_blob_kzg_proof::*;
 pub use kzg_verify_blob_kzg_proof_batch::*;
+pub use kzg_verify_cell_kzg_proof_batch::*;
 pub use kzg_verify_kzg_proof::*;
 pub use merkle_proof_validity::*;
 pub use operations::*;
@@ -64,6 +70,19 @@ pub use ssz_generic::*;
 pub use ssz_static::*;
 pub use transition::TransitionTest;

+#[derive(Debug, PartialEq)]
+pub enum FeatureName {
+    Eip7594,
+}
+
+impl Display for FeatureName {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        match self {
+            FeatureName::Eip7594 => f.write_str("eip7594"),
+        }
+    }
+}
+
 pub trait LoadCase: Sized {
     /// Load the test case from a test case directory.
     fn load_from_dir(_path: &Path, _fork_name: ForkName) -> Result<Self, Error>;
@@ -84,6 +103,13 @@ pub trait Case: Debug + Sync {
         true
     }

+    /// Whether or not this test exists for the given `feature_name`.
+    ///
+    /// Returns `true` by default.
+    fn is_enabled_for_feature(_feature_name: FeatureName) -> bool {
+        true
+    }
+
     /// Execute a test and return the result.
     ///
     /// `case_index` reports the index of the case in the set of test cases. It is not strictly

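The new `FeatureName` enum and the `is_enabled_for_feature` hook let individual cases opt out of feature-gated vectors (here the `eip7594` test directory) without introducing a new `ForkName`. Below is a minimal sketch of how a runner could consult the hook before executing a group of cases; `run_feature_cases` is a hypothetical helper for illustration, not an API added by this PR.

```rust
// Hypothetical helper: skip an entire case type when it opts out of a feature.
fn run_feature_cases<C: Case>(
    cases: &[C],
    feature: FeatureName,
    fork_name: ForkName,
) -> Result<(), Error> {
    if !C::is_enabled_for_feature(feature) {
        // e.g. the Deneb-only single-blob KZG cases below opt out of "eip7594".
        return Ok(());
    }
    for (i, case) in cases.iter().enumerate() {
        case.result(i, fork_name)?;
    }
    Ok(())
}
```
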
39 changes: 39 additions & 0 deletions testing/ef_tests/src/cases/get_custody_columns.rs
@@ -0,0 +1,39 @@
+use super::*;
+use ethereum_types::U256;
+use serde::Deserialize;
+use std::marker::PhantomData;
+use types::DataColumnSubnetId;
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(bound = "E: EthSpec", deny_unknown_fields)]
+pub struct GetCustodyColumns<E: EthSpec> {
+    pub node_id: String,
+    pub custody_subnet_count: u64,
+    pub result: Vec<u64>,
+    #[serde(skip)]
+    _phantom: PhantomData<E>,
+}
+
+impl<E: EthSpec> LoadCase for GetCustodyColumns<E> {
+    fn load_from_dir(path: &Path, _fork_name: ForkName) -> Result<Self, Error> {
+        decode::yaml_decode_file(path.join("meta.yaml").as_path())
+    }
+}
+
+impl<E: EthSpec> Case for GetCustodyColumns<E> {
+    fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
+        let node_id = U256::from_dec_str(&self.node_id)
+            .map_err(|e| Error::FailedToParseTest(format!("{e:?}")))?;
+        let computed =
+            DataColumnSubnetId::compute_custody_columns::<E>(node_id, self.custody_subnet_count)
+                .collect::<Vec<_>>();
+        let expected = &self.result;
+        if computed == *expected {
+            Ok(())
+        } else {
+            Err(Error::NotEqual(format!(
+                "Got {computed:?}\nExpected {expected:?}"
+            )))
+        }
+    }
+}

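Each `get_custody_columns` vector is a `meta.yaml` carrying exactly the three fields the struct above declares: `node_id` as a decimal string, `custody_subnet_count`, and the expected column indices in `result`. The sketch below shows that mapping with `serde_yaml` directly instead of `yaml_decode_file`; the literal values are placeholders rather than numbers from the real vectors, and the availability of `serde_yaml` plus the import path for the case type are assumptions.

```rust
#[cfg(test)]
mod illustrate_meta_yaml {
    use super::*;
    use types::MainnetEthSpec;

    #[test]
    fn parses_placeholder_vector() {
        // Placeholder meta.yaml, shaped like the spec vectors but with made-up values.
        let yaml = r#"
node_id: "1234567890123456789012345678901234567890"
custody_subnet_count: 2
result: [6, 28, 70, 92]
"#;
        let case: GetCustodyColumns<MainnetEthSpec> =
            serde_yaml::from_str(yaml).expect("placeholder meta.yaml should deserialize");
        assert_eq!(case.custody_subnet_count, 2);
        assert_eq!(case.result.len(), 4);
    }
}
```
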
8 changes: 5 additions & 3 deletions testing/ef_tests/src/cases/kzg_blob_to_kzg_commitment.rs
@@ -31,11 +31,13 @@ impl<E: EthSpec> Case for KZGBlobToKZGCommitment<E> {
         fork_name == ForkName::Deneb
     }

-    fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
-        let kzg = get_kzg()?;
+    fn is_enabled_for_feature(feature_name: FeatureName) -> bool {
+        feature_name != FeatureName::Eip7594
+    }

+    fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
         let commitment = parse_blob::<E>(&self.input.blob).and_then(|blob| {
-            blob_to_kzg_commitment::<E>(&kzg, &blob).map_err(|e| {
+            blob_to_kzg_commitment::<E>(&KZG, &blob).map_err(|e| {
                 Error::InternalError(format!("Failed to compute kzg commitment: {:?}", e))
             })
         });

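From this file onward the call sites switch from a per-test `get_kzg()?` to a shared `KZG` value, which is why `lazy_static` was added to `testing/ef_tests/Cargo.toml` earlier in the diff. The declaration itself is outside the rendered hunks; a plausible shape is sketched below, with the embedded trusted-setup bytes and the exact `Kzg` constructor treated as assumptions rather than facts from this PR.

```rust
use kzg::{Kzg, TrustedSetup};
use lazy_static::lazy_static;

// Assumed: a JSON trusted setup bundled with the test crate.
const TRUSTED_SETUP_JSON: &[u8] = include_bytes!("trusted_setup.json");

lazy_static! {
    // Build one KZG context up front and share it across all KZG test cases.
    static ref KZG: Kzg = {
        let trusted_setup: TrustedSetup = serde_json::from_slice(TRUSTED_SETUP_JSON)
            .expect("should parse trusted setup");
        Kzg::new_from_trusted_setup(trusted_setup).expect("should initialise KZG context")
    };
}
```
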
7 changes: 5 additions & 2 deletions testing/ef_tests/src/cases/kzg_compute_blob_kzg_proof.rs
@@ -32,16 +32,19 @@ impl<E: EthSpec> Case for KZGComputeBlobKZGProof<E> {
         fork_name == ForkName::Deneb
     }

+    fn is_enabled_for_feature(feature_name: FeatureName) -> bool {
+        feature_name != FeatureName::Eip7594
+    }
+
     fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
         let parse_input = |input: &KZGComputeBlobKZGProofInput| -> Result<_, Error> {
             let blob = parse_blob::<E>(&input.blob)?;
             let commitment = parse_commitment(&input.commitment)?;
             Ok((blob, commitment))
         };

-        let kzg = get_kzg()?;
         let proof = parse_input(&self.input).and_then(|(blob, commitment)| {
-            compute_blob_kzg_proof::<E>(&kzg, &blob, commitment)
+            compute_blob_kzg_proof::<E>(&KZG, &blob, commitment)
                 .map_err(|e| Error::InternalError(format!("Failed to compute kzg proof: {:?}", e)))
         });

73 changes: 73 additions & 0 deletions testing/ef_tests/src/cases/kzg_compute_cells_and_kzg_proofs.rs
@@ -0,0 +1,73 @@
+use super::*;
+use crate::case_result::compare_result;
+use kzg::{Blob as KzgBlob, Cell};
+use kzg::{KzgProof, CELLS_PER_EXT_BLOB};
+use serde::Deserialize;
+use std::marker::PhantomData;
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(deny_unknown_fields)]
+pub struct KZGComputeCellsAndKzgProofsInput {
+    pub blob: String,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(bound = "E: EthSpec", deny_unknown_fields)]
+pub struct KZGComputeCellsAndKZGProofs<E: EthSpec> {
+    pub input: KZGComputeCellsAndKzgProofsInput,
+    pub output: Option<(Vec<String>, Vec<String>)>,
+    #[serde(skip)]
+    _phantom: PhantomData<E>,
+}
+
+impl<E: EthSpec> LoadCase for KZGComputeCellsAndKZGProofs<E> {
+    fn load_from_dir(path: &Path, _fork_name: ForkName) -> Result<Self, Error> {
+        decode::yaml_decode_file(path.join("data.yaml").as_path())
+    }
+}
+
+impl<E: EthSpec> Case for KZGComputeCellsAndKZGProofs<E> {
+    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
+        fork_name == ForkName::Deneb
+    }
+
+    fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
+        let cells_and_proofs = parse_blob::<E>(&self.input.blob).and_then(|blob| {
+            let blob = KzgBlob::from_bytes(&blob).map_err(|e| {
+                Error::InternalError(format!("Failed to convert blob to kzg blob: {e:?}"))
+            })?;
+            KZG.compute_cells_and_proofs(&blob).map_err(|e| {
+                Error::InternalError(format!("Failed to compute cells and kzg proofs: {e:?}"))
+            })
+        });
+
+        let expected = self.output.as_ref().and_then(|(cells, proofs)| {
+            parse_cells_and_proofs(cells, proofs)
+                .map(|(cells, proofs)| {
+                    (
+                        cells
+                            .try_into()
+                            .map_err(|e| {
+                                Error::FailedToParseTest(format!("Failed to parse cells: {e:?}"))
+                            })
+                            .unwrap(),
+                        proofs
+                            .try_into()
+                            .map_err(|e| {
+                                Error::FailedToParseTest(format!("Failed to parse proofs: {e:?}"))
+                            })
+                            .unwrap(),
+                    )
+                })
+                .ok()
+        });
+
+        compare_result::<
+            (
+                Box<[Cell; CELLS_PER_EXT_BLOB]>,
+                Box<[KzgProof; CELLS_PER_EXT_BLOB]>,
+            ),
+            _,
+        >(&cells_and_proofs, &expected)
+    }
+}

7 changes: 5 additions & 2 deletions testing/ef_tests/src/cases/kzg_compute_kzg_proof.rs
@@ -39,16 +39,19 @@ impl<E: EthSpec> Case for KZGComputeKZGProof<E> {
         fork_name == ForkName::Deneb
     }

+    fn is_enabled_for_feature(feature_name: FeatureName) -> bool {
+        feature_name != FeatureName::Eip7594
+    }
+
     fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
         let parse_input = |input: &KZGComputeKZGProofInput| -> Result<_, Error> {
             let blob = parse_blob::<E>(&input.blob)?;
             let z = parse_point(&input.z)?;
             Ok((blob, z))
         };

-        let kzg = get_kzg()?;
         let proof = parse_input(&self.input).and_then(|(blob, z)| {
-            compute_kzg_proof::<E>(&kzg, &blob, z)
+            compute_kzg_proof::<E>(&KZG, &blob, z)
                 .map_err(|e| Error::InternalError(format!("Failed to compute kzg proof: {:?}", e)))
         });