Merge pull request #36 from madara-alliance/iterator
refactor: tree traversal implemented using a new iterator abstraction, multiproofs
cchudant authored Oct 23, 2024
2 parents 56d7d62 + f4c3492 commit 2ba731e
Showing 26 changed files with 3,223 additions and 3,085 deletions.
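
As context for the commit message, a toy sketch of the traversal idea it names: an explicit iterator that follows a key through a binary trie, replacing recursive walking. This is illustration only, not code from this PR; the Node and PathIter types below are made up, and the multiproof side of the change is not sketched here.

use bitvec::{bitvec, order::Msb0, vec::BitVec};

#[derive(Debug)]
enum Node {
    Leaf(u64),
    Binary { left: Box<Node>, right: Box<Node> },
}

/// Walks down a binary trie following `key`, yielding every node on the path.
struct PathIter<'a> {
    current: Option<&'a Node>,
    key: BitVec<u8, Msb0>,
    depth: usize,
}

impl<'a> Iterator for PathIter<'a> {
    type Item = &'a Node;

    fn next(&mut self) -> Option<Self::Item> {
        let node = self.current.take()?;
        // Pick the next child (if any) before handing the current node out.
        if let Node::Binary { left, right } = node {
            if let Some(bit) = self.key.get(self.depth) {
                self.current = Some(if *bit { &**right } else { &**left });
                self.depth += 1;
            }
        }
        Some(node)
    }
}

fn main() {
    // Two-level toy trie: keys are at most 2 bits long.
    let trie = Node::Binary {
        left: Box::new(Node::Binary {
            left: Box::new(Node::Leaf(10)),
            right: Box::new(Node::Leaf(11)),
        }),
        right: Box::new(Node::Leaf(12)),
    };
    let key: BitVec<u8, Msb0> = bitvec![u8, Msb0; 0, 1];
    let path = PathIter { current: Some(&trie), key, depth: 0 };
    // Yields the root, the left binary node, then Leaf(11).
    for node in path {
        println!("{node:?}");
    }
}
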
29 changes: 16 additions & 13 deletions Cargo.toml
@@ -6,46 +6,49 @@ version = "0.1.0"
[features]
default = ["std", "rocksdb"]
rocksdb = ["dep:rocksdb"]
std = ["parity-scale-codec/std", "bitvec/std", "starknet-types-core/std", "rayon", "hashbrown/rayon"]
std = [
"parity-scale-codec/std",
"bitvec/std",
"starknet-types-core/std",
"rayon",
"hashbrown/rayon",
]
# internal
bench = []

[dependencies]
bitvec = { version = "1", default-features = false, features = ["alloc"] }
derive_more = { version = "0.99.17", default-features = false, features = [
"constructor",
"constructor",
] }
hashbrown = "0.14.3"
log = "0.4.20"
rayon = { version = "1.9.0", optional = true }
smallvec = { version = "1.11.2", features = ["serde"] }
slotmap = "1.0.7"

parity-scale-codec = { version = "3.0.0", default-features = false, features = [
"derive",
"derive",
] }
serde = { version = "1.0.195", default-features = false, features = [
"derive",
"alloc",
"derive",
"alloc",
] }
starknet-types-core = { version = "0.1.5", default-features = false, features = [
"hash",
"parity-scale-codec",
"alloc",
"hash",
"parity-scale-codec",
"alloc",
] }

# Optionals
rocksdb = { optional = true, version = "0.21.0", features = [
"multi-threaded-cf",
"multi-threaded-cf",
] }

[dev-dependencies]
env_logger = "0.11.3"
once_cell = "1.19.0"
pprof = { version = "0.3", features = ["flamegraph"] }
pathfinder-common = { git = "https://github.com/massalabs/pathfinder.git", package = "pathfinder-common", rev = "b7b6d76a76ab0e10f92e5f84ce099b5f727cb4db" }
pathfinder-crypto = { git = "https://github.com/massalabs/pathfinder.git", package = "pathfinder-crypto", rev = "b7b6d76a76ab0e10f92e5f84ce099b5f727cb4db" }
pathfinder-merkle-tree = { git = "https://github.com/massalabs/pathfinder.git", package = "pathfinder-merkle-tree", rev = "b7b6d76a76ab0e10f92e5f84ce099b5f727cb4db" }
pathfinder-storage = { git = "https://github.com/massalabs/pathfinder.git", package = "pathfinder-storage", rev = "b7b6d76a76ab0e10f92e5f84ce099b5f727cb4db" }
rand = { version = "0.8.5", features = ["small_rng"] }
tempfile = "3.8.0"
rstest = "0.18.2"
21 changes: 17 additions & 4 deletions benches/storage.rs
@@ -1,6 +1,5 @@
use std::hint::black_box;

use bitvec::vec::BitVec;
use bonsai_trie::{
databases::HashMapDb,
id::{BasicId, BasicIdBuilder},
@@ -10,7 +9,7 @@ use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use rand::{prelude::*, thread_rng};
use starknet_types_core::{
felt::Felt,
hash::{Pedersen, StarkHash},
hash::{Pedersen, Poseidon, StarkHash},
};

mod flamegraph;
@@ -244,7 +243,7 @@ fn multiple_contracts(c: &mut Criterion) {
});
}

fn hash(c: &mut Criterion) {
fn pedersen_hash(c: &mut Criterion) {
c.bench_function("pedersen hash", move |b| {
let felt0 =
Felt::from_hex("0x100bd6fbfced88ded1b34bd1a55b747ce3a9fde9a914bca75571e4496b56443")
@@ -258,9 +257,23 @@ fn hash(c: &mut Criterion) {
});
}

fn poseidon_hash(c: &mut Criterion) {
c.bench_function("poseidon hash", move |b| {
let felt0 =
Felt::from_hex("0x100bd6fbfced88ded1b34bd1a55b747ce3a9fde9a914bca75571e4496b56443")
.unwrap();
let felt1 =
Felt::from_hex("0x00a038cda302fedbc4f6117648c6d3faca3cda924cb9c517b46232c6316b152f")
.unwrap();
b.iter(|| {
black_box(Poseidon::hash(&felt0, &felt1));
})
});
}

criterion_group! {
name = benches;
config = Criterion::default(); // .with_profiler(flamegraph::FlamegraphProfiler::new(100));
targets = storage, one_update, five_updates, hash, drop_storage, storage_with_insert, multiple_contracts
targets = storage, one_update, five_updates, pedersen_hash, poseidon_hash, drop_storage, storage_with_insert, multiple_contracts
}
criterion_main!(benches);
7 changes: 7 additions & 0 deletions proptest-regressions/tests/proptest.txt
@@ -0,0 +1,7 @@
# Seeds for failure cases proptest has generated in the past. It is
# automatically read and these particular cases re-run before any
# novel cases are generated.
#
# It is recommended to check this file in to source control so that
# everyone who runs the test benefits from these saved cases.
cc 4e01e217c0cfdcb88c0fda9a6f6c5d2bac70c39f3cb9aed3afc4cbb2c0000ac9 # shrinks to pb = MerkleTreeInsertProblem([Insert([00110], 0x52889e3d0cd0a2a1c49d2f232fb0938760bb332ba356dbee7a698a5b05cebff), Commit, Commit, Commit, Commit, Insert([00100], 0x1f0a9ec83897033916a4417aa017cb8591c7374e9c53ea1b3895b8da022658b), Commit, Commit, Remove([00110]), Commit, Insert([00010], 0x31ba571cd01834c526a0a394ece58dcea478e34ee3af3f2fe9874beb8ac3a8c), Commit, Commit, Remove([00100]), Insert([00111], 0x3667307f1e8e8150930e6ef12390b595ac3e54aa68df53222bf25a9006b1298), Remove([11001]), Remove([10001]), Insert([00011], 0x32de2acb4350b53e3d87f4307dc81c180f1e3aa8c1177643e6f779c441abb92), Remove([11000]), Insert([10010], 0x3437d396d3992c99cbbf1d9aa492a6ea90d40d41897091a4fcb6f9bfa6e5bbc), Commit, Remove([11010]), Commit, Insert([01011], 0x4159a60c3014785b08904cfa2336b0c6168efe8b460350573b2d5bc9548cffa), Remove([01101]), Commit, Remove([00010]), Commit, Remove([10110]), Commit, Commit, Commit, Commit, Insert([01100], 0x3b88e95019dd205f2bfc89f2eb9594901c4690ba7de27124e43adbdbddafcfc), Commit, Remove([11110]), Remove([10010]), Commit, Insert([00011], 0x33778bf3f7cdeca9f1bfb1ccd857d5e57bae7ac0d93538155784214ef081b89), Insert([00111], 0x17d918366c70c47a80af7ffa5b27363affc93788f0ac06276394a46997c87c1), Commit, Commit, Remove([11110]), Insert([00001], 0x3c2616fce851e54a0b7f65a5816a77086cffdff98ef59075d9b72c641abaf1a), Remove([00011]), Remove([11011]), Remove([00001]), Commit, Commit, Remove([00001]), Commit, Insert([01001], 0x13d745e43bb4d094211e14c57b28505afa2519e8c30feee5fa4c6a3c96f728b), Insert([00010], 0xb980fa05a0424e41452c9c197235a415f726a202541d2ced481e0e04b8c6db), Remove([00010]), Commit, Remove([10100]), Remove([10100]), Insert([00000], 0x73fa95cefa03f0c723b6a595672e0cc830e49a364e69b6200106c7ff7a69610), Commit, Remove([11111]), Insert([10111], 0x342719e264ac5638ff3b14214a6c33851a5d100831f4588a482facba2040386), Commit, Commit, Insert([01011], 0x28b63e6950adc4dd378685c4f00e0c9df567db08c49924389b1af309adf9b8), Insert([10001], 0x17452a2bb9b207fdda9d116a52a72640738baefe326e0b6cb74ffe2a38147c8), Remove([01110]), Remove([10011])])
8 changes: 8 additions & 0 deletions proptest-regressions/trie/iterator.txt
@@ -0,0 +1,8 @@
# Seeds for failure cases proptest has generated in the past. It is
# automatically read and these particular cases re-run before any
# novel cases are generated.
#
# It is recommended to check this file in to source control so that
# everyone who runs the test benefits from these saved cases.
cc cdc544314a0c860fc660985c3ce379c8680d5d52b0bca8837b69b7ecfa301afc # shrinks to cases = [1, 4]
cc c41a719525322a5cc0c9799d0cfb090cc3d1a17a1acc4fd66122065c02cd48ba # shrinks to cases = [5, 5]
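
These regression files are read by proptest automatically: when a property test fails, the shrunk counterexample is appended as a `cc <seed> # shrinks to ...` line, and later runs replay those cases before generating new random ones. A minimal sketch of such a property test follows; the property and names are made up, while the real suites presumably live in tests/proptest.rs and src/trie/iterator.rs, going by the regression file paths.

use proptest::prelude::*;

proptest! {
    #[test]
    fn cases_do_not_panic(cases in proptest::collection::vec(0u32..8, 1..10)) {
        // In the real suites the generated `cases` drive trie operations; any
        // failure here is shrunk and recorded in the regression file as a `cc` line.
        prop_assert!(cases.iter().all(|&c| c < 8));
    }
}
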
2 changes: 1 addition & 1 deletion src/bonsai_database.rs
@@ -27,7 +27,7 @@ pub trait DBError: Error + Send + Sync {}
pub trait DBError: Send + Sync {}

/// Trait to be implemented on any type that can be used as a database.
pub trait BonsaiDatabase {
pub trait BonsaiDatabase: core::fmt::Debug {
type Batch: Default;
#[cfg(feature = "std")]
type DatabaseError: Error + DBError;
6 changes: 6 additions & 0 deletions src/databases/rocks_db.rs
@@ -173,6 +173,12 @@ pub struct RocksDBTransaction<'a> {
column_families: HashMap<String, ColumnFamilyRef<'a>>,
}

impl<'a> fmt::Debug for RocksDBTransaction<'a> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.debug_struct("RocksDBTransaction").finish()
}
}

impl<'db, ID> BonsaiDatabase for RocksDB<'db, ID>
where
ID: Id,
12 changes: 11 additions & 1 deletion src/error.rs
@@ -1,7 +1,7 @@
#[cfg(feature = "std")]
use std::{error::Error, fmt::Display};

use crate::{bonsai_database::DBError, String};
use crate::{bonsai_database::DBError, BitVec, String};

/// All errors that can be returned by BonsaiStorage.
#[derive(Debug)]
@@ -21,6 +21,10 @@ where
Database(DatabaseError),
/// Error when decoding a node
NodeDecodeError(parity_scale_codec::Error),
/// Error when creating a storage proof.
CreateProofKeyNotInTree { key: BitVec },
/// Malformated trie key.
KeyLength { expected: usize, got: usize },
}

impl<DatabaseError: DBError> core::convert::From<DatabaseError>
@@ -52,6 +56,12 @@ where
BonsaiStorageError::Merge(e) => write!(f, "Merge error: {}", e),
BonsaiStorageError::Database(e) => write!(f, "Database error: {}", e),
BonsaiStorageError::NodeDecodeError(e) => write!(f, "Node decode error: {}", e),
BonsaiStorageError::CreateProofKeyNotInTree { key } => {
write!(f, "Key not in tree: {key:b}")
}
BonsaiStorageError::KeyLength { expected, got } => {
write!(f, "Malformated key length: expected {expected}, got {got}")
}
}
}
}
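
Both this file and src/key_value_db.rs below now pull `BitVec` from the crate root instead of writing out `bitvec::vec::BitVec<u8, Msb0>`. The alias itself is defined outside this excerpt; presumably it is something along these lines:

// Assumed definition of the crate-level alias (it lives outside this excerpt);
// it pins the byte store and Msb0 bit order used for trie keys in one place.
pub type BitVec = bitvec::vec::BitVec<u8, bitvec::order::Msb0>;
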
6 changes: 2 additions & 4 deletions src/key_value_db.rs
@@ -1,8 +1,7 @@
use crate::{
changes::key_new_value, format, trie::merkle_tree::bytes_to_bitvec, BTreeSet, ByteVec,
changes::key_new_value, format, trie::tree::bytes_to_bitvec, BTreeSet, BitVec, ByteVec,
Change as ExternChange, ToString,
};
use bitvec::{order::Msb0, vec::BitVec};
use hashbrown::HashMap;
use log::trace;
use parity_scale_codec::Decode;
@@ -92,8 +91,7 @@ where
pub(crate) fn get_changes(
&self,
id: ID,
) -> Result<HashMap<BitVec<u8, Msb0>, ExternChange>, BonsaiStorageError<DB::DatabaseError>>
{
) -> Result<HashMap<BitVec, ExternChange>, BonsaiStorageError<DB::DatabaseError>> {
if self.changes_store.id_queue.contains(&id) {
let mut leaf_changes = HashMap::new();
let changes = ChangeBatch::deserialize(
