From cb123084bf6d22f5b6b8a185697f8333a20525ae Mon Sep 17 00:00:00 2001 From: Oliver Date: Fri, 11 Oct 2024 04:53:19 +0200 Subject: [PATCH 001/425] docs: `LoadFee::eip1559_fees` returns base fee, not max fee per gas (#11652) --- crates/rpc/rpc-eth-api/src/helpers/fee.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/rpc/rpc-eth-api/src/helpers/fee.rs b/crates/rpc/rpc-eth-api/src/helpers/fee.rs index b6dcef470..52ccabebf 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/fee.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/fee.rs @@ -284,15 +284,15 @@ pub trait LoadFee: LoadBlock { /// Returns the EIP-1559 fees if they are set, otherwise fetches a suggested gas price for /// EIP-1559 transactions. /// - /// Returns (`max_fee`, `priority_fee`) + /// Returns (`base_fee`, `priority_fee`) fn eip1559_fees( &self, - max_fee_per_gas: Option, + base_fee: Option, max_priority_fee_per_gas: Option, ) -> impl Future> + Send { async move { - let max_fee_per_gas = match max_fee_per_gas { - Some(max_fee_per_gas) => max_fee_per_gas, + let base_fee = match base_fee { + Some(base_fee) => base_fee, None => { // fetch pending base fee let base_fee = self @@ -311,7 +311,7 @@ pub trait LoadFee: LoadBlock { Some(max_priority_fee_per_gas) => max_priority_fee_per_gas, None => self.suggested_priority_fee().await?, }; - Ok((max_fee_per_gas, max_priority_fee_per_gas)) + Ok((base_fee, max_priority_fee_per_gas)) } } From ad485277deef9cf6359409b0a7d208ba9ab67e71 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 11 Oct 2024 08:28:07 +0200 Subject: [PATCH 002/425] feat: propagate helper (#11654) Co-authored-by: Oliver --- crates/net/network/src/transactions/mod.rs | 25 ++++++++++++++++++---- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index 0c488ff91..bc77de9f4 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -153,6 +153,14 @@ impl TransactionsHandle { self.send(TransactionsCommand::PropagateTransactionsTo(transactions, peer)) } + /// Manually propagate the given transactions to all peers. + /// + /// It's up to the [`TransactionsManager`] whether the transactions are sent as hashes or in + /// full. + pub fn propagate_transactions(&self, transactions: Vec) { + self.send(TransactionsCommand::PropagateTransactions(transactions)) + } + /// Request the transaction hashes known by specific peers. pub async fn get_transaction_hashes( &self, @@ -398,8 +406,14 @@ where trace!(target: "net::tx", num_hashes=?hashes.len(), "Start propagating transactions"); - // This fetches all transaction from the pool, including the 4844 blob transactions but - // __without__ their sidecar, because 4844 transactions are only ever announced as hashes. + self.propagate_all(hashes); + } + + /// Propagates the given transactions to the peers + /// + /// This fetches all transaction from the pool, including the 4844 blob transactions but + /// __without__ their sidecar, because 4844 transactions are only ever announced as hashes. 
+ fn propagate_all(&mut self, hashes: Vec) { let propagated = self.propagate_transactions( self.pool.get_all(hashes).into_iter().map(PropagateTransaction::new).collect(), ); @@ -872,11 +886,12 @@ where let peers = self.peers.keys().copied().collect::>(); tx.send(peers).ok(); } - TransactionsCommand::PropagateTransactionsTo(txs, _peer) => { - if let Some(propagated) = self.propagate_full_transactions_to_peer(txs, _peer) { + TransactionsCommand::PropagateTransactionsTo(txs, peer) => { + if let Some(propagated) = self.propagate_full_transactions_to_peer(txs, peer) { self.pool.on_propagated(propagated); } } + TransactionsCommand::PropagateTransactions(txs) => self.propagate_all(txs), TransactionsCommand::GetTransactionHashes { peers, tx } => { let mut res = HashMap::with_capacity(peers.len()); for peer_id in peers { @@ -1653,6 +1668,8 @@ enum TransactionsCommand { GetActivePeers(oneshot::Sender>), /// Propagate a collection of full transactions to a specific peer. PropagateTransactionsTo(Vec, PeerId), + /// Propagate a collection of full transactions to all peers. + PropagateTransactions(Vec), /// Request transaction hashes known by specific peers from the [`TransactionsManager`]. GetTransactionHashes { peers: Vec, From 8fc703cf82dd97c023935374caac83d4cbb7e02d Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Fri, 11 Oct 2024 10:21:59 +0200 Subject: [PATCH 003/425] chore(ci): remove assertoor workflow (#11656) --- .github/workflows/assertoor.yml | 230 -------------------------------- 1 file changed, 230 deletions(-) delete mode 100644 .github/workflows/assertoor.yml diff --git a/.github/workflows/assertoor.yml b/.github/workflows/assertoor.yml deleted file mode 100644 index a5028f7ff..000000000 --- a/.github/workflows/assertoor.yml +++ /dev/null @@ -1,230 +0,0 @@ -name: Assertoor Tests - -on: - workflow_dispatch: - schedule: - - cron: '0 0 * * *' - -jobs: - get_tests: - name: "Run assertoor tests on reth pairs" - runs-on: ubuntu-latest - outputs: - test_result: ${{ steps.test_result.outputs.test_result }} - test_status: ${{ steps.test_result.outputs.test_status }} - failed_test_status: ${{ steps.test_result.outputs.failed_test_status }} - if: github.repository == 'paradigmxyz/reth' - steps: - - name: Checkout Repository - uses: actions/checkout@v4 - - name: Setup Kurtosis - shell: bash - run: | - echo "deb [trusted=yes] https://apt.fury.io/kurtosis-tech/ /" | sudo tee /etc/apt/sources.list.d/kurtosis.list - sudo apt update - sudo apt install kurtosis-cli - kurtosis analytics disable - - - name: Run Kurtosis - shell: bash - id: services - run: | - export github_sha=${{ github.sha }} - export github_repository=${{ github.repository }} - - cat etc/assertoor/assertoor-template.yaml | envsubst > etc/assertoor/assertoor.yaml - - kurtosis run github.com/ethpandaops/ethereum-package --enclave assertoor-${{ github.run_id }} --args-file etc/assertoor/assertoor.yaml - - enclave_dump=$(kurtosis enclave inspect assertoor-${{ github.run_id }}) - - assertoor_url=$(echo "$enclave_dump" | grep assertoor | grep http | sed 's/.*\(http:\/\/[0-9.:]\+\).*/\1/') - echo "assertoor_url: ${assertoor_url}" - echo "assertoor_url=${assertoor_url}" >> $GITHUB_OUTPUT - - - name: Await test completion - shell: bash - id: test_result - run: | - assertoor_url="${{ steps.services.outputs.assertoor_url }}" - - YELLOW='\033[1;33m' - GRAY='\033[0;37m' - GREEN='\033[0;32m' - RED='\033[0;31m' - NC='\033[0m' - - # print assertor logs - assertoor_container=$(docker container list | grep assertoor | sed 's/^\([^ ]\+\) .*$/\1/') - 
docker logs -f $assertoor_container & - - # helper to fetch task status for specific test id - get_tasks_status() { - tasks=$(curl -s ${assertoor_url}/api/v1/test_run/$1 | jq -c ".data.tasks[] | {index, parent_index, name, title, status, result}") - declare -A task_graph_map - task_graph_map[0]="" - - while read task; do - task_id=$(echo "$task" | jq -r ".index") - task_parent=$(echo "$task" | jq -r ".parent_index") - task_name=$(echo "$task" | jq -r ".name") - task_title=$(echo "$task" | jq -r ".title") - task_status=$(echo "$task" | jq -r ".status") - task_result=$(echo "$task" | jq -r ".result") - - task_graph="${task_graph_map[$task_parent]}" - task_graph_map[$task_id]="$task_graph |" - if [ ! -z "$task_graph" ]; then - task_graph="${task_graph}- " - fi - - if [ "$task_status" == "pending" ]; then - task_status="${GRAY}pending ${NC}" - elif [ "$task_status" == "running" ]; then - task_status="${YELLOW}running ${NC}" - elif [ "$task_status" == "complete" ]; then - task_status="${GREEN}complete${NC}" - fi - - if [ "$task_result" == "none" ]; then - task_result="${GRAY}none ${NC}" - elif [ "$task_result" == "success" ]; then - task_result="${GREEN}success${NC}" - elif [ "$task_result" == "failure" ]; then - task_result="${RED}failure${NC}" - fi - - echo -e " $(printf '%-4s' "$task_id")\t$task_status\t$task_result\t$(printf '%-50s' "$task_graph$task_name") \t$task_title" - done <<< $(echo "$tasks") - } - - # poll & check test status - final_test_result="" - failed_test_id="" - while true - do - pending_tests=0 - failed_tests=0 - total_tests=0 - running_test="" - - status_lines=() - task_lines="" - status_lines+=("$(date +'%Y-%m-%d %H:%M:%S') Test Status:") - - tests=$(curl -s ${assertoor_url}/api/v1/test_runs | jq -c ".data[] | {run_id, test_id, name, status}") - while read test; do - if [ -z "$test" ]; then - continue - fi - run_id=$(echo "$test" | jq -r ".run_id") - test_id=$(echo "$test" | jq -r ".test_id") - test_name=$(echo "$test" | jq -r ".name") - test_status=$(echo "$test" | jq -r ".status") - - if [ "$test_status" == "pending" ]; then - pending_tests=$(expr $pending_tests + 1) - status_name="${GRAY}pending${NC}" - elif [ "$test_status" == "running" ]; then - pending_tests=$(expr $pending_tests + 1) - running_test="$run_id" - status_name="${YELLOW}running${NC}" - - elif [ "$test_status" == "success" ]; then - status_name="${GREEN}success${NC}" - elif [ "$test_status" == "failure" ]; then - failed_tests=$(expr $failed_tests + 1) - failed_test_id="$run_id" - status_name="${RED}failure${NC}" - else - status_name="$test_status" - fi - status_lines+=(" $(printf '%-3s' "$test_id") $status_name \t$test_name") - total_tests=$(expr $total_tests + 1) - done <<< $(echo "$tests") - - for status_line in "${status_lines[@]}" - do - echo -e "$status_line" - done - - if ! [ -z "$running_test" ]; then - task_lines=$(get_tasks_status "$running_test") - echo "Active Test Task Status:" - echo "$task_lines" - fi - - if [ $failed_tests -gt 0 ]; then - final_test_result="failure" - break - fi - if [ $total_tests -gt 0 ] && [ $pending_tests -le 0 ]; then - final_test_result="success" - break - fi - - sleep 60 - done - - # save test results & status to github output - echo "test_result=$(echo "$final_test_result")" >> $GITHUB_OUTPUT - echo "test_status<> $GITHUB_OUTPUT - for status_line in "${status_lines[@]}" - do - echo -e "$status_line" >> $GITHUB_OUTPUT - done - echo "EOF" >> $GITHUB_OUTPUT - - if ! 
[ -z "$failed_test_id" ]; then - echo "failed_test_status<> $GITHUB_OUTPUT - get_tasks_status "$failed_test_id" >> $GITHUB_OUTPUT - echo "EOF" >> $GITHUB_OUTPUT - else - echo "failed_test_status=" >> $GITHUB_OUTPUT - fi - - - name: Generate dump and remove kurtosis enclave - shell: bash - run: | - mkdir -p ./temp/dump - cd ./temp/dump - cp ../../etc/assertoor/assertoor.yaml ./kurtosis-params.yaml - - kurtosis enclave dump assertoor-${{ github.run_id }} - kurtosis enclave rm -f assertoor-${{ github.run_id }} - - - name: Upload dump artifact - uses: actions/upload-artifact@v4 - with: - name: "kurtosis-enclave-dump-${{ github.run_id }}" - path: ./temp/dump - - - name: Return test result - shell: bash - run: | - test_result="${{ steps.test_result.outputs.test_result }}" - test_status=$( - cat <<"EOF" - ${{ steps.test_result.outputs.test_status }} - EOF - ) - failed_test_status=$( - cat <<"EOF" - ${{ steps.test_result.outputs.failed_test_status }} - EOF - ) - - echo "Test Result: $test_result" - echo "$test_status" - - if ! [ "$test_result" == "success" ]; then - echo "" - echo "Failed Test Task Status:" - echo "$failed_test_status" - - echo "" - echo "See 'Await test completion' task for detailed logs about this failure!" - echo "" - - exit 1 # fail action - fi From c4411991e1e5a10f2cfe99056b650557f4da62fb Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 11 Oct 2024 11:45:44 +0200 Subject: [PATCH 004/425] feat: add override for additional_validation_tasks (#11655) --- crates/node/builder/src/components/pool.rs | 3 +++ crates/optimism/node/src/node.rs | 6 +++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/crates/node/builder/src/components/pool.rs b/crates/node/builder/src/components/pool.rs index 234455913..436a80c52 100644 --- a/crates/node/builder/src/components/pool.rs +++ b/crates/node/builder/src/components/pool.rs @@ -52,6 +52,8 @@ pub struct PoolBuilderConfigOverrides { pub minimal_protocol_basefee: Option, /// Addresses that will be considered as local. Above exemptions apply. pub local_addresses: HashSet
, + /// Additional tasks to validate new transactions. + pub additional_validation_tasks: Option, } impl PoolBuilderConfigOverrides { @@ -65,6 +67,7 @@ impl PoolBuilderConfigOverrides { max_account_slots, minimal_protocol_basefee, local_addresses, + additional_validation_tasks: _, } = self; if let Some(pending_limit) = pending_limit { diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index caeab3741..b6e64f7e0 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -189,7 +189,11 @@ where )) .with_head_timestamp(ctx.head().timestamp) .kzg_settings(ctx.kzg_settings()?) - .with_additional_tasks(ctx.config().txpool.additional_validation_tasks) + .with_additional_tasks( + pool_config_overrides + .additional_validation_tasks + .unwrap_or_else(|| ctx.config().txpool.additional_validation_tasks), + ) .build_with_tasks(ctx.provider().clone(), ctx.task_executor().clone(), blob_store.clone()) .map(|validator| { OpTransactionValidator::new(validator) From 6d3aa5a0d5df5a42b6fcf38a9937e3255052bd47 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 11 Oct 2024 14:17:07 +0200 Subject: [PATCH 005/425] Revert "fix(net): batch `P2PStream` sends" (#11658) --- crates/net/eth-wire/src/p2pstream.rs | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/crates/net/eth-wire/src/p2pstream.rs b/crates/net/eth-wire/src/p2pstream.rs index 76075838b..9bb8fe7c3 100644 --- a/crates/net/eth-wire/src/p2pstream.rs +++ b/crates/net/eth-wire/src/p2pstream.rs @@ -614,24 +614,19 @@ where /// Returns `Poll::Ready(Ok(()))` when no buffered items remain. fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { let mut this = self.project(); - let poll_res = loop { - match this.inner.as_mut().poll_ready(cx) { - Poll::Pending => break Poll::Pending, - Poll::Ready(Err(err)) => break Poll::Ready(Err(err.into())), - Poll::Ready(Ok(())) => { + loop { + match ready!(this.inner.as_mut().poll_flush(cx)) { + Err(err) => return Poll::Ready(Err(err.into())), + Ok(()) => { let Some(message) = this.outgoing_messages.pop_front() else { - break Poll::Ready(Ok(())) + return Poll::Ready(Ok(())) }; if let Err(err) = this.inner.as_mut().start_send(message) { - break Poll::Ready(Err(err.into())) + return Poll::Ready(Err(err.into())) } } } - }; - - ready!(this.inner.as_mut().poll_flush(cx))?; - - poll_res + } } fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { From c29c1f5fa8949f3a6eb111b9c699af08873ab5a9 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Fri, 11 Oct 2024 15:34:52 +0200 Subject: [PATCH 006/425] chore(net): log p2p stream flush error (#11659) --- crates/net/eth-wire/src/p2pstream.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/crates/net/eth-wire/src/p2pstream.rs b/crates/net/eth-wire/src/p2pstream.rs index 9bb8fe7c3..9882e3978 100644 --- a/crates/net/eth-wire/src/p2pstream.rs +++ b/crates/net/eth-wire/src/p2pstream.rs @@ -616,7 +616,13 @@ where let mut this = self.project(); loop { match ready!(this.inner.as_mut().poll_flush(cx)) { - Err(err) => return Poll::Ready(Err(err.into())), + Err(err) => { + trace!(target: "net::p2p", + %err, + "error flushing p2p stream" + ); + return Poll::Ready(Err(err.into())) + } Ok(()) => { let Some(message) = this.outgoing_messages.pop_front() else { return Poll::Ready(Ok(())) From 6a56ae75b0342fc187d9546db5c4f83ec407607b Mon Sep 17 00:00:00 2001 From: Ayene <2958807+ayenesimo1i@users.noreply.github.com> Date: Fri, 11 Oct 2024 18:31:11 +0300 
Subject: [PATCH 007/425] fix(docs): remove ci link (#11665) Co-authored-by: Federico Gimenez --- docs/repo/ci.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/repo/ci.md b/docs/repo/ci.md index da84e001f..5ed2cec00 100644 --- a/docs/repo/ci.md +++ b/docs/repo/ci.md @@ -26,7 +26,7 @@ The CI runs a couple of workflows: ### Integration Testing -- **[assertoor]**: Runs Assertoor tests on Reth pairs. +- **[kurtosis]**: Spins up a Kurtosis testnet and runs Assertoor tests on Reth pairs. - **[hive]**: Runs `ethereum/hive` tests. ### Linting and Checks @@ -48,7 +48,7 @@ The CI runs a couple of workflows: [dependencies]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/dependencies.yml [stale]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/stale.yml [docker]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/docker.yml -[assertoor]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/assertoor.yml +[kurtosis]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/kurtosis.yml [hive]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/hive.yml [lint]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/lint.yml [lint-actions]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/lint-actions.yml From ad2a2f2101d7407efec67293f5f9509faa0fb5f6 Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Sat, 12 Oct 2024 00:27:24 +0800 Subject: [PATCH 008/425] chore(test): use collect void realloc (#11669) --- crates/transaction-pool/src/test_utils/mock.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index 6b470bb6f..e2b5f373e 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -1338,10 +1338,8 @@ impl MockTransactionDistribution { nonce_range: Range, rng: &mut impl rand::Rng, ) -> MockTransactionSet { - let mut txs = Vec::new(); - for nonce in nonce_range { - txs.push(self.tx(nonce, rng).with_sender(sender)); - } + let txs = + nonce_range.map(|nonce| self.tx(nonce, rng).with_sender(sender)).collect::>(); MockTransactionSet::new(txs) } From 0affb976a02881ab4aaa3b56b7d6f3e0a9ebbc9a Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Sat, 12 Oct 2024 01:49:29 +0800 Subject: [PATCH 009/425] transaction-pool:rm redundance clone (#11667) --- crates/transaction-pool/src/pool/best.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/transaction-pool/src/pool/best.rs b/crates/transaction-pool/src/pool/best.rs index 5880a73f5..52f25a9db 100644 --- a/crates/transaction-pool/src/pool/best.rs +++ b/crates/transaction-pool/src/pool/best.rs @@ -132,9 +132,8 @@ impl BestTransactions { /// created and inserts them fn add_new_transactions(&mut self) { while let Some(pending_tx) = self.try_recv() { - let tx = pending_tx.transaction.clone(); // same logic as PendingPool::add_transaction/PendingPool::best_with_unlocked - let tx_id = *tx.id(); + let tx_id = *pending_tx.transaction.id(); if self.ancestor(&tx_id).is_none() { self.independent.insert(pending_tx.clone()); } From d8b7f6014f90c86afc0415b0200929752a415a63 Mon Sep 17 00:00:00 2001 From: Francis Li Date: Fri, 11 Oct 2024 10:55:22 -0700 Subject: [PATCH 010/425] fix(rpc): add missing codes for witness (#11673) --- crates/rpc/rpc/src/debug.rs | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/crates/rpc/rpc/src/debug.rs 
b/crates/rpc/rpc/src/debug.rs index b26459c4f..738b91ad0 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -34,7 +34,7 @@ use reth_rpc_server_types::{result::internal_rpc_err, ToRpcResult}; use reth_tasks::pool::BlockingTaskGuard; use reth_trie::{HashedPostState, HashedStorage}; use revm::{ - db::CacheDB, + db::{CacheDB, State}, primitives::{db::DatabaseCommit, BlockEnv, CfgEnvWithHandlerCfg, Env, EnvWithHandlerCfg}, }; use revm_inspectors::tracing::{ @@ -614,12 +614,23 @@ where let _ = block_executor .execute_with_state_closure( (&block.clone().unseal(), block.difficulty).into(), - |statedb| { + |statedb: &State<_>| { codes = statedb .cache .contracts .iter() .map(|(hash, code)| (*hash, code.original_bytes())) + .chain( + // cache state does not have all the contracts, especially when + // a contract is created within the block + // the contract only exists in bundle state, therefore we need + // to include them as well + statedb + .bundle_state + .contracts + .iter() + .map(|(hash, code)| (*hash, code.original_bytes())), + ) .collect(); for (address, account) in &statedb.cache.accounts { From bca11aa2dd2ad2a0fdb43853ca1c5047fcb43e8f Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Fri, 11 Oct 2024 19:56:33 +0200 Subject: [PATCH 011/425] clippy: add `from_iter_instead_of_collect` warn (#11666) --- Cargo.toml | 1 + crates/evm/execution-types/src/chain.rs | 2 +- crates/rpc/rpc-builder/src/metrics.rs | 9 ++-- crates/rpc/rpc/src/debug.rs | 6 +-- .../src/providers/database/provider.rs | 8 ++-- .../storage/provider/src/test_utils/blocks.rs | 47 +++++++++---------- crates/transaction-pool/benches/reorder.rs | 10 ++-- crates/transaction-pool/src/maintain.rs | 2 +- crates/trie/db/src/state.rs | 32 +++++++------ crates/trie/db/tests/post_state.rs | 44 +++++++++-------- crates/trie/trie/src/proof.rs | 2 +- crates/trie/trie/src/updates.rs | 2 +- 12 files changed, 85 insertions(+), 80 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index db115c89c..efae22f8e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -183,6 +183,7 @@ equatable_if_let = "warn" explicit_into_iter_loop = "warn" explicit_iter_loop = "warn" flat_map_option = "warn" +from_iter_instead_of_collect = "warn" if_not_else = "warn" implicit_clone = "warn" imprecise_flops = "warn" diff --git a/crates/evm/execution-types/src/chain.rs b/crates/evm/execution-types/src/chain.rs index 25bc39ea3..30f1f4cd2 100644 --- a/crates/evm/execution-types/src/chain.rs +++ b/crates/evm/execution-types/src/chain.rs @@ -52,7 +52,7 @@ impl Chain { execution_outcome: ExecutionOutcome, trie_updates: Option, ) -> Self { - let blocks = BTreeMap::from_iter(blocks.into_iter().map(|b| (b.number, b))); + let blocks = blocks.into_iter().map(|b| (b.number, b)).collect::>(); debug_assert!(!blocks.is_empty(), "Chain should have at least one block"); Self { blocks, execution_outcome, trie_updates } diff --git a/crates/rpc/rpc-builder/src/metrics.rs b/crates/rpc/rpc-builder/src/metrics.rs index 08fd38898..57283ded3 100644 --- a/crates/rpc/rpc-builder/src/metrics.rs +++ b/crates/rpc/rpc-builder/src/metrics.rs @@ -30,9 +30,12 @@ impl RpcRequestMetrics { Self { inner: Arc::new(RpcServerMetricsInner { connection_metrics: transport.connection_metrics(), - call_metrics: HashMap::from_iter(module.method_names().map(|method| { - (method, RpcServerCallMetrics::new_with_labels(&[("method", method)])) - })), + call_metrics: module + .method_names() + .map(|method| { + (method, 
RpcServerCallMetrics::new_with_labels(&[("method", method)])) + }) + .collect(), }), } } diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index 738b91ad0..fbef6f7a7 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -663,11 +663,7 @@ where let state = state_provider.witness(Default::default(), hashed_state).map_err(Into::into)?; - Ok(ExecutionWitness { - state: HashMap::from_iter(state.into_iter()), - codes, - keys: Some(keys), - }) + Ok(ExecutionWitness { state: state.into_iter().collect(), codes, keys: Some(keys) }) }) .await } diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index c9b0af3d3..33fed1e80 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -2914,10 +2914,10 @@ impl HashingWriter for DatabaseProvider()?; // Hash the address and key and apply them to HashedStorage (if Storage is None diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index daed90664..f7928319b 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -324,11 +324,10 @@ fn block3( ) .state_storage( address, - HashMap::from_iter( - slot_range - .clone() - .map(|slot| (U256::from(slot), (U256::ZERO, U256::from(slot)))), - ), + slot_range + .clone() + .map(|slot| (U256::from(slot), (U256::ZERO, U256::from(slot)))) + .collect(), ) .revert_account_info(number, address, Some(None)) .revert_storage(number, address, Vec::new()); @@ -393,20 +392,18 @@ fn block4( ) .state_storage( address, - HashMap::from_iter( - slot_range.clone().map(|slot| { - (U256::from(slot), (U256::from(slot), U256::from(slot * 2))) - }), - ), + slot_range + .clone() + .map(|slot| (U256::from(slot), (U256::from(slot), U256::from(slot * 2)))) + .collect(), ) } else { bundle_state_builder.state_address(address).state_storage( address, - HashMap::from_iter( - slot_range - .clone() - .map(|slot| (U256::from(slot), (U256::from(slot), U256::ZERO))), - ), + slot_range + .clone() + .map(|slot| (U256::from(slot), (U256::from(slot), U256::ZERO))) + .collect(), ) }; // record previous account info @@ -423,7 +420,7 @@ fn block4( .revert_storage( number, address, - Vec::from_iter(slot_range.clone().map(|slot| (U256::from(slot), U256::from(slot)))), + slot_range.clone().map(|slot| (U256::from(slot), U256::from(slot))).collect(), ); } let execution_outcome = ExecutionOutcome::new( @@ -485,12 +482,11 @@ fn block5( ) .state_storage( address, - HashMap::from_iter( - slot_range - .clone() - .take(50) - .map(|slot| (U256::from(slot), (U256::from(slot), U256::from(slot * 4)))), - ), + slot_range + .clone() + .take(50) + .map(|slot| (U256::from(slot), (U256::from(slot), U256::from(slot * 4)))) + .collect(), ); bundle_state_builder = if idx % 2 == 0 { bundle_state_builder @@ -506,9 +502,10 @@ fn block5( .revert_storage( number, address, - Vec::from_iter( - slot_range.clone().map(|slot| (U256::from(slot), U256::from(slot * 2))), - ), + slot_range + .clone() + .map(|slot| (U256::from(slot), U256::from(slot * 2))) + .collect(), ) } else { bundle_state_builder.revert_address(number, address) diff --git a/crates/transaction-pool/benches/reorder.rs b/crates/transaction-pool/benches/reorder.rs index 1836ce40c..9fc216297 100644 --- a/crates/transaction-pool/benches/reorder.rs +++ b/crates/transaction-pool/benches/reorder.rs @@ -206,10 
+206,12 @@ mod implementations { self.base_fee = Some(base_fee); let drained = self.inner.drain(); - self.inner = BinaryHeap::from_iter(drained.map(|mock| { - let priority = mock.tx.effective_tip_per_gas(base_fee).expect("set"); - MockTransactionWithPriority { tx: mock.tx, priority } - })); + self.inner = drained + .map(|mock| { + let priority = mock.tx.effective_tip_per_gas(base_fee).expect("set"); + MockTransactionWithPriority { tx: mock.tx, priority } + }) + .collect(); } } } diff --git a/crates/transaction-pool/src/maintain.rs b/crates/transaction-pool/src/maintain.rs index 66b986147..36c7067d4 100644 --- a/crates/transaction-pool/src/maintain.rs +++ b/crates/transaction-pool/src/maintain.rs @@ -490,7 +490,7 @@ impl MaintainedPoolState { } } -/// A unique `ChangedAccount` identified by its address that can be used for deduplication +/// A unique [`ChangedAccount`] identified by its address that can be used for deduplication #[derive(Eq)] struct ChangedAccountEntry(ChangedAccount); diff --git a/crates/trie/db/src/state.rs b/crates/trie/db/src/state.rs index 5acb9e0d1..4d46183df 100644 --- a/crates/trie/db/src/state.rs +++ b/crates/trie/db/src/state.rs @@ -244,22 +244,24 @@ impl DatabaseHashedPostState for HashedPostState { } } - let hashed_accounts = HashMap::from_iter( - accounts.into_iter().map(|(address, info)| (keccak256(address), info)), - ); + let hashed_accounts = + accounts.into_iter().map(|(address, info)| (keccak256(address), info)).collect(); - let hashed_storages = HashMap::from_iter(storages.into_iter().map(|(address, storage)| { - ( - keccak256(address), - HashedStorage::from_iter( - // The `wiped` flag indicates only whether previous storage entries - // should be looked up in db or not. For reverts it's a noop since all - // wiped changes had been written as storage reverts. - false, - storage.into_iter().map(|(slot, value)| (keccak256(slot), value)), - ), - ) - })); + let hashed_storages = storages + .into_iter() + .map(|(address, storage)| { + ( + keccak256(address), + HashedStorage::from_iter( + // The `wiped` flag indicates only whether previous storage entries + // should be looked up in db or not. For reverts it's a noop since all + // wiped changes had been written as storage reverts. 
+ false, + storage.into_iter().map(|(slot, value)| (keccak256(slot), value)), + ), + ) + }) + .collect(); Ok(Self { accounts: hashed_accounts, storages: hashed_storages }) } diff --git a/crates/trie/db/tests/post_state.rs b/crates/trie/db/tests/post_state.rs index ceadf7cde..ce6f10d76 100644 --- a/crates/trie/db/tests/post_state.rs +++ b/crates/trie/db/tests/post_state.rs @@ -55,7 +55,7 @@ fn assert_storage_cursor_order( #[test] fn post_state_only_accounts() { let accounts = - Vec::from_iter((1..11).map(|key| (B256::with_last_byte(key), Account::default()))); + (1..11).map(|key| (B256::with_last_byte(key), Account::default())).collect::>(); let mut hashed_post_state = HashedPostState::default(); for (hashed_address, account) in &accounts { @@ -73,7 +73,7 @@ fn post_state_only_accounts() { #[test] fn db_only_accounts() { let accounts = - Vec::from_iter((1..11).map(|key| (B256::with_last_byte(key), Account::default()))); + (1..11).map(|key| (B256::with_last_byte(key), Account::default())).collect::>(); let db = create_test_rw_db(); db.update(|tx| { @@ -96,7 +96,7 @@ fn db_only_accounts() { fn account_cursor_correct_order() { // odd keys are in post state, even keys are in db let accounts = - Vec::from_iter((1..111).map(|key| (B256::with_last_byte(key), Account::default()))); + (1..111).map(|key| (B256::with_last_byte(key), Account::default())).collect::>(); let db = create_test_rw_db(); db.update(|tx| { @@ -121,9 +121,9 @@ fn account_cursor_correct_order() { fn removed_accounts_are_discarded() { // odd keys are in post state, even keys are in db let accounts = - Vec::from_iter((1..111).map(|key| (B256::with_last_byte(key), Account::default()))); + (1..111).map(|key| (B256::with_last_byte(key), Account::default())).collect::>(); // accounts 5, 9, 11 should be considered removed from post state - let removed_keys = Vec::from_iter([5, 9, 11].into_iter().map(B256::with_last_byte)); + let removed_keys = [5, 9, 11].into_iter().map(B256::with_last_byte).collect::>(); let db = create_test_rw_db(); db.update(|tx| { @@ -150,9 +150,9 @@ fn removed_accounts_are_discarded() { #[test] fn post_state_accounts_take_precedence() { - let accounts = Vec::from_iter((1..10).map(|key| { - (B256::with_last_byte(key), Account { nonce: key as u64, ..Default::default() }) - })); + let accounts = (1..10) + .map(|key| (B256::with_last_byte(key), Account { nonce: key as u64, ..Default::default() })) + .collect::>(); let db = create_test_rw_db(); db.update(|tx| { @@ -224,7 +224,7 @@ fn storage_is_empty() { } let db_storage = - BTreeMap::from_iter((0..10).map(|key| (B256::with_last_byte(key), U256::from(key)))); + (0..10).map(|key| (B256::with_last_byte(key), U256::from(key))).collect::>(); db.update(|tx| { for (slot, value) in &db_storage { // insert zero value accounts to the database @@ -299,9 +299,10 @@ fn storage_is_empty() { fn storage_cursor_correct_order() { let address = B256::random(); let db_storage = - BTreeMap::from_iter((1..11).map(|key| (B256::with_last_byte(key), U256::from(key)))); - let post_state_storage = - BTreeMap::from_iter((11..21).map(|key| (B256::with_last_byte(key), U256::from(key)))); + (1..11).map(|key| (B256::with_last_byte(key), U256::from(key))).collect::>(); + let post_state_storage = (11..21) + .map(|key| (B256::with_last_byte(key), U256::from(key))) + .collect::>(); let db = create_test_rw_db(); db.update(|tx| { @@ -334,10 +335,12 @@ fn storage_cursor_correct_order() { fn zero_value_storage_entries_are_discarded() { let address = B256::random(); let db_storage = - 
BTreeMap::from_iter((0..10).map(|key| (B256::with_last_byte(key), U256::from(key)))); // every even number is changed to zero value - let post_state_storage = BTreeMap::from_iter((0..10).map(|key| { - (B256::with_last_byte(key), if key % 2 == 0 { U256::ZERO } else { U256::from(key) }) - })); + (0..10).map(|key| (B256::with_last_byte(key), U256::from(key))).collect::>(); // every even number is changed to zero value + let post_state_storage = (0..10) + .map(|key| { + (B256::with_last_byte(key), if key % 2 == 0 { U256::ZERO } else { U256::from(key) }) + }) + .collect::>(); let db = create_test_rw_db(); db.update(|tx| { @@ -371,9 +374,10 @@ fn zero_value_storage_entries_are_discarded() { fn wiped_storage_is_discarded() { let address = B256::random(); let db_storage = - BTreeMap::from_iter((1..11).map(|key| (B256::with_last_byte(key), U256::from(key)))); - let post_state_storage = - BTreeMap::from_iter((11..21).map(|key| (B256::with_last_byte(key), U256::from(key)))); + (1..11).map(|key| (B256::with_last_byte(key), U256::from(key))).collect::>(); + let post_state_storage = (11..21) + .map(|key| (B256::with_last_byte(key), U256::from(key))) + .collect::>(); let db = create_test_rw_db(); db.update(|tx| { @@ -404,7 +408,7 @@ fn wiped_storage_is_discarded() { fn post_state_storages_take_precedence() { let address = B256::random(); let storage = - BTreeMap::from_iter((1..10).map(|key| (B256::with_last_byte(key), U256::from(key)))); + (1..10).map(|key| (B256::with_last_byte(key), U256::from(key))).collect::>(); let db = create_test_rw_db(); db.update(|tx| { diff --git a/crates/trie/trie/src/proof.rs b/crates/trie/trie/src/proof.rs index d31d63fd9..e99d686ac 100644 --- a/crates/trie/trie/src/proof.rs +++ b/crates/trie/trie/src/proof.rs @@ -100,7 +100,7 @@ where let walker = TrieWalker::new(trie_cursor, prefix_set.freeze()); // Create a hash builder to rebuild the root node since it is not available in the database. 
- let retainer = ProofRetainer::from_iter(targets.keys().map(Nibbles::unpack)); + let retainer = targets.keys().map(Nibbles::unpack).collect(); let mut hash_builder = HashBuilder::default().with_proof_retainer(retainer); let mut storages = HashMap::default(); diff --git a/crates/trie/trie/src/updates.rs b/crates/trie/trie/src/updates.rs index 03e80cf52..6d1bcab63 100644 --- a/crates/trie/trie/src/updates.rs +++ b/crates/trie/trie/src/updates.rs @@ -236,7 +236,7 @@ mod serde_nibbles_set { S: Serializer, { let mut storage_nodes = - Vec::from_iter(map.iter().map(|elem| alloy_primitives::hex::encode(elem.pack()))); + map.iter().map(|elem| alloy_primitives::hex::encode(elem.pack())).collect::>(); storage_nodes.sort_unstable(); storage_nodes.serialize(serializer) } From 160e4b2ce7a7ab4ff70a4cae7644a3dd01cc25ee Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Fri, 11 Oct 2024 20:11:58 +0200 Subject: [PATCH 012/425] tx-pool: simplify `FinalizedBlockTracker` update logic (#11664) --- crates/transaction-pool/src/maintain.rs | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/crates/transaction-pool/src/maintain.rs b/crates/transaction-pool/src/maintain.rs index 36c7067d4..aaf2d6d12 100644 --- a/crates/transaction-pool/src/maintain.rs +++ b/crates/transaction-pool/src/maintain.rs @@ -454,20 +454,11 @@ impl FinalizedBlockTracker { /// Updates the tracked finalized block and returns the new finalized block if it changed fn update(&mut self, finalized_block: Option) -> Option { - match (self.last_finalized_block, finalized_block) { - (Some(last), Some(finalized)) => { - self.last_finalized_block = Some(finalized); - if last < finalized { - Some(finalized) - } else { - None - } - } - (None, Some(finalized)) => { - self.last_finalized_block = Some(finalized); - Some(finalized) - } - _ => None, + let finalized = finalized_block?; + if self.last_finalized_block.replace(finalized).map_or(true, |last| last < finalized) { + Some(finalized) + } else { + None } } } From b51299b86bc8447e37d3a3c9a4a8329091d58936 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Fri, 11 Oct 2024 20:22:20 +0200 Subject: [PATCH 013/425] refac: small refactor in `BlockchainProvider2` (#11660) --- .../provider/src/providers/blockchain_provider.rs | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs b/crates/storage/provider/src/providers/blockchain_provider.rs index a0811db2a..dc4264210 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -681,7 +681,7 @@ impl HeaderProvider for BlockchainProvider2 { { last_finalized_num_hash.number } else { - self.database.last_block_number()? + self.last_block_number()? 
} } else { // Otherwise, return what we have on disk for the input block @@ -841,12 +841,7 @@ impl BlockReader for BlockchainProvider2 { id, |db_provider| db_provider.ommers(id), |block_state| { - if self - .database - .chain_spec() - .final_paris_total_difficulty(block_state.number()) - .is_some() - { + if self.chain_spec().final_paris_total_difficulty(block_state.number()).is_some() { return Ok(Some(Vec::new())) } @@ -1174,7 +1169,7 @@ impl WithdrawalsProvider for BlockchainProvider2 { id: BlockHashOrNumber, timestamp: u64, ) -> ProviderResult> { - if !self.database.chain_spec().is_shanghai_active_at_timestamp(timestamp) { + if !self.chain_spec().is_shanghai_active_at_timestamp(timestamp) { return Ok(None) } @@ -1210,7 +1205,7 @@ impl RequestsProvider for BlockchainProvider2 { id: BlockHashOrNumber, timestamp: u64, ) -> ProviderResult> { - if !self.database.chain_spec().is_prague_active_at_timestamp(timestamp) { + if !self.chain_spec().is_prague_active_at_timestamp(timestamp) { return Ok(None) } From f2440c763520e16315b3f60241ff9c9b52a4c50c Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Fri, 11 Oct 2024 20:52:36 +0200 Subject: [PATCH 014/425] test: add unit tests for `ChainInfoTracker` (#11642) Co-authored-by: Alexey Shekhirin --- Cargo.lock | 1 + crates/chain-state/Cargo.toml | 1 + crates/chain-state/src/chain_info.rs | 203 ++++++++++++++++++++++++++- 3 files changed, 201 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 37da7b575..22e1360a2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6511,6 +6511,7 @@ dependencies = [ "reth-metrics", "reth-primitives", "reth-storage-api", + "reth-testing-utils", "reth-trie", "revm", "tokio", diff --git a/crates/chain-state/Cargo.toml b/crates/chain-state/Cargo.toml index 63016918c..c9691bec4 100644 --- a/crates/chain-state/Cargo.toml +++ b/crates/chain-state/Cargo.toml @@ -47,6 +47,7 @@ rand = { workspace = true, optional = true } revm = { workspace = true, optional = true } [dev-dependencies] +reth-testing-utils.workspace = true alloy-signer.workspace = true alloy-signer-local.workspace = true alloy-consensus.workspace = true diff --git a/crates/chain-state/src/chain_info.rs b/crates/chain-state/src/chain_info.rs index d36d9f47e..3c75544ac 100644 --- a/crates/chain-state/src/chain_info.rs +++ b/crates/chain-state/src/chain_info.rs @@ -95,14 +95,12 @@ impl ChainInfoTracker { /// Returns the safe header of the chain. pub fn get_safe_num_hash(&self) -> Option { - let h = self.inner.safe_block.borrow(); - h.as_ref().map(|h| h.num_hash()) + self.inner.safe_block.borrow().as_ref().map(SealedHeader::num_hash) } /// Returns the finalized header of the chain. pub fn get_finalized_num_hash(&self) -> Option { - let h = self.inner.finalized_block.borrow(); - h.as_ref().map(|h| h.num_hash()) + self.inner.finalized_block.borrow().as_ref().map(SealedHeader::num_hash) } /// Sets the canonical head of the chain. @@ -169,3 +167,200 @@ struct ChainInfoInner { /// The block that the beacon node considers finalized. 
finalized_block: watch::Sender>, } + +#[cfg(test)] +mod tests { + use super::*; + use reth_testing_utils::{generators, generators::random_header}; + + #[test] + fn test_chain_info() { + // Create a random header + let mut rng = generators::rng(); + let header = random_header(&mut rng, 10, None); + + // Create a new chain info tracker with the header + let tracker = ChainInfoTracker::new(header.clone(), None, None); + + // Fetch the chain information from the tracker + let chain_info = tracker.chain_info(); + + // Verify that the chain information matches the header + assert_eq!(chain_info.best_number, header.number); + assert_eq!(chain_info.best_hash, header.hash()); + } + + #[test] + fn test_on_forkchoice_update_received() { + // Create a random block header + let mut rng = generators::rng(); + let header = random_header(&mut rng, 10, None); + + // Create a new chain info tracker with the header + let tracker = ChainInfoTracker::new(header, None, None); + + // Assert that there has been no forkchoice update yet (the timestamp is None) + assert!(tracker.last_forkchoice_update_received_at().is_none()); + + // Call the method to record the receipt of a forkchoice update + tracker.on_forkchoice_update_received(); + + // Assert that there is now a timestamp indicating when the forkchoice update was received + assert!(tracker.last_forkchoice_update_received_at().is_some()); + } + + #[test] + fn test_on_transition_configuration_exchanged() { + // Create a random header + let mut rng = generators::rng(); + let header = random_header(&mut rng, 10, None); + + // Create a new chain info tracker with the header + let tracker = ChainInfoTracker::new(header, None, None); + + // Assert that there has been no transition configuration exchange yet (the timestamp is + // None) + assert!(tracker.last_transition_configuration_exchanged_at().is_none()); + + // Call the method to record the transition configuration exchange + tracker.on_transition_configuration_exchanged(); + + // Assert that there is now a timestamp indicating when the transition configuration + // exchange occurred + assert!(tracker.last_transition_configuration_exchanged_at().is_some()); + } + + #[test] + fn test_set_canonical_head() { + // Create a random number generator + let mut rng = generators::rng(); + // Generate two random headers for testing + let header1 = random_header(&mut rng, 10, None); + let header2 = random_header(&mut rng, 20, None); + + // Create a new chain info tracker with the first header + let tracker = ChainInfoTracker::new(header1, None, None); + + // Set the second header as the canonical head of the tracker + tracker.set_canonical_head(header2.clone()); + + // Assert that the tracker now uses the second header as its canonical head + let canonical_head = tracker.get_canonical_head(); + assert_eq!(canonical_head, header2); + } + + #[test] + fn test_set_safe() { + // Create a random number generator + let mut rng = generators::rng(); + + // Case 1: basic test + // Generate two random headers for the test + let header1 = random_header(&mut rng, 10, None); + let header2 = random_header(&mut rng, 20, None); + + // Create a new chain info tracker with the first header (header1) + let tracker = ChainInfoTracker::new(header1, None, None); + + // Call the set_safe method with the second header (header2) + tracker.set_safe(header2.clone()); + + // Verify that the tracker now has header2 as the safe block + let safe_header = tracker.get_safe_header(); + assert!(safe_header.is_some()); // Ensure a safe header is present + 
let safe_header = safe_header.unwrap(); + assert_eq!(safe_header, header2); + + // Case 2: call with the same header as the current safe block + // Call set_safe again with the same header (header2) + tracker.set_safe(header2.clone()); + + // Verify that nothing changes and the safe header remains the same + let same_safe_header = tracker.get_safe_header(); + assert!(same_safe_header.is_some()); + let same_safe_header = same_safe_header.unwrap(); + assert_eq!(same_safe_header, header2); + + // Case 3: call with a different (new) header + // Generate a third header with a higher block number + let header3 = random_header(&mut rng, 30, None); + + // Call set_safe with this new header (header3) + tracker.set_safe(header3.clone()); + + // Verify that the safe header is updated with the new header + let updated_safe_header = tracker.get_safe_header(); + assert!(updated_safe_header.is_some()); + let updated_safe_header = updated_safe_header.unwrap(); + assert_eq!(updated_safe_header, header3); + } + + #[test] + fn test_set_finalized() { + // Create a random number generator + let mut rng = generators::rng(); + + // Generate random headers for testing + let header1 = random_header(&mut rng, 10, None); + let header2 = random_header(&mut rng, 20, None); + let header3 = random_header(&mut rng, 30, None); + + // Create a new chain info tracker with the first header + let tracker = ChainInfoTracker::new(header1, None, None); + + // Initial state: finalize header should be None + assert!(tracker.get_finalized_header().is_none()); + + // Set the second header as the finalized header + tracker.set_finalized(header2.clone()); + + // Assert that the tracker now uses the second header as its finalized block + let finalized_header = tracker.get_finalized_header(); + assert!(finalized_header.is_some()); + let finalized_header = finalized_header.unwrap(); + assert_eq!(finalized_header, header2); + + // Case 2: attempt to set the same finalized header again + tracker.set_finalized(header2.clone()); + + // The finalized header should remain unchanged + let unchanged_finalized_header = tracker.get_finalized_header(); + assert_eq!(unchanged_finalized_header.unwrap(), header2); // Should still be header2 + + // Case 3: set a higher block number as finalized + tracker.set_finalized(header3.clone()); + + // The finalized header should now be updated to header3 + let updated_finalized_header = tracker.get_finalized_header(); + assert!(updated_finalized_header.is_some()); + assert_eq!(updated_finalized_header.unwrap(), header3); + } + + #[test] + fn test_get_finalized_num_hash() { + // Create a random header + let mut rng = generators::rng(); + let finalized_header = random_header(&mut rng, 10, None); + + // Create a new chain info tracker with the finalized header + let tracker = + ChainInfoTracker::new(finalized_header.clone(), Some(finalized_header.clone()), None); + + // Assert that the BlockNumHash returned matches the finalized header + assert_eq!(tracker.get_finalized_num_hash(), Some(finalized_header.num_hash())); + } + + #[test] + fn test_get_safe_num_hash() { + // Create a random header + let mut rng = generators::rng(); + let safe_header = random_header(&mut rng, 10, None); + + // Create a new chain info tracker with the safe header + let tracker = ChainInfoTracker::new(safe_header.clone(), None, None); + tracker.set_safe(safe_header.clone()); + + // Assert that the BlockNumHash returned matches the safe header + assert_eq!(tracker.get_safe_num_hash(), Some(safe_header.num_hash())); + } +} From 
5e1bd04941604156c9c212f19174044592410d37 Mon Sep 17 00:00:00 2001 From: 0xDmtri <0xDmtri@protonmail.com> Date: Fri, 11 Oct 2024 20:53:34 +0200 Subject: [PATCH 015/425] chore(examples): fix db-access example with RO provider (#11670) Co-authored-by: Matthias Seitz --- examples/db-access/src/main.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/examples/db-access/src/main.rs b/examples/db-access/src/main.rs index ab018a0b0..5772461bd 100644 --- a/examples/db-access/src/main.rs +++ b/examples/db-access/src/main.rs @@ -1,6 +1,7 @@ use alloy_primitives::{Address, Sealable, B256}; use alloy_rpc_types::{Filter, FilteredParams}; use reth_chainspec::ChainSpecBuilder; +use reth_db::{open_db_read_only, DatabaseEnv}; use reth_node_ethereum::EthereumNode; use reth_node_types::NodeTypesWithDBAdapter; use reth_primitives::SealedHeader; @@ -8,7 +9,7 @@ use reth_provider::{ providers::StaticFileProvider, AccountReader, BlockReader, BlockSource, HeaderProvider, ProviderFactory, ReceiptProvider, StateProvider, TransactionsProvider, }; -use std::path::Path; +use std::{path::Path, sync::Arc}; // Providers are zero cost abstractions on top of an opened MDBX Transaction // exposing a familiar API to query the chain's information without requiring knowledge @@ -20,17 +21,16 @@ fn main() -> eyre::Result<()> { // Opens a RO handle to the database file. let db_path = std::env::var("RETH_DB_PATH")?; let db_path = Path::new(&db_path); + let db = open_db_read_only(db_path.join("db").as_path(), Default::default())?; // Instantiate a provider factory for Ethereum mainnet using the provided DB. // TODO: Should the DB version include the spec so that you do not need to specify it here? let spec = ChainSpecBuilder::mainnet().build(); - let factory = - ProviderFactory::>::new_with_database_path( - db_path, - spec.into(), - Default::default(), - StaticFileProvider::read_only(db_path.join("static_files"), false)?, - )?; + let factory = ProviderFactory::>>::new( + db.into(), + spec.into(), + StaticFileProvider::read_only(db_path.join("static_files"), true)?, + ); // This call opens a RO transaction on the database. To write to the DB you'd need to call // the `provider_rw` function and look for the `Writer` variants of the traits. From 75dda1c398153b674c760402056ec52a8c2cceaf Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Sat, 12 Oct 2024 08:24:06 +0400 Subject: [PATCH 016/425] fix: always poll new pool imports (#11675) --- crates/net/network/src/transactions/mod.rs | 46 +++++++++++----------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index bc77de9f4..439f92bad 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -1255,29 +1255,6 @@ where // yield back control to tokio. See `NetworkManager` for more context on the design // pattern. - // Advance pool imports (flush txns to pool). - // - // Note, this is done in batches. A batch is filled from one `Transactions` - // broadcast messages or one `PooledTransactions` response at a time. The - // minimum batch size is 1 transaction (and might often be the case with blob - // transactions). - // - // The smallest decodable transaction is an empty legacy transaction, 10 bytes - // (2 MiB / 10 bytes > 200k transactions). - // - // Since transactions aren't validated until they are inserted into the pool, - // this can potentially validate >200k transactions. 
More if the message size - // is bigger than the soft limit on a `PooledTransactions` response which is - // 2 MiB (`Transactions` broadcast messages is smaller, 128 KiB). - let maybe_more_pool_imports = metered_poll_nested_stream_with_budget!( - poll_durations.acc_pending_imports, - "net::tx", - "Batched pool imports stream", - DEFAULT_BUDGET_TRY_DRAIN_PENDING_POOL_IMPORTS, - this.pool_imports.poll_next_unpin(cx), - |batch_results| this.on_batch_import_result(batch_results) - ); - // Advance network/peer related events (update peers map). let maybe_more_network_events = metered_poll_nested_stream_with_budget!( poll_durations.acc_network_events, @@ -1351,6 +1328,29 @@ where |event| this.on_network_tx_event(event), ); + // Advance pool imports (flush txns to pool). + // + // Note, this is done in batches. A batch is filled from one `Transactions` + // broadcast messages or one `PooledTransactions` response at a time. The + // minimum batch size is 1 transaction (and might often be the case with blob + // transactions). + // + // The smallest decodable transaction is an empty legacy transaction, 10 bytes + // (2 MiB / 10 bytes > 200k transactions). + // + // Since transactions aren't validated until they are inserted into the pool, + // this can potentially validate >200k transactions. More if the message size + // is bigger than the soft limit on a `PooledTransactions` response which is + // 2 MiB (`Transactions` broadcast messages is smaller, 128 KiB). + let maybe_more_pool_imports = metered_poll_nested_stream_with_budget!( + poll_durations.acc_pending_imports, + "net::tx", + "Batched pool imports stream", + DEFAULT_BUDGET_TRY_DRAIN_PENDING_POOL_IMPORTS, + this.pool_imports.poll_next_unpin(cx), + |batch_results| this.on_batch_import_result(batch_results) + ); + // Tries to drain hashes pending fetch cache if the tx manager currently has // capacity for this (fetch txns). // From 43fe46f0d340fdf27e6b1b51916d0ecf9f6c314b Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Sat, 12 Oct 2024 10:06:19 +0200 Subject: [PATCH 017/425] fix(net): decrease budget for header reqs to process before yielding thread (#11636) --- crates/net/network/src/budget.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/net/network/src/budget.rs b/crates/net/network/src/budget.rs index 0b5e3d3a9..2f878cdb2 100644 --- a/crates/net/network/src/budget.rs +++ b/crates/net/network/src/budget.rs @@ -5,8 +5,8 @@ pub const DEFAULT_BUDGET_TRY_DRAIN_STREAM: u32 = 10; /// Default budget to try and drain headers and bodies download streams. /// -/// Default is 4 iterations. -pub const DEFAULT_BUDGET_TRY_DRAIN_DOWNLOADERS: u32 = 4; +/// Default is 1 iteration. +pub const DEFAULT_BUDGET_TRY_DRAIN_DOWNLOADERS: u32 = 1; /// Default budget to try and drain [`Swarm`](crate::swarm::Swarm). 
/// From 5c84daba11d5ce38b1649504f0bae947e2b7c362 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 12 Oct 2024 13:19:48 +0200 Subject: [PATCH 018/425] fix: dont remove txs manually (#11683) --- crates/consensus/auto-seal/src/task.rs | 6 ------ 1 file changed, 6 deletions(-) diff --git a/crates/consensus/auto-seal/src/task.rs b/crates/consensus/auto-seal/src/task.rs index e4873615f..cb0586d44 100644 --- a/crates/consensus/auto-seal/src/task.rs +++ b/crates/consensus/auto-seal/src/task.rs @@ -113,7 +113,6 @@ where let to_engine = this.to_engine.clone(); let client = this.client.clone(); let chain_spec = Arc::clone(&this.chain_spec); - let pool = this.pool.clone(); let events = this.pipe_line_events.take(); let executor = this.block_executor.clone(); @@ -139,11 +138,6 @@ where &executor, ) { Ok((new_header, _bundle_state)) => { - // clear all transactions from pool - pool.remove_transactions( - transactions.iter().map(|tx| tx.hash()).collect(), - ); - let state = ForkchoiceState { head_block_hash: new_header.hash(), finalized_block_hash: new_header.hash(), From 8ebf10b4ac3f5e95752fa28d2d8a9b14585730b9 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 12 Oct 2024 14:42:46 +0200 Subject: [PATCH 019/425] fix: only +1 on the pool nonce (#11680) --- crates/rpc/rpc-eth-api/src/helpers/state.rs | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/crates/rpc/rpc-eth-api/src/helpers/state.rs b/crates/rpc/rpc-eth-api/src/helpers/state.rs index d601e43d9..7b11ce6af 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/state.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/state.rs @@ -284,8 +284,8 @@ pub trait LoadState: EthApiTypes { Self: SpawnBlocking, { self.spawn_blocking_io(move |this| { - // first fetch the on chain nonce - let nonce = this + // first fetch the on chain nonce of the account + let on_chain_account_nonce = this .state_at_block_id_or_latest(block_id)? .account_nonce(address) .map_err(Self::Error::from_eth_err)? 
@@ -297,20 +297,24 @@ pub trait LoadState: EthApiTypes { this.pool().get_highest_transaction_by_sender(address) { { - // and the corresponding txcount is nonce + 1 - let next_nonce = - nonce.max(highest_pool_tx.nonce()).checked_add(1).ok_or_else(|| { + // and the corresponding txcount is nonce + 1 of the highest tx in the pool + // (on chain nonce is increased after tx) + let next_tx_nonce = + highest_pool_tx.nonce().checked_add(1).ok_or_else(|| { Self::Error::from(EthApiError::InvalidTransaction( RpcInvalidTransactionError::NonceMaxValue, )) })?; - let tx_count = nonce.max(next_nonce); + // guard against drifts in the pool + let next_tx_nonce = on_chain_account_nonce.max(next_tx_nonce); + + let tx_count = on_chain_account_nonce.max(next_tx_nonce); return Ok(U256::from(tx_count)); } } } - Ok(U256::from(nonce)) + Ok(U256::from(on_chain_account_nonce)) }) } From db1d64b1c8126abf7573ccb4a0a1a757c1a100b2 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Sat, 12 Oct 2024 14:46:03 +0200 Subject: [PATCH 020/425] test: more unit tests for `HashedPostState` (#11663) --- crates/trie/trie/src/state.rs | 148 ++++++++++++++++++++++++++++++++++ 1 file changed, 148 insertions(+) diff --git a/crates/trie/trie/src/state.rs b/crates/trie/trie/src/state.rs index 3b0af5cd8..2af48dfff 100644 --- a/crates/trie/trie/src/state.rs +++ b/crates/trie/trie/src/state.rs @@ -347,6 +347,15 @@ impl HashedStorageSorted { #[cfg(test)] mod tests { + use alloy_primitives::Bytes; + use revm::{ + db::{ + states::{plain_account::PlainStorage, StorageSlot}, + PlainAccount, StorageWithOriginalValues, + }, + primitives::{AccountInfo, Bytecode}, + }; + use super::*; #[test] @@ -422,4 +431,143 @@ mod tests { ); assert_eq!(account_storage.map(|st| st.wiped), Some(true)); } + + #[test] + fn test_hashed_post_state_from_bundle_state() { + // Prepare a random Ethereum address as a key for the account. + let address = Address::random(); + + // Create a mock account info object. + let account_info = AccountInfo { + balance: U256::from(123), + nonce: 42, + code_hash: B256::random(), + code: Some(Bytecode::LegacyRaw(Bytes::from(vec![1, 2]))), + }; + + let mut storage = StorageWithOriginalValues::default(); + storage.insert( + U256::from(1), + StorageSlot { present_value: U256::from(4), ..Default::default() }, + ); + + // Create a `BundleAccount` struct to represent the account and its storage. + let account = BundleAccount { + status: AccountStatus::Changed, + info: Some(account_info.clone()), + storage, + original_info: None, + }; + + // Create a vector of tuples representing the bundle state. + let state = vec![(&address, &account)]; + + // Convert the bundle state into a hashed post state. + let hashed_state = HashedPostState::from_bundle_state(state); + + // Validate the hashed post state. + assert_eq!(hashed_state.accounts.len(), 1); + assert_eq!(hashed_state.storages.len(), 1); + + // Validate the account info. + assert_eq!( + *hashed_state.accounts.get(&keccak256(address)).unwrap(), + Some(account_info.into()) + ); + } + + #[test] + fn test_hashed_post_state_from_cache_state() { + // Prepare a random Ethereum address. + let address = Address::random(); + + // Create mock account info. + let account_info = AccountInfo { + balance: U256::from(500), + nonce: 5, + code_hash: B256::random(), + code: None, + }; + + let mut storage = PlainStorage::default(); + storage.insert(U256::from(1), U256::from(35636)); + + // Create a `CacheAccount` with the mock account info. 
+ let account = CacheAccount { + account: Some(PlainAccount { info: account_info.clone(), storage }), + status: AccountStatus::Changed, + }; + + // Create a vector of tuples representing the cache state. + let state = vec![(&address, &account)]; + + // Convert the cache state into a hashed post state. + let hashed_state = HashedPostState::from_cache_state(state); + + // Validate the hashed post state. + assert_eq!(hashed_state.accounts.len(), 1); + assert_eq!(hashed_state.storages.len(), 1); + + // Validate the account info. + assert_eq!( + *hashed_state.accounts.get(&keccak256(address)).unwrap(), + Some(account_info.into()) + ); + } + + #[test] + fn test_hashed_post_state_with_accounts() { + // Prepare random addresses and mock account info. + let address_1 = Address::random(); + let address_2 = Address::random(); + + let account_info_1 = AccountInfo { + balance: U256::from(1000), + nonce: 1, + code_hash: B256::random(), + code: None, + }; + + // Create hashed accounts with addresses. + let account_1 = (keccak256(address_1), Some(account_info_1.into())); + let account_2 = (keccak256(address_2), None); + + // Add accounts to the hashed post state. + let hashed_state = HashedPostState::default().with_accounts(vec![account_1, account_2]); + + // Validate the hashed post state. + assert_eq!(hashed_state.accounts.len(), 2); + assert!(hashed_state.accounts.contains_key(&keccak256(address_1))); + assert!(hashed_state.accounts.contains_key(&keccak256(address_2))); + } + + #[test] + fn test_hashed_post_state_with_storages() { + // Prepare random addresses and mock storage entries. + let address_1 = Address::random(); + let address_2 = Address::random(); + + let storage_1 = (keccak256(address_1), HashedStorage::new(false)); + let storage_2 = (keccak256(address_2), HashedStorage::new(true)); + + // Add storages to the hashed post state. + let hashed_state = HashedPostState::default().with_storages(vec![storage_1, storage_2]); + + // Validate the hashed post state. + assert_eq!(hashed_state.storages.len(), 2); + assert!(hashed_state.storages.contains_key(&keccak256(address_1))); + assert!(hashed_state.storages.contains_key(&keccak256(address_2))); + } + + #[test] + fn test_hashed_post_state_is_empty() { + // Create an empty hashed post state and validate it's empty. + let empty_state = HashedPostState::default(); + assert!(empty_state.is_empty()); + + // Add an account and validate the state is no longer empty. + let non_empty_state = HashedPostState::default() + .with_accounts(vec![(keccak256(Address::random()), Some(Account::default()))]); + assert!(!non_empty_state.is_empty()); + } } From b365bd52f260ba3459f305926c3d6c3264913ed1 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 12 Oct 2024 15:27:52 +0200 Subject: [PATCH 021/425] docs: complete sentence (#11685) --- crates/net/network/src/transactions/config.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/net/network/src/transactions/config.rs b/crates/net/network/src/transactions/config.rs index 81ec293ea..b838f7cfe 100644 --- a/crates/net/network/src/transactions/config.rs +++ b/crates/net/network/src/transactions/config.rs @@ -47,7 +47,7 @@ pub enum TransactionPropagationMode { } impl TransactionPropagationMode { - /// Returns the number of peers that should + /// Returns the number of peers full transactions should be propagated to. 
pub(crate) fn full_peer_count(&self, peer_count: usize) -> usize { match self { Self::Sqrt => (peer_count as f64).sqrt().round() as usize, From 9ec4c000246f84594ec7169e88e31a5ae496cdfe Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 12 Oct 2024 15:28:08 +0200 Subject: [PATCH 022/425] chore: we dont need sat here (#11678) --- crates/net/network/src/budget.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/net/network/src/budget.rs b/crates/net/network/src/budget.rs index 2f878cdb2..6eba076f6 100644 --- a/crates/net/network/src/budget.rs +++ b/crates/net/network/src/budget.rs @@ -55,7 +55,7 @@ macro_rules! poll_nested_stream_with_budget { let mut f = $on_ready_some; f(item); - budget = budget.saturating_sub(1); + budget -= 1; if budget == 0 { break true } From de736a53cc7216141d900f4e145f06e892d6cdd7 Mon Sep 17 00:00:00 2001 From: Delweng Date: Sat, 12 Oct 2024 21:29:20 +0800 Subject: [PATCH 023/425] chore(stages): reduce the progress logging (#11653) Signed-off-by: jsvisa --- .../stages/src/stages/hashing_account.rs | 20 +++++---- .../stages/src/stages/hashing_storage.rs | 20 +++++---- crates/stages/stages/src/stages/headers.rs | 24 +++++----- crates/stages/stages/src/stages/tx_lookup.rs | 8 +++- crates/stages/stages/src/stages/utils.rs | 44 ++++++++++++++----- 5 files changed, 74 insertions(+), 42 deletions(-) diff --git a/crates/stages/stages/src/stages/hashing_account.rs b/crates/stages/stages/src/stages/hashing_account.rs index 14afb37d8..2fdccd783 100644 --- a/crates/stages/stages/src/stages/hashing_account.rs +++ b/crates/stages/stages/src/stages/hashing_account.rs @@ -1,3 +1,4 @@ +use crate::log_progress; use alloy_primitives::{keccak256, B256}; use itertools::Itertools; use reth_config::config::{EtlConfig, HashingConfig}; @@ -18,6 +19,7 @@ use std::{ fmt::Debug, ops::{Range, RangeInclusive}, sync::mpsc::{self, Receiver}, + time::Instant, }; use tracing::*; @@ -186,16 +188,16 @@ where let mut hashed_account_cursor = tx.cursor_write::>()?; - let total_hashes = collector.len(); - let interval = (total_hashes / 10).max(1); + let total = collector.len(); + let mut last_log = Instant::now(); for (index, item) in collector.iter()?.enumerate() { - if index > 0 && index % interval == 0 { - info!( - target: "sync::stages::hashing_account", - progress = %format!("{:.2}%", (index as f64 / total_hashes as f64) * 100.0), - "Inserting hashes" - ); - } + log_progress!( + "sync::stages::hashing_account", + index, + total, + last_log, + "Inserting hashes" + ); let (key, value) = item?; hashed_account_cursor diff --git a/crates/stages/stages/src/stages/hashing_storage.rs b/crates/stages/stages/src/stages/hashing_storage.rs index ef070d30c..064603293 100644 --- a/crates/stages/stages/src/stages/hashing_storage.rs +++ b/crates/stages/stages/src/stages/hashing_storage.rs @@ -1,3 +1,4 @@ +use crate::log_progress; use alloy_primitives::{bytes::BufMut, keccak256, B256}; use itertools::Itertools; use reth_config::config::{EtlConfig, HashingConfig}; @@ -19,6 +20,7 @@ use reth_storage_errors::provider::ProviderResult; use std::{ fmt::Debug, sync::mpsc::{self, Receiver}, + time::Instant, }; use tracing::*; @@ -117,17 +119,17 @@ where collect(&mut channels, &mut collector)?; - let total_hashes = collector.len(); - let interval = (total_hashes / 10).max(1); + let total = collector.len(); + let mut last_log = Instant::now(); let mut cursor = tx.cursor_dup_write::()?; for (index, item) in collector.iter()?.enumerate() { - if index > 0 && index % interval == 0 { - info!( - target: 
"sync::stages::hashing_storage", - progress = %format!("{:.2}%", (index as f64 / total_hashes as f64) * 100.0), - "Inserting hashes" - ); - } + log_progress!( + "sync::stages::hashing_storage", + index, + total, + last_log, + "Inserting hashes" + ); let (addr_key, value) = item?; cursor.append_dup( diff --git a/crates/stages/stages/src/stages/headers.rs b/crates/stages/stages/src/stages/headers.rs index 199e015c2..eaa2ea01f 100644 --- a/crates/stages/stages/src/stages/headers.rs +++ b/crates/stages/stages/src/stages/headers.rs @@ -1,3 +1,4 @@ +use crate::log_progress; use alloy_primitives::{BlockHash, BlockNumber, Bytes, B256}; use futures_util::StreamExt; use reth_config::config::EtlConfig; @@ -25,6 +26,7 @@ use reth_storage_errors::provider::ProviderError; use std::{ sync::Arc, task::{ready, Context, Poll}, + time::Instant, }; use tokio::sync::watch; use tracing::*; @@ -95,9 +97,9 @@ where provider: &impl DBProvider, static_file_provider: StaticFileProvider, ) -> Result { - let total_headers = self.header_collector.len(); + let total = self.header_collector.len(); - info!(target: "sync::stages::headers", total = total_headers, "Writing headers"); + info!(target: "sync::stages::headers", total, "Writing headers"); // Consistency check of expected headers in static files vs DB is done on provider::sync_gap // when poll_execute_ready is polled. @@ -113,13 +115,11 @@ where // Although headers were downloaded in reverse order, the collector iterates it in ascending // order let mut writer = static_file_provider.latest_writer(StaticFileSegment::Headers)?; - let interval = (total_headers / 10).max(1); + let mut last_log = Instant::now(); for (index, header) in self.header_collector.iter()?.enumerate() { let (_, header_buf) = header?; - if index > 0 && index % interval == 0 && total_headers > 100 { - info!(target: "sync::stages::headers", progress = %format!("{:.2}%", (index as f64 / total_headers as f64) * 100.0), "Writing headers"); - } + log_progress!("sync::stages::headers", index, total, last_log, "Writing headers"); let sealed_header: SealedHeader = bincode::deserialize::>(&header_buf) @@ -147,7 +147,7 @@ where writer.append_header(&header, td, &header_hash)?; } - info!(target: "sync::stages::headers", total = total_headers, "Writing headers hash index"); + info!(target: "sync::stages::headers", total, "Writing headers hash index"); let mut cursor_header_numbers = provider.tx_ref().cursor_write::>()?; @@ -168,9 +168,13 @@ where for (index, hash_to_number) in self.hash_collector.iter()?.enumerate() { let (hash, number) = hash_to_number?; - if index > 0 && index % interval == 0 && total_headers > 100 { - info!(target: "sync::stages::headers", progress = %format!("{:.2}%", (index as f64 / total_headers as f64) * 100.0), "Writing headers hash index"); - } + log_progress!( + "sync::stages::headers", + index, + total, + last_log, + "Writing headers hash index" + ); if first_sync { cursor_header_numbers.append( diff --git a/crates/stages/stages/src/stages/tx_lookup.rs b/crates/stages/stages/src/stages/tx_lookup.rs index 60c958abf..c5ccff54c 100644 --- a/crates/stages/stages/src/stages/tx_lookup.rs +++ b/crates/stages/stages/src/stages/tx_lookup.rs @@ -1,3 +1,4 @@ +use super::utils::LOG_INTERVAL; use alloy_primitives::{TxHash, TxNumber}; use num_traits::Zero; use reth_config::config::{EtlConfig, TransactionLookupConfig}; @@ -17,6 +18,7 @@ use reth_stages_api::{ UnwindInput, UnwindOutput, }; use reth_storage_errors::provider::ProviderError; +use std::time::Instant; use tracing::*; /// The 
transaction lookup stage. @@ -147,16 +149,18 @@ where .cursor_write::>()?; let total_hashes = hash_collector.len(); - let interval = (total_hashes / 10).max(1); + let mut last_log = Instant::now(); for (index, hash_to_number) in hash_collector.iter()?.enumerate() { let (hash, number) = hash_to_number?; - if index > 0 && index % interval == 0 { + let now = Instant::now(); + if now.duration_since(last_log) >= LOG_INTERVAL { info!( target: "sync::stages::transaction_lookup", ?append_only, progress = %format!("{:.2}%", (index as f64 / total_hashes as f64) * 100.0), "Inserting hashes" ); + last_log = now; } let key = RawKey::::from_vec(hash); diff --git a/crates/stages/stages/src/stages/utils.rs b/crates/stages/stages/src/stages/utils.rs index caf039fac..af6594cd7 100644 --- a/crates/stages/stages/src/stages/utils.rs +++ b/crates/stages/stages/src/stages/utils.rs @@ -12,12 +12,36 @@ use reth_db_api::{ use reth_etl::Collector; use reth_provider::DBProvider; use reth_stages_api::StageError; -use std::{collections::HashMap, hash::Hash, ops::RangeBounds}; +use std::{ + collections::HashMap, + hash::Hash, + ops::RangeBounds, + time::{Duration, Instant}, +}; use tracing::info; /// Number of blocks before pushing indices from cache to [`Collector`] const DEFAULT_CACHE_THRESHOLD: u64 = 100_000; +/// Log interval for progress. +pub(crate) const LOG_INTERVAL: Duration = Duration::from_secs(5); + +/// Log progress at a regular interval. +#[macro_export] +macro_rules! log_progress { + ($target:expr, $index:expr, $total:expr, $last_log:expr, $message:expr) => { + let now = std::time::Instant::now(); + if now.duration_since($last_log) >= $crate::stages::utils::LOG_INTERVAL { + info!( + target: $target, + progress = %format!("{:.2}%", ($index as f64 / $total as f64) * 100.0), + $message + ); + $last_log = now; + } + } +} + /// Collects all history (`H`) indices for a range of changesets (`CS`) and stores them in a /// [`Collector`]. /// @@ -65,18 +89,16 @@ where }; // observability - let total_changesets = provider.tx_ref().entries::()?; - let interval = (total_changesets / 1000).max(1); + let total = provider.tx_ref().entries::()?; + let mut last_log = Instant::now(); let mut flush_counter = 0; let mut current_block_number = u64::MAX; - for (idx, entry) in changeset_cursor.walk_range(range)?.enumerate() { + for (index, entry) in changeset_cursor.walk_range(range)?.enumerate() { let (block_number, key) = partial_key_factory(entry?); cache.entry(key).or_default().push(block_number); - if idx > 0 && idx % interval == 0 && total_changesets > 1000 { - info!(target: "sync::stages::index_history", progress = %format!("{:.4}%", (idx as f64 / total_changesets as f64) * 100.0), "Collecting indices"); - } + log_progress!("sync::stages::index_history", index, total, last_log, "Collecting indices"); // Make sure we only flush the cache every DEFAULT_CACHE_THRESHOLD blocks. 
if current_block_number != block_number { @@ -120,17 +142,15 @@ where let mut current_list = Vec::::new(); // observability - let total_entries = collector.len(); - let interval = (total_entries / 100).max(1); + let total = collector.len(); + let mut last_log = Instant::now(); for (index, element) in collector.iter()?.enumerate() { let (k, v) = element?; let sharded_key = decode_key(k)?; let new_list = BlockNumberList::decompress_owned(v)?; - if index > 0 && index % interval == 0 && total_entries > 100 { - info!(target: "sync::stages::index_history", progress = %format!("{:.2}%", (index as f64 / total_entries as f64) * 100.0), "Writing indices"); - } + log_progress!("sync::stages::index_history", index, total, last_log, "Writing indices"); // AccountsHistory: `Address`. // StorageHistory: `Address.StorageKey`. From 86c6ba5d8da703cdd948f03f18a8ca2de645322a Mon Sep 17 00:00:00 2001 From: Delweng Date: Sat, 12 Oct 2024 23:18:43 +0800 Subject: [PATCH 024/425] chore(forkid): simplify and add comment of the set_head_priv (#11686) Signed-off-by: jsvisa --- crates/ethereum-forks/src/forkid.rs | 34 +++++++++++------------------ 1 file changed, 13 insertions(+), 21 deletions(-) diff --git a/crates/ethereum-forks/src/forkid.rs b/crates/ethereum-forks/src/forkid.rs index 8a344e2dd..6876d0eb9 100644 --- a/crates/ethereum-forks/src/forkid.rs +++ b/crates/ethereum-forks/src/forkid.rs @@ -258,32 +258,24 @@ impl ForkFilter { } fn set_head_priv(&mut self, head: Head) -> Option { - let recompute_cache = { - let head_in_past = match self.cache.epoch_start { - ForkFilterKey::Block(epoch_start_block) => head.number < epoch_start_block, - ForkFilterKey::Time(epoch_start_time) => head.timestamp < epoch_start_time, - }; - let head_in_future = match self.cache.epoch_end { - Some(ForkFilterKey::Block(epoch_end_block)) => head.number >= epoch_end_block, - Some(ForkFilterKey::Time(epoch_end_time)) => head.timestamp >= epoch_end_time, - None => false, - }; - - head_in_past || head_in_future + let head_in_past = match self.cache.epoch_start { + ForkFilterKey::Block(epoch_start_block) => head.number < epoch_start_block, + ForkFilterKey::Time(epoch_start_time) => head.timestamp < epoch_start_time, }; - - // recompute the cache - let transition = if recompute_cache { - let past = self.current(); - self.cache = Cache::compute_cache(&self.forks, head); - Some(ForkTransition { current: self.current(), past }) - } else { - None + let head_in_future = match self.cache.epoch_end { + Some(ForkFilterKey::Block(epoch_end_block)) => head.number >= epoch_end_block, + Some(ForkFilterKey::Time(epoch_end_time)) => head.timestamp >= epoch_end_time, + None => false, }; self.head = head; - transition + // Recompute the cache if the head is in the past or future epoch. + (head_in_past || head_in_future).then(|| { + let past = self.current(); + self.cache = Cache::compute_cache(&self.forks, head); + ForkTransition { current: self.current(), past } + }) } /// Set the current head. 
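The simplification above leans on `bool::then`, which collapses an `if cond { Some(value) } else { None }` branch into a single expression. A minimal standalone sketch of the same head-in-past / head-in-future check (the names and types here are illustrative placeholders, not the reth ones):

fn maybe_recompute(epoch_start: u64, epoch_end: Option<u64>, head: u64) -> Option<u64> {
    let head_in_past = head < epoch_start;
    let head_in_future = epoch_end.is_some_and(|end| head >= end);
    // Recompute only when the head has left the cached epoch; otherwise keep the cache.
    (head_in_past || head_in_future).then(|| head)
}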
From c47621754fc4fe1b97639d5c479be65a66dc11d7 Mon Sep 17 00:00:00 2001 From: Kien Trinh <51135161+kien6034@users.noreply.github.com> Date: Sat, 12 Oct 2024 22:45:47 +0700 Subject: [PATCH 025/425] feat: add helper function to modify node builder (#11682) --- crates/node/builder/src/builder/mod.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/crates/node/builder/src/builder/mod.rs b/crates/node/builder/src/builder/mod.rs index 8b57781ea..15bdc730a 100644 --- a/crates/node/builder/src/builder/mod.rs +++ b/crates/node/builder/src/builder/mod.rs @@ -162,6 +162,14 @@ impl NodeBuilder<(), ChainSpec> { pub const fn new(config: NodeConfig) -> Self { Self { config, database: () } } + + /// Apply a function to the builder + pub fn apply(self, f: F) -> Self + where + F: FnOnce(Self) -> Self, + { + f(self) + } } impl NodeBuilder { @@ -400,6 +408,14 @@ where &self.builder.config } + /// Apply a function to the builder + pub fn apply(self, f: F) -> Self + where + F: FnOnce(Self) -> Self, + { + f(self) + } + /// Sets the hook that is run once the node's components are initialized. pub fn on_component_initialized(self, hook: F) -> Self where From 53bd6872dbbac4b938140b1320902610523114bb Mon Sep 17 00:00:00 2001 From: greged93 <82421016+greged93@users.noreply.github.com> Date: Sat, 12 Oct 2024 17:57:25 +0200 Subject: [PATCH 026/425] feat(op): opchainspec builder (#11630) Co-authored-by: Matthias Seitz --- crates/chainspec/src/spec.rs | 67 ++-------- crates/optimism/chainspec/src/lib.rs | 165 ++++++++++++++++++++++-- crates/optimism/evm/src/config.rs | 15 ++- crates/optimism/evm/src/execute.rs | 19 +-- crates/optimism/node/tests/e2e/utils.rs | 14 +- 5 files changed, 179 insertions(+), 101 deletions(-) diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index d6ca92aa2..f80a20924 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -755,13 +755,19 @@ impl ChainSpecBuilder { } /// Add the given fork with the given activation condition to the spec. - pub fn with_fork(mut self, fork: EthereumHardfork, condition: ForkCondition) -> Self { + pub fn with_fork(mut self, fork: H, condition: ForkCondition) -> Self { self.hardforks.insert(fork, condition); self } + /// Add the given chain hardforks to the spec. + pub fn with_forks(mut self, forks: ChainHardforks) -> Self { + self.hardforks = forks; + self + } + /// Remove the given fork from the spec. 
- pub fn without_fork(mut self, fork: EthereumHardfork) -> Self { + pub fn without_fork(mut self, fork: H) -> Self { self.hardforks.remove(fork); self } @@ -876,63 +882,6 @@ impl ChainSpecBuilder { self } - /// Enable Bedrock at genesis - #[cfg(feature = "optimism")] - pub fn bedrock_activated(mut self) -> Self { - self = self.paris_activated(); - self.hardforks - .insert(reth_optimism_forks::OptimismHardfork::Bedrock, ForkCondition::Block(0)); - self - } - - /// Enable Regolith at genesis - #[cfg(feature = "optimism")] - pub fn regolith_activated(mut self) -> Self { - self = self.bedrock_activated(); - self.hardforks - .insert(reth_optimism_forks::OptimismHardfork::Regolith, ForkCondition::Timestamp(0)); - self - } - - /// Enable Canyon at genesis - #[cfg(feature = "optimism")] - pub fn canyon_activated(mut self) -> Self { - self = self.regolith_activated(); - // Canyon also activates changes from L1's Shanghai hardfork - self.hardforks.insert(EthereumHardfork::Shanghai, ForkCondition::Timestamp(0)); - self.hardforks - .insert(reth_optimism_forks::OptimismHardfork::Canyon, ForkCondition::Timestamp(0)); - self - } - - /// Enable Ecotone at genesis - #[cfg(feature = "optimism")] - pub fn ecotone_activated(mut self) -> Self { - self = self.canyon_activated(); - self.hardforks.insert(EthereumHardfork::Cancun, ForkCondition::Timestamp(0)); - self.hardforks - .insert(reth_optimism_forks::OptimismHardfork::Ecotone, ForkCondition::Timestamp(0)); - self - } - - /// Enable Fjord at genesis - #[cfg(feature = "optimism")] - pub fn fjord_activated(mut self) -> Self { - self = self.ecotone_activated(); - self.hardforks - .insert(reth_optimism_forks::OptimismHardfork::Fjord, ForkCondition::Timestamp(0)); - self - } - - /// Enable Granite at genesis - #[cfg(feature = "optimism")] - pub fn granite_activated(mut self) -> Self { - self = self.fjord_activated(); - self.hardforks - .insert(reth_optimism_forks::OptimismHardfork::Granite, ForkCondition::Timestamp(0)); - self - } - /// Build the resulting [`ChainSpec`]. /// /// # Panics diff --git a/crates/optimism/chainspec/src/lib.rs b/crates/optimism/chainspec/src/lib.rs index 9b2d1c8c1..5ebc18f67 100644 --- a/crates/optimism/chainspec/src/lib.rs +++ b/crates/optimism/chainspec/src/lib.rs @@ -16,25 +16,154 @@ mod dev; mod op; mod op_sepolia; -use std::fmt::Display; - +use alloy_chains::Chain; use alloy_genesis::Genesis; use alloy_primitives::{Parity, Signature, B256, U256}; pub use base::BASE_MAINNET; pub use base_sepolia::BASE_SEPOLIA; +use derive_more::{Constructor, Deref, From, Into}; pub use dev::OP_DEV; +use once_cell::sync::OnceCell; pub use op::OP_MAINNET; pub use op_sepolia::OP_SEPOLIA; - -use derive_more::{Constructor, Deref, Into}; -use once_cell::sync::OnceCell; use reth_chainspec::{ - BaseFeeParams, BaseFeeParamsKind, ChainSpec, DepositContract, EthChainSpec, EthereumHardforks, - ForkFilter, ForkId, Hardforks, Head, + BaseFeeParams, BaseFeeParamsKind, ChainSpec, ChainSpecBuilder, DepositContract, EthChainSpec, + EthereumHardforks, ForkFilter, ForkId, Hardforks, Head, }; -use reth_ethereum_forks::{ChainHardforks, EthereumHardfork, ForkCondition}; +use reth_ethereum_forks::{ChainHardforks, EthereumHardfork, ForkCondition, Hardfork}; use reth_network_peers::NodeRecord; use reth_primitives_traits::Header; +use std::fmt::Display; + +/// Chain spec builder for a OP stack chain. 
+#[derive(Debug, Default, From)] +pub struct OpChainSpecBuilder { + /// [`ChainSpecBuilder`] + inner: ChainSpecBuilder, +} + +impl OpChainSpecBuilder { + /// Construct a new builder from the base mainnet chain spec. + pub fn base_mainnet() -> Self { + let mut inner = ChainSpecBuilder::default() + .chain(BASE_MAINNET.chain) + .genesis(BASE_MAINNET.genesis.clone()); + let forks = BASE_MAINNET.hardforks.clone(); + inner = inner.with_forks(forks); + + Self { inner } + } + + /// Construct a new builder from the optimism mainnet chain spec. + pub fn optimism_mainnet() -> Self { + let mut inner = + ChainSpecBuilder::default().chain(OP_MAINNET.chain).genesis(OP_MAINNET.genesis.clone()); + let forks = OP_MAINNET.hardforks.clone(); + inner = inner.with_forks(forks); + + Self { inner } + } +} + +impl OpChainSpecBuilder { + /// Set the chain ID + pub fn chain(mut self, chain: Chain) -> Self { + self.inner = self.inner.chain(chain); + self + } + + /// Set the genesis block. + pub fn genesis(mut self, genesis: Genesis) -> Self { + self.inner = self.inner.genesis(genesis); + self + } + + /// Add the given fork with the given activation condition to the spec. + pub fn with_fork(mut self, fork: H, condition: ForkCondition) -> Self { + self.inner = self.inner.with_fork(fork, condition); + self + } + + /// Add the given forks with the given activation condition to the spec. + pub fn with_forks(mut self, forks: ChainHardforks) -> Self { + self.inner = self.inner.with_forks(forks); + self + } + + /// Remove the given fork from the spec. + pub fn without_fork(mut self, fork: reth_optimism_forks::OptimismHardfork) -> Self { + self.inner = self.inner.without_fork(fork); + self + } + + /// Enable Bedrock at genesis + pub fn bedrock_activated(mut self) -> Self { + self.inner = self.inner.paris_activated(); + self.inner = self + .inner + .with_fork(reth_optimism_forks::OptimismHardfork::Bedrock, ForkCondition::Block(0)); + self + } + + /// Enable Regolith at genesis + pub fn regolith_activated(mut self) -> Self { + self = self.bedrock_activated(); + self.inner = self.inner.with_fork( + reth_optimism_forks::OptimismHardfork::Regolith, + ForkCondition::Timestamp(0), + ); + self + } + + /// Enable Canyon at genesis + pub fn canyon_activated(mut self) -> Self { + self = self.regolith_activated(); + // Canyon also activates changes from L1's Shanghai hardfork + self.inner = self.inner.with_fork(EthereumHardfork::Shanghai, ForkCondition::Timestamp(0)); + self.inner = self + .inner + .with_fork(reth_optimism_forks::OptimismHardfork::Canyon, ForkCondition::Timestamp(0)); + self + } + + /// Enable Ecotone at genesis + pub fn ecotone_activated(mut self) -> Self { + self = self.canyon_activated(); + self.inner = self.inner.with_fork(EthereumHardfork::Cancun, ForkCondition::Timestamp(0)); + self.inner = self + .inner + .with_fork(reth_optimism_forks::OptimismHardfork::Ecotone, ForkCondition::Timestamp(0)); + self + } + + /// Enable Fjord at genesis + pub fn fjord_activated(mut self) -> Self { + self = self.ecotone_activated(); + self.inner = self + .inner + .with_fork(reth_optimism_forks::OptimismHardfork::Fjord, ForkCondition::Timestamp(0)); + self + } + + /// Enable Granite at genesis + pub fn granite_activated(mut self) -> Self { + self = self.fjord_activated(); + self.inner = self + .inner + .with_fork(reth_optimism_forks::OptimismHardfork::Granite, ForkCondition::Timestamp(0)); + self + } + + /// Build the resulting [`OpChainSpec`]. 
+ /// + /// # Panics + /// + /// This function panics if the chain ID and genesis is not set ([`Self::chain`] and + /// [`Self::genesis`]) + pub fn build(self) -> OpChainSpec { + OpChainSpec { inner: self.inner.build() } + } +} /// OP stack chain spec type. #[derive(Debug, Clone, Deref, Into, Constructor, PartialEq, Eq)] @@ -286,6 +415,8 @@ mod tests { #[test] fn base_mainnet_forkids() { + let base_mainnet = OpChainSpecBuilder::base_mainnet().build(); + let _ = base_mainnet.genesis_hash.set(BASE_MAINNET.genesis_hash.get().copied().unwrap()); test_fork_ids( &BASE_MAINNET, &[ @@ -372,8 +503,12 @@ mod tests { #[test] fn op_mainnet_forkids() { + let op_mainnet = OpChainSpecBuilder::optimism_mainnet().build(); + // for OP mainnet we have to do this because the genesis header can't be properly computed + // from the genesis.json file + let _ = op_mainnet.genesis_hash.set(OP_MAINNET.genesis_hash()); test_fork_ids( - &OP_MAINNET, + &op_mainnet, &[ ( Head { number: 0, ..Default::default() }, @@ -483,9 +618,19 @@ mod tests { ) } + #[test] + fn latest_base_mainnet_fork_id_with_builder() { + let base_mainnet = OpChainSpecBuilder::base_mainnet().build(); + assert_eq!( + ForkId { hash: ForkHash([0xbc, 0x38, 0xf9, 0xca]), next: 0 }, + base_mainnet.latest_fork_id() + ) + } + #[test] fn is_bedrock_active() { - assert!(!OP_MAINNET.is_bedrock_active_at_block(1)) + let op_mainnet = OpChainSpecBuilder::optimism_mainnet().build(); + assert!(!op_mainnet.is_bedrock_active_at_block(1)) } #[test] diff --git a/crates/optimism/evm/src/config.rs b/crates/optimism/evm/src/config.rs index 95d2c9a66..668fcba4d 100644 --- a/crates/optimism/evm/src/config.rs +++ b/crates/optimism/evm/src/config.rs @@ -1,5 +1,5 @@ -use reth_chainspec::ChainSpec; use reth_ethereum_forks::{EthereumHardfork, Head}; +use reth_optimism_chainspec::OpChainSpec; use reth_optimism_forks::OptimismHardfork; /// Returns the revm [`SpecId`](revm_primitives::SpecId) at the given timestamp. @@ -9,7 +9,7 @@ use reth_optimism_forks::OptimismHardfork; /// This is only intended to be used after the Bedrock, when hardforks are activated by /// timestamp. pub fn revm_spec_by_timestamp_after_bedrock( - chain_spec: &ChainSpec, + chain_spec: &OpChainSpec, timestamp: u64, ) -> revm_primitives::SpecId { if chain_spec.fork(OptimismHardfork::Granite).active_at_timestamp(timestamp) { @@ -28,7 +28,7 @@ pub fn revm_spec_by_timestamp_after_bedrock( } /// Map the latest active hardfork at the given block to a revm [`SpecId`](revm_primitives::SpecId). 
-pub fn revm_spec(chain_spec: &ChainSpec, block: &Head) -> revm_primitives::SpecId { +pub fn revm_spec(chain_spec: &OpChainSpec, block: &Head) -> revm_primitives::SpecId { if chain_spec.fork(OptimismHardfork::Granite).active_at_head(block) { revm_primitives::GRANITE } else if chain_spec.fork(OptimismHardfork::Fjord).active_at_head(block) { @@ -79,12 +79,13 @@ pub fn revm_spec(chain_spec: &ChainSpec, block: &Head) -> revm_primitives::SpecI mod tests { use super::*; use reth_chainspec::ChainSpecBuilder; + use reth_optimism_chainspec::{OpChainSpec, OpChainSpecBuilder}; #[test] fn test_revm_spec_by_timestamp_after_merge() { #[inline(always)] - fn op_cs(f: impl FnOnce(ChainSpecBuilder) -> ChainSpecBuilder) -> ChainSpec { - let cs = ChainSpecBuilder::mainnet().chain(reth_chainspec::Chain::from_id(10)); + fn op_cs(f: impl FnOnce(OpChainSpecBuilder) -> OpChainSpecBuilder) -> OpChainSpec { + let cs = ChainSpecBuilder::mainnet().chain(reth_chainspec::Chain::from_id(10)).into(); f(cs).build() } assert_eq!( @@ -116,8 +117,8 @@ mod tests { #[test] fn test_to_revm_spec() { #[inline(always)] - fn op_cs(f: impl FnOnce(ChainSpecBuilder) -> ChainSpecBuilder) -> ChainSpec { - let cs = ChainSpecBuilder::mainnet().chain(reth_chainspec::Chain::from_id(10)); + fn op_cs(f: impl FnOnce(OpChainSpecBuilder) -> OpChainSpecBuilder) -> OpChainSpec { + let cs = ChainSpecBuilder::mainnet().chain(reth_chainspec::Chain::from_id(10)).into(); f(cs).build() } assert_eq!( diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index 2491e99b5..f4abb8c88 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -513,8 +513,8 @@ mod tests { use crate::OpChainSpec; use alloy_consensus::TxEip1559; use alloy_primitives::{b256, Address, StorageKey, StorageValue}; - use reth_chainspec::{ChainSpecBuilder, MIN_TRANSACTION_GAS}; - use reth_optimism_chainspec::{optimism_deposit_tx_signature, BASE_MAINNET}; + use reth_chainspec::MIN_TRANSACTION_GAS; + use reth_optimism_chainspec::{optimism_deposit_tx_signature, OpChainSpecBuilder}; use reth_primitives::{Account, Block, BlockBody, Signature, Transaction, TransactionSigned}; use reth_revm::{ database::StateProviderDatabase, test_utils::StateProviderTest, L1_BLOCK_CONTRACT, @@ -548,8 +548,7 @@ mod tests { db } - fn executor_provider(chain_spec: Arc) -> OpExecutorProvider { - let chain_spec = Arc::new(OpChainSpec::new(Arc::unwrap_or_clone(chain_spec))); + fn executor_provider(chain_spec: Arc) -> OpExecutorProvider { OpExecutorProvider { evm_config: OptimismEvmConfig::new(chain_spec.clone()), chain_spec } } @@ -572,11 +571,7 @@ mod tests { let account = Account { balance: U256::MAX, ..Account::default() }; db.insert_account(addr, account, None, HashMap::default()); - let chain_spec = Arc::new( - ChainSpecBuilder::from(&Arc::new(BASE_MAINNET.inner.clone())) - .regolith_activated() - .build(), - ); + let chain_spec = Arc::new(OpChainSpecBuilder::base_mainnet().regolith_activated().build()); let tx = TransactionSigned::from_transaction_and_signature( Transaction::Eip1559(TxEip1559 { @@ -656,11 +651,7 @@ mod tests { db.insert_account(addr, account, None, HashMap::default()); - let chain_spec = Arc::new( - ChainSpecBuilder::from(&Arc::new(BASE_MAINNET.inner.clone())) - .canyon_activated() - .build(), - ); + let chain_spec = Arc::new(OpChainSpecBuilder::base_mainnet().canyon_activated().build()); let tx = TransactionSigned::from_transaction_and_signature( Transaction::Eip1559(TxEip1559 { diff --git 
a/crates/optimism/node/tests/e2e/utils.rs b/crates/optimism/node/tests/e2e/utils.rs index 1e9ffa652..863bf254e 100644 --- a/crates/optimism/node/tests/e2e/utils.rs +++ b/crates/optimism/node/tests/e2e/utils.rs @@ -1,15 +1,13 @@ -use std::sync::Arc; - use alloy_genesis::Genesis; use alloy_primitives::{Address, B256}; use reth::{rpc::types::engine::PayloadAttributes, tasks::TaskManager}; -use reth_chainspec::ChainSpecBuilder; use reth_e2e_test_utils::{transaction::TransactionTestContext, wallet::Wallet, NodeHelperType}; -use reth_optimism_chainspec::{OpChainSpec, BASE_MAINNET}; +use reth_optimism_chainspec::OpChainSpecBuilder; use reth_optimism_node::{ node::OptimismAddOns, OptimismBuiltPayload, OptimismNode, OptimismPayloadBuilderAttributes, }; use reth_payload_builder::EthPayloadBuilderAttributes; +use std::sync::Arc; use tokio::sync::Mutex; /// Optimism Node Helper type @@ -19,13 +17,7 @@ pub(crate) async fn setup(num_nodes: usize) -> eyre::Result<(Vec, TaskMa let genesis: Genesis = serde_json::from_str(include_str!("../assets/genesis.json")).unwrap(); reth_e2e_test_utils::setup( num_nodes, - Arc::new(OpChainSpec::new( - ChainSpecBuilder::default() - .chain(BASE_MAINNET.chain) - .genesis(genesis) - .ecotone_activated() - .build(), - )), + Arc::new(OpChainSpecBuilder::base_mainnet().genesis(genesis).ecotone_activated().build()), false, ) .await From 67c57250770ed3ce5c75b948b8ec4921f9b0be6d Mon Sep 17 00:00:00 2001 From: Delweng Date: Sun, 13 Oct 2024 02:53:33 +0800 Subject: [PATCH 027/425] chore(clippy): fix the very complex type used (#11689) Signed-off-by: jsvisa Co-authored-by: Matthias Seitz --- crates/rpc/rpc-eth-api/src/helpers/block.rs | 9 +++++++-- crates/rpc/rpc-eth-api/src/helpers/call.rs | 6 ++++-- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/crates/rpc/rpc-eth-api/src/helpers/block.rs b/crates/rpc/rpc-eth-api/src/helpers/block.rs index d7e081d80..898037f4a 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/block.rs @@ -13,6 +13,11 @@ use crate::{FromEthApiError, FullEthApiTypes, RpcBlock, RpcReceipt}; use super::{LoadPendingBlock, LoadReceipt, SpawnBlocking}; +/// Result type of the fetched block receipts. +pub type BlockReceiptsResult = Result>>, E>; +/// Result type of the fetched block and its receipts. +pub type BlockAndReceiptsResult = Result>)>, E>; + /// Block related functions for the [`EthApiServer`](crate::EthApiServer) trait in the /// `eth_` namespace. pub trait EthBlocks: LoadBlock { @@ -108,7 +113,7 @@ pub trait EthBlocks: LoadBlock { fn block_receipts( &self, block_id: BlockId, - ) -> impl Future>>, Self::Error>> + Send + ) -> impl Future> + Send where Self: LoadReceipt; @@ -116,7 +121,7 @@ pub trait EthBlocks: LoadBlock { fn load_block_and_receipts( &self, block_id: BlockId, - ) -> impl Future>)>, Self::Error>> + Send + ) -> impl Future> + Send where Self: LoadReceipt, { diff --git a/crates/rpc/rpc-eth-api/src/helpers/call.rs b/crates/rpc/rpc-eth-api/src/helpers/call.rs index 0aa38a36e..5f6f3d77f 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/call.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/call.rs @@ -41,6 +41,9 @@ use tracing::trace; use super::{LoadBlock, LoadPendingBlock, LoadState, LoadTransaction, SpawnBlocking, Trace}; +/// Result type for `eth_simulateV1` RPC method. +pub type SimulatedBlocksResult = Result>>, E>; + /// Execution related functions for the [`EthApiServer`](crate::EthApiServer) trait in /// the `eth_` namespace. 
pub trait EthCall: Call + LoadPendingBlock { @@ -62,8 +65,7 @@ pub trait EthCall: Call + LoadPendingBlock { &self, payload: SimulatePayload, block: Option, - ) -> impl Future>>, Self::Error>> - + Send + ) -> impl Future> + Send where Self: LoadBlock + FullEthApiTypes, { From c03399d1eb2f6a54c751661dadf8fa8fa4197ad7 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Sun, 13 Oct 2024 11:25:56 +0200 Subject: [PATCH 028/425] chore(ci): unpin clippy (#11697) --- .github/workflows/lint.yml | 2 -- crates/storage/provider/src/providers/blockchain_provider.rs | 5 +++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index fd71a8c63..efa38857e 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -30,7 +30,6 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@clippy with: - toolchain: nightly-2024-09-25 components: clippy - uses: Swatinem/rust-cache@v2 with: @@ -52,7 +51,6 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@nightly with: - toolchain: nightly-2024-09-25 components: clippy - uses: Swatinem/rust-cache@v2 with: diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs b/crates/storage/provider/src/providers/blockchain_provider.rs index dc4264210..9b88cab13 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -197,8 +197,9 @@ impl BlockchainProvider2 { for (_, block_body) in block_bodies { let mut block_receipts = Vec::with_capacity(block_body.tx_count as usize); for tx_num in block_body.tx_num_range() { - let receipt = - receipt_iter.next().ok_or(ProviderError::ReceiptNotFound(tx_num.into()))?; + let receipt = receipt_iter + .next() + .ok_or_else(|| ProviderError::ReceiptNotFound(tx_num.into()))?; block_receipts.push(Some(receipt)); } receipts.push(block_receipts); From ffb78b3dc756c15067f25329855b15aca000aee3 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Sun, 13 Oct 2024 12:58:55 +0200 Subject: [PATCH 029/425] refactor(tree): small refac for `BlockBuffer` (#11691) --- crates/blockchain-tree/src/block_buffer.rs | 30 ++++++++++++---------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/crates/blockchain-tree/src/block_buffer.rs b/crates/blockchain-tree/src/block_buffer.rs index e116463e4..5d4ca2705 100644 --- a/crates/blockchain-tree/src/block_buffer.rs +++ b/crates/blockchain-tree/src/block_buffer.rs @@ -2,7 +2,7 @@ use crate::metrics::BlockBufferMetrics; use alloy_primitives::{BlockHash, BlockNumber}; use reth_network::cache::LruCache; use reth_primitives::SealedBlockWithSenders; -use std::collections::{btree_map, hash_map, BTreeMap, HashMap, HashSet}; +use std::collections::{BTreeMap, HashMap, HashSet}; /// Contains the tree of pending blocks that cannot be executed due to missing parent. /// It allows to store unconnected blocks for potential future inclusion. @@ -83,6 +83,7 @@ impl BlockBuffer { } self.metrics.blocks.set(self.blocks.len() as f64); } + /// Removes the given block from the buffer and also all the children of the block. /// /// This is used to get all the blocks that are dependent on the block that is included. 
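// Editor's aside, not part of the diff: a minimal sketch of the map-cleanup pattern the
// hunks below switch to — look up the per-parent set with `get_mut`, remove the single
// element, and drop the whole map entry once the set is empty, instead of going through
// the `Entry` API. The types and the function name are illustrative, not the actual
// `BlockBuffer` fields.
use std::collections::{HashMap, HashSet};

fn remove_child(map: &mut HashMap<u64, HashSet<u64>>, parent: u64, child: &u64) {
    if let Some(children) = map.get_mut(&parent) {
        children.remove(child);
        // Once the last child is gone, the parent key carries no information; drop it.
        if children.is_empty() {
            map.remove(&parent);
        }
    }
}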
@@ -93,10 +94,11 @@ impl BlockBuffer { &mut self, parent_hash: &BlockHash, ) -> Vec { - // remove parent block if present - let mut removed = self.remove_block(parent_hash).into_iter().collect::>(); - - removed.extend(self.remove_children(vec![*parent_hash])); + let removed = self + .remove_block(parent_hash) + .into_iter() + .chain(self.remove_children(vec![*parent_hash])) + .collect(); self.metrics.blocks.set(self.blocks.len() as f64); removed } @@ -126,10 +128,10 @@ impl BlockBuffer { /// Remove block entry fn remove_from_earliest_blocks(&mut self, number: BlockNumber, hash: &BlockHash) { - if let btree_map::Entry::Occupied(mut entry) = self.earliest_blocks.entry(number) { - entry.get_mut().remove(hash); - if entry.get().is_empty() { - entry.remove(); + if let Some(entry) = self.earliest_blocks.get_mut(&number) { + entry.remove(hash); + if entry.is_empty() { + self.earliest_blocks.remove(&number); } } } @@ -137,13 +139,13 @@ impl BlockBuffer { /// Remove from parent child connection. This method does not remove children. fn remove_from_parent(&mut self, parent_hash: BlockHash, hash: &BlockHash) { // remove from parent to child connection, but only for this block parent. - if let hash_map::Entry::Occupied(mut entry) = self.parent_to_child.entry(parent_hash) { - entry.get_mut().remove(hash); + if let Some(entry) = self.parent_to_child.get_mut(&parent_hash) { + entry.remove(hash); // if set is empty remove block entry. - if entry.get().is_empty() { - entry.remove(); + if entry.is_empty() { + self.parent_to_child.remove(&parent_hash); } - }; + } } /// Removes block from inner collections. From 6d8d327129a75f758e207ff15ffa5b6937d87129 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 13 Oct 2024 16:32:52 +0200 Subject: [PATCH 030/425] chore: set request budget to 2 (#11699) Co-authored-by: Oliver --- crates/net/network/src/budget.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/net/network/src/budget.rs b/crates/net/network/src/budget.rs index 6eba076f6..5f5d88861 100644 --- a/crates/net/network/src/budget.rs +++ b/crates/net/network/src/budget.rs @@ -5,8 +5,8 @@ pub const DEFAULT_BUDGET_TRY_DRAIN_STREAM: u32 = 10; /// Default budget to try and drain headers and bodies download streams. /// -/// Default is 1 iteration. -pub const DEFAULT_BUDGET_TRY_DRAIN_DOWNLOADERS: u32 = 1; +/// Default is 2 iterations. +pub const DEFAULT_BUDGET_TRY_DRAIN_DOWNLOADERS: u32 = 2; /// Default budget to try and drain [`Swarm`](crate::swarm::Swarm). 
/// From 661b260f6172c047e8530e2331b2c84141e03c2b Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 13 Oct 2024 14:39:44 +0000 Subject: [PATCH 031/425] chore(deps): weekly `cargo update` (#11696) Co-authored-by: github-merge-queue <118344674+github-merge-queue@users.noreply.github.com> --- Cargo.lock | 352 ++++++++++++++++++++++++++--------------------------- 1 file changed, 170 insertions(+), 182 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 22e1360a2..538816c7c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "addr2line" -version = "0.24.1" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] @@ -97,10 +97,11 @@ checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "alloy-chains" -version = "0.1.34" +version = "0.1.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8158b4878c67837e5413721cc44298e6a2d88d39203175ea025e51892a16ba4c" +checksum = "156bfc5dcd52ef9a5f33381701fa03310317e14c65093a9430d3e3557b08dcd3" dependencies = [ + "alloy-primitives", "alloy-rlp", "arbitrary", "num_enum", @@ -129,9 +130,9 @@ dependencies = [ [[package]] name = "alloy-dyn-abi" -version = "0.8.5" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b499852e1d0e9b8c6db0f24c48998e647c0d5762a01090f955106a7700e4611" +checksum = "f95d76a38cae906fd394a5afb0736aaceee5432efe76addfd71048e623e208af" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -205,9 +206,9 @@ dependencies = [ [[package]] name = "alloy-json-abi" -version = "0.8.5" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a438d4486b5d525df3b3004188f9d5cd1d65cd30ecc41e5a3ccef6f6342e8af9" +checksum = "03c66eec1acdd96b39b995b8f5ee5239bc0c871d62c527ae1ac9fd1d7fecd455" dependencies = [ "alloy-primitives", "alloy-sol-type-parser", @@ -297,7 +298,7 @@ dependencies = [ "getrandom 0.2.15", "hashbrown 0.15.0", "hex-literal", - "indexmap 2.5.0", + "indexmap 2.6.0", "itoa", "k256", "keccak-asm", @@ -599,9 +600,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro" -version = "0.8.5" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68e7f6e8fe5b443f82b3f1e15abfa191128f71569148428e49449d01f6f49e8b" +checksum = "661c516eb1fa3294cc7f2fb8955b3b609d639c282ac81a4eedb14d3046db503a" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", @@ -613,14 +614,14 @@ dependencies = [ [[package]] name = "alloy-sol-macro-expander" -version = "0.8.5" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b96ce28d2fde09abb6135f410c41fad670a3a770b6776869bd852f1df102e6f" +checksum = "ecbabb8fc3d75a0c2cea5215be22e7a267e3efde835b0f2a8922f5e3f5d47683" dependencies = [ "alloy-sol-macro-input", "const-hex", "heck", - "indexmap 2.5.0", + "indexmap 2.6.0", "proc-macro-error2", "proc-macro2", "quote", @@ -631,9 +632,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro-input" -version = "0.8.5" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "906746396a8296537745711630d9185746c0b50c033d5e9d18b0a6eba3d53f90" +checksum = 
"16517f2af03064485150d89746b8ffdcdbc9b6eeb3d536fb66efd7c2846fbc75" dependencies = [ "const-hex", "dunce", @@ -646,9 +647,9 @@ dependencies = [ [[package]] name = "alloy-sol-type-parser" -version = "0.8.5" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc85178909a49c8827ffccfc9103a7ce1767ae66a801b69bdc326913870bf8e6" +checksum = "c07ebb0c1674ff8cbb08378d7c2e0e27919d2a2dae07ad3bca26174deda8d389" dependencies = [ "serde", "winnow", @@ -656,9 +657,9 @@ dependencies = [ [[package]] name = "alloy-sol-types" -version = "0.8.5" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d86a533ce22525969661b25dfe296c112d35eb6861f188fd284f8bd4bb3842ae" +checksum = "8e448d879903624863f608c552d10efb0e0905ddbee98b0049412799911eb062" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -1017,9 +1018,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.12" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fec134f64e2bc57411226dfc4e52dec859ddfc7e711fc5e07b612584f000e4aa" +checksum = "998282f8f49ccd6116b0ed8a4de0fbd3151697920e7c7533416d6e25e76434a7" dependencies = [ "brotli", "flate2", @@ -1204,26 +1205,6 @@ dependencies = [ "serde", ] -[[package]] -name = "bindgen" -version = "0.69.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" -dependencies = [ - "bitflags 2.6.0", - "cexpr", - "clang-sys", - "itertools 0.12.1", - "lazy_static", - "lazycell", - "proc-macro2", - "quote", - "regex", - "rustc-hash 1.1.0", - "shlex", - "syn 2.0.79", -] - [[package]] name = "bindgen" version = "0.70.1" @@ -1334,7 +1315,7 @@ dependencies = [ "bitflags 2.6.0", "boa_interner", "boa_macros", - "indexmap 2.5.0", + "indexmap 2.6.0", "num-bigint", "rustc-hash 2.0.0", ] @@ -1360,7 +1341,7 @@ dependencies = [ "fast-float", "hashbrown 0.14.5", "icu_normalizer", - "indexmap 2.5.0", + "indexmap 2.6.0", "intrusive-collections", "itertools 0.13.0", "num-bigint", @@ -1406,7 +1387,7 @@ dependencies = [ "boa_gc", "boa_macros", "hashbrown 0.14.5", - "indexmap 2.5.0", + "indexmap 2.6.0", "once_cell", "phf", "rustc-hash 2.0.0", @@ -1474,9 +1455,9 @@ dependencies = [ [[package]] name = "brotli" -version = "6.0.0" +version = "7.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b" +checksum = "cc97b8f16f944bba54f0433f07e30be199b6dc2bd25937444bbad560bcea29bd" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -1526,9 +1507,9 @@ dependencies = [ [[package]] name = "bytemuck_derive" -version = "1.7.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cc8b54b395f2fcfbb3d90c47b01c7f444d94d05bdeb775811dec868ac3bbc26" +checksum = "bcfcc3cd946cb52f0bbfdbbcfa2f4e24f75ebb6c0e1002f7c25904fada18b9ec" dependencies = [ "proc-macro2", "quote", @@ -1620,9 +1601,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.24" +version = "1.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812acba72f0a070b003d3697490d2b55b837230ae7c6c6497f05cc2ddbb8d938" +checksum = "b16803a61b81d9eabb7eae2588776c4c1e584b738ede45fdbb4c972cec1e9945" dependencies = [ "jobserver", "libc", @@ -1715,9 +1696,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.19" +version = "4.5.20" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7be5744db7978a28d9df86a214130d106a89ce49644cbc4e3f0c22c3fba30615" +checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8" dependencies = [ "clap_builder", "clap_derive", @@ -1725,9 +1706,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.19" +version = "4.5.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5fbc17d3ef8278f55b282b2a2e75ae6f6c7d4bb70ed3d0382375104bfafdb4b" +checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54" dependencies = [ "anstream", "anstyle", @@ -3140,9 +3121,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", @@ -3155,9 +3136,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", @@ -3165,15 +3146,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" dependencies = [ "futures-core", "futures-task", @@ -3182,9 +3163,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-lite" @@ -3203,9 +3184,9 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", @@ -3214,15 +3195,15 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] 
name = "futures-timer" @@ -3236,9 +3217,9 @@ dependencies = [ [[package]] name = "futures-util" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-channel", "futures-core", @@ -3318,9 +3299,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.31.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "glob" @@ -3397,7 +3378,7 @@ dependencies = [ "futures-core", "futures-sink", "http", - "indexmap 2.5.0", + "indexmap 2.6.0", "slab", "tokio", "tokio-util", @@ -3448,6 +3429,8 @@ version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" dependencies = [ + "allocator-api2", + "equivalent", "foldhash", "serde", ] @@ -3979,13 +3962,13 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" +checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" dependencies = [ "arbitrary", "equivalent", - "hashbrown 0.14.5", + "hashbrown 0.15.0", "serde", ] @@ -4002,7 +3985,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "232929e1d75fe899576a3d5c7416ad0d88dbfbb3c3d6aa00873a7408a50ddb88" dependencies = [ "ahash", - "indexmap 2.5.0", + "indexmap 2.6.0", "is-terminal", "itoa", "log", @@ -4100,15 +4083,15 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.10.0" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "187674a687eed5fe42285b40c6291f9a01517d415fad1c3cbc6a9f778af7fcd4" +checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" [[package]] name = "iri-string" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44bd7eced44cfe2cebc674adb2a7124a754a4b5269288d22e9f39f8fada3562d" +checksum = "dc0f0a572e8ffe56e2ff4f769f32ffe919282c3916799f8b68688b6030063bea" dependencies = [ "memchr", "serde", @@ -4140,15 +4123,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" -dependencies = [ - "either", -] - [[package]] name = "itertools" version = "0.13.0" @@ -4195,18 +4169,18 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.70" +version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a" +checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" dependencies = [ "wasm-bindgen", ] [[package]] name = "jsonrpsee" -version = "0.24.5" +version = "0.24.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126b48a5acc3c52fbd5381a77898cb60e145123179588a29e7ac48f9c06e401b" +checksum = "02f01f48e04e0d7da72280ab787c9943695699c9b32b99158ece105e8ad0afea" dependencies = [ 
"jsonrpsee-client-transport", "jsonrpsee-core", @@ -4222,9 +4196,9 @@ dependencies = [ [[package]] name = "jsonrpsee-client-transport" -version = "0.24.5" +version = "0.24.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf679a8e0e083c77997f7c4bb4ca826577105906027ae462aac70ff348d02c6a" +checksum = "d80eccbd47a7b9f1e67663fd846928e941cb49c65236e297dd11c9ea3c5e3387" dependencies = [ "base64 0.22.1", "futures-channel", @@ -4247,9 +4221,9 @@ dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.24.5" +version = "0.24.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0e503369a76e195b65af35058add0e6900b794a4e9a9316900ddd3a87a80477" +checksum = "3c2709a32915d816a6e8f625bf72cf74523ebe5d8829f895d6b041b1d3137818" dependencies = [ "async-trait", "bytes", @@ -4274,9 +4248,9 @@ dependencies = [ [[package]] name = "jsonrpsee-http-client" -version = "0.24.5" +version = "0.24.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2c0caba4a6a8efbafeec9baa986aa22a75a96c29d3e4b0091b0098d6470efb5" +checksum = "cc54db939002b030e794fbfc9d5a925aa2854889c5a2f0352b0bffa54681707e" dependencies = [ "async-trait", "base64 0.22.1", @@ -4299,9 +4273,9 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.24.5" +version = "0.24.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc660a9389e2748e794a40673a4155d501f32db667757cdb80edeff0306b489b" +checksum = "3a9a4b2eaba8cc928f49c4ccf4fcfa65b690a73997682da99ed08f3393b51f07" dependencies = [ "heck", "proc-macro-crate", @@ -4312,9 +4286,9 @@ dependencies = [ [[package]] name = "jsonrpsee-server" -version = "0.24.5" +version = "0.24.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af6e6c9b6d975edcb443565d648b605f3e85a04ec63aa6941811a8894cc9cded" +checksum = "e30110d0f2d7866c8cc6c86483bdab2eb9f4d2f0e20db55518b2bca84651ba8e" dependencies = [ "futures-util", "http", @@ -4339,9 +4313,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.24.5" +version = "0.24.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8fb16314327cbc94fdf7965ef7e4422509cd5597f76d137bd104eb34aeede67" +checksum = "1ca331cd7b3fe95b33432825c2d4c9f5a43963e207fdc01ae67f9fd80ab0930f" dependencies = [ "http", "serde", @@ -4351,9 +4325,9 @@ dependencies = [ [[package]] name = "jsonrpsee-wasm-client" -version = "0.24.5" +version = "0.24.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0da62b43702bd5640ea305d35df95da30abc878e79a7b4b01feda3beaf35d3c" +checksum = "5c603d97578071dc44d79d3cfaf0775437638fd5adc33c6b622dfe4fa2ec812d" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", @@ -4362,9 +4336,9 @@ dependencies = [ [[package]] name = "jsonrpsee-ws-client" -version = "0.24.5" +version = "0.24.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39aabf5d6c6f22da8d5b808eea1fab0736059f11fb42f71f141b14f404e5046a" +checksum = "755ca3da1c67671f1fae01cd1a47f41dfb2233a8f19a643e587ab0a663942044" dependencies = [ "http", "jsonrpsee-client-transport", @@ -4465,12 +4439,6 @@ dependencies = [ "spin", ] -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - [[package]] name = "libc" version = "0.2.159" @@ -4514,11 +4482,11 @@ dependencies = [ [[package]] name = "libproc" -version = 
"0.14.8" +version = "0.14.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae9ea4b75e1a81675429dafe43441df1caea70081e82246a8cccf514884a88bb" +checksum = "e78a09b56be5adbcad5aa1197371688dc6bb249a26da3bca2011ee2fb987ebfb" dependencies = [ - "bindgen 0.69.4", + "bindgen", "errno", "libc", ] @@ -4640,11 +4608,11 @@ dependencies = [ [[package]] name = "lru" -version = "0.12.4" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37ee39891760e7d94734f6f63fedc29a2e4a152f836120753a72503f09fcf904" +checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" dependencies = [ - "hashbrown 0.14.5", + "hashbrown 0.15.0", ] [[package]] @@ -4739,7 +4707,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4f0c8427b39666bf970460908b213ec09b3b350f20c0c2eabcbba51704a08e6" dependencies = [ "base64 0.22.1", - "indexmap 2.5.0", + "indexmap 2.6.0", "metrics", "metrics-util", "quanta", @@ -4748,17 +4716,18 @@ dependencies = [ [[package]] name = "metrics-process" -version = "2.1.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb524e5438255eaa8aa74214d5a62713b77b2c3c6e3c0bbeee65cfd9a58948ba" +checksum = "e69e6ced169644e186e060ddc15f3923fdf06862c811a867bb1e5e7c7824f4d0" dependencies = [ + "libc", "libproc", "mach2", "metrics", "once_cell", - "procfs", + "procfs 0.17.0", "rlimit", - "windows 0.57.0", + "windows 0.58.0", ] [[package]] @@ -5150,21 +5119,18 @@ dependencies = [ [[package]] name = "object" -version = "0.36.4" +version = "0.36.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" +checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.20.1" +version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82881c4be219ab5faaf2ad5e5e5ecdff8c66bd7402ca3160975c93b24961afd1" -dependencies = [ - "portable-atomic", -] +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "oorandom" @@ -5510,18 +5476,18 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.5" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +checksum = "baf123a161dde1e524adf36f90bc5d8d3462824a9c43553ad07a8183161189ec" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.5" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +checksum = "a4502d8515ca9f32f1fb543d987f63d95a14934883db45bdb48060b6b69257f8" dependencies = [ "proc-macro2", "quote", @@ -5777,9 +5743,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.86" +version = "1.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +checksum = "b3e4daa0dcf6feba26f985457cdf104d4b4256fc5a09547140f3631bb076b19a" dependencies = [ "unicode-ident", ] @@ -5795,7 +5761,19 @@ dependencies = [ "flate2", "hex", "lazy_static", - "procfs-core", + "procfs-core 0.16.0", + "rustix", +] + +[[package]] +name = "procfs" +version = "0.17.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc5b72d8145275d844d4b5f6d4e1eef00c8cd889edb6035c21675d1bb1f45c9f" +dependencies = [ + "bitflags 2.6.0", + "hex", + "procfs-core 0.17.0", "rustix", ] @@ -5810,6 +5788,16 @@ dependencies = [ "hex", ] +[[package]] +name = "procfs-core" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "239df02d8349b06fc07398a3a1697b06418223b1c7725085e801e7c0fc6a12ec" +dependencies = [ + "bitflags 2.6.0", + "hex", +] + [[package]] name = "proptest" version = "1.5.0" @@ -6057,9 +6045,9 @@ dependencies = [ [[package]] name = "raw-cpuid" -version = "11.1.0" +version = "11.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb9ee317cfe3fbd54b36a511efc1edd42e216903c9cd575e686dd68a2ba90d8d" +checksum = "1ab240315c661615f2ee9f0f2cd32d5a7343a84d5ebcccb99d46e6637565e7b0" dependencies = [ "bitflags 2.6.0", ] @@ -7578,7 +7566,7 @@ dependencies = [ "criterion", "dashmap 6.1.0", "derive_more 1.0.0", - "indexmap 2.5.0", + "indexmap 2.6.0", "parking_lot 0.12.3", "pprof", "rand 0.8.5", @@ -7594,7 +7582,7 @@ dependencies = [ name = "reth-mdbx-sys" version = "1.1.0" dependencies = [ - "bindgen 0.70.1", + "bindgen", "cc", ] @@ -7979,7 +7967,7 @@ dependencies = [ "metrics-exporter-prometheus", "metrics-process", "metrics-util", - "procfs", + "procfs 0.16.0", "reqwest", "reth-chainspec", "reth-db-api", @@ -9499,9 +9487,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.13" +version = "0.23.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dabaac7466917e566adb06783a81ca48944c6898a1b08b9374106dd671f4c8" +checksum = "415d9944693cb90382053259f89fbb077ea730ad7273047ec63b19bc9b160ba8" dependencies = [ "log", "once_cell", @@ -9632,18 +9620,18 @@ dependencies = [ [[package]] name = "scc" -version = "2.2.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836f1e0f4963ef5288b539b643b35e043e76a32d0f4e47e67febf69576527f50" +checksum = "553f8299af7450cda9a52d3a370199904e7a46b5ffd1bef187c4a6af3bb6db69" dependencies = [ "sdd", ] [[package]] name = "schannel" -version = "0.1.24" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9aaafd5a2b6e3d657ff009d82fbd630b6bd54dd4eb06f21693925cdf80f9b8b" +checksum = "01227be5826fa0690321a2ba6c5cd57a19cf3f6a09e76973b58e61de6ab9d1c1" dependencies = [ "windows-sys 0.59.0", ] @@ -9673,9 +9661,9 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sdd" -version = "3.0.3" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a7b59a5d9b0099720b417b6325d91a52cbf5b3dcb5041d864be53eefa58abc" +checksum = "49c1eeaf4b6a87c7479688c6d52b9f1153cedd3c489300564f932b065c6eab95" [[package]] name = "sec1" @@ -9800,7 +9788,7 @@ version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" dependencies = [ - "indexmap 2.5.0", + "indexmap 2.6.0", "itoa", "memchr", "ryu", @@ -9852,15 +9840,15 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.10.0" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9720086b3357bcb44fce40117d769a4d068c70ecfa190850a980a71755f66fcc" +checksum = "8e28bdad6db2b8340e449f7108f020b3b092e8583a9e3fb82713e1d4e71fe817" dependencies = [ "base64 0.22.1", "chrono", "hex", 
"indexmap 1.9.3", - "indexmap 2.5.0", + "indexmap 2.6.0", "serde", "serde_derive", "serde_json", @@ -9870,9 +9858,9 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.10.0" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f1abbfe725f27678f4663bcacb75a83e829fd464c25d78dd038a3a29e307cec" +checksum = "9d846214a9854ef724f3da161b426242d8de7c1fc7de2f89bb1efcb154dca79d" dependencies = [ "darling", "proc-macro2", @@ -10260,9 +10248,9 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.8.5" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ab661c8148c2261222a4d641ad5477fd4bea79406a99056096a0b41b35617a5" +checksum = "20e7b52ad118b2153644eea95c6fc740b6c1555b2344fdab763fc9de4075f665" dependencies = [ "paste", "proc-macro2", @@ -10670,7 +10658,7 @@ version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ - "indexmap 2.5.0", + "indexmap 2.6.0", "serde", "serde_spanned", "toml_datetime", @@ -11019,9 +11007,9 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.15" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" +checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" @@ -11214,9 +11202,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" +checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" dependencies = [ "cfg-if", "once_cell", @@ -11225,9 +11213,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b" +checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" dependencies = [ "bumpalo", "log", @@ -11240,9 +11228,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.43" +version = "0.4.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e9300f63a621e96ed275155c108eb6f843b6a26d053f122ab69724559dc8ed" +checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" dependencies = [ "cfg-if", "js-sys", @@ -11252,9 +11240,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf" +checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -11262,9 +11250,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" +checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", @@ -11275,9 +11263,9 @@ dependencies = [ 
[[package]] name = "wasm-bindgen-shared" -version = "0.2.93" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" +checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" [[package]] name = "wasm-streams" @@ -11294,9 +11282,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.70" +version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26fdeaafd9bd129f65e7c031593c24d62186301e0c72c8978fa1678be7d532c0" +checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" dependencies = [ "js-sys", "wasm-bindgen", From 176496189dbd5300fc4c778aca671a55345bd386 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 14 Oct 2024 10:29:58 +0200 Subject: [PATCH 032/425] Revert "chore(stages): reduce the progress logging " (#11698) --- .../stages/src/stages/hashing_account.rs | 20 ++++----- .../stages/src/stages/hashing_storage.rs | 20 ++++----- crates/stages/stages/src/stages/headers.rs | 24 +++++----- crates/stages/stages/src/stages/tx_lookup.rs | 8 +--- crates/stages/stages/src/stages/utils.rs | 44 +++++-------------- 5 files changed, 42 insertions(+), 74 deletions(-) diff --git a/crates/stages/stages/src/stages/hashing_account.rs b/crates/stages/stages/src/stages/hashing_account.rs index 2fdccd783..14afb37d8 100644 --- a/crates/stages/stages/src/stages/hashing_account.rs +++ b/crates/stages/stages/src/stages/hashing_account.rs @@ -1,4 +1,3 @@ -use crate::log_progress; use alloy_primitives::{keccak256, B256}; use itertools::Itertools; use reth_config::config::{EtlConfig, HashingConfig}; @@ -19,7 +18,6 @@ use std::{ fmt::Debug, ops::{Range, RangeInclusive}, sync::mpsc::{self, Receiver}, - time::Instant, }; use tracing::*; @@ -188,16 +186,16 @@ where let mut hashed_account_cursor = tx.cursor_write::>()?; - let total = collector.len(); - let mut last_log = Instant::now(); + let total_hashes = collector.len(); + let interval = (total_hashes / 10).max(1); for (index, item) in collector.iter()?.enumerate() { - log_progress!( - "sync::stages::hashing_account", - index, - total, - last_log, - "Inserting hashes" - ); + if index > 0 && index % interval == 0 { + info!( + target: "sync::stages::hashing_account", + progress = %format!("{:.2}%", (index as f64 / total_hashes as f64) * 100.0), + "Inserting hashes" + ); + } let (key, value) = item?; hashed_account_cursor diff --git a/crates/stages/stages/src/stages/hashing_storage.rs b/crates/stages/stages/src/stages/hashing_storage.rs index 064603293..ef070d30c 100644 --- a/crates/stages/stages/src/stages/hashing_storage.rs +++ b/crates/stages/stages/src/stages/hashing_storage.rs @@ -1,4 +1,3 @@ -use crate::log_progress; use alloy_primitives::{bytes::BufMut, keccak256, B256}; use itertools::Itertools; use reth_config::config::{EtlConfig, HashingConfig}; @@ -20,7 +19,6 @@ use reth_storage_errors::provider::ProviderResult; use std::{ fmt::Debug, sync::mpsc::{self, Receiver}, - time::Instant, }; use tracing::*; @@ -119,17 +117,17 @@ where collect(&mut channels, &mut collector)?; - let total = collector.len(); - let mut last_log = Instant::now(); + let total_hashes = collector.len(); + let interval = (total_hashes / 10).max(1); let mut cursor = tx.cursor_dup_write::()?; for (index, item) in collector.iter()?.enumerate() { - log_progress!( - "sync::stages::hashing_storage", - index, - total, - last_log, - "Inserting hashes" - ); + if index > 0 && index % interval == 0 { + 
info!( + target: "sync::stages::hashing_storage", + progress = %format!("{:.2}%", (index as f64 / total_hashes as f64) * 100.0), + "Inserting hashes" + ); + } let (addr_key, value) = item?; cursor.append_dup( diff --git a/crates/stages/stages/src/stages/headers.rs b/crates/stages/stages/src/stages/headers.rs index eaa2ea01f..199e015c2 100644 --- a/crates/stages/stages/src/stages/headers.rs +++ b/crates/stages/stages/src/stages/headers.rs @@ -1,4 +1,3 @@ -use crate::log_progress; use alloy_primitives::{BlockHash, BlockNumber, Bytes, B256}; use futures_util::StreamExt; use reth_config::config::EtlConfig; @@ -26,7 +25,6 @@ use reth_storage_errors::provider::ProviderError; use std::{ sync::Arc, task::{ready, Context, Poll}, - time::Instant, }; use tokio::sync::watch; use tracing::*; @@ -97,9 +95,9 @@ where provider: &impl DBProvider, static_file_provider: StaticFileProvider, ) -> Result { - let total = self.header_collector.len(); + let total_headers = self.header_collector.len(); - info!(target: "sync::stages::headers", total, "Writing headers"); + info!(target: "sync::stages::headers", total = total_headers, "Writing headers"); // Consistency check of expected headers in static files vs DB is done on provider::sync_gap // when poll_execute_ready is polled. @@ -115,11 +113,13 @@ where // Although headers were downloaded in reverse order, the collector iterates it in ascending // order let mut writer = static_file_provider.latest_writer(StaticFileSegment::Headers)?; - let mut last_log = Instant::now(); + let interval = (total_headers / 10).max(1); for (index, header) in self.header_collector.iter()?.enumerate() { let (_, header_buf) = header?; - log_progress!("sync::stages::headers", index, total, last_log, "Writing headers"); + if index > 0 && index % interval == 0 && total_headers > 100 { + info!(target: "sync::stages::headers", progress = %format!("{:.2}%", (index as f64 / total_headers as f64) * 100.0), "Writing headers"); + } let sealed_header: SealedHeader = bincode::deserialize::>(&header_buf) @@ -147,7 +147,7 @@ where writer.append_header(&header, td, &header_hash)?; } - info!(target: "sync::stages::headers", total, "Writing headers hash index"); + info!(target: "sync::stages::headers", total = total_headers, "Writing headers hash index"); let mut cursor_header_numbers = provider.tx_ref().cursor_write::>()?; @@ -168,13 +168,9 @@ where for (index, hash_to_number) in self.hash_collector.iter()?.enumerate() { let (hash, number) = hash_to_number?; - log_progress!( - "sync::stages::headers", - index, - total, - last_log, - "Writing headers hash index" - ); + if index > 0 && index % interval == 0 && total_headers > 100 { + info!(target: "sync::stages::headers", progress = %format!("{:.2}%", (index as f64 / total_headers as f64) * 100.0), "Writing headers hash index"); + } if first_sync { cursor_header_numbers.append( diff --git a/crates/stages/stages/src/stages/tx_lookup.rs b/crates/stages/stages/src/stages/tx_lookup.rs index c5ccff54c..60c958abf 100644 --- a/crates/stages/stages/src/stages/tx_lookup.rs +++ b/crates/stages/stages/src/stages/tx_lookup.rs @@ -1,4 +1,3 @@ -use super::utils::LOG_INTERVAL; use alloy_primitives::{TxHash, TxNumber}; use num_traits::Zero; use reth_config::config::{EtlConfig, TransactionLookupConfig}; @@ -18,7 +17,6 @@ use reth_stages_api::{ UnwindInput, UnwindOutput, }; use reth_storage_errors::provider::ProviderError; -use std::time::Instant; use tracing::*; /// The transaction lookup stage. 
@@ -149,18 +147,16 @@ where .cursor_write::>()?; let total_hashes = hash_collector.len(); - let mut last_log = Instant::now(); + let interval = (total_hashes / 10).max(1); for (index, hash_to_number) in hash_collector.iter()?.enumerate() { let (hash, number) = hash_to_number?; - let now = Instant::now(); - if now.duration_since(last_log) >= LOG_INTERVAL { + if index > 0 && index % interval == 0 { info!( target: "sync::stages::transaction_lookup", ?append_only, progress = %format!("{:.2}%", (index as f64 / total_hashes as f64) * 100.0), "Inserting hashes" ); - last_log = now; } let key = RawKey::::from_vec(hash); diff --git a/crates/stages/stages/src/stages/utils.rs b/crates/stages/stages/src/stages/utils.rs index af6594cd7..caf039fac 100644 --- a/crates/stages/stages/src/stages/utils.rs +++ b/crates/stages/stages/src/stages/utils.rs @@ -12,36 +12,12 @@ use reth_db_api::{ use reth_etl::Collector; use reth_provider::DBProvider; use reth_stages_api::StageError; -use std::{ - collections::HashMap, - hash::Hash, - ops::RangeBounds, - time::{Duration, Instant}, -}; +use std::{collections::HashMap, hash::Hash, ops::RangeBounds}; use tracing::info; /// Number of blocks before pushing indices from cache to [`Collector`] const DEFAULT_CACHE_THRESHOLD: u64 = 100_000; -/// Log interval for progress. -pub(crate) const LOG_INTERVAL: Duration = Duration::from_secs(5); - -/// Log progress at a regular interval. -#[macro_export] -macro_rules! log_progress { - ($target:expr, $index:expr, $total:expr, $last_log:expr, $message:expr) => { - let now = std::time::Instant::now(); - if now.duration_since($last_log) >= $crate::stages::utils::LOG_INTERVAL { - info!( - target: $target, - progress = %format!("{:.2}%", ($index as f64 / $total as f64) * 100.0), - $message - ); - $last_log = now; - } - } -} - /// Collects all history (`H`) indices for a range of changesets (`CS`) and stores them in a /// [`Collector`]. /// @@ -89,16 +65,18 @@ where }; // observability - let total = provider.tx_ref().entries::()?; - let mut last_log = Instant::now(); + let total_changesets = provider.tx_ref().entries::()?; + let interval = (total_changesets / 1000).max(1); let mut flush_counter = 0; let mut current_block_number = u64::MAX; - for (index, entry) in changeset_cursor.walk_range(range)?.enumerate() { + for (idx, entry) in changeset_cursor.walk_range(range)?.enumerate() { let (block_number, key) = partial_key_factory(entry?); cache.entry(key).or_default().push(block_number); - log_progress!("sync::stages::index_history", index, total, last_log, "Collecting indices"); + if idx > 0 && idx % interval == 0 && total_changesets > 1000 { + info!(target: "sync::stages::index_history", progress = %format!("{:.4}%", (idx as f64 / total_changesets as f64) * 100.0), "Collecting indices"); + } // Make sure we only flush the cache every DEFAULT_CACHE_THRESHOLD blocks. 
if current_block_number != block_number { @@ -142,15 +120,17 @@ where let mut current_list = Vec::::new(); // observability - let total = collector.len(); - let mut last_log = Instant::now(); + let total_entries = collector.len(); + let interval = (total_entries / 100).max(1); for (index, element) in collector.iter()?.enumerate() { let (k, v) = element?; let sharded_key = decode_key(k)?; let new_list = BlockNumberList::decompress_owned(v)?; - log_progress!("sync::stages::index_history", index, total, last_log, "Writing indices"); + if index > 0 && index % interval == 0 && total_entries > 100 { + info!(target: "sync::stages::index_history", progress = %format!("{:.2}%", (index as f64 / total_entries as f64) * 100.0), "Writing indices"); + } // AccountsHistory: `Address`. // StorageHistory: `Address.StorageKey`. From a049dff0b774f2766ce1bef97020f6b390dadc60 Mon Sep 17 00:00:00 2001 From: Kien Trinh <51135161+kien6034@users.noreply.github.com> Date: Mon, 14 Oct 2024 16:11:24 +0700 Subject: [PATCH 033/425] tests(node-builder): basic exex test update for apply function (#11695) Co-authored-by: Matthias Seitz --- crates/node/builder/src/builder/mod.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/crates/node/builder/src/builder/mod.rs b/crates/node/builder/src/builder/mod.rs index 15bdc730a..9f7254bad 100644 --- a/crates/node/builder/src/builder/mod.rs +++ b/crates/node/builder/src/builder/mod.rs @@ -177,6 +177,11 @@ impl NodeBuilder { pub const fn config(&self) -> &NodeConfig { &self.config } + + /// Returns a mutable reference to the node builder's config. + pub fn config_mut(&mut self) -> &mut NodeConfig { + &mut self.config + } } impl NodeBuilder { From 482468579f6cf3c4ef07fe3cccc22cb88998f035 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Mon, 14 Oct 2024 11:12:29 +0200 Subject: [PATCH 034/425] chore(sdk): define trait `Receipt` (#11643) --- crates/primitives-traits/Cargo.toml | 2 +- crates/primitives-traits/src/lib.rs | 3 +++ crates/primitives-traits/src/receipt.rs | 29 +++++++++++++++++++++++++ 3 files changed, 33 insertions(+), 1 deletion(-) create mode 100644 crates/primitives-traits/src/receipt.rs diff --git a/crates/primitives-traits/Cargo.toml b/crates/primitives-traits/Cargo.toml index 2fec75666..b34987327 100644 --- a/crates/primitives-traits/Cargo.toml +++ b/crates/primitives-traits/Cargo.toml @@ -64,4 +64,4 @@ arbitrary = [ "dep:proptest", "dep:proptest-arbitrary-interop", ] -serde-bincode-compat = ["serde_with", "alloy-consensus/serde-bincode-compat"] +serde-bincode-compat = ["serde_with", "alloy-consensus/serde-bincode-compat"] \ No newline at end of file diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index ccc3ea13b..9c244226b 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -20,6 +20,9 @@ pub use constants::gas_units::{format_gas, format_gas_throughput}; pub mod account; pub use account::{Account, Bytecode}; +pub mod receipt; +pub use receipt::Receipt; + mod integer_list; pub use integer_list::{IntegerList, IntegerListError}; diff --git a/crates/primitives-traits/src/receipt.rs b/crates/primitives-traits/src/receipt.rs new file mode 100644 index 000000000..e2d19e4d4 --- /dev/null +++ b/crates/primitives-traits/src/receipt.rs @@ -0,0 +1,29 @@ +//! Receipt abstraction + +use alloc::fmt; + +use alloy_consensus::TxReceipt; +use reth_codecs::Compact; +use serde::{Deserialize, Serialize}; + +/// Helper trait that unifies all behaviour required by receipt to support full node operations. 
+pub trait FullReceipt: Receipt + Compact {} + +impl FullReceipt for T where T: Receipt + Compact {} + +/// Abstraction of a receipt. +pub trait Receipt: + TxReceipt + + Clone + + fmt::Debug + + PartialEq + + Eq + + Default + + alloy_rlp::Encodable + + alloy_rlp::Decodable + + Serialize + + for<'de> Deserialize<'de> +{ + /// Returns transaction type. + fn tx_type(&self) -> u8; +} From a129f62aaa4ccfa9926498804fa5b70bf6f72784 Mon Sep 17 00:00:00 2001 From: Varun Doshi <61531351+varun-doshi@users.noreply.github.com> Date: Mon, 14 Oct 2024 14:49:04 +0530 Subject: [PATCH 035/425] feat: reset pruned numbers on stage drop (#11491) Co-authored-by: Alexey Shekhirin --- crates/cli/commands/src/stage/drop.rs | 114 ++++++++++++-------------- 1 file changed, 51 insertions(+), 63 deletions(-) diff --git a/crates/cli/commands/src/stage/drop.rs b/crates/cli/commands/src/stage/drop.rs index e324c9851..9e0396404 100644 --- a/crates/cli/commands/src/stage/drop.rs +++ b/crates/cli/commands/src/stage/drop.rs @@ -4,7 +4,7 @@ use clap::Parser; use itertools::Itertools; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; -use reth_db::{static_file::iter_static_files, tables}; +use reth_db::{mdbx::tx::Tx, static_file::iter_static_files, tables, DatabaseError}; use reth_db_api::transaction::{DbTx, DbTxMut}; use reth_db_common::{ init::{insert_genesis_header, insert_genesis_history, insert_genesis_state}, @@ -69,42 +69,28 @@ impl> Command tx.clear::()?; tx.clear::()?; tx.clear::()?; - tx.put::( - StageId::Headers.to_string(), - Default::default(), - )?; + reset_stage_checkpoint(tx, StageId::Headers)?; + insert_genesis_header(&provider_rw.0, &static_file_provider, &self.env.chain)?; } StageEnum::Bodies => { tx.clear::()?; tx.clear::()?; + reset_prune_checkpoint(tx, PruneSegment::Transactions)?; + tx.clear::()?; tx.clear::()?; tx.clear::()?; tx.clear::()?; - tx.put::( - StageId::Bodies.to_string(), - Default::default(), - )?; + reset_stage_checkpoint(tx, StageId::Bodies)?; + insert_genesis_header(&provider_rw.0, &static_file_provider, &self.env.chain)?; } StageEnum::Senders => { tx.clear::()?; // Reset pruned numbers to not count them in the next rerun's stage progress - if let Some(mut prune_checkpoint) = - tx.get::(PruneSegment::SenderRecovery)? 
- { - prune_checkpoint.block_number = None; - prune_checkpoint.tx_number = None; - tx.put::( - PruneSegment::SenderRecovery, - prune_checkpoint, - )?; - } - tx.put::( - StageId::SenderRecovery.to_string(), - Default::default(), - )?; + reset_prune_checkpoint(tx, PruneSegment::SenderRecovery)?; + reset_stage_checkpoint(tx, StageId::SenderRecovery)?; } StageEnum::Execution => { tx.clear::()?; @@ -113,53 +99,38 @@ impl> Command tx.clear::()?; tx.clear::()?; tx.clear::()?; - tx.put::( - StageId::Execution.to_string(), - Default::default(), - )?; + + reset_prune_checkpoint(tx, PruneSegment::Receipts)?; + reset_prune_checkpoint(tx, PruneSegment::ContractLogs)?; + reset_stage_checkpoint(tx, StageId::Execution)?; + let alloc = &self.env.chain.genesis().alloc; insert_genesis_state(&provider_rw.0, alloc.iter())?; } StageEnum::AccountHashing => { tx.clear::()?; - tx.put::( - StageId::AccountHashing.to_string(), - Default::default(), - )?; + reset_stage_checkpoint(tx, StageId::AccountHashing)?; } StageEnum::StorageHashing => { tx.clear::()?; - tx.put::( - StageId::StorageHashing.to_string(), - Default::default(), - )?; + reset_stage_checkpoint(tx, StageId::StorageHashing)?; } StageEnum::Hashing => { // Clear hashed accounts tx.clear::()?; - tx.put::( - StageId::AccountHashing.to_string(), - Default::default(), - )?; + reset_stage_checkpoint(tx, StageId::AccountHashing)?; // Clear hashed storages tx.clear::()?; - tx.put::( - StageId::StorageHashing.to_string(), - Default::default(), - )?; + reset_stage_checkpoint(tx, StageId::StorageHashing)?; } StageEnum::Merkle => { tx.clear::()?; tx.clear::()?; - tx.put::( - StageId::MerkleExecute.to_string(), - Default::default(), - )?; - tx.put::( - StageId::MerkleUnwind.to_string(), - Default::default(), - )?; + + reset_stage_checkpoint(tx, StageId::MerkleExecute)?; + reset_stage_checkpoint(tx, StageId::MerkleUnwind)?; + tx.delete::( StageId::MerkleExecute.to_string(), None, @@ -168,22 +139,17 @@ impl> Command StageEnum::AccountHistory | StageEnum::StorageHistory => { tx.clear::()?; tx.clear::()?; - tx.put::( - StageId::IndexAccountHistory.to_string(), - Default::default(), - )?; - tx.put::( - StageId::IndexStorageHistory.to_string(), - Default::default(), - )?; + + reset_stage_checkpoint(tx, StageId::IndexAccountHistory)?; + reset_stage_checkpoint(tx, StageId::IndexStorageHistory)?; + insert_genesis_history(&provider_rw.0, self.env.chain.genesis().alloc.iter())?; } StageEnum::TxLookup => { tx.clear::()?; - tx.put::( - StageId::TransactionLookup.to_string(), - Default::default(), - )?; + reset_prune_checkpoint(tx, PruneSegment::TransactionLookup)?; + + reset_stage_checkpoint(tx, StageId::TransactionLookup)?; insert_genesis_header(&provider_rw.0, &static_file_provider, &self.env.chain)?; } } @@ -195,3 +161,25 @@ impl> Command Ok(()) } } + +fn reset_prune_checkpoint( + tx: &Tx, + prune_segment: PruneSegment, +) -> Result<(), DatabaseError> { + if let Some(mut prune_checkpoint) = tx.get::(prune_segment)? 
{ + prune_checkpoint.block_number = None; + prune_checkpoint.tx_number = None; + tx.put::(prune_segment, prune_checkpoint)?; + } + + Ok(()) +} + +fn reset_stage_checkpoint( + tx: &Tx, + stage_id: StageId, +) -> Result<(), DatabaseError> { + tx.put::(stage_id.to_string(), Default::default())?; + + Ok(()) +} From d2233fcc0d2d745e131ace1c4689116792dd8f8c Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Mon, 14 Oct 2024 11:22:27 +0200 Subject: [PATCH 036/425] feat: new `reth-trie-sparse` crate (#11707) --- Cargo.lock | 4 ++++ Cargo.toml | 1 + crates/trie/sparse/Cargo.toml | 12 ++++++++++++ crates/trie/sparse/src/lib.rs | 1 + 4 files changed, 18 insertions(+) create mode 100644 crates/trie/sparse/Cargo.toml create mode 100644 crates/trie/sparse/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 538816c7c..3e3fd18a8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9197,6 +9197,10 @@ dependencies = [ "tracing", ] +[[package]] +name = "reth-trie-sparse" +version = "1.1.0" + [[package]] name = "revm" version = "14.0.3" diff --git a/Cargo.toml b/Cargo.toml index efae22f8e..628d7d47b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -123,6 +123,7 @@ members = [ "crates/trie/common", "crates/trie/db", "crates/trie/parallel/", + "crates/trie/sparse", "crates/trie/trie", "examples/beacon-api-sidecar-fetcher/", "examples/beacon-api-sse/", diff --git a/crates/trie/sparse/Cargo.toml b/crates/trie/sparse/Cargo.toml new file mode 100644 index 000000000..4ebb56145 --- /dev/null +++ b/crates/trie/sparse/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "reth-trie-sparse" +version.workspace = true +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +description = "Sparse MPT implementation" + +[lints] +workspace = true diff --git a/crates/trie/sparse/src/lib.rs b/crates/trie/sparse/src/lib.rs new file mode 100644 index 000000000..5d3d4a5b6 --- /dev/null +++ b/crates/trie/sparse/src/lib.rs @@ -0,0 +1 @@ +//! The implementation of sparse MPT. From 7c2c3a6a5c54432d2ea820a77616064338322021 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 14 Oct 2024 11:46:00 +0200 Subject: [PATCH 037/425] fix: respect --debug.terminate --debug.max-block (#11710) --- crates/node/builder/src/launch/common.rs | 9 +++++++++ crates/node/builder/src/launch/engine.rs | 7 +++++++ 2 files changed, 16 insertions(+) diff --git a/crates/node/builder/src/launch/common.rs b/crates/node/builder/src/launch/common.rs index 99e9b2936..3e8f92e70 100644 --- a/crates/node/builder/src/launch/common.rs +++ b/crates/node/builder/src/launch/common.rs @@ -806,6 +806,15 @@ where Ok(initial_target) } + /// Returns true if the node should terminate after the initial backfill run. + /// + /// This is the case if any of these configs are set: + /// `--debug.max-block` + /// `--debug.terminate` + pub const fn terminate_after_initial_backfill(&self) -> bool { + self.node_config().debug.terminate || self.node_config().debug.max_block.is_some() + } + /// Check if the pipeline is consistent (all stages have the checkpoint block numbers no less /// than the checkpoint of the first stage). 
/// diff --git a/crates/node/builder/src/launch/engine.rs b/crates/node/builder/src/launch/engine.rs index 46ffacbf7..782cc7bbb 100644 --- a/crates/node/builder/src/launch/engine.rs +++ b/crates/node/builder/src/launch/engine.rs @@ -334,6 +334,8 @@ where .fuse(); let chainspec = ctx.chain_spec(); let (exit, rx) = oneshot::channel(); + let terminate_after_backfill = ctx.terminate_after_initial_backfill(); + info!(target: "reth::cli", "Starting consensus engine"); ctx.task_executor().spawn_critical("consensus engine", async move { if let Some(initial_target) = initial_target { @@ -357,6 +359,11 @@ where debug!(target: "reth::cli", "Event: {event}"); match event { ChainEvent::BackfillSyncFinished => { + if terminate_after_backfill { + debug!(target: "reth::cli", "Terminating after initial backfill"); + break + } + network_handle.update_sync_state(SyncState::Idle); } ChainEvent::BackfillSyncStarted => { From b6371018944e8cba6f5b4c56e76ff211b9e820b1 Mon Sep 17 00:00:00 2001 From: caglarkaya Date: Mon, 14 Oct 2024 12:50:07 +0300 Subject: [PATCH 038/425] docs: move ExEx book examples (#11616) Co-authored-by: Alexey Shekhirin --- book/developers/exex/remote.md | 349 +----------------- book/developers/exex/tracking-state.md | 138 +------ book/sources/Cargo.toml | 2 + book/sources/exex/remote/Cargo.toml | 52 +++ book/sources/exex/remote/build.rs | 4 + book/sources/exex/remote/proto/exex.proto | 13 + book/sources/exex/remote/src/consumer.rs | 32 ++ book/sources/exex/remote/src/exex.rs | 87 +++++ book/sources/exex/remote/src/exex_1.rs | 40 ++ book/sources/exex/remote/src/exex_2.rs | 49 +++ book/sources/exex/remote/src/exex_3.rs | 65 ++++ book/sources/exex/remote/src/exex_4.rs | 84 +++++ book/sources/exex/remote/src/lib.rs | 3 + book/sources/exex/tracking-state/Cargo.toml | 14 + book/sources/exex/tracking-state/src/bin/1.rs | 57 +++ book/sources/exex/tracking-state/src/bin/2.rs | 73 ++++ 16 files changed, 587 insertions(+), 475 deletions(-) create mode 100644 book/sources/exex/remote/Cargo.toml create mode 100644 book/sources/exex/remote/build.rs create mode 100644 book/sources/exex/remote/proto/exex.proto create mode 100644 book/sources/exex/remote/src/consumer.rs create mode 100644 book/sources/exex/remote/src/exex.rs create mode 100644 book/sources/exex/remote/src/exex_1.rs create mode 100644 book/sources/exex/remote/src/exex_2.rs create mode 100644 book/sources/exex/remote/src/exex_3.rs create mode 100644 book/sources/exex/remote/src/exex_4.rs create mode 100644 book/sources/exex/remote/src/lib.rs create mode 100644 book/sources/exex/tracking-state/Cargo.toml create mode 100644 book/sources/exex/tracking-state/src/bin/1.rs create mode 100644 book/sources/exex/tracking-state/src/bin/2.rs diff --git a/book/developers/exex/remote.md b/book/developers/exex/remote.md index 4344e28b3..0ec704308 100644 --- a/book/developers/exex/remote.md +++ b/book/developers/exex/remote.md @@ -25,41 +25,7 @@ We will also need a bunch of dependencies. Some of them you know from the [Hello but some of specific to what we need now. 
```toml -[package] -name = "remote-exex" -version = "0.1.0" -edition = "2021" - -[dependencies] -# reth -reth = { git = "https://github.com/paradigmxyz/reth.git" } -reth-exex = { git = "https://github.com/paradigmxyz/reth.git", features = ["serde"] } -reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth.git"} -reth-tracing = { git = "https://github.com/paradigmxyz/reth.git" } - -# async -tokio = { version = "1", features = ["full"] } -tokio-stream = "0.1" -futures-util = "0.3" - -# grpc -tonic = "0.11" -prost = "0.12" -bincode = "1" - -# misc -eyre = "0.6" - -[build-dependencies] -tonic-build = "0.11" - -[[bin]] -name = "exex" -path = "src/exex.rs" - -[[bin]] -name = "consumer" -path = "src/consumer.rs" +{{#include ../../sources/exex/remote/Cargo.toml}} ``` We also added a build dependency for Tonic. We will use it to generate the Rust code for our @@ -87,26 +53,12 @@ For an example of a full schema, see the [Remote ExEx](https://github.com/paradi ```protobuf -syntax = "proto3"; - -package exex; - -service RemoteExEx { - rpc Subscribe(SubscribeRequest) returns (stream ExExNotification) {} -} - -message SubscribeRequest {} - -message ExExNotification { - bytes data = 1; -} +{{#include ../../sources/exex/remote/proto/exex.proto}} ``` To instruct Tonic to generate the Rust code using this `.proto`, add the following lines to your `lib.rs` file: ```rust,norun,noplayground,ignore -pub mod proto { - tonic::include_proto!("exex"); -} +{{#include ../../sources/exex/remote/src/lib.rs}} ``` ## ExEx and gRPC server @@ -119,52 +71,7 @@ Let's create a minimal gRPC server that listens on the port `:10000`, and spawn the [NodeBuilder](https://reth.rs/docs/reth/builder/struct.NodeBuilder.html)'s [task executor](https://reth.rs/docs/reth/tasks/struct.TaskExecutor.html). ```rust,norun,noplayground,ignore -use remote_exex::proto::{ - self, - remote_ex_ex_server::{RemoteExEx, RemoteExExServer}, -}; -use reth_exex::ExExNotification; -use reth_node_ethereum::EthereumNode; -use reth_tracing::tracing::info; -use std::sync::Arc; -use tokio::sync::{broadcast, mpsc}; -use tokio_stream::wrappers::ReceiverStream; -use tonic::{transport::Server, Request, Response, Status}; - -struct ExExService {} - -#[tonic::async_trait] -impl RemoteExEx for ExExService { - type SubscribeStream = ReceiverStream>; - - async fn subscribe( - &self, - _request: Request, - ) -> Result, Status> { - let (_tx, rx) = mpsc::channel(1); - - Ok(Response::new(ReceiverStream::new(rx))) - } -} - -fn main() -> eyre::Result<()> { - reth::cli::Cli::parse_args().run(|builder, _| async move { - let server = Server::builder() - .add_service(RemoteExExServer::new(ExExService {})) - .serve("[::1]:10000".parse().unwrap()); - - let handle = builder.node(EthereumNode::default()).launch().await?; - - handle - .node - .task_executor - .spawn_critical("gRPC server", async move { - server.await.expect("failed to start gRPC server") - }); - - handle.wait_for_node_exit().await - }) -} +{{#include ../../sources/exex/remote/src/exex_1.rs}} ``` Currently, it does not send anything on the stream. @@ -175,40 +82,7 @@ Let's create this channel in the `main` function where we will have both gRPC se and save the sender part (that way we will be able to create new receivers) of this channel in our gRPC server. ```rust,norun,noplayground,ignore -// ... -use reth_exex::{ExExNotification}; - -struct ExExService { - notifications: Arc>, -} - -... 
- -fn main() -> eyre::Result<()> { - reth::cli::Cli::parse_args().run(|builder, _| async move { - let notifications = Arc::new(broadcast::channel(1).0); - - let server = Server::builder() - .add_service(RemoteExExServer::new(ExExService { - notifications: notifications.clone(), - })) - .serve("[::1]:10000".parse().unwrap()); - - let handle = builder - .node(EthereumNode::default()) - .launch() - .await?; - - handle - .node - .task_executor - .spawn_critical("gRPC server", async move { - server.await.expect("failed to start gRPC server") - }); - - handle.wait_for_node_exit().await - }) -} +{{#include ../../sources/exex/remote/src/exex_2.rs}} ``` And with that, we're ready to handle incoming notifications, serialize them with [bincode](https://docs.rs/bincode/) @@ -218,37 +92,7 @@ For each incoming request, we spawn a separate tokio task that will run in the b and then return the stream receiver to the client. ```rust,norun,noplayground,ignore -// ... - -#[tonic::async_trait] -impl RemoteExEx for ExExService { - type SubscribeStream = ReceiverStream>; - - async fn subscribe( - &self, - _request: Request, - ) -> Result, Status> { - let (tx, rx) = mpsc::channel(1); - - let mut notifications = self.notifications.subscribe(); - tokio::spawn(async move { - while let Ok(notification) = notifications.recv().await { - let proto_notification = proto::ExExNotification { - data: bincode::serialize(¬ification).expect("failed to serialize"), - }; - tx.send(Ok(proto_notification)) - .await - .expect("failed to send notification to client"); - - info!("Notification sent to the gRPC client"); - } - }); - - Ok(Response::new(ReceiverStream::new(rx))) - } -} - -// ... +{{#rustdoc_include ../../sources/exex/remote/src/exex_3.rs:snippet}} ``` That's it for the gRPC server part! It doesn't receive anything on the `notifications` channel yet, @@ -267,65 +111,14 @@ Don't forget to emit `ExExEvent::FinishedHeight` ```rust,norun,noplayground,ignore -// ... - -use futures_util::StreamExt; -use reth_exex::{ExExContext, ExExEvent}; - -async fn remote_exex( - mut ctx: ExExContext, - notifications: Arc>, -) -> eyre::Result<()> { - while let Some(notification) = ctx.notifications.next().await { - if let Some(committed_chain) = notification.committed_chain() { - ctx.events - .send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; - } - - info!("Notification sent to the gRPC server"); - let _ = notifications.send(notification); - } - - Ok(()) -} - -// ... +{{#rustdoc_include ../../sources/exex/remote/src/exex_4.rs:snippet}} ``` All that's left is to connect all pieces together: install our ExEx in the node and pass the sender part of communication channel to it. ```rust,norun,noplayground,ignore -// ... 
- -fn main() -> eyre::Result<()> { - reth::cli::Cli::parse_args().run(|builder, _| async move { - let notifications = Arc::new(broadcast::channel(1).0); - - let server = Server::builder() - .add_service(RemoteExExServer::new(ExExService { - notifications: notifications.clone(), - })) - .serve("[::1]:10000".parse().unwrap()); - - let handle = builder - .node(EthereumNode::default()) - .install_exex("remote-exex", |ctx| async move { - Ok(remote_exex(ctx, notifications)) - }) - .launch() - .await?; - - handle - .node - .task_executor - .spawn_critical("gRPC server", async move { - server.await.expect("failed to start gRPC server") - }); - - handle.wait_for_node_exit().await - }) -} +{{#rustdoc_include ../../sources/exex/remote/src/exex.rs:snippet}} ``` ### Full `exex.rs` code @@ -334,98 +127,7 @@ fn main() -> eyre::Result<()> { Click to expand ```rust,norun,noplayground,ignore -use std::sync::Arc; - -use futures_util::StreamExt; -use remote_exex::proto::{ - self, - remote_ex_ex_server::{RemoteExEx, RemoteExExServer}, -}; -use reth::api::FullNodeComponents; -use reth_exex::{ExExContext, ExExEvent, ExExNotification}; -use reth_node_ethereum::EthereumNode; -use reth_tracing::tracing::info; -use tokio::sync::{broadcast, mpsc}; -use tokio_stream::wrappers::ReceiverStream; -use tonic::{transport::Server, Request, Response, Status}; - -struct ExExService { - notifications: Arc>, -} - -#[tonic::async_trait] -impl RemoteExEx for ExExService { - type SubscribeStream = ReceiverStream>; - - async fn subscribe( - &self, - _request: Request, - ) -> Result, Status> { - let (tx, rx) = mpsc::channel(1); - - let mut notifications = self.notifications.subscribe(); - tokio::spawn(async move { - while let Ok(notification) = notifications.recv().await { - let proto_notification = proto::ExExNotification { - data: bincode::serialize(¬ification).expect("failed to serialize"), - }; - tx.send(Ok(proto_notification)) - .await - .expect("failed to send notification to client"); - - info!(?notification, "Notification sent to the gRPC client"); - } - }); - - Ok(Response::new(ReceiverStream::new(rx))) - } -} - -async fn remote_exex( - mut ctx: ExExContext, - notifications: Arc>, -) -> eyre::Result<()> { - while let Some(notification) = ctx.notifications.next().await { - if let Some(committed_chain) = notification.committed_chain() { - ctx.events - .send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; - } - - info!(?notification, "Notification sent to the gRPC server"); - let _ = notifications.send(notification); - } - - Ok(()) -} - -fn main() -> eyre::Result<()> { - reth::cli::Cli::parse_args().run(|builder, _| async move { - let notifications = Arc::new(broadcast::channel(1).0); - - let server = Server::builder() - .add_service(RemoteExExServer::new(ExExService { - notifications: notifications.clone(), - })) - .serve("[::1]:10000".parse().unwrap()); - - let handle = builder - .node(EthereumNode::default()) - .install_exex("remote-exex", |ctx| async move { - Ok(remote_exex(ctx, notifications)) - }) - .launch() - .await?; - - handle - .node - .task_executor - .spawn_critical("gRPC server", async move { - server.await.expect("failed to start gRPC server") - }); - - handle.wait_for_node_exit().await - }) -} +{{#include ../../sources/exex/remote/src/exex.rs}} ``` @@ -442,38 +144,7 @@ because notifications can get very heavy ```rust,norun,noplayground,ignore -use remote_exex::proto::{remote_ex_ex_client::RemoteExExClient, SubscribeRequest}; -use reth_exex::ExExNotification; -use reth_tracing::{tracing::info, 
RethTracer, Tracer}; - -#[tokio::main] -async fn main() -> eyre::Result<()> { - let _ = RethTracer::new().init()?; - - let mut client = RemoteExExClient::connect("http://[::1]:10000") - .await? - .max_encoding_message_size(usize::MAX) - .max_decoding_message_size(usize::MAX); - - let mut stream = client.subscribe(SubscribeRequest {}).await?.into_inner(); - while let Some(notification) = stream.message().await? { - let notification: ExExNotification = bincode::deserialize(¬ification.data)?; - - match notification { - ExExNotification::ChainCommitted { new } => { - info!(committed_chain = ?new.range(), "Received commit"); - } - ExExNotification::ChainReorged { old, new } => { - info!(from_chain = ?old.range(), to_chain = ?new.range(), "Received reorg"); - } - ExExNotification::ChainReverted { old } => { - info!(reverted_chain = ?old.range(), "Received revert"); - } - }; - } - - Ok(()) -} +{{#include ../../sources/exex/remote/src/consumer.rs}} ``` ## Running diff --git a/book/developers/exex/tracking-state.md b/book/developers/exex/tracking-state.md index 52c73e618..d2a9fe6ca 100644 --- a/book/developers/exex/tracking-state.md +++ b/book/developers/exex/tracking-state.md @@ -19,63 +19,7 @@ because you can't access variables inside the function to assert the state of yo ```rust,norun,noplayground,ignore -use std::{ - future::Future, - pin::Pin, - task::{ready, Context, Poll}, -}; - -use futures_util::StreamExt; -use reth::api::FullNodeComponents; -use reth_exex::{ExExContext, ExExEvent, ExExNotification}; -use reth_node_ethereum::EthereumNode; -use reth_tracing::tracing::info; - -struct MyExEx { - ctx: ExExContext, -} - -impl Future for MyExEx { - type Output = eyre::Result<()>; - - fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { - let this = self.get_mut(); - - while let Some(notification) = ready!(this.ctx.notifications.poll_next_unpin(cx)) { - match ¬ification { - ExExNotification::ChainCommitted { new } => { - info!(committed_chain = ?new.range(), "Received commit"); - } - ExExNotification::ChainReorged { old, new } => { - info!(from_chain = ?old.range(), to_chain = ?new.range(), "Received reorg"); - } - ExExNotification::ChainReverted { old } => { - info!(reverted_chain = ?old.range(), "Received revert"); - } - }; - - if let Some(committed_chain) = notification.committed_chain() { - this.ctx - .events - .send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; - } - } - - Poll::Ready(Ok(())) - } -} - -fn main() -> eyre::Result<()> { - reth::cli::Cli::parse_args().run(|builder, _| async move { - let handle = builder - .node(EthereumNode::default()) - .install_exex("my-exex", |ctx| async move { Ok(MyExEx { ctx }) }) - .launch() - .await?; - - handle.wait_for_node_exit().await - }) -} +{{#include ../../sources/exex/tracking-state/src/bin/1.rs}} ``` For those who are not familiar with how async Rust works on a lower level, that may seem scary, @@ -96,85 +40,7 @@ With all that done, we're now free to add more fields to our `MyExEx` struct, an Our ExEx will count the number of transactions in each block and log it to the console. ```rust,norun,noplayground,ignore -use std::{ - future::Future, - pin::Pin, - task::{ready, Context, Poll}, -}; - -use futures_util::StreamExt; -use reth::{api::FullNodeComponents, primitives::BlockNumber}; -use reth_exex::{ExExContext, ExExEvent}; -use reth_node_ethereum::EthereumNode; -use reth_tracing::tracing::info; - -struct MyExEx { - ctx: ExExContext, - /// First block that was committed since the start of the ExEx. 
- first_block: Option, - /// Total number of transactions committed. - transactions: u64, -} - -impl MyExEx { - fn new(ctx: ExExContext) -> Self { - Self { - ctx, - first_block: None, - transactions: 0, - } - } -} - -impl Future for MyExEx { - type Output = eyre::Result<()>; - - fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { - let this = self.get_mut(); - - while let Some(notification) = ready!(this.ctx.notifications.poll_next_unpin(cx)) { - if let Some(reverted_chain) = notification.reverted_chain() { - this.transactions = this.transactions.saturating_sub( - reverted_chain - .blocks_iter() - .map(|b| b.body.len() as u64) - .sum(), - ); - } - - if let Some(committed_chain) = notification.committed_chain() { - this.first_block.get_or_insert(committed_chain.first().number); - - this.transactions += committed_chain - .blocks_iter() - .map(|b| b.body.len() as u64) - .sum::(); - - this.ctx - .events - .send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; - } - - if let Some(first_block) = this.first_block { - info!(%first_block, transactions = %this.transactions, "Total number of transactions"); - } - } - - Poll::Ready(Ok(())) - } -} - -fn main() -> eyre::Result<()> { - reth::cli::Cli::parse_args().run(|builder, _| async move { - let handle = builder - .node(EthereumNode::default()) - .install_exex("my-exex", |ctx| async move { Ok(MyExEx::new(ctx)) }) - .launch() - .await?; - - handle.wait_for_node_exit().await - }) -} +{{#include ../../sources/exex/tracking-state/src/bin/2.rs}} ``` As you can see, we added two fields to our ExEx struct: diff --git a/book/sources/Cargo.toml b/book/sources/Cargo.toml index c04c8567f..1529af952 100644 --- a/book/sources/Cargo.toml +++ b/book/sources/Cargo.toml @@ -1,6 +1,8 @@ [workspace] members = [ "exex/hello-world", + "exex/remote", + "exex/tracking-state", ] # Explicitly set the resolver to version 2, which is the default for packages with edition >= 2021 diff --git a/book/sources/exex/remote/Cargo.toml b/book/sources/exex/remote/Cargo.toml new file mode 100644 index 000000000..6eeb848ca --- /dev/null +++ b/book/sources/exex/remote/Cargo.toml @@ -0,0 +1,52 @@ +[package] +name = "remote-exex" +version = "0.1.0" +edition = "2021" + +[dependencies] +# reth +reth = { git = "https://github.com/paradigmxyz/reth.git" } +reth-exex = { git = "https://github.com/paradigmxyz/reth.git", features = ["serde"] } +reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth.git"} +reth-node-api = { git = "https://github.com/paradigmxyz/reth.git"} +reth-tracing = { git = "https://github.com/paradigmxyz/reth.git" } + +# async +tokio = { version = "1", features = ["full"] } +tokio-stream = "0.1" +futures-util = "0.3" + +# grpc +tonic = "0.11" +prost = "0.12" +bincode = "1" + +# misc +eyre = "0.6" + +[build-dependencies] +tonic-build = "0.11" + +[[bin]] +name = "exex_1" +path = "src/exex_1.rs" + +[[bin]] +name = "exex_2" +path = "src/exex_2.rs" + +[[bin]] +name = "exex_3" +path = "src/exex_3.rs" + +[[bin]] +name = "exex_4" +path = "src/exex_4.rs" + +[[bin]] +name = "exex" +path = "src/exex.rs" + +[[bin]] +name = "consumer" +path = "src/consumer.rs" \ No newline at end of file diff --git a/book/sources/exex/remote/build.rs b/book/sources/exex/remote/build.rs new file mode 100644 index 000000000..8e66f2a30 --- /dev/null +++ b/book/sources/exex/remote/build.rs @@ -0,0 +1,4 @@ +fn main() -> Result<(), Box> { + tonic_build::compile_protos("proto/exex.proto")?; + Ok(()) +} diff --git a/book/sources/exex/remote/proto/exex.proto 
b/book/sources/exex/remote/proto/exex.proto new file mode 100644 index 000000000..9bb180b5f --- /dev/null +++ b/book/sources/exex/remote/proto/exex.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package exex; + +service RemoteExEx { + rpc Subscribe(SubscribeRequest) returns (stream ExExNotification) {} +} + +message SubscribeRequest {} + +message ExExNotification { + bytes data = 1; +} \ No newline at end of file diff --git a/book/sources/exex/remote/src/consumer.rs b/book/sources/exex/remote/src/consumer.rs new file mode 100644 index 000000000..a0400f4bb --- /dev/null +++ b/book/sources/exex/remote/src/consumer.rs @@ -0,0 +1,32 @@ +use remote_exex::proto::{remote_ex_ex_client::RemoteExExClient, SubscribeRequest}; +use reth_exex::ExExNotification; +use reth_tracing::{tracing::info, RethTracer, Tracer}; + +#[tokio::main] +async fn main() -> eyre::Result<()> { + let _ = RethTracer::new().init()?; + + let mut client = RemoteExExClient::connect("http://[::1]:10000") + .await? + .max_encoding_message_size(usize::MAX) + .max_decoding_message_size(usize::MAX); + + let mut stream = client.subscribe(SubscribeRequest {}).await?.into_inner(); + while let Some(notification) = stream.message().await? { + let notification: ExExNotification = bincode::deserialize(¬ification.data)?; + + match notification { + ExExNotification::ChainCommitted { new } => { + info!(committed_chain = ?new.range(), "Received commit"); + } + ExExNotification::ChainReorged { old, new } => { + info!(from_chain = ?old.range(), to_chain = ?new.range(), "Received reorg"); + } + ExExNotification::ChainReverted { old } => { + info!(reverted_chain = ?old.range(), "Received revert"); + } + }; + } + + Ok(()) +} diff --git a/book/sources/exex/remote/src/exex.rs b/book/sources/exex/remote/src/exex.rs new file mode 100644 index 000000000..1ae4785db --- /dev/null +++ b/book/sources/exex/remote/src/exex.rs @@ -0,0 +1,87 @@ +use futures_util::TryStreamExt; +use remote_exex::proto::{ + self, + remote_ex_ex_server::{RemoteExEx, RemoteExExServer}, +}; +use reth_exex::{ExExContext, ExExEvent, ExExNotification}; +use reth_node_api::FullNodeComponents; +use reth_node_ethereum::EthereumNode; +use reth_tracing::tracing::info; +use std::sync::Arc; +use tokio::sync::{broadcast, mpsc}; +use tokio_stream::wrappers::ReceiverStream; +use tonic::{transport::Server, Request, Response, Status}; + +struct ExExService { + notifications: Arc>, +} + +#[tonic::async_trait] +impl RemoteExEx for ExExService { + type SubscribeStream = ReceiverStream>; + + async fn subscribe( + &self, + _request: Request, + ) -> Result, Status> { + let (tx, rx) = mpsc::channel(1); + + let mut notifications = self.notifications.subscribe(); + tokio::spawn(async move { + while let Ok(notification) = notifications.recv().await { + let proto_notification = proto::ExExNotification { + data: bincode::serialize(¬ification).expect("failed to serialize"), + }; + tx.send(Ok(proto_notification)) + .await + .expect("failed to send notification to client"); + + info!("Notification sent to the gRPC client"); + } + }); + + Ok(Response::new(ReceiverStream::new(rx))) + } +} + +async fn remote_exex( + mut ctx: ExExContext, + notifications: Arc>, +) -> eyre::Result<()> { + while let Some(notification) = ctx.notifications.try_next().await? 
{ + if let Some(committed_chain) = notification.committed_chain() { + ctx.events.send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; + } + + info!("Notification sent to the gRPC server"); + let _ = notifications.send(notification); + } + + Ok(()) +} + +// ANCHOR: snippet +fn main() -> eyre::Result<()> { + reth::cli::Cli::parse_args().run(|builder, _| async move { + let notifications = Arc::new(broadcast::channel(1).0); + + let server = Server::builder() + .add_service(RemoteExExServer::new(ExExService { + notifications: notifications.clone(), + })) + .serve("[::1]:10000".parse().unwrap()); + + let handle = builder + .node(EthereumNode::default()) + .install_exex("remote-exex", |ctx| async move { Ok(remote_exex(ctx, notifications)) }) + .launch() + .await?; + + handle.node.task_executor.spawn_critical("gRPC server", async move { + server.await.expect("failed to start gRPC server") + }); + + handle.wait_for_node_exit().await + }) +} +// ANCHOR_END: snippet diff --git a/book/sources/exex/remote/src/exex_1.rs b/book/sources/exex/remote/src/exex_1.rs new file mode 100644 index 000000000..09a4bcc06 --- /dev/null +++ b/book/sources/exex/remote/src/exex_1.rs @@ -0,0 +1,40 @@ +use remote_exex::proto::{ + self, + remote_ex_ex_server::{RemoteExEx, RemoteExExServer}, +}; +use reth_node_ethereum::EthereumNode; +use tokio::sync::mpsc; +use tokio_stream::wrappers::ReceiverStream; +use tonic::{transport::Server, Request, Response, Status}; + +struct ExExService {} + +#[tonic::async_trait] +impl RemoteExEx for ExExService { + type SubscribeStream = ReceiverStream>; + + async fn subscribe( + &self, + _request: Request, + ) -> Result, Status> { + let (_tx, rx) = mpsc::channel(1); + + Ok(Response::new(ReceiverStream::new(rx))) + } +} + +fn main() -> eyre::Result<()> { + reth::cli::Cli::parse_args().run(|builder, _| async move { + let server = Server::builder() + .add_service(RemoteExExServer::new(ExExService {})) + .serve("[::1]:10000".parse().unwrap()); + + let handle = builder.node(EthereumNode::default()).launch().await?; + + handle.node.task_executor.spawn_critical("gRPC server", async move { + server.await.expect("failed to start gRPC server") + }); + + handle.wait_for_node_exit().await + }) +} diff --git a/book/sources/exex/remote/src/exex_2.rs b/book/sources/exex/remote/src/exex_2.rs new file mode 100644 index 000000000..c4f51ddae --- /dev/null +++ b/book/sources/exex/remote/src/exex_2.rs @@ -0,0 +1,49 @@ +use remote_exex::proto::{ + self, + remote_ex_ex_server::{RemoteExEx, RemoteExExServer}, +}; +use reth_exex::ExExNotification; +use reth_node_ethereum::EthereumNode; +use std::sync::Arc; +use tokio::sync::{broadcast, mpsc}; +use tokio_stream::wrappers::ReceiverStream; +use tonic::{transport::Server, Request, Response, Status}; + +#[allow(dead_code)] +struct ExExService { + notifications: Arc>, +} + +#[tonic::async_trait] +impl RemoteExEx for ExExService { + type SubscribeStream = ReceiverStream>; + + async fn subscribe( + &self, + _request: Request, + ) -> Result, Status> { + let (_tx, rx) = mpsc::channel(1); + + Ok(Response::new(ReceiverStream::new(rx))) + } +} + +fn main() -> eyre::Result<()> { + reth::cli::Cli::parse_args().run(|builder, _| async move { + let notifications = Arc::new(broadcast::channel(1).0); + + let server = Server::builder() + .add_service(RemoteExExServer::new(ExExService { + notifications: notifications.clone(), + })) + .serve("[::1]:10000".parse().unwrap()); + + let handle = builder.node(EthereumNode::default()).launch().await?; + + 
handle.node.task_executor.spawn_critical("gRPC server", async move { + server.await.expect("failed to start gRPC server") + }); + + handle.wait_for_node_exit().await + }) +} diff --git a/book/sources/exex/remote/src/exex_3.rs b/book/sources/exex/remote/src/exex_3.rs new file mode 100644 index 000000000..9f264cf34 --- /dev/null +++ b/book/sources/exex/remote/src/exex_3.rs @@ -0,0 +1,65 @@ +use remote_exex::proto::{ + self, + remote_ex_ex_server::{RemoteExEx, RemoteExExServer}, +}; +use reth_exex::ExExNotification; +use reth_node_ethereum::EthereumNode; +use reth_tracing::tracing::info; +use std::sync::Arc; +use tokio::sync::{broadcast, mpsc}; +use tokio_stream::wrappers::ReceiverStream; +use tonic::{transport::Server, Request, Response, Status}; + +struct ExExService { + notifications: Arc>, +} + +// ANCHOR: snippet +#[tonic::async_trait] +impl RemoteExEx for ExExService { + type SubscribeStream = ReceiverStream>; + + async fn subscribe( + &self, + _request: Request, + ) -> Result, Status> { + let (tx, rx) = mpsc::channel(1); + + let mut notifications = self.notifications.subscribe(); + tokio::spawn(async move { + while let Ok(notification) = notifications.recv().await { + let proto_notification = proto::ExExNotification { + data: bincode::serialize(¬ification).expect("failed to serialize"), + }; + tx.send(Ok(proto_notification)) + .await + .expect("failed to send notification to client"); + + info!("Notification sent to the gRPC client"); + } + }); + + Ok(Response::new(ReceiverStream::new(rx))) + } +} +// ANCHOR_END: snippet + +fn main() -> eyre::Result<()> { + reth::cli::Cli::parse_args().run(|builder, _| async move { + let notifications = Arc::new(broadcast::channel(1).0); + + let server = Server::builder() + .add_service(RemoteExExServer::new(ExExService { + notifications: notifications.clone(), + })) + .serve("[::1]:10000".parse().unwrap()); + + let handle = builder.node(EthereumNode::default()).launch().await?; + + handle.node.task_executor.spawn_critical("gRPC server", async move { + server.await.expect("failed to start gRPC server") + }); + + handle.wait_for_node_exit().await + }) +} diff --git a/book/sources/exex/remote/src/exex_4.rs b/book/sources/exex/remote/src/exex_4.rs new file mode 100644 index 000000000..24c7bf2c2 --- /dev/null +++ b/book/sources/exex/remote/src/exex_4.rs @@ -0,0 +1,84 @@ +use futures_util::TryStreamExt; +use remote_exex::proto::{ + self, + remote_ex_ex_server::{RemoteExEx, RemoteExExServer}, +}; +use reth_exex::{ExExContext, ExExEvent, ExExNotification}; +use reth_node_api::FullNodeComponents; +use reth_node_ethereum::EthereumNode; +use reth_tracing::tracing::info; +use std::sync::Arc; +use tokio::sync::{broadcast, mpsc}; +use tokio_stream::wrappers::ReceiverStream; +use tonic::{transport::Server, Request, Response, Status}; + +struct ExExService { + notifications: Arc>, +} + +#[tonic::async_trait] +impl RemoteExEx for ExExService { + type SubscribeStream = ReceiverStream>; + + async fn subscribe( + &self, + _request: Request, + ) -> Result, Status> { + let (tx, rx) = mpsc::channel(1); + + let mut notifications = self.notifications.subscribe(); + tokio::spawn(async move { + while let Ok(notification) = notifications.recv().await { + let proto_notification = proto::ExExNotification { + data: bincode::serialize(¬ification).expect("failed to serialize"), + }; + tx.send(Ok(proto_notification)) + .await + .expect("failed to send notification to client"); + + info!("Notification sent to the gRPC client"); + } + }); + + Ok(Response::new(ReceiverStream::new(rx))) + 
} +} + +// ANCHOR: snippet +#[allow(dead_code)] +async fn remote_exex( + mut ctx: ExExContext, + notifications: Arc>, +) -> eyre::Result<()> { + while let Some(notification) = ctx.notifications.try_next().await? { + if let Some(committed_chain) = notification.committed_chain() { + ctx.events.send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; + } + + info!("Notification sent to the gRPC server"); + let _ = notifications.send(notification); + } + + Ok(()) +} +// ANCHOR_END: snippet + +fn main() -> eyre::Result<()> { + reth::cli::Cli::parse_args().run(|builder, _| async move { + let notifications = Arc::new(broadcast::channel(1).0); + + let server = Server::builder() + .add_service(RemoteExExServer::new(ExExService { + notifications: notifications.clone(), + })) + .serve("[::1]:10000".parse().unwrap()); + + let handle = builder.node(EthereumNode::default()).launch().await?; + + handle.node.task_executor.spawn_critical("gRPC server", async move { + server.await.expect("failed to start gRPC server") + }); + + handle.wait_for_node_exit().await + }) +} diff --git a/book/sources/exex/remote/src/lib.rs b/book/sources/exex/remote/src/lib.rs new file mode 100644 index 000000000..9abb458bd --- /dev/null +++ b/book/sources/exex/remote/src/lib.rs @@ -0,0 +1,3 @@ +pub mod proto { + tonic::include_proto!("exex"); +} diff --git a/book/sources/exex/tracking-state/Cargo.toml b/book/sources/exex/tracking-state/Cargo.toml new file mode 100644 index 000000000..3ce21b0c3 --- /dev/null +++ b/book/sources/exex/tracking-state/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "tracking-state" +version = "0.1.0" +edition = "2021" + +[dependencies] +reth = { git = "https://github.com/paradigmxyz/reth.git" } +reth-exex = { git = "https://github.com/paradigmxyz/reth.git", features = ["serde"] } +reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth.git"} +reth-tracing = { git = "https://github.com/paradigmxyz/reth.git" } + +eyre = "0.6" # Easy error handling +futures-util = "0.3" # Stream utilities for consuming notifications +alloy-primitives = "0.8.7" diff --git a/book/sources/exex/tracking-state/src/bin/1.rs b/book/sources/exex/tracking-state/src/bin/1.rs new file mode 100644 index 000000000..0d42e0791 --- /dev/null +++ b/book/sources/exex/tracking-state/src/bin/1.rs @@ -0,0 +1,57 @@ +use std::{ + future::Future, + pin::Pin, + task::{ready, Context, Poll}, +}; + +use futures_util::{FutureExt, TryStreamExt}; +use reth::api::FullNodeComponents; +use reth_exex::{ExExContext, ExExEvent, ExExNotification}; +use reth_node_ethereum::EthereumNode; +use reth_tracing::tracing::info; + +struct MyExEx { + ctx: ExExContext, +} + +impl Future for MyExEx { + type Output = eyre::Result<()>; + + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let this = self.get_mut(); + + while let Some(notification) = ready!(this.ctx.notifications.try_next().poll_unpin(cx))? 
{ + match ¬ification { + ExExNotification::ChainCommitted { new } => { + info!(committed_chain = ?new.range(), "Received commit"); + } + ExExNotification::ChainReorged { old, new } => { + info!(from_chain = ?old.range(), to_chain = ?new.range(), "Received reorg"); + } + ExExNotification::ChainReverted { old } => { + info!(reverted_chain = ?old.range(), "Received revert"); + } + }; + + if let Some(committed_chain) = notification.committed_chain() { + this.ctx + .events + .send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; + } + } + + Poll::Ready(Ok(())) + } +} + +fn main() -> eyre::Result<()> { + reth::cli::Cli::parse_args().run(|builder, _| async move { + let handle = builder + .node(EthereumNode::default()) + .install_exex("my-exex", |ctx| async move { Ok(MyExEx { ctx }) }) + .launch() + .await?; + + handle.wait_for_node_exit().await + }) +} diff --git a/book/sources/exex/tracking-state/src/bin/2.rs b/book/sources/exex/tracking-state/src/bin/2.rs new file mode 100644 index 000000000..941681066 --- /dev/null +++ b/book/sources/exex/tracking-state/src/bin/2.rs @@ -0,0 +1,73 @@ +use std::{ + future::Future, + pin::Pin, + task::{ready, Context, Poll}, +}; + +use alloy_primitives::BlockNumber; +use futures_util::{FutureExt, TryStreamExt}; +use reth::api::FullNodeComponents; +use reth_exex::{ExExContext, ExExEvent}; +use reth_node_ethereum::EthereumNode; +use reth_tracing::tracing::info; + +struct MyExEx { + ctx: ExExContext, + /// First block that was committed since the start of the ExEx. + first_block: Option, + /// Total number of transactions committed. + transactions: u64, +} + +impl MyExEx { + fn new(ctx: ExExContext) -> Self { + Self { ctx, first_block: None, transactions: 0 } + } +} + +impl Future for MyExEx { + type Output = eyre::Result<()>; + + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let this = self.get_mut(); + + while let Some(notification) = ready!(this.ctx.notifications.try_next().poll_unpin(cx))? 
{ + if let Some(reverted_chain) = notification.reverted_chain() { + this.transactions = this.transactions.saturating_sub( + reverted_chain.blocks_iter().map(|b| b.body.transactions.len() as u64).sum(), + ); + } + + if let Some(committed_chain) = notification.committed_chain() { + this.first_block.get_or_insert(committed_chain.first().number); + + this.transactions += committed_chain + .blocks_iter() + .map(|b| b.body.transactions.len() as u64) + .sum::(); + + this.ctx + .events + .send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; + } + + if let Some(first_block) = this.first_block { + info!(%first_block, transactions = %this.transactions, "Total number of transactions"); + } + } + + Poll::Ready(Ok(())) + } +} + +fn main() -> eyre::Result<()> { + reth::cli::Cli::parse_args().run(|builder, _| async move { + let handle = builder + .node(EthereumNode::default()) + .install_exex("my-exex", |ctx| async move { Ok(MyExEx::new(ctx)) }) + .launch() + .await?; + + handle.wait_for_node_exit().await + }) +} From a087b52dc21a0ec59b8e63dd2f8c3222739e9962 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Mon, 14 Oct 2024 11:58:15 +0100 Subject: [PATCH 039/425] perf(rpc): do not clone filter matcher on every block tracing (#11714) --- crates/rpc/rpc/src/trace.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/rpc/rpc/src/trace.rs b/crates/rpc/rpc/src/trace.rs index 687762b74..f7b2f7ab7 100644 --- a/crates/rpc/rpc/src/trace.rs +++ b/crates/rpc/rpc/src/trace.rs @@ -251,7 +251,8 @@ where &self, filter: TraceFilter, ) -> Result, Eth::Error> { - let matcher = filter.matcher(); + // We'll reuse the matcher across multiple blocks that are traced in parallel + let matcher = Arc::new(filter.matcher()); let TraceFilter { from_block, to_block, after, count, .. 
} = filter; let start = from_block.unwrap_or(0); let end = if let Some(to_block) = to_block { From 9c8360e5327794fdc9a4917ccd254d0adf441f72 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Mon, 14 Oct 2024 13:41:50 +0200 Subject: [PATCH 040/425] fix(net): remove outdated debug assert `TransactionFetcher` (#11713) --- crates/net/network/src/transactions/fetcher.rs | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/crates/net/network/src/transactions/fetcher.rs b/crates/net/network/src/transactions/fetcher.rs index 9276219d5..3e8569515 100644 --- a/crates/net/network/src/transactions/fetcher.rs +++ b/crates/net/network/src/transactions/fetcher.rs @@ -912,17 +912,6 @@ impl TransactionFetcher { // fallback peers let GetPooledTxResponse { peer_id, mut requested_hashes, result } = response; - debug_assert!( - self.active_peers.get(&peer_id).is_some(), - "`{}` has been removed from `@active_peers` before inflight request(s) resolved, broken invariant `@active_peers` and `@inflight_requests`, `%peer_id`: {}, `@hashes_fetch_inflight_and_pending_fetch` for `%requested_hashes`: {:?}", - peer_id, - peer_id, - requested_hashes.iter().map(|hash| { - let metadata = self.hashes_fetch_inflight_and_pending_fetch.get(hash); - (*hash, metadata.map(|m| (m.retries, m.tx_encoded_length))) - }).collect::)>)>>() - ); - self.decrement_inflight_request_count_for(&peer_id); match result { From c05a90054263f6da4ff581a5a7d933261767f0c5 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Mon, 14 Oct 2024 21:44:28 +0900 Subject: [PATCH 041/425] perf(rpc): use `Arc` on cache and rpc (#11635) --- Cargo.lock | 1 + crates/ethereum/payload/src/lib.rs | 2 +- crates/evm/src/lib.rs | 10 +- crates/optimism/evm/src/l1.rs | 13 +- crates/optimism/node/src/txpool.rs | 2 +- crates/optimism/payload/src/builder.rs | 4 +- crates/optimism/rpc/src/eth/block.rs | 2 +- crates/optimism/rpc/src/eth/receipt.rs | 5 +- crates/primitives/src/transaction/mod.rs | 5 + crates/rpc/rpc-eth-api/src/helpers/block.rs | 25 +-- crates/rpc/rpc-eth-api/src/helpers/call.rs | 27 +-- crates/rpc/rpc-eth-api/src/helpers/fee.rs | 14 +- .../rpc-eth-api/src/helpers/pending_block.rs | 2 +- crates/rpc/rpc-eth-api/src/helpers/trace.rs | 10 +- .../rpc-eth-api/src/helpers/transaction.rs | 30 +-- crates/rpc/rpc-eth-types/Cargo.toml | 1 + crates/rpc/rpc-eth-types/src/cache/mod.rs | 180 +++++------------- crates/rpc/rpc-eth-types/src/fee_history.rs | 17 +- crates/rpc/rpc-eth-types/src/gas_oracle.rs | 14 +- crates/rpc/rpc-eth-types/src/logs_utils.rs | 7 +- crates/rpc/rpc/src/debug.rs | 17 +- crates/rpc/rpc/src/eth/filter.rs | 5 +- crates/rpc/rpc/src/trace.rs | 10 +- 23 files changed, 168 insertions(+), 235 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3e3fd18a8..3c5639ead 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8745,6 +8745,7 @@ dependencies = [ "alloy-sol-types", "derive_more 1.0.0", "futures", + "itertools 0.13.0", "jsonrpsee-core", "jsonrpsee-types", "metrics", diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 5d2fcde2f..97237efa8 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -228,7 +228,7 @@ where let env = EnvWithHandlerCfg::new_with_cfg_env( initialized_cfg.clone(), initialized_block_env.clone(), - evm_config.tx_env(&tx), + evm_config.tx_env(tx.as_signed(), tx.signer()), ); // Configure the environment for the block. 
diff --git a/crates/evm/src/lib.rs b/crates/evm/src/lib.rs index 6fcb3d9f8..e0d45f04c 100644 --- a/crates/evm/src/lib.rs +++ b/crates/evm/src/lib.rs @@ -11,11 +11,9 @@ extern crate alloc; -use core::ops::Deref; - use crate::builder::RethEvmBuilder; use alloy_primitives::{Address, Bytes, B256, U256}; -use reth_primitives::{TransactionSigned, TransactionSignedEcRecovered}; +use reth_primitives::TransactionSigned; use reth_primitives_traits::BlockHeader; use revm::{Database, Evm, GetInspector}; use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, Env, EnvWithHandlerCfg, SpecId, TxEnv}; @@ -111,10 +109,10 @@ pub trait ConfigureEvmEnv: Send + Sync + Unpin + Clone + 'static { /// The header type used by the EVM. type Header: BlockHeader; - /// Returns a [`TxEnv`] from a [`TransactionSignedEcRecovered`]. - fn tx_env(&self, transaction: &TransactionSignedEcRecovered) -> TxEnv { + /// Returns a [`TxEnv`] from a [`TransactionSigned`] and [`Address`]. + fn tx_env(&self, transaction: &TransactionSigned, signer: Address) -> TxEnv { let mut tx_env = TxEnv::default(); - self.fill_tx_env(&mut tx_env, transaction.deref(), transaction.signer()); + self.fill_tx_env(&mut tx_env, transaction, signer); tx_env } diff --git a/crates/optimism/evm/src/l1.rs b/crates/optimism/evm/src/l1.rs index 18ccbed95..3412501eb 100644 --- a/crates/optimism/evm/src/l1.rs +++ b/crates/optimism/evm/src/l1.rs @@ -6,7 +6,7 @@ use reth_chainspec::ChainSpec; use reth_execution_errors::BlockExecutionError; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_forks::OptimismHardfork; -use reth_primitives::Block; +use reth_primitives::BlockBody; use revm::{ primitives::{Bytecode, HashMap, SpecId}, DatabaseCommit, L1BlockInfo, @@ -31,9 +31,8 @@ const L1_BLOCK_ECOTONE_SELECTOR: [u8; 4] = hex!("440a5e20"); /// transaction in the L2 block. /// /// Returns an error if the L1 info transaction is not found, if the block is empty. -pub fn extract_l1_info(block: &Block) -> Result { - let l1_info_tx_data = block - .body +pub fn extract_l1_info(body: &BlockBody) -> Result { + let l1_info_tx_data = body .transactions .first() .ok_or_else(|| OptimismBlockExecutionError::L1BlockInfoError { @@ -302,7 +301,7 @@ mod tests { use alloy_eips::eip2718::Decodable2718; use reth_optimism_chainspec::OP_MAINNET; use reth_optimism_forks::OptimismHardforks; - use reth_primitives::{BlockBody, TransactionSigned}; + use reth_primitives::{Block, BlockBody, TransactionSigned}; use super::*; @@ -318,7 +317,7 @@ mod tests { body: BlockBody { transactions: vec![l1_info_tx], ..Default::default() }, }; - let l1_info: L1BlockInfo = extract_l1_info(&mock_block).unwrap(); + let l1_info: L1BlockInfo = extract_l1_info(&mock_block.body).unwrap(); assert_eq!(l1_info.l1_base_fee, U256::from(652_114)); assert_eq!(l1_info.l1_fee_overhead, Some(U256::from(2100))); assert_eq!(l1_info.l1_base_fee_scalar, U256::from(1_000_000)); @@ -358,7 +357,7 @@ mod tests { // test - let l1_block_info: L1BlockInfo = extract_l1_info(&block).unwrap(); + let l1_block_info: L1BlockInfo = extract_l1_info(&block.body).unwrap(); assert_eq!(l1_block_info.l1_base_fee, expected_l1_base_fee); assert_eq!(l1_block_info.l1_base_fee_scalar, expected_l1_base_fee_scalar); diff --git a/crates/optimism/node/src/txpool.rs b/crates/optimism/node/src/txpool.rs index 7ed2a161d..09aa76fef 100644 --- a/crates/optimism/node/src/txpool.rs +++ b/crates/optimism/node/src/txpool.rs @@ -99,7 +99,7 @@ where /// Update the L1 block info. 
fn update_l1_block_info(&self, block: &Block) { self.block_info.timestamp.store(block.timestamp, Ordering::Relaxed); - if let Ok(cost_addition) = reth_optimism_evm::extract_l1_info(block) { + if let Ok(cost_addition) = reth_optimism_evm::extract_l1_info(&block.body) { *self.block_info.l1_block_info.write() = cost_addition; } } diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 5e9c1b5d1..2ff3d3342 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -275,7 +275,7 @@ where let env = EnvWithHandlerCfg::new_with_cfg_env( initialized_cfg.clone(), initialized_block_env.clone(), - evm_config.tx_env(&sequencer_tx), + evm_config.tx_env(sequencer_tx.as_signed(), sequencer_tx.signer()), ); let mut evm = evm_config.evm_with_env(&mut db, env); @@ -356,7 +356,7 @@ where let env = EnvWithHandlerCfg::new_with_cfg_env( initialized_cfg.clone(), initialized_block_env.clone(), - evm_config.tx_env(&tx), + evm_config.tx_env(tx.as_signed(), tx.signer()), ); // Configure the environment for the block. diff --git a/crates/optimism/rpc/src/eth/block.rs b/crates/optimism/rpc/src/eth/block.rs index d5066be0c..dfdd09608 100644 --- a/crates/optimism/rpc/src/eth/block.rs +++ b/crates/optimism/rpc/src/eth/block.rs @@ -45,7 +45,7 @@ where let block = block.unseal(); let l1_block_info = - reth_optimism_evm::extract_l1_info(&block).map_err(OpEthApiError::from)?; + reth_optimism_evm::extract_l1_info(&block.body).map_err(OpEthApiError::from)?; return block .body diff --git a/crates/optimism/rpc/src/eth/receipt.rs b/crates/optimism/rpc/src/eth/receipt.rs index fac8d220c..cc6851041 100644 --- a/crates/optimism/rpc/src/eth/receipt.rs +++ b/crates/optimism/rpc/src/eth/receipt.rs @@ -40,9 +40,8 @@ where meta.block_hash.into(), )))?; - let block = block.unseal(); let l1_block_info = - reth_optimism_evm::extract_l1_info(&block).map_err(OpEthApiError::from)?; + reth_optimism_evm::extract_l1_info(&block.body).map_err(OpEthApiError::from)?; Ok(OpReceiptBuilder::new( &self.inner.provider().chain_spec(), @@ -353,7 +352,7 @@ mod test { }; let l1_block_info = - reth_optimism_evm::extract_l1_info(&block).expect("should extract l1 info"); + reth_optimism_evm::extract_l1_info(&block.body).expect("should extract l1 info"); // test assert!(OP_MAINNET.is_fjord_active_at_timestamp(BLOCK_124665056_TIMESTAMP)); diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 7ef2c0c1f..3622b9531 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1472,6 +1472,11 @@ impl TransactionSignedEcRecovered { self.signer } + /// Returns a reference to [`TransactionSigned`] + pub const fn as_signed(&self) -> &TransactionSigned { + &self.signed_transaction + } + /// Transform back to [`TransactionSigned`] pub fn into_signed(self) -> TransactionSigned { self.signed_transaction diff --git a/crates/rpc/rpc-eth-api/src/helpers/block.rs b/crates/rpc/rpc-eth-api/src/helpers/block.rs index 898037f4a..5d8496abd 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/block.rs @@ -64,7 +64,7 @@ pub trait EthBlocks: LoadBlock { } let block = from_block::( - block.unseal(), + (*block).clone().unseal(), total_difficulty.unwrap_or_default(), full.into(), Some(block_hash), @@ -100,10 +100,10 @@ pub trait EthBlocks: LoadBlock { Ok(self .cache() - .get_block_transactions(block_hash) + .get_sealed_block_with_senders(block_hash) .await 
.map_err(Self::Error::from_eth_err)? - .map(|txs| txs.len())) + .map(|b| b.body.transactions.len())) } } @@ -150,6 +150,7 @@ pub trait EthBlocks: LoadBlock { .get_block_and_receipts(block_hash) .await .map_err(Self::Error::from_eth_err) + .map(|b| b.map(|(b, r)| (b.block.clone(), r))) } Ok(None) @@ -208,23 +209,11 @@ pub trait LoadBlock: LoadPendingBlock + SpawnBlocking { /// Data access in default (L1) trait method implementations. fn cache(&self) -> &EthStateCache; - /// Returns the block object for the given block id. - fn block( - &self, - block_id: BlockId, - ) -> impl Future, Self::Error>> + Send { - async move { - self.block_with_senders(block_id) - .await - .map(|maybe_block| maybe_block.map(|block| block.block)) - } - } - /// Returns the block object for the given block id. fn block_with_senders( &self, block_id: BlockId, - ) -> impl Future, Self::Error>> + Send { + ) -> impl Future>, Self::Error>> + Send { async move { if block_id.is_pending() { // Pending block can be fetched directly without need for caching @@ -232,11 +221,11 @@ pub trait LoadBlock: LoadPendingBlock + SpawnBlocking { .pending_block_with_senders() .map_err(Self::Error::from_eth_err)?; return if maybe_pending.is_some() { - Ok(maybe_pending) + Ok(maybe_pending.map(Arc::new)) } else { // If no pending block from provider, try to get local pending block return match self.local_pending_block().await? { - Some((block, _)) => Ok(Some(block)), + Some((block, _)) => Ok(Some(Arc::new(block))), None => Ok(None), }; }; diff --git a/crates/rpc/rpc-eth-api/src/helpers/call.rs b/crates/rpc/rpc-eth-api/src/helpers/call.rs index 5f6f3d77f..ff6e93006 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/call.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/call.rs @@ -5,7 +5,7 @@ use crate::{ AsEthApiError, FromEthApiError, FromEvmError, FullEthApiTypes, IntoEthApiError, RpcBlock, }; use alloy_eips::{eip1559::calc_next_block_base_fee, eip2930::AccessListResult}; -use alloy_primitives::{Bytes, TxKind, B256, U256}; +use alloy_primitives::{Address, Bytes, TxKind, B256, U256}; use alloy_rpc_types::{ simulate::{SimBlock, SimulatePayload, SimulatedBlock}, state::{EvmOverrides, StateOverride}, @@ -20,7 +20,7 @@ use reth_primitives::{ BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ExecutionResult, HaltReason, ResultAndState, TransactTo, TxEnv, }, - Header, TransactionSignedEcRecovered, + Header, TransactionSigned, }; use reth_provider::{BlockIdReader, ChainSpecProvider, HeaderProvider, StateProvider}; use reth_revm::{database::StateProviderDatabase, db::CacheDB, DatabaseRef}; @@ -92,7 +92,8 @@ pub trait EthCall: Call + LoadPendingBlock { // Gas cap for entire operation let total_gas_limit = self.call_gas_limit(); - let base_block = self.block(block).await?.ok_or(EthApiError::HeaderNotFound(block))?; + let base_block = + self.block_with_senders(block).await?.ok_or(EthApiError::HeaderNotFound(block))?; let mut parent_hash = base_block.header.hash(); let total_difficulty = LoadPendingBlock::provider(self) .header_td_by_number(block_env.number.to()) @@ -292,12 +293,12 @@ pub trait EthCall: Call + LoadPendingBlock { if replay_block_txs { // only need to replay the transactions in the block if not all transactions are // to be replayed - let transactions = block.into_transactions_ecrecovered().take(num_txs); - for tx in transactions { + let transactions = block.transactions_with_sender().take(num_txs); + for (signer, tx) in transactions { let env = EnvWithHandlerCfg::new_with_cfg_env( cfg.clone(), block_env.clone(), - 
Call::evm_config(&this).tx_env(&tx), + Call::evm_config(&this).tx_env(tx, *signer), ); let (res, _) = this.transact(&mut db, env)?; db.commit(res.state); @@ -605,11 +606,11 @@ pub trait Call: LoadState + SpawnBlocking { // we need to get the state of the parent block because we're essentially replaying the // block the transaction is included in let parent_block = block.parent_hash; - let block_txs = block.into_transactions_ecrecovered(); let this = self.clone(); self.spawn_with_state_at_block(parent_block.into(), move |state| { let mut db = CacheDB::new(StateProviderDatabase::new(state)); + let block_txs = block.transactions_with_sender(); // replay all transactions prior to the targeted transaction this.replay_transactions_until( @@ -623,7 +624,7 @@ pub trait Call: LoadState + SpawnBlocking { let env = EnvWithHandlerCfg::new_with_cfg_env( cfg, block_env, - Call::evm_config(&this).tx_env(&tx), + Call::evm_config(&this).tx_env(tx.as_signed(), tx.signer()), ); let (res, _) = this.transact(&mut db, env)?; @@ -641,30 +642,30 @@ pub trait Call: LoadState + SpawnBlocking { /// /// Note: This assumes the target transaction is in the given iterator. /// Returns the index of the target transaction in the given iterator. - fn replay_transactions_until( + fn replay_transactions_until<'a, DB, I>( &self, db: &mut CacheDB, cfg: CfgEnvWithHandlerCfg, block_env: BlockEnv, - transactions: impl IntoIterator, + transactions: I, target_tx_hash: B256, ) -> Result where DB: DatabaseRef, EthApiError: From, + I: IntoIterator, { let env = EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, Default::default()); let mut evm = self.evm_config().evm_with_env(db, env); let mut index = 0; - for tx in transactions { + for (sender, tx) in transactions { if tx.hash() == target_tx_hash { // reached the target transaction break } - let sender = tx.signer(); - self.evm_config().fill_tx_env(evm.tx_mut(), &tx.into_signed(), sender); + self.evm_config().fill_tx_env(evm.tx_mut(), tx, *sender); evm.transact_commit().map_err(Self::Error::from_evm_err)?; index += 1; } diff --git a/crates/rpc/rpc-eth-api/src/helpers/fee.rs b/crates/rpc/rpc-eth-api/src/helpers/fee.rs index 52ccabebf..34ba6dc7e 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/fee.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/fee.rs @@ -172,8 +172,8 @@ pub trait EthFees: LoadFee { // Percentiles were specified, so we need to collect reward percentile ino if let Some(percentiles) = &reward_percentiles { - let (transactions, receipts) = LoadFee::cache(self) - .get_transactions_and_receipts(header.hash()) + let (block, receipts) = LoadFee::cache(self) + .get_block_and_receipts(header.hash()) .await .map_err(Self::Error::from_eth_err)? .ok_or(EthApiError::InvalidBlockRange)?; @@ -182,7 +182,7 @@ pub trait EthFees: LoadFee { percentiles, header.gas_used, header.base_fee_per_gas.unwrap_or_default(), - &transactions, + &block.body.transactions, &receipts, ) .unwrap_or_default(), @@ -296,7 +296,7 @@ pub trait LoadFee: LoadBlock { None => { // fetch pending base fee let base_fee = self - .block(BlockNumberOrTag::Pending.into()) + .block_with_senders(BlockNumberOrTag::Pending.into()) .await? .ok_or(EthApiError::HeaderNotFound(BlockNumberOrTag::Pending.into()))? 
.base_fee_per_gas @@ -332,7 +332,7 @@ pub trait LoadFee: LoadBlock { /// /// See also: fn gas_price(&self) -> impl Future> + Send { - let header = self.block(BlockNumberOrTag::Latest.into()); + let header = self.block_with_senders(BlockNumberOrTag::Latest.into()); let suggested_tip = self.suggested_priority_fee(); async move { let (header, suggested_tip) = futures::try_join!(header, suggested_tip)?; @@ -344,9 +344,9 @@ pub trait LoadFee: LoadBlock { /// Returns a suggestion for a base fee for blob transactions. fn blob_base_fee(&self) -> impl Future> + Send { async move { - self.block(BlockNumberOrTag::Latest.into()) + self.block_with_senders(BlockNumberOrTag::Latest.into()) .await? - .and_then(|h: reth_primitives::SealedBlock| h.next_block_blob_fee()) + .and_then(|h| h.next_block_blob_fee()) .ok_or(EthApiError::ExcessBlobGasNotSet.into()) .map(U256::from) } diff --git a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs index 8014851e3..03597289f 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs @@ -323,7 +323,7 @@ pub trait LoadPendingBlock: EthApiTypes { let env = Env::boxed( cfg.cfg_env.clone(), block_env.clone(), - Self::evm_config(self).tx_env(&tx), + Self::evm_config(self).tx_env(tx.as_signed(), tx.signer()), ); let mut evm = revm::Evm::builder().with_env(env).with_db(&mut db).build(); diff --git a/crates/rpc/rpc-eth-api/src/helpers/trace.rs b/crates/rpc/rpc-eth-api/src/helpers/trace.rs index 7b5d13b2a..2dc1c5b47 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/trace.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/trace.rs @@ -191,11 +191,11 @@ pub trait Trace: LoadState { // block the transaction is included in let parent_block = block.parent_hash; let parent_beacon_block_root = block.parent_beacon_block_root; - let block_txs = block.into_transactions_ecrecovered(); let this = self.clone(); self.spawn_with_state_at_block(parent_block.into(), move |state| { let mut db = CacheDB::new(StateProviderDatabase::new(state)); + let block_txs = block.transactions_with_sender(); // apply relevant system calls let mut system_caller = SystemCaller::new( @@ -227,7 +227,7 @@ pub trait Trace: LoadState { let env = EnvWithHandlerCfg::new_with_cfg_env( cfg, block_env, - Call::evm_config(&this).tx_env(&tx), + Call::evm_config(&this).tx_env(tx.as_signed(), tx.signer()), ); let (res, _) = this.inspect(StateCacheDbRefMutWrapper(&mut db), env, &mut inspector)?; @@ -356,10 +356,10 @@ pub trait Trace: LoadState { let mut results = Vec::with_capacity(max_transactions); let mut transactions = block - .into_transactions_ecrecovered() + .transactions_with_sender() .take(max_transactions) .enumerate() - .map(|(idx, tx)| { + .map(|(idx, (signer, tx))| { let tx_info = TransactionInfo { hash: Some(tx.hash()), index: Some(idx as u64), @@ -367,7 +367,7 @@ pub trait Trace: LoadState { block_number: Some(block_number), base_fee: Some(base_fee), }; - let tx_env = Trace::evm_config(&this).tx_env(&tx); + let tx_env = Trace::evm_config(&this).tx_env(tx, *signer); (tx_info, tx_env) }) .peekable(); diff --git a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs index 94dd04414..4c2493717 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs @@ -18,6 +18,7 @@ use reth_rpc_eth_types::{ }; use reth_rpc_types_compat::transaction::{from_recovered, from_recovered_with_block_context}; use 
reth_transaction_pool::{PoolTransaction, TransactionOrigin, TransactionPool}; +use std::sync::Arc; use crate::{FromEthApiError, FullEthApiTypes, IntoEthApiError, RpcReceipt, RpcTransaction}; @@ -79,7 +80,11 @@ pub trait EthTransactions: LoadTransaction { block: B256, ) -> impl Future>, Self::Error>> + Send { async move { - self.cache().get_block_transactions(block).await.map_err(Self::Error::from_eth_err) + self.cache() + .get_sealed_block_with_senders(block) + .await + .map(|b| b.map(|b| b.body.transactions.clone())) + .map_err(Self::Error::from_eth_err) } } @@ -194,7 +199,7 @@ pub trait EthTransactions: LoadTransaction { let block_hash = block.hash(); let block_number = block.number; let base_fee_per_gas = block.base_fee_per_gas; - if let Some(tx) = block.into_transactions_ecrecovered().nth(index) { + if let Some((signer, tx)) = block.transactions_with_sender().nth(index) { let tx_info = TransactionInfo { hash: Some(tx.hash()), block_hash: Some(block_hash), @@ -204,7 +209,8 @@ pub trait EthTransactions: LoadTransaction { }; return Ok(Some(from_recovered_with_block_context::( - tx, tx_info, + tx.clone().with_signer(*signer), + tx_info, ))) } } @@ -270,10 +276,10 @@ pub trait EthTransactions: LoadTransaction { let base_fee_per_gas = block.base_fee_per_gas; block - .into_transactions_ecrecovered() + .transactions_with_sender() .enumerate() - .find(|(_, tx)| tx.signer() == sender && tx.nonce() == nonce) - .map(|(index, tx)| { + .find(|(_, (signer, tx))| **signer == sender && tx.nonce() == nonce) + .map(|(index, (signer, tx))| { let tx_info = TransactionInfo { hash: Some(tx.hash()), block_hash: Some(block_hash), @@ -282,7 +288,8 @@ pub trait EthTransactions: LoadTransaction { index: Some(index as u64), }; from_recovered_with_block_context::( - tx, tx_info, + tx.clone().with_signer(*signer), + tx_info, ) }) }) @@ -544,8 +551,9 @@ pub trait LoadTransaction: SpawnBlocking + FullEthApiTypes { fn transaction_and_block( &self, hash: B256, - ) -> impl Future, Self::Error>> - + Send { + ) -> impl Future< + Output = Result)>, Self::Error>, + > + Send { async move { let (transaction, at) = match self.transaction_by_hash_at(hash).await? 
{ None => return Ok(None), @@ -559,10 +567,10 @@ pub trait LoadTransaction: SpawnBlocking + FullEthApiTypes { }; let block = self .cache() - .get_block_with_senders(block_hash) + .get_sealed_block_with_senders(block_hash) .await .map_err(Self::Error::from_eth_err)?; - Ok(block.map(|block| (transaction, (*block).clone().seal(block_hash)))) + Ok(block.map(|block| (transaction, block))) } } } diff --git a/crates/rpc/rpc-eth-types/Cargo.toml b/crates/rpc/rpc-eth-types/Cargo.toml index 2c6f51ff4..46a0d7b5c 100644 --- a/crates/rpc/rpc-eth-types/Cargo.toml +++ b/crates/rpc/rpc-eth-types/Cargo.toml @@ -58,6 +58,7 @@ derive_more.workspace = true schnellru.workspace = true rand.workspace = true tracing.workspace = true +itertools.workspace = true [dev-dependencies] serde_json.workspace = true diff --git a/crates/rpc/rpc-eth-types/src/cache/mod.rs b/crates/rpc/rpc-eth-types/src/cache/mod.rs index e8353ecc4..cbf05f276 100644 --- a/crates/rpc/rpc-eth-types/src/cache/mod.rs +++ b/crates/rpc/rpc-eth-types/src/cache/mod.rs @@ -7,8 +7,7 @@ use reth_errors::{ProviderError, ProviderResult}; use reth_evm::{provider::EvmEnvProvider, ConfigureEvm}; use reth_execution_types::Chain; use reth_primitives::{ - Block, BlockHashOrNumber, BlockWithSenders, Header, Receipt, SealedBlock, - SealedBlockWithSenders, TransactionSigned, TransactionSignedEcRecovered, + BlockHashOrNumber, Header, Receipt, SealedBlockWithSenders, TransactionSigned, }; use reth_storage_api::{BlockReader, StateProviderFactory, TransactionVariant}; use reth_tasks::{TaskSpawner, TokioTaskExecutor}; @@ -33,13 +32,13 @@ pub mod db; pub mod metrics; pub mod multi_consumer; -/// The type that can send the response to a requested [`Block`] +/// The type that can send the response to a requested [`SealedBlockWithSenders`] type BlockTransactionsResponseSender = oneshot::Sender>>>; -/// The type that can send the response to a requested [`BlockWithSenders`] +/// The type that can send the response to a requested [`SealedBlockWithSenders`] type BlockWithSendersResponseSender = - oneshot::Sender>>>; + oneshot::Sender>>>; /// The type that can send the response to the requested receipts of a block. type ReceiptsResponseSender = oneshot::Sender>>>>; @@ -49,7 +48,7 @@ type EnvResponseSender = oneshot::Sender = MultiConsumerLruCache< B256, - Arc, + Arc, L, Either, >; @@ -142,92 +141,18 @@ impl EthStateCache { this } - /// Requests the [`Block`] for the block hash - /// - /// Returns `None` if the block does not exist. - pub async fn get_block(&self, block_hash: B256) -> ProviderResult> { - let (response_tx, rx) = oneshot::channel(); - let _ = self.to_service.send(CacheAction::GetBlockWithSenders { block_hash, response_tx }); - let block_with_senders_res = - rx.await.map_err(|_| ProviderError::CacheServiceUnavailable)?; - - if let Ok(Some(block_with_senders)) = block_with_senders_res { - Ok(Some(block_with_senders.block.clone())) - } else { - Ok(None) - } - } - - /// Requests the [`Block`] for the block hash, sealed with the given block hash. - /// - /// Returns `None` if the block does not exist. - pub async fn get_sealed_block(&self, block_hash: B256) -> ProviderResult> { - Ok(self.get_block(block_hash).await?.map(|block| block.seal(block_hash))) - } - - /// Requests the transactions of the [`Block`] - /// - /// Returns `None` if the block does not exist. 
- pub async fn get_block_transactions( - &self, - block_hash: B256, - ) -> ProviderResult>> { - let (response_tx, rx) = oneshot::channel(); - let _ = self.to_service.send(CacheAction::GetBlockTransactions { block_hash, response_tx }); - rx.await.map_err(|_| ProviderError::CacheServiceUnavailable)? - } - - /// Requests the ecrecovered transactions of the [`Block`] - /// - /// Returns `None` if the block does not exist. - pub async fn get_block_transactions_ecrecovered( - &self, - block_hash: B256, - ) -> ProviderResult>> { - Ok(self - .get_block_with_senders(block_hash) - .await? - .map(|block| (*block).clone().into_transactions_ecrecovered().collect())) - } - - /// Fetches both transactions and receipts for the given block hash. - pub async fn get_transactions_and_receipts( - &self, - block_hash: B256, - ) -> ProviderResult, Arc>)>> { - let transactions = self.get_block_transactions(block_hash); - let receipts = self.get_receipts(block_hash); - - let (transactions, receipts) = futures::try_join!(transactions, receipts)?; - - Ok(transactions.zip(receipts)) - } - - /// Requests the [`BlockWithSenders`] for the block hash + /// Requests the [`SealedBlockWithSenders`] for the block hash /// /// Returns `None` if the block does not exist. - pub async fn get_block_with_senders( + pub async fn get_sealed_block_with_senders( &self, block_hash: B256, - ) -> ProviderResult>> { + ) -> ProviderResult>> { let (response_tx, rx) = oneshot::channel(); let _ = self.to_service.send(CacheAction::GetBlockWithSenders { block_hash, response_tx }); rx.await.map_err(|_| ProviderError::CacheServiceUnavailable)? } - /// Requests the [`SealedBlockWithSenders`] for the block hash - /// - /// Returns `None` if the block does not exist. - pub async fn get_sealed_block_with_senders( - &self, - block_hash: B256, - ) -> ProviderResult> { - Ok(self - .get_block_with_senders(block_hash) - .await? - .map(|block| (*block).clone().seal(block_hash))) - } - /// Requests the [Receipt] for the block hash /// /// Returns `None` if the block was not found. 
@@ -244,8 +169,8 @@ impl EthStateCache { pub async fn get_block_and_receipts( &self, block_hash: B256, - ) -> ProviderResult>)>> { - let block = self.get_sealed_block(block_hash); + ) -> ProviderResult, Arc>)>> { + let block = self.get_sealed_block_with_senders(block_hash); let receipts = self.get_receipts(block_hash); let (block, receipts) = futures::try_join!(block, receipts)?; @@ -292,7 +217,7 @@ pub(crate) struct EthStateCacheService< LimitReceipts = ByLength, LimitEnvs = ByLength, > where - LimitBlocks: Limiter>, + LimitBlocks: Limiter>, LimitReceipts: Limiter>>, LimitEnvs: Limiter, { @@ -325,7 +250,7 @@ where fn on_new_block( &mut self, block_hash: B256, - res: ProviderResult>>, + res: ProviderResult>>, ) { if let Some(queued) = self.full_block_cache.remove(&block_hash) { // send the response to queued senders @@ -367,7 +292,11 @@ where } } - fn on_reorg_block(&mut self, block_hash: B256, res: ProviderResult>) { + fn on_reorg_block( + &mut self, + block_hash: B256, + res: ProviderResult>, + ) { let res = res.map(|b| b.map(Arc::new)); if let Some(queued) = self.full_block_cache.remove(&block_hash) { // send the response to queued senders @@ -441,7 +370,7 @@ where // Only look in the database to prevent situations where we // looking up the tree is blocking let block_sender = provider - .block_with_senders( + .sealed_block_with_senders( BlockHashOrNumber::Hash(block_hash), TransactionVariant::WithHash, ) @@ -453,36 +382,6 @@ where })); } } - CacheAction::GetBlockTransactions { block_hash, response_tx } => { - // check if block is cached - if let Some(block) = this.full_block_cache.get(&block_hash) { - let _ = response_tx.send(Ok(Some(block.body.transactions.clone()))); - continue - } - - // block is not in the cache, request it if this is the first consumer - if this.full_block_cache.queue(block_hash, Either::Right(response_tx)) { - let provider = this.provider.clone(); - let action_tx = this.action_tx.clone(); - let rate_limiter = this.rate_limiter.clone(); - this.action_task_spawner.spawn_blocking(Box::pin(async move { - // Acquire permit - let _permit = rate_limiter.acquire().await; - // Only look in the database to prevent situations where we - // looking up the tree is blocking - let res = provider - .block_with_senders( - BlockHashOrNumber::Hash(block_hash), - TransactionVariant::WithHash, - ) - .map(|b| b.map(Arc::new)); - let _ = action_tx.send(CacheAction::BlockWithSendersResult { - block_hash, - res, - }); - })); - } - } CacheAction::GetReceipts { block_hash, response_tx } => { // check if block is cached if let Some(receipts) = this.receipts_cache.get(&block_hash).cloned() { @@ -574,7 +473,7 @@ where } CacheAction::CacheNewCanonicalChain { chain_change } => { for block in chain_change.blocks { - this.on_new_block(block.hash(), Ok(Some(Arc::new(block.unseal())))); + this.on_new_block(block.hash(), Ok(Some(Arc::new(block)))); } for block_receipts in chain_change.receipts { @@ -588,7 +487,7 @@ where } CacheAction::RemoveReorgedChain { chain_change } => { for block in chain_change.blocks { - this.on_reorg_block(block.hash(), Ok(Some(block.unseal()))); + this.on_reorg_block(block.hash(), Ok(Some(block))); } for block_receipts in chain_change.receipts { @@ -610,15 +509,36 @@ where /// All message variants sent through the channel enum CacheAction { - GetBlockWithSenders { block_hash: B256, response_tx: BlockWithSendersResponseSender }, - GetBlockTransactions { block_hash: B256, response_tx: BlockTransactionsResponseSender }, - GetEnv { block_hash: B256, response_tx: 
EnvResponseSender }, - GetReceipts { block_hash: B256, response_tx: ReceiptsResponseSender }, - BlockWithSendersResult { block_hash: B256, res: ProviderResult>> }, - ReceiptsResult { block_hash: B256, res: ProviderResult>>> }, - EnvResult { block_hash: B256, res: Box> }, - CacheNewCanonicalChain { chain_change: ChainChange }, - RemoveReorgedChain { chain_change: ChainChange }, + GetBlockWithSenders { + block_hash: B256, + response_tx: BlockWithSendersResponseSender, + }, + GetEnv { + block_hash: B256, + response_tx: EnvResponseSender, + }, + GetReceipts { + block_hash: B256, + response_tx: ReceiptsResponseSender, + }, + BlockWithSendersResult { + block_hash: B256, + res: ProviderResult>>, + }, + ReceiptsResult { + block_hash: B256, + res: ProviderResult>>>, + }, + EnvResult { + block_hash: B256, + res: Box>, + }, + CacheNewCanonicalChain { + chain_change: ChainChange, + }, + RemoveReorgedChain { + chain_change: ChainChange, + }, } struct BlockReceipts { diff --git a/crates/rpc/rpc-eth-types/src/fee_history.rs b/crates/rpc/rpc-eth-types/src/fee_history.rs index 08ac56845..57dd276e5 100644 --- a/crates/rpc/rpc-eth-types/src/fee_history.rs +++ b/crates/rpc/rpc-eth-types/src/fee_history.rs @@ -73,16 +73,16 @@ impl FeeHistoryCache { } /// Insert block data into the cache. - async fn insert_blocks(&self, blocks: I) + async fn insert_blocks<'a, I>(&self, blocks: I) where - I: IntoIterator>)>, + I: IntoIterator>)>, { let mut entries = self.inner.entries.write().await; let percentiles = self.predefined_percentiles(); // Insert all new blocks and calculate approximated rewards for (block, receipts) in blocks { - let mut fee_history_entry = FeeHistoryEntry::new(&block); + let mut fee_history_entry = FeeHistoryEntry::new(block); fee_history_entry.rewards = calculate_reward_percentiles_for_block( &percentiles, fee_history_entry.gas_used, @@ -237,7 +237,9 @@ pub async fn fee_history_cache_new_blocks_task( tokio::select! { res = &mut fetch_missing_block => { if let Ok(res) = res { - fee_history_cache.insert_blocks(res.into_iter()).await; + fee_history_cache.insert_blocks(res.as_ref() + .map(|(b, r)| (&b.block, r.clone())) + .into_iter()).await; } } event = events.next() => { @@ -245,11 +247,12 @@ pub async fn fee_history_cache_new_blocks_task( // the stream ended, we are done break; }; - let (blocks, receipts): (Vec<_>, Vec<_>) = event - .committed() + + let committed = event .committed(); + let (blocks, receipts): (Vec<_>, Vec<_>) = committed .blocks_and_receipts() .map(|(block, receipts)| { - (block.block.clone(), Arc::new(receipts.iter().flatten().cloned().collect::>())) + (&block.block, Arc::new(receipts.iter().flatten().cloned().collect::>())) }) .unzip(); fee_history_cache.insert_blocks(blocks.into_iter().zip(receipts)).await; diff --git a/crates/rpc/rpc-eth-types/src/gas_oracle.rs b/crates/rpc/rpc-eth-types/src/gas_oracle.rs index 01591bc4d..84e7ab830 100644 --- a/crates/rpc/rpc-eth-types/src/gas_oracle.rs +++ b/crates/rpc/rpc-eth-types/src/gas_oracle.rs @@ -1,16 +1,16 @@ //! An implementation of the eth gas price oracle, used for providing gas price estimates based on //! previous blocks. 
-use std::fmt::{self, Debug, Formatter}; - use alloy_primitives::{B256, U256}; use alloy_rpc_types::BlockId; use derive_more::{Deref, DerefMut, From, Into}; +use itertools::Itertools; use reth_primitives::{constants::GWEI_TO_WEI, BlockNumberOrTag}; use reth_rpc_server_types::constants; use reth_storage_api::BlockReaderIdExt; use schnellru::{ByLength, LruMap}; use serde::{Deserialize, Serialize}; +use std::fmt::{self, Debug, Formatter}; use tokio::sync::Mutex; use tracing::warn; @@ -212,7 +212,7 @@ where limit: usize, ) -> EthResult)>> { // check the cache (this will hit the disk if the block is not cached) - let mut block = match self.cache.get_block(block_hash).await? { + let block = match self.cache.get_sealed_block_with_senders(block_hash).await? { Some(block) => block, None => return Ok(None), }; @@ -221,11 +221,15 @@ where let parent_hash = block.parent_hash; // sort the functions by ascending effective tip first - block.body.transactions.sort_by_cached_key(|tx| tx.effective_tip_per_gas(base_fee_per_gas)); + let sorted_transactions = block + .body + .transactions + .iter() + .sorted_by_cached_key(|tx| tx.effective_tip_per_gas(base_fee_per_gas)); let mut prices = Vec::with_capacity(limit); - for tx in block.body.transactions() { + for tx in sorted_transactions { let mut effective_gas_tip = None; // ignore transactions with a tip under the configured threshold if let Some(ignore_under) = self.ignore_price { diff --git a/crates/rpc/rpc-eth-types/src/logs_utils.rs b/crates/rpc/rpc-eth-types/src/logs_utils.rs index c64bbe055..205e2bba3 100644 --- a/crates/rpc/rpc-eth-types/src/logs_utils.rs +++ b/crates/rpc/rpc-eth-types/src/logs_utils.rs @@ -6,8 +6,9 @@ use alloy_primitives::TxHash; use alloy_rpc_types::{FilteredParams, Log}; use reth_chainspec::ChainInfo; use reth_errors::ProviderError; -use reth_primitives::{BlockNumHash, Receipt, SealedBlock}; +use reth_primitives::{BlockNumHash, Receipt, SealedBlockWithSenders}; use reth_storage_api::BlockReader; +use std::sync::Arc; /// Returns all matching of a block's receipts when the transaction hashes are known. pub fn matching_block_logs_with_tx_hashes<'a, I>( @@ -50,8 +51,8 @@ where pub enum ProviderOrBlock<'a, P: BlockReader> { /// Provider Provider(&'a P), - /// [`SealedBlock`] - Block(SealedBlock), + /// [`SealedBlockWithSenders`] + Block(Arc), } /// Appends all matching logs of a block's receipts. 
diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index fbef6f7a7..acf215b3b 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -119,7 +119,7 @@ where env: Env::boxed( cfg.cfg_env.clone(), block_env.clone(), - Call::evm_config(this.eth_api()).tx_env(&tx), + Call::evm_config(this.eth_api()).tx_env(tx.as_signed(), tx.signer()), ), handler_cfg: cfg.handler_cfg, }; @@ -219,7 +219,7 @@ where self.trace_block( state_at.into(), - block.into_transactions_ecrecovered().collect(), + (*block).clone().into_transactions_ecrecovered().collect(), cfg, block_env, opts, @@ -245,11 +245,12 @@ where // block the transaction is included in let state_at: BlockId = block.parent_hash.into(); let block_hash = block.hash(); - let block_txs = block.into_transactions_ecrecovered(); let this = self.clone(); self.eth_api() .spawn_with_state_at_block(state_at, move |state| { + let block_txs = block.transactions_with_sender(); + // configure env for the target transaction let tx = transaction.into_recovered(); @@ -267,7 +268,7 @@ where env: Env::boxed( cfg.cfg_env.clone(), block_env, - Call::evm_config(this.eth_api()).tx_env(&tx), + Call::evm_config(this.eth_api()).tx_env(tx.as_signed(), tx.signer()), ), handler_cfg: cfg.handler_cfg, }; @@ -527,15 +528,15 @@ where if replay_block_txs { // only need to replay the transactions in the block if not all transactions are // to be replayed - let transactions = block.into_transactions_ecrecovered().take(num_txs); + let transactions = block.transactions_with_sender().take(num_txs); // Execute all transactions until index - for tx in transactions { + for (signer, tx) in transactions { let env = EnvWithHandlerCfg { env: Env::boxed( cfg.cfg_env.clone(), block_env.clone(), - Call::evm_config(this.eth_api()).tx_env(&tx), + Call::evm_config(this.eth_api()).tx_env(tx, *signer), ), handler_cfg: cfg.handler_cfg, }; @@ -613,7 +614,7 @@ where let _ = block_executor .execute_with_state_closure( - (&block.clone().unseal(), block.difficulty).into(), + (&(*block).clone().unseal(), block.difficulty).into(), |statedb: &State<_>| { codes = statedb .cache diff --git a/crates/rpc/rpc/src/eth/filter.rs b/crates/rpc/rpc/src/eth/filter.rs index 9efecf3da..b136861c7 100644 --- a/crates/rpc/rpc/src/eth/filter.rs +++ b/crates/rpc/rpc/src/eth/filter.rs @@ -19,7 +19,7 @@ use async_trait::async_trait; use jsonrpsee::{core::RpcResult, server::IdProvider}; use reth_chainspec::ChainInfo; use reth_node_api::EthApiTypes; -use reth_primitives::{Receipt, SealedBlock, TransactionSignedEcRecovered}; +use reth_primitives::{Receipt, SealedBlockWithSenders, TransactionSignedEcRecovered}; use reth_provider::{BlockIdReader, BlockReader, EvmEnvProvider, ProviderError}; use reth_rpc_eth_api::{EthFilterApiServer, FullEthApiTypes, RpcTransaction, TransactionCompat}; use reth_rpc_eth_types::{ @@ -534,7 +534,8 @@ where &self, block_num_hash: &BlockNumHash, best_number: u64, - ) -> Result>, Option)>, EthFilterError> { + ) -> Result>, Option>)>, EthFilterError> + { // The last 4 blocks are most likely cached, so we can just fetch them let cached_range = best_number.saturating_sub(4)..=best_number; let receipts_block = if cached_range.contains(&block_num_hash.number) { diff --git a/crates/rpc/rpc/src/trace.rs b/crates/rpc/rpc/src/trace.rs index f7b2f7ab7..05588d4da 100644 --- a/crates/rpc/rpc/src/trace.rs +++ b/crates/rpc/rpc/src/trace.rs @@ -118,14 +118,14 @@ where trace_types: HashSet, block_id: Option, ) -> Result { - let tx = recover_raw_transaction(tx)?; + let tx = 
recover_raw_transaction(tx)?.into_ecrecovered_transaction(); let (cfg, block, at) = self.eth_api().evm_env_at(block_id.unwrap_or_default()).await?; let env = EnvWithHandlerCfg::new_with_cfg_env( cfg, block, - Call::evm_config(self.eth_api()).tx_env(&tx.into_ecrecovered_transaction()), + Call::evm_config(self.eth_api()).tx_env(tx.as_signed(), tx.signer()), ); let config = TracingInspectorConfig::from_parity_config(&trace_types); @@ -372,7 +372,7 @@ where }, ); - let block = self.eth_api().block(block_id); + let block = self.eth_api().block_with_senders(block_id); let (maybe_traces, maybe_block) = futures::try_join!(traces, block)?; let mut maybe_traces = @@ -468,7 +468,9 @@ where let Some(transactions) = res else { return Ok(None) }; - let Some(block) = self.eth_api().block(block_id).await? else { return Ok(None) }; + let Some(block) = self.eth_api().block_with_senders(block_id).await? else { + return Ok(None) + }; Ok(Some(BlockOpcodeGas { block_hash: block.hash(), From 523bfb9c81ad7c2493882d83d0c9a6d0bcb2ce52 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Mon, 14 Oct 2024 17:21:41 +0400 Subject: [PATCH 042/425] feat: refactor and integrate local engine into `EngineNodeLauncher` (#11703) --- Cargo.lock | 17 +- .../consensus/beacon/src/engine/forkchoice.rs | 9 +- crates/e2e-test-utils/Cargo.toml | 1 + crates/engine/local/Cargo.toml | 24 +- crates/engine/local/src/lib.rs | 13 + crates/engine/local/src/miner.rs | 193 +++++++- crates/engine/local/src/payload.rs | 63 ++- crates/engine/local/src/service.rs | 427 +++++------------- crates/engine/service/src/service.rs | 2 +- crates/ethereum/node/tests/e2e/dev.rs | 53 ++- crates/ethereum/node/tests/e2e/utils.rs | 5 - crates/node/builder/Cargo.toml | 1 + crates/node/builder/src/launch/engine.rs | 79 +++- crates/node/events/src/node.rs | 2 +- crates/optimism/node/Cargo.toml | 2 + crates/payload/primitives/src/traits.rs | 9 +- 16 files changed, 500 insertions(+), 400 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3c5639ead..d690cf536 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6971,6 +6971,7 @@ dependencies = [ "reth", "reth-chainspec", "reth-db", + "reth-engine-local", "reth-network-peers", "reth-node-builder", "reth-node-ethereum", @@ -7028,22 +7029,22 @@ dependencies = [ "alloy-rpc-types-engine", "eyre", "futures-util", + "op-alloy-rpc-types-engine", "reth-beacon-consensus", - "reth-chain-state", "reth-chainspec", - "reth-config", - "reth-db", + "reth-consensus", + "reth-engine-primitives", + "reth-engine-service", "reth-engine-tree", "reth-ethereum-engine-primitives", - "reth-exex-test-utils", - "reth-node-types", + "reth-evm", "reth-payload-builder", "reth-payload-primitives", - "reth-primitives", + "reth-payload-validator", "reth-provider", "reth-prune", + "reth-rpc-types-compat", "reth-stages-api", - "reth-tracing", "reth-transaction-pool", "tokio", "tokio-stream", @@ -7809,6 +7810,7 @@ dependencies = [ "reth-db-api", "reth-db-common", "reth-downloaders", + "reth-engine-local", "reth-engine-service", "reth-engine-tree", "reth-engine-util", @@ -8131,6 +8133,7 @@ dependencies = [ "reth-db", "reth-discv5", "reth-e2e-test-utils", + "reth-engine-local", "reth-evm", "reth-network", "reth-node-api", diff --git a/crates/consensus/beacon/src/engine/forkchoice.rs b/crates/consensus/beacon/src/engine/forkchoice.rs index 975c2ee3b..7e49714ba 100644 --- a/crates/consensus/beacon/src/engine/forkchoice.rs +++ b/crates/consensus/beacon/src/engine/forkchoice.rs @@ -150,15 +150,18 @@ pub enum ForkchoiceStatus { } impl ForkchoiceStatus { - 
pub(crate) const fn is_valid(&self) -> bool { + /// Returns `true` if the forkchoice state is [`ForkchoiceStatus::Valid`]. + pub const fn is_valid(&self) -> bool { matches!(self, Self::Valid) } - pub(crate) const fn is_invalid(&self) -> bool { + /// Returns `true` if the forkchoice state is [`ForkchoiceStatus::Invalid`]. + pub const fn is_invalid(&self) -> bool { matches!(self, Self::Invalid) } - pub(crate) const fn is_syncing(&self) -> bool { + /// Returns `true` if the forkchoice state is [`ForkchoiceStatus::Syncing`]. + pub const fn is_syncing(&self) -> bool { matches!(self, Self::Syncing) } diff --git a/crates/e2e-test-utils/Cargo.toml b/crates/e2e-test-utils/Cargo.toml index a10162e78..2742d7040 100644 --- a/crates/e2e-test-utils/Cargo.toml +++ b/crates/e2e-test-utils/Cargo.toml @@ -13,6 +13,7 @@ workspace = true [dependencies] reth.workspace = true reth-chainspec.workspace = true +reth-engine-local.workspace = true reth-primitives.workspace = true reth-tracing.workspace = true reth-db = { workspace = true, features = ["test-utils"] } diff --git a/crates/engine/local/Cargo.toml b/crates/engine/local/Cargo.toml index d7a5d0509..f22ab1f8d 100644 --- a/crates/engine/local/Cargo.toml +++ b/crates/engine/local/Cargo.toml @@ -11,15 +11,19 @@ exclude.workspace = true [dependencies] # reth reth-beacon-consensus.workspace = true -reth-chain-state.workspace = true +reth-chainspec.workspace = true +reth-consensus.workspace = true +reth-engine-primitives.workspace = true +reth-engine-service.workspace = true reth-engine-tree.workspace = true +reth-evm.workspace = true reth-ethereum-engine-primitives.workspace = true -reth-node-types.workspace = true reth-payload-builder.workspace = true reth-payload-primitives.workspace = true -reth-primitives.workspace = true +reth-payload-validator.workspace = true reth-provider.workspace = true reth-prune.workspace = true +reth-rpc-types-compat.workspace = true reth-transaction-pool.workspace = true reth-stages-api.workspace = true @@ -36,16 +40,10 @@ futures-util.workspace = true eyre.workspace = true tracing.workspace = true -[dev-dependencies] -reth-chainspec.workspace = true -reth-chain-state.workspace = true -reth-config.workspace = true -reth-db = { workspace = true, features = ["test-utils"] } -reth-ethereum-engine-primitives.workspace = true -reth-exex-test-utils.workspace = true -reth-payload-builder = { workspace = true, features = ["test-utils"] } -reth-provider = { workspace = true, features = ["test-utils"] } -reth-tracing.workspace = true +op-alloy-rpc-types-engine = { workspace = true, optional = true } [lints] workspace = true + +[features] +optimism = ["op-alloy-rpc-types-engine"] diff --git a/crates/engine/local/src/lib.rs b/crates/engine/local/src/lib.rs index 1b84c8a11..26c84d50c 100644 --- a/crates/engine/local/src/lib.rs +++ b/crates/engine/local/src/lib.rs @@ -1,4 +1,17 @@ //! A local engine service that can be used to drive a dev chain. 
+ +#![doc( + html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", + html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" +)] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] +#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] + pub mod miner; pub mod payload; pub mod service; + +pub use miner::MiningMode; +pub use payload::LocalPayloadAttributesBuilder; +pub use service::LocalEngineService; diff --git a/crates/engine/local/src/miner.rs b/crates/engine/local/src/miner.rs index de3d8cb8d..e12a2a50d 100644 --- a/crates/engine/local/src/miner.rs +++ b/crates/engine/local/src/miner.rs @@ -1,16 +1,31 @@ //! Contains the implementation of the mining mode for the local engine. -use alloy_primitives::TxHash; +use alloy_primitives::{TxHash, B256}; +use alloy_rpc_types_engine::{CancunPayloadFields, ForkchoiceState}; +use eyre::OptionExt; use futures_util::{stream::Fuse, StreamExt}; +use reth_beacon_consensus::BeaconEngineMessage; +use reth_chainspec::EthereumHardforks; +use reth_engine_primitives::EngineTypes; +use reth_payload_builder::PayloadBuilderHandle; +use reth_payload_primitives::{ + BuiltPayload, PayloadAttributesBuilder, PayloadBuilder, PayloadTypes, +}; +use reth_provider::{BlockReader, ChainSpecProvider}; +use reth_rpc_types_compat::engine::payload::block_to_payload; use reth_transaction_pool::TransactionPool; use std::{ future::Future, pin::Pin, task::{Context, Poll}, - time::Duration, + time::{Duration, UNIX_EPOCH}, +}; +use tokio::{ + sync::{mpsc::UnboundedSender, oneshot}, + time::Interval, }; -use tokio::time::Interval; use tokio_stream::wrappers::ReceiverStream; +use tracing::error; /// A mining mode for the local dev engine. #[derive(Debug)] @@ -58,3 +73,175 @@ impl Future for MiningMode { } } } + +/// Local miner advancing the chain/ +#[derive(Debug)] +pub struct LocalMiner { + /// Provider to read the current tip of the chain. + provider: Provider, + /// The payload attribute builder for the engine + payload_attributes_builder: B, + /// Sender for events to engine. + to_engine: UnboundedSender>, + /// The mining mode for the engine + mode: MiningMode, + /// The payload builder for the engine + payload_builder: PayloadBuilderHandle, + /// Timestamp for the next block. + last_timestamp: u64, + /// Stores latest mined blocks. + last_block_hashes: Vec, +} + +impl LocalMiner +where + EngineT: EngineTypes, + Provider: BlockReader + ChainSpecProvider + 'static, + B: PayloadAttributesBuilder<::PayloadAttributes>, +{ + /// Spawns a new [`LocalMiner`] with the given parameters. + pub fn spawn_new( + provider: Provider, + payload_attributes_builder: B, + to_engine: UnboundedSender>, + mode: MiningMode, + payload_builder: PayloadBuilderHandle, + ) { + let latest_header = + provider.sealed_header(provider.best_block_number().unwrap()).unwrap().unwrap(); + + let miner = Self { + provider, + payload_attributes_builder, + to_engine, + mode, + payload_builder, + last_timestamp: latest_header.timestamp, + last_block_hashes: vec![latest_header.hash()], + }; + + // Spawn the miner + tokio::spawn(miner.run()); + } + + /// Runs the [`LocalMiner`] in a loop, polling the miner and building payloads. + async fn run(mut self) { + let mut fcu_interval = tokio::time::interval(Duration::from_secs(1)); + loop { + tokio::select! 
{ + // Wait for the interval or the pool to receive a transaction + _ = &mut self.mode => { + if let Err(e) = self.advance().await { + error!(target: "engine::local", "Error advancing the chain: {:?}", e); + } + } + // send FCU once in a while + _ = fcu_interval.tick() => { + if let Err(e) = self.update_forkchoice_state().await { + error!(target: "engine::local", "Error updating fork choice: {:?}", e); + } + } + } + } + } + + /// Returns current forkchoice state. + fn forkchoice_state(&self) -> ForkchoiceState { + ForkchoiceState { + head_block_hash: *self.last_block_hashes.last().expect("at least 1 block exists"), + safe_block_hash: *self + .last_block_hashes + .get(self.last_block_hashes.len().saturating_sub(32)) + .expect("at least 1 block exists"), + finalized_block_hash: *self + .last_block_hashes + .get(self.last_block_hashes.len().saturating_sub(64)) + .expect("at least 1 block exists"), + } + } + + /// Sends a FCU to the engine. + async fn update_forkchoice_state(&self) -> eyre::Result<()> { + let (tx, rx) = oneshot::channel(); + self.to_engine.send(BeaconEngineMessage::ForkchoiceUpdated { + state: self.forkchoice_state(), + payload_attrs: None, + tx, + })?; + + let res = rx.await??; + if !res.forkchoice_status().is_valid() { + eyre::bail!("Invalid fork choice update") + } + + Ok(()) + } + + /// Generates payload attributes for a new block, passes them to FCU and inserts built payload + /// through newPayload. + async fn advance(&mut self) -> eyre::Result<()> { + let timestamp = std::cmp::max( + self.last_timestamp + 1, + std::time::SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("cannot be earlier than UNIX_EPOCH") + .as_secs(), + ); + + let (tx, rx) = oneshot::channel(); + self.to_engine.send(BeaconEngineMessage::ForkchoiceUpdated { + state: self.forkchoice_state(), + payload_attrs: Some(self.payload_attributes_builder.build(timestamp)), + tx, + })?; + + let res = rx.await??.await?; + if !res.payload_status.is_valid() { + eyre::bail!("Invalid payload status") + } + + let payload_id = res.payload_id.ok_or_eyre("No payload id")?; + + // wait for some time to let the payload be built + tokio::time::sleep(Duration::from_millis(200)).await; + + let Some(Ok(payload)) = self.payload_builder.best_payload(payload_id).await else { + eyre::bail!("No payload") + }; + + let block = payload.block(); + + let cancun_fields = + if self.provider.chain_spec().is_cancun_active_at_timestamp(block.timestamp) { + Some(CancunPayloadFields { + parent_beacon_block_root: block.parent_beacon_block_root.unwrap(), + versioned_hashes: block.blob_versioned_hashes().into_iter().copied().collect(), + }) + } else { + None + }; + + let (tx, rx) = oneshot::channel(); + self.to_engine.send(BeaconEngineMessage::NewPayload { + payload: block_to_payload(payload.block().clone()), + cancun_fields, + tx, + })?; + + let res = rx.await??; + + if !res.is_valid() { + eyre::bail!("Invalid payload") + } + + self.last_timestamp = timestamp; + self.last_block_hashes.push(block.hash()); + // ensure we keep at most 64 blocks + if self.last_block_hashes.len() > 64 { + self.last_block_hashes = + self.last_block_hashes.split_off(self.last_block_hashes.len() - 64); + } + + Ok(()) + } +} diff --git a/crates/engine/local/src/payload.rs b/crates/engine/local/src/payload.rs index 4fd49f53f..15d5ff2cf 100644 --- a/crates/engine/local/src/payload.rs +++ b/crates/engine/local/src/payload.rs @@ -2,29 +2,62 @@ //! [`LocalEngineService`](super::service::LocalEngineService). 
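A note on the two modes above: `Instant` seals a block as soon as the pool announces a new transaction, while `Interval` seals on a fixed cadence. The launcher change later in this patch selects between them from the dev args; a minimal sketch of that selection (the helper name is illustrative, only `MiningMode::interval`/`MiningMode::instant` come from the patch):

    use std::time::Duration;

    use reth_engine_local::MiningMode;
    use reth_transaction_pool::TransactionPool;

    /// Picks the dev mining mode: a fixed block time if one was configured,
    /// otherwise seal whenever a new transaction reaches the pool.
    fn pick_mining_mode(pool: impl TransactionPool, block_time: Option<Duration>) -> MiningMode {
        match block_time {
            Some(interval) => MiningMode::interval(interval),
            None => MiningMode::instant(pool),
        }
    }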
use alloy_primitives::{Address, B256}; +use reth_chainspec::EthereumHardforks; use reth_ethereum_engine_primitives::EthPayloadAttributes; use reth_payload_primitives::PayloadAttributesBuilder; -use std::{convert::Infallible, time::UNIX_EPOCH}; +use std::sync::Arc; /// The attributes builder for local Ethereum payload. #[derive(Debug)] -pub struct EthLocalPayloadAttributesBuilder; - -impl PayloadAttributesBuilder for EthLocalPayloadAttributesBuilder { - type PayloadAttributes = EthPayloadAttributes; - type Error = Infallible; +#[non_exhaustive] +pub struct LocalPayloadAttributesBuilder { + chain_spec: Arc, +} - fn build(&self) -> Result { - let ts = std::time::SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("cannot be earlier than UNIX_EPOCH"); +impl LocalPayloadAttributesBuilder { + /// Creates a new instance of the builder. + pub const fn new(chain_spec: Arc) -> Self { + Self { chain_spec } + } +} - Ok(EthPayloadAttributes { - timestamp: ts.as_secs(), +impl PayloadAttributesBuilder + for LocalPayloadAttributesBuilder +where + ChainSpec: Send + Sync + EthereumHardforks + 'static, +{ + fn build(&self, timestamp: u64) -> EthPayloadAttributes { + EthPayloadAttributes { + timestamp, prev_randao: B256::random(), suggested_fee_recipient: Address::random(), - withdrawals: None, - parent_beacon_block_root: None, - }) + withdrawals: if self.chain_spec.is_shanghai_active_at_timestamp(timestamp) { + Some(Default::default()) + } else { + None + }, + parent_beacon_block_root: if self.chain_spec.is_cancun_active_at_timestamp(timestamp) { + Some(B256::random()) + } else { + None + }, + } + } +} + +#[cfg(feature = "optimism")] +impl PayloadAttributesBuilder + for LocalPayloadAttributesBuilder +where + ChainSpec: Send + Sync + EthereumHardforks + 'static, +{ + fn build(&self, timestamp: u64) -> op_alloy_rpc_types_engine::OpPayloadAttributes { + op_alloy_rpc_types_engine::OpPayloadAttributes { + payload_attributes: self.build(timestamp), + transactions: None, + no_tx_pool: None, + gas_limit: None, + eip_1559_params: None, + } } } diff --git a/crates/engine/local/src/service.rs b/crates/engine/local/src/service.rs index c9794ecfa..93a9cf11e 100644 --- a/crates/engine/local/src/service.rs +++ b/crates/engine/local/src/service.rs @@ -6,357 +6,154 @@ //! with a single transaction. The `Interval` mode will initiate block //! building at a fixed interval. 
-use crate::miner::MiningMode; -use eyre::eyre; -use reth_beacon_consensus::EngineNodeTypes; -use reth_chain_state::{CanonicalInMemoryState, ExecutedBlock, NewCanonicalChain}; -use reth_engine_tree::persistence::PersistenceHandle; -use reth_payload_builder::PayloadBuilderHandle; -use reth_payload_primitives::{ - BuiltPayload, PayloadAttributesBuilder, PayloadBuilder, PayloadBuilderAttributes, PayloadTypes, +use core::fmt; +use std::{ + fmt::{Debug, Formatter}, + pin::Pin, + sync::Arc, + task::{Context, Poll}, +}; + +use crate::miner::{LocalMiner, MiningMode}; +use futures_util::{Stream, StreamExt}; +use reth_beacon_consensus::{BeaconConsensusEngineEvent, BeaconEngineMessage, EngineNodeTypes}; +use reth_chainspec::EthChainSpec; +use reth_consensus::Consensus; +use reth_engine_service::service::EngineMessageStream; +use reth_engine_tree::{ + chain::{ChainEvent, HandlerEvent}, + engine::{ + EngineApiKind, EngineApiRequest, EngineApiRequestHandler, EngineRequestHandler, FromEngine, + RequestHandlerEvent, + }, + persistence::PersistenceHandle, + tree::{EngineApiTreeHandler, InvalidBlockHook, TreeConfig}, }; -use reth_provider::ProviderFactory; +use reth_evm::execute::BlockExecutorProvider; +use reth_payload_builder::PayloadBuilderHandle; +use reth_payload_primitives::{PayloadAttributesBuilder, PayloadTypes}; +use reth_payload_validator::ExecutionPayloadValidator; +use reth_provider::{providers::BlockchainProvider2, ChainSpecProvider, ProviderFactory}; use reth_prune::PrunerWithFactory; use reth_stages_api::MetricEventsSender; -use tokio::sync::oneshot; -use tracing::debug; +use tokio::sync::mpsc::UnboundedSender; +use tracing::error; /// Provides a local dev service engine that can be used to drive the /// chain forward. -#[derive(Debug)] -pub struct LocalEngineService +/// +/// This service both produces and consumes [`BeaconEngineMessage`]s. This is done to allow +/// modifications of the stream +pub struct LocalEngineService where N: EngineNodeTypes, - B: PayloadAttributesBuilder::PayloadAttributes>, { - /// The payload builder for the engine - payload_builder: PayloadBuilderHandle, - /// The payload attribute builder for the engine - payload_attributes_builder: B, - /// Keep track of the Canonical chain state that isn't persisted on disk yet - canonical_in_memory_state: CanonicalInMemoryState, - /// A handle to the persistence layer - persistence_handle: PersistenceHandle, - /// The mining mode for the engine - mode: MiningMode, + /// Processes requests. + /// + /// This type is responsible for processing incoming requests. + handler: EngineApiRequestHandler>, + /// Receiver for incoming requests (from the engine API endpoint) that need to be processed. + incoming_requests: EngineMessageStream, } -impl LocalEngineService +impl LocalEngineService where N: EngineNodeTypes, - B: PayloadAttributesBuilder::PayloadAttributes>, { /// Constructor for [`LocalEngineService`]. 
- pub fn new( - payload_builder: PayloadBuilderHandle, - payload_attributes_builder: B, + #[allow(clippy::too_many_arguments)] + pub fn new( + consensus: Arc, + executor_factory: impl BlockExecutorProvider, provider: ProviderFactory, + blockchain_db: BlockchainProvider2, pruner: PrunerWithFactory>, - canonical_in_memory_state: CanonicalInMemoryState, + payload_builder: PayloadBuilderHandle, + tree_config: TreeConfig, + invalid_block_hook: Box, sync_metrics_tx: MetricEventsSender, + to_engine: UnboundedSender>, + from_engine: EngineMessageStream, mode: MiningMode, - ) -> Self { + payload_attributes_builder: B, + ) -> Self + where + B: PayloadAttributesBuilder<::PayloadAttributes>, + { + let chain_spec = provider.chain_spec(); + let engine_kind = + if chain_spec.is_optimism() { EngineApiKind::OpStack } else { EngineApiKind::Ethereum }; + let persistence_handle = PersistenceHandle::spawn_service(provider, pruner, sync_metrics_tx); + let payload_validator = ExecutionPayloadValidator::new(chain_spec); - Self { - payload_builder, - payload_attributes_builder, - canonical_in_memory_state, + let canonical_in_memory_state = blockchain_db.canonical_in_memory_state(); + + let (to_tree_tx, from_tree) = EngineApiTreeHandler::spawn_new( + blockchain_db.clone(), + executor_factory, + consensus, + payload_validator, persistence_handle, - mode, - } - } + payload_builder.clone(), + canonical_in_memory_state, + tree_config, + invalid_block_hook, + engine_kind, + ); - /// Spawn the [`LocalEngineService`] on a tokio green thread. The service will poll the payload - /// builder with two varying modes, [`MiningMode::Instant`] or [`MiningMode::Interval`] - /// which will respectively either execute the block as soon as it finds a - /// transaction in the pool or build the block based on an interval. - pub fn spawn_new( - payload_builder: PayloadBuilderHandle, - payload_attributes_builder: B, - provider: ProviderFactory, - pruner: PrunerWithFactory>, - canonical_in_memory_state: CanonicalInMemoryState, - sync_metrics_tx: MetricEventsSender, - mode: MiningMode, - ) { - let engine = Self::new( - payload_builder, + let handler = EngineApiRequestHandler::new(to_tree_tx, from_tree); + + LocalMiner::spawn_new( + blockchain_db, payload_attributes_builder, - provider, - pruner, - canonical_in_memory_state, - sync_metrics_tx, + to_engine, mode, + payload_builder, ); - // Spawn the engine - tokio::spawn(engine.run()); + Self { handler, incoming_requests: from_engine } } +} - /// Runs the [`LocalEngineService`] in a loop, polling the miner and building - /// payloads. 
- async fn run(mut self) { - loop { - // Wait for the interval or the pool to receive a transaction - (&mut self.mode).await; - - // Start a new payload building job - let executed_block = self.build_and_save_payload().await; - - if executed_block.is_err() { - debug!(target: "local_engine", err = ?executed_block.unwrap_err(), "failed payload building"); - continue - } - let block = executed_block.expect("not error"); - - let res = self.update_canonical_in_memory_state(block); - if res.is_err() { - debug!(target: "local_engine", err = ?res.unwrap_err(), "failed canonical state update"); +impl Stream for LocalEngineService +where + N: EngineNodeTypes, +{ + type Item = ChainEvent; + + fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + let this = self.get_mut(); + + if let Poll::Ready(ev) = this.handler.poll(cx) { + return match ev { + RequestHandlerEvent::HandlerEvent(ev) => match ev { + HandlerEvent::BackfillAction(_) => { + error!(target: "engine::local", "received backfill request in local engine"); + Poll::Ready(Some(ChainEvent::FatalError)) + } + HandlerEvent::Event(ev) => Poll::Ready(Some(ChainEvent::Handler(ev))), + HandlerEvent::FatalError => Poll::Ready(Some(ChainEvent::FatalError)), + }, + RequestHandlerEvent::Download(_) => { + error!(target: "engine::local", "received download request in local engine"); + Poll::Ready(Some(ChainEvent::FatalError)) + } } } - } - - /// Builds a payload by initiating a new payload job via the [`PayloadBuilderHandle`], - /// saving the execution outcome to persistence and returning the executed block. - async fn build_and_save_payload(&self) -> eyre::Result { - let payload_attributes = self.payload_attributes_builder.build()?; - let parent = self.canonical_in_memory_state.get_canonical_head().hash(); - let payload_builder_attributes = - ::PayloadBuilderAttributes::try_new( - parent, - payload_attributes, - ) - .map_err(|_| eyre::eyre!("failed to fetch payload attributes"))?; - - let payload = self - .payload_builder - .send_and_resolve_payload(payload_builder_attributes) - .await? - .await?; - - let executed_block = - payload.executed_block().ok_or_else(|| eyre!("missing executed block"))?; - let (tx, rx) = oneshot::channel(); - - let _ = self.persistence_handle.save_blocks(vec![executed_block.clone()], tx); - - // Wait for the persistence_handle to complete - let _ = rx.await?.ok_or_else(|| eyre!("missing new head"))?; - Ok(executed_block) - } - - /// Update the canonical in memory state and send notification for a new canon state to - /// all the listeners. 
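Unlike the `run` loop being removed here, the reworked service does no orchestration of its own: the launcher polls it as a stream and reacts to the yielded `ChainEvent`s (handler events, fatal errors), much as it already does for the networked `EngineService`. A minimal driver sketch under that assumption (`drive_engine` is illustrative; `futures-util` and `tracing` are already dependencies of this crate):

    use futures_util::{Stream, StreamExt};

    /// Drains engine events from a local or networked engine service.
    async fn drive_engine<S>(mut engine_service: S)
    where
        S: Stream + Unpin,
        S::Item: std::fmt::Debug,
    {
        while let Some(event) = engine_service.next().await {
            // the real launcher matches on the event here, e.g. exiting on a fatal error
            tracing::debug!(?event, "engine event");
        }
    }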
- fn update_canonical_in_memory_state(&self, executed_block: ExecutedBlock) -> eyre::Result<()> { - let chain = NewCanonicalChain::Commit { new: vec![executed_block] }; - let tip = chain.tip().header.clone(); - let notification = chain.to_chain_notification(); - - // Update the tracked in-memory state with the new chain - self.canonical_in_memory_state.update_chain(chain); - self.canonical_in_memory_state.set_canonical_head(tip); - - // Sends an event to all active listeners about the new canonical chain - self.canonical_in_memory_state.notify_canon_state(notification); - Ok(()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use reth_chainspec::MAINNET; - use reth_config::PruneConfig; - use reth_db::test_utils::{create_test_rw_db, create_test_static_files_dir}; - use reth_ethereum_engine_primitives::EthEngineTypes; - use reth_exex_test_utils::TestNode; - use reth_node_types::NodeTypesWithDBAdapter; - use reth_payload_builder::test_utils::spawn_test_payload_service; - use reth_provider::{providers::StaticFileProvider, BlockReader, ProviderFactory}; - use reth_prune::PrunerBuilder; - use reth_transaction_pool::{ - test_utils::{testing_pool, MockTransaction}, - TransactionPool, - }; - use std::{convert::Infallible, time::Duration}; - use tokio::sync::mpsc::unbounded_channel; - - #[derive(Debug)] - struct TestPayloadAttributesBuilder; - - impl PayloadAttributesBuilder for TestPayloadAttributesBuilder { - type PayloadAttributes = alloy_rpc_types_engine::PayloadAttributes; - type Error = Infallible; - - fn build(&self) -> Result { - Ok(alloy_rpc_types_engine::PayloadAttributes { - timestamp: 0, - prev_randao: Default::default(), - suggested_fee_recipient: Default::default(), - withdrawals: None, - parent_beacon_block_root: None, - }) + // forward incoming requests to the handler + while let Poll::Ready(Some(req)) = this.incoming_requests.poll_next_unpin(cx) { + this.handler.on_event(FromEngine::Request(req.into())); } - } - - #[tokio::test] - async fn test_local_engine_service_interval() -> eyre::Result<()> { - reth_tracing::init_test_tracing(); - - // Start the provider and the pruner - let (_, static_dir_path) = create_test_static_files_dir(); - let provider = ProviderFactory::>::new( - create_test_rw_db(), - MAINNET.clone(), - StaticFileProvider::read_write(static_dir_path)?, - ); - let pruner = PrunerBuilder::new(PruneConfig::default()) - .build_with_provider_factory(provider.clone()); - - // Create an empty canonical in memory state - let canonical_in_memory_state = CanonicalInMemoryState::empty(); - - // Start the payload builder service - let payload_handle = spawn_test_payload_service::(); - - // Sync metric channel - let (sync_metrics_tx, _) = unbounded_channel(); - - // Launch the LocalEngineService in interval mode - let period = Duration::from_secs(1); - LocalEngineService::spawn_new( - payload_handle, - TestPayloadAttributesBuilder, - provider.clone(), - pruner, - canonical_in_memory_state, - sync_metrics_tx, - MiningMode::interval(period), - ); - - // Check that we have no block for now - let block = provider.block_by_number(0)?; - assert!(block.is_none()); - // Wait 4 intervals - tokio::time::sleep(2 * period).await; - - // Assert a block has been build - let block = provider.block_by_number(0)?; - assert!(block.is_some()); - - Ok(()) + Poll::Pending } +} - #[tokio::test] - async fn test_local_engine_service_instant() -> eyre::Result<()> { - reth_tracing::init_test_tracing(); - - // Start the provider and the pruner - let (_, static_dir_path) = 
create_test_static_files_dir(); - let provider = ProviderFactory::>::new( - create_test_rw_db(), - MAINNET.clone(), - StaticFileProvider::read_write(static_dir_path)?, - ); - let pruner = PrunerBuilder::new(PruneConfig::default()) - .build_with_provider_factory(provider.clone()); - - // Create an empty canonical in memory state - let canonical_in_memory_state = CanonicalInMemoryState::empty(); - - // Start the payload builder service - let payload_handle = spawn_test_payload_service::(); - - // Start a transaction pool - let pool = testing_pool(); - - // Sync metric channel - let (sync_metrics_tx, _) = unbounded_channel(); - - // Launch the LocalEngineService in instant mode - LocalEngineService::spawn_new( - payload_handle, - TestPayloadAttributesBuilder, - provider.clone(), - pruner, - canonical_in_memory_state, - sync_metrics_tx, - MiningMode::instant(pool.clone()), - ); - - // Wait for a small period to assert block building is - // triggered by adding a transaction to the pool - let period = Duration::from_millis(500); - tokio::time::sleep(period).await; - let block = provider.block_by_number(0)?; - assert!(block.is_none()); - - // Add a transaction to the pool - let transaction = MockTransaction::legacy().with_gas_price(10); - pool.add_transaction(Default::default(), transaction).await?; - - // Wait for block building - let period = Duration::from_secs(2); - tokio::time::sleep(period).await; - - // Assert a block has been build - let block = provider.block_by_number(0)?; - assert!(block.is_some()); - - Ok(()) - } - - #[tokio::test] - async fn test_canonical_chain_subscription() -> eyre::Result<()> { - reth_tracing::init_test_tracing(); - - // Start the provider and the pruner - let (_, static_dir_path) = create_test_static_files_dir(); - let provider = ProviderFactory::>::new( - create_test_rw_db(), - MAINNET.clone(), - StaticFileProvider::read_write(static_dir_path)?, - ); - let pruner = PrunerBuilder::new(PruneConfig::default()) - .build_with_provider_factory(provider.clone()); - - // Create an empty canonical in memory state - let canonical_in_memory_state = CanonicalInMemoryState::empty(); - let mut notifications = canonical_in_memory_state.subscribe_canon_state(); - - // Start the payload builder service - let payload_handle = spawn_test_payload_service::(); - - // Start a transaction pool - let pool = testing_pool(); - - // Sync metric channel - let (sync_metrics_tx, _) = unbounded_channel(); - - // Launch the LocalEngineService in instant mode - LocalEngineService::spawn_new( - payload_handle, - TestPayloadAttributesBuilder, - provider.clone(), - pruner, - canonical_in_memory_state, - sync_metrics_tx, - MiningMode::instant(pool.clone()), - ); - - // Add a transaction to the pool - let transaction = MockTransaction::legacy().with_gas_price(10); - pool.add_transaction(Default::default(), transaction).await?; - - // Check a notification is received for block 0 - let res = notifications.recv().await?; - - assert_eq!(res.tip().number, 0); - - Ok(()) +impl Debug for LocalEngineService { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_struct("LocalEngineService").finish_non_exhaustive() } } diff --git a/crates/engine/service/src/service.rs b/crates/engine/service/src/service.rs index ed9c1aa1c..026476a82 100644 --- a/crates/engine/service/src/service.rs +++ b/crates/engine/service/src/service.rs @@ -31,7 +31,7 @@ use std::{ }; /// Alias for consensus engine stream. 
-type EngineMessageStream = Pin> + Send + Sync>>; +pub type EngineMessageStream = Pin> + Send + Sync>>; /// Alias for chain orchestrator. type EngineServiceType = ChainOrchestrator< diff --git a/crates/ethereum/node/tests/e2e/dev.rs b/crates/ethereum/node/tests/e2e/dev.rs index 2ef6e08c7..6b4733b6f 100644 --- a/crates/ethereum/node/tests/e2e/dev.rs +++ b/crates/ethereum/node/tests/e2e/dev.rs @@ -3,29 +3,64 @@ use std::sync::Arc; use alloy_genesis::Genesis; use alloy_primitives::{b256, hex}; use futures::StreamExt; -use reth::core::rpc::eth::helpers::EthTransactions; +use reth::{args::DevArgs, core::rpc::eth::helpers::EthTransactions}; use reth_chainspec::ChainSpec; use reth_e2e_test_utils::setup; -use reth_provider::CanonStateSubscriptions; - -use crate::utils::EthNode; +use reth_node_api::{FullNodeComponents, NodeAddOns}; +use reth_node_builder::{EngineNodeLauncher, FullNode, NodeBuilder, NodeConfig, NodeHandle}; +use reth_node_ethereum::{node::EthereumAddOns, EthereumNode}; +use reth_provider::{providers::BlockchainProvider2, CanonStateSubscriptions}; +use reth_tasks::TaskManager; #[tokio::test] async fn can_run_dev_node() -> eyre::Result<()> { reth_tracing::init_test_tracing(); - let (mut nodes, _tasks, _) = setup(1, custom_chain(), true).await?; + let (mut nodes, _tasks, _) = setup::(1, custom_chain(), true).await?; + + assert_chain_advances(nodes.pop().unwrap().inner).await; + Ok(()) +} + +#[tokio::test] +async fn can_run_dev_node_new_engine() -> eyre::Result<()> { + reth_tracing::init_test_tracing(); + let tasks = TaskManager::current(); + let exec = tasks.executor(); + + let node_config = NodeConfig::test() + .with_chain(custom_chain()) + .with_dev(DevArgs { dev: true, ..Default::default() }); + let NodeHandle { node, .. } = NodeBuilder::new(node_config.clone()) + .testing_node(exec.clone()) + .with_types_and_provider::>() + .with_components(EthereumNode::components()) + .with_add_ons(EthereumAddOns::default()) + .launch_with_fn(|builder| { + let launcher = EngineNodeLauncher::new( + builder.task_executor().clone(), + builder.config().datadir(), + Default::default(), + ); + builder.launch_with(launcher) + }) + .await?; + + assert_chain_advances(node).await; - assert_chain_advances(nodes.pop().unwrap()).await; Ok(()) } -async fn assert_chain_advances(node: EthNode) { - let mut notifications = node.inner.provider.canonical_state_stream(); +async fn assert_chain_advances(node: FullNode) +where + N: FullNodeComponents, + AddOns: NodeAddOns, +{ + let mut notifications = node.provider.canonical_state_stream(); // submit tx through rpc let raw_tx = hex!("02f876820a28808477359400847735940082520894ab0840c0e43688012c1adb0f5e3fc665188f83d28a029d394a5d630544000080c080a0a044076b7e67b5deecc63f61a8d7913fab86ca365b344b5759d1fe3563b4c39ea019eab979dd000da04dfc72bb0377c092d30fd9e1cab5ae487de49586cc8b0090"); - let eth_api = node.inner.rpc_registry.eth_api(); + let eth_api = node.rpc_registry.eth_api(); let hash = eth_api.send_raw_transaction(raw_tx.into()).await.unwrap(); diff --git a/crates/ethereum/node/tests/e2e/utils.rs b/crates/ethereum/node/tests/e2e/utils.rs index 5a7950999..6e534f5dc 100644 --- a/crates/ethereum/node/tests/e2e/utils.rs +++ b/crates/ethereum/node/tests/e2e/utils.rs @@ -1,12 +1,7 @@ use alloy_primitives::{Address, B256}; use reth::rpc::types::engine::PayloadAttributes; -use reth_e2e_test_utils::NodeHelperType; -use reth_node_ethereum::{node::EthereumAddOns, EthereumNode}; use reth_payload_builder::EthPayloadBuilderAttributes; -/// Ethereum Node Helper type -pub(crate) type 
EthNode = NodeHelperType; - /// Helper function to create a new eth payload attributes pub(crate) fn eth_payload_attributes(timestamp: u64) -> EthPayloadBuilderAttributes { let attributes = PayloadAttributes { diff --git a/crates/node/builder/Cargo.toml b/crates/node/builder/Cargo.toml index 1bf2ba233..53e53cd2b 100644 --- a/crates/node/builder/Cargo.toml +++ b/crates/node/builder/Cargo.toml @@ -26,6 +26,7 @@ reth-db = { workspace = true, features = ["mdbx"], optional = true } reth-db-api.workspace = true reth-db-common.workspace = true reth-downloaders.workspace = true +reth-engine-local.workspace = true reth-engine-service.workspace = true reth-engine-tree.workspace = true reth-engine-util.workspace = true diff --git a/crates/node/builder/src/launch/engine.rs b/crates/node/builder/src/launch/engine.rs index 782cc7bbb..f9e26f202 100644 --- a/crates/node/builder/src/launch/engine.rs +++ b/crates/node/builder/src/launch/engine.rs @@ -9,6 +9,7 @@ use reth_beacon_consensus::{ use reth_blockchain_tree::BlockchainTreeConfig; use reth_chainspec::EthChainSpec; use reth_consensus_debug_client::{DebugConsensusClient, EtherscanBlockProvider}; +use reth_engine_local::{LocalEngineService, LocalPayloadAttributesBuilder, MiningMode}; use reth_engine_service::service::{ChainEvent, EngineService}; use reth_engine_tree::{ engine::{EngineApiRequest, EngineRequestHandler}, @@ -18,7 +19,10 @@ use reth_engine_util::EngineMessageStreamExt; use reth_exex::ExExManagerHandle; use reth_network::{NetworkSyncUpdater, SyncState}; use reth_network_api::{BlockDownloaderProvider, NetworkEventListenerProvider}; -use reth_node_api::{BuiltPayload, FullNodeTypes, NodeAddOns, NodeTypesWithEngine}; +use reth_node_api::{ + BuiltPayload, FullNodeTypes, NodeAddOns, NodeTypesWithEngine, PayloadAttributesBuilder, + PayloadTypes, +}; use reth_node_core::{ dirs::{ChainPath, DataDirPath}, exit::NodeExitFuture, @@ -80,6 +84,9 @@ where + FullEthApiServer + AddDevSigners, >, + LocalPayloadAttributesBuilder: PayloadAttributesBuilder< + <::Engine as PayloadTypes>::PayloadAttributes, + >, { type Node = NodeHandle, AO>; @@ -210,23 +217,49 @@ where let pruner_events = pruner.events(); info!(target: "reth::cli", prune_config=?ctx.prune_config().unwrap_or_default(), "Pruner initialized"); - // Configure the consensus engine - let mut eth_service = EngineService::new( - ctx.consensus(), - ctx.components().block_executor().clone(), - ctx.chain_spec(), - network_client.clone(), - Box::pin(consensus_engine_stream), - pipeline, - Box::new(ctx.task_executor().clone()), - ctx.provider_factory().clone(), - ctx.blockchain_db().clone(), - pruner, - ctx.components().payload_builder().clone(), - engine_tree_config, - ctx.invalid_block_hook()?, - ctx.sync_metrics_tx(), - ); + let mut engine_service = if ctx.is_dev() { + let mining_mode = if let Some(block_time) = ctx.node_config().dev.block_time { + MiningMode::interval(block_time) + } else { + MiningMode::instant(ctx.components().pool().clone()) + }; + let eth_service = LocalEngineService::new( + ctx.consensus(), + ctx.components().block_executor().clone(), + ctx.provider_factory().clone(), + ctx.blockchain_db().clone(), + pruner, + ctx.components().payload_builder().clone(), + engine_tree_config, + ctx.invalid_block_hook()?, + ctx.sync_metrics_tx(), + consensus_engine_tx.clone(), + Box::pin(consensus_engine_stream), + mining_mode, + LocalPayloadAttributesBuilder::new(ctx.chain_spec()), + ); + + Either::Left(eth_service) + } else { + let eth_service = EngineService::new( + ctx.consensus(), + 
ctx.components().block_executor().clone(), + ctx.chain_spec(), + network_client.clone(), + Box::pin(consensus_engine_stream), + pipeline, + Box::new(ctx.task_executor().clone()), + ctx.provider_factory().clone(), + ctx.blockchain_db().clone(), + pruner, + ctx.components().payload_builder().clone(), + engine_tree_config, + ctx.invalid_block_hook()?, + ctx.sync_metrics_tx(), + ); + + Either::Right(eth_service) + }; let event_sender = EventSender::default(); @@ -340,7 +373,9 @@ where ctx.task_executor().spawn_critical("consensus engine", async move { if let Some(initial_target) = initial_target { debug!(target: "reth::cli", %initial_target, "start backfill sync"); - eth_service.orchestrator_mut().start_backfill_sync(initial_target); + if let Either::Right(eth_service) = &mut engine_service { + eth_service.orchestrator_mut().start_backfill_sync(initial_target); + } } let mut res = Ok(()); @@ -351,10 +386,12 @@ where payload = built_payloads.select_next_some() => { if let Some(executed_block) = payload.executed_block() { debug!(target: "reth::cli", block=?executed_block.block().num_hash(), "inserting built payload"); - eth_service.orchestrator_mut().handler_mut().handler_mut().on_event(EngineApiRequest::InsertExecutedBlock(executed_block).into()); + if let Either::Right(eth_service) = &mut engine_service { + eth_service.orchestrator_mut().handler_mut().handler_mut().on_event(EngineApiRequest::InsertExecutedBlock(executed_block).into()); + } } } - event = eth_service.next() => { + event = engine_service.next() => { let Some(event) = event else { break }; debug!(target: "reth::cli", "Event: {event}"); match event { diff --git a/crates/node/events/src/node.rs b/crates/node/events/src/node.rs index c856c0ec9..e10caaee7 100644 --- a/crates/node/events/src/node.rs +++ b/crates/node/events/src/node.rs @@ -504,7 +504,7 @@ where } else if let Some(latest_block) = this.state.latest_block { let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap_or_default().as_secs(); - if now - this.state.latest_block_time.unwrap_or(0) > 60 { + if now.saturating_sub(this.state.latest_block_time.unwrap_or(0)) > 60 { // Once we start receiving consensus nodes, don't emit status unless stalled for // 1 minute info!( diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index 3c298bea9..4675029b6 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -13,6 +13,7 @@ workspace = true [dependencies] # reth reth-chainspec.workspace = true +reth-engine-local.workspace = true reth-primitives.workspace = true reth-payload-builder.workspace = true reth-auto-seal-consensus.workspace = true @@ -87,6 +88,7 @@ optimism = [ "reth-revm/optimism", "reth-auto-seal-consensus/optimism", "reth-optimism-rpc/optimism", + "reth-engine-local/optimism", ] asm-keccak = ["reth-primitives/asm-keccak"] test-utils = ["reth-node-builder/test-utils"] diff --git a/crates/payload/primitives/src/traits.rs b/crates/payload/primitives/src/traits.rs index 6ae6361fd..494ed68aa 100644 --- a/crates/payload/primitives/src/traits.rs +++ b/crates/payload/primitives/src/traits.rs @@ -160,12 +160,7 @@ impl PayloadAttributes for OpPayloadAttributes { } /// A builder that can return the current payload attribute. -pub trait PayloadAttributesBuilder: std::fmt::Debug + Send + Sync + 'static { - /// The payload attributes type returned by the builder. - type PayloadAttributes: PayloadAttributes; - /// The error type returned by [`PayloadAttributesBuilder::build`]. 
- type Error: core::error::Error + Send + Sync; - +pub trait PayloadAttributesBuilder: Send + Sync + 'static { /// Return a new payload attribute from the builder. - fn build(&self) -> Result; + fn build(&self, timestamp: u64) -> Attributes; } From f684dd4c4cebc9f0d0b6b1402530d45ae952e7f3 Mon Sep 17 00:00:00 2001 From: Delweng Date: Mon, 14 Oct 2024 23:45:26 +0800 Subject: [PATCH 043/425] chore(clippy): enable if_then_some_else_none lint (#11679) Signed-off-by: jsvisa Co-authored-by: Matthias Seitz --- Cargo.toml | 1 + crates/chainspec/src/spec.rs | 7 ++-- crates/consensus/auto-seal/src/lib.rs | 32 +++++++------------ crates/engine/local/src/miner.rs | 10 +++--- crates/engine/local/src/payload.rs | 18 +++++------ crates/ethereum/evm/src/lib.rs | 11 ++----- crates/net/network/src/cache.rs | 2 +- crates/net/p2p/src/test_utils/full_block.rs | 18 +++++------ crates/node/core/src/args/log.rs | 2 +- crates/optimism/evm/src/lib.rs | 11 ++----- .../src/segments/user/receipts_by_logs.rs | 6 +--- .../rpc-eth-api/src/helpers/pending_block.rs | 2 +- crates/storage/db-models/src/accounts.rs | 8 ++--- .../storage/db/src/implementation/mdbx/mod.rs | 7 ++-- .../src/providers/database/provider.rs | 14 ++++---- .../src/providers/static_file/manager.rs | 2 +- .../src/providers/static_file/writer.rs | 20 ++++++------ .../storage/provider/src/test_utils/mock.rs | 15 ++------- crates/transaction-pool/src/maintain.rs | 9 +++--- crates/trie/common/src/subnode.rs | 10 +++--- crates/trie/trie/src/trie_cursor/subnode.rs | 2 +- crates/trie/trie/src/witness.rs | 15 ++++----- 22 files changed, 87 insertions(+), 135 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 628d7d47b..ac0ab2d39 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -186,6 +186,7 @@ explicit_iter_loop = "warn" flat_map_option = "warn" from_iter_instead_of_collect = "warn" if_not_else = "warn" +if_then_some_else_none = "warn" implicit_clone = "warn" imprecise_flops = "warn" iter_on_empty_collections = "warn" diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index f80a20924..e43e0bc78 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -297,11 +297,8 @@ impl ChainSpec { }; // If Prague is activated at genesis we set requests root to an empty trie root. 
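Back to the `PayloadAttributesBuilder` rework from the previous patch (#11703): the associated `PayloadAttributes`/`Error` types are gone, the target attributes type becomes a generic parameter, and `build` now takes the block timestamp and returns the attributes infallibly. A minimal migration sketch for a hypothetical fixed-recipient builder (the struct is illustrative, not part of the patch):

    use alloy_primitives::{Address, B256};
    use reth_ethereum_engine_primitives::EthPayloadAttributes;
    use reth_payload_primitives::PayloadAttributesBuilder;

    /// Hypothetical builder that always pays the same fee recipient.
    #[derive(Debug)]
    struct FixedRecipientBuilder {
        recipient: Address,
    }

    impl PayloadAttributesBuilder<EthPayloadAttributes> for FixedRecipientBuilder {
        fn build(&self, timestamp: u64) -> EthPayloadAttributes {
            EthPayloadAttributes {
                timestamp,
                prev_randao: B256::ZERO,
                suggested_fee_recipient: self.recipient,
                withdrawals: None,
                parent_beacon_block_root: None,
            }
        }
    }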
- let requests_root = if self.is_prague_active_at_timestamp(self.genesis.timestamp) { - Some(EMPTY_ROOT_HASH) - } else { - None - }; + let requests_root = + self.is_prague_active_at_timestamp(self.genesis.timestamp).then_some(EMPTY_ROOT_HASH); Header { gas_limit: self.genesis.gas_limit, diff --git a/crates/consensus/auto-seal/src/lib.rs b/crates/consensus/auto-seal/src/lib.rs index f1ef64c8c..261227f10 100644 --- a/crates/consensus/auto-seal/src/lib.rs +++ b/crates/consensus/auto-seal/src/lib.rs @@ -282,17 +282,13 @@ impl StorageInner { parent.next_block_base_fee(chain_spec.base_fee_params_at_timestamp(timestamp)) }); - let blob_gas_used = if chain_spec.is_cancun_active_at_timestamp(timestamp) { - let mut sum_blob_gas_used = 0; - for tx in transactions { - if let Some(blob_tx) = tx.transaction.as_eip4844() { - sum_blob_gas_used += blob_tx.blob_gas(); - } - } - Some(sum_blob_gas_used) - } else { - None - }; + let blob_gas_used = chain_spec.is_cancun_active_at_timestamp(timestamp).then(|| { + transactions + .iter() + .filter_map(|tx| tx.transaction.as_eip4844()) + .map(|blob_tx| blob_tx.blob_gas()) + .sum::() + }); let mut header = Header { parent_hash: self.best_hash, @@ -304,7 +300,7 @@ impl StorageInner { gas_limit: chain_spec.max_gas_limit(), timestamp, base_fee_per_gas, - blob_gas_used: blob_gas_used.map(Into::into), + blob_gas_used, requests_root: requests.map(|r| proofs::calculate_requests_root(&r.0)), ..Default::default() }; @@ -316,14 +312,10 @@ impl StorageInner { header.blob_gas_used = Some(0); let (parent_excess_blob_gas, parent_blob_gas_used) = match parent { - Some(parent_block) - if chain_spec.is_cancun_active_at_timestamp(parent_block.timestamp) => - { - ( - parent_block.excess_blob_gas.unwrap_or_default(), - parent_block.blob_gas_used.unwrap_or_default(), - ) - } + Some(parent) if chain_spec.is_cancun_active_at_timestamp(parent.timestamp) => ( + parent.excess_blob_gas.unwrap_or_default(), + parent.blob_gas_used.unwrap_or_default(), + ), _ => (0, 0), }; header.excess_blob_gas = diff --git a/crates/engine/local/src/miner.rs b/crates/engine/local/src/miner.rs index e12a2a50d..f20d70b14 100644 --- a/crates/engine/local/src/miner.rs +++ b/crates/engine/local/src/miner.rs @@ -212,14 +212,12 @@ where let block = payload.block(); let cancun_fields = - if self.provider.chain_spec().is_cancun_active_at_timestamp(block.timestamp) { - Some(CancunPayloadFields { + self.provider.chain_spec().is_cancun_active_at_timestamp(block.timestamp).then(|| { + CancunPayloadFields { parent_beacon_block_root: block.parent_beacon_block_root.unwrap(), versioned_hashes: block.blob_versioned_hashes().into_iter().copied().collect(), - }) - } else { - None - }; + } + }); let (tx, rx) = oneshot::channel(); self.to_engine.send(BeaconEngineMessage::NewPayload { diff --git a/crates/engine/local/src/payload.rs b/crates/engine/local/src/payload.rs index 15d5ff2cf..5111360d5 100644 --- a/crates/engine/local/src/payload.rs +++ b/crates/engine/local/src/payload.rs @@ -31,16 +31,14 @@ where timestamp, prev_randao: B256::random(), suggested_fee_recipient: Address::random(), - withdrawals: if self.chain_spec.is_shanghai_active_at_timestamp(timestamp) { - Some(Default::default()) - } else { - None - }, - parent_beacon_block_root: if self.chain_spec.is_cancun_active_at_timestamp(timestamp) { - Some(B256::random()) - } else { - None - }, + withdrawals: self + .chain_spec + .is_shanghai_active_at_timestamp(timestamp) + .then(Default::default), + parent_beacon_block_root: self + .chain_spec + 
.is_cancun_active_at_timestamp(timestamp) + .then(B256::random), } } } diff --git a/crates/ethereum/evm/src/lib.rs b/crates/ethereum/evm/src/lib.rs index a71f26f70..8ea39f93d 100644 --- a/crates/ethereum/evm/src/lib.rs +++ b/crates/ethereum/evm/src/lib.rs @@ -134,17 +134,10 @@ impl ConfigureEvmEnv for EthEvmConfig { let spec_id = revm_spec_by_timestamp_after_merge(&self.chain_spec, attributes.timestamp); // if the parent block did not have excess blob gas (i.e. it was pre-cancun), but it is - // cancun now, we need to set the excess blob gas to the default value + // cancun now, we need to set the excess blob gas to the default value(0) let blob_excess_gas_and_price = parent .next_block_excess_blob_gas() - .or_else(|| { - if spec_id == SpecId::CANCUN { - // default excess blob gas is zero - Some(0) - } else { - None - } - }) + .or_else(|| (spec_id == SpecId::CANCUN).then_some(0)) .map(BlobExcessGasAndPrice::new); let mut basefee = parent.next_block_base_fee( diff --git a/crates/net/network/src/cache.rs b/crates/net/network/src/cache.rs index fb2daca66..758b49167 100644 --- a/crates/net/network/src/cache.rs +++ b/crates/net/network/src/cache.rs @@ -42,7 +42,7 @@ impl LruCache { pub fn insert_and_get_evicted(&mut self, entry: T) -> (bool, Option) { let new = self.inner.peek(&entry).is_none(); let evicted = - if new && (self.limit as usize) <= self.inner.len() { self.remove_lru() } else { None }; + (new && (self.limit as usize) <= self.inner.len()).then(|| self.remove_lru()).flatten(); _ = self.inner.get_or_insert(entry, || ()); (new, evicted) diff --git a/crates/net/p2p/src/test_utils/full_block.rs b/crates/net/p2p/src/test_utils/full_block.rs index acc01a60e..8a13f6932 100644 --- a/crates/net/p2p/src/test_utils/full_block.rs +++ b/crates/net/p2p/src/test_utils/full_block.rs @@ -185,15 +185,15 @@ impl HeadersClient for TestFullBlockClient { .filter_map(|_| { headers.iter().find_map(|(hash, header)| { // Checks if the header matches the specified block or number. - if BlockNumHash::new(header.number, *hash).matches_block_or_num(&block) { - match request.direction { - HeadersDirection::Falling => block = header.parent_hash.into(), - HeadersDirection::Rising => block = (header.number + 1).into(), - } - Some(header.clone()) - } else { - None - } + BlockNumHash::new(header.number, *hash).matches_block_or_num(&block).then( + || { + match request.direction { + HeadersDirection::Falling => block = header.parent_hash.into(), + HeadersDirection::Rising => block = (header.number + 1).into(), + } + header.clone() + }, + ) }) }) .collect::>(); diff --git a/crates/node/core/src/args/log.rs b/crates/node/core/src/args/log.rs index aa2e0cf5f..3d124fba2 100644 --- a/crates/node/core/src/args/log.rs +++ b/crates/node/core/src/args/log.rs @@ -78,7 +78,7 @@ impl LogArgs { format, self.verbosity.directive().to_string(), filter, - if use_color { Some(self.color.to_string()) } else { None }, + use_color.then(|| self.color.to_string()), ) } diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index b220b6056..158ed2e89 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -139,17 +139,10 @@ impl ConfigureEvmEnv for OptimismEvmConfig { let spec_id = revm_spec_by_timestamp_after_bedrock(&self.chain_spec, attributes.timestamp); // if the parent block did not have excess blob gas (i.e. 
it was pre-cancun), but it is - // cancun now, we need to set the excess blob gas to the default value + // cancun now, we need to set the excess blob gas to the default value(0) let blob_excess_gas_and_price = parent .next_block_excess_blob_gas() - .or_else(|| { - if spec_id.is_enabled_in(SpecId::CANCUN) { - // default excess blob gas is zero - Some(0) - } else { - None - } - }) + .or_else(|| (spec_id.is_enabled_in(SpecId::CANCUN)).then_some(0)) .map(BlobExcessGasAndPrice::new); let block_env = BlockEnv { diff --git a/crates/prune/prune/src/segments/user/receipts_by_logs.rs b/crates/prune/prune/src/segments/user/receipts_by_logs.rs index 489df7e72..05bc40b6c 100644 --- a/crates/prune/prune/src/segments/user/receipts_by_logs.rs +++ b/crates/prune/prune/src/segments/user/receipts_by_logs.rs @@ -267,11 +267,7 @@ mod tests { let mut receipt = random_receipt(&mut rng, transaction, Some(1)); receipt.logs.push(random_log( &mut rng, - if txi == (block.body.transactions.len() - 1) { - Some(deposit_contract_addr) - } else { - None - }, + (txi == (block.body.transactions.len() - 1)).then_some(deposit_contract_addr), Some(1), )); receipts.push((receipts.len() as u64, receipt)); diff --git a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs index 03597289f..85ad7fd18 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs @@ -415,7 +415,7 @@ pub trait LoadPendingBlock: EthApiTypes { // check if cancun is activated to set eip4844 header fields correctly let blob_gas_used = - if cfg.handler_cfg.spec_id >= SpecId::CANCUN { Some(sum_blob_gas_used) } else { None }; + (cfg.handler_cfg.spec_id >= SpecId::CANCUN).then_some(sum_blob_gas_used); // note(onbjerg): the rpc spec has not been changed to include requests, so for now we just // set these to empty diff --git a/crates/storage/db-models/src/accounts.rs b/crates/storage/db-models/src/accounts.rs index e1f477396..b0099d22d 100644 --- a/crates/storage/db-models/src/accounts.rs +++ b/crates/storage/db-models/src/accounts.rs @@ -39,13 +39,11 @@ impl Compact for AccountBeforeTx { let address = Address::from_slice(&buf[..20]); buf.advance(20); - let info = if len - 20 > 0 { + let info = (len - 20 > 0).then(|| { let (acc, advanced_buf) = Account::from_compact(buf, len - 20); buf = advanced_buf; - Some(acc) - } else { - None - }; + acc + }); (Self { address, info }, buf) } diff --git a/crates/storage/db/src/implementation/mdbx/mod.rs b/crates/storage/db/src/implementation/mdbx/mod.rs index 1deb86ba6..65b804e6a 100644 --- a/crates/storage/db/src/implementation/mdbx/mod.rs +++ b/crates/storage/db/src/implementation/mdbx/mod.rs @@ -256,10 +256,9 @@ impl DatabaseEnv { args: DatabaseArguments, ) -> Result { let _lock_file = if kind.is_rw() { - Some( - StorageLock::try_acquire(path) - .map_err(|err| DatabaseError::Other(err.to_string()))?, - ) + StorageLock::try_acquire(path) + .map_err(|err| DatabaseError::Other(err.to_string()))? 
+ .into() } else { None }; diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 33fed1e80..3b074a5af 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -542,18 +542,18 @@ impl DatabaseProvider { // even if empty let withdrawals = if self.chain_spec.is_shanghai_active_at_timestamp(header_ref.timestamp) { - Some( - withdrawals_cursor - .seek_exact(header_ref.number)? - .map(|(_, w)| w.withdrawals) - .unwrap_or_default(), - ) + withdrawals_cursor + .seek_exact(header_ref.number)? + .map(|(_, w)| w.withdrawals) + .unwrap_or_default() + .into() } else { None }; let requests = if self.chain_spec.is_prague_active_at_timestamp(header_ref.timestamp) { - Some(requests_cursor.seek_exact(header_ref.number)?.unwrap_or_default().1) + (requests_cursor.seek_exact(header_ref.number)?.unwrap_or_default().1) + .into() } else { None }; diff --git a/crates/storage/provider/src/providers/static_file/manager.rs b/crates/storage/provider/src/providers/static_file/manager.rs index ec76e9504..e233332a0 100644 --- a/crates/storage/provider/src/providers/static_file/manager.rs +++ b/crates/storage/provider/src/providers/static_file/manager.rs @@ -222,7 +222,7 @@ impl StaticFileProviderInner { /// Creates a new [`StaticFileProviderInner`]. fn new(path: impl AsRef, access: StaticFileAccess) -> ProviderResult { let _lock_file = if access.is_read_write() { - Some(StorageLock::try_acquire(path.as_ref())?) + StorageLock::try_acquire(path.as_ref())?.into() } else { None }; diff --git a/crates/storage/provider/src/providers/static_file/writer.rs b/crates/storage/provider/src/providers/static_file/writer.rs index 3858f1b14..8c31c021f 100644 --- a/crates/storage/provider/src/providers/static_file/writer.rs +++ b/crates/storage/provider/src/providers/static_file/writer.rs @@ -289,16 +289,16 @@ impl StaticFileProviderRW { // // If that expected block start is 0, then it means that there's no actual block data, and // there's no block data in static files. 
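For reference, the pattern this newly enabled `if_then_some_else_none` lint pushes toward is plain std: `bool::then` takes a closure and evaluates it lazily, while `bool::then_some` takes an already computed value, which is why the closure form shows up above wherever the `Some` arm does real work. A small standalone illustration (function names are illustrative):

    fn over_limit_msg(len: usize, limit: usize) -> Option<String> {
        // before: if len > limit { Some(format!("over by {}", len - limit)) } else { None }
        // lazy: the subtraction only runs when the condition holds
        (len > limit).then(|| format!("over by {}", len - limit))
    }

    fn feature_flag(enabled: bool) -> Option<u8> {
        // eager variant, fine for cheap, side-effect-free values
        enabled.then_some(1)
    }

    fn main() {
        assert_eq!(over_limit_msg(12, 10), Some("over by 2".to_string()));
        assert_eq!(feature_flag(false), None);
    }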
- let segment_max_block = match self.writer.user_header().block_range() { - Some(block_range) => Some(block_range.end()), - None => { - if self.writer.user_header().expected_block_start() > 0 { - Some(self.writer.user_header().expected_block_start() - 1) - } else { - None - } - } - }; + let segment_max_block = self + .writer + .user_header() + .block_range() + .as_ref() + .map(|block_range| block_range.end()) + .or_else(|| { + (self.writer.user_header().expected_block_start() > 0) + .then(|| self.writer.user_header().expected_block_start() - 1) + }); self.reader().update_index(self.writer.user_header().segment(), segment_max_block) } diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs index 3325d3ae9..c7c94b939 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -344,13 +344,8 @@ impl TransactionsProvider for MockEthProvider { .values() .flat_map(|block| &block.body.transactions) .enumerate() - .filter_map(|(tx_number, tx)| { - if range.contains(&(tx_number as TxNumber)) { - Some(tx.clone().into()) - } else { - None - } - }) + .filter(|&(tx_number, _)| range.contains(&(tx_number as TxNumber))) + .map(|(_, tx)| tx.clone().into()) .collect(); Ok(transactions) @@ -366,11 +361,7 @@ impl TransactionsProvider for MockEthProvider { .flat_map(|block| &block.body.transactions) .enumerate() .filter_map(|(tx_number, tx)| { - if range.contains(&(tx_number as TxNumber)) { - Some(tx.recover_signer()?) - } else { - None - } + range.contains(&(tx_number as TxNumber)).then(|| tx.recover_signer()).flatten() }) .collect(); diff --git a/crates/transaction-pool/src/maintain.rs b/crates/transaction-pool/src/maintain.rs index aaf2d6d12..23a8d0dc6 100644 --- a/crates/transaction-pool/src/maintain.rs +++ b/crates/transaction-pool/src/maintain.rs @@ -455,11 +455,10 @@ impl FinalizedBlockTracker { /// Updates the tracked finalized block and returns the new finalized block if it changed fn update(&mut self, finalized_block: Option) -> Option { let finalized = finalized_block?; - if self.last_finalized_block.replace(finalized).map_or(true, |last| last < finalized) { - Some(finalized) - } else { - None - } + self.last_finalized_block + .replace(finalized) + .map_or(true, |last| last < finalized) + .then_some(finalized) } } diff --git a/crates/trie/common/src/subnode.rs b/crates/trie/common/src/subnode.rs index 98ce76a32..c64b2317c 100644 --- a/crates/trie/common/src/subnode.rs +++ b/crates/trie/common/src/subnode.rs @@ -51,16 +51,14 @@ impl Compact for StoredSubNode { buf.advance(key_len); let nibbles_exists = buf.get_u8() != 0; - let nibble = if nibbles_exists { Some(buf.get_u8()) } else { None }; + let nibble = nibbles_exists.then(|| buf.get_u8()); let node_exists = buf.get_u8() != 0; - let node = if node_exists { + let node = node_exists.then(|| { let (node, rest) = BranchNodeCompact::from_compact(buf, 0); buf = rest; - Some(node) - } else { - None - }; + node + }); (Self { key, nibble, node }, buf) } diff --git a/crates/trie/trie/src/trie_cursor/subnode.rs b/crates/trie/trie/src/trie_cursor/subnode.rs index c2ba839eb..9d5a2770b 100644 --- a/crates/trie/trie/src/trie_cursor/subnode.rs +++ b/crates/trie/trie/src/trie_cursor/subnode.rs @@ -49,7 +49,7 @@ impl From for CursorSubNode { impl From for StoredSubNode { fn from(value: CursorSubNode) -> Self { - let nibble = if value.nibble >= 0 { Some(value.nibble as u8) } else { None }; + let nibble = (value.nibble >= 0).then_some(value.nibble as u8); Self 
{ key: value.key.to_vec(), nibble, node: value.node } } } diff --git a/crates/trie/trie/src/witness.rs b/crates/trie/trie/src/witness.rs index c042a0d82..582319c7f 100644 --- a/crates/trie/trie/src/witness.rs +++ b/crates/trie/trie/src/witness.rs @@ -111,14 +111,13 @@ where .accounts .get(&hashed_address) .ok_or(TrieWitnessError::MissingAccount(hashed_address))?; - let value = if account.is_some() || storage_multiproof.root != EMPTY_ROOT_HASH { - account_rlp.clear(); - TrieAccount::from((account.unwrap_or_default(), storage_multiproof.root)) - .encode(&mut account_rlp as &mut dyn BufMut); - Some(account_rlp.clone()) - } else { - None - }; + let value = + (account.is_some() || storage_multiproof.root != EMPTY_ROOT_HASH).then(|| { + account_rlp.clear(); + TrieAccount::from((account.unwrap_or_default(), storage_multiproof.root)) + .encode(&mut account_rlp as &mut dyn BufMut); + account_rlp.clone() + }); let key = Nibbles::unpack(hashed_address); account_trie_nodes.extend( self.target_nodes( From 600a394571e8397295256b25ed50c8c9ba6f4c8d Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Mon, 14 Oct 2024 17:52:34 +0200 Subject: [PATCH 044/425] feat: update SystemCaller (#11718) --- .../engine/invalid-block-hooks/src/witness.rs | 3 ++- crates/engine/util/src/reorg.rs | 2 +- crates/ethereum/evm/src/execute.rs | 12 +++++---- crates/ethereum/payload/src/lib.rs | 2 +- crates/evm/src/either.rs | 2 +- crates/evm/src/execute.rs | 2 +- crates/evm/src/system_calls/mod.rs | 26 +++++++++---------- crates/optimism/evm/src/execute.rs | 12 +++++---- crates/optimism/payload/src/builder.rs | 2 +- .../rpc-eth-api/src/helpers/pending_block.rs | 3 +-- crates/rpc/rpc-eth-api/src/helpers/trace.rs | 4 +-- 11 files changed, 36 insertions(+), 34 deletions(-) diff --git a/crates/engine/invalid-block-hooks/src/witness.rs b/crates/engine/invalid-block-hooks/src/witness.rs index 51978311f..bb227e304 100644 --- a/crates/engine/invalid-block-hooks/src/witness.rs +++ b/crates/engine/invalid-block-hooks/src/witness.rs @@ -84,7 +84,8 @@ where EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, Default::default()), ); - let mut system_caller = SystemCaller::new(&self.evm_config, self.provider.chain_spec()); + let mut system_caller = + SystemCaller::new(self.evm_config.clone(), self.provider.chain_spec()); // Apply pre-block system contract calls. 
system_caller.apply_pre_execution_changes(&block.clone().unseal(), &mut evm)?; diff --git a/crates/engine/util/src/reorg.rs b/crates/engine/util/src/reorg.rs index bd7b1b956..611095101 100644 --- a/crates/engine/util/src/reorg.rs +++ b/crates/engine/util/src/reorg.rs @@ -286,7 +286,7 @@ where let mut evm = evm_config.evm_with_env(&mut state, env); // apply eip-4788 pre block contract call - let mut system_caller = SystemCaller::new(evm_config, chain_spec); + let mut system_caller = SystemCaller::new(evm_config.clone(), chain_spec); system_caller.apply_beacon_root_contract_call( reorg_target.timestamp, diff --git a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs index 6513cf75f..108e1f87c 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -141,10 +141,12 @@ where where DB: Database, DB::Error: Into + Display, - F: OnStateHook, + F: OnStateHook + 'static, { - let mut system_caller = - SystemCaller::new(&self.evm_config, &self.chain_spec).with_state_hook(state_hook); + let mut system_caller = SystemCaller::new(self.evm_config.clone(), &self.chain_spec); + if let Some(hook) = state_hook { + system_caller.with_state_hook(Some(Box::new(hook) as Box)); + } system_caller.apply_pre_execution_changes(block, &mut evm)?; @@ -290,7 +292,7 @@ where state_hook: Option, ) -> Result where - F: OnStateHook, + F: OnStateHook + 'static, { // 1. prepare state on new block self.on_new_block(&block.header); @@ -396,7 +398,7 @@ where state_hook: F, ) -> Result where - F: OnStateHook, + F: OnStateHook + 'static, { let BlockExecutionInput { block, total_difficulty } = input; let EthExecuteOutput { receipts, requests, gas_used } = self diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 97237efa8..248aa3486 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -160,7 +160,7 @@ where let block_number = initialized_block_env.number.to::(); - let mut system_caller = SystemCaller::new(&evm_config, chain_spec.clone()); + let mut system_caller = SystemCaller::new(evm_config.clone(), chain_spec.clone()); // apply eip-4788 pre block contract call system_caller diff --git a/crates/evm/src/either.rs b/crates/evm/src/either.rs index 8022c68c4..82f84301f 100644 --- a/crates/evm/src/either.rs +++ b/crates/evm/src/either.rs @@ -97,7 +97,7 @@ where state_hook: F, ) -> Result where - F: OnStateHook, + F: OnStateHook + 'static, { match self { Self::Left(a) => a.execute_with_state_hook(input, state_hook), diff --git a/crates/evm/src/execute.rs b/crates/evm/src/execute.rs index 145eca29c..476c695e7 100644 --- a/crates/evm/src/execute.rs +++ b/crates/evm/src/execute.rs @@ -56,7 +56,7 @@ pub trait Executor { state_hook: F, ) -> Result where - F: OnStateHook; + F: OnStateHook + 'static; } /// A general purpose executor that can execute multiple inputs in sequence, validate the outputs, diff --git a/crates/evm/src/system_calls/mod.rs b/crates/evm/src/system_calls/mod.rs index 43baa1c76..5dc3f35bd 100644 --- a/crates/evm/src/system_calls/mod.rs +++ b/crates/evm/src/system_calls/mod.rs @@ -1,7 +1,7 @@ //! System contract call functions. use crate::ConfigureEvm; -use alloc::vec::Vec; +use alloc::{boxed::Box, vec::Vec}; use core::fmt::Display; use reth_chainspec::EthereumHardforks; use reth_execution_errors::BlockExecutionError; @@ -42,27 +42,26 @@ impl OnStateHook for NoopHook { /// /// This can be used to chain system transaction calls. 
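In practice the `SystemCaller` rework in this patch means two things for callers, visible in the updated call sites above: the EVM config is now owned (hence the added `.clone()`s) and a state hook is installed on an existing instance as a boxed trait object, instead of through the old consuming, hook-generic `with_state_hook`. A sketch of the new call pattern, assuming `evm_config`, `chain_spec` and an optional `hook: Option<H>` with `H: OnStateHook + 'static` are already in scope:

    use reth_evm::system_calls::{OnStateHook, SystemCaller};

    let mut system_caller = SystemCaller::new(evm_config.clone(), chain_spec.clone());
    if let Some(hook) = hook {
        // type-erase the hook; `SystemCaller` no longer carries a hook type parameter
        system_caller.with_state_hook(Some(Box::new(hook) as Box<dyn OnStateHook>));
    }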
#[allow(missing_debug_implementations)] -pub struct SystemCaller<'a, EvmConfig, Chainspec, Hook = NoopHook> { - evm_config: &'a EvmConfig, +pub struct SystemCaller { + evm_config: EvmConfig, chain_spec: Chainspec, /// Optional hook to be called after each state change. - hook: Option, + hook: Option>, } -impl<'a, EvmConfig, Chainspec> SystemCaller<'a, EvmConfig, Chainspec, NoopHook> { +impl SystemCaller { /// Create a new system caller with the given EVM config, database, and chain spec, and creates /// the EVM with the given initialized config and block environment. - pub const fn new(evm_config: &'a EvmConfig, chain_spec: Chainspec) -> Self { + pub const fn new(evm_config: EvmConfig, chain_spec: Chainspec) -> Self { Self { evm_config, chain_spec, hook: None } } + /// Installs a custom hook to be called after each state change. - pub fn with_state_hook( - self, - hook: Option, - ) -> SystemCaller<'a, EvmConfig, Chainspec, H> { - let Self { evm_config, chain_spec, .. } = self; - SystemCaller { evm_config, chain_spec, hook } + pub fn with_state_hook(&mut self, hook: Option>) -> &mut Self { + self.hook = hook; + self } + /// Convenience method to consume the type and drop borrowed fields pub fn finish(self) {} } @@ -85,11 +84,10 @@ where .build() } -impl SystemCaller<'_, EvmConfig, Chainspec, Hook> +impl SystemCaller where EvmConfig: ConfigureEvm
, Chainspec: EthereumHardforks, - Hook: OnStateHook, { /// Apply pre execution changes. pub fn apply_pre_execution_changes( diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index f4abb8c88..ee0d028e4 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -121,10 +121,12 @@ where ) -> Result<(Vec, u64), BlockExecutionError> where DB: Database + Display>, - F: OnStateHook, + F: OnStateHook + 'static, { - let mut system_caller = - SystemCaller::new(&self.evm_config, &self.chain_spec).with_state_hook(state_hook); + let mut system_caller = SystemCaller::new(self.evm_config.clone(), &self.chain_spec); + if let Some(hook) = state_hook { + system_caller.with_state_hook(Some(Box::new(hook) as Box)); + } // apply pre execution changes system_caller.apply_beacon_root_contract_call( @@ -306,7 +308,7 @@ where state_hook: Option, ) -> Result<(Vec, u64), BlockExecutionError> where - F: OnStateHook, + F: OnStateHook + 'static, { // 1. prepare state on new block self.on_new_block(&block.header); @@ -410,7 +412,7 @@ where state_hook: F, ) -> Result where - F: OnStateHook, + F: OnStateHook + 'static, { let BlockExecutionInput { block, total_difficulty } = input; let (receipts, gas_used) = self.execute_without_verification_with_state_hook( diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 2ff3d3342..0a8dcdb12 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -201,7 +201,7 @@ where ); // apply eip-4788 pre block contract call - let mut system_caller = SystemCaller::new(&evm_config, &chain_spec); + let mut system_caller = SystemCaller::new(evm_config.clone(), &chain_spec); system_caller .pre_block_beacon_root_contract_call( diff --git a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs index 85ad7fd18..26e0ffc74 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs @@ -260,8 +260,7 @@ pub trait LoadPendingBlock: EthApiTypes { let chain_spec = self.provider().chain_spec(); - let evm_config = self.evm_config().clone(); - let mut system_caller = SystemCaller::new(&evm_config, chain_spec.clone()); + let mut system_caller = SystemCaller::new(self.evm_config().clone(), chain_spec.clone()); let parent_beacon_block_root = if origin.is_actual_pending() { // apply eip-4788 pre block contract call if we got the block from the CL with the real diff --git a/crates/rpc/rpc-eth-api/src/helpers/trace.rs b/crates/rpc/rpc-eth-api/src/helpers/trace.rs index 2dc1c5b47..457cbb481 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/trace.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/trace.rs @@ -199,7 +199,7 @@ pub trait Trace: LoadState { // apply relevant system calls let mut system_caller = SystemCaller::new( - Trace::evm_config(&this), + Trace::evm_config(&this).clone(), LoadState::provider(&this).chain_spec(), ); system_caller @@ -332,7 +332,7 @@ pub trait Trace: LoadState { // apply relevant system calls let mut system_caller = SystemCaller::new( - Trace::evm_config(&this), + Trace::evm_config(&this).clone(), LoadState::provider(&this).chain_spec(), ); system_caller From 9f9de0fab96712761944806120b1764cf1bee0eb Mon Sep 17 00:00:00 2001 From: Delweng Date: Tue, 15 Oct 2024 01:24:20 +0800 Subject: [PATCH 045/425] fix(rpc/trace): return empty if after >= traces (#11715) Signed-off-by: jsvisa Co-authored-by: Matthias Seitz --- 
crates/rpc/rpc/src/trace.rs | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/crates/rpc/rpc/src/trace.rs b/crates/rpc/rpc/src/trace.rs index 05588d4da..0cd94ef15 100644 --- a/crates/rpc/rpc/src/trace.rs +++ b/crates/rpc/rpc/src/trace.rs @@ -322,13 +322,18 @@ where } } - // apply after and count to traces if specified, this allows for a pagination style. - // only consider traces after - if let Some(after) = after.map(|a| a as usize).filter(|a| *a < all_traces.len()) { - all_traces = all_traces.split_off(after); + // Skips the first `after` number of matching traces. + // If `after` is greater than or equal to the number of matched traces, it returns an empty + // array. + if let Some(after) = after.map(|a| a as usize) { + if after < all_traces.len() { + all_traces.drain(..after); + } else { + return Ok(vec![]) + } } - // at most, return count of traces + // Return at most `count` of traces if let Some(count) = count { let count = count as usize; if count < all_traces.len() { From 0dbc37463964e6985910a58008113e4c90eb506d Mon Sep 17 00:00:00 2001 From: Dan Cline <6798349+Rjected@users.noreply.github.com> Date: Mon, 14 Oct 2024 18:38:15 -0400 Subject: [PATCH 046/425] feat(ci): add workflow for git sha container builds (#11721) --- .github/workflows/docker-git.yml | 44 ++++++++++++++++++++++++++++++++ .github/workflows/docker.yml | 1 - Makefile | 17 ++++++++++++ 3 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/docker-git.yml diff --git a/.github/workflows/docker-git.yml b/.github/workflows/docker-git.yml new file mode 100644 index 000000000..2e3ad59ae --- /dev/null +++ b/.github/workflows/docker-git.yml @@ -0,0 +1,44 @@ +# Publishes the Docker image, only to be used with `workflow_dispatch`. The +# images from this workflow will be tagged with the git sha of the branch used +# and will NOT tag it as `latest`. 
+ +name: docker-git + +on: + workflow_dispatch: {} + +env: + REPO_NAME: ${{ github.repository_owner }}/reth + IMAGE_NAME: ${{ github.repository_owner }}/reth + OP_IMAGE_NAME: ${{ github.repository_owner }}/op-reth + CARGO_TERM_COLOR: always + DOCKER_IMAGE_NAME: ghcr.io/${{ github.repository_owner }}/reth + OP_DOCKER_IMAGE_NAME: ghcr.io/${{ github.repository_owner }}/op-reth + DOCKER_USERNAME: ${{ github.actor }} + GIT_SHA: ${{ github.sha }} + +jobs: + build: + name: build and push + runs-on: ubuntu-20.04 + permissions: + packages: write + contents: read + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + - uses: Swatinem/rust-cache@v2 + with: + cache-on-failure: true + - uses: taiki-e/install-action@cross + - name: Log in to Docker + run: | + echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io --username ${DOCKER_USERNAME} --password-stdin + - name: Set up Docker builder + run: | + docker run --privileged --rm tonistiigi/binfmt --install arm64,amd64 + docker buildx create --use --name cross-builder + - name: Build and push the git-sha-tagged reth image + run: make PROFILE=maxperf GIT_SHA=$GIT_SHA docker-build-push-git-sha + - name: Build and push the git-sha-tagged op-reth image + run: make IMAGE_NAME=$OP_IMAGE_NAME DOCKER_IMAGE_NAME=$OP_DOCKER_IMAGE_NAME GIT_SHA=$GIT_SHA PROFILE=maxperf op-docker-build-push-git-sha diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 2af324a39..7b6f0d51e 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -3,7 +3,6 @@ name: docker on: - workflow_dispatch: {} push: tags: - v* diff --git a/Makefile b/Makefile index 4d897c7ee..908f1ef24 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,7 @@ # Heavily inspired by Lighthouse: https://github.com/sigp/lighthouse/blob/693886b94176faa4cb450f024696cb69cda2fe58/Makefile .DEFAULT_GOAL := help +GIT_SHA ?= $(shell git rev-parse HEAD) GIT_TAG ?= $(shell git describe --tags --abbrev=0) BIN_DIR = "dist/bin" @@ -199,6 +200,14 @@ ef-tests: $(EF_TESTS_DIR) ## Runs Ethereum Foundation tests. docker-build-push: ## Build and push a cross-arch Docker image tagged with the latest git tag. $(call docker_build_push,$(GIT_TAG),$(GIT_TAG)) +# Note: This requires a buildx builder with emulation support. For example: +# +# `docker run --privileged --rm tonistiigi/binfmt --install amd64,arm64` +# `docker buildx create --use --driver docker-container --name cross-builder` +.PHONY: docker-build-push-git-sha +docker-build-push-git-sha: ## Build and push a cross-arch Docker image tagged with the latest git sha. + $(call docker_build_push,$(GIT_SHA),$(GIT_SHA)) + # Note: This requires a buildx builder with emulation support. For example: # # `docker run --privileged --rm tonistiigi/binfmt --install amd64,arm64` @@ -243,6 +252,14 @@ endef op-docker-build-push: ## Build and push a cross-arch Docker image tagged with the latest git tag. $(call op_docker_build_push,$(GIT_TAG),$(GIT_TAG)) +# Note: This requires a buildx builder with emulation support. For example: +# +# `docker run --privileged --rm tonistiigi/binfmt --install amd64,arm64` +# `docker buildx create --use --driver docker-container --name cross-builder` +.PHONY: op-docker-build-push-git-sha +op-docker-build-push-git-sha: ## Build and push a cross-arch Docker image tagged with the latest git sha. + $(call op_docker_build_push,$(GIT_SHA),$(GIT_SHA)) + # Note: This requires a buildx builder with emulation support. 
For example: # # `docker run --privileged --rm tonistiigi/binfmt --install amd64,arm64` From cf38ff540137692a2e1beb01500551638815c3d7 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Tue, 15 Oct 2024 10:02:00 +0200 Subject: [PATCH 047/425] primitive: introduce reth `Transaction` trait (#11728) --- crates/primitives-traits/src/lib.rs | 3 +++ crates/primitives-traits/src/transaction.rs | 26 +++++++++++++++++++++ 2 files changed, 29 insertions(+) create mode 100644 crates/primitives-traits/src/transaction.rs diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index 9c244226b..dd68607f5 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -23,6 +23,9 @@ pub use account::{Account, Bytecode}; pub mod receipt; pub use receipt::Receipt; +pub mod transaction; +pub use transaction::Transaction; + mod integer_list; pub use integer_list::{IntegerList, IntegerListError}; diff --git a/crates/primitives-traits/src/transaction.rs b/crates/primitives-traits/src/transaction.rs new file mode 100644 index 000000000..93645ead8 --- /dev/null +++ b/crates/primitives-traits/src/transaction.rs @@ -0,0 +1,26 @@ +//! Transaction abstraction + +use alloc::fmt; + +use reth_codecs::Compact; +use serde::{Deserialize, Serialize}; + +/// Helper trait that unifies all behaviour required by transaction to support full node operations. +pub trait FullTransaction: Transaction + Compact {} + +impl FullTransaction for T where T: Transaction + Compact {} + +/// Abstraction of a transaction. +pub trait Transaction: + alloy_consensus::Transaction + + Clone + + fmt::Debug + + PartialEq + + Eq + + Default + + alloy_rlp::Encodable + + alloy_rlp::Decodable + + Serialize + + for<'de> Deserialize<'de> +{ +} From e0a26ac9a2c19d4591f830958a7c9e1f336448cc Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 15 Oct 2024 10:08:45 +0200 Subject: [PATCH 048/425] feat: add node builder helpers (#11731) --- crates/node/builder/src/builder/mod.rs | 42 ++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/crates/node/builder/src/builder/mod.rs b/crates/node/builder/src/builder/mod.rs index 9f7254bad..fb94413fe 100644 --- a/crates/node/builder/src/builder/mod.rs +++ b/crates/node/builder/src/builder/mod.rs @@ -170,6 +170,18 @@ impl NodeBuilder<(), ChainSpec> { { f(self) } + + /// Apply a function to the builder, if the condition is `true`. + pub fn apply_if(self, cond: bool, f: F) -> Self + where + F: FnOnce(Self) -> Self, + { + if cond { + f(self) + } else { + self + } + } } impl NodeBuilder { @@ -421,6 +433,18 @@ where f(self) } + /// Apply a function to the builder, if the condition is `true`. + pub fn apply_if(self, cond: bool, f: F) -> Self + where + F: FnOnce(Self) -> Self, + { + if cond { + f(self) + } else { + self + } + } + /// Sets the hook that is run once the node's components are initialized. pub fn on_component_initialized(self, hook: F) -> Self where @@ -482,6 +506,24 @@ where } } + /// Installs an `ExEx` (Execution Extension) in the node if the condition is true. + /// + /// # Note + /// + /// The `ExEx` ID must be unique. + pub fn install_exex_if(self, cond: bool, exex_id: impl Into, exex: F) -> Self + where + F: FnOnce(ExExContext>) -> R + Send + 'static, + R: Future> + Send, + E: Future> + Send, + { + if cond { + self.install_exex(exex_id, exex) + } else { + self + } + } + /// Launches the node with the given launcher. 
pub async fn launch_with(self, launcher: L) -> eyre::Result where From 2a862456492e49f9d31efc65547905de70b99484 Mon Sep 17 00:00:00 2001 From: caglarkaya Date: Tue, 15 Oct 2024 11:19:23 +0300 Subject: [PATCH 049/425] feat: extend txpool remove txs utility (#11702) --- crates/transaction-pool/src/lib.rs | 14 ++ crates/transaction-pool/src/noop.rs | 14 ++ crates/transaction-pool/src/pool/mod.rs | 34 +++- crates/transaction-pool/src/pool/txpool.rs | 174 +++++++++++++++++++++ crates/transaction-pool/src/traits.rs | 18 ++- 5 files changed, 252 insertions(+), 2 deletions(-) diff --git a/crates/transaction-pool/src/lib.rs b/crates/transaction-pool/src/lib.rs index a5acd6edb..75465afac 100644 --- a/crates/transaction-pool/src/lib.rs +++ b/crates/transaction-pool/src/lib.rs @@ -463,6 +463,20 @@ where self.pool.remove_transactions(hashes) } + fn remove_transactions_and_descendants( + &self, + hashes: Vec, + ) -> Vec>> { + self.pool.remove_transactions_and_descendants(hashes) + } + + fn remove_transactions_by_sender( + &self, + sender: Address, + ) -> Vec>> { + self.pool.remove_transactions_by_sender(sender) + } + fn retain_unknown(&self, announcement: &mut A) where A: HandleMempoolData, diff --git a/crates/transaction-pool/src/noop.rs b/crates/transaction-pool/src/noop.rs index ddab4f622..681aa9896 100644 --- a/crates/transaction-pool/src/noop.rs +++ b/crates/transaction-pool/src/noop.rs @@ -183,6 +183,20 @@ impl TransactionPool for NoopTransactionPool { vec![] } + fn remove_transactions_and_descendants( + &self, + _hashes: Vec, + ) -> Vec>> { + vec![] + } + + fn remove_transactions_by_sender( + &self, + _sender: Address, + ) -> Vec>> { + vec![] + } + fn retain_unknown(&self, _announcement: &mut A) where A: HandleMempoolData, diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index 090b92fb6..44dd5a8d5 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -195,7 +195,7 @@ where pub(crate) fn block_info(&self) -> BlockInfo { self.get_pool_data().block_info() } - /// Returns the currently tracked block + /// Sets the currently tracked block pub(crate) fn set_block_info(&self, info: BlockInfo) { self.pool.write().set_block_info(info) } @@ -715,6 +715,38 @@ where removed } + /// Removes and returns all matching transactions and their dependent transactions from the + /// pool. + pub(crate) fn remove_transactions_and_descendants( + &self, + hashes: Vec, + ) -> Vec>> { + if hashes.is_empty() { + return Vec::new() + } + let removed = self.pool.write().remove_transactions_and_descendants(hashes); + + let mut listener = self.event_listener.write(); + + removed.iter().for_each(|tx| listener.discarded(tx.hash())); + + removed + } + + pub(crate) fn remove_transactions_by_sender( + &self, + sender: Address, + ) -> Vec>> { + let sender_id = self.get_sender_id(sender); + let removed = self.pool.write().remove_transactions_by_sender(sender_id); + + let mut listener = self.event_listener.write(); + + removed.iter().for_each(|tx| listener.discarded(tx.hash())); + + removed + } + /// Removes and returns all transactions that are present in the pool. 
pub(crate) fn retain_unknown(&self, announcement: &mut A) where diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index 10605565c..7aaa77df4 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -663,6 +663,38 @@ impl TxPool { txs } + /// Removes and returns all matching transactions and their descendants from the pool. + pub(crate) fn remove_transactions_and_descendants( + &mut self, + hashes: Vec, + ) -> Vec>> { + let mut removed = Vec::new(); + for hash in hashes { + if let Some(tx) = self.remove_transaction_by_hash(&hash) { + removed.push(tx.clone()); + self.remove_descendants(tx.id(), &mut removed); + } + } + self.update_size_metrics(); + removed + } + + /// Removes all transactions from the given sender. + pub(crate) fn remove_transactions_by_sender( + &mut self, + sender_id: SenderId, + ) -> Vec>> { + let mut removed = Vec::new(); + let txs = self.get_transactions_by_sender(sender_id); + for tx in txs { + if let Some(tx) = self.remove_transaction(tx.id()) { + removed.push(tx); + } + } + self.update_size_metrics(); + removed + } + /// Remove the transaction from the __entire__ pool. /// /// This includes the total set of transaction and the subpool it currently resides in. @@ -2963,6 +2995,148 @@ mod tests { assert_eq!(vec![v1.nonce()], pool_txs); } #[test] + fn test_remove_transactions() { + let on_chain_balance = U256::from(10_000); + let on_chain_nonce = 0; + let mut f = MockTransactionFactory::default(); + let mut pool = TxPool::new(MockOrdering::default(), Default::default()); + + let tx_0 = MockTransaction::eip1559().set_gas_price(100).inc_limit(); + let tx_1 = tx_0.next(); + let tx_2 = MockTransaction::eip1559().set_gas_price(100).inc_limit(); + let tx_3 = tx_2.next(); + + // Create 4 transactions + let v0 = f.validated(tx_0); + let v1 = f.validated(tx_1); + let v2 = f.validated(tx_2); + let v3 = f.validated(tx_3); + + // Add them to the pool + let _res = pool.add_transaction(v0.clone(), on_chain_balance, on_chain_nonce).unwrap(); + let _res = pool.add_transaction(v1.clone(), on_chain_balance, on_chain_nonce).unwrap(); + let _res = pool.add_transaction(v2.clone(), on_chain_balance, on_chain_nonce).unwrap(); + let _res = pool.add_transaction(v3.clone(), on_chain_balance, on_chain_nonce).unwrap(); + + assert_eq!(0, pool.queued_transactions().len()); + assert_eq!(4, pool.pending_transactions().len()); + + pool.remove_transactions(vec![*v0.hash(), *v2.hash()]); + + assert_eq!(0, pool.queued_transactions().len()); + assert_eq!(2, pool.pending_transactions().len()); + assert!(pool.contains(v1.hash())); + assert!(pool.contains(v3.hash())); + } + + #[test] + fn test_remove_transactions_and_descendants() { + let on_chain_balance = U256::from(10_000); + let on_chain_nonce = 0; + let mut f = MockTransactionFactory::default(); + let mut pool = TxPool::new(MockOrdering::default(), Default::default()); + + let tx_0 = MockTransaction::eip1559().set_gas_price(100).inc_limit(); + let tx_1 = tx_0.next(); + let tx_2 = MockTransaction::eip1559().set_gas_price(100).inc_limit(); + let tx_3 = tx_2.next(); + let tx_4 = tx_3.next(); + + // Create 5 transactions + let v0 = f.validated(tx_0); + let v1 = f.validated(tx_1); + let v2 = f.validated(tx_2); + let v3 = f.validated(tx_3); + let v4 = f.validated(tx_4); + + // Add them to the pool + let _res = pool.add_transaction(v0.clone(), on_chain_balance, on_chain_nonce).unwrap(); + let _res = pool.add_transaction(v1, on_chain_balance, 
on_chain_nonce).unwrap(); + let _res = pool.add_transaction(v2.clone(), on_chain_balance, on_chain_nonce).unwrap(); + let _res = pool.add_transaction(v3, on_chain_balance, on_chain_nonce).unwrap(); + let _res = pool.add_transaction(v4, on_chain_balance, on_chain_nonce).unwrap(); + + assert_eq!(0, pool.queued_transactions().len()); + assert_eq!(5, pool.pending_transactions().len()); + + pool.remove_transactions_and_descendants(vec![*v0.hash(), *v2.hash()]); + + assert_eq!(0, pool.queued_transactions().len()); + assert_eq!(0, pool.pending_transactions().len()); + } + #[test] + fn test_remove_descendants() { + let on_chain_balance = U256::from(10_000); + let on_chain_nonce = 0; + let mut f = MockTransactionFactory::default(); + let mut pool = TxPool::new(MockOrdering::default(), Default::default()); + + let tx_0 = MockTransaction::eip1559().set_gas_price(100).inc_limit(); + let tx_1 = tx_0.next(); + let tx_2 = tx_1.next(); + let tx_3 = tx_2.next(); + + // Create 4 transactions + let v0 = f.validated(tx_0); + let v1 = f.validated(tx_1); + let v2 = f.validated(tx_2); + let v3 = f.validated(tx_3); + + // Add them to the pool + let _res = pool.add_transaction(v0.clone(), on_chain_balance, on_chain_nonce).unwrap(); + let _res = pool.add_transaction(v1, on_chain_balance, on_chain_nonce).unwrap(); + let _res = pool.add_transaction(v2, on_chain_balance, on_chain_nonce).unwrap(); + let _res = pool.add_transaction(v3, on_chain_balance, on_chain_nonce).unwrap(); + + assert_eq!(0, pool.queued_transactions().len()); + assert_eq!(4, pool.pending_transactions().len()); + + let mut removed = Vec::new(); + pool.remove_transaction(v0.id()); + pool.remove_descendants(v0.id(), &mut removed); + + assert_eq!(0, pool.queued_transactions().len()); + assert_eq!(0, pool.pending_transactions().len()); + assert_eq!(3, removed.len()); + } + #[test] + fn test_remove_transactions_by_sender() { + let on_chain_balance = U256::from(10_000); + let on_chain_nonce = 0; + let mut f = MockTransactionFactory::default(); + let mut pool = TxPool::new(MockOrdering::default(), Default::default()); + + let tx_0 = MockTransaction::eip1559().set_gas_price(100).inc_limit(); + let tx_1 = tx_0.next(); + let tx_2 = MockTransaction::eip1559().set_gas_price(100).inc_limit(); + let tx_3 = tx_2.next(); + let tx_4 = tx_3.next(); + + // Create 5 transactions + let v0 = f.validated(tx_0); + let v1 = f.validated(tx_1); + let v2 = f.validated(tx_2); + let v3 = f.validated(tx_3); + let v4 = f.validated(tx_4); + + // Add them to the pool + let _res = pool.add_transaction(v0.clone(), on_chain_balance, on_chain_nonce).unwrap(); + let _res = pool.add_transaction(v1.clone(), on_chain_balance, on_chain_nonce).unwrap(); + let _res = pool.add_transaction(v2.clone(), on_chain_balance, on_chain_nonce).unwrap(); + let _res = pool.add_transaction(v3, on_chain_balance, on_chain_nonce).unwrap(); + let _res = pool.add_transaction(v4, on_chain_balance, on_chain_nonce).unwrap(); + + assert_eq!(0, pool.queued_transactions().len()); + assert_eq!(5, pool.pending_transactions().len()); + + pool.remove_transactions_by_sender(v2.sender_id()); + + assert_eq!(0, pool.queued_transactions().len()); + assert_eq!(2, pool.pending_transactions().len()); + assert!(pool.contains(v0.hash())); + assert!(pool.contains(v1.hash())); + } + #[test] fn wrong_best_order_of_transactions() { let on_chain_balance = U256::from(10_000); let mut on_chain_nonce = 0; diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index adae238e4..07fe9b9c2 100644 --- 
a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -291,16 +291,32 @@ pub trait TransactionPool: Send + Sync + Clone { /// Consumer: RPC fn all_transactions(&self) -> AllPoolTransactions; + /// Removes all transactions corresponding to the given hashes. + /// + /// Consumer: Utility + fn remove_transactions( + &self, + hashes: Vec, + ) -> Vec>>; + /// Removes all transactions corresponding to the given hashes. /// /// Also removes all _dependent_ transactions. /// /// Consumer: Utility - fn remove_transactions( + fn remove_transactions_and_descendants( &self, hashes: Vec, ) -> Vec>>; + /// Removes all transactions from the given sender + /// + /// Consumer: Utility + fn remove_transactions_by_sender( + &self, + sender: Address, + ) -> Vec>>; + /// Retains only those hashes that are unknown to the pool. /// In other words, removes all transactions from the given set that are currently present in /// the pool. Returns hashes already known to the pool. From 3ab1f9559e2e9bb5c2c7fe1f2e2447142f8e9576 Mon Sep 17 00:00:00 2001 From: Deil Urba Date: Tue, 15 Oct 2024 09:21:01 +0100 Subject: [PATCH 050/425] feat: replace once_cell with std (#11694) Co-authored-by: Emilia Hane --- Cargo.lock | 59 +++++++++++-------- Cargo.toml | 4 +- crates/chainspec/src/lib.rs | 12 +++- crates/chainspec/src/spec.rs | 24 ++++---- crates/ethereum-forks/Cargo.toml | 7 ++- crates/ethereum-forks/src/hardfork/dev.rs | 9 ++- crates/optimism/chainspec/Cargo.toml | 11 +++- crates/optimism/chainspec/src/base.rs | 7 +-- crates/optimism/chainspec/src/base_sepolia.rs | 7 +-- crates/optimism/chainspec/src/dev.rs | 5 +- crates/optimism/chainspec/src/lib.rs | 10 +++- crates/optimism/chainspec/src/op.rs | 7 +-- crates/optimism/chainspec/src/op_sepolia.rs | 7 +-- crates/optimism/hardforks/src/dev.rs | 8 ++- crates/primitives/src/transaction/mod.rs | 10 +++- crates/rpc/rpc-eth-api/src/helpers/block.rs | 2 + crates/rpc/rpc-eth-api/src/helpers/call.rs | 1 + crates/storage/provider/Cargo.toml | 13 ++-- .../storage/provider/src/test_utils/blocks.rs | 6 +- 19 files changed, 129 insertions(+), 80 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d690cf536..7cbb3e937 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -130,9 +130,9 @@ dependencies = [ [[package]] name = "alloy-dyn-abi" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f95d76a38cae906fd394a5afb0736aaceee5432efe76addfd71048e623e208af" +checksum = "e6228abfc751a29cde117b0879b805a3e0b3b641358f063272c83ca459a56886" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -206,9 +206,9 @@ dependencies = [ [[package]] name = "alloy-json-abi" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03c66eec1acdd96b39b995b8f5ee5239bc0c871d62c527ae1ac9fd1d7fecd455" +checksum = "d46eb5871592c216d39192499c95a99f7175cb94104f88c307e6dc960676d9f1" dependencies = [ "alloy-primitives", "alloy-sol-type-parser", @@ -283,9 +283,9 @@ dependencies = [ [[package]] name = "alloy-primitives" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb848c43f6b06ae3de2e4a67496cbbabd78ae87db0f1248934f15d76192c6a" +checksum = "38f35429a652765189c1c5092870d8360ee7b7769b09b06d89ebaefd34676446" dependencies = [ "alloy-rlp", "arbitrary", @@ -600,9 +600,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro" -version = "0.8.7" +version = "0.8.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "661c516eb1fa3294cc7f2fb8955b3b609d639c282ac81a4eedb14d3046db503a" +checksum = "3b2395336745358cc47207442127c47c63801a7065ecc0aa928da844f8bb5576" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", @@ -614,9 +614,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro-expander" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecbabb8fc3d75a0c2cea5215be22e7a267e3efde835b0f2a8922f5e3f5d47683" +checksum = "9ed5047c9a241df94327879c2b0729155b58b941eae7805a7ada2e19436e6b39" dependencies = [ "alloy-sol-macro-input", "const-hex", @@ -632,9 +632,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro-input" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16517f2af03064485150d89746b8ffdcdbc9b6eeb3d536fb66efd7c2846fbc75" +checksum = "5dee02a81f529c415082235129f0df8b8e60aa1601b9c9298ffe54d75f57210b" dependencies = [ "const-hex", "dunce", @@ -647,9 +647,9 @@ dependencies = [ [[package]] name = "alloy-sol-type-parser" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c07ebb0c1674ff8cbb08378d7c2e0e27919d2a2dae07ad3bca26174deda8d389" +checksum = "f631f0bd9a9d79619b27c91b6b1ab2c4ef4e606a65192369a1ee05d40dcf81cc" dependencies = [ "serde", "winnow", @@ -657,9 +657,9 @@ dependencies = [ [[package]] name = "alloy-sol-types" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e448d879903624863f608c552d10efb0e0905ddbee98b0049412799911eb062" +checksum = "c2841af22d99e2c0f82a78fe107b6481be3dd20b89bfb067290092794734343a" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -1018,9 +1018,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.14" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "998282f8f49ccd6116b0ed8a4de0fbd3151697920e7c7533416d6e25e76434a7" +checksum = "e26a9844c659a2a293d239c7910b752f8487fe122c6c8bd1659bf85a6507c302" dependencies = [ "brotli", "flate2", @@ -1498,9 +1498,9 @@ checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" [[package]] name = "bytemuck" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94bbb0ad554ad961ddc5da507a12a29b14e4ae5bda06b19f575a3e6079d2e2ae" +checksum = "8334215b81e418a0a7bdb8ef0849474f40bb10c8b71f1c4ed315cff49f32494d" dependencies = [ "bytemuck_derive", ] @@ -2011,6 +2011,12 @@ dependencies = [ "itertools 0.10.5", ] +[[package]] +name = "critical-section" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f64009896348fc5af4222e9cf7d7d82a95a256c634ebcf61c53e4ea461422242" + [[package]] name = "crossbeam-channel" version = "0.5.13" @@ -5131,6 +5137,10 @@ name = "once_cell" version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +dependencies = [ + "critical-section", + "portable-atomic", +] [[package]] name = "oorandom" @@ -5413,9 +5423,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.13" +version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fdbef9d1d47087a895abd220ed25eb4ad973a5e26f6a4367b038c25e28dfc2d9" +checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442" dependencies = [ "memchr", "thiserror", @@ -8393,7 +8403,6 @@ dependencies = [ "itertools 0.13.0", "metrics", "notify", - "once_cell", "parking_lot 0.12.3", "rand 0.8.5", "rayon", @@ -10256,9 +10265,9 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20e7b52ad118b2153644eea95c6fc740b6c1555b2344fdab763fc9de4075f665" +checksum = "ebfc1bfd06acc78f16d8fd3ef846bc222ee7002468d10a7dce8d703d6eab89a3" dependencies = [ "paste", "proc-macro2", diff --git a/Cargo.toml b/Cargo.toml index ac0ab2d39..2068f02bb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -497,7 +497,9 @@ notify = { version = "6.1.1", default-features = false, features = [ "macos_fsevent", ] } nybbles = "0.2.1" -once_cell = "1.19" +once_cell = { version = "1.19", default-features = false, features = [ + "critical-section", +] } parking_lot = "0.12" paste = "1.0" rand = "0.8.5" diff --git a/crates/chainspec/src/lib.rs b/crates/chainspec/src/lib.rs index 424b2b77c..81aadb834 100644 --- a/crates/chainspec/src/lib.rs +++ b/crates/chainspec/src/lib.rs @@ -11,6 +11,12 @@ extern crate alloc; +use once_cell as _; +#[cfg(not(feature = "std"))] +pub(crate) use once_cell::sync::{Lazy as LazyLock, OnceCell as OnceLock}; +#[cfg(feature = "std")] +pub(crate) use std::sync::{LazyLock, OnceLock}; + /// Chain specific constants pub(crate) mod constants; pub use constants::MIN_TRANSACTION_GAS; @@ -34,9 +40,9 @@ pub use spec::{ DepositContract, ForkBaseFeeParams, DEV, HOLESKY, MAINNET, SEPOLIA, }; -/// Simple utility to create a `OnceCell` with a value set. -pub fn once_cell_set(value: T) -> once_cell::sync::OnceCell { - let once = once_cell::sync::OnceCell::new(); +/// Simple utility to create a thread-safe sync cell with a value set. 
+pub fn once_cell_set(value: T) -> OnceLock { + let once = OnceLock::new(); let _ = once.set(value); once } diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index e43e0bc78..f78fcbb61 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -6,7 +6,7 @@ use alloy_genesis::Genesis; use alloy_primitives::{address, b256, Address, BlockNumber, B256, U256}; use alloy_trie::EMPTY_ROOT_HASH; use derive_more::From; -use once_cell::sync::{Lazy, OnceCell}; + use reth_ethereum_forks::{ ChainHardforks, DisplayHardforks, EthereumHardfork, EthereumHardforks, ForkCondition, ForkFilter, ForkFilterKey, ForkHash, ForkId, Hardfork, Hardforks, Head, DEV_HARDFORKS, @@ -24,10 +24,10 @@ use reth_primitives_traits::{ }; use reth_trie_common::root::state_root_ref_unhashed; -use crate::{constants::MAINNET_DEPOSIT_CONTRACT, once_cell_set, EthChainSpec}; +use crate::{constants::MAINNET_DEPOSIT_CONTRACT, once_cell_set, EthChainSpec, LazyLock, OnceLock}; /// The Ethereum mainnet spec -pub static MAINNET: Lazy> = Lazy::new(|| { +pub static MAINNET: LazyLock> = LazyLock::new(|| { let mut spec = ChainSpec { chain: Chain::mainnet(), genesis: serde_json::from_str(include_str!("../res/genesis/mainnet.json")) @@ -55,7 +55,7 @@ pub static MAINNET: Lazy> = Lazy::new(|| { }); /// The Sepolia spec -pub static SEPOLIA: Lazy> = Lazy::new(|| { +pub static SEPOLIA: LazyLock> = LazyLock::new(|| { let mut spec = ChainSpec { chain: Chain::sepolia(), genesis: serde_json::from_str(include_str!("../res/genesis/sepolia.json")) @@ -80,7 +80,7 @@ pub static SEPOLIA: Lazy> = Lazy::new(|| { }); /// The Holesky spec -pub static HOLESKY: Lazy> = Lazy::new(|| { +pub static HOLESKY: LazyLock> = LazyLock::new(|| { let mut spec = ChainSpec { chain: Chain::holesky(), genesis: serde_json::from_str(include_str!("../res/genesis/holesky.json")) @@ -106,7 +106,7 @@ pub static HOLESKY: Lazy> = Lazy::new(|| { /// /// Includes 20 prefunded accounts with `10_000` ETH each derived from mnemonic "test test test test /// test test test test test test test junk". -pub static DEV: Lazy> = Lazy::new(|| { +pub static DEV: LazyLock> = LazyLock::new(|| { ChainSpec { chain: Chain::dev(), genesis: serde_json::from_str(include_str!("../res/genesis/dev.json")) @@ -182,13 +182,13 @@ pub struct ChainSpec { /// /// This is either stored at construction time if it is known using [`once_cell_set`], or /// computed once on the first access. - pub genesis_hash: OnceCell, + pub genesis_hash: OnceLock, /// The header corresponding to the genesis block. /// /// This is either stored at construction time if it is known using [`once_cell_set`], or /// computed once on the first access. - pub genesis_header: OnceCell
, + pub genesis_header: OnceLock
, /// The block at which [`EthereumHardfork::Paris`] was activated and the final difficulty at /// this block. @@ -665,7 +665,7 @@ impl From for ChainSpec { Self { chain: genesis.config.chain_id.into(), genesis, - genesis_hash: OnceCell::new(), + genesis_hash: OnceLock::new(), hardforks: ChainHardforks::new(ordered_hardforks), paris_block_and_final_difficulty, deposit_contract, @@ -898,7 +898,7 @@ impl ChainSpecBuilder { ChainSpec { chain: self.chain.expect("The chain is required"), genesis: self.genesis.expect("The genesis is required"), - genesis_hash: OnceCell::new(), + genesis_hash: OnceLock::new(), hardforks: self.hardforks, paris_block_and_final_difficulty, deposit_contract: None, @@ -2217,7 +2217,7 @@ Post-merge hard forks (timestamp based): let spec = ChainSpec { chain: Chain::mainnet(), genesis: Genesis::default(), - genesis_hash: OnceCell::new(), + genesis_hash: OnceLock::new(), hardforks: ChainHardforks::new(vec![( EthereumHardfork::Frontier.boxed(), ForkCondition::Never, @@ -2235,7 +2235,7 @@ Post-merge hard forks (timestamp based): let spec = ChainSpec { chain: Chain::mainnet(), genesis: Genesis::default(), - genesis_hash: OnceCell::new(), + genesis_hash: OnceLock::new(), hardforks: ChainHardforks::new(vec![( EthereumHardfork::Shanghai.boxed(), ForkCondition::Never, diff --git a/crates/ethereum-forks/Cargo.toml b/crates/ethereum-forks/Cargo.toml index 7eb58b15f..08a0bc98d 100644 --- a/crates/ethereum-forks/Cargo.toml +++ b/crates/ethereum-forks/Cargo.toml @@ -41,5 +41,10 @@ proptest.workspace = true default = ["std", "serde", "rustc-hash"] arbitrary = ["dep:arbitrary", "dep:proptest", "dep:proptest-derive"] serde = ["dep:serde"] -std = ["thiserror-no-std/std", "rustc-hash/std"] +std = [ + "alloy-chains/std", + "alloy-primitives/std", + "thiserror-no-std/std", + "rustc-hash/std", +] rustc-hash = ["dep:rustc-hash"] diff --git a/crates/ethereum-forks/src/hardfork/dev.rs b/crates/ethereum-forks/src/hardfork/dev.rs index 87abd68f2..068e29070 100644 --- a/crates/ethereum-forks/src/hardfork/dev.rs +++ b/crates/ethereum-forks/src/hardfork/dev.rs @@ -1,12 +1,17 @@ use alloc::vec; use alloy_primitives::U256; -use once_cell::sync::Lazy; + +use once_cell as _; +#[cfg(not(feature = "std"))] +use once_cell::sync::Lazy as LazyLock; +#[cfg(feature = "std")] +use std::sync::LazyLock; use crate::{ChainHardforks, EthereumHardfork, ForkCondition}; /// Dev hardforks -pub static DEV_HARDFORKS: Lazy = Lazy::new(|| { +pub static DEV_HARDFORKS: LazyLock = LazyLock::new(|| { ChainHardforks::new(vec![ (EthereumHardfork::Frontier.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Homestead.boxed(), ForkCondition::Block(0)), diff --git a/crates/optimism/chainspec/Cargo.toml b/crates/optimism/chainspec/Cargo.toml index e13b95056..30590d4e8 100644 --- a/crates/optimism/chainspec/Cargo.toml +++ b/crates/optimism/chainspec/Cargo.toml @@ -43,4 +43,13 @@ op-alloy-rpc-types.workspace = true [features] default = ["std"] -std = [] \ No newline at end of file +std = [ + "alloy-chains/std", + "alloy-genesis/std", + "alloy-primitives/std", + "op-alloy-rpc-types/std", + "reth-chainspec/std", + "reth-ethereum-forks/std", + "reth-primitives-traits/std", + "reth-optimism-forks/std", +] diff --git a/crates/optimism/chainspec/src/base.rs b/crates/optimism/chainspec/src/base.rs index 3d986b3bc..7aa26bf9a 100644 --- a/crates/optimism/chainspec/src/base.rs +++ b/crates/optimism/chainspec/src/base.rs @@ -1,18 +1,17 @@ //! Chain specification for the Base Mainnet network. 
-use alloc::sync::Arc; +use alloc::{sync::Arc, vec}; use alloy_chains::Chain; use alloy_primitives::{b256, U256}; -use once_cell::sync::Lazy; use reth_chainspec::{once_cell_set, BaseFeeParams, BaseFeeParamsKind, ChainSpec}; use reth_ethereum_forks::EthereumHardfork; use reth_optimism_forks::OptimismHardfork; -use crate::OpChainSpec; +use crate::{LazyLock, OpChainSpec}; /// The Base mainnet spec -pub static BASE_MAINNET: Lazy> = Lazy::new(|| { +pub static BASE_MAINNET: LazyLock> = LazyLock::new(|| { OpChainSpec { inner: ChainSpec { chain: Chain::base_mainnet(), diff --git a/crates/optimism/chainspec/src/base_sepolia.rs b/crates/optimism/chainspec/src/base_sepolia.rs index 5b85f5a6b..b992dcaba 100644 --- a/crates/optimism/chainspec/src/base_sepolia.rs +++ b/crates/optimism/chainspec/src/base_sepolia.rs @@ -1,18 +1,17 @@ //! Chain specification for the Base Sepolia testnet network. -use alloc::sync::Arc; +use alloc::{sync::Arc, vec}; use alloy_chains::Chain; use alloy_primitives::{b256, U256}; -use once_cell::sync::Lazy; use reth_chainspec::{once_cell_set, BaseFeeParams, BaseFeeParamsKind, ChainSpec}; use reth_ethereum_forks::EthereumHardfork; use reth_optimism_forks::OptimismHardfork; -use crate::OpChainSpec; +use crate::{LazyLock, OpChainSpec}; /// The Base Sepolia spec -pub static BASE_SEPOLIA: Lazy> = Lazy::new(|| { +pub static BASE_SEPOLIA: LazyLock> = LazyLock::new(|| { OpChainSpec { inner: ChainSpec { chain: Chain::base_sepolia(), diff --git a/crates/optimism/chainspec/src/dev.rs b/crates/optimism/chainspec/src/dev.rs index 4724e2801..cb8163dfc 100644 --- a/crates/optimism/chainspec/src/dev.rs +++ b/crates/optimism/chainspec/src/dev.rs @@ -4,18 +4,17 @@ use alloc::sync::Arc; use alloy_chains::Chain; use alloy_primitives::U256; -use once_cell::sync::Lazy; use reth_chainspec::{once_cell_set, BaseFeeParams, BaseFeeParamsKind, ChainSpec}; use reth_optimism_forks::DEV_HARDFORKS; use reth_primitives_traits::constants::DEV_GENESIS_HASH; -use crate::OpChainSpec; +use crate::{LazyLock, OpChainSpec}; /// OP dev testnet specification /// /// Includes 20 prefunded accounts with `10_000` ETH each derived from mnemonic "test test test test /// test test test test test test test junk". 
-pub static OP_DEV: Lazy> = Lazy::new(|| { +pub static OP_DEV: LazyLock> = LazyLock::new(|| { OpChainSpec { inner: ChainSpec { chain: Chain::dev(), diff --git a/crates/optimism/chainspec/src/lib.rs b/crates/optimism/chainspec/src/lib.rs index 5ebc18f67..a18614240 100644 --- a/crates/optimism/chainspec/src/lib.rs +++ b/crates/optimism/chainspec/src/lib.rs @@ -6,6 +6,7 @@ issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] extern crate alloc; @@ -16,14 +17,17 @@ mod dev; mod op; mod op_sepolia; +use alloc::{vec, vec::Vec}; use alloy_chains::Chain; use alloy_genesis::Genesis; use alloy_primitives::{Parity, Signature, B256, U256}; pub use base::BASE_MAINNET; pub use base_sepolia::BASE_SEPOLIA; +use core::fmt::Display; use derive_more::{Constructor, Deref, From, Into}; pub use dev::OP_DEV; -use once_cell::sync::OnceCell; +#[cfg(not(feature = "std"))] +pub(crate) use once_cell::sync::Lazy as LazyLock; pub use op::OP_MAINNET; pub use op_sepolia::OP_SEPOLIA; use reth_chainspec::{ @@ -33,7 +37,8 @@ use reth_chainspec::{ use reth_ethereum_forks::{ChainHardforks, EthereumHardfork, ForkCondition, Hardfork}; use reth_network_peers::NodeRecord; use reth_primitives_traits::Header; -use std::fmt::Display; +#[cfg(feature = "std")] +pub(crate) use std::sync::LazyLock; /// Chain spec builder for a OP stack chain. #[derive(Debug, Default, From)] @@ -345,7 +350,6 @@ impl From for OpChainSpec { inner: ChainSpec { chain: genesis.config.chain_id.into(), genesis, - genesis_hash: OnceCell::new(), hardforks: ChainHardforks::new(ordered_hardforks), paris_block_and_final_difficulty, base_fee_params: optimism_genesis_info.base_fee_params, diff --git a/crates/optimism/chainspec/src/op.rs b/crates/optimism/chainspec/src/op.rs index 4e8b13005..8c0da5320 100644 --- a/crates/optimism/chainspec/src/op.rs +++ b/crates/optimism/chainspec/src/op.rs @@ -1,19 +1,18 @@ //! Chain specification for the Optimism Mainnet network. -use alloc::sync::Arc; +use alloc::{sync::Arc, vec}; use alloy_chains::Chain; use alloy_primitives::{b256, U256}; -use once_cell::sync::Lazy; use reth_chainspec::{once_cell_set, BaseFeeParams, BaseFeeParamsKind, ChainSpec}; use reth_ethereum_forks::EthereumHardfork; use reth_optimism_forks::OptimismHardfork; use reth_primitives_traits::constants::ETHEREUM_BLOCK_GAS_LIMIT; -use crate::OpChainSpec; +use crate::{LazyLock, OpChainSpec}; /// The Optimism Mainnet spec -pub static OP_MAINNET: Lazy> = Lazy::new(|| { +pub static OP_MAINNET: LazyLock> = LazyLock::new(|| { OpChainSpec { inner: ChainSpec { chain: Chain::optimism_mainnet(), diff --git a/crates/optimism/chainspec/src/op_sepolia.rs b/crates/optimism/chainspec/src/op_sepolia.rs index 9d453c805..d3243ebd5 100644 --- a/crates/optimism/chainspec/src/op_sepolia.rs +++ b/crates/optimism/chainspec/src/op_sepolia.rs @@ -1,19 +1,18 @@ //! Chain specification for the Optimism Sepolia testnet network. 
-use alloc::sync::Arc; +use alloc::{sync::Arc, vec}; use alloy_chains::{Chain, NamedChain}; use alloy_primitives::{b256, U256}; -use once_cell::sync::Lazy; use reth_chainspec::{once_cell_set, BaseFeeParams, BaseFeeParamsKind, ChainSpec}; use reth_ethereum_forks::EthereumHardfork; use reth_optimism_forks::OptimismHardfork; use reth_primitives_traits::constants::ETHEREUM_BLOCK_GAS_LIMIT; -use crate::OpChainSpec; +use crate::{LazyLock, OpChainSpec}; /// The OP Sepolia spec -pub static OP_SEPOLIA: Lazy> = Lazy::new(|| { +pub static OP_SEPOLIA: LazyLock> = LazyLock::new(|| { OpChainSpec { inner: ChainSpec { chain: Chain::from_named(NamedChain::OptimismSepolia), diff --git a/crates/optimism/hardforks/src/dev.rs b/crates/optimism/hardforks/src/dev.rs index 07f88d3f8..328ef501c 100644 --- a/crates/optimism/hardforks/src/dev.rs +++ b/crates/optimism/hardforks/src/dev.rs @@ -1,9 +1,13 @@ use alloy_primitives::U256; -use once_cell::sync::Lazy; use reth_ethereum_forks::{ChainHardforks, EthereumHardfork, ForkCondition}; +#[cfg(not(feature = "std"))] +use once_cell::sync::Lazy as LazyLock; +#[cfg(feature = "std")] +use std::sync::LazyLock; + /// Dev hardforks -pub static DEV_HARDFORKS: Lazy = Lazy::new(|| { +pub static DEV_HARDFORKS: LazyLock = LazyLock::new(|| { ChainHardforks::new(vec![ (EthereumHardfork::Frontier.boxed(), ForkCondition::Block(0)), (EthereumHardfork::Homestead.boxed(), ForkCondition::Block(0)), diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 3622b9531..62bef1607 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -13,10 +13,14 @@ use alloy_primitives::{Bytes, TxHash}; use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header}; use core::mem; use derive_more::{AsRef, Deref}; -use once_cell::sync::Lazy; +use once_cell as _; +#[cfg(not(feature = "std"))] +use once_cell::sync::Lazy as LazyLock; use rayon::prelude::{IntoParallelIterator, ParallelIterator}; use serde::{Deserialize, Serialize}; use signature::{decode_with_eip155_chain_id, with_eip155_parity}; +#[cfg(feature = "std")] +use std::sync::LazyLock; pub use error::{ InvalidTransactionError, TransactionConversionError, TryFromRecoveredTransactionError, @@ -72,8 +76,8 @@ pub type TxHashOrNumber = BlockHashOrNumber; // Expected number of transactions where we can expect a speed-up by recovering the senders in // parallel. -pub(crate) static PARALLEL_SENDER_RECOVERY_THRESHOLD: Lazy = - Lazy::new(|| match rayon::current_num_threads() { +pub(crate) static PARALLEL_SENDER_RECOVERY_THRESHOLD: LazyLock = + LazyLock::new(|| match rayon::current_num_threads() { 0..=1 => usize::MAX, 2..=8 => 10, _ => 5, diff --git a/crates/rpc/rpc-eth-api/src/helpers/block.rs b/crates/rpc/rpc-eth-api/src/helpers/block.rs index 5d8496abd..9993b477a 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/block.rs @@ -110,6 +110,7 @@ pub trait EthBlocks: LoadBlock { /// Helper function for `eth_getBlockReceipts`. /// /// Returns all transaction receipts in block, or `None` if block wasn't found. + #[allow(clippy::type_complexity)] fn block_receipts( &self, block_id: BlockId, @@ -118,6 +119,7 @@ pub trait EthBlocks: LoadBlock { Self: LoadReceipt; /// Helper method that loads a bock and all its receipts. 
+ #[allow(clippy::type_complexity)] fn load_block_and_receipts( &self, block_id: BlockId, diff --git a/crates/rpc/rpc-eth-api/src/helpers/call.rs b/crates/rpc/rpc-eth-api/src/helpers/call.rs index ff6e93006..cb5c4e79a 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/call.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/call.rs @@ -61,6 +61,7 @@ pub trait EthCall: Call + LoadPendingBlock { /// The transactions are packed into individual blocks. Overrides can be provided. /// /// See also: + #[allow(clippy::type_complexity)] fn simulate_v1( &self, payload: SimulatePayload, diff --git a/crates/storage/provider/Cargo.toml b/crates/storage/provider/Cargo.toml index 048353452..7ef827a37 100644 --- a/crates/storage/provider/Cargo.toml +++ b/crates/storage/provider/Cargo.toml @@ -56,13 +56,14 @@ metrics.workspace = true # misc auto_impl.workspace = true itertools.workspace = true -notify = { workspace = true, default-features = false, features = ["macos_fsevent"] } +notify = { workspace = true, default-features = false, features = [ + "macos_fsevent", +] } parking_lot.workspace = true dashmap = { workspace = true, features = ["inline"] } strum.workspace = true # test-utils -once_cell = { workspace = true, optional = true } reth-ethereum-engine-primitives = { workspace = true, optional = true } alloy-consensus = { workspace = true, optional = true } @@ -79,20 +80,22 @@ parking_lot.workspace = true tempfile.workspace = true assert_matches.workspace = true rand.workspace = true -once_cell.workspace = true eyre.workspace = true alloy-consensus.workspace = true [features] -optimism = ["reth-primitives/optimism", "reth-execution-types/optimism", "reth-optimism-primitives"] +optimism = [ + "reth-primitives/optimism", + "reth-execution-types/optimism", + "reth-optimism-primitives", +] serde = ["reth-execution-types/serde"] test-utils = [ "reth-db/test-utils", "reth-nippy-jar/test-utils", "reth-trie/test-utils", "reth-chain-state/test-utils", - "once_cell", "reth-ethereum-engine-primitives", "alloy-consensus", ] diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index f7928319b..e6a886187 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -5,7 +5,7 @@ use alloy_primitives::{ b256, hex_literal::hex, map::HashMap, Address, BlockNumber, Bytes, Log, Parity, Sealable, TxKind, B256, U256, }; -use once_cell::sync::Lazy; + use reth_db::tables; use reth_db_api::{database::Database, models::StoredBlockBodyIndices}; use reth_primitives::{ @@ -14,7 +14,7 @@ use reth_primitives::{ }; use reth_trie::root::{state_root_unhashed, storage_root_unhashed}; use revm::{db::BundleState, primitives::AccountInfo}; -use std::str::FromStr; +use std::{str::FromStr, sync::LazyLock}; /// Assert genesis block pub fn assert_genesis_block( @@ -61,7 +61,7 @@ pub fn assert_genesis_block( // StageCheckpoints is not updated in tests } -pub(crate) static TEST_BLOCK: Lazy = Lazy::new(|| SealedBlock { +pub(crate) static TEST_BLOCK: LazyLock = LazyLock::new(|| SealedBlock { header: SealedHeader::new( Header { parent_hash: hex!("c86e8cc0310ae7c531c758678ddbfd16fc51c8cef8cec650b032de9869e8b94f") From 0b6397217ca47fe7cd7f5d30963564f8e63dead8 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 15 Oct 2024 10:24:10 +0200 Subject: [PATCH 051/425] chore: rm optimism feature from chainspec (#11722) --- Cargo.lock | 1 - crates/chainspec/Cargo.toml | 4 ---- crates/chainspec/src/api.rs | 2 +- crates/chainspec/src/lib.rs | 2 +- 
crates/chainspec/src/spec.rs | 18 ------------------ crates/consensus/beacon/Cargo.toml | 5 ++--- crates/consensus/beacon/src/engine/mod.rs | 3 +-- crates/optimism/chainspec/Cargo.toml | 2 +- crates/optimism/chainspec/src/lib.rs | 3 +++ crates/optimism/evm/src/lib.rs | 2 +- crates/optimism/node/Cargo.toml | 1 - crates/optimism/payload/Cargo.toml | 1 - crates/optimism/rpc/src/eth/receipt.rs | 22 +--------------------- 13 files changed, 11 insertions(+), 55 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7cbb3e937..1bbfbbd30 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6532,7 +6532,6 @@ dependencies = [ "once_cell", "reth-ethereum-forks", "reth-network-peers", - "reth-optimism-forks", "reth-primitives-traits", "reth-trie-common", "serde_json", diff --git a/crates/chainspec/Cargo.toml b/crates/chainspec/Cargo.toml index 80636d139..b44a606b6 100644 --- a/crates/chainspec/Cargo.toml +++ b/crates/chainspec/Cargo.toml @@ -17,9 +17,6 @@ reth-network-peers.workspace = true reth-trie-common.workspace = true reth-primitives-traits.workspace = true -# op-reth -reth-optimism-forks = { workspace = true, optional = true } - # ethereum alloy-chains = { workspace = true, features = ["serde", "rlp"] } alloy-eips = { workspace = true, features = ["serde"] } @@ -42,7 +39,6 @@ alloy-genesis.workspace = true [features] default = ["std"] -optimism = ["reth-optimism-forks"] std = [ "alloy-chains/std", "alloy-eips/std", diff --git a/crates/chainspec/src/api.rs b/crates/chainspec/src/api.rs index e481fbede..39a6716ee 100644 --- a/crates/chainspec/src/api.rs +++ b/crates/chainspec/src/api.rs @@ -99,6 +99,6 @@ impl EthChainSpec for ChainSpec { } fn is_optimism(&self) -> bool { - Self::is_optimism(self) + self.chain.is_optimism() } } diff --git a/crates/chainspec/src/lib.rs b/crates/chainspec/src/lib.rs index 81aadb834..2e97caba0 100644 --- a/crates/chainspec/src/lib.rs +++ b/crates/chainspec/src/lib.rs @@ -33,7 +33,7 @@ pub use reth_ethereum_forks::*; pub use api::EthChainSpec; pub use info::ChainInfo; -#[cfg(feature = "test-utils")] +#[cfg(any(test, feature = "test-utils"))] pub use spec::test_fork_ids; pub use spec::{ BaseFeeParams, BaseFeeParamsKind, ChainSpec, ChainSpecBuilder, ChainSpecProvider, diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index f78fcbb61..e6e8a67d7 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -239,21 +239,6 @@ impl ChainSpec { self.chain.is_ethereum() } - /// Returns `true` if this chain contains Optimism configuration. - #[inline] - #[cfg(feature = "optimism")] - pub fn is_optimism(&self) -> bool { - self.chain.is_optimism() || - self.hardforks.get(reth_optimism_forks::OptimismHardfork::Bedrock).is_some() - } - - /// Returns `true` if this chain contains Optimism configuration. - #[inline] - #[cfg(not(feature = "optimism"))] - pub const fn is_optimism(&self) -> bool { - self.chain.is_optimism() - } - /// Returns `true` if this chain is Optimism mainnet. #[inline] pub fn is_optimism_mainnet(&self) -> bool { @@ -706,9 +691,6 @@ impl EthereumHardforks for ChainSpec { } } -#[cfg(feature = "optimism")] -impl reth_optimism_forks::OptimismHardforks for ChainSpec {} - /// A trait for reading the current chainspec. 
#[auto_impl::auto_impl(&, Arc)] pub trait ChainSpecProvider: Send + Sync { diff --git a/crates/consensus/beacon/Cargo.toml b/crates/consensus/beacon/Cargo.toml index f62c6fbf2..f13668126 100644 --- a/crates/consensus/beacon/Cargo.toml +++ b/crates/consensus/beacon/Cargo.toml @@ -28,6 +28,7 @@ reth-tokio-util.workspace = true reth-engine-primitives.workspace = true reth-network-p2p.workspace = true reth-node-types.workspace = true +reth-chainspec = { workspace = true, optional = true } # ethereum alloy-primitives.workspace = true @@ -48,8 +49,6 @@ thiserror.workspace = true schnellru.workspace = true itertools.workspace = true -reth-chainspec = { workspace = true, optional = true } - [dev-dependencies] # reth reth-payload-builder = { workspace = true, features = ["test-utils"] } @@ -78,8 +77,8 @@ assert_matches.workspace = true [features] optimism = [ + "reth-chainspec", "reth-primitives/optimism", "reth-provider/optimism", "reth-blockchain-tree/optimism", - "reth-chainspec" ] diff --git a/crates/consensus/beacon/src/engine/mod.rs b/crates/consensus/beacon/src/engine/mod.rs index 5774d4da2..ccea982bf 100644 --- a/crates/consensus/beacon/src/engine/mod.rs +++ b/crates/consensus/beacon/src/engine/mod.rs @@ -462,8 +462,7 @@ where ) -> bool { // On Optimism, the proposers are allowed to reorg their own chain at will. #[cfg(feature = "optimism")] - if reth_chainspec::EthChainSpec::chain(self.blockchain.chain_spec().as_ref()).is_optimism() - { + if reth_chainspec::EthChainSpec::is_optimism(&self.blockchain.chain_spec()) { debug!( target: "consensus::engine", fcu_head_num=?header.number, diff --git a/crates/optimism/chainspec/Cargo.toml b/crates/optimism/chainspec/Cargo.toml index 30590d4e8..c9f951c8d 100644 --- a/crates/optimism/chainspec/Cargo.toml +++ b/crates/optimism/chainspec/Cargo.toml @@ -13,7 +13,7 @@ workspace = true [dependencies] # reth -reth-chainspec = { workspace = true, features = ["optimism"] } +reth-chainspec.workspace = true reth-ethereum-forks.workspace = true reth-primitives-traits.workspace = true reth-network-peers.workspace = true diff --git a/crates/optimism/chainspec/src/lib.rs b/crates/optimism/chainspec/src/lib.rs index a18614240..98c6589d1 100644 --- a/crates/optimism/chainspec/src/lib.rs +++ b/crates/optimism/chainspec/src/lib.rs @@ -36,6 +36,7 @@ use reth_chainspec::{ }; use reth_ethereum_forks::{ChainHardforks, EthereumHardfork, ForkCondition, Hardfork}; use reth_network_peers::NodeRecord; +use reth_optimism_forks::OptimismHardforks; use reth_primitives_traits::Header; #[cfg(feature = "std")] pub(crate) use std::sync::LazyLock; @@ -267,6 +268,8 @@ impl EthereumHardforks for OpChainSpec { } } +impl OptimismHardforks for OpChainSpec {} + impl From for OpChainSpec { fn from(genesis: Genesis) -> Self { use reth_optimism_forks::OptimismHardfork; diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index 158ed2e89..63d0ee6b4 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -124,7 +124,7 @@ impl ConfigureEvmEnv for OptimismEvmConfig { cfg_env.perf_analyse_created_bytecodes = AnalysisKind::Analyse; cfg_env.handler_cfg.spec_id = spec_id; - cfg_env.handler_cfg.is_optimism = self.chain_spec.is_optimism(); + cfg_env.handler_cfg.is_optimism = true; } fn next_cfg_and_block_env( diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index 4675029b6..0902d7858 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -79,7 +79,6 @@ op-alloy-consensus.workspace = true 
[features] optimism = [ - "reth-chainspec/optimism", "reth-primitives/optimism", "reth-provider/optimism", "reth-optimism-evm/optimism", diff --git a/crates/optimism/payload/Cargo.toml b/crates/optimism/payload/Cargo.toml index d5f8b520e..414b2c358 100644 --- a/crates/optimism/payload/Cargo.toml +++ b/crates/optimism/payload/Cargo.toml @@ -49,7 +49,6 @@ sha2.workspace = true [features] optimism = [ - "reth-chainspec/optimism", "reth-primitives/optimism", "reth-provider/optimism", "reth-optimism-evm/optimism", diff --git a/crates/optimism/rpc/src/eth/receipt.rs b/crates/optimism/rpc/src/eth/receipt.rs index cc6851041..200b626d8 100644 --- a/crates/optimism/rpc/src/eth/receipt.rs +++ b/crates/optimism/rpc/src/eth/receipt.rs @@ -4,7 +4,6 @@ use alloy_eips::eip2718::Encodable2718; use alloy_rpc_types::{AnyReceiptEnvelope, Log, TransactionReceipt}; use op_alloy_consensus::{OpDepositReceipt, OpDepositReceiptWithBloom, OpReceiptEnvelope}; use op_alloy_rpc_types::{receipt::L1BlockInfo, OpTransactionReceipt, OpTransactionReceiptFields}; -use reth_chainspec::ChainSpec; use reth_node_api::{FullNodeComponents, NodeTypes}; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_evm::RethL1BlockInfo; @@ -55,25 +54,6 @@ where } } -impl OpEthApi -where - N: FullNodeComponents>, -{ - /// Builds a receipt w.r.t. chain spec. - pub fn build_op_receipt_meta( - &self, - tx: &TransactionSigned, - l1_block_info: revm::L1BlockInfo, - receipt: &Receipt, - ) -> Result { - Ok(OpReceiptFieldsBuilder::default() - .l1_block_info(&self.inner.provider().chain_spec(), tx, l1_block_info)? - .deposit_nonce(receipt.deposit_nonce) - .deposit_version(receipt.deposit_receipt_version) - .build()) - } -} - /// L1 fee and data gas for a non-deposit transaction, or deposit nonce and receipt version for a /// deposit transaction. #[derive(Debug, Default, Clone)] @@ -113,7 +93,7 @@ impl OpReceiptFieldsBuilder { /// Applies [`L1BlockInfo`](revm::L1BlockInfo). 
pub fn l1_block_info( mut self, - chain_spec: &ChainSpec, + chain_spec: &OpChainSpec, tx: &TransactionSigned, l1_block_info: revm::L1BlockInfo, ) -> Result { From e92fbaea3042172e438f9479166d0f43aff94810 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Tue, 15 Oct 2024 10:35:47 +0200 Subject: [PATCH 052/425] chore(ci): do not run hive on legacy engine (#11733) --- .github/assets/hive/expected_failures.yaml | 25 ++------ .../hive/expected_failures_experimental.yaml | 63 ------------------- .github/workflows/hive.yml | 11 +--- 3 files changed, 8 insertions(+), 91 deletions(-) delete mode 100644 .github/assets/hive/expected_failures_experimental.yaml diff --git a/.github/assets/hive/expected_failures.yaml b/.github/assets/hive/expected_failures.yaml index 7a212a51d..d4b3d2bcb 100644 --- a/.github/assets/hive/expected_failures.yaml +++ b/.github/assets/hive/expected_failures.yaml @@ -20,7 +20,7 @@ rpc-compat: - eth_getBlockByNumber/get-latest (reth) - eth_getBlockByNumber/get-safe (reth) - # https://github.com/paradigmxyz/reth/issues/8732 +# https://github.com/paradigmxyz/reth/issues/8732 engine-withdrawals: - Withdrawals Fork On Genesis (Paris) (reth) - Withdrawals Fork on Block 1 (Paris) (reth) @@ -43,15 +43,7 @@ engine-withdrawals: # https://github.com/paradigmxyz/reth/issues/8305 # https://github.com/paradigmxyz/reth/issues/6217 -engine-api: - - Inconsistent Head in ForkchoiceState (Paris) (reth) - - Invalid NewPayload, StateRoot, Syncing=True, EmptyTxs=True, DynFeeTxs=False (Paris) (reth) - - Invalid NewPayload, StateRoot, Syncing=True, EmptyTxs=False, DynFeeTxs=False (Paris) (reth) - - Invalid NewPayload, PrevRandao, Syncing=True, EmptyTxs=False, DynFeeTxs=False (Paris) (reth) - - Invalid Missing Ancestor Syncing ReOrg, StateRoot, EmptyTxs=True, CanonicalReOrg=False, Invalid P9 (Paris) (reth) - - Invalid Missing Ancestor Syncing ReOrg, StateRoot, EmptyTxs=False, CanonicalReOrg=False, Invalid P9 (Paris) (reth) - - Invalid Missing Ancestor Syncing ReOrg, StateRoot, EmptyTxs=True, CanonicalReOrg=True, Invalid P9 (Paris) (reth) - - Invalid Missing Ancestor Syncing ReOrg, StateRoot, EmptyTxs=False, CanonicalReOrg=True, Invalid P9 (Paris) (reth) +engine-api: [] # https://github.com/paradigmxyz/reth/issues/8305 # https://github.com/paradigmxyz/reth/issues/6217 @@ -59,17 +51,12 @@ engine-api: # https://github.com/paradigmxyz/reth/issues/7144 engine-cancun: - Blob Transaction Ordering, Multiple Clients (Cancun) (reth) - - Inconsistent Head in ForkchoiceState (Cancun) (reth) - - Invalid NewPayload, StateRoot, Syncing=True, EmptyTxs=True, DynFeeTxs=False (Cancun) (reth) - - Invalid NewPayload, StateRoot, Syncing=True, EmptyTxs=False, DynFeeTxs=False (Cancun) (reth) - - Invalid NewPayload, PrevRandao, Syncing=True, EmptyTxs=False, DynFeeTxs=False (Cancun) (reth) - - Invalid Missing Ancestor Syncing ReOrg, StateRoot, EmptyTxs=True, CanonicalReOrg=False, Invalid P9 (Cancun) (reth) - - Invalid Missing Ancestor Syncing ReOrg, StateRoot, EmptyTxs=False, CanonicalReOrg=False, Invalid P9 (Cancun) (reth) - - Invalid Missing Ancestor Syncing ReOrg, StateRoot, EmptyTxs=True, CanonicalReOrg=True, Invalid P9 (Cancun) (reth) - - Invalid Missing Ancestor Syncing ReOrg, StateRoot, EmptyTxs=False, CanonicalReOrg=True, Invalid P9 (Cancun) (reth) - Invalid PayloadAttributes, Missing BeaconRoot, Syncing=True (Cancun) (reth) - - Invalid NewPayload, ParentBeaconBlockRoot, Syncing=True, EmptyTxs=False, DynFeeTxs=False (Cancun) (reth) - Invalid NewPayload, ExcessBlobGas, Syncing=True, EmptyTxs=False, DynFeeTxs=False 
(Cancun) (reth) + - Invalid NewPayload, VersionedHashes, Syncing=False, EmptyTxs=False, DynFeeTxs=False (Cancun) (reth) + - Invalid NewPayload, VersionedHashes Version, Syncing=False, EmptyTxs=False, DynFeeTxs=False (Cancun) (reth) + - Invalid NewPayload, Incomplete VersionedHashes, Syncing=False, EmptyTxs=False, DynFeeTxs=False (Cancun) (reth) + - Invalid NewPayload, Extra VersionedHashes, Syncing=False, EmptyTxs=False, DynFeeTxs=False (Cancun) (reth) # https://github.com/paradigmxyz/reth/issues/8579 sync: diff --git a/.github/assets/hive/expected_failures_experimental.yaml b/.github/assets/hive/expected_failures_experimental.yaml deleted file mode 100644 index d4b3d2bcb..000000000 --- a/.github/assets/hive/expected_failures_experimental.yaml +++ /dev/null @@ -1,63 +0,0 @@ -# https://github.com/paradigmxyz/reth/issues/7015 -# https://github.com/paradigmxyz/reth/issues/6332 -rpc-compat: - - debug_getRawBlock/get-invalid-number (reth) - - debug_getRawHeader/get-invalid-number (reth) - - debug_getRawReceipts/get-invalid-number (reth) - - debug_getRawTransaction/get-invalid-hash (reth) - - - eth_call/call-callenv (reth) - - eth_feeHistory/fee-history (reth) - - eth_getStorageAt/get-storage-invalid-key-too-large (reth) - - eth_getStorageAt/get-storage-invalid-key (reth) - - eth_getTransactionReceipt/get-access-list (reth) - - eth_getTransactionReceipt/get-blob-tx (reth) - - eth_getTransactionReceipt/get-dynamic-fee (reth) - - eth_getBlockByHash/get-block-by-hash (reth) - - eth_getBlockByNumber/get-block-n (reth) - - eth_getBlockByNumber/get-finalized (reth) - - eth_getBlockByNumber/get-genesis (reth) - - eth_getBlockByNumber/get-latest (reth) - - eth_getBlockByNumber/get-safe (reth) - -# https://github.com/paradigmxyz/reth/issues/8732 -engine-withdrawals: - - Withdrawals Fork On Genesis (Paris) (reth) - - Withdrawals Fork on Block 1 (Paris) (reth) - - Withdrawals Fork on Block 2 (Paris) (reth) - - Withdrawals Fork on Block 3 (Paris) (reth) - - Withdraw to a single account (Paris) (reth) - - Withdraw to two accounts (Paris) (reth) - - Withdraw many accounts (Paris) (reth) - - Withdraw zero amount (Paris) (reth) - - Empty Withdrawals (Paris) (reth) - - Corrupted Block Hash Payload (INVALID) (Paris) (reth) - - Withdrawals Fork on Block 1 - 8 Block Re-Org NewPayload (Paris) (reth) - - Withdrawals Fork on Block 1 - 8 Block Re-Org, Sync (Paris) (reth) - - Withdrawals Fork on Block 8 - 10 Block Re-Org NewPayload (Paris) (reth) - - Withdrawals Fork on Block 8 - 10 Block Re-Org Sync (Paris) (reth) - - Withdrawals Fork on Canonical Block 8 / Side Block 7 - 10 Block Re-Org (Paris) (reth) - - Withdrawals Fork on Canonical Block 8 / Side Block 7 - 10 Block Re-Org Sync (Paris) (reth) - - Withdrawals Fork on Canonical Block 8 / Side Block 9 - 10 Block Re-Org (Paris) (reth) - - Withdrawals Fork on Canonical Block 8 / Side Block 9 - 10 Block Re-Org Sync (Paris) (reth) - -# https://github.com/paradigmxyz/reth/issues/8305 -# https://github.com/paradigmxyz/reth/issues/6217 -engine-api: [] - -# https://github.com/paradigmxyz/reth/issues/8305 -# https://github.com/paradigmxyz/reth/issues/6217 -# https://github.com/paradigmxyz/reth/issues/8306 -# https://github.com/paradigmxyz/reth/issues/7144 -engine-cancun: - - Blob Transaction Ordering, Multiple Clients (Cancun) (reth) - - Invalid PayloadAttributes, Missing BeaconRoot, Syncing=True (Cancun) (reth) - - Invalid NewPayload, ExcessBlobGas, Syncing=True, EmptyTxs=False, DynFeeTxs=False (Cancun) (reth) - - Invalid NewPayload, VersionedHashes, Syncing=False, 
EmptyTxs=False, DynFeeTxs=False (Cancun) (reth) - - Invalid NewPayload, VersionedHashes Version, Syncing=False, EmptyTxs=False, DynFeeTxs=False (Cancun) (reth) - - Invalid NewPayload, Incomplete VersionedHashes, Syncing=False, EmptyTxs=False, DynFeeTxs=False (Cancun) (reth) - - Invalid NewPayload, Extra VersionedHashes, Syncing=False, EmptyTxs=False, DynFeeTxs=False (Cancun) (reth) - -# https://github.com/paradigmxyz/reth/issues/8579 -sync: - - sync reth -> reth diff --git a/.github/workflows/hive.yml b/.github/workflows/hive.yml index 12224a951..6c50923d3 100644 --- a/.github/workflows/hive.yml +++ b/.github/workflows/hive.yml @@ -82,7 +82,6 @@ jobs: strategy: fail-fast: false matrix: - engine: [regular, experimental] # ethereum/rpc to be deprecated: # https://github.com/ethereum/hive/pull/1117 scenario: @@ -181,7 +180,7 @@ jobs: needs: - prepare-reth - prepare-hive - name: run ${{ matrix.engine }} - ${{ matrix.scenario.sim }}${{ matrix.scenario.limit && format(' - {0}', matrix.scenario.limit) }} + name: run ${{ matrix.scenario.sim }}${{ matrix.scenario.limit && format(' - {0}', matrix.scenario.limit) }} runs-on: group: Reth permissions: @@ -218,11 +217,6 @@ jobs: ref: master path: hivetests - - name: Modify client for experimental engine - if: matrix.engine == 'experimental' - run: | - sed -ie 's/RUST_LOG=info $reth node $FLAGS/RUST_LOG=info $reth node --engine.experimental $FLAGS/' hivetests/clients/reth/reth.sh - - name: Run simulator run: | LIMIT="${{ matrix.scenario.limit }}" @@ -241,8 +235,7 @@ jobs: - name: Parse hive output run: | - FAILURE_FILE="${{ matrix.engine == 'experimental' && '.github/assets/hive/expected_failures_experimental.yaml' || '.github/assets/hive/expected_failures.yaml' }}" - find hivetests/workspace/logs -type f -name "*.json" ! -name "hive.json" | xargs -I {} python .github/assets/hive/parse.py {} --exclusion $FAILURE_FILE + find hivetests/workspace/logs -type f -name "*.json" ! 
-name "hive.json" | xargs -I {} python .github/assets/hive/parse.py {} --exclusion .github/assets/hive/expected_failures.yaml - name: Print simulator output if: ${{ failure() }} From 5e386130da0b123e107f5eb5a9a3a3622bc50129 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Tue, 15 Oct 2024 10:42:15 +0200 Subject: [PATCH 053/425] primitives: impl `alloy_consensus::Transaction` for `Transaction` (#11727) --- crates/primitives/src/transaction/mod.rs | 184 ++++++++++++++++++++++- 1 file changed, 183 insertions(+), 1 deletion(-) diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 62bef1607..0cb860ff6 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -2,7 +2,7 @@ use crate::BlockHashOrNumber; use alloy_eips::eip7702::SignedAuthorization; -use alloy_primitives::{keccak256, Address, TxKind, B256, U256}; +use alloy_primitives::{keccak256, Address, ChainId, TxKind, B256, U256}; use alloy_consensus::{SignableTransaction, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy}; use alloy_eips::{ @@ -824,6 +824,188 @@ impl Encodable for Transaction { } } +impl alloy_consensus::Transaction for Transaction { + fn chain_id(&self) -> Option { + match self { + Self::Legacy(tx) => tx.chain_id(), + Self::Eip2930(tx) => tx.chain_id(), + Self::Eip1559(tx) => tx.chain_id(), + Self::Eip4844(tx) => tx.chain_id(), + Self::Eip7702(tx) => tx.chain_id(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.chain_id(), + } + } + + fn nonce(&self) -> u64 { + match self { + Self::Legacy(tx) => tx.nonce(), + Self::Eip2930(tx) => tx.nonce(), + Self::Eip1559(tx) => tx.nonce(), + Self::Eip4844(tx) => tx.nonce(), + Self::Eip7702(tx) => tx.nonce(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.nonce(), + } + } + + fn gas_limit(&self) -> u64 { + match self { + Self::Legacy(tx) => tx.gas_limit(), + Self::Eip2930(tx) => tx.gas_limit(), + Self::Eip1559(tx) => tx.gas_limit(), + Self::Eip4844(tx) => tx.gas_limit(), + Self::Eip7702(tx) => tx.gas_limit(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.gas_limit(), + } + } + + fn gas_price(&self) -> Option { + match self { + Self::Legacy(tx) => tx.gas_price(), + Self::Eip2930(tx) => tx.gas_price(), + Self::Eip1559(tx) => tx.gas_price(), + Self::Eip4844(tx) => tx.gas_price(), + Self::Eip7702(tx) => tx.gas_price(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.gas_price(), + } + } + + fn max_fee_per_gas(&self) -> u128 { + match self { + Self::Legacy(tx) => tx.max_fee_per_gas(), + Self::Eip2930(tx) => tx.max_fee_per_gas(), + Self::Eip1559(tx) => tx.max_fee_per_gas(), + Self::Eip4844(tx) => tx.max_fee_per_gas(), + Self::Eip7702(tx) => tx.max_fee_per_gas(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.max_fee_per_gas(), + } + } + + fn max_priority_fee_per_gas(&self) -> Option { + match self { + Self::Legacy(tx) => tx.max_priority_fee_per_gas(), + Self::Eip2930(tx) => tx.max_priority_fee_per_gas(), + Self::Eip1559(tx) => tx.max_priority_fee_per_gas(), + Self::Eip4844(tx) => tx.max_priority_fee_per_gas(), + Self::Eip7702(tx) => tx.max_priority_fee_per_gas(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.max_priority_fee_per_gas(), + } + } + + fn priority_fee_or_price(&self) -> u128 { + match self { + Self::Legacy(tx) => tx.priority_fee_or_price(), + Self::Eip2930(tx) => tx.priority_fee_or_price(), + Self::Eip1559(tx) => tx.priority_fee_or_price(), + Self::Eip4844(tx) => 
tx.priority_fee_or_price(), + Self::Eip7702(tx) => tx.priority_fee_or_price(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.priority_fee_or_price(), + } + } + + fn max_fee_per_blob_gas(&self) -> Option { + match self { + Self::Legacy(tx) => tx.max_fee_per_blob_gas(), + Self::Eip2930(tx) => tx.max_fee_per_blob_gas(), + Self::Eip1559(tx) => tx.max_fee_per_blob_gas(), + Self::Eip4844(tx) => tx.max_fee_per_blob_gas(), + Self::Eip7702(tx) => tx.max_fee_per_blob_gas(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.max_fee_per_blob_gas(), + } + } + + fn to(&self) -> TxKind { + match self { + Self::Legacy(tx) => tx.to(), + Self::Eip2930(tx) => tx.to(), + Self::Eip1559(tx) => tx.to(), + Self::Eip4844(tx) => tx.to(), + Self::Eip7702(tx) => tx.to(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.to(), + } + } + + fn value(&self) -> alloy_primitives::U256 { + match self { + Self::Legacy(tx) => tx.value(), + Self::Eip2930(tx) => tx.value(), + Self::Eip1559(tx) => tx.value(), + Self::Eip4844(tx) => tx.value(), + Self::Eip7702(tx) => tx.value(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.value(), + } + } + + fn input(&self) -> &[u8] { + match self { + Self::Legacy(tx) => tx.input(), + Self::Eip2930(tx) => tx.input(), + Self::Eip1559(tx) => tx.input(), + Self::Eip4844(tx) => tx.input(), + Self::Eip7702(tx) => tx.input(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.input(), + } + } + + fn ty(&self) -> u8 { + match self { + Self::Legacy(tx) => tx.ty(), + Self::Eip2930(tx) => tx.ty(), + Self::Eip1559(tx) => tx.ty(), + Self::Eip4844(tx) => tx.ty(), + Self::Eip7702(tx) => tx.ty(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.ty(), + } + } + + fn access_list(&self) -> Option<&AccessList> { + match self { + Self::Legacy(tx) => tx.access_list(), + Self::Eip2930(tx) => tx.access_list(), + Self::Eip1559(tx) => tx.access_list(), + Self::Eip4844(tx) => tx.access_list(), + Self::Eip7702(tx) => tx.access_list(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.access_list(), + } + } + + fn blob_versioned_hashes(&self) -> Option<&[B256]> { + match self { + Self::Legacy(tx) => tx.blob_versioned_hashes(), + Self::Eip2930(tx) => tx.blob_versioned_hashes(), + Self::Eip1559(tx) => tx.blob_versioned_hashes(), + Self::Eip4844(tx) => tx.blob_versioned_hashes(), + Self::Eip7702(tx) => tx.blob_versioned_hashes(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.blob_versioned_hashes(), + } + } + + fn authorization_list(&self) -> Option<&[SignedAuthorization]> { + match self { + Self::Legacy(tx) => tx.authorization_list(), + Self::Eip2930(tx) => tx.authorization_list(), + Self::Eip1559(tx) => tx.authorization_list(), + Self::Eip4844(tx) => tx.authorization_list(), + Self::Eip7702(tx) => tx.authorization_list(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.authorization_list(), + } + } +} + /// Signed transaction without its Hash. Used type for inserting into the DB. /// /// This can by converted to [`TransactionSigned`] by calling [`TransactionSignedNoHash::hash`]. 
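An illustrative sketch (not from the patch above) of what this trait impl enables downstream: code can now be written against `alloy_consensus::Transaction` generically and accept reth's `Transaction` enum directly. The helper name and the cost formula are assumptions for illustration; the trait methods used (`gas_limit`, `max_fee_per_gas`) are among those delegated in the diff.

    use alloy_consensus::Transaction as TransactionTrait;

    // Upper bound on the execution fee a transaction can pay: gas limit * max fee per gas.
    // Works for any `T: alloy_consensus::Transaction`, which now includes
    // `reth_primitives::Transaction` after this change.
    fn max_execution_fee<T: TransactionTrait>(tx: &T) -> u128 {
        tx.max_fee_per_gas().saturating_mul(tx.gas_limit() as u128)
    }
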
From 39f0ab41169ec3b8f30cdae1d138ff3a04364928 Mon Sep 17 00:00:00 2001 From: Jonathan LEI Date: Tue, 15 Oct 2024 18:32:25 +0800 Subject: [PATCH 054/425] chore: turn off `reth-revm` default features (#10215) Co-authored-by: Federico Gimenez Co-authored-by: Matthias Seitz --- Cargo.lock | 5 +++++ Cargo.toml | 4 +--- crates/ethereum/evm/src/lib.rs | 6 ++++++ crates/ethereum/node/Cargo.toml | 4 ++++ crates/ethereum/node/src/lib.rs | 3 +++ crates/evm/src/lib.rs | 6 ++++++ crates/optimism/node/Cargo.toml | 6 +++++- testing/ef-tests/Cargo.toml | 3 +++ testing/ef-tests/src/lib.rs | 3 +++ 9 files changed, 36 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1bbfbbd30..3fbcc7514 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2562,7 +2562,9 @@ dependencies = [ "reth-evm-ethereum", "reth-primitives", "reth-provider", + "reth-revm", "reth-stages", + "revm", "serde", "serde_json", "thiserror", @@ -7937,10 +7939,12 @@ dependencies = [ "reth-payload-builder", "reth-primitives", "reth-provider", + "reth-revm", "reth-rpc", "reth-tasks", "reth-tracing", "reth-transaction-pool", + "revm", "serde_json", "tokio", ] @@ -8163,6 +8167,7 @@ dependencies = [ "reth-rpc-types-compat", "reth-tracing", "reth-transaction-pool", + "revm", "serde", "serde_json", "thiserror", diff --git a/Cargo.toml b/Cargo.toml index 2068f02bb..f734fd44c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -381,7 +381,7 @@ reth-primitives-traits = { path = "crates/primitives-traits", default-features = reth-provider = { path = "crates/storage/provider" } reth-prune = { path = "crates/prune/prune" } reth-prune-types = { path = "crates/prune/types" } -reth-revm = { path = "crates/revm" } +reth-revm = { path = "crates/revm", default-features = false } reth-rpc = { path = "crates/rpc/rpc" } reth-rpc-api = { path = "crates/rpc/rpc-api" } reth-rpc-api-testing-util = { path = "crates/rpc/rpc-testing-util" } @@ -412,8 +412,6 @@ reth-trie-parallel = { path = "crates/trie/parallel" } # revm revm = { version = "14.0.3", features = [ "std", - "secp256k1", - "blst", ], default-features = false } revm-inspectors = "0.8.1" revm-primitives = { version = "10.0.0", features = [ diff --git a/crates/ethereum/evm/src/lib.rs b/crates/ethereum/evm/src/lib.rs index 8ea39f93d..cede8008b 100644 --- a/crates/ethereum/evm/src/lib.rs +++ b/crates/ethereum/evm/src/lib.rs @@ -1,4 +1,10 @@ //! EVM config for vanilla ethereum. +//! +//! # Revm features +//! +//! This crate does __not__ enforce specific revm features such as `blst` or `c-kzg`, which are +//! critical for revm's evm internals, it is the responsibility of the implementer to ensure the +//! proper features are selected. 
#![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", diff --git a/crates/ethereum/node/Cargo.toml b/crates/ethereum/node/Cargo.toml index 145a96683..7a323f91d 100644 --- a/crates/ethereum/node/Cargo.toml +++ b/crates/ethereum/node/Cargo.toml @@ -29,6 +29,10 @@ reth-rpc.workspace = true reth-node-api.workspace = true reth-chainspec.workspace = true reth-primitives.workspace = true +reth-revm = { workspace = true, features = ["std"] } + +# revm with required ethereum features +revm = { workspace = true, features = ["secp256k1", "blst", "c-kzg"] } # misc eyre.workspace = true diff --git a/crates/ethereum/node/src/lib.rs b/crates/ethereum/node/src/lib.rs index 44ec6836c..37ebc33c2 100644 --- a/crates/ethereum/node/src/lib.rs +++ b/crates/ethereum/node/src/lib.rs @@ -8,6 +8,9 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +use reth_revm as _; +use revm as _; + pub use reth_ethereum_engine_primitives::EthEngineTypes; pub mod evm; diff --git a/crates/evm/src/lib.rs b/crates/evm/src/lib.rs index e0d45f04c..66026a07c 100644 --- a/crates/evm/src/lib.rs +++ b/crates/evm/src/lib.rs @@ -1,4 +1,10 @@ //! Traits for configuring an EVM specifics. +//! +//! # Revm features +//! +//! This crate does __not__ enforce specific revm features such as `blst` or `c-kzg`, which are +//! critical for revm's evm internals, it is the responsibility of the implementer to ensure the +//! proper features are selected. #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index 0902d7858..f9e038a3d 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -27,7 +27,7 @@ reth-provider.workspace = true reth-transaction-pool.workspace = true reth-network.workspace = true reth-evm.workspace = true -reth-revm.workspace = true +reth-revm = { workspace = true, features = ["std"] } reth-beacon-consensus.workspace = true reth-discv5.workspace = true reth-rpc-eth-types.workspace = true @@ -42,6 +42,9 @@ reth-optimism-chainspec.workspace = true reth-optimism-consensus.workspace = true reth-optimism-forks.workspace = true +# revm with required optimism features +revm = { workspace = true, features = ["secp256k1", "blst", "c-kzg"] } + # ethereum alloy-eips.workspace = true alloy-primitives.workspace = true @@ -85,6 +88,7 @@ optimism = [ "reth-optimism-payload-builder/optimism", "reth-beacon-consensus/optimism", "reth-revm/optimism", + "revm/optimism", "reth-auto-seal-consensus/optimism", "reth-optimism-rpc/optimism", "reth-engine-local/optimism", diff --git a/testing/ef-tests/Cargo.toml b/testing/ef-tests/Cargo.toml index ca23ffcce..df68f5154 100644 --- a/testing/ef-tests/Cargo.toml +++ b/testing/ef-tests/Cargo.toml @@ -23,6 +23,9 @@ reth-db-api.workspace = true reth-provider = { workspace = true, features = ["test-utils"] } reth-stages.workspace = true reth-evm-ethereum.workspace = true +reth-revm = { workspace = true, features = ["std"] } + +revm = { workspace = true, features = ["secp256k1", "blst", "c-kzg"] } alloy-rlp.workspace = true alloy-primitives.workspace = true diff --git a/testing/ef-tests/src/lib.rs b/testing/ef-tests/src/lib.rs index 45f296d1f..ca5e47d2d 100644 --- a/testing/ef-tests/src/lib.rs +++ b/testing/ef-tests/src/lib.rs @@ -7,6 +7,9 @@ )] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +use reth_revm as _; +use revm as _; + pub mod case; 
pub mod result; pub mod suite; From 3cb4bf266d624b9f985fc9273540f353f34f6b94 Mon Sep 17 00:00:00 2001 From: DaniPopes <57450786+DaniPopes@users.noreply.github.com> Date: Tue, 15 Oct 2024 12:53:34 +0200 Subject: [PATCH 055/425] chore(deps): bump alloy-trie 0.7 (#11362) Co-authored-by: Roman Krasiuk Co-authored-by: Alexey Shekhirin --- Cargo.lock | 12 ++++--- Cargo.toml | 2 +- crates/stages/types/src/checkpoints.rs | 2 +- crates/storage/codecs/src/alloy/trie.rs | 33 +++++++++++--------- crates/trie/common/src/hash_builder/state.rs | 12 +++---- crates/trie/trie/src/witness.rs | 8 +++-- 6 files changed, 39 insertions(+), 30 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3fbcc7514..828444080 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -741,13 +741,14 @@ dependencies = [ [[package]] name = "alloy-trie" -version = "0.6.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9703ce68b97f8faae6f7739d1e003fc97621b856953cbcdbb2b515743f23288" +checksum = "6fa8acead43cb238a7b7f47238c71137f4677a0b8d90e7e3be6e6ca59a28194e" dependencies = [ "alloy-primitives", "alloy-rlp", "arbitrary", + "arrayvec", "derive_arbitrary", "derive_more 1.0.0", "nybbles", @@ -992,6 +993,9 @@ name = "arrayvec" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +dependencies = [ + "serde", +] [[package]] name = "asn1_der" @@ -4460,7 +4464,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -11348,7 +11352,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.48.0", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index f734fd44c..e3ec1c1fb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -424,7 +424,7 @@ alloy-dyn-abi = "0.8.0" alloy-primitives = { version = "0.8.7", default-features = false } alloy-rlp = "0.3.4" alloy-sol-types = "0.8.0" -alloy-trie = { version = "0.6", default-features = false } +alloy-trie = { version = "0.7", default-features = false } alloy-consensus = { version = "0.4.2", default-features = false } alloy-eips = { version = "0.4.2", default-features = false } diff --git a/crates/stages/types/src/checkpoints.rs b/crates/stages/types/src/checkpoints.rs index e88e933d6..79e896bf4 100644 --- a/crates/stages/types/src/checkpoints.rs +++ b/crates/stages/types/src/checkpoints.rs @@ -8,7 +8,7 @@ use std::ops::RangeInclusive; use super::StageId; /// Saves the progress of Merkle stage. -#[derive(Default, Debug, Clone, PartialEq)] +#[derive(Default, Debug, Clone, PartialEq, Eq)] pub struct MerkleCheckpoint { /// The target block number. pub target_block: BlockNumber, diff --git a/crates/storage/codecs/src/alloy/trie.rs b/crates/storage/codecs/src/alloy/trie.rs index c89ef0bf6..cc5273dd0 100644 --- a/crates/storage/codecs/src/alloy/trie.rs +++ b/crates/storage/codecs/src/alloy/trie.rs @@ -1,15 +1,18 @@ -//! Native Compact codec impl for EIP-7685 requests. +//! Native Compact codec impl for alloy-trie types. 
use crate::Compact; use alloc::vec::Vec; use alloy_primitives::B256; -use alloy_trie::{hash_builder::HashBuilderValue, BranchNodeCompact, TrieMask}; +use alloy_trie::{ + hash_builder::{HashBuilderValue, HashBuilderValueRef}, + BranchNodeCompact, TrieMask, +}; use bytes::{Buf, BufMut}; -/// Identifier for [`HashBuilderValue::Hash`] +/// Identifier for [`HashBuilderValueRef::Hash`] const HASH_BUILDER_TYPE_HASH: u8 = 0; -/// Identifier for [`HashBuilderValue::Bytes`] +/// Identifier for [`HashBuilderValueRef::Bytes`] const HASH_BUILDER_TYPE_BYTES: u8 = 1; impl Compact for HashBuilderValue { @@ -17,34 +20,34 @@ impl Compact for HashBuilderValue { where B: BufMut + AsMut<[u8]>, { - match self { - Self::Hash(hash) => { + match self.as_ref() { + HashBuilderValueRef::Hash(hash) => { buf.put_u8(HASH_BUILDER_TYPE_HASH); 1 + hash.to_compact(buf) } - Self::Bytes(bytes) => { + HashBuilderValueRef::Bytes(bytes) => { buf.put_u8(HASH_BUILDER_TYPE_BYTES); 1 + bytes.to_compact(buf) } } } - // # Panics - // - // A panic will be triggered if a HashBuilderValue variant greater than 1 is passed from the - // database. fn from_compact(mut buf: &[u8], _: usize) -> (Self, &[u8]) { - match buf.get_u8() { + let mut this = Self::default(); + let buf = match buf.get_u8() { HASH_BUILDER_TYPE_HASH => { let (hash, buf) = B256::from_compact(buf, 32); - (Self::Hash(hash), buf) + this.set_from_ref(HashBuilderValueRef::Hash(&hash)); + buf } HASH_BUILDER_TYPE_BYTES => { let (bytes, buf) = Vec::from_compact(buf, 0); - (Self::Bytes(bytes), buf) + this.set_bytes_owned(bytes); + buf } _ => unreachable!("Junk data in database: unknown HashBuilderValue variant"), - } + }; + (this, buf) } } diff --git a/crates/trie/common/src/hash_builder/state.rs b/crates/trie/common/src/hash_builder/state.rs index 467931672..c5cae21a1 100644 --- a/crates/trie/common/src/hash_builder/state.rs +++ b/crates/trie/common/src/hash_builder/state.rs @@ -1,5 +1,5 @@ use crate::TrieMask; -use alloy_trie::{hash_builder::HashBuilderValue, HashBuilder}; +use alloy_trie::{hash_builder::HashBuilderValue, nodes::RlpNode, HashBuilder}; use bytes::Buf; use nybbles::Nibbles; use reth_codecs::Compact; @@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize}; /// The hash builder state for storing in the database. /// Check the `reth-trie` crate for more info on hash builder. -#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize)] #[cfg_attr( feature = "arbitrary", derive(arbitrary::Arbitrary), @@ -16,10 +16,10 @@ use serde::{Deserialize, Serialize}; pub struct HashBuilderState { /// The current key. pub key: Vec, - /// The builder stack. - pub stack: Vec>, /// The current node value. pub value: HashBuilderValue, + /// The builder stack. + pub stack: Vec, /// Group masks. 
pub groups: Vec, @@ -112,7 +112,7 @@ impl Compact for HashBuilderState { let mut stack = Vec::with_capacity(stack_len); for _ in 0..stack_len { let item_len = buf.get_u16() as usize; - stack.push(Vec::from(&buf[..item_len])); + stack.push(RlpNode::from_raw(&buf[..item_len]).unwrap()); buf.advance(item_len); } @@ -154,7 +154,7 @@ mod tests { #[test] fn hash_builder_state_regression() { let mut state = HashBuilderState::default(); - state.stack.push(vec![]); + state.stack.push(Default::default()); let mut buf = vec![]; let len = state.clone().to_compact(&mut buf); let (decoded, _) = HashBuilderState::from_compact(&buf, len); diff --git a/crates/trie/trie/src/witness.rs b/crates/trie/trie/src/witness.rs index 582319c7f..971f10cfb 100644 --- a/crates/trie/trie/src/witness.rs +++ b/crates/trie/trie/src/witness.rs @@ -1,5 +1,3 @@ -use std::collections::BTreeMap; - use crate::{ hashed_cursor::{HashedCursor, HashedCursorFactory}, prefix_set::TriePrefixSetsMut, @@ -19,6 +17,7 @@ use reth_primitives::constants::EMPTY_ROOT_HASH; use reth_trie_common::{ BranchNode, HashBuilder, Nibbles, StorageMultiProof, TrieAccount, TrieNode, CHILD_INDEX_RANGE, }; +use std::collections::BTreeMap; /// State transition witness for the trie. #[derive(Debug)] @@ -229,7 +228,10 @@ where TrieNode::Leaf(leaf) => { next_path.extend_from_slice(&leaf.key); if next_path != key { - trie_nodes.insert(next_path.clone(), Either::Right(leaf.value.clone())); + trie_nodes.insert( + next_path.clone(), + Either::Right(leaf.value.as_slice().to_vec()), + ); } } TrieNode::EmptyRoot => { From 161605313a567a081ea2eaf23ab1f6eeba29d19f Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Tue, 15 Oct 2024 13:12:41 +0200 Subject: [PATCH 056/425] feat: sketch composable executor (#11447) --- crates/evm/src/execute.rs | 335 +++++++++++++++++++++++++++++++++++++- 1 file changed, 331 insertions(+), 4 deletions(-) diff --git a/crates/evm/src/execute.rs b/crates/evm/src/execute.rs index 476c695e7..eb77d054b 100644 --- a/crates/evm/src/execute.rs +++ b/crates/evm/src/execute.rs @@ -7,11 +7,12 @@ pub use reth_execution_errors::{ pub use reth_execution_types::{BlockExecutionInput, BlockExecutionOutput, ExecutionOutcome}; pub use reth_storage_errors::provider::ProviderError; +use alloc::{boxed::Box, vec::Vec}; use alloy_primitives::BlockNumber; -use core::fmt::Display; -use reth_primitives::{BlockWithSenders, Receipt}; +use core::{fmt::Display, marker::PhantomData}; +use reth_primitives::{BlockWithSenders, Receipt, Request}; use reth_prune_types::PruneModes; -use revm::State; +use revm::{db::BundleState, State}; use revm_primitives::db::Database; use crate::system_calls::OnStateHook; @@ -163,12 +164,233 @@ pub trait BlockExecutorProvider: Send + Sync + Clone + Unpin + 'static { DB: Database + Display>; } +/// Defines the strategy for executing a single block. +pub trait BlockExecutionStrategy { + /// The error type returned by this strategy's methods. + type Error: From + core::error::Error; + + /// Applies any necessary changes before executing the block's transactions. + fn apply_pre_execution_changes(&mut self) -> Result<(), Self::Error>; + + /// Executes all transactions in the block. + fn execute_transactions( + &mut self, + block: &BlockWithSenders, + ) -> Result<(Vec, u64), Self::Error>; + + /// Applies any necessary changes after executing the block's transactions. + fn apply_post_execution_changes(&mut self) -> Result, Self::Error>; + + /// Returns a reference to the current state. 
+ fn state_ref(&self) -> &State; + + /// Sets a hook to be called after each state change during execution. + fn with_state_hook(&mut self, hook: Option>); + + /// Returns the final bundle state. + fn finish(&self) -> BundleState; +} + +/// A strategy factory that can create block execution strategies. +pub trait BlockExecutionStrategyFactory: Send + Sync + Clone + Unpin + 'static { + /// Associated strategy type. + type Strategy + Display>>: BlockExecutionStrategy< + DB, + Error = BlockExecutionError, + >; + + /// Creates a strategy using the give database. + fn create_strategy(&self, db: DB) -> Self::Strategy + where + DB: Database + Display>; +} + +impl Clone for GenericBlockExecutorProvider +where + F: Clone, +{ + fn clone(&self) -> Self { + Self { strategy_factory: self.strategy_factory.clone() } + } +} + +/// A generic block executor provider that can create executors using a strategy factory. +#[allow(missing_debug_implementations)] +pub struct GenericBlockExecutorProvider { + strategy_factory: F, +} + +impl GenericBlockExecutorProvider { + /// Creates a new `GenericBlockExecutorProvider` with the given strategy factory. + pub const fn new(strategy_factory: F) -> Self { + Self { strategy_factory } + } +} + +impl BlockExecutorProvider for GenericBlockExecutorProvider +where + F: BlockExecutionStrategyFactory, +{ + type Executor + Display>> = + GenericBlockExecutor, DB>; + + type BatchExecutor + Display>> = + GenericBatchExecutor, DB>; + + fn executor(&self, db: DB) -> Self::Executor + where + DB: Database + Display>, + { + let strategy = self.strategy_factory.create_strategy(db); + GenericBlockExecutor::new(strategy) + } + + fn batch_executor(&self, db: DB) -> Self::BatchExecutor + where + DB: Database + Display>, + { + let strategy = self.strategy_factory.create_strategy(db); + GenericBatchExecutor::new(strategy) + } +} + +/// A generic block executor that uses a [`BlockExecutionStrategy`] to +/// execute blocks. +#[allow(missing_debug_implementations, dead_code)] +pub struct GenericBlockExecutor +where + S: BlockExecutionStrategy, +{ + strategy: S, + _phantom: PhantomData, +} + +impl GenericBlockExecutor +where + S: BlockExecutionStrategy, +{ + /// Creates a new `GenericBlockExecutor` with the given strategy. 
+ pub const fn new(strategy: S) -> Self { + Self { strategy, _phantom: PhantomData } + } +} + +impl Executor for GenericBlockExecutor +where + S: BlockExecutionStrategy, + DB: Database + Display>, +{ + type Input<'a> = BlockExecutionInput<'a, BlockWithSenders>; + type Output = BlockExecutionOutput; + type Error = S::Error; + + fn execute(mut self, input: Self::Input<'_>) -> Result { + let BlockExecutionInput { block, total_difficulty: _ } = input; + + self.strategy.apply_pre_execution_changes()?; + let (receipts, gas_used) = self.strategy.execute_transactions(block)?; + let requests = self.strategy.apply_post_execution_changes()?; + let state = self.strategy.finish(); + + Ok(BlockExecutionOutput { state, receipts, requests, gas_used }) + } + + fn execute_with_state_closure( + mut self, + input: Self::Input<'_>, + mut state: F, + ) -> Result + where + F: FnMut(&State), + { + let BlockExecutionInput { block, total_difficulty: _ } = input; + + self.strategy.apply_pre_execution_changes()?; + let (receipts, gas_used) = self.strategy.execute_transactions(block)?; + let requests = self.strategy.apply_post_execution_changes()?; + + state(self.strategy.state_ref()); + + let state = self.strategy.finish(); + + Ok(BlockExecutionOutput { state, receipts, requests, gas_used }) + } + + fn execute_with_state_hook( + mut self, + input: Self::Input<'_>, + state_hook: H, + ) -> Result + where + H: OnStateHook + 'static, + { + let BlockExecutionInput { block, total_difficulty: _ } = input; + + self.strategy.with_state_hook(Some(Box::new(state_hook))); + + self.strategy.apply_pre_execution_changes()?; + let (receipts, gas_used) = self.strategy.execute_transactions(block)?; + let requests = self.strategy.apply_post_execution_changes()?; + + let state = self.strategy.finish(); + + Ok(BlockExecutionOutput { state, receipts, requests, gas_used }) + } +} + +/// A generic batch executor that uses a [`BlockExecutionStrategy`] to +/// execute batches. +#[allow(missing_debug_implementations)] +pub struct GenericBatchExecutor { + _strategy: S, + _phantom: PhantomData, +} + +impl GenericBatchExecutor { + /// Creates a new `GenericBatchExecutor` with the given strategy. + pub const fn new(_strategy: S) -> Self { + Self { _strategy, _phantom: PhantomData } + } +} + +impl BatchExecutor for GenericBatchExecutor +where + S: BlockExecutionStrategy, + DB: Database + Display>, +{ + type Input<'a> = BlockExecutionInput<'a, BlockWithSenders>; + type Output = ExecutionOutcome; + type Error = BlockExecutionError; + + fn execute_and_verify_one(&mut self, _input: Self::Input<'_>) -> Result<(), Self::Error> { + todo!() + } + + fn finalize(self) -> Self::Output { + todo!() + } + + fn set_tip(&mut self, _tip: BlockNumber) { + todo!() + } + + fn set_prune_modes(&mut self, _prune_modes: PruneModes) { + todo!() + } + + fn size_hint(&self) -> Option { + None + } +} + #[cfg(test)] mod tests { use super::*; + use alloy_eips::eip6110::DepositRequest; use alloy_primitives::U256; + use reth_chainspec::{ChainSpec, MAINNET}; use revm::db::{CacheDB, EmptyDBTyped}; - use std::marker::PhantomData; + use std::sync::Arc; #[derive(Clone, Default)] struct TestExecutorProvider; @@ -252,6 +474,83 @@ mod tests { } } + struct TestExecutorStrategy { + // chain spec and evm config here only to illustrate how the strategy + // factory can use them in a real use case. 
+ _chain_spec: Arc, + _evm_config: EvmConfig, + state: State, + execute_transactions_result: (Vec, u64), + apply_post_execution_changes_result: Vec, + finish_result: BundleState, + } + + #[derive(Clone)] + struct TestExecutorStrategyFactory { + execute_transactions_result: (Vec, u64), + apply_post_execution_changes_result: Vec, + finish_result: BundleState, + } + + impl BlockExecutionStrategyFactory for TestExecutorStrategyFactory { + type Strategy + Display>> = + TestExecutorStrategy; + + fn create_strategy(&self, db: DB) -> Self::Strategy + where + DB: Database + Display>, + { + let state = State::builder() + .with_database(db) + .with_bundle_update() + .without_state_clear() + .build(); + + TestExecutorStrategy { + _chain_spec: MAINNET.clone(), + _evm_config: TestEvmConfig {}, + execute_transactions_result: self.execute_transactions_result.clone(), + apply_post_execution_changes_result: self + .apply_post_execution_changes_result + .clone(), + finish_result: self.finish_result.clone(), + state, + } + } + } + + impl BlockExecutionStrategy for TestExecutorStrategy { + type Error = BlockExecutionError; + + fn apply_pre_execution_changes(&mut self) -> Result<(), Self::Error> { + Ok(()) + } + + fn execute_transactions( + &mut self, + _block: &BlockWithSenders, + ) -> Result<(Vec, u64), Self::Error> { + Ok(self.execute_transactions_result.clone()) + } + + fn apply_post_execution_changes(&mut self) -> Result, Self::Error> { + Ok(self.apply_post_execution_changes_result.clone()) + } + + fn state_ref(&self) -> &State { + &self.state + } + + fn with_state_hook(&mut self, _hook: Option>) {} + + fn finish(&self) -> BundleState { + self.finish_result.clone() + } + } + + #[derive(Clone)] + struct TestEvmConfig {} + #[test] fn test_provider() { let provider = TestExecutorProvider; @@ -259,4 +558,32 @@ mod tests { let executor = provider.executor(db); let _ = executor.execute(BlockExecutionInput::new(&Default::default(), U256::ZERO)); } + + #[test] + fn test_strategy() { + let expected_gas_used = 10; + let expected_receipts = vec![Receipt::default()]; + let expected_execute_transactions_result = (expected_receipts.clone(), expected_gas_used); + let expected_apply_post_execution_changes_result = + vec![Request::DepositRequest(DepositRequest::default())]; + let expected_finish_result = BundleState::default(); + + let strategy_factory = TestExecutorStrategyFactory { + execute_transactions_result: expected_execute_transactions_result, + apply_post_execution_changes_result: expected_apply_post_execution_changes_result + .clone(), + finish_result: expected_finish_result.clone(), + }; + let provider = GenericBlockExecutorProvider::new(strategy_factory); + let db = CacheDB::>::default(); + let executor = provider.executor(db); + let result = executor.execute(BlockExecutionInput::new(&Default::default(), U256::ZERO)); + + assert!(result.is_ok()); + let block_execution_output = result.unwrap(); + assert_eq!(block_execution_output.gas_used, expected_gas_used); + assert_eq!(block_execution_output.receipts, expected_receipts); + assert_eq!(block_execution_output.requests, expected_apply_post_execution_changes_result); + assert_eq!(block_execution_output.state, expected_finish_result); + } } From c4d7b591834055bdf3afed96b696e0a2cef0f383 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Tue, 15 Oct 2024 22:04:20 +0900 Subject: [PATCH 057/425] perf(rpc): add optional block argument to `trace_block_until_with_inspector` (#11631) --- crates/rpc/rpc-eth-api/src/helpers/trace.rs | 
22 +++++++++++++++++---- crates/rpc/rpc/src/otterscan.rs | 1 + crates/rpc/rpc/src/trace.rs | 17 ++++++++++++---- 3 files changed, 32 insertions(+), 8 deletions(-) diff --git a/crates/rpc/rpc-eth-api/src/helpers/trace.rs b/crates/rpc/rpc-eth-api/src/helpers/trace.rs index 457cbb481..981de8fa6 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/trace.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/trace.rs @@ -1,12 +1,14 @@ //! Loads a pending block from database. Helper trait for `eth_` call and trace RPC methods. +use std::sync::Arc; + use crate::FromEvmError; use alloy_primitives::B256; use alloy_rpc_types::{BlockId, TransactionInfo}; use futures::Future; use reth_chainspec::ChainSpecProvider; use reth_evm::{system_calls::SystemCaller, ConfigureEvm, ConfigureEvmEnv}; -use reth_primitives::Header; +use reth_primitives::{Header, SealedBlockWithSenders}; use reth_revm::database::StateProviderDatabase; use reth_rpc_eth_types::{ cache::db::{StateCacheDb, StateCacheDbRefMutWrapper, StateProviderTraitObjWrapper}, @@ -247,6 +249,7 @@ pub trait Trace: LoadState { fn trace_block_until( &self, block_id: BlockId, + block: Option>, highest_index: Option, config: TracingInspectorConfig, f: F, @@ -266,6 +269,7 @@ pub trait Trace: LoadState { { self.trace_block_until_with_inspector( block_id, + block, highest_index, move || TracingInspector::new(config), f, @@ -285,6 +289,7 @@ pub trait Trace: LoadState { fn trace_block_until_with_inspector( &self, block_id: BlockId, + block: Option>, highest_index: Option, mut inspector_setup: Setup, f: F, @@ -305,8 +310,15 @@ pub trait Trace: LoadState { R: Send + 'static, { async move { + let block = async { + if block.is_some() { + return Ok(block) + } + self.block_with_senders(block_id).await + }; + let ((cfg, block_env, _), block) = - futures::try_join!(self.evm_env_at(block_id), self.block_with_senders(block_id))?; + futures::try_join!(self.evm_env_at(block_id), block)?; let Some(block) = block else { return Ok(None) }; @@ -409,6 +421,7 @@ pub trait Trace: LoadState { fn trace_block_with( &self, block_id: BlockId, + block: Option>, config: TracingInspectorConfig, f: F, ) -> impl Future>, Self::Error>> + Send @@ -427,7 +440,7 @@ pub trait Trace: LoadState { + 'static, R: Send + 'static, { - self.trace_block_until(block_id, None, config, f) + self.trace_block_until(block_id, block, None, config, f) } /// Executes all transactions of a block and returns a list of callback results invoked for each @@ -447,6 +460,7 @@ pub trait Trace: LoadState { fn trace_block_inspector( &self, block_id: BlockId, + block: Option>, insp_setup: Setup, f: F, ) -> impl Future>, Self::Error>> + Send @@ -467,6 +481,6 @@ pub trait Trace: LoadState { Insp: for<'a, 'b> Inspector> + Send + 'static, R: Send + 'static, { - self.trace_block_until_with_inspector(block_id, None, insp_setup, f) + self.trace_block_until_with_inspector(block_id, block, None, insp_setup, f) } } diff --git a/crates/rpc/rpc/src/otterscan.rs b/crates/rpc/rpc/src/otterscan.rs index 31db343a1..45722978f 100644 --- a/crates/rpc/rpc/src/otterscan.rs +++ b/crates/rpc/rpc/src/otterscan.rs @@ -334,6 +334,7 @@ where .eth .trace_block_with( num.into(), + None, TracingInspectorConfig::default_parity(), |tx_info, inspector, _, _, _| { Ok(inspector.into_parity_builder().into_localized_transaction_traces(tx_info)) diff --git a/crates/rpc/rpc/src/trace.rs b/crates/rpc/rpc/src/trace.rs index 0cd94ef15..8ac532ff3 100644 --- a/crates/rpc/rpc/src/trace.rs +++ b/crates/rpc/rpc/src/trace.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use 
alloy_primitives::{map::HashSet, Bytes, B256, U256}; use alloy_rpc_types::{ state::{EvmOverrides, StateOverride}, @@ -37,6 +35,7 @@ use revm_inspectors::{ opcode::OpcodeGasInspector, tracing::{parity::populate_state_diff, TracingInspector, TracingInspectorConfig}, }; +use std::sync::Arc; use tokio::sync::{AcquireError, OwnedSemaphorePermit}; /// `trace` API implementation. @@ -278,14 +277,21 @@ where } // fetch all blocks in that range - let blocks = self.provider().block_range(start..=end).map_err(Eth::Error::from_eth_err)?; + let blocks = self + .provider() + .sealed_block_with_senders_range(start..=end) + .map_err(Eth::Error::from_eth_err)? + .into_iter() + .map(Arc::new) + .collect::>(); // trace all blocks let mut block_traces = Vec::with_capacity(blocks.len()); for block in &blocks { let matcher = matcher.clone(); let traces = self.eth_api().trace_block_until( - block.number.into(), + block.hash().into(), + Some(block.clone()), None, TracingInspectorConfig::default_parity(), move |tx_info, inspector, _, _, _| { @@ -369,6 +375,7 @@ where ) -> Result>, Eth::Error> { let traces = self.eth_api().trace_block_with( block_id, + None, TracingInspectorConfig::default_parity(), |tx_info, inspector, _, _, _| { let traces = @@ -405,6 +412,7 @@ where self.eth_api() .trace_block_with( block_id, + None, TracingInspectorConfig::from_parity_config(&trace_types), move |tx_info, inspector, res, state, db| { let mut full_trace = @@ -460,6 +468,7 @@ where .eth_api() .trace_block_inspector( block_id, + None, OpcodeGasInspector::default, move |tx_info, inspector, _res, _, _| { let trace = TransactionOpcodeGas { From a235f7214c588ab79af7996a07524cfbf5d2628b Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Tue, 15 Oct 2024 15:53:43 +0200 Subject: [PATCH 058/425] feat(trie): sparse trie (#11741) --- Cargo.lock | 15 + crates/trie/sparse/Cargo.toml | 32 ++ crates/trie/sparse/benches/root.rs | 191 +++++++++ crates/trie/sparse/src/errors.rs | 49 +++ crates/trie/sparse/src/lib.rs | 9 + crates/trie/sparse/src/state.rs | 131 ++++++ crates/trie/sparse/src/trie.rs | 627 +++++++++++++++++++++++++++++ crates/trie/trie/src/prefix_set.rs | 2 +- 8 files changed, 1055 insertions(+), 1 deletion(-) create mode 100644 crates/trie/sparse/benches/root.rs create mode 100644 crates/trie/sparse/src/errors.rs create mode 100644 crates/trie/sparse/src/state.rs create mode 100644 crates/trie/sparse/src/trie.rs diff --git a/Cargo.lock b/Cargo.lock index 828444080..7ee37e080 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9221,6 +9221,21 @@ dependencies = [ [[package]] name = "reth-trie-sparse" version = "1.1.0" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "assert_matches", + "criterion", + "itertools 0.13.0", + "proptest", + "rayon", + "reth-primitives", + "reth-trie", + "reth-trie-common", + "smallvec", + "thiserror", + "tracing", +] [[package]] name = "revm" diff --git a/crates/trie/sparse/Cargo.toml b/crates/trie/sparse/Cargo.toml index 4ebb56145..c31bbe2df 100644 --- a/crates/trie/sparse/Cargo.toml +++ b/crates/trie/sparse/Cargo.toml @@ -10,3 +10,35 @@ description = "Sparse MPT implementation" [lints] workspace = true + + +[dependencies] +# reth +reth-primitives.workspace = true +reth-trie-common.workspace = true +reth-trie.workspace = true + +# alloy +alloy-primitives.workspace = true +alloy-rlp.workspace = true + +# tracing +tracing.workspace = true + +# misc +thiserror.workspace = true +rayon.workspace = true +smallvec = { workspace = true, features = ["const_new"] } + +[dev-dependencies] +reth-primitives = { 
workspace = true, features = ["test-utils", "arbitrary"] } +reth-trie-common = { workspace = true, features = ["test-utils", "arbitrary"] } +reth-trie = { workspace = true, features = ["test-utils"] } +assert_matches.workspace = true +itertools.workspace = true +proptest.workspace = true +criterion.workspace = true + +[[bench]] +name = "root" +harness = false diff --git a/crates/trie/sparse/benches/root.rs b/crates/trie/sparse/benches/root.rs new file mode 100644 index 000000000..4078eb7af --- /dev/null +++ b/crates/trie/sparse/benches/root.rs @@ -0,0 +1,191 @@ +#![allow(missing_docs, unreachable_pub)] +use alloy_primitives::{map::HashMap, B256, U256}; +use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; +use itertools::Itertools; +use proptest::{prelude::*, strategy::ValueTree, test_runner::TestRunner}; +use reth_trie::{ + hashed_cursor::{noop::NoopHashedStorageCursor, HashedPostStateStorageCursor}, + node_iter::{TrieElement, TrieNodeIter}, + trie_cursor::{noop::NoopStorageTrieCursor, InMemoryStorageTrieCursor}, + updates::StorageTrieUpdates, + walker::TrieWalker, + HashedStorage, +}; +use reth_trie_common::{HashBuilder, Nibbles}; +use reth_trie_sparse::SparseTrie; + +pub fn calculate_root_from_leaves(c: &mut Criterion) { + let mut group = c.benchmark_group("calculate root from leaves"); + group.sample_size(20); + + for size in [1_000, 5_000, 10_000, 100_000] { + let state = generate_test_data(size); + + // hash builder + group.bench_function(BenchmarkId::new("hash builder", size), |b| { + b.iter_with_setup(HashBuilder::default, |mut hb| { + for (key, value) in state.iter().sorted_by_key(|(key, _)| *key) { + hb.add_leaf(Nibbles::unpack(key), &alloy_rlp::encode_fixed_size(value)); + } + hb.root(); + }) + }); + + // sparse trie + group.bench_function(BenchmarkId::new("sparse trie", size), |b| { + b.iter_with_setup(SparseTrie::revealed_empty, |mut sparse| { + for (key, value) in &state { + sparse + .update_leaf( + Nibbles::unpack(key), + alloy_rlp::encode_fixed_size(value).to_vec(), + ) + .unwrap(); + } + sparse.root().unwrap(); + }) + }); + } +} + +pub fn calculate_root_from_leaves_repeated(c: &mut Criterion) { + let mut group = c.benchmark_group("calculate root from leaves repeated"); + group.sample_size(20); + + for init_size in [1_000, 10_000, 100_000] { + let init_state = generate_test_data(init_size); + + for update_size in [100, 1_000, 5_000, 10_000] { + for num_updates in [1, 3, 5, 10] { + let updates = + (0..num_updates).map(|_| generate_test_data(update_size)).collect::>(); + + // hash builder + let benchmark_id = BenchmarkId::new( + "hash builder", + format!("init size {init_size} | update size {update_size} | num updates {num_updates}"), + ); + group.bench_function(benchmark_id, |b| { + b.iter_with_setup( + || { + let init_storage = HashedStorage::from_iter(false, init_state.clone()); + let storage_updates = updates + .clone() + .into_iter() + .map(|update| HashedStorage::from_iter(false, update)) + .collect::>(); + + let mut hb = HashBuilder::default().with_updates(true); + for (key, value) in init_state.iter().sorted_by_key(|(key, _)| *key) { + hb.add_leaf( + Nibbles::unpack(key), + &alloy_rlp::encode_fixed_size(value), + ); + } + hb.root(); + + let (_, updates) = hb.split(); + let trie_updates = StorageTrieUpdates::new(updates); + (init_storage, storage_updates, trie_updates) + }, + |(init_storage, storage_updates, mut trie_updates)| { + let mut storage = init_storage; + for update in storage_updates { + storage.extend(&update); + + let prefix_set = 
update.construct_prefix_set().freeze(); + let storage_sorted = storage.clone().into_sorted(); + let trie_updates_sorted = trie_updates.clone().into_sorted(); + + let walker = TrieWalker::new( + InMemoryStorageTrieCursor::new( + B256::ZERO, + NoopStorageTrieCursor::default(), + Some(&trie_updates_sorted), + ), + prefix_set, + ); + let mut node_iter = TrieNodeIter::new( + walker, + HashedPostStateStorageCursor::new( + NoopHashedStorageCursor::default(), + Some(&storage_sorted), + ), + ); + + let mut hb = HashBuilder::default().with_updates(true); + while let Some(node) = node_iter.try_next().unwrap() { + match node { + TrieElement::Branch(node) => { + hb.add_branch( + node.key, + node.value, + node.children_are_in_trie, + ); + } + TrieElement::Leaf(hashed_slot, value) => { + hb.add_leaf( + Nibbles::unpack(hashed_slot), + alloy_rlp::encode_fixed_size(&value).as_ref(), + ); + } + } + } + hb.root(); + + trie_updates.finalize(node_iter.walker, hb); + } + }, + ) + }); + + // sparse trie + let benchmark_id = BenchmarkId::new( + "sparse trie", + format!("init size {init_size} | update size {update_size} | num updates {num_updates}"), + ); + group.bench_function(benchmark_id, |b| { + b.iter_with_setup( + || { + let mut sparse = SparseTrie::revealed_empty(); + for (key, value) in &init_state { + sparse + .update_leaf( + Nibbles::unpack(key), + alloy_rlp::encode_fixed_size(value).to_vec(), + ) + .unwrap(); + } + sparse.root().unwrap(); + sparse + }, + |mut sparse| { + for update in &updates { + for (key, value) in update { + sparse + .update_leaf( + Nibbles::unpack(key), + alloy_rlp::encode_fixed_size(value).to_vec(), + ) + .unwrap(); + } + sparse.root().unwrap(); + } + }, + ) + }); + } + } + } +} + +fn generate_test_data(size: usize) -> HashMap { + let mut runner = TestRunner::new(ProptestConfig::default()); + proptest::collection::hash_map(any::(), any::(), size) + .new_tree(&mut runner) + .unwrap() + .current() +} + +criterion_group!(root, calculate_root_from_leaves, calculate_root_from_leaves_repeated); +criterion_main!(root); diff --git a/crates/trie/sparse/src/errors.rs b/crates/trie/sparse/src/errors.rs new file mode 100644 index 000000000..f60d1736c --- /dev/null +++ b/crates/trie/sparse/src/errors.rs @@ -0,0 +1,49 @@ +//! Errors for sparse trie. + +use alloy_primitives::{Bytes, B256}; +use reth_trie::Nibbles; +use thiserror::Error; + +/// Result type with [`SparseStateTrieError`] as error. +pub type SparseStateTrieResult = Result; + +/// Error encountered in [`crate::SparseStateTrie`]. +#[derive(Error, Debug)] +pub enum SparseStateTrieError { + /// Encountered invalid root node. + #[error("invalid root node at {path:?}: {node:?}")] + InvalidRootNode { + /// Path to first proof node. + path: Nibbles, + /// Encoded first proof node. + node: Bytes, + }, + /// Sparse trie error. + #[error(transparent)] + Sparse(#[from] SparseTrieError), + /// RLP error. + #[error(transparent)] + Rlp(#[from] alloy_rlp::Error), +} + +/// Result type with [`SparseTrieError`] as error. +pub type SparseTrieResult = Result; + +/// Error encountered in [`crate::SparseTrie`]. +#[derive(Error, Debug)] +pub enum SparseTrieError { + /// Sparse trie is still blind. Thrown on attempt to update it. + #[error("sparse trie is blind")] + Blind, + /// Encountered blinded node on update. + #[error("attempted to update blind node at {path:?}: {hash}")] + BlindedNode { + /// Blind node path. + path: Nibbles, + /// Node hash + hash: B256, + }, + /// RLP error. 
+ #[error(transparent)] + Rlp(#[from] alloy_rlp::Error), +} diff --git a/crates/trie/sparse/src/lib.rs b/crates/trie/sparse/src/lib.rs index 5d3d4a5b6..b3cb2c5fd 100644 --- a/crates/trie/sparse/src/lib.rs +++ b/crates/trie/sparse/src/lib.rs @@ -1 +1,10 @@ //! The implementation of sparse MPT. + +mod state; +pub use state::*; + +mod trie; +pub use trie::*; + +mod errors; +pub use errors::*; diff --git a/crates/trie/sparse/src/state.rs b/crates/trie/sparse/src/state.rs new file mode 100644 index 000000000..cfb17ef36 --- /dev/null +++ b/crates/trie/sparse/src/state.rs @@ -0,0 +1,131 @@ +use crate::{SparseStateTrieError, SparseStateTrieResult, SparseTrie}; +use alloy_primitives::{ + map::{HashMap, HashSet}, + Bytes, B256, +}; +use alloy_rlp::Decodable; +use reth_trie::{Nibbles, TrieNode}; + +/// Sparse state trie representing lazy-loaded Ethereum state trie. +#[derive(Default, Debug)] +pub struct SparseStateTrie { + /// Sparse account trie. + pub(crate) state: SparseTrie, + /// Sparse storage tries. + #[allow(dead_code)] + pub(crate) storages: HashMap, + /// Collection of revealed account and storage keys. + #[allow(dead_code)] + pub(crate) revealed: HashMap>, +} + +impl SparseStateTrie { + /// Create state trie from state trie. + pub fn from_state(state: SparseTrie) -> Self { + Self { state, ..Default::default() } + } + + /// Returns `true` if account was already revealed. + pub fn is_account_revealed(&self, account: &B256) -> bool { + self.revealed.contains_key(account) + } + + /// Returns `true` if storage slot for account was already revealed. + pub fn is_storage_slot_revealed(&self, account: &B256, slot: &B256) -> bool { + self.revealed.get(account).map_or(false, |slots| slots.contains(slot)) + } + + /// Reveal unknown trie paths from provided leaf path and its proof. + /// NOTE: This method does not extensively validate the proof. + pub fn reveal_account( + &mut self, + account: B256, + proof: impl IntoIterator, + ) -> SparseStateTrieResult<()> { + let mut proof = proof.into_iter().peekable(); + + // reveal root and initialize the trie if not already + let Some((path, node)) = proof.next() else { return Ok(()) }; + if !path.is_empty() { + return Err(SparseStateTrieError::InvalidRootNode { path, node }) + } + + // Decode root node and perform sanity check. + let root_node = TrieNode::decode(&mut &node[..])?; + if matches!(root_node, TrieNode::EmptyRoot) && proof.peek().is_some() { + return Err(SparseStateTrieError::InvalidRootNode { path, node }) + } + + // Reveal root node if it wasn't already. + let trie = self.state.reveal_root(root_node)?; + + // add the remaining proof nodes + for (path, bytes) in proof { + let node = TrieNode::decode(&mut &bytes[..])?; + trie.reveal_node(path, node)?; + } + + // Mark leaf path as revealed. + self.revealed.entry(account).or_default(); + + Ok(()) + } + + /// Update the leaf node. + pub fn update_leaf(&mut self, path: Nibbles, value: Vec) -> SparseStateTrieResult<()> { + self.state.update_leaf(path, value)?; + Ok(()) + } + + /// Returns sparse trie root if the trie has been revealed. 
+ pub fn root(&mut self) -> Option { + self.state.root() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::Bytes; + use alloy_rlp::EMPTY_STRING_CODE; + use assert_matches::assert_matches; + use reth_trie::HashBuilder; + use reth_trie_common::proof::ProofRetainer; + + #[test] + fn sparse_trie_reveal_empty() { + let retainer = ProofRetainer::from_iter([Nibbles::default()]); + let mut hash_builder = HashBuilder::default().with_proof_retainer(retainer); + hash_builder.root(); + let proofs = hash_builder.take_proof_nodes(); + assert_eq!(proofs.len(), 1); + + let mut sparse = SparseStateTrie::default(); + assert_eq!(sparse.state, SparseTrie::Blind); + sparse.reveal_account(Default::default(), proofs.into_inner()).unwrap(); + assert_eq!(sparse.state, SparseTrie::revealed_empty()); + } + + #[test] + fn reveal_first_node_not_root() { + let mut sparse = SparseStateTrie::default(); + let proof = [(Nibbles::from_nibbles([0x1]), Bytes::from([EMPTY_STRING_CODE]))]; + assert_matches!( + sparse.reveal_account(Default::default(), proof), + Err(SparseStateTrieError::InvalidRootNode { .. }) + ); + } + + #[test] + fn reveal_invalid_proof_with_empty_root() { + let mut sparse = SparseStateTrie::default(); + let proof = [ + (Nibbles::default(), Bytes::from([EMPTY_STRING_CODE])), + (Nibbles::from_nibbles([0x1]), Bytes::new()), + ]; + assert_matches!( + sparse.reveal_account(Default::default(), proof), + Err(SparseStateTrieError::InvalidRootNode { .. }) + ); + } +} diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs new file mode 100644 index 000000000..1b83e07e4 --- /dev/null +++ b/crates/trie/sparse/src/trie.rs @@ -0,0 +1,627 @@ +use crate::{SparseTrieError, SparseTrieResult}; +use alloy_primitives::{hex, keccak256, map::HashMap, B256}; +use alloy_rlp::Decodable; +use reth_trie::{ + prefix_set::{PrefixSet, PrefixSetMut}, + RlpNode, +}; +use reth_trie_common::{ + BranchNodeRef, ExtensionNodeRef, LeafNodeRef, Nibbles, TrieMask, TrieNode, CHILD_INDEX_RANGE, + EMPTY_ROOT_HASH, +}; +use smallvec::SmallVec; +use std::{collections::HashSet, fmt}; + +/// Inner representation of the sparse trie. +/// Sparse trie is blind by default until nodes are revealed. +#[derive(PartialEq, Eq, Default, Debug)] +pub enum SparseTrie { + /// None of the trie nodes are known. + #[default] + Blind, + /// The trie nodes have been revealed. + Revealed(RevealedSparseTrie), +} + +impl SparseTrie { + /// Creates new revealed empty trie. + pub fn revealed_empty() -> Self { + Self::Revealed(RevealedSparseTrie::default()) + } + + /// Returns `true` if the sparse trie has no revealed nodes. + pub const fn is_blind(&self) -> bool { + matches!(self, Self::Blind) + } + + /// Returns mutable reference to revealed sparse trie if the trie is not blind. + pub fn as_revealed_mut(&mut self) -> Option<&mut RevealedSparseTrie> { + if let Self::Revealed(revealed) = self { + Some(revealed) + } else { + None + } + } + + /// Reveals the root node if the trie is blinded. + /// + /// # Returns + /// + /// Mutable reference to [`RevealedSparseTrie`]. + pub fn reveal_root(&mut self, root: TrieNode) -> SparseTrieResult<&mut RevealedSparseTrie> { + if self.is_blind() { + *self = Self::Revealed(RevealedSparseTrie::from_root(root)?) + } + Ok(self.as_revealed_mut().unwrap()) + } + + /// Update the leaf node. 
+ pub fn update_leaf(&mut self, path: Nibbles, value: Vec) -> SparseTrieResult<()> { + let revealed = self.as_revealed_mut().ok_or(SparseTrieError::Blind)?; + revealed.update_leaf(path, value)?; + Ok(()) + } + + /// Calculates and returns the trie root if the trie has been revealed. + pub fn root(&mut self) -> Option { + Some(self.as_revealed_mut()?.root()) + } +} + +/// The representation of revealed sparse trie. +#[derive(PartialEq, Eq)] +pub struct RevealedSparseTrie { + /// All trie nodes. + nodes: HashMap, + /// All leaf values. + values: HashMap>, + /// Prefix set. + prefix_set: PrefixSetMut, + /// Reusable buffer for RLP encoding of nodes. + rlp_buf: Vec, +} + +impl fmt::Debug for RevealedSparseTrie { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("RevealedSparseTrie") + .field("nodes", &self.nodes) + .field("values", &self.values) + .field("prefix_set", &self.prefix_set) + .field("rlp_buf", &hex::encode(&self.rlp_buf)) + .finish() + } +} + +impl Default for RevealedSparseTrie { + fn default() -> Self { + Self { + nodes: HashMap::from_iter([(Nibbles::default(), SparseNode::Empty)]), + values: HashMap::default(), + prefix_set: PrefixSetMut::default(), + rlp_buf: Vec::new(), + } + } +} + +impl RevealedSparseTrie { + /// Create new revealed sparse trie from the given root node. + pub fn from_root(node: TrieNode) -> SparseTrieResult { + let mut this = Self { + nodes: HashMap::default(), + values: HashMap::default(), + prefix_set: PrefixSetMut::default(), + rlp_buf: Vec::new(), + }; + this.reveal_node(Nibbles::default(), node)?; + Ok(this) + } + + /// Reveal the trie node only if it was not known already. + pub fn reveal_node(&mut self, path: Nibbles, node: TrieNode) -> SparseTrieResult<()> { + // TODO: revise all inserts to not overwrite existing entries + match node { + TrieNode::EmptyRoot => { + debug_assert!(path.is_empty()); + self.nodes.insert(path, SparseNode::Empty); + } + TrieNode::Branch(branch) => { + let mut stack_ptr = branch.as_ref().first_child_index(); + for idx in CHILD_INDEX_RANGE { + if branch.state_mask.is_bit_set(idx) { + let mut child_path = path.clone(); + child_path.push_unchecked(idx); + self.reveal_node_or_hash(child_path, &branch.stack[stack_ptr])?; + stack_ptr += 1; + } + } + self.nodes + .insert(path, SparseNode::Branch { state_mask: branch.state_mask, hash: None }); + } + TrieNode::Extension(ext) => { + let mut child_path = path.clone(); + child_path.extend_from_slice_unchecked(&ext.key); + self.reveal_node_or_hash(child_path, &ext.child)?; + self.nodes.insert(path, SparseNode::Extension { key: ext.key, hash: None }); + } + TrieNode::Leaf(leaf) => { + let mut full = path.clone(); + full.extend_from_slice_unchecked(&leaf.key); + self.values.insert(full, leaf.value); + self.nodes.insert(path, SparseNode::new_leaf(leaf.key)); + } + } + + Ok(()) + } + + fn reveal_node_or_hash(&mut self, path: Nibbles, child: &[u8]) -> SparseTrieResult<()> { + if child.len() == B256::len_bytes() + 1 { + // TODO: revise insert to not overwrite existing entries + self.nodes.insert(path, SparseNode::Hash(B256::from_slice(&child[1..]))); + return Ok(()) + } + + self.reveal_node(path, TrieNode::decode(&mut &child[..])?) + } + + /// Update the leaf node with provided value. 
+ pub fn update_leaf(&mut self, path: Nibbles, value: Vec) -> SparseTrieResult<()> { + self.prefix_set.insert(path.clone()); + let existing = self.values.insert(path.clone(), value); + if existing.is_some() { + // trie structure unchanged, return immediately + return Ok(()) + } + + let mut current = Nibbles::default(); + while let Some(node) = self.nodes.get_mut(¤t) { + match node { + SparseNode::Empty => { + *node = SparseNode::new_leaf(path); + break + } + SparseNode::Hash(hash) => { + return Err(SparseTrieError::BlindedNode { path: current, hash: *hash }) + } + SparseNode::Leaf { key: current_key, .. } => { + current.extend_from_slice_unchecked(current_key); + + // this leaf is being updated + if current == path { + unreachable!("we already checked leaf presence in the beginning"); + } + + // find the common prefix + let common = current.common_prefix_length(&path); + + // update existing node + let new_ext_key = current.slice(current.len() - current_key.len()..common); + *node = SparseNode::new_ext(new_ext_key); + + // create a branch node and corresponding leaves + self.nodes.insert( + current.slice(..common), + SparseNode::new_split_branch(current[common], path[common]), + ); + self.nodes.insert( + path.slice(..=common), + SparseNode::new_leaf(path.slice(common + 1..)), + ); + self.nodes.insert( + current.slice(..=common), + SparseNode::new_leaf(current.slice(common + 1..)), + ); + + break; + } + SparseNode::Extension { key, .. } => { + current.extend_from_slice(key); + if !path.starts_with(¤t) { + // find the common prefix + let common = current.common_prefix_length(&path); + + *key = current.slice(current.len() - key.len()..common); + + // create state mask for new branch node + // NOTE: this might overwrite the current extension node + let branch = SparseNode::new_split_branch(current[common], path[common]); + self.nodes.insert(current.slice(..common), branch); + + // create new leaf + let new_leaf = SparseNode::new_leaf(path.slice(common + 1..)); + self.nodes.insert(path.slice(..=common), new_leaf); + + // recreate extension to previous child if needed + let key = current.slice(common + 1..); + if !key.is_empty() { + self.nodes.insert(current.slice(..=common), SparseNode::new_ext(key)); + } + + break; + } + } + SparseNode::Branch { state_mask, .. } => { + let nibble = path[current.len()]; + current.push_unchecked(nibble); + if !state_mask.is_bit_set(nibble) { + state_mask.set_bit(nibble); + let new_leaf = SparseNode::new_leaf(path.slice(current.len()..)); + self.nodes.insert(current, new_leaf); + break; + } + } + }; + } + + Ok(()) + } + + /// Remove leaf node from the trie. + pub fn remove_leaf(&mut self, _path: Nibbles) { + unimplemented!() + } + + /// Return the root of the sparse trie. + /// Updates all remaining dirty nodes before calculating the root. + pub fn root(&mut self) -> B256 { + // take the current prefix set. + let mut prefix_set = std::mem::take(&mut self.prefix_set).freeze(); + let root_rlp = self.rlp_node(Nibbles::default(), &mut prefix_set); + if root_rlp.len() == B256::len_bytes() + 1 { + B256::from_slice(&root_rlp[1..]) + } else { + keccak256(root_rlp) + } + } + + /// Update node hashes only if their path exceeds the provided level. + pub fn update_rlp_node_level(&mut self, min_len: usize) { + let mut paths = Vec::from([Nibbles::default()]); + let mut targets = HashSet::::default(); + + while let Some(mut path) = paths.pop() { + match self.nodes.get(&path).unwrap() { + SparseNode::Empty | SparseNode::Hash(_) => {} + SparseNode::Leaf { .. 
} => { + targets.insert(path); + } + SparseNode::Extension { key, .. } => { + if path.len() >= min_len { + targets.insert(path); + } else { + path.extend_from_slice_unchecked(key); + paths.push(path); + } + } + SparseNode::Branch { state_mask, .. } => { + if path.len() >= min_len { + targets.insert(path); + } else { + for bit in CHILD_INDEX_RANGE { + if state_mask.is_bit_set(bit) { + let mut child_path = path.clone(); + child_path.push_unchecked(bit); + paths.push(child_path); + } + } + } + } + } + } + + let mut prefix_set = self.prefix_set.clone().freeze(); + for target in targets { + self.rlp_node(target, &mut prefix_set); + } + } + + fn rlp_node(&mut self, path: Nibbles, prefix_set: &mut PrefixSet) -> RlpNode { + // stack of paths we need rlp nodes for + let mut path_stack = Vec::from([path]); + // stack of rlp nodes + let mut rlp_node_stack = Vec::<(Nibbles, RlpNode)>::new(); + // reusable branch child path + let mut branch_child_buf = SmallVec::<[Nibbles; 16]>::new_const(); + // reusable branch value stack + let mut branch_value_stack_buf = SmallVec::<[RlpNode; 16]>::new_const(); + + 'main: while let Some(path) = path_stack.pop() { + let rlp_node = match self.nodes.get_mut(&path).unwrap() { + SparseNode::Empty => RlpNode::word_rlp(&EMPTY_ROOT_HASH), + SparseNode::Hash(hash) => RlpNode::word_rlp(hash), + SparseNode::Leaf { key, hash } => { + self.rlp_buf.clear(); + let mut path = path.clone(); + path.extend_from_slice_unchecked(key); + if let Some(hash) = hash.filter(|_| !prefix_set.contains(&path)) { + RlpNode::word_rlp(&hash) + } else { + let value = self.values.get(&path).unwrap(); + let rlp_node = LeafNodeRef { key, value }.rlp(&mut self.rlp_buf); + if rlp_node.len() == B256::len_bytes() + 1 { + *hash = Some(B256::from_slice(&rlp_node[1..])); + } + rlp_node + } + } + SparseNode::Extension { key, hash } => { + let mut child_path = path.clone(); + child_path.extend_from_slice_unchecked(key); + if let Some(hash) = hash.filter(|_| !prefix_set.contains(&path)) { + RlpNode::word_rlp(&hash) + } else if rlp_node_stack.last().map_or(false, |e| e.0 == child_path) { + let (_, child) = rlp_node_stack.pop().unwrap(); + self.rlp_buf.clear(); + let rlp_node = ExtensionNodeRef::new(key, &child).rlp(&mut self.rlp_buf); + if rlp_node.len() == B256::len_bytes() + 1 { + *hash = Some(B256::from_slice(&rlp_node[1..])); + } + rlp_node + } else { + path_stack.extend([path, child_path]); // need to get rlp node for child first + continue + } + } + SparseNode::Branch { state_mask, hash } => { + if let Some(hash) = hash.filter(|_| !prefix_set.contains(&path)) { + rlp_node_stack.push((path, RlpNode::word_rlp(&hash))); + continue + } + + branch_child_buf.clear(); + for bit in CHILD_INDEX_RANGE { + if state_mask.is_bit_set(bit) { + let mut child = path.clone(); + child.push_unchecked(bit); + branch_child_buf.push(child); + } + } + + branch_value_stack_buf.clear(); + for child_path in &branch_child_buf { + if rlp_node_stack.last().map_or(false, |e| &e.0 == child_path) { + let (_, child) = rlp_node_stack.pop().unwrap(); + branch_value_stack_buf.push(child); + } else { + debug_assert!(branch_value_stack_buf.is_empty()); + path_stack.push(path); + path_stack.extend(branch_child_buf.drain(..)); + continue 'main + } + } + + self.rlp_buf.clear(); + let rlp_node = BranchNodeRef::new(&branch_value_stack_buf, *state_mask) + .rlp(&mut self.rlp_buf); + if rlp_node.len() == B256::len_bytes() + 1 { + *hash = Some(B256::from_slice(&rlp_node[1..])); + } + rlp_node + } + }; + rlp_node_stack.push((path, rlp_node)); + } + + 
rlp_node_stack.pop().unwrap().1 + } +} + +/// Enum representing trie nodes in sparse trie. +#[derive(PartialEq, Eq, Clone, Debug)] +pub enum SparseNode { + /// Empty trie node. + Empty, + /// The hash of the node that was not revealed. + Hash(B256), + /// Sparse leaf node with remaining key suffix. + Leaf { + /// Remaining key suffix for the leaf node. + key: Nibbles, + /// Pre-computed hash of the sparse node. + /// Can be reused unless this trie path has been updated. + hash: Option, + }, + /// Sparse extension node with key. + Extension { + /// The key slice stored by this extension node. + key: Nibbles, + /// Pre-computed hash of the sparse node. + /// Can be reused unless this trie path has been updated. + hash: Option, + }, + /// Sparse branch node with state mask. + Branch { + /// The bitmask representing children present in the branch node. + state_mask: TrieMask, + /// Pre-computed hash of the sparse node. + /// Can be reused unless this trie path has been updated. + hash: Option, + }, +} + +impl SparseNode { + /// Create new sparse node from [`TrieNode`]. + pub fn from_node(node: TrieNode) -> Self { + match node { + TrieNode::EmptyRoot => Self::Empty, + TrieNode::Leaf(leaf) => Self::new_leaf(leaf.key), + TrieNode::Extension(ext) => Self::new_ext(ext.key), + TrieNode::Branch(branch) => Self::new_branch(branch.state_mask), + } + } + + /// Create new [`SparseNode::Branch`] from state mask. + pub const fn new_branch(state_mask: TrieMask) -> Self { + Self::Branch { state_mask, hash: None } + } + + /// Create new [`SparseNode::Branch`] with two bits set. + pub const fn new_split_branch(bit_a: u8, bit_b: u8) -> Self { + let state_mask = TrieMask::new( + // set bits for both children + (1u16 << bit_a) | (1u16 << bit_b), + ); + Self::Branch { state_mask, hash: None } + } + + /// Create new [`SparseNode::Extension`] from the key slice. + pub const fn new_ext(key: Nibbles) -> Self { + Self::Extension { key, hash: None } + } + + /// Create new [`SparseNode::Leaf`] from leaf key and value. 
+ pub const fn new_leaf(key: Nibbles) -> Self { + Self::Leaf { key, hash: None } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::U256; + use itertools::Itertools; + use proptest::prelude::*; + use reth_trie_common::HashBuilder; + + #[test] + fn sparse_trie_is_blind() { + assert!(SparseTrie::default().is_blind()); + assert!(!SparseTrie::revealed_empty().is_blind()); + } + + #[test] + fn sparse_trie_empty_update_one() { + let path = Nibbles::unpack(B256::with_last_byte(42)); + let value = alloy_rlp::encode_fixed_size(&U256::from(1)); + + let mut hash_builder = HashBuilder::default(); + hash_builder.add_leaf(path.clone(), &value); + let expected = hash_builder.root(); + + let mut sparse = RevealedSparseTrie::default(); + sparse.update_leaf(path, value.to_vec()).unwrap(); + let root = sparse.root(); + assert_eq!(root, expected); + } + + #[test] + fn sparse_trie_empty_update_multiple_lower_nibbles() { + let paths = (0..=16).map(|b| Nibbles::unpack(B256::with_last_byte(b))).collect::>(); + let value = alloy_rlp::encode_fixed_size(&U256::from(1)); + + let mut hash_builder = HashBuilder::default(); + for path in &paths { + hash_builder.add_leaf(path.clone(), &value); + } + let expected = hash_builder.root(); + + let mut sparse = RevealedSparseTrie::default(); + for path in &paths { + sparse.update_leaf(path.clone(), value.to_vec()).unwrap(); + } + let root = sparse.root(); + assert_eq!(root, expected); + } + + #[test] + fn sparse_trie_empty_update_multiple_upper_nibbles() { + let paths = (239..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::>(); + let value = alloy_rlp::encode_fixed_size(&U256::from(1)); + + let mut hash_builder = HashBuilder::default(); + for path in &paths { + hash_builder.add_leaf(path.clone(), &value); + } + let expected = hash_builder.root(); + + let mut sparse = RevealedSparseTrie::default(); + for path in &paths { + sparse.update_leaf(path.clone(), value.to_vec()).unwrap(); + } + let root = sparse.root(); + assert_eq!(root, expected); + } + + #[test] + fn sparse_trie_empty_update_multiple() { + let paths = (0..=255) + .map(|b| { + Nibbles::unpack(if b % 2 == 0 { + B256::repeat_byte(b) + } else { + B256::with_last_byte(b) + }) + }) + .collect::>(); + let value = alloy_rlp::encode_fixed_size(&U256::from(1)); + + let mut hash_builder = HashBuilder::default(); + for path in paths.iter().sorted_unstable_by_key(|key| *key) { + hash_builder.add_leaf(path.clone(), &value); + } + let expected = hash_builder.root(); + + let mut sparse = RevealedSparseTrie::default(); + for path in &paths { + sparse.update_leaf(path.clone(), value.to_vec()).unwrap(); + } + let root = sparse.root(); + assert_eq!(root, expected); + } + + #[test] + fn sparse_trie_empty_update_repeated() { + let paths = (0..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::>(); + let old_value = alloy_rlp::encode_fixed_size(&U256::from(1)); + let new_value = alloy_rlp::encode_fixed_size(&U256::from(2)); + + let mut hash_builder = HashBuilder::default(); + for path in paths.iter().sorted_unstable_by_key(|key| *key) { + hash_builder.add_leaf(path.clone(), &old_value); + } + let expected = hash_builder.root(); + + let mut sparse = RevealedSparseTrie::default(); + for path in &paths { + sparse.update_leaf(path.clone(), old_value.to_vec()).unwrap(); + } + let root = sparse.root(); + assert_eq!(root, expected); + + let mut hash_builder = HashBuilder::default(); + for path in paths.iter().sorted_unstable_by_key(|key| *key) { + hash_builder.add_leaf(path.clone(), &new_value); 
+ } + let expected = hash_builder.root(); + + for path in &paths { + sparse.update_leaf(path.clone(), new_value.to_vec()).unwrap(); + } + let root = sparse.root(); + assert_eq!(root, expected); + } + + #[test] + fn sparse_trie_empty_update_fuzz() { + proptest!(ProptestConfig::with_cases(10), |(updates: Vec>)| { + let mut state = std::collections::BTreeMap::default(); + let mut sparse = RevealedSparseTrie::default(); + + for update in updates { + for (key, value) in &update { + sparse.update_leaf(Nibbles::unpack(key), alloy_rlp::encode_fixed_size(value).to_vec()).unwrap(); + } + let root = sparse.root(); + + state.extend(update); + let mut hash_builder = HashBuilder::default(); + for (key, value) in &state { + hash_builder.add_leaf(Nibbles::unpack(key), &alloy_rlp::encode_fixed_size(value)); + } + let expected = hash_builder.root(); + + assert_eq!(root, expected); + } + }); + } +} diff --git a/crates/trie/trie/src/prefix_set.rs b/crates/trie/trie/src/prefix_set.rs index af0fb173d..da912fbbd 100644 --- a/crates/trie/trie/src/prefix_set.rs +++ b/crates/trie/trie/src/prefix_set.rs @@ -82,7 +82,7 @@ pub struct TriePrefixSets { /// assert!(prefix_set.contains(&[0xa, 0xb])); /// assert!(prefix_set.contains(&[0xa, 0xb, 0xc])); /// ``` -#[derive(Clone, Default, Debug)] +#[derive(PartialEq, Eq, Clone, Default, Debug)] pub struct PrefixSetMut { /// Flag indicating that any entry should be considered changed. /// If set, the keys will be discarded. From 6fb271036dbaf0ab9c9215959baec5833ac1cc88 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Tue, 15 Oct 2024 18:51:40 +0400 Subject: [PATCH 059/425] feat: move RPC launch to add-ons (#11532) --- Cargo.lock | 21 +- crates/e2e-test-utils/src/lib.rs | 14 +- crates/e2e-test-utils/src/node.rs | 8 +- crates/ethereum/node/src/node.rs | 27 +- crates/ethereum/node/tests/e2e/dev.rs | 8 +- crates/exex/test-utils/src/lib.rs | 4 +- crates/node/api/Cargo.toml | 7 + crates/node/api/src/node.rs | 75 +++-- crates/node/builder/src/builder/add_ons.rs | 16 +- crates/node/builder/src/builder/mod.rs | 30 +- crates/node/builder/src/builder/states.rs | 109 +++---- crates/node/builder/src/handle.rs | 10 +- crates/node/builder/src/launch/engine.rs | 62 +--- crates/node/builder/src/launch/mod.rs | 65 +--- crates/node/builder/src/lib.rs | 5 +- crates/node/builder/src/node.rs | 48 ++- crates/node/builder/src/rpc.rs | 318 ++++++++++++++------ crates/optimism/bin/src/main.rs | 29 +- crates/optimism/node/src/node.rs | 45 ++- crates/optimism/node/tests/e2e/utils.rs | 6 +- crates/optimism/rpc/src/eth/mod.rs | 22 +- crates/optimism/rpc/src/eth/transaction.rs | 10 +- crates/rpc/rpc-builder/src/eth.rs | 8 +- crates/rpc/rpc-builder/src/lib.rs | 1 + crates/rpc/rpc-eth-types/src/builder/ctx.rs | 62 +--- crates/rpc/rpc/src/eth/core.rs | 24 +- examples/custom-engine-types/src/main.rs | 7 +- 27 files changed, 535 insertions(+), 506 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7ee37e080..59e8f2b27 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4464,7 +4464,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -7787,9 +7787,14 @@ dependencies = [ name = "reth-node-api" version = "1.1.0" dependencies = [ + "alloy-rpc-types-engine", + "eyre", + "reth-beacon-consensus", + "reth-consensus", "reth-engine-primitives", "reth-evm", "reth-network-api", + "reth-node-core", "reth-node-types", 
"reth-payload-builder", "reth-payload-primitives", @@ -9577,9 +9582,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e696e35370c65c9c541198af4543ccd580cf17fc25d8e05c5a242b202488c55" +checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" [[package]] name = "rustls-platform-verifier" @@ -9621,9 +9626,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" +checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" [[package]] name = "rusty-fork" @@ -9660,9 +9665,9 @@ dependencies = [ [[package]] name = "scc" -version = "2.2.1" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "553f8299af7450cda9a52d3a370199904e7a46b5ffd1bef187c4a6af3bb6db69" +checksum = "f2c1f7fc6deb21665a9060dfc7d271be784669295a31babdcd4dd2c79ae8cbfb" dependencies = [ "sdd", ] @@ -11367,7 +11372,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] diff --git a/crates/e2e-test-utils/src/lib.rs b/crates/e2e-test-utils/src/lib.rs index 998b48e70..48e56910e 100644 --- a/crates/e2e-test-utils/src/lib.rs +++ b/crates/e2e-test-utils/src/lib.rs @@ -7,15 +7,13 @@ use reth::{ args::{DiscoveryArgs, NetworkArgs, RpcServerArgs}, builder::{NodeBuilder, NodeConfig, NodeHandle}, network::PeersHandleProvider, - rpc::api::eth::{helpers::AddDevSigners, FullEthApiServer}, tasks::TaskManager, }; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_db::{test_utils::TempDatabase, DatabaseEnv}; use reth_node_builder::{ - components::NodeComponentsBuilder, rpc::EthApiBuilderProvider, FullNodeTypesAdapter, Node, - NodeAdapter, NodeAddOns, NodeComponents, NodeTypesWithDBAdapter, NodeTypesWithEngine, - RethFullAdapter, + components::NodeComponentsBuilder, rpc::RethRpcAddOns, FullNodeTypesAdapter, Node, NodeAdapter, + NodeComponents, NodeTypesWithDBAdapter, NodeTypesWithEngine, RethFullAdapter, }; use reth_provider::providers::BlockchainProvider; use tracing::{span, Level}; @@ -56,10 +54,7 @@ where TmpNodeAdapter, Components: NodeComponents, Network: PeersHandleProvider>, >, - N::AddOns: NodeAddOns< - Adapter, - EthApi: FullEthApiServer + AddDevSigners + EthApiBuilderProvider>, - >, + N::AddOns: RethRpcAddOns>, { let tasks = TaskManager::current(); let exec = tasks.executor(); @@ -115,7 +110,8 @@ type TmpNodeAdapter = FullNodeTypesAdapter< BlockchainProvider>, >; -type Adapter = NodeAdapter< +/// Type alias for a `NodeAdapter` +pub type Adapter = NodeAdapter< RethFullAdapter, <>>::ComponentsBuilder as NodeComponentsBuilder< RethFullAdapter, diff --git a/crates/e2e-test-utils/src/node.rs b/crates/e2e-test-utils/src/node.rs index 2ea39348f..c22913ba2 100644 --- a/crates/e2e-test-utils/src/node.rs +++ b/crates/e2e-test-utils/src/node.rs @@ -18,7 +18,7 @@ use reth::{ }, }; use reth_chainspec::EthereumHardforks; -use reth_node_builder::{NodeAddOns, NodeTypesWithEngine}; +use reth_node_builder::{rpc::RethRpcAddOns, NodeTypesWithEngine}; use reth_stages_types::StageId; use tokio_stream::StreamExt; @@ -32,7 +32,7 @@ use crate::{ pub struct NodeTestContext where Node: 
FullNodeComponents, - AddOns: NodeAddOns, + AddOns: RethRpcAddOns, { /// The core structure representing the full node. pub inner: FullNode, @@ -52,7 +52,7 @@ where Node: FullNodeComponents, Node::Types: NodeTypesWithEngine, Node::Network: PeersHandleProvider, - AddOns: NodeAddOns, + AddOns: RethRpcAddOns, { /// Creates a new test node pub async fn new(node: FullNode) -> eyre::Result { @@ -67,7 +67,7 @@ where canonical_stream: node.provider.canonical_state_stream(), _marker: PhantomData::, }, - rpc: RpcTestContext { inner: node.rpc_registry }, + rpc: RpcTestContext { inner: node.add_ons_handle.rpc_registry }, }) } diff --git a/crates/ethereum/node/src/node.rs b/crates/ethereum/node/src/node.rs index f17658bb3..82f313fbb 100644 --- a/crates/ethereum/node/src/node.rs +++ b/crates/ethereum/node/src/node.rs @@ -11,14 +11,15 @@ use reth_ethereum_engine_primitives::{ }; use reth_evm_ethereum::execute::EthExecutorProvider; use reth_network::NetworkHandle; -use reth_node_api::{ConfigureEvm, EngineValidator, FullNodeComponents, NodeAddOns}; +use reth_node_api::{ConfigureEvm, EngineValidator, FullNodeComponents, NodeTypesWithDB}; use reth_node_builder::{ components::{ ComponentsBuilder, ConsensusBuilder, EngineValidatorBuilder, ExecutorBuilder, NetworkBuilder, PayloadServiceBuilder, PoolBuilder, }, node::{FullNodeTypes, NodeTypes, NodeTypesWithEngine}, - BuilderContext, Node, PayloadBuilderConfig, PayloadTypes, + rpc::RpcAddOns, + BuilderContext, Node, NodeAdapter, NodeComponentsBuilder, PayloadBuilderConfig, PayloadTypes, }; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; use reth_primitives::Header; @@ -77,17 +78,19 @@ impl NodeTypesWithEngine for EthereumNode { } /// Add-ons w.r.t. l1 ethereum. -#[derive(Debug, Clone, Default)] -#[non_exhaustive] -pub struct EthereumAddOns; - -impl NodeAddOns for EthereumAddOns { - type EthApi = EthApi; -} +pub type EthereumAddOns = RpcAddOns< + N, + EthApi< + ::Provider, + ::Pool, + NetworkHandle, + ::Evm, + >, +>; impl Node for EthereumNode where - Types: NodeTypesWithEngine, + Types: NodeTypesWithDB + NodeTypesWithEngine, N: FullNodeTypes, { type ComponentsBuilder = ComponentsBuilder< @@ -100,7 +103,9 @@ where EthereumEngineValidatorBuilder, >; - type AddOns = EthereumAddOns; + type AddOns = EthereumAddOns< + NodeAdapter>::Components>, + >; fn components_builder(&self) -> Self::ComponentsBuilder { Self::components() diff --git a/crates/ethereum/node/tests/e2e/dev.rs b/crates/ethereum/node/tests/e2e/dev.rs index 6b4733b6f..cad2fb34e 100644 --- a/crates/ethereum/node/tests/e2e/dev.rs +++ b/crates/ethereum/node/tests/e2e/dev.rs @@ -6,8 +6,10 @@ use futures::StreamExt; use reth::{args::DevArgs, core::rpc::eth::helpers::EthTransactions}; use reth_chainspec::ChainSpec; use reth_e2e_test_utils::setup; -use reth_node_api::{FullNodeComponents, NodeAddOns}; -use reth_node_builder::{EngineNodeLauncher, FullNode, NodeBuilder, NodeConfig, NodeHandle}; +use reth_node_api::FullNodeComponents; +use reth_node_builder::{ + rpc::RethRpcAddOns, EngineNodeLauncher, FullNode, NodeBuilder, NodeConfig, NodeHandle, +}; use reth_node_ethereum::{node::EthereumAddOns, EthereumNode}; use reth_provider::{providers::BlockchainProvider2, CanonStateSubscriptions}; use reth_tasks::TaskManager; @@ -53,7 +55,7 @@ async fn can_run_dev_node_new_engine() -> eyre::Result<()> { async fn assert_chain_advances(node: FullNode) where N: FullNodeComponents, - AddOns: NodeAddOns, + AddOns: RethRpcAddOns, { let mut notifications = node.provider.canonical_state_stream(); diff --git 
a/crates/exex/test-utils/src/lib.rs b/crates/exex/test-utils/src/lib.rs index e219f5503..9e17013c4 100644 --- a/crates/exex/test-utils/src/lib.rs +++ b/crates/exex/test-utils/src/lib.rs @@ -142,7 +142,9 @@ where TestConsensusBuilder, EthereumEngineValidatorBuilder, >; - type AddOns = EthereumAddOns; + type AddOns = EthereumAddOns< + NodeAdapter>::Components>, + >; fn components_builder(&self) -> Self::ComponentsBuilder { ComponentsBuilder::default() diff --git a/crates/node/api/Cargo.toml b/crates/node/api/Cargo.toml index e7685acc8..c2c3eb463 100644 --- a/crates/node/api/Cargo.toml +++ b/crates/node/api/Cargo.toml @@ -12,6 +12,8 @@ workspace = true [dependencies] # reth +reth-beacon-consensus.workspace = true +reth-consensus.workspace = true reth-evm.workspace = true reth-provider.workspace = true reth-engine-primitives.workspace = true @@ -23,3 +25,8 @@ reth-rpc-eth-api.workspace = true reth-network-api.workspace = true reth-node-types.workspace = true reth-primitives.workspace = true +reth-node-core.workspace = true + +alloy-rpc-types-engine.workspace = true + +eyre.workspace = true \ No newline at end of file diff --git a/crates/node/api/src/node.rs b/crates/node/api/src/node.rs index ce6b16c8f..40c2a3a60 100644 --- a/crates/node/api/src/node.rs +++ b/crates/node/api/src/node.rs @@ -1,14 +1,18 @@ //! Traits for configuring a node. -use std::marker::PhantomData; +use std::{future::Future, marker::PhantomData}; +use alloy_rpc_types_engine::JwtSecret; +use reth_beacon_consensus::BeaconConsensusEngineHandle; +use reth_consensus::Consensus; +use reth_engine_primitives::EngineValidator; use reth_evm::execute::BlockExecutorProvider; use reth_network_api::FullNetwork; -use reth_node_types::{NodeTypesWithDB, NodeTypesWithEngine}; +use reth_node_core::node_config::NodeConfig; +use reth_node_types::{NodeTypes, NodeTypesWithDB, NodeTypesWithEngine}; use reth_payload_builder::PayloadBuilderHandle; use reth_primitives::Header; use reth_provider::FullProvider; -use reth_rpc_eth_api::EthApiTypes; use reth_tasks::TaskExecutor; use reth_transaction_pool::TransactionPool; @@ -54,9 +58,15 @@ pub trait FullNodeComponents: FullNodeTypes + Clone + 'static { /// The type that knows how to execute blocks. type Executor: BlockExecutorProvider; + /// The consensus type of the node. + type Consensus: Consensus + Clone + Unpin + 'static; + /// Network API. type Network: FullNetwork; + /// Validator for the engine API. + type EngineValidator: EngineValidator<::Engine>; + /// Returns the transaction pool of the node. fn pool(&self) -> &Self::Pool; @@ -66,8 +76,8 @@ pub trait FullNodeComponents: FullNodeTypes + Clone + 'static { /// Returns the node's executor type. fn block_executor(&self) -> &Self::Executor; - /// Returns the provider of the node. - fn provider(&self) -> &Self::Provider; + /// Returns the node's consensus type. + fn consensus(&self) -> &Self::Consensus; /// Returns the handle to the network fn network(&self) -> &Self::Network; @@ -77,37 +87,46 @@ pub trait FullNodeComponents: FullNodeTypes + Clone + 'static { &self, ) -> &PayloadBuilderHandle<::Engine>; + /// Returns the engine validator. + fn engine_validator(&self) -> &Self::EngineValidator; + + /// Returns the provider of the node. + fn provider(&self) -> &Self::Provider; + /// Returns handle to runtime. fn task_executor(&self) -> &TaskExecutor; } -/// Customizable node add-on types. 
-pub trait NodeAddOns: Send + Sync + Unpin + Clone + 'static { - /// The core `eth` namespace API type to install on the RPC server (see - /// `reth_rpc_eth_api::EthApiServer`). - type EthApi: EthApiTypes + Send + Clone; -} - -impl NodeAddOns for () { - type EthApi = (); +/// Context passed to [`NodeAddOns::launch_add_ons`], +#[derive(Debug)] +pub struct AddOnsContext<'a, N: FullNodeComponents> { + /// Node with all configured components. + pub node: &'a N, + /// Node configuration. + pub config: &'a NodeConfig<::ChainSpec>, + /// Handle to the beacon consensus engine. + pub beacon_engine_handle: + &'a BeaconConsensusEngineHandle<::Engine>, + /// JWT secret for the node. + pub jwt_secret: &'a JwtSecret, } -/// Returns the builder for type. -pub trait BuilderProvider: Send { - /// Context required to build type. - type Ctx<'a>; - - /// Returns builder for type. - #[allow(clippy::type_complexity)] - fn builder() -> Box Fn(Self::Ctx<'a>) -> Self + Send>; +/// Customizable node add-on types. +pub trait NodeAddOns: Send { + /// Handle to add-ons. + type Handle: Send + Sync + Clone; + + /// Configures and launches the add-ons. + fn launch_add_ons( + self, + ctx: AddOnsContext<'_, N>, + ) -> impl Future> + Send; } -impl BuilderProvider for () { - type Ctx<'a> = (); +impl NodeAddOns for () { + type Handle = (); - fn builder() -> Box Fn(Self::Ctx<'a>) -> Self + Send> { - Box::new(noop_builder) + async fn launch_add_ons(self, _components: AddOnsContext<'_, N>) -> eyre::Result { + Ok(()) } } - -const fn noop_builder(_: ()) {} diff --git a/crates/node/builder/src/builder/add_ons.rs b/crates/node/builder/src/builder/add_ons.rs index 26d7553bb..7be0411b2 100644 --- a/crates/node/builder/src/builder/add_ons.rs +++ b/crates/node/builder/src/builder/add_ons.rs @@ -1,8 +1,8 @@ //! Node add-ons. Depend on core [`NodeComponents`](crate::NodeComponents). -use reth_node_api::{EthApiTypes, FullNodeComponents, NodeAddOns}; +use reth_node_api::{FullNodeComponents, NodeAddOns}; -use crate::{exex::BoxedLaunchExEx, hooks::NodeHooks, rpc::RpcHooks}; +use crate::{exex::BoxedLaunchExEx, hooks::NodeHooks}; /// Additional node extensions. /// @@ -12,16 +12,6 @@ pub struct AddOns> { pub hooks: NodeHooks, /// The `ExExs` (execution extensions) of the node. pub exexs: Vec<(String, Box>)>, - /// Additional RPC add-ons. - pub rpc: RpcAddOns, /// Additional captured addons. - pub addons: AddOns, -} - -/// Captures node specific addons that can be installed on top of the type configured node and are -/// required for launching the node, such as RPC. -#[derive(Default)] -pub struct RpcAddOns { - /// Additional RPC hooks. 
- pub hooks: RpcHooks, + pub add_ons: AddOns, } diff --git a/crates/node/builder/src/builder/mod.rs b/crates/node/builder/src/builder/mod.rs index fb94413fe..82d8d96f6 100644 --- a/crates/node/builder/src/builder/mod.rs +++ b/crates/node/builder/src/builder/mod.rs @@ -13,7 +13,7 @@ use crate::{ common::WithConfigs, components::NodeComponentsBuilder, node::FullNode, - rpc::{EthApiBuilderProvider, RethRpcServerHandles, RpcContext}, + rpc::{RethRpcAddOns, RethRpcServerHandles, RpcContext}, DefaultNodeLauncher, LaunchNode, Node, NodeHandle, }; use futures::Future; @@ -37,7 +37,6 @@ use reth_node_core::{ dirs::{ChainPath, DataDirPath}, node_config::NodeConfig, primitives::Head, - rpc::eth::{helpers::AddDevSigners, FullEthApiServer}, }; use reth_primitives::revm_primitives::EnvKzgSettings; use reth_provider::{providers::BlockchainProvider, ChainSpecProvider, FullProvider}; @@ -358,19 +357,11 @@ where > where N: Node, ChainSpec = ChainSpec>, - N::AddOns: NodeAddOns< + N::AddOns: RethRpcAddOns< NodeAdapter< RethFullAdapter, >>::Components, >, - EthApi: EthApiBuilderProvider< - NodeAdapter< - RethFullAdapter, - >>::Components, - > - > - + FullEthApiServer - + AddDevSigners >, { self.node(node).launch().await @@ -418,7 +409,7 @@ impl WithLaunchContext> where T: FullNodeTypes, CB: NodeComponentsBuilder, - AO: NodeAddOns, EthApi: FullEthApiServer + AddDevSigners>, + AO: RethRpcAddOns>, { /// Returns a reference to the node builder's config. pub const fn config(&self) -> &NodeConfig<::ChainSpec> { @@ -466,6 +457,14 @@ where Self { builder: self.builder.on_node_started(hook), task_executor: self.task_executor } } + /// Modifies the addons with the given closure. + pub fn map_add_ons(self, f: F) -> Self + where + F: FnOnce(AO) -> AO, + { + Self { builder: self.builder.map_add_ons(f), task_executor: self.task_executor } + } + /// Sets the hook that is run once the rpc server is started. pub fn on_rpc_started(self, hook: F) -> Self where @@ -553,12 +552,7 @@ where DB: Database + DatabaseMetrics + DatabaseMetadata + Clone + Unpin + 'static, T: NodeTypesWithEngine, CB: NodeComponentsBuilder>, - AO: NodeAddOns< - NodeAdapter, CB::Components>, - EthApi: EthApiBuilderProvider, CB::Components>> - + FullEthApiServer - + AddDevSigners, - >, + AO: RethRpcAddOns, CB::Components>>, { /// Launches the node with the [`DefaultNodeLauncher`] that sets up engine API consensus and rpc pub async fn launch( diff --git a/crates/node/builder/src/builder/states.rs b/crates/node/builder/src/builder/states.rs index 80930ef74..c4da466f2 100644 --- a/crates/node/builder/src/builder/states.rs +++ b/crates/node/builder/src/builder/states.rs @@ -11,10 +11,7 @@ use reth_exex::ExExContext; use reth_node_api::{ FullNodeComponents, FullNodeTypes, NodeAddOns, NodeTypes, NodeTypesWithDB, NodeTypesWithEngine, }; -use reth_node_core::{ - node_config::NodeConfig, - rpc::eth::{helpers::AddDevSigners, FullEthApiServer}, -}; +use reth_node_core::node_config::NodeConfig; use reth_payload_builder::PayloadBuilderHandle; use reth_tasks::TaskExecutor; @@ -22,8 +19,8 @@ use crate::{ components::{NodeComponents, NodeComponentsBuilder}, hooks::NodeHooks, launch::LaunchNode, - rpc::{EthApiBuilderProvider, RethRpcServerHandles, RpcContext, RpcHooks}, - AddOns, FullNode, RpcAddOns, + rpc::{RethRpcAddOns, RethRpcServerHandles, RpcContext}, + AddOns, FullNode, }; /// A node builder that also has the configured types. 
@@ -54,12 +51,7 @@ impl NodeBuilderWithTypes { config, adapter, components_builder, - add_ons: AddOns { - hooks: NodeHooks::default(), - rpc: RpcAddOns { hooks: RpcHooks::default() }, - exexs: Vec::new(), - addons: (), - }, + add_ons: AddOns { hooks: NodeHooks::default(), exexs: Vec::new(), add_ons: () }, } } } @@ -83,8 +75,8 @@ impl fmt::Debug for NodeTypesAdapter { } } -/// Container for the node's types and the components and other internals that can be used by addons -/// of the node. +/// Container for the node's types and the components and other internals that can be used by +/// addons of the node. pub struct NodeAdapter> { /// The components of the node. pub components: C, @@ -104,6 +96,8 @@ impl> FullNodeComponents for NodeAdapter< type Evm = C::Evm; type Executor = C::Executor; type Network = C::Network; + type Consensus = C::Consensus; + type EngineValidator = C::EngineValidator; fn pool(&self) -> &Self::Pool { self.components.pool() @@ -132,6 +126,14 @@ impl> FullNodeComponents for NodeAdapter< fn task_executor(&self) -> &TaskExecutor { &self.task_executor } + + fn consensus(&self) -> &Self::Consensus { + self.components.consensus() + } + + fn engine_validator(&self) -> &Self::EngineValidator { + self.components.engine_validator() + } } impl> Clone for NodeAdapter { @@ -169,7 +171,7 @@ where { /// Advances the state of the node builder to the next state where all customizable /// [`NodeAddOns`] types are configured. - pub fn with_add_ons(self, addons: AO) -> NodeBuilderWithComponents + pub fn with_add_ons(self, add_ons: AO) -> NodeBuilderWithComponents where AO: NodeAddOns>, { @@ -179,12 +181,7 @@ where config, adapter, components_builder, - add_ons: AddOns { - hooks: NodeHooks::default(), - rpc: RpcAddOns { hooks: RpcHooks::default() }, - exexs: Vec::new(), - addons, - }, + add_ons: AddOns { hooks: NodeHooks::default(), exexs: Vec::new(), add_ons }, } } } @@ -215,31 +212,6 @@ where self } - /// Sets the hook that is run once the rpc server is started. - pub fn on_rpc_started(mut self, hook: F) -> Self - where - F: FnOnce( - RpcContext<'_, NodeAdapter, AO::EthApi>, - RethRpcServerHandles, - ) -> eyre::Result<()> - + Send - + 'static, - { - self.add_ons.rpc.hooks.set_on_rpc_started(hook); - self - } - - /// Sets the hook that is run to configure the rpc modules. - pub fn extend_rpc_modules(mut self, hook: F) -> Self - where - F: FnOnce(RpcContext<'_, NodeAdapter, AO::EthApi>) -> eyre::Result<()> - + Send - + 'static, - { - self.add_ons.rpc.hooks.set_extend_rpc_modules(hook); - self - } - /// Installs an `ExEx` (Execution Extension) in the node. /// /// # Note @@ -269,18 +241,22 @@ where pub const fn check_launch(self) -> Self { self } + + /// Modifies the addons with the given closure. + pub fn map_add_ons(mut self, f: F) -> Self + where + F: FnOnce(AO) -> AO, + { + self.add_ons.add_ons = f(self.add_ons.add_ons); + self + } } impl NodeBuilderWithComponents where T: FullNodeTypes, CB: NodeComponentsBuilder, - AO: NodeAddOns< - NodeAdapter, - EthApi: EthApiBuilderProvider> - + FullEthApiServer - + AddDevSigners, - >, + AO: RethRpcAddOns>, { /// Launches the node with the given launcher. pub async fn launch_with(self, launcher: L) -> eyre::Result @@ -289,4 +265,33 @@ where { launcher.launch_node(self).await } + + /// Sets the hook that is run once the rpc server is started. 
+ pub fn on_rpc_started(self, hook: F) -> Self + where + F: FnOnce( + RpcContext<'_, NodeAdapter, AO::EthApi>, + RethRpcServerHandles, + ) -> eyre::Result<()> + + Send + + 'static, + { + self.map_add_ons(|mut add_ons| { + add_ons.hooks_mut().set_on_rpc_started(hook); + add_ons + }) + } + + /// Sets the hook that is run to configure the rpc modules. + pub fn extend_rpc_modules(self, hook: F) -> Self + where + F: FnOnce(RpcContext<'_, NodeAdapter, AO::EthApi>) -> eyre::Result<()> + + Send + + 'static, + { + self.map_add_ons(|mut add_ons| { + add_ons.hooks_mut().set_extend_rpc_modules(hook); + add_ons + }) + } } diff --git a/crates/node/builder/src/handle.rs b/crates/node/builder/src/handle.rs index c81aa9420..2997a8687 100644 --- a/crates/node/builder/src/handle.rs +++ b/crates/node/builder/src/handle.rs @@ -1,13 +1,13 @@ use std::fmt; -use reth_node_api::{FullNodeComponents, NodeAddOns}; +use reth_node_api::FullNodeComponents; use reth_node_core::exit::NodeExitFuture; -use crate::node::FullNode; +use crate::{node::FullNode, rpc::RethRpcAddOns}; /// A Handle to the launched node. #[must_use = "Needs to await the node exit future"] -pub struct NodeHandle> { +pub struct NodeHandle> { /// All node components. pub node: FullNode, /// The exit future of the node. @@ -17,7 +17,7 @@ pub struct NodeHandle> { impl NodeHandle where Node: FullNodeComponents, - AddOns: NodeAddOns, + AddOns: RethRpcAddOns, { /// Waits for the node to exit, if it was configured to exit. pub async fn wait_for_node_exit(self) -> eyre::Result<()> { @@ -28,7 +28,7 @@ where impl fmt::Debug for NodeHandle where Node: FullNodeComponents, - AddOns: NodeAddOns, + AddOns: RethRpcAddOns, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("NodeHandle") diff --git a/crates/node/builder/src/launch/engine.rs b/crates/node/builder/src/launch/engine.rs index f9e26f202..3de651cdc 100644 --- a/crates/node/builder/src/launch/engine.rs +++ b/crates/node/builder/src/launch/engine.rs @@ -1,6 +1,5 @@ //! Engine node related functionality. 
-use alloy_rpc_types::engine::ClientVersionV1; use futures::{future::Either, stream, stream_select, StreamExt}; use reth_beacon_consensus::{ hooks::{EngineHooks, StaticFileHook}, @@ -20,21 +19,17 @@ use reth_exex::ExExManagerHandle; use reth_network::{NetworkSyncUpdater, SyncState}; use reth_network_api::{BlockDownloaderProvider, NetworkEventListenerProvider}; use reth_node_api::{ - BuiltPayload, FullNodeTypes, NodeAddOns, NodeTypesWithEngine, PayloadAttributesBuilder, - PayloadTypes, + BuiltPayload, FullNodeTypes, NodeTypesWithEngine, PayloadAttributesBuilder, PayloadTypes, }; use reth_node_core::{ dirs::{ChainPath, DataDirPath}, exit::NodeExitFuture, primitives::Head, - rpc::eth::{helpers::AddDevSigners, FullEthApiServer}, - version::{CARGO_PKG_VERSION, CLIENT_CODE, NAME_CLIENT, VERGEN_GIT_SHA}, }; use reth_node_events::{cl::ConsensusLayerHealthEvents, node}; use reth_payload_primitives::PayloadBuilder; use reth_primitives::EthereumHardforks; use reth_provider::providers::{BlockchainProvider2, ProviderNodeTypes}; -use reth_rpc_engine_api::{capabilities::EngineCapabilities, EngineApi}; use reth_tasks::TaskExecutor; use reth_tokio_util::EventSender; use reth_tracing::tracing::{debug, error, info}; @@ -45,9 +40,9 @@ use tokio_stream::wrappers::UnboundedReceiverStream; use crate::{ common::{Attached, LaunchContextWith, WithConfigs}, hooks::NodeHooks, - rpc::{launch_rpc_servers, EthApiBuilderProvider}, + rpc::{RethRpcAddOns, RpcHandle}, setup::build_networked_pipeline, - AddOns, ExExLauncher, FullNode, LaunchContext, LaunchNode, NodeAdapter, + AddOns, AddOnsContext, ExExLauncher, FullNode, LaunchContext, LaunchNode, NodeAdapter, NodeBuilderWithComponents, NodeComponents, NodeComponentsBuilder, NodeHandle, NodeTypesAdapter, }; @@ -78,12 +73,7 @@ where Types: ProviderNodeTypes + NodeTypesWithEngine, T: FullNodeTypes>, CB: NodeComponentsBuilder, - AO: NodeAddOns< - NodeAdapter, - EthApi: EthApiBuilderProvider> - + FullEthApiServer - + AddDevSigners, - >, + AO: RethRpcAddOns>, LocalPayloadAttributesBuilder: PayloadAttributesBuilder< <::Engine as PayloadTypes>::PayloadAttributes, >, @@ -98,7 +88,7 @@ where let NodeBuilderWithComponents { adapter: NodeTypesAdapter { database }, components_builder, - add_ons: AddOns { hooks, rpc, exexs: installed_exex, .. }, + add_ons: AddOns { hooks, exexs: installed_exex, add_ons }, config, } = target; let NodeHooks { on_component_initialized, on_node_started, .. 
} = hooks; @@ -292,37 +282,18 @@ where ), ); - let client = ClientVersionV1 { - code: CLIENT_CODE, - name: NAME_CLIENT.to_string(), - version: CARGO_PKG_VERSION.to_string(), - commit: VERGEN_GIT_SHA.to_string(), - }; - let engine_api = EngineApi::new( - ctx.blockchain_db().clone(), - ctx.chain_spec(), - beacon_engine_handle, - ctx.components().payload_builder().clone().into(), - ctx.components().pool().clone(), - Box::new(ctx.task_executor().clone()), - client, - EngineCapabilities::default(), - ctx.components().engine_validator().clone(), - ); - info!(target: "reth::cli", "Engine API handler initialized"); - // extract the jwt secret from the args if possible let jwt_secret = ctx.auth_jwt_secret()?; - // Start RPC servers - let (rpc_server_handles, rpc_registry) = launch_rpc_servers( - ctx.node_adapter().clone(), - engine_api, - ctx.node_config(), - jwt_secret, - rpc, - ) - .await?; + let add_ons_ctx = AddOnsContext { + node: ctx.node_adapter(), + config: ctx.node_config(), + beacon_engine_handle: &beacon_engine_handle, + jwt_secret: &jwt_secret, + }; + + let RpcHandle { rpc_server_handles, rpc_registry } = + add_ons.launch_add_ons(add_ons_ctx).await?; // TODO: migrate to devmode with https://github.com/paradigmxyz/reth/issues/10104 if let Some(maybe_custom_etherscan_url) = ctx.node_config().debug.etherscan.clone() { @@ -442,13 +413,12 @@ where provider: ctx.node_adapter().provider.clone(), payload_builder: ctx.components().payload_builder().clone(), task_executor: ctx.task_executor().clone(), - rpc_server_handles, - rpc_registry, config: ctx.node_config().clone(), data_dir: ctx.data_dir().clone(), + add_ons_handle: RpcHandle { rpc_server_handles, rpc_registry }, }; // Notify on node started - on_node_started.on_event(full_node.clone())?; + on_node_started.on_event(FullNode::clone(&full_node))?; let handle = NodeHandle { node_exit_future: NodeExitFuture::new( diff --git a/crates/node/builder/src/launch/mod.rs b/crates/node/builder/src/launch/mod.rs index 3188cde4b..36aa55541 100644 --- a/crates/node/builder/src/launch/mod.rs +++ b/crates/node/builder/src/launch/mod.rs @@ -12,7 +12,6 @@ pub use exex::ExExLauncher; use std::{future::Future, sync::Arc}; use alloy_primitives::utils::format_ether; -use alloy_rpc_types::engine::ClientVersionV1; use futures::{future::Either, stream, stream_select, StreamExt}; use reth_beacon_consensus::{ hooks::{EngineHooks, PruneHook, StaticFileHook}, @@ -25,17 +24,14 @@ use reth_engine_util::EngineMessageStreamExt; use reth_exex::ExExManagerHandle; use reth_network::{BlockDownloaderProvider, NetworkEventListenerProvider}; use reth_node_api::{ - FullNodeComponents, FullNodeTypes, NodeAddOns, NodeTypesWithDB, NodeTypesWithEngine, + AddOnsContext, FullNodeComponents, FullNodeTypes, NodeTypesWithDB, NodeTypesWithEngine, }; use reth_node_core::{ dirs::{ChainPath, DataDirPath}, exit::NodeExitFuture, - rpc::eth::{helpers::AddDevSigners, FullEthApiServer}, - version::{CARGO_PKG_VERSION, CLIENT_CODE, NAME_CLIENT, VERGEN_GIT_SHA}, }; use reth_node_events::{cl::ConsensusLayerHealthEvents, node}; use reth_provider::providers::BlockchainProvider; -use reth_rpc_engine_api::{capabilities::EngineCapabilities, EngineApi}; use reth_tasks::TaskExecutor; use reth_tracing::tracing::{debug, info}; use reth_transaction_pool::TransactionPool; @@ -47,19 +43,18 @@ use crate::{ components::{NodeComponents, NodeComponentsBuilder}, hooks::NodeHooks, node::FullNode, - rpc::EthApiBuilderProvider, + rpc::{RethRpcAddOns, RpcHandle}, AddOns, NodeBuilderWithComponents, NodeHandle, }; /// Alias 
for [`reth_rpc_eth_types::EthApiBuilderCtx`], adapter for [`FullNodeComponents`]. -pub type EthApiBuilderCtx = reth_rpc_eth_types::EthApiBuilderCtx< +pub type EthApiBuilderCtx = reth_rpc_eth_types::EthApiBuilderCtx< ::Provider, ::Pool, ::Evm, ::Network, TaskExecutor, ::Provider, - Eth, >; /// A general purpose trait that launches a new node of any kind. @@ -109,12 +104,7 @@ where Types: NodeTypesWithDB + NodeTypesWithEngine, T: FullNodeTypes, Types = Types>, CB: NodeComponentsBuilder, - AO: NodeAddOns< - NodeAdapter, - EthApi: EthApiBuilderProvider> - + FullEthApiServer - + AddDevSigners, - >, + AO: RethRpcAddOns>, { type Node = NodeHandle, AO>; @@ -126,7 +116,7 @@ where let NodeBuilderWithComponents { adapter: NodeTypesAdapter { database }, components_builder, - add_ons: AddOns { hooks, rpc, exexs: installed_exex, .. }, + add_ons: AddOns { hooks, exexs: installed_exex, add_ons }, config, } = target; let NodeHooks { on_component_initialized, on_node_started, .. } = hooks; @@ -336,42 +326,18 @@ where ), ); - let client = ClientVersionV1 { - code: CLIENT_CODE, - name: NAME_CLIENT.to_string(), - version: CARGO_PKG_VERSION.to_string(), - commit: VERGEN_GIT_SHA.to_string(), - }; - let engine_api = EngineApi::new( - ctx.blockchain_db().clone(), - ctx.chain_spec(), - beacon_engine_handle, - ctx.components().payload_builder().clone().into(), - ctx.components().pool().clone(), - Box::new(ctx.task_executor().clone()), - client, - EngineCapabilities::default(), - ctx.components().engine_validator().clone(), - ); - info!(target: "reth::cli", "Engine API handler initialized"); - // extract the jwt secret from the args if possible let jwt_secret = ctx.auth_jwt_secret()?; - // Start RPC servers - let (rpc_server_handles, rpc_registry) = crate::rpc::launch_rpc_servers( - ctx.node_adapter().clone(), - engine_api, - ctx.node_config(), - jwt_secret, - rpc, - ) - .await?; + let add_ons_ctx = AddOnsContext { + node: ctx.node_adapter(), + config: ctx.node_config(), + beacon_engine_handle: &beacon_engine_handle, + jwt_secret: &jwt_secret, + }; - // in dev mode we generate 20 random dev-signer accounts - if ctx.is_dev() { - rpc_registry.eth_api().with_dev_accounts(); - } + let RpcHandle { rpc_server_handles, rpc_registry } = + add_ons.launch_add_ons(add_ons_ctx).await?; // Run consensus engine to completion let (tx, rx) = oneshot::channel(); @@ -431,13 +397,12 @@ where provider: ctx.node_adapter().provider.clone(), payload_builder: ctx.components().payload_builder().clone(), task_executor: ctx.task_executor().clone(), - rpc_server_handles, - rpc_registry, config: ctx.node_config().clone(), data_dir: ctx.data_dir().clone(), + add_ons_handle: RpcHandle { rpc_server_handles, rpc_registry }, }; // Notify on node started - on_node_started.on_event(full_node.clone())?; + on_node_started.on_event(FullNode::clone(&full_node))?; let handle = NodeHandle { node_exit_future: NodeExitFuture::new( diff --git a/crates/node/builder/src/lib.rs b/crates/node/builder/src/lib.rs index cfe16074a..899317f15 100644 --- a/crates/node/builder/src/lib.rs +++ b/crates/node/builder/src/lib.rs @@ -20,10 +20,7 @@ pub mod components; pub use components::{NodeComponents, NodeComponentsBuilder}; mod builder; -pub use builder::{ - add_ons::{AddOns, RpcAddOns}, - *, -}; +pub use builder::{add_ons::AddOns, *}; mod launch; pub use launch::{engine::EngineNodeLauncher, *}; diff --git a/crates/node/builder/src/node.rs b/crates/node/builder/src/node.rs index 3a70c08c1..3e3d5b696 100644 --- a/crates/node/builder/src/node.rs +++ 
b/crates/node/builder/src/node.rs @@ -1,7 +1,11 @@ // re-export the node api types pub use reth_node_api::{FullNodeTypes, NodeTypes, NodeTypesWithEngine}; -use std::{marker::PhantomData, sync::Arc}; +use std::{ + marker::PhantomData, + ops::{Deref, DerefMut}, + sync::Arc, +}; use reth_node_api::{EngineTypes, FullNodeComponents}; use reth_node_core::{ @@ -14,11 +18,7 @@ use reth_provider::ChainSpecProvider; use reth_rpc_builder::{auth::AuthServerHandle, RpcServerHandle}; use reth_tasks::TaskExecutor; -use crate::{ - components::NodeComponentsBuilder, - rpc::{RethRpcServerHandles, RpcRegistry}, - NodeAdapter, NodeAddOns, -}; +use crate::{components::NodeComponentsBuilder, rpc::RethRpcAddOns, NodeAdapter, NodeAddOns}; /// A [`crate::Node`] is a [`NodeTypesWithEngine`] that comes with preconfigured components. /// @@ -84,7 +84,7 @@ impl Node for AnyNode where N: FullNodeTypes + Clone, C: NodeComponentsBuilder + Clone + Sync + Unpin + 'static, - AO: NodeAddOns>, + AO: NodeAddOns> + Clone + Sync + Unpin + 'static, { type ComponentsBuilder = C; type AddOns = AO; @@ -117,14 +117,12 @@ pub struct FullNode> { pub payload_builder: PayloadBuilderHandle<::Engine>, /// Task executor for the node. pub task_executor: TaskExecutor, - /// Handles to the node's rpc servers - pub rpc_server_handles: RethRpcServerHandles, - /// The configured rpc namespaces - pub rpc_registry: RpcRegistry, /// The initial node config. pub config: NodeConfig<::ChainSpec>, /// The data dir of the node. pub data_dir: ChainPath, + /// The handle to launched add-ons + pub add_ons_handle: AddOns::Handle, } impl> Clone for FullNode { @@ -137,10 +135,9 @@ impl> Clone for FullNode Arc<::ChainSpec> { self.provider.chain_spec() } +} +impl FullNode +where + Engine: EngineTypes, + Node: FullNodeComponents>, + AddOns: RethRpcAddOns, +{ /// Returns the [`RpcServerHandle`] to the started rpc server. pub const fn rpc_server_handle(&self) -> &RpcServerHandle { - &self.rpc_server_handles.rpc + &self.add_ons_handle.rpc_server_handles.rpc } /// Returns the [`AuthServerHandle`] to the started authenticated engine API server. pub const fn auth_server_handle(&self) -> &AuthServerHandle { - &self.rpc_server_handles.auth + &self.add_ons_handle.rpc_server_handles.auth } /// Returns the [`EngineApiClient`] interface for the authenticated engine API. @@ -188,3 +192,17 @@ where self.auth_server_handle().ipc_client().await } } + +impl> Deref for FullNode { + type Target = AddOns::Handle; + + fn deref(&self) -> &Self::Target { + &self.add_ons_handle + } +} + +impl> DerefMut for FullNode { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.add_ons_handle + } +} diff --git a/crates/node/builder/src/rpc.rs b/crates/node/builder/src/rpc.rs index cb8d8f355..d8cce9217 100644 --- a/crates/node/builder/src/rpc.rs +++ b/crates/node/builder/src/rpc.rs @@ -1,31 +1,35 @@ //! Builder support for rpc components. 
use std::{ - fmt, + fmt::{self, Debug}, + marker::PhantomData, ops::{Deref, DerefMut}, }; +use alloy_rpc_types::engine::ClientVersionV1; use futures::TryFutureExt; -use reth_node_api::{BuilderProvider, FullNodeComponents, NodeTypes, NodeTypesWithEngine}; +use reth_node_api::{ + AddOnsContext, FullNodeComponents, NodeAddOns, NodeTypes, NodeTypesWithEngine, +}; use reth_node_core::{ node_config::NodeConfig, - rpc::{ - api::EngineApiServer, - eth::{EthApiTypes, FullEthApiServer}, - }, + rpc::eth::{EthApiTypes, FullEthApiServer}, + version::{CARGO_PKG_VERSION, CLIENT_CODE, NAME_CLIENT, VERGEN_GIT_SHA}, }; use reth_payload_builder::PayloadBuilderHandle; use reth_provider::providers::ProviderNodeTypes; +use reth_rpc::EthApi; +use reth_rpc_api::eth::helpers::AddDevSigners; use reth_rpc_builder::{ auth::{AuthRpcModule, AuthServerHandle}, config::RethRpcServerConfig, RpcModuleBuilder, RpcRegistryInner, RpcServerHandle, TransportRpcModules, }; -use reth_rpc_layer::JwtSecret; +use reth_rpc_engine_api::{capabilities::EngineCapabilities, EngineApi}; use reth_tasks::TaskExecutor; use reth_tracing::tracing::{debug, info}; -use crate::{EthApiBuilderCtx, RpcAddOns}; +use crate::EthApiBuilderCtx; /// Contains the handles to the spawned RPC servers. /// @@ -292,102 +296,232 @@ where } } -/// Launch the rpc servers. -pub async fn launch_rpc_servers( - node: Node, - engine_api: Engine, - config: &NodeConfig<::ChainSpec>, - jwt_secret: JwtSecret, - add_ons: RpcAddOns, -) -> eyre::Result<(RethRpcServerHandles, RpcRegistry)> +/// Handle to the launched RPC servers. +#[derive(Clone)] +pub struct RpcHandle { + /// Handles to launched servers. + pub rpc_server_handles: RethRpcServerHandles, + /// Configured RPC modules. + pub rpc_registry: RpcRegistry, +} + +impl Deref for RpcHandle { + type Target = RpcRegistry; + + fn deref(&self) -> &Self::Target { + &self.rpc_registry + } +} + +impl Debug for RpcHandle where - Node: FullNodeComponents + Clone, - Engine: EngineApiServer<::Engine>, - EthApi: EthApiBuilderProvider + FullEthApiServer, + RpcRegistry: Debug, { - let auth_config = config.rpc.auth_server_config(jwt_secret)?; - let module_config = config.rpc.transport_rpc_module_config(); - debug!(target: "reth::cli", http=?module_config.http(), ws=?module_config.ws(), "Using RPC module config"); - - let (mut modules, mut auth_module, registry) = RpcModuleBuilder::default() - .with_provider(node.provider().clone()) - .with_pool(node.pool().clone()) - .with_network(node.network().clone()) - .with_events(node.provider().clone()) - .with_executor(node.task_executor().clone()) - .with_evm_config(node.evm_config().clone()) - .with_block_executor(node.block_executor().clone()) - .build_with_auth_server(module_config, engine_api, EthApi::eth_api_builder()); - - let mut registry = RpcRegistry { registry }; - let ctx = RpcContext { - node: node.clone(), - config, - registry: &mut registry, - modules: &mut modules, - auth_module: &mut auth_module, - }; - - let RpcAddOns { hooks, .. 
} = add_ons; - let RpcHooks { on_rpc_started, extend_rpc_modules } = hooks; - - extend_rpc_modules.extend_rpc_modules(ctx)?; - - let server_config = config.rpc.rpc_server_config(); - let cloned_modules = modules.clone(); - let launch_rpc = server_config.start(&cloned_modules).map_ok(|handle| { - if let Some(path) = handle.ipc_endpoint() { - info!(target: "reth::cli", %path, "RPC IPC server started"); - } - if let Some(addr) = handle.http_local_addr() { - info!(target: "reth::cli", url=%addr, "RPC HTTP server started"); - } - if let Some(addr) = handle.ws_local_addr() { - info!(target: "reth::cli", url=%addr, "RPC WS server started"); - } - handle - }); - - let launch_auth = auth_module.clone().start_server(auth_config).map_ok(|handle| { - let addr = handle.local_addr(); - if let Some(ipc_endpoint) = handle.ipc_endpoint() { - info!(target: "reth::cli", url=%addr, ipc_endpoint=%ipc_endpoint,"RPC auth server started"); - } else { - info!(target: "reth::cli", url=%addr, "RPC auth server started"); + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("RpcHandle") + .field("rpc_server_handles", &self.rpc_server_handles) + .field("rpc_registry", &self.rpc_registry) + .finish() + } +} + +/// Node add-ons containing RPC server configuration, with customizable eth API handler. +#[allow(clippy::type_complexity)] +pub struct RpcAddOns { + /// Additional RPC add-ons. + pub hooks: RpcHooks, + /// Builder for `EthApi` + eth_api_builder: Box) -> EthApi + Send + Sync>, + _pd: PhantomData<(Node, EthApi)>, +} + +impl Debug for RpcAddOns { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("RpcAddOns") + .field("hooks", &self.hooks) + .field("eth_api_builder", &"...") + .finish() + } +} + +impl RpcAddOns { + /// Creates a new instance of the RPC add-ons. + pub fn new( + eth_api_builder: impl FnOnce(&EthApiBuilderCtx) -> EthApi + Send + Sync + 'static, + ) -> Self { + Self { + hooks: RpcHooks::default(), + eth_api_builder: Box::new(eth_api_builder), + _pd: PhantomData, } - handle - }); + } - // launch servers concurrently - let (rpc, auth) = futures::future::try_join(launch_rpc, launch_auth).await?; - let handles = RethRpcServerHandles { rpc, auth }; + /// Sets the hook that is run once the rpc server is started. + pub fn on_rpc_started(mut self, hook: F) -> Self + where + F: FnOnce(RpcContext<'_, Node, EthApi>, RethRpcServerHandles) -> eyre::Result<()> + + Send + + 'static, + { + self.hooks.set_on_rpc_started(hook); + self + } - let ctx = RpcContext { - node, - config, - registry: &mut registry, - modules: &mut modules, - auth_module: &mut auth_module, - }; + /// Sets the hook that is run to configure the rpc modules. 
+ pub fn extend_rpc_modules(mut self, hook: F) -> Self + where + F: FnOnce(RpcContext<'_, Node, EthApi>) -> eyre::Result<()> + Send + 'static, + { + self.hooks.set_extend_rpc_modules(hook); + self + } +} + +impl> Default + for RpcAddOns +{ + fn default() -> Self { + Self::new(EthApi::build) + } +} - on_rpc_started.on_rpc_started(ctx, handles.clone())?; +impl NodeAddOns for RpcAddOns +where + N: FullNodeComponents, + EthApi: EthApiTypes + FullEthApiServer + AddDevSigners + Unpin + 'static, +{ + type Handle = RpcHandle; + + async fn launch_add_ons(self, ctx: AddOnsContext<'_, N>) -> eyre::Result { + let AddOnsContext { node, config, beacon_engine_handle, jwt_secret } = ctx; + + let client = ClientVersionV1 { + code: CLIENT_CODE, + name: NAME_CLIENT.to_string(), + version: CARGO_PKG_VERSION.to_string(), + commit: VERGEN_GIT_SHA.to_string(), + }; + + let engine_api = EngineApi::new( + node.provider().clone(), + config.chain.clone(), + beacon_engine_handle.clone(), + node.payload_builder().clone().into(), + node.pool().clone(), + Box::new(node.task_executor().clone()), + client, + EngineCapabilities::default(), + node.engine_validator().clone(), + ); + info!(target: "reth::cli", "Engine API handler initialized"); + + let auth_config = config.rpc.auth_server_config(*jwt_secret)?; + let module_config = config.rpc.transport_rpc_module_config(); + debug!(target: "reth::cli", http=?module_config.http(), ws=?module_config.ws(), "Using RPC module config"); + + let (mut modules, mut auth_module, registry) = RpcModuleBuilder::default() + .with_provider(node.provider().clone()) + .with_pool(node.pool().clone()) + .with_network(node.network().clone()) + .with_events(node.provider().clone()) + .with_executor(node.task_executor().clone()) + .with_evm_config(node.evm_config().clone()) + .with_block_executor(node.block_executor().clone()) + .build_with_auth_server(module_config, engine_api, self.eth_api_builder); + + // in dev mode we generate 20 random dev-signer accounts + if config.dev.dev { + registry.eth_api().with_dev_accounts(); + } - Ok((handles, registry)) + let mut registry = RpcRegistry { registry }; + let ctx = RpcContext { + node: node.clone(), + config, + registry: &mut registry, + modules: &mut modules, + auth_module: &mut auth_module, + }; + + let RpcHooks { on_rpc_started, extend_rpc_modules } = self.hooks; + + extend_rpc_modules.extend_rpc_modules(ctx)?; + + let server_config = config.rpc.rpc_server_config(); + let cloned_modules = modules.clone(); + let launch_rpc = server_config.start(&cloned_modules).map_ok(|handle| { + if let Some(path) = handle.ipc_endpoint() { + info!(target: "reth::cli", %path, "RPC IPC server started"); + } + if let Some(addr) = handle.http_local_addr() { + info!(target: "reth::cli", url=%addr, "RPC HTTP server started"); + } + if let Some(addr) = handle.ws_local_addr() { + info!(target: "reth::cli", url=%addr, "RPC WS server started"); + } + handle + }); + + let launch_auth = auth_module.clone().start_server(auth_config).map_ok(|handle| { + let addr = handle.local_addr(); + if let Some(ipc_endpoint) = handle.ipc_endpoint() { + info!(target: "reth::cli", url=%addr, ipc_endpoint=%ipc_endpoint,"RPC auth server started"); + } else { + info!(target: "reth::cli", url=%addr, "RPC auth server started"); + } + handle + }); + + // launch servers concurrently + let (rpc, auth) = futures::future::try_join(launch_rpc, launch_auth).await?; + + let handles = RethRpcServerHandles { rpc, auth }; + + let ctx = RpcContext { + node: node.clone(), + config, + registry: &mut registry, + 
modules: &mut modules, + auth_module: &mut auth_module, + }; + + on_rpc_started.on_rpc_started(ctx, handles.clone())?; + + Ok(RpcHandle { rpc_server_handles: handles, rpc_registry: registry }) + } } -/// Provides builder for the core `eth` API type. -pub trait EthApiBuilderProvider: BuilderProvider + EthApiTypes { - /// Returns the eth api builder. - #[allow(clippy::type_complexity)] - fn eth_api_builder() -> Box) -> Self + Send>; +/// Helper trait implemented for add-ons producing [`RpcHandle`]. Used by common node launcher +/// implementations. +pub trait RethRpcAddOns: + NodeAddOns> +{ + /// eth API implementation. + type EthApi: EthApiTypes; + + /// Returns a mutable reference to RPC hooks. + fn hooks_mut(&mut self) -> &mut RpcHooks; } -impl EthApiBuilderProvider for F +impl RethRpcAddOns for RpcAddOns where - N: FullNodeComponents, - for<'a> F: BuilderProvider = &'a EthApiBuilderCtx> + EthApiTypes, + Self: NodeAddOns>, { - fn eth_api_builder() -> Box) -> Self + Send> { - F::builder() + type EthApi = EthApi; + + fn hooks_mut(&mut self) -> &mut RpcHooks { + &mut self.hooks + } +} + +/// A `EthApi` that knows how to build itself from [`EthApiBuilderCtx`]. +pub trait EthApiBuilder: 'static { + /// Builds the `EthApi` from the given context. + fn build(ctx: &EthApiBuilderCtx) -> Self; +} + +impl EthApiBuilder for EthApi { + fn build(ctx: &EthApiBuilderCtx) -> Self { + Self::with_spawner(ctx) } } diff --git a/crates/optimism/bin/src/main.rs b/crates/optimism/bin/src/main.rs index 6822a6a50..c6d3e32b7 100644 --- a/crates/optimism/bin/src/main.rs +++ b/crates/optimism/bin/src/main.rs @@ -6,7 +6,6 @@ use clap::Parser; use reth_node_builder::{engine_tree_config::TreeConfig, EngineNodeLauncher}; use reth_optimism_cli::{chainspec::OpChainSpecParser, Cli}; use reth_optimism_node::{args::RollupArgs, node::OptimismAddOns, OptimismNode}; -use reth_optimism_rpc::SequencerClient; use reth_provider::providers::BlockchainProvider2; use tracing as _; @@ -34,17 +33,7 @@ fn main() { let handle = builder .with_types_and_provider::>() .with_components(OptimismNode::components(rollup_args)) - .with_add_ons(OptimismAddOns::new(sequencer_http_arg.clone())) - .extend_rpc_modules(move |ctx| { - // register sequencer tx forwarder - if let Some(sequencer_http) = sequencer_http_arg { - ctx.registry - .eth_api() - .set_sequencer_client(SequencerClient::new(sequencer_http))?; - } - - Ok(()) - }) + .with_add_ons(OptimismAddOns::new(sequencer_http_arg)) .launch_with_fn(|builder| { let launcher = EngineNodeLauncher::new( builder.task_executor().clone(), @@ -58,20 +47,8 @@ fn main() { handle.node_exit_future.await } false => { - let handle = builder - .node(OptimismNode::new(rollup_args.clone())) - .extend_rpc_modules(move |ctx| { - // register sequencer tx forwarder - if let Some(sequencer_http) = sequencer_http_arg { - ctx.registry - .eth_api() - .set_sequencer_client(SequencerClient::new(sequencer_http))?; - } - - Ok(()) - }) - .launch() - .await?; + let handle = + builder.node(OptimismNode::new(rollup_args.clone())).launch().await?; handle.node_exit_future.await } diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index b6e64f7e0..648da85d0 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -13,7 +13,8 @@ use reth_node_builder::{ NetworkBuilder, PayloadServiceBuilder, PoolBuilder, PoolBuilderConfigOverrides, }, node::{FullNodeTypes, NodeTypes, NodeTypesWithEngine}, - BuilderContext, Node, PayloadBuilderConfig, + rpc::{RethRpcAddOns, RpcAddOns, 
RpcHandle}, + BuilderContext, Node, NodeAdapter, NodeComponentsBuilder, PayloadBuilderConfig, }; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::OptimismBeaconConsensus; @@ -97,7 +98,9 @@ where OptimismEngineValidatorBuilder, >; - type AddOns = OptimismAddOns; + type AddOns = OptimismAddOns< + NodeAdapter>::Components>, + >; fn components_builder(&self) -> Self::ComponentsBuilder { let Self { args } = self; @@ -119,25 +122,43 @@ impl NodeTypesWithEngine for OptimismNode { } /// Add-ons w.r.t. optimism. -#[derive(Debug, Clone)] -pub struct OptimismAddOns { - sequencer_http: Option, +#[derive(Debug)] +pub struct OptimismAddOns(pub RpcAddOns>); + +impl Default for OptimismAddOns { + fn default() -> Self { + Self::new(None) + } } -impl OptimismAddOns { +impl OptimismAddOns { /// Create a new instance with the given `sequencer_http` URL. - pub const fn new(sequencer_http: Option) -> Self { - Self { sequencer_http } + pub fn new(sequencer_http: Option) -> Self { + Self(RpcAddOns::new(move |ctx| OpEthApi::new(ctx, sequencer_http))) } +} + +impl>> NodeAddOns + for OptimismAddOns +{ + type Handle = RpcHandle>; - /// Returns the sequencer HTTP URL. - pub fn sequencer_http(&self) -> Option<&str> { - self.sequencer_http.as_deref() + async fn launch_add_ons( + self, + ctx: reth_node_api::AddOnsContext<'_, N>, + ) -> eyre::Result { + self.0.launch_add_ons(ctx).await } } -impl NodeAddOns for OptimismAddOns { +impl>> RethRpcAddOns + for OptimismAddOns +{ type EthApi = OpEthApi; + + fn hooks_mut(&mut self) -> &mut reth_node_builder::rpc::RpcHooks { + self.0.hooks_mut() + } } /// A regular optimism evm and executor builder. diff --git a/crates/optimism/node/tests/e2e/utils.rs b/crates/optimism/node/tests/e2e/utils.rs index 863bf254e..8ea8df380 100644 --- a/crates/optimism/node/tests/e2e/utils.rs +++ b/crates/optimism/node/tests/e2e/utils.rs @@ -1,7 +1,9 @@ use alloy_genesis::Genesis; use alloy_primitives::{Address, B256}; use reth::{rpc::types::engine::PayloadAttributes, tasks::TaskManager}; -use reth_e2e_test_utils::{transaction::TransactionTestContext, wallet::Wallet, NodeHelperType}; +use reth_e2e_test_utils::{ + transaction::TransactionTestContext, wallet::Wallet, Adapter, NodeHelperType, +}; use reth_optimism_chainspec::OpChainSpecBuilder; use reth_optimism_node::{ node::OptimismAddOns, OptimismBuiltPayload, OptimismNode, OptimismPayloadBuilderAttributes, @@ -11,7 +13,7 @@ use std::sync::Arc; use tokio::sync::Mutex; /// Optimism Node Helper type -pub(crate) type OpNode = NodeHelperType; +pub(crate) type OpNode = NodeHelperType>>; pub(crate) async fn setup(num_nodes: usize) -> eyre::Result<(Vec, TaskManager, Wallet)> { let genesis: Genesis = serde_json::from_str(include_str!("../assets/genesis.json")).unwrap(); diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index 57ce44100..d65dd8edd 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -17,7 +17,7 @@ use op_alloy_network::Optimism; use reth_chainspec::EthereumHardforks; use reth_evm::ConfigureEvm; use reth_network_api::NetworkInfo; -use reth_node_api::{BuilderProvider, FullNodeComponents, FullNodeTypes, NodeTypes}; +use reth_node_api::{FullNodeComponents, FullNodeTypes, NodeTypes}; use reth_node_builder::EthApiBuilderCtx; use reth_primitives::Header; use reth_provider::{ @@ -38,7 +38,6 @@ use reth_tasks::{ TaskSpawner, }; use reth_transaction_pool::TransactionPool; -use tokio::sync::OnceCell; use crate::{OpEthApiError, OpTxBuilder, SequencerClient}; @@ 
-67,13 +66,12 @@ pub struct OpEthApi { inner: Arc>, /// Sequencer client, configured to forward submitted transactions to sequencer of given OP /// network. - sequencer_client: Arc>, + sequencer_client: Option, } impl OpEthApi { /// Creates a new instance for given context. - #[allow(clippy::type_complexity)] - pub fn with_spawner(ctx: &EthApiBuilderCtx) -> Self { + pub fn new(ctx: &EthApiBuilderCtx, sequencer_http: Option) -> Self { let blocking_task_pool = BlockingTaskPool::build().expect("failed to build blocking task pool"); @@ -93,7 +91,7 @@ impl OpEthApi { ctx.config.proof_permits, ); - Self { inner: Arc::new(inner), sequencer_client: Arc::new(OnceCell::new()) } + Self { inner: Arc::new(inner), sequencer_client: sequencer_http.map(SequencerClient::new) } } } @@ -246,18 +244,6 @@ where } } -impl BuilderProvider for OpEthApi -where - Self: Send, - N: FullNodeComponents, -{ - type Ctx<'a> = &'a EthApiBuilderCtx; - - fn builder() -> Box Fn(Self::Ctx<'a>) -> Self + Send> { - Box::new(Self::with_spawner) - } -} - impl fmt::Debug for OpEthApi { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("OpEthApi").finish_non_exhaustive() diff --git a/crates/optimism/rpc/src/eth/transaction.rs b/crates/optimism/rpc/src/eth/transaction.rs index ab7525016..36556905e 100644 --- a/crates/optimism/rpc/src/eth/transaction.rs +++ b/crates/optimism/rpc/src/eth/transaction.rs @@ -81,17 +81,9 @@ impl OpEthApi where N: FullNodeComponents, { - /// Sets a [`SequencerClient`] for `eth_sendRawTransaction` to forward transactions to. - pub fn set_sequencer_client( - &self, - sequencer_client: SequencerClient, - ) -> Result<(), tokio::sync::SetError> { - self.sequencer_client.set(sequencer_client) - } - /// Returns the [`SequencerClient`] if one is set. pub fn raw_tx_forwarder(&self) -> Option { - self.sequencer_client.get().cloned() + self.sequencer_client.clone() } } diff --git a/crates/rpc/rpc-builder/src/eth.rs b/crates/rpc/rpc-builder/src/eth.rs index 920268a98..613652678 100644 --- a/crates/rpc/rpc-builder/src/eth.rs +++ b/crates/rpc/rpc-builder/src/eth.rs @@ -1,5 +1,3 @@ -use std::marker::PhantomData; - use reth_evm::ConfigureEvm; use reth_primitives::Header; use reth_provider::{BlockReader, CanonStateSubscriptions, EvmEnvProvider, StateProviderFactory}; @@ -11,9 +9,8 @@ use reth_rpc_eth_types::{ use reth_tasks::TaskSpawner; /// Alias for `eth` namespace API builder. -pub type DynEthApiBuilder = Box< - dyn Fn(&EthApiBuilderCtx) -> EthApi, ->; +pub type DynEthApiBuilder = + Box) -> EthApi>; /// Handlers for core, filter and pubsub `eth` namespace APIs. #[derive(Debug, Clone)] @@ -87,7 +84,6 @@ where executor, events, cache, - _rpc_ty_builders: PhantomData, }; let api = eth_api_builder(&ctx); diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index e49105ab7..6a1240b64 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -152,6 +152,7 @@ use std::{ collections::HashMap, + fmt::Debug, net::{Ipv4Addr, SocketAddr, SocketAddrV4}, time::{Duration, SystemTime, UNIX_EPOCH}, }; diff --git a/crates/rpc/rpc-eth-types/src/builder/ctx.rs b/crates/rpc/rpc-eth-types/src/builder/ctx.rs index cb2750b6e..2132dd0e2 100644 --- a/crates/rpc/rpc-eth-types/src/builder/ctx.rs +++ b/crates/rpc/rpc-eth-types/src/builder/ctx.rs @@ -1,7 +1,5 @@ //! Context required for building `eth` namespace APIs. 
-use std::marker::PhantomData; - use reth_chain_state::CanonStateSubscriptions; use reth_chainspec::ChainSpecProvider; use reth_storage_api::BlockReaderIdExt; @@ -14,7 +12,7 @@ use crate::{ /// Context for building the `eth` namespace API. #[derive(Debug, Clone)] -pub struct EthApiBuilderCtx { +pub struct EthApiBuilderCtx { /// Database handle. pub provider: Provider, /// Mempool handle. @@ -31,12 +29,10 @@ pub struct EthApiBuilderCtx, } -impl - EthApiBuilderCtx +impl + EthApiBuilderCtx where Provider: BlockReaderIdExt + Clone, { @@ -46,53 +42,14 @@ where Provider: ChainSpecProvider + 'static, Tasks: TaskSpawner, Events: CanonStateSubscriptions, - { - FeeHistoryCacheBuilder::build(self) - } - - /// Returns a new [`GasPriceOracle`] for the context. - pub fn new_gas_price_oracle(&self) -> GasPriceOracle { - GasPriceOracleBuilder::build(self) - } -} - -/// Builds `eth_` core api component [`GasPriceOracle`], for given context. -#[derive(Debug)] -pub struct GasPriceOracleBuilder; - -impl GasPriceOracleBuilder { - /// Builds a [`GasPriceOracle`], for given context. - pub fn build( - ctx: &EthApiBuilderCtx, - ) -> GasPriceOracle - where - Provider: BlockReaderIdExt + Clone, - { - GasPriceOracle::new(ctx.provider.clone(), ctx.config.gas_oracle, ctx.cache.clone()) - } -} - -/// Builds `eth_` core api component [`FeeHistoryCache`], for given context. -#[derive(Debug)] -pub struct FeeHistoryCacheBuilder; - -impl FeeHistoryCacheBuilder { - /// Builds a [`FeeHistoryCache`], for given context. - pub fn build( - ctx: &EthApiBuilderCtx, - ) -> FeeHistoryCache - where - Provider: ChainSpecProvider + BlockReaderIdExt + Clone + 'static, - Tasks: TaskSpawner, - Events: CanonStateSubscriptions, { let fee_history_cache = - FeeHistoryCache::new(ctx.cache.clone(), ctx.config.fee_history_cache); + FeeHistoryCache::new(self.cache.clone(), self.config.fee_history_cache); - let new_canonical_blocks = ctx.events.canonical_state_stream(); + let new_canonical_blocks = self.events.canonical_state_stream(); let fhc = fee_history_cache.clone(); - let provider = ctx.provider.clone(); - ctx.executor.spawn_critical( + let provider = self.provider.clone(); + self.executor.spawn_critical( "cache canonical blocks for fee history task", Box::pin(async move { fee_history_cache_new_blocks_task(fhc, new_canonical_blocks, provider).await; @@ -101,4 +58,9 @@ impl FeeHistoryCacheBuilder { fee_history_cache } + + /// Returns a new [`GasPriceOracle`] for the context. + pub fn new_gas_price_oracle(&self) -> GasPriceOracle { + GasPriceOracle::new(self.provider.clone(), self.config.gas_oracle, self.cache.clone()) + } } diff --git a/crates/rpc/rpc/src/eth/core.rs b/crates/rpc/rpc/src/eth/core.rs index 304266f6a..6da468040 100644 --- a/crates/rpc/rpc/src/eth/core.rs +++ b/crates/rpc/rpc/src/eth/core.rs @@ -6,7 +6,6 @@ use std::sync::Arc; use alloy_network::AnyNetwork; use alloy_primitives::U256; use derive_more::Deref; -use reth_node_api::{BuilderProvider, FullNodeComponents}; use reth_primitives::BlockNumberOrTag; use reth_provider::{BlockReaderIdExt, CanonStateSubscriptions, ChainSpecProvider}; use reth_rpc_eth_api::{ @@ -19,7 +18,7 @@ use reth_rpc_eth_types::{ }; use reth_tasks::{ pool::{BlockingTaskGuard, BlockingTaskPool}, - TaskExecutor, TaskSpawner, TokioTaskExecutor, + TaskSpawner, TokioTaskExecutor, }; use tokio::sync::Mutex; @@ -95,7 +94,7 @@ where { /// Creates a new, shareable instance. 
pub fn with_spawner( - ctx: &EthApiBuilderCtx, + ctx: &EthApiBuilderCtx, ) -> Self where Tasks: TaskSpawner + Clone + 'static, @@ -163,25 +162,6 @@ where } } -impl BuilderProvider for EthApi -where - N: FullNodeComponents, -{ - type Ctx<'a> = &'a EthApiBuilderCtx< - N::Provider, - N::Pool, - N::Evm, - N::Network, - TaskExecutor, - N::Provider, - Self, - >; - - fn builder() -> Box Fn(Self::Ctx<'a>) -> Self + Send> { - Box::new(Self::with_spawner) - } -} - /// Container type `EthApi` #[allow(missing_debug_implementations)] pub struct EthApiInner { diff --git a/examples/custom-engine-types/src/main.rs b/examples/custom-engine-types/src/main.rs index 34f8186be..f833da862 100644 --- a/examples/custom-engine-types/src/main.rs +++ b/examples/custom-engine-types/src/main.rs @@ -36,7 +36,8 @@ use reth::{ builder::{ components::{ComponentsBuilder, EngineValidatorBuilder, PayloadServiceBuilder}, node::{NodeTypes, NodeTypesWithEngine}, - BuilderContext, FullNodeTypes, Node, NodeBuilder, PayloadBuilderConfig, + BuilderContext, FullNodeTypes, Node, NodeAdapter, NodeBuilder, NodeComponentsBuilder, + PayloadBuilderConfig, }, providers::{CanonStateSubscriptions, StateProviderFactory}, tasks::TaskManager, @@ -241,7 +242,9 @@ where EthereumConsensusBuilder, CustomEngineValidatorBuilder, >; - type AddOns = EthereumAddOns; + type AddOns = EthereumAddOns< + NodeAdapter>::Components>, + >; fn components_builder(&self) -> Self::ComponentsBuilder { ComponentsBuilder::default() From 5aceb3e11ed97be89a2cea338f904fc0afbc6dd4 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Tue, 15 Oct 2024 17:27:26 +0200 Subject: [PATCH 060/425] primitives: rm redundant `chain_id` function for Transaction (#11751) --- Cargo.lock | 1 + crates/primitives/src/alloy_compat.rs | 2 +- crates/primitives/src/transaction/mod.rs | 21 ++++++------------- crates/rpc/rpc-types-compat/Cargo.toml | 1 + .../rpc-types-compat/src/transaction/mod.rs | 1 + crates/rpc/rpc/src/eth/helpers/types.rs | 1 + crates/transaction-pool/Cargo.toml | 5 ++--- crates/transaction-pool/src/traits.rs | 1 + testing/testing-utils/src/generators.rs | 2 +- 9 files changed, 15 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 59e8f2b27..b3cd15385 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8835,6 +8835,7 @@ dependencies = [ name = "reth-rpc-types-compat" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-rlp", diff --git a/crates/primitives/src/alloy_compat.rs b/crates/primitives/src/alloy_compat.rs index 65099967e..c9bdfad89 100644 --- a/crates/primitives/src/alloy_compat.rs +++ b/crates/primitives/src/alloy_compat.rs @@ -5,7 +5,7 @@ use crate::{ Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, TxType, }; use alloc::{string::ToString, vec::Vec}; -use alloy_consensus::{TxEip1559, TxEip2930, TxEip4844, TxLegacy}; +use alloy_consensus::{Transaction as _, TxEip1559, TxEip2930, TxEip4844, TxLegacy}; use alloy_primitives::{Parity, TxKind}; use alloy_rlp::Error as RlpError; use alloy_serde::WithOtherFields; diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 0cb860ff6..0463cd9ea 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -4,7 +4,10 @@ use crate::BlockHashOrNumber; use alloy_eips::eip7702::SignedAuthorization; use alloy_primitives::{keccak256, Address, ChainId, TxKind, B256, U256}; -use 
alloy_consensus::{SignableTransaction, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy}; +use alloy_consensus::{ + SignableTransaction, Transaction as AlloyTransaction, TxEip1559, TxEip2930, TxEip4844, + TxEip7702, TxLegacy, +}; use alloy_eips::{ eip2718::{Decodable2718, Eip2718Error, Eip2718Result, Encodable2718}, eip2930::AccessList, @@ -197,19 +200,6 @@ impl Transaction { } } - /// Get `chain_id`. - pub const fn chain_id(&self) -> Option { - match self { - Self::Legacy(TxLegacy { chain_id, .. }) => *chain_id, - Self::Eip2930(TxEip2930 { chain_id, .. }) | - Self::Eip1559(TxEip1559 { chain_id, .. }) | - Self::Eip4844(TxEip4844 { chain_id, .. }) | - Self::Eip7702(TxEip7702 { chain_id, .. }) => Some(*chain_id), - #[cfg(feature = "optimism")] - Self::Deposit(_) => None, - } - } - /// Sets the transaction's chain id to the provided value. pub fn set_chain_id(&mut self, chain_id: u64) { match self { @@ -824,7 +814,7 @@ impl Encodable for Transaction { } } -impl alloy_consensus::Transaction for Transaction { +impl AlloyTransaction for Transaction { fn chain_id(&self) -> Option { match self { Self::Legacy(tx) => tx.chain_id(), @@ -1974,6 +1964,7 @@ mod tests { transaction::{signature::Signature, TxEip1559, TxKind, TxLegacy}, Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, }; + use alloy_consensus::Transaction as _; use alloy_eips::eip2718::{Decodable2718, Encodable2718}; use alloy_primitives::{address, b256, bytes, hex, Address, Bytes, Parity, B256, U256}; use alloy_rlp::{Decodable, Encodable, Error as RlpError}; diff --git a/crates/rpc/rpc-types-compat/Cargo.toml b/crates/rpc/rpc-types-compat/Cargo.toml index 81b4def20..8e436f0d3 100644 --- a/crates/rpc/rpc-types-compat/Cargo.toml +++ b/crates/rpc/rpc-types-compat/Cargo.toml @@ -24,6 +24,7 @@ alloy-rpc-types.workspace = true alloy-rpc-types-eth = { workspace = true, default-features = false, features = ["serde"] } alloy-serde.workspace = true alloy-rpc-types-engine.workspace = true +alloy-consensus.workspace = true [dev-dependencies] serde_json.workspace = true \ No newline at end of file diff --git a/crates/rpc/rpc-types-compat/src/transaction/mod.rs b/crates/rpc/rpc-types-compat/src/transaction/mod.rs index 9bb2a8b5d..a489a5886 100644 --- a/crates/rpc/rpc-types-compat/src/transaction/mod.rs +++ b/crates/rpc/rpc-types-compat/src/transaction/mod.rs @@ -4,6 +4,7 @@ mod signature; pub use signature::*; use std::fmt; +use alloy_consensus::Transaction as _; use alloy_rpc_types::{ request::{TransactionInput, TransactionRequest}, Transaction, TransactionInfo, diff --git a/crates/rpc/rpc/src/eth/helpers/types.rs b/crates/rpc/rpc/src/eth/helpers/types.rs index d2b9c268e..982afdcac 100644 --- a/crates/rpc/rpc/src/eth/helpers/types.rs +++ b/crates/rpc/rpc/src/eth/helpers/types.rs @@ -1,5 +1,6 @@ //! L1 `eth` API types. 
+use alloy_consensus::Transaction as _; use alloy_network::{AnyNetwork, Network}; use alloy_primitives::{Address, TxKind}; use alloy_rpc_types::{Transaction, TransactionInfo}; diff --git a/crates/transaction-pool/Cargo.toml b/crates/transaction-pool/Cargo.toml index 41abbb4b6..887543b52 100644 --- a/crates/transaction-pool/Cargo.toml +++ b/crates/transaction-pool/Cargo.toml @@ -27,6 +27,7 @@ revm.workspace = true alloy-eips.workspace = true alloy-primitives.workspace = true alloy-rlp.workspace = true +alloy-consensus.workspace = true # async/futures futures-util.workspace = true @@ -54,7 +55,6 @@ rand = { workspace = true, optional = true } paste = { workspace = true, optional = true } proptest = { workspace = true, optional = true } proptest-arbitrary-interop = { workspace = true, optional = true } -alloy-consensus = { workspace = true, optional = true } [dev-dependencies] reth-primitives = { workspace = true, features = ["arbitrary"] } @@ -69,12 +69,11 @@ pprof = { workspace = true, features = ["criterion", "flamegraph"] } assert_matches.workspace = true tempfile.workspace = true serde_json.workspace = true -alloy-consensus.workspace = true [features] default = ["serde"] serde = ["dep:serde"] -test-utils = ["rand", "paste", "serde", "alloy-consensus"] +test-utils = ["rand", "paste", "serde"] arbitrary = ["proptest", "reth-primitives/arbitrary", "proptest-arbitrary-interop"] [[bench]] diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index 07fe9b9c2..9a6eda03d 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -7,6 +7,7 @@ use crate::{ validate::ValidPoolTransaction, AllTransactionsEvents, }; +use alloy_consensus::Transaction as _; use alloy_eips::{eip2718::Encodable2718, eip2930::AccessList, eip4844::BlobAndProofV1}; use alloy_primitives::{Address, TxHash, TxKind, B256, U256}; use futures_util::{ready, Stream}; diff --git a/testing/testing-utils/src/generators.rs b/testing/testing-utils/src/generators.rs index 70d6cc02b..d07af00ce 100644 --- a/testing/testing-utils/src/generators.rs +++ b/testing/testing-utils/src/generators.rs @@ -1,6 +1,6 @@ //! Generators for different data structures like block headers, block bodies and ranges of those. -use alloy_consensus::TxLegacy; +use alloy_consensus::{Transaction as _, TxLegacy}; use alloy_eips::{ eip6110::DepositRequest, eip7002::WithdrawalRequest, eip7251::ConsolidationRequest, }; From 04f5b53462cd9f981db9348efe23e4d81cec0012 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 15 Oct 2024 17:47:04 +0200 Subject: [PATCH 061/425] chore: touchups PayloadOrAttributes (#11749) --- crates/payload/primitives/src/payload.rs | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/crates/payload/primitives/src/payload.rs b/crates/payload/primitives/src/payload.rs index 41c2ef1ef..fc685559e 100644 --- a/crates/payload/primitives/src/payload.rs +++ b/crates/payload/primitives/src/payload.rs @@ -3,8 +3,10 @@ use alloy_primitives::B256; use alloy_rpc_types::engine::ExecutionPayload; /// Either an [`ExecutionPayload`] or a types that implements the [`PayloadAttributes`] trait. +/// +/// This is a helper type to unify pre-validation of version specific fields of the engine API. #[derive(Debug)] -pub enum PayloadOrAttributes<'a, AttributesType> { +pub enum PayloadOrAttributes<'a, Attributes> { /// An [`ExecutionPayload`] and optional parent beacon block root. 
ExecutionPayload { /// The inner execution payload @@ -13,13 +15,10 @@ pub enum PayloadOrAttributes<'a, AttributesType> { parent_beacon_block_root: Option, }, /// A payload attributes type. - PayloadAttributes(&'a AttributesType), + PayloadAttributes(&'a Attributes), } -impl<'a, AttributesType> PayloadOrAttributes<'a, AttributesType> -where - AttributesType: PayloadAttributes, -{ +impl<'a, Attributes> PayloadOrAttributes<'a, Attributes> { /// Construct a [`PayloadOrAttributes`] from an [`ExecutionPayload`] and optional parent beacon /// block root. pub const fn from_execution_payload( @@ -29,6 +28,16 @@ where Self::ExecutionPayload { payload, parent_beacon_block_root } } + /// Construct a [`PayloadOrAttributes::PayloadAttributes`] variant + pub const fn from_attributes(attributes: &'a Attributes) -> Self { + Self::PayloadAttributes(attributes) + } +} + +impl PayloadOrAttributes<'_, Attributes> +where + Attributes: PayloadAttributes, +{ /// Return the withdrawals for the payload or attributes. pub fn withdrawals(&self) -> Option<&Vec> { match self { From 77a382f59b2437e1b0dcefe66bddfeccc4020f06 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 15 Oct 2024 18:21:08 +0200 Subject: [PATCH 062/425] chore: allow missing const (#11750) --- crates/net/peers/src/lib.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/net/peers/src/lib.rs b/crates/net/peers/src/lib.rs index e80331f90..1d60994d8 100644 --- a/crates/net/peers/src/lib.rs +++ b/crates/net/peers/src/lib.rs @@ -117,6 +117,7 @@ pub enum AnyNode { impl AnyNode { /// Returns the peer id of the node. + #[allow(clippy::missing_const_for_fn)] pub fn peer_id(&self) -> PeerId { match self { Self::NodeRecord(record) => record.id, @@ -127,6 +128,7 @@ impl AnyNode { } /// Returns the full node record if available. + #[allow(clippy::missing_const_for_fn)] pub fn node_record(&self) -> Option { match self { Self::NodeRecord(record) => Some(*record), From 7b1b1fcb3b632d8c877a3e761a77ae4d513c0c2a Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Wed, 16 Oct 2024 00:24:25 +0800 Subject: [PATCH 063/425] chore(stage test): use with_capacity (#11759) --- crates/stages/stages/src/stages/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/stages/stages/src/stages/mod.rs b/crates/stages/stages/src/stages/mod.rs index 17ffcf2e9..4b9f92951 100644 --- a/crates/stages/stages/src/stages/mod.rs +++ b/crates/stages/stages/src/stages/mod.rs @@ -263,7 +263,7 @@ mod tests { ); db.insert_blocks(blocks.iter(), StorageKind::Static)?; - let mut receipts = Vec::new(); + let mut receipts = Vec::with_capacity(blocks.len()); let mut tx_num = 0u64; for block in &blocks { let mut block_receipts = Vec::with_capacity(block.body.transactions.len()); From 7f92760655e887f7282a4027465f4cdb4f66ef9a Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Tue, 15 Oct 2024 22:43:56 +0400 Subject: [PATCH 064/425] fix: `estimateGas` edge case (#11764) --- crates/rpc/rpc-eth-api/src/helpers/call.rs | 6 +++++- crates/rpc/rpc-eth-api/src/helpers/error.rs | 10 ++++++++++ crates/rpc/rpc-eth-types/src/error.rs | 5 +++++ 3 files changed, 20 insertions(+), 1 deletion(-) diff --git a/crates/rpc/rpc-eth-api/src/helpers/call.rs b/crates/rpc/rpc-eth-api/src/helpers/call.rs index cb5c4e79a..6784f9327 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/call.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/call.rs @@ -888,7 +888,11 @@ pub trait Call: LoadState + SpawnBlocking { // Execute transaction and handle potential gas errors, adjusting limits accordingly. 
match self.transact(&mut db, env.clone()) { Err(err) if err.is_gas_too_high() => { - // Increase the lowest gas limit if gas is too high + // Decrease the highest gas limit if gas is too high + highest_gas_limit = mid_gas_limit; + } + Err(err) if err.is_gas_too_low() => { + // Increase the lowest gas limit if gas is too low lowest_gas_limit = mid_gas_limit; } // Handle other cases, including successful transactions. diff --git a/crates/rpc/rpc-eth-api/src/helpers/error.rs b/crates/rpc/rpc-eth-api/src/helpers/error.rs index 041a01905..1d991b8e6 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/error.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/error.rs @@ -59,6 +59,16 @@ pub trait AsEthApiError { false } + + /// Returns `true` if error is + /// [`RpcInvalidTransactionError::GasTooLow`](reth_rpc_eth_types::RpcInvalidTransactionError::GasTooLow). + fn is_gas_too_low(&self) -> bool { + if let Some(err) = self.as_err() { + return err.is_gas_too_low() + } + + false + } } impl AsEthApiError for EthApiError { diff --git a/crates/rpc/rpc-eth-types/src/error.rs b/crates/rpc/rpc-eth-types/src/error.rs index 212fca36d..b38b31227 100644 --- a/crates/rpc/rpc-eth-types/src/error.rs +++ b/crates/rpc/rpc-eth-types/src/error.rs @@ -151,6 +151,11 @@ impl EthApiError { pub const fn is_gas_too_high(&self) -> bool { matches!(self, Self::InvalidTransaction(RpcInvalidTransactionError::GasTooHigh)) } + + /// Returns `true` if error is [`RpcInvalidTransactionError::GasTooLow`] + pub const fn is_gas_too_low(&self) -> bool { + matches!(self, Self::InvalidTransaction(RpcInvalidTransactionError::GasTooLow)) + } } impl From for jsonrpsee_types::error::ErrorObject<'static> { From 4144d6ea24e1ff528cde86982bce269b3dba179a Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 15 Oct 2024 21:17:31 +0200 Subject: [PATCH 065/425] chore: add get_database_args (#11766) --- crates/node/core/src/args/database.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/node/core/src/args/database.rs b/crates/node/core/src/args/database.rs index 09e9de82f..da96deb70 100644 --- a/crates/node/core/src/args/database.rs +++ b/crates/node/core/src/args/database.rs @@ -6,6 +6,7 @@ use clap::{ error::ErrorKind, Arg, Args, Command, Error, }; +use reth_db::ClientVersion; use reth_storage_errors::db::LogLevel; /// Parameters for database configuration @@ -24,7 +25,15 @@ pub struct DatabaseArgs { impl DatabaseArgs { /// Returns default database arguments with configured log level and client version. pub fn database_args(&self) -> reth_db::mdbx::DatabaseArguments { - reth_db::mdbx::DatabaseArguments::new(default_client_version()) + self.get_database_args(default_client_version()) + } + + /// Returns the database arguments with configured log level and given client version. 
+ pub const fn get_database_args( + &self, + client_version: ClientVersion, + ) -> reth_db::mdbx::DatabaseArguments { + reth_db::mdbx::DatabaseArguments::new(client_version) .with_log_level(self.log_level) .with_exclusive(self.exclusive) } From 78415ff7c586eb11097d5181153e7b7d2c092aac Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 15 Oct 2024 21:18:21 +0200 Subject: [PATCH 066/425] chore: include hash in trace (#11762) --- crates/optimism/rpc/src/eth/transaction.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/optimism/rpc/src/eth/transaction.rs b/crates/optimism/rpc/src/eth/transaction.rs index 36556905e..e161504f8 100644 --- a/crates/optimism/rpc/src/eth/transaction.rs +++ b/crates/optimism/rpc/src/eth/transaction.rs @@ -40,7 +40,7 @@ where // On optimism, transactions are forwarded directly to the sequencer to be included in // blocks that it builds. if let Some(client) = self.raw_tx_forwarder().as_ref() { - tracing::debug!( target: "rpc::eth", "forwarding raw transaction to"); + tracing::debug!(target: "rpc::eth", hash = %pool_transaction.hash(), "forwarding raw transaction to sequencer"); let _ = client.forward_raw_transaction(&tx).await.inspect_err(|err| { tracing::debug!(target: "rpc::eth", %err, hash=% *pool_transaction.hash(), "failed to forward raw transaction"); }); From d4be773f5f71b49e7e40de3dd747a80af18daf74 Mon Sep 17 00:00:00 2001 From: "0xriazaka.eth" <168359025+0xriazaka@users.noreply.github.com> Date: Tue, 15 Oct 2024 21:47:23 +0100 Subject: [PATCH 067/425] chore: move tests in reth_execution_types::chain to reth-evm-optimism (#11115) Co-authored-by: Emilia Hane Co-authored-by: Matthias Seitz --- crates/evm/execution-types/src/chain.rs | 12 +--- crates/optimism/evm/Cargo.toml | 1 + crates/optimism/evm/src/lib.rs | 82 ++++++++++++++++++++++++- 3 files changed, 83 insertions(+), 12 deletions(-) diff --git a/crates/evm/execution-types/src/chain.rs b/crates/evm/execution-types/src/chain.rs index 30f1f4cd2..d3ed2913e 100644 --- a/crates/evm/execution-types/src/chain.rs +++ b/crates/evm/execution-types/src/chain.rs @@ -656,7 +656,6 @@ pub(super) mod serde_bincode_compat { mod tests { use super::*; use alloy_primitives::B256; - use reth_primitives::{Receipt, Receipts, TxType}; use revm::primitives::{AccountInfo, HashMap}; #[test] @@ -789,7 +788,10 @@ mod tests { } #[test] + #[cfg(not(feature = "optimism"))] fn receipts_by_block_hash() { + use reth_primitives::{Receipt, Receipts, TxType}; + // Create a default SealedBlockWithSenders object let block = SealedBlockWithSenders::default(); @@ -811,10 +813,6 @@ mod tests { cumulative_gas_used: 46913, logs: vec![], success: true, - #[cfg(feature = "optimism")] - deposit_nonce: Some(18), - #[cfg(feature = "optimism")] - deposit_receipt_version: Some(34), }; // Create another random receipt object, receipt2 @@ -823,10 +821,6 @@ mod tests { cumulative_gas_used: 1325345, logs: vec![], success: true, - #[cfg(feature = "optimism")] - deposit_nonce: Some(18), - #[cfg(feature = "optimism")] - deposit_receipt_version: Some(34), }; // Create a Receipts object with a vector of receipt vectors diff --git a/crates/optimism/evm/Cargo.toml b/crates/optimism/evm/Cargo.toml index 76fa3ce69..72231716f 100644 --- a/crates/optimism/evm/Cargo.toml +++ b/crates/optimism/evm/Cargo.toml @@ -42,6 +42,7 @@ tracing.workspace = true alloy-eips.workspace = true reth-revm = { workspace = true, features = ["test-utils"] } +reth-primitives = { workspace = true, features = ["test-utils"] } reth-optimism-chainspec.workspace = true 
alloy-genesis.workspace = true alloy-consensus.workspace = true diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index 63d0ee6b4..f3de053f7 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -204,12 +204,13 @@ mod tests { use super::*; use alloy_genesis::Genesis; use alloy_primitives::{B256, U256}; - use reth_chainspec::{Chain, ChainSpec}; + use reth_chainspec::ChainSpec; use reth_evm::execute::ProviderError; + use reth_execution_types::{Chain, ExecutionOutcome}; use reth_optimism_chainspec::BASE_MAINNET; use reth_primitives::{ revm_primitives::{BlockEnv, CfgEnv, SpecId}, - Header, KECCAK_EMPTY, + Header, Receipt, Receipts, SealedBlockWithSenders, TxType, KECCAK_EMPTY, }; use reth_revm::{ db::{CacheDB, EmptyDBTyped}, @@ -237,7 +238,7 @@ mod tests { // Build the ChainSpec for Ethereum mainnet, activating London, Paris, and Shanghai // hardforks let chain_spec = ChainSpec::builder() - .chain(Chain::mainnet()) + .chain(0.into()) .genesis(Genesis::default()) .london_activated() .paris_activated() @@ -540,4 +541,79 @@ mod tests { // Optimism in handler assert_eq!(evm.handler.cfg, HandlerCfg { spec_id: SpecId::ECOTONE, is_optimism: true }); } + + #[test] + fn receipts_by_block_hash() { + // Create a default SealedBlockWithSenders object + let block = SealedBlockWithSenders::default(); + + // Define block hashes for block1 and block2 + let block1_hash = B256::new([0x01; 32]); + let block2_hash = B256::new([0x02; 32]); + + // Clone the default block into block1 and block2 + let mut block1 = block.clone(); + let mut block2 = block; + + // Set the hashes of block1 and block2 + block1.block.header.set_block_number(10); + block1.block.header.set_hash(block1_hash); + + block2.block.header.set_block_number(11); + block2.block.header.set_hash(block2_hash); + + // Create a random receipt object, receipt1 + let receipt1 = Receipt { + tx_type: TxType::Legacy, + cumulative_gas_used: 46913, + logs: vec![], + success: true, + deposit_nonce: Some(18), + deposit_receipt_version: Some(34), + }; + + // Create another random receipt object, receipt2 + let receipt2 = Receipt { + tx_type: TxType::Legacy, + cumulative_gas_used: 1325345, + logs: vec![], + success: true, + deposit_nonce: Some(18), + deposit_receipt_version: Some(34), + }; + + // Create a Receipts object with a vector of receipt vectors + let receipts = + Receipts { receipt_vec: vec![vec![Some(receipt1.clone())], vec![Some(receipt2)]] }; + + // Create an ExecutionOutcome object with the created bundle, receipts, an empty requests + // vector, and first_block set to 10 + let execution_outcome = ExecutionOutcome { + bundle: Default::default(), + receipts, + requests: vec![], + first_block: 10, + }; + + // Create a Chain object with a BTreeMap of blocks mapped to their block numbers, + // including block1_hash and block2_hash, and the execution_outcome + let chain = Chain::new([block1, block2], execution_outcome.clone(), None); + + // Assert that the proper receipt vector is returned for block1_hash + assert_eq!(chain.receipts_by_block_hash(block1_hash), Some(vec![&receipt1])); + + // Create an ExecutionOutcome object with a single receipt vector containing receipt1 + let execution_outcome1 = ExecutionOutcome { + bundle: Default::default(), + receipts: Receipts { receipt_vec: vec![vec![Some(receipt1)]] }, + requests: vec![], + first_block: 10, + }; + + // Assert that the execution outcome at the first block contains only the first receipt + assert_eq!(chain.execution_outcome_at_block(10), 
Some(execution_outcome1)); + + // Assert that the execution outcome at the tip block contains the whole execution outcome + assert_eq!(chain.execution_outcome_at_block(11), Some(execution_outcome)); + } } From 3f3a7ef0237282862157e33776563b6a371ac3cb Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Wed, 16 Oct 2024 15:57:28 +0800 Subject: [PATCH 068/425] unify &Option to Option<&T> (#11755) --- crates/cli/commands/src/db/get.rs | 7 +++---- crates/rpc/rpc-builder/src/lib.rs | 4 ++-- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/crates/cli/commands/src/db/get.rs b/crates/cli/commands/src/db/get.rs index 5b794feea..4006d1660 100644 --- a/crates/cli/commands/src/db/get.rs +++ b/crates/cli/commands/src/db/get.rs @@ -132,9 +132,8 @@ pub(crate) fn table_key(key: &str) -> Result { } /// Get an instance of subkey for given dupsort table -fn table_subkey(subkey: &Option) -> Result { - serde_json::from_str::(&subkey.clone().unwrap_or_default()) - .map_err(|e| eyre::eyre!(e)) +fn table_subkey(subkey: Option<&str>) -> Result { + serde_json::from_str::(subkey.unwrap_or_default()).map_err(|e| eyre::eyre!(e)) } struct GetValueViewer<'a, N: NodeTypesWithDB> { @@ -175,7 +174,7 @@ impl TableViewer<()> for GetValueViewer<'_, N> { let key = table_key::(&self.key)?; // process dupsort table - let subkey = table_subkey::(&self.subkey)?; + let subkey = table_subkey::(self.subkey.as_deref())?; match self.tool.get_dup::(key, subkey)? { Some(content) => { diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index 6a1240b64..cd93aeb62 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -676,8 +676,8 @@ impl RpcModuleConfigBuilder { } /// Get a reference to the eth namespace config, if any - pub const fn get_eth(&self) -> &Option { - &self.eth + pub const fn get_eth(&self) -> Option<&EthConfig> { + self.eth.as_ref() } /// Get a mutable reference to the eth namespace config, if any From b8147708ad6ad997c14c2a11b4ed5196bdd38e4b Mon Sep 17 00:00:00 2001 From: Kien Trinh <51135161+kien6034@users.noreply.github.com> Date: Wed, 16 Oct 2024 15:05:56 +0700 Subject: [PATCH 069/425] feat(txpool): function to return the next free nonce (#11744) Co-authored-by: Matthias Seitz --- crates/transaction-pool/src/identifier.rs | 5 +++ crates/transaction-pool/src/lib.rs | 8 ++++ crates/transaction-pool/src/noop.rs | 8 ++++ crates/transaction-pool/src/pool/mod.rs | 11 +++++ crates/transaction-pool/src/pool/txpool.rs | 51 ++++++++++++++++++++++ crates/transaction-pool/src/traits.rs | 15 +++++++ 6 files changed, 98 insertions(+) diff --git a/crates/transaction-pool/src/identifier.rs b/crates/transaction-pool/src/identifier.rs index 97d4bda8d..c50d39ae4 100644 --- a/crates/transaction-pool/src/identifier.rs +++ b/crates/transaction-pool/src/identifier.rs @@ -59,6 +59,11 @@ impl SenderId { pub const fn start_bound(self) -> std::ops::Bound { std::ops::Bound::Included(TransactionId::new(self, 0)) } + + /// Converts the sender to a [`TransactionId`] with the given nonce. 
+ pub const fn into_id(self, nonce: u64) -> TransactionId { + TransactionId::new(self, nonce) + } } impl From for SenderId { diff --git a/crates/transaction-pool/src/lib.rs b/crates/transaction-pool/src/lib.rs index 75465afac..2cffcd33f 100644 --- a/crates/transaction-pool/src/lib.rs +++ b/crates/transaction-pool/src/lib.rs @@ -510,6 +510,14 @@ where self.pool.get_highest_transaction_by_sender(sender) } + fn get_highest_consecutive_transaction_by_sender( + &self, + sender: Address, + on_chain_nonce: u64, + ) -> Option>> { + self.pool.get_highest_consecutive_transaction_by_sender(sender, on_chain_nonce) + } + fn get_transaction_by_sender_and_nonce( &self, sender: Address, diff --git a/crates/transaction-pool/src/noop.rs b/crates/transaction-pool/src/noop.rs index 681aa9896..4464ae1fc 100644 --- a/crates/transaction-pool/src/noop.rs +++ b/crates/transaction-pool/src/noop.rs @@ -227,6 +227,14 @@ impl TransactionPool for NoopTransactionPool { None } + fn get_highest_consecutive_transaction_by_sender( + &self, + _sender: Address, + _on_chain_nonce: u64, + ) -> Option>> { + None + } + fn get_transaction_by_sender_and_nonce( &self, _sender: Address, diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index 44dd5a8d5..64e6dad67 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -785,6 +785,17 @@ where self.get_pool_data().get_highest_transaction_by_sender(sender_id) } + /// Returns the transaction with the highest nonce that is executable given the on chain nonce. + pub(crate) fn get_highest_consecutive_transaction_by_sender( + &self, + sender: Address, + on_chain_nonce: u64, + ) -> Option>> { + let sender_id = self.get_sender_id(sender); + self.get_pool_data() + .get_highest_consecutive_transaction_by_sender(sender_id.into_id(on_chain_nonce)) + } + /// Returns all transactions that where submitted with the given [`TransactionOrigin`] pub(crate) fn get_transactions_by_origin( &self, diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index 7aaa77df4..c470faf3a 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -108,6 +108,27 @@ impl TxPool { self.all().txs_iter(sender).last().map(|(_, tx)| Arc::clone(&tx.transaction)) } + /// Returns the transaction with the highest nonce that is executable given the on chain nonce. + /// + /// Note: The next pending pooled transaction must have the on chain nonce. + pub(crate) fn get_highest_consecutive_transaction_by_sender( + &self, + on_chain: TransactionId, + ) -> Option>> { + let mut last_consecutive_tx = None; + + let mut next_expected_nonce = on_chain.nonce; + for (id, tx) in self.all().descendant_txs_inclusive(&on_chain) { + if next_expected_nonce != id.nonce { + break + } + next_expected_nonce = id.next_nonce(); + last_consecutive_tx = Some(tx); + } + + last_consecutive_tx.map(|tx| Arc::clone(&tx.transaction)) + } + /// Returns access to the [`AllTransactions`] container. pub(crate) const fn all(&self) -> &AllTransactions { &self.all_transactions @@ -2755,6 +2776,36 @@ mod tests { assert_eq!(highest_tx.as_ref().transaction, tx1); } + #[test] + fn get_highest_consecutive_transaction_by_sender() { + // Set up a mock transaction factory and a new transaction pool. + let mut pool = TxPool::new(MockOrdering::default(), PoolConfig::default()); + let mut f = MockTransactionFactory::default(); + + // Create transactions with nonces 0, 1, 2, 4, 5. 
+ let sender = Address::random(); + let txs: Vec<_> = vec![0, 1, 2, 4, 5]; + for nonce in txs { + let mut mock_tx = MockTransaction::eip1559(); + mock_tx.set_sender(sender); + mock_tx.set_nonce(nonce); + + let validated_tx = f.validated(mock_tx); + pool.add_transaction(validated_tx, U256::from(1000), 0).unwrap(); + } + + // Get last consecutive transaction + let sender_id = f.ids.sender_id(&sender).unwrap(); + let next_tx = pool.get_highest_consecutive_transaction_by_sender(sender_id.into_id(0)); + assert_eq!(next_tx.map(|tx| tx.nonce()), Some(2), "Expected nonce 2 for on-chain nonce 0"); + + let next_tx = pool.get_highest_consecutive_transaction_by_sender(sender_id.into_id(4)); + assert_eq!(next_tx.map(|tx| tx.nonce()), Some(5), "Expected nonce 5 for on-chain nonce 4"); + + let next_tx = pool.get_highest_consecutive_transaction_by_sender(sender_id.into_id(5)); + assert_eq!(next_tx.map(|tx| tx.nonce()), Some(5), "Expected nonce 5 for on-chain nonce 5"); + } + #[test] fn discard_nonce_too_low() { let mut f = MockTransactionFactory::default(); diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index 9a6eda03d..d19381935 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -357,6 +357,21 @@ pub trait TransactionPool: Send + Sync + Clone { sender: Address, ) -> Option>>; + /// Returns the transaction with the highest nonce that is executable given the on chain nonce. + /// In other words the highest non nonce gapped transaction. + /// + /// Note: The next pending pooled transaction must have the on chain nonce. + /// + /// For example, for a given on chain nonce of `5`, the next transaction must have that nonce. + /// If the pool contains txs `[5,6,7]` this returns tx `7`. + /// If the pool contains txs `[6,7]` this returns `None` because the next valid nonce (5) is + /// missing, which means txs `[6,7]` are nonce gapped. + fn get_highest_consecutive_transaction_by_sender( + &self, + sender: Address, + on_chain_nonce: u64, + ) -> Option>>; + /// Returns a transaction sent by a given user and a nonce fn get_transaction_by_sender_and_nonce( &self, From 183cea4577714b451d9e3e1ef4a89016a79cf832 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Wed, 16 Oct 2024 17:13:30 +0900 Subject: [PATCH 070/425] chore(provider): move `state_provider_from_state` to `BlockState` impl (#11777) --- crates/chain-state/src/in_memory.rs | 24 ++++++++----------- .../src/providers/blockchain_provider.rs | 2 +- 2 files changed, 11 insertions(+), 15 deletions(-) diff --git a/crates/chain-state/src/in_memory.rs b/crates/chain-state/src/in_memory.rs index fb67608eb..f157da5ff 100644 --- a/crates/chain-state/src/in_memory.rs +++ b/crates/chain-state/src/in_memory.rs @@ -504,20 +504,6 @@ impl CanonicalInMemoryState { self.inner.canon_state_notification_sender.send(event).ok(); } - /// Return state provider with reference to in-memory blocks that overlay database state. - /// - /// This merges the state of all blocks that are part of the chain that the requested block is - /// the head of. This includes all blocks that connect back to the canonical block on disk. 
- pub fn state_provider_from_state( - &self, - state: &BlockState, - historical: StateProviderBox, - ) -> MemoryOverlayStateProvider { - let in_memory = state.chain().into_iter().map(|block_state| block_state.block()).collect(); - - MemoryOverlayStateProvider::new(historical, in_memory) - } - /// Return state provider with reference to in-memory blocks that overlay database state. /// /// This merges the state of all blocks that are part of the chain that the requested block is @@ -723,6 +709,16 @@ impl BlockState { pub fn iter(self: Arc) -> impl Iterator> { std::iter::successors(Some(self), |state| state.parent.clone()) } + + /// Return state provider with reference to in-memory blocks that overlay database state. + /// + /// This merges the state of all blocks that are part of the chain that the this block is + /// the head of. This includes all blocks that connect back to the canonical block on disk. + pub fn state_provider(&self, historical: StateProviderBox) -> MemoryOverlayStateProvider { + let in_memory = self.chain().into_iter().map(|block_state| block_state.block()).collect(); + + MemoryOverlayStateProvider::new(historical, in_memory) + } } /// Represents an executed block stored in-memory. diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs b/crates/storage/provider/src/providers/blockchain_provider.rs index 9b88cab13..3013be060 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -402,7 +402,7 @@ impl BlockchainProvider2 { ) -> ProviderResult { let anchor_hash = state.anchor().hash; let latest_historical = self.database.history_by_block_hash(anchor_hash)?; - Ok(self.canonical_in_memory_state.state_provider_from_state(state, latest_historical)) + Ok(state.state_provider(latest_historical)) } /// Fetches data from either in-memory state or persistent storage for a range of transactions. 
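The relocated `BlockState::state_provider` keeps the same overlay semantics as the removed `CanonicalInMemoryState::state_provider_from_state`: the in-memory chain that ends at this block is layered on top of a historical provider anchored at the canonical block on disk. Below is a minimal sketch of a caller, built only from the signature shown in the diff above; the `use` paths are assumptions and may differ from the crates' actual re-exports.

    use reth_chain_state::{BlockState, MemoryOverlayStateProvider}; // paths assumed
    use reth_provider::StateProviderBox;                            // path assumed

    /// Layers the in-memory chain of `state` over an on-disk (historical) provider,
    /// mirroring the call site in `BlockchainProvider2::block_state_provider`.
    fn overlay_provider(
        state: &BlockState,
        historical: StateProviderBox,
    ) -> MemoryOverlayStateProvider {
        // `state_provider` collects this block and its in-memory ancestors back to
        // the canonical anchor, then wraps them around the historical provider.
        state.state_provider(historical)
    }
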
From a6358d2e6fd90ecb7c225733fcca9a7587a60d58 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Wed, 16 Oct 2024 17:52:56 +0900 Subject: [PATCH 071/425] feat(provider): add `*StateProviderRef` creation methods to `DatabaseProvider` (#11776) --- .../src/providers/database/provider.rs | 74 +++++++++++++++++-- .../src/providers/state/historical.rs | 18 +++++ 2 files changed, 84 insertions(+), 8 deletions(-) diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 3b074a5af..8140700fa 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -9,12 +9,12 @@ use crate::{ AccountReader, BlockExecutionReader, BlockExecutionWriter, BlockHashReader, BlockNumReader, BlockReader, BlockWriter, BundleStateInit, ChainStateBlockReader, ChainStateBlockWriter, DBProvider, EvmEnvProvider, HashingWriter, HeaderProvider, HeaderSyncGap, - HeaderSyncGapProvider, HistoricalStateProvider, HistoryWriter, LatestStateProvider, - OriginalValuesKnown, ProviderError, PruneCheckpointReader, PruneCheckpointWriter, - RequestsProvider, RevertsInit, StageCheckpointReader, StateChangeWriter, StateProviderBox, - StateReader, StateWriter, StaticFileProviderFactory, StatsReader, StorageReader, - StorageTrieWriter, TransactionVariant, TransactionsProvider, TransactionsProviderExt, - TrieWriter, WithdrawalsProvider, + HeaderSyncGapProvider, HistoricalStateProvider, HistoricalStateProviderRef, HistoryWriter, + LatestStateProvider, LatestStateProviderRef, OriginalValuesKnown, ProviderError, + PruneCheckpointReader, PruneCheckpointWriter, RequestsProvider, RevertsInit, + StageCheckpointReader, StateChangeWriter, StateProviderBox, StateReader, StateWriter, + StaticFileProviderFactory, StatsReader, StorageReader, StorageTrieWriter, TransactionVariant, + TransactionsProvider, TransactionsProviderExt, TrieWriter, WithdrawalsProvider, }; use alloy_eips::BlockHashOrNumber; use alloy_primitives::{keccak256, Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; @@ -47,7 +47,7 @@ use reth_primitives::{ }; use reth_prune_types::{PruneCheckpoint, PruneModes, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; -use reth_storage_api::{StorageChangeSetReader, TryIntoHistoricalStateProvider}; +use reth_storage_api::{StateProvider, StorageChangeSetReader, TryIntoHistoricalStateProvider}; use reth_storage_errors::provider::{ProviderResult, RootMismatch}; use reth_trie::{ prefix_set::{PrefixSet, PrefixSetMut, TriePrefixSets}, @@ -68,7 +68,7 @@ use std::{ time::{Duration, Instant}, }; use tokio::sync::watch; -use tracing::{debug, error, warn}; +use tracing::{debug, error, trace, warn}; /// A [`DatabaseProvider`] that holds a read-only database transaction. 
pub type DatabaseProviderRO = DatabaseProvider<::TX, Spec>; @@ -145,6 +145,64 @@ impl DatabaseProvider { } } +impl DatabaseProvider { + /// State provider for latest block + pub fn latest<'a>(&'a self) -> ProviderResult> { + trace!(target: "providers::db", "Returning latest state provider"); + Ok(Box::new(LatestStateProviderRef::new(&self.tx, self.static_file_provider.clone()))) + } + + /// Storage provider for state at that given block hash + pub fn history_by_block_hash<'a>( + &'a self, + block_hash: BlockHash, + ) -> ProviderResult> { + let mut block_number = + self.block_number(block_hash)?.ok_or(ProviderError::BlockHashNotFound(block_hash))?; + if block_number == self.best_block_number().unwrap_or_default() && + block_number == self.last_block_number().unwrap_or_default() + { + return Ok(Box::new(LatestStateProviderRef::new( + &self.tx, + self.static_file_provider.clone(), + ))) + } + + // +1 as the changeset that we want is the one that was applied after this block. + block_number += 1; + + let account_history_prune_checkpoint = + self.get_prune_checkpoint(PruneSegment::AccountHistory)?; + let storage_history_prune_checkpoint = + self.get_prune_checkpoint(PruneSegment::StorageHistory)?; + + let mut state_provider = HistoricalStateProviderRef::new( + &self.tx, + block_number, + self.static_file_provider.clone(), + ); + + // If we pruned account or storage history, we can't return state on every historical block. + // Instead, we should cap it at the latest prune checkpoint for corresponding prune segment. + if let Some(prune_checkpoint_block_number) = + account_history_prune_checkpoint.and_then(|checkpoint| checkpoint.block_number) + { + state_provider = state_provider.with_lowest_available_account_history_block_number( + prune_checkpoint_block_number + 1, + ); + } + if let Some(prune_checkpoint_block_number) = + storage_history_prune_checkpoint.and_then(|checkpoint| checkpoint.block_number) + { + state_provider = state_provider.with_lowest_available_storage_history_block_number( + prune_checkpoint_block_number + 1, + ); + } + + Ok(Box::new(state_provider)) + } +} + impl StaticFileProviderFactory for DatabaseProvider { /// Returns a static file provider fn static_file_provider(&self) -> StaticFileProvider { diff --git a/crates/storage/provider/src/providers/state/historical.rs b/crates/storage/provider/src/providers/state/historical.rs index 781a11f6d..640041e08 100644 --- a/crates/storage/provider/src/providers/state/historical.rs +++ b/crates/storage/provider/src/providers/state/historical.rs @@ -227,6 +227,24 @@ impl<'b, TX: DbTx> HistoricalStateProviderRef<'b, TX> { Ok(HistoryInfo::NotYetWritten) } } + + /// Set the lowest block number at which the account history is available. + pub const fn with_lowest_available_account_history_block_number( + mut self, + block_number: BlockNumber, + ) -> Self { + self.lowest_available_blocks.account_history_block_number = Some(block_number); + self + } + + /// Set the lowest block number at which the storage history is available. 
+ pub const fn with_lowest_available_storage_history_block_number( + mut self, + block_number: BlockNumber, + ) -> Self { + self.lowest_available_blocks.storage_history_block_number = Some(block_number); + self + } } impl AccountReader for HistoricalStateProviderRef<'_, TX> { From a14a9fd8b037ba839c94946fb7fd091bb1202eb5 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 16 Oct 2024 10:59:20 +0200 Subject: [PATCH 072/425] chore: add chain_id shortcut (#11782) --- crates/chainspec/src/api.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/crates/chainspec/src/api.rs b/crates/chainspec/src/api.rs index 39a6716ee..f7061ff18 100644 --- a/crates/chainspec/src/api.rs +++ b/crates/chainspec/src/api.rs @@ -14,9 +14,14 @@ pub trait EthChainSpec: Send + Sync + Unpin + Debug { // todo: make chain spec type generic over hardfork //type Hardfork: Clone + Copy + 'static; - /// Chain id. + /// Returns the [`Chain`] object this spec targets. fn chain(&self) -> Chain; + /// Returns the chain id number + fn chain_id(&self) -> u64 { + self.chain().id() + } + /// Get the [`BaseFeeParams`] for the chain at the given block. fn base_fee_params_at_block(&self, block_number: u64) -> BaseFeeParams; From 323d8edfb924af27ce0644419371850d7ca96027 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Wed, 16 Oct 2024 11:02:23 +0200 Subject: [PATCH 073/425] feat: implement batch executor (#11753) --- Cargo.lock | 2 + crates/evm/Cargo.toml | 8 +- crates/evm/src/execute.rs | 171 +++++++++++++++++++++++++++-------- crates/evm/src/test_utils.rs | 45 ++++++++- 4 files changed, 184 insertions(+), 42 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b3cd15385..59f7ad353 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7359,12 +7359,14 @@ dependencies = [ "metrics", "parking_lot 0.12.3", "reth-chainspec", + "reth-consensus", "reth-execution-errors", "reth-execution-types", "reth-metrics", "reth-primitives", "reth-primitives-traits", "reth-prune-types", + "reth-revm", "reth-storage-errors", "revm", "revm-primitives", diff --git a/crates/evm/Cargo.toml b/crates/evm/Cargo.toml index 20070d421..6081eae42 100644 --- a/crates/evm/Cargo.toml +++ b/crates/evm/Cargo.toml @@ -13,16 +13,18 @@ workspace = true [dependencies] # reth reth-chainspec.workspace = true +reth-consensus.workspace = true reth-execution-errors.workspace = true +reth-execution-types.workspace = true +reth-metrics = { workspace = true, optional = true } reth-primitives.workspace = true reth-primitives-traits.workspace = true -revm-primitives.workspace = true reth-prune-types.workspace = true -reth-metrics = { workspace = true, optional = true } +reth-revm.workspace = true reth-storage-errors.workspace = true -reth-execution-types.workspace = true revm.workspace = true +revm-primitives.workspace = true # alloy alloy-primitives.workspace = true diff --git a/crates/evm/src/execute.rs b/crates/evm/src/execute.rs index eb77d054b..9413c709d 100644 --- a/crates/evm/src/execute.rs +++ b/crates/evm/src/execute.rs @@ -7,15 +7,17 @@ pub use reth_execution_errors::{ pub use reth_execution_types::{BlockExecutionInput, BlockExecutionOutput, ExecutionOutcome}; pub use reth_storage_errors::provider::ProviderError; -use alloc::{boxed::Box, vec::Vec}; +use crate::system_calls::OnStateHook; +use alloc::{boxed::Box, sync::Arc, vec::Vec}; use alloy_primitives::BlockNumber; use core::{fmt::Display, marker::PhantomData}; +use reth_chainspec::ChainSpec; +use reth_consensus::ConsensusError; use reth_primitives::{BlockWithSenders, Receipt, Request}; use 
reth_prune_types::PruneModes; +use reth_revm::batch::BlockBatchRecord; use revm::{db::BundleState, State}; -use revm_primitives::db::Database; - -use crate::system_calls::OnStateHook; +use revm_primitives::{db::Database, U256}; /// A general purpose executor trait that executes an input (e.g. block) and produces an output /// (e.g. state changes and receipts). @@ -170,25 +172,49 @@ pub trait BlockExecutionStrategy { type Error: From + core::error::Error; /// Applies any necessary changes before executing the block's transactions. - fn apply_pre_execution_changes(&mut self) -> Result<(), Self::Error>; + fn apply_pre_execution_changes( + &mut self, + block: &BlockWithSenders, + total_difficulty: U256, + ) -> Result<(), Self::Error>; /// Executes all transactions in the block. fn execute_transactions( &mut self, block: &BlockWithSenders, + total_difficulty: U256, ) -> Result<(Vec, u64), Self::Error>; /// Applies any necessary changes after executing the block's transactions. - fn apply_post_execution_changes(&mut self) -> Result, Self::Error>; + fn apply_post_execution_changes( + &mut self, + block: &BlockWithSenders, + total_difficulty: U256, + receipts: &[Receipt], + ) -> Result, Self::Error>; /// Returns a reference to the current state. fn state_ref(&self) -> &State; + /// Returns a mutable reference to the current state. + fn state_mut(&mut self) -> &mut State; + /// Sets a hook to be called after each state change during execution. fn with_state_hook(&mut self, hook: Option>); /// Returns the final bundle state. - fn finish(&self) -> BundleState; + fn finish(&mut self) -> BundleState; + + /// Returns the strategy chain spec. + fn chain_spec(&self) -> Arc; + + /// Validate a block with regard to execution results. + fn validate_block_post_execution( + &self, + block: &BlockWithSenders, + receipts: &[Receipt], + requests: &[Request], + ) -> Result<(), ConsensusError>; } /// A strategy factory that can create block execution strategies. @@ -250,7 +276,8 @@ where DB: Database + Display>, { let strategy = self.strategy_factory.create_strategy(db); - GenericBatchExecutor::new(strategy) + let batch_record = BlockBatchRecord::default(); + GenericBatchExecutor::new(strategy, batch_record) } } @@ -261,7 +288,8 @@ pub struct GenericBlockExecutor where S: BlockExecutionStrategy, { - strategy: S, + /// Block execution strategy. 
+ pub(crate) strategy: S, _phantom: PhantomData, } @@ -285,11 +313,12 @@ where type Error = S::Error; fn execute(mut self, input: Self::Input<'_>) -> Result { - let BlockExecutionInput { block, total_difficulty: _ } = input; + let BlockExecutionInput { block, total_difficulty } = input; - self.strategy.apply_pre_execution_changes()?; - let (receipts, gas_used) = self.strategy.execute_transactions(block)?; - let requests = self.strategy.apply_post_execution_changes()?; + self.strategy.apply_pre_execution_changes(block, total_difficulty)?; + let (receipts, gas_used) = self.strategy.execute_transactions(block, total_difficulty)?; + let requests = + self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; let state = self.strategy.finish(); Ok(BlockExecutionOutput { state, receipts, requests, gas_used }) @@ -303,11 +332,12 @@ where where F: FnMut(&State), { - let BlockExecutionInput { block, total_difficulty: _ } = input; + let BlockExecutionInput { block, total_difficulty } = input; - self.strategy.apply_pre_execution_changes()?; - let (receipts, gas_used) = self.strategy.execute_transactions(block)?; - let requests = self.strategy.apply_post_execution_changes()?; + self.strategy.apply_pre_execution_changes(block, total_difficulty)?; + let (receipts, gas_used) = self.strategy.execute_transactions(block, total_difficulty)?; + let requests = + self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; state(self.strategy.state_ref()); @@ -324,13 +354,14 @@ where where H: OnStateHook + 'static, { - let BlockExecutionInput { block, total_difficulty: _ } = input; + let BlockExecutionInput { block, total_difficulty } = input; self.strategy.with_state_hook(Some(Box::new(state_hook))); - self.strategy.apply_pre_execution_changes()?; - let (receipts, gas_used) = self.strategy.execute_transactions(block)?; - let requests = self.strategy.apply_post_execution_changes()?; + self.strategy.apply_pre_execution_changes(block, total_difficulty)?; + let (receipts, gas_used) = self.strategy.execute_transactions(block, total_difficulty)?; + let requests = + self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; let state = self.strategy.finish(); @@ -341,15 +372,24 @@ where /// A generic batch executor that uses a [`BlockExecutionStrategy`] to /// execute batches. #[allow(missing_debug_implementations)] -pub struct GenericBatchExecutor { - _strategy: S, +pub struct GenericBatchExecutor +where + S: BlockExecutionStrategy, +{ + /// Batch execution strategy. + pub(crate) strategy: S, + /// Keeps track of batch execution receipts and requests. + batch_record: BlockBatchRecord, _phantom: PhantomData, } -impl GenericBatchExecutor { +impl GenericBatchExecutor +where + S: BlockExecutionStrategy, +{ /// Creates a new `GenericBatchExecutor` with the given strategy. 
- pub const fn new(_strategy: S) -> Self { - Self { _strategy, _phantom: PhantomData } + pub const fn new(strategy: S, batch_record: BlockBatchRecord) -> Self { + Self { strategy, batch_record, _phantom: PhantomData } } } @@ -362,24 +402,52 @@ where type Output = ExecutionOutcome; type Error = BlockExecutionError; - fn execute_and_verify_one(&mut self, _input: Self::Input<'_>) -> Result<(), Self::Error> { - todo!() + fn execute_and_verify_one(&mut self, input: Self::Input<'_>) -> Result<(), Self::Error> { + let BlockExecutionInput { block, total_difficulty } = input; + + if self.batch_record.first_block().is_none() { + self.batch_record.set_first_block(block.number); + } + + self.strategy.apply_pre_execution_changes(block, total_difficulty)?; + let (receipts, _gas_used) = self.strategy.execute_transactions(block, total_difficulty)?; + let requests = + self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; + + self.strategy.validate_block_post_execution(block, &receipts, &requests)?; + + // prepare the state according to the prune mode + let retention = self.batch_record.bundle_retention(block.number); + self.strategy.state_mut().merge_transitions(retention); + + // store receipts in the set + self.batch_record.save_receipts(receipts)?; + + // store requests in the set + self.batch_record.save_requests(requests); + + Ok(()) } - fn finalize(self) -> Self::Output { - todo!() + fn finalize(mut self) -> Self::Output { + ExecutionOutcome::new( + self.strategy.state_mut().take_bundle(), + self.batch_record.take_receipts(), + self.batch_record.first_block().unwrap_or_default(), + self.batch_record.take_requests(), + ) } - fn set_tip(&mut self, _tip: BlockNumber) { - todo!() + fn set_tip(&mut self, tip: BlockNumber) { + self.batch_record.set_tip(tip); } - fn set_prune_modes(&mut self, _prune_modes: PruneModes) { - todo!() + fn set_prune_modes(&mut self, prune_modes: PruneModes) { + self.batch_record.set_prune_modes(prune_modes); } fn size_hint(&self) -> Option { - None + Some(self.strategy.state_ref().bundle_state.size_hint()) } } @@ -522,18 +590,28 @@ mod tests { impl BlockExecutionStrategy for TestExecutorStrategy { type Error = BlockExecutionError; - fn apply_pre_execution_changes(&mut self) -> Result<(), Self::Error> { + fn apply_pre_execution_changes( + &mut self, + _block: &BlockWithSenders, + _total_difficulty: U256, + ) -> Result<(), Self::Error> { Ok(()) } fn execute_transactions( &mut self, _block: &BlockWithSenders, + _total_difficulty: U256, ) -> Result<(Vec, u64), Self::Error> { Ok(self.execute_transactions_result.clone()) } - fn apply_post_execution_changes(&mut self) -> Result, Self::Error> { + fn apply_post_execution_changes( + &mut self, + _block: &BlockWithSenders, + _total_difficulty: U256, + _receipts: &[Receipt], + ) -> Result, Self::Error> { Ok(self.apply_post_execution_changes_result.clone()) } @@ -541,11 +619,28 @@ mod tests { &self.state } + fn state_mut(&mut self) -> &mut State { + &mut self.state + } + fn with_state_hook(&mut self, _hook: Option>) {} - fn finish(&self) -> BundleState { + fn finish(&mut self) -> BundleState { self.finish_result.clone() } + + fn chain_spec(&self) -> Arc { + MAINNET.clone() + } + + fn validate_block_post_execution( + &self, + _block: &BlockWithSenders, + _receipts: &[Receipt], + _requests: &[Request], + ) -> Result<(), ConsensusError> { + Ok(()) + } } #[derive(Clone)] diff --git a/crates/evm/src/test_utils.rs b/crates/evm/src/test_utils.rs index fc620bb42..a033c8023 100644 --- a/crates/evm/src/test_utils.rs +++ 
b/crates/evm/src/test_utils.rs @@ -2,7 +2,8 @@ use crate::{ execute::{ - BatchExecutor, BlockExecutionInput, BlockExecutionOutput, BlockExecutorProvider, Executor, + BatchExecutor, BlockExecutionInput, BlockExecutionOutput, BlockExecutionStrategy, + BlockExecutorProvider, Executor, GenericBatchExecutor, GenericBlockExecutor, }, system_calls::OnStateHook, }; @@ -110,3 +111,45 @@ impl BatchExecutor for MockExecutorProvider { None } } + +impl GenericBlockExecutor +where + S: BlockExecutionStrategy, +{ + /// Provides safe read access to the state + pub fn with_state(&self, f: F) -> R + where + F: FnOnce(&State) -> R, + { + f(self.strategy.state_ref()) + } + + /// Provides safe write access to the state + pub fn with_state_mut(&mut self, f: F) -> R + where + F: FnOnce(&mut State) -> R, + { + f(self.strategy.state_mut()) + } +} + +impl GenericBatchExecutor +where + S: BlockExecutionStrategy, +{ + /// Provides safe read access to the state + pub fn with_state(&self, f: F) -> R + where + F: FnOnce(&State) -> R, + { + f(self.strategy.state_ref()) + } + + /// Provides safe write access to the state + pub fn with_state_mut(&mut self, f: F) -> R + where + F: FnOnce(&mut State) -> R, + { + f(self.strategy.state_mut()) + } +} From 0f903b1e204ead35616bba24447a09b316763793 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Wed, 16 Oct 2024 11:40:33 +0200 Subject: [PATCH 074/425] feat: add EthExecutionStrategy (#11584) --- Cargo.lock | 1 + crates/ethereum/evm/Cargo.toml | 2 + crates/ethereum/evm/src/lib.rs | 1 + crates/ethereum/evm/src/strategy.rs | 1179 +++++++++++++++++++++++++++ crates/evm/src/execute.rs | 10 +- 5 files changed, 1184 insertions(+), 9 deletions(-) create mode 100644 crates/ethereum/evm/src/strategy.rs diff --git a/Cargo.lock b/Cargo.lock index 59f7ad353..eb0bb2a3d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7382,6 +7382,7 @@ dependencies = [ "alloy-primitives", "alloy-sol-types", "reth-chainspec", + "reth-consensus", "reth-ethereum-consensus", "reth-ethereum-forks", "reth-evm", diff --git a/crates/ethereum/evm/Cargo.toml b/crates/ethereum/evm/Cargo.toml index 61ce0a23b..a19cbc018 100644 --- a/crates/ethereum/evm/Cargo.toml +++ b/crates/ethereum/evm/Cargo.toml @@ -20,6 +20,7 @@ reth-revm.workspace = true reth-ethereum-consensus.workspace = true reth-prune-types.workspace = true reth-execution-types.workspace = true +reth-consensus.workspace = true # Ethereum revm-primitives.workspace = true @@ -31,6 +32,7 @@ alloy-sol-types.workspace = true [dev-dependencies] reth-testing-utils.workspace = true +reth-evm = { workspace = true, features = ["test-utils"] } reth-revm = { workspace = true, features = ["test-utils"] } reth-primitives = { workspace = true, features = ["secp256k1"] } secp256k1.workspace = true diff --git a/crates/ethereum/evm/src/lib.rs b/crates/ethereum/evm/src/lib.rs index cede8008b..ed18a24fb 100644 --- a/crates/ethereum/evm/src/lib.rs +++ b/crates/ethereum/evm/src/lib.rs @@ -33,6 +33,7 @@ use reth_ethereum_forks::EthereumHardfork; use reth_primitives::constants::EIP1559_INITIAL_BASE_FEE; pub mod execute; +pub mod strategy; /// Ethereum DAO hardfork state change data. pub mod dao_fork; diff --git a/crates/ethereum/evm/src/strategy.rs b/crates/ethereum/evm/src/strategy.rs new file mode 100644 index 000000000..52f58a8b0 --- /dev/null +++ b/crates/ethereum/evm/src/strategy.rs @@ -0,0 +1,1179 @@ +//! 
Ethereum block execution strategy, + +use crate::{ + dao_fork::{DAO_HARDFORK_BENEFICIARY, DAO_HARDKFORK_ACCOUNTS}, + EthEvmConfig, +}; +use alloc::sync::Arc; +use core::fmt::Display; +use reth_chainspec::{ChainSpec, EthereumHardfork, EthereumHardforks, MAINNET}; +use reth_consensus::ConsensusError; +use reth_ethereum_consensus::validate_block_post_execution; +use reth_evm::{ + execute::{ + BlockExecutionError, BlockExecutionStrategy, BlockExecutionStrategyFactory, + BlockValidationError, ProviderError, + }, + system_calls::{OnStateHook, SystemCaller}, + ConfigureEvm, ConfigureEvmEnv, +}; +use reth_primitives::{BlockWithSenders, Header, Receipt, Request}; +use reth_revm::{ + db::{states::bundle_state::BundleRetention, BundleState}, + state_change::post_block_balance_increments, + Database, DatabaseCommit, State, +}; +use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, U256}; + +/// Factory for [`EthExecutionStrategy`]. +#[derive(Debug, Clone)] +pub struct EthExecutionStrategyFactory { + /// The chainspec + chain_spec: Arc, + /// How to create an EVM. + evm_config: EvmConfig, +} + +impl EthExecutionStrategyFactory { + /// Creates a new default ethereum executor strategy factory. + pub fn ethereum(chain_spec: Arc) -> Self { + Self::new(chain_spec.clone(), EthEvmConfig::new(chain_spec)) + } + + /// Returns a new factory for the mainnet. + pub fn mainnet() -> Self { + Self::ethereum(MAINNET.clone()) + } +} + +impl EthExecutionStrategyFactory { + /// Creates a new executor strategy factory. + pub const fn new(chain_spec: Arc, evm_config: EvmConfig) -> Self { + Self { chain_spec, evm_config } + } +} + +impl BlockExecutionStrategyFactory for EthExecutionStrategyFactory { + type Strategy + Display>> = EthExecutionStrategy; + + fn create_strategy(&self, db: DB) -> Self::Strategy + where + DB: Database + Display>, + { + let state = + State::builder().with_database(db).with_bundle_update().without_state_clear().build(); + EthExecutionStrategy::new(state, self.chain_spec.clone(), self.evm_config.clone()) + } +} + +/// Block execution strategy for Ethereum. +#[allow(missing_debug_implementations)] +pub struct EthExecutionStrategy { + /// The chainspec + chain_spec: Arc, + /// How to create an EVM. + evm_config: EvmConfig, + /// Current state for block execution. + state: State, + /// Utility to call system smart contracts. + system_caller: SystemCaller, +} + +impl EthExecutionStrategy { + /// Creates a new [`EthExecutionStrategy`] + pub fn new(state: State, chain_spec: Arc, evm_config: EthEvmConfig) -> Self { + let system_caller = SystemCaller::new(evm_config.clone(), (*chain_spec).clone()); + Self { state, chain_spec, evm_config, system_caller } + } +} + +impl EthExecutionStrategy +where + DB: Database + Display>, + EvmConfig: ConfigureEvm
, +{ + /// Configures a new evm configuration and block environment for the given block. + /// + /// # Caution + /// + /// This does not initialize the tx environment. + fn evm_env_for_block(&self, header: &Header, total_difficulty: U256) -> EnvWithHandlerCfg { + let mut cfg = CfgEnvWithHandlerCfg::new(Default::default(), Default::default()); + let mut block_env = BlockEnv::default(); + self.evm_config.fill_cfg_and_block_env(&mut cfg, &mut block_env, header, total_difficulty); + + EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, Default::default()) + } +} + +impl BlockExecutionStrategy for EthExecutionStrategy +where + DB: Database + Display>, +{ + type Error = BlockExecutionError; + + fn apply_pre_execution_changes( + &mut self, + block: &BlockWithSenders, + total_difficulty: U256, + ) -> Result<(), Self::Error> { + // Set state clear flag if the block is after the Spurious Dragon hardfork. + let state_clear_flag = + (*self.chain_spec).is_spurious_dragon_active_at_block(block.header.number); + self.state.set_state_clear_flag(state_clear_flag); + + let env = self.evm_env_for_block(&block.header, total_difficulty); + let mut evm = self.evm_config.evm_with_env(&mut self.state, env); + + self.system_caller.apply_pre_execution_changes(block, &mut evm)?; + + Ok(()) + } + + fn execute_transactions( + &mut self, + block: &BlockWithSenders, + total_difficulty: U256, + ) -> Result<(Vec, u64), Self::Error> { + let env = self.evm_env_for_block(&block.header, total_difficulty); + let mut evm = self.evm_config.evm_with_env(&mut self.state, env); + + let mut cumulative_gas_used = 0; + let mut receipts = Vec::with_capacity(block.body.transactions.len()); + for (sender, transaction) in block.transactions_with_sender() { + // The sum of the transaction’s gas limit, Tg, and the gas utilized in this block prior, + // must be no greater than the block’s gasLimit. + let block_available_gas = block.header.gas_limit - cumulative_gas_used; + if transaction.gas_limit() > block_available_gas { + return Err(BlockValidationError::TransactionGasLimitMoreThanAvailableBlockGas { + transaction_gas_limit: transaction.gas_limit(), + block_available_gas, + } + .into()) + } + + self.evm_config.fill_tx_env(evm.tx_mut(), transaction, *sender); + + // Execute transaction. + let result_and_state = evm.transact().map_err(move |err| { + let new_err = err.map_db_err(|e| e.into()); + // Ensure hash is calculated for error log, if not already done + BlockValidationError::EVM { + hash: transaction.recalculate_hash(), + error: Box::new(new_err), + } + })?; + self.system_caller.on_state(&result_and_state); + let ResultAndState { result, state } = result_and_state; + evm.db_mut().commit(state); + + // append gas used + cumulative_gas_used += result.gas_used(); + + // Push transaction changeset and calculate header bloom filter for receipt. + receipts.push( + #[allow(clippy::needless_update)] // side-effect of optimism fields + Receipt { + tx_type: transaction.tx_type(), + // Success flag was added in `EIP-658: Embedding transaction status code in + // receipts`. 
+ success: result.is_success(), + cumulative_gas_used, + // convert to reth log + logs: result.into_logs(), + ..Default::default() + }, + ); + } + Ok((receipts, cumulative_gas_used)) + } + + fn apply_post_execution_changes( + &mut self, + block: &BlockWithSenders, + total_difficulty: U256, + receipts: &[Receipt], + ) -> Result, Self::Error> { + let env = self.evm_env_for_block(&block.header, total_difficulty); + let mut evm = self.evm_config.evm_with_env(&mut self.state, env); + + let requests = if self.chain_spec.is_prague_active_at_timestamp(block.timestamp) { + // Collect all EIP-6110 deposits + let deposit_requests = + crate::eip6110::parse_deposits_from_receipts(&self.chain_spec, receipts)?; + + let post_execution_requests = + self.system_caller.apply_post_execution_changes(&mut evm)?; + + [deposit_requests, post_execution_requests].concat() + } else { + vec![] + }; + drop(evm); + + let mut balance_increments = + post_block_balance_increments(&self.chain_spec, block, total_difficulty); + + // Irregular state change at Ethereum DAO hardfork + if self.chain_spec.fork(EthereumHardfork::Dao).transitions_at_block(block.number) { + // drain balances from hardcoded addresses. + let drained_balance: u128 = self + .state + .drain_balances(DAO_HARDKFORK_ACCOUNTS) + .map_err(|_| BlockValidationError::IncrementBalanceFailed)? + .into_iter() + .sum(); + + // return balance to DAO beneficiary. + *balance_increments.entry(DAO_HARDFORK_BENEFICIARY).or_default() += drained_balance; + } + // increment balances + self.state + .increment_balances(balance_increments) + .map_err(|_| BlockValidationError::IncrementBalanceFailed)?; + + Ok(requests) + } + + fn state_ref(&self) -> &State { + &self.state + } + + fn state_mut(&mut self) -> &mut State { + &mut self.state + } + + fn with_state_hook(&mut self, hook: Option>) { + self.system_caller.with_state_hook(hook); + } + + fn finish(&mut self) -> BundleState { + self.state.merge_transitions(BundleRetention::Reverts); + self.state.take_bundle() + } + + fn validate_block_post_execution( + &self, + block: &BlockWithSenders, + receipts: &[Receipt], + requests: &[Request], + ) -> Result<(), ConsensusError> { + validate_block_post_execution(block, &self.chain_spec.clone(), receipts, requests) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_consensus::{TxLegacy, EMPTY_ROOT_HASH}; + use alloy_eips::{ + eip2935::{HISTORY_STORAGE_ADDRESS, HISTORY_STORAGE_CODE}, + eip4788::{BEACON_ROOTS_ADDRESS, BEACON_ROOTS_CODE, SYSTEM_ADDRESS}, + eip7002::{WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS, WITHDRAWAL_REQUEST_PREDEPLOY_CODE}, + }; + use alloy_primitives::{b256, fixed_bytes, keccak256, Bytes, TxKind, B256}; + use reth_chainspec::{ChainSpecBuilder, ForkCondition}; + use reth_evm::execute::{ + BatchExecutor, BlockExecutorProvider, Executor, GenericBlockExecutorProvider, + }; + use reth_execution_types::BlockExecutionOutput; + use reth_primitives::{ + constants::ETH_TO_WEI, public_key_to_address, Account, Block, BlockBody, Transaction, + }; + use reth_revm::{ + database::StateProviderDatabase, test_utils::StateProviderTest, TransitionState, + }; + use reth_testing_utils::generators::{self, sign_tx_with_key_pair}; + use revm_primitives::BLOCKHASH_SERVE_WINDOW; + use secp256k1::{Keypair, Secp256k1}; + use std::collections::HashMap; + + fn create_state_provider_with_beacon_root_contract() -> StateProviderTest { + let mut db = StateProviderTest::default(); + + let beacon_root_contract_account = Account { + balance: U256::ZERO, + bytecode_hash: 
Some(keccak256(BEACON_ROOTS_CODE.clone())), + nonce: 1, + }; + + db.insert_account( + BEACON_ROOTS_ADDRESS, + beacon_root_contract_account, + Some(BEACON_ROOTS_CODE.clone()), + HashMap::default(), + ); + + db + } + + fn create_state_provider_with_withdrawal_requests_contract() -> StateProviderTest { + let mut db = StateProviderTest::default(); + + let withdrawal_requests_contract_account = Account { + nonce: 1, + balance: U256::ZERO, + bytecode_hash: Some(keccak256(WITHDRAWAL_REQUEST_PREDEPLOY_CODE.clone())), + }; + + db.insert_account( + WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS, + withdrawal_requests_contract_account, + Some(WITHDRAWAL_REQUEST_PREDEPLOY_CODE.clone()), + HashMap::default(), + ); + + db + } + + fn executor_provider( + chain_spec: Arc, + ) -> GenericBlockExecutorProvider { + let strategy_factory = + EthExecutionStrategyFactory::new(chain_spec.clone(), EthEvmConfig::new(chain_spec)); + + GenericBlockExecutorProvider::new(strategy_factory) + } + + #[test] + fn eip_4788_non_genesis_call() { + let mut header = + Header { timestamp: 1, number: 1, excess_blob_gas: Some(0), ..Header::default() }; + + let db = create_state_provider_with_beacon_root_contract(); + + let chain_spec = Arc::new( + ChainSpecBuilder::from(&*MAINNET) + .shanghai_activated() + .with_fork(EthereumHardfork::Cancun, ForkCondition::Timestamp(1)) + .build(), + ); + + let provider = executor_provider(chain_spec); + + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + + // attempt to execute a block without parent beacon block root, expect err + let err = executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { + header: header.clone(), + body: BlockBody { + transactions: vec![], + ommers: vec![], + withdrawals: None, + requests: None, + }, + }, + senders: vec![], + }, + U256::ZERO, + ) + .into(), + ) + .expect_err( + "Executing cancun block without parent beacon block root field should fail", + ); + + assert_eq!( + err.as_validation().unwrap().clone(), + BlockValidationError::MissingParentBeaconBlockRoot + ); + + // fix header, set a gas limit + header.parent_beacon_block_root = Some(B256::with_last_byte(0x69)); + + // Now execute a block with the fixed header, ensure that it does not fail + executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { + header: header.clone(), + body: BlockBody { + transactions: vec![], + ommers: vec![], + withdrawals: None, + requests: None, + }, + }, + senders: vec![], + }, + U256::ZERO, + ) + .into(), + ) + .unwrap(); + + // check the actual storage of the contract - it should be: + // * The storage value at header.timestamp % HISTORY_BUFFER_LENGTH should be + // header.timestamp + // * The storage value at header.timestamp % HISTORY_BUFFER_LENGTH + HISTORY_BUFFER_LENGTH + // // should be parent_beacon_block_root + let history_buffer_length = 8191u64; + let timestamp_index = header.timestamp % history_buffer_length; + let parent_beacon_block_root_index = + timestamp_index % history_buffer_length + history_buffer_length; + + let timestamp_storage = executor.with_state_mut(|state| { + state.storage(BEACON_ROOTS_ADDRESS, U256::from(timestamp_index)).unwrap() + }); + assert_eq!(timestamp_storage, U256::from(header.timestamp)); + + // get parent beacon block root storage and compare + let parent_beacon_block_root_storage = executor.with_state_mut(|state| { + state + .storage(BEACON_ROOTS_ADDRESS, U256::from(parent_beacon_block_root_index)) + .expect("storage value should exist") + }); + 
assert_eq!(parent_beacon_block_root_storage, U256::from(0x69)); + } + + #[test] + fn eip_4788_no_code_cancun() { + // This test ensures that we "silently fail" when cancun is active and there is no code at + // // BEACON_ROOTS_ADDRESS + let header = Header { + timestamp: 1, + number: 1, + parent_beacon_block_root: Some(B256::with_last_byte(0x69)), + excess_blob_gas: Some(0), + ..Header::default() + }; + + let db = StateProviderTest::default(); + + // DON'T deploy the contract at genesis + let chain_spec = Arc::new( + ChainSpecBuilder::from(&*MAINNET) + .shanghai_activated() + .with_fork(EthereumHardfork::Cancun, ForkCondition::Timestamp(1)) + .build(), + ); + + let provider = executor_provider(chain_spec); + + // attempt to execute an empty block with parent beacon block root, this should not fail + provider + .batch_executor(StateProviderDatabase::new(&db)) + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { + header, + body: BlockBody { + transactions: vec![], + ommers: vec![], + withdrawals: None, + requests: None, + }, + }, + senders: vec![], + }, + U256::ZERO, + ) + .into(), + ) + .expect( + "Executing a block with no transactions while cancun is active should not fail", + ); + } + + #[test] + fn eip_4788_empty_account_call() { + // This test ensures that we do not increment the nonce of an empty SYSTEM_ADDRESS account + // // during the pre-block call + + let mut db = create_state_provider_with_beacon_root_contract(); + + // insert an empty SYSTEM_ADDRESS + db.insert_account(SYSTEM_ADDRESS, Account::default(), None, HashMap::default()); + + let chain_spec = Arc::new( + ChainSpecBuilder::from(&*MAINNET) + .shanghai_activated() + .with_fork(EthereumHardfork::Cancun, ForkCondition::Timestamp(1)) + .build(), + ); + + let provider = executor_provider(chain_spec); + + // construct the header for block one + let header = Header { + timestamp: 1, + number: 1, + parent_beacon_block_root: Some(B256::with_last_byte(0x69)), + excess_blob_gas: Some(0), + ..Header::default() + }; + + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + + // attempt to execute an empty block with parent beacon block root, this should not fail + executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { + header, + body: BlockBody { + transactions: vec![], + ommers: vec![], + withdrawals: None, + requests: None, + }, + }, + senders: vec![], + }, + U256::ZERO, + ) + .into(), + ) + .expect( + "Executing a block with no transactions while cancun is active should not fail", + ); + + // ensure that the nonce of the system address account has not changed + let nonce = + executor.with_state_mut(|state| state.basic(SYSTEM_ADDRESS).unwrap().unwrap().nonce); + assert_eq!(nonce, 0); + } + + #[test] + fn eip_4788_genesis_call() { + let db = create_state_provider_with_beacon_root_contract(); + + // activate cancun at genesis + let chain_spec = Arc::new( + ChainSpecBuilder::from(&*MAINNET) + .shanghai_activated() + .with_fork(EthereumHardfork::Cancun, ForkCondition::Timestamp(0)) + .build(), + ); + + let mut header = chain_spec.genesis_header().clone(); + let provider = executor_provider(chain_spec); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + + // attempt to execute the genesis block with non-zero parent beacon block root, expect err + header.parent_beacon_block_root = Some(B256::with_last_byte(0x69)); + let _err = executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { header: header.clone(), body: 
Default::default() }, + senders: vec![], + }, + U256::ZERO, + ) + .into(), + ) + .expect_err( + "Executing genesis cancun block with non-zero parent beacon block root field + should fail", + ); + + // fix header + header.parent_beacon_block_root = Some(B256::ZERO); + + // now try to process the genesis block again, this time ensuring that a system contract + // call does not occur + executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { header, body: Default::default() }, + senders: vec![], + }, + U256::ZERO, + ) + .into(), + ) + .unwrap(); + + // there is no system contract call so there should be NO STORAGE CHANGES + // this means we'll check the transition state + let transition_state = executor.with_state_mut(|state| { + state + .transition_state + .take() + .expect("the evm should be initialized with bundle updates") + }); + + // assert that it is the default (empty) transition state + assert_eq!(transition_state, TransitionState::default()); + } + + #[test] + fn eip_4788_high_base_fee() { + // This test ensures that if we have a base fee, then we don't return an error when the + // system contract is called, due to the gas price being less than the base fee. + let header = Header { + timestamp: 1, + number: 1, + parent_beacon_block_root: Some(B256::with_last_byte(0x69)), + base_fee_per_gas: Some(u64::MAX), + excess_blob_gas: Some(0), + ..Header::default() + }; + + let db = create_state_provider_with_beacon_root_contract(); + + let chain_spec = Arc::new( + ChainSpecBuilder::from(&*MAINNET) + .shanghai_activated() + .with_fork(EthereumHardfork::Cancun, ForkCondition::Timestamp(1)) + .build(), + ); + + let provider = executor_provider(chain_spec); + + // execute header + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + + // Now execute a block with the fixed header, ensure that it does not fail + executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { header: header.clone(), body: Default::default() }, + senders: vec![], + }, + U256::ZERO, + ) + .into(), + ) + .unwrap(); + + // check the actual storage of the contract - it should be: + // * The storage value at header.timestamp % HISTORY_BUFFER_LENGTH should be + // header.timestamp + // * The storage value at header.timestamp % HISTORY_BUFFER_LENGTH + HISTORY_BUFFER_LENGTH + // // should be parent_beacon_block_root + let history_buffer_length = 8191u64; + let timestamp_index = header.timestamp % history_buffer_length; + let parent_beacon_block_root_index = + timestamp_index % history_buffer_length + history_buffer_length; + + // get timestamp storage and compare + let timestamp_storage = executor.with_state_mut(|state| { + state.storage(BEACON_ROOTS_ADDRESS, U256::from(timestamp_index)).unwrap() + }); + assert_eq!(timestamp_storage, U256::from(header.timestamp)); + + // get parent beacon block root storage and compare + let parent_beacon_block_root_storage = executor.with_state_mut(|state| { + state.storage(BEACON_ROOTS_ADDRESS, U256::from(parent_beacon_block_root_index)).unwrap() + }); + assert_eq!(parent_beacon_block_root_storage, U256::from(0x69)); + } + + /// Create a state provider with blockhashes and the EIP-2935 system contract. 
+ fn create_state_provider_with_block_hashes(latest_block: u64) -> StateProviderTest { + let mut db = StateProviderTest::default(); + for block_number in 0..=latest_block { + db.insert_block_hash(block_number, keccak256(block_number.to_string())); + } + + let blockhashes_contract_account = Account { + balance: U256::ZERO, + bytecode_hash: Some(keccak256(HISTORY_STORAGE_CODE.clone())), + nonce: 1, + }; + + db.insert_account( + HISTORY_STORAGE_ADDRESS, + blockhashes_contract_account, + Some(HISTORY_STORAGE_CODE.clone()), + HashMap::default(), + ); + + db + } + #[test] + fn eip_2935_pre_fork() { + let db = create_state_provider_with_block_hashes(1); + + let chain_spec = Arc::new( + ChainSpecBuilder::from(&*MAINNET) + .shanghai_activated() + .with_fork(EthereumHardfork::Prague, ForkCondition::Never) + .build(), + ); + + let provider = executor_provider(chain_spec); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + + // construct the header for block one + let header = Header { timestamp: 1, number: 1, ..Header::default() }; + + // attempt to execute an empty block, this should not fail + executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { header, body: Default::default() }, + senders: vec![], + }, + U256::ZERO, + ) + .into(), + ) + .expect( + "Executing a block with no transactions while Prague is active should not fail", + ); + + // ensure that the block hash was *not* written to storage, since this is before the fork + // was activated + // + // we load the account first, because revm expects it to be + // loaded + executor.with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap()); + assert!(executor.with_state_mut(|state| state + .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) + .unwrap() + .is_zero())); + } + + #[test] + fn eip_2935_fork_activation_genesis() { + let db = create_state_provider_with_block_hashes(0); + + let chain_spec = Arc::new( + ChainSpecBuilder::from(&*MAINNET) + .shanghai_activated() + .with_fork(EthereumHardfork::Prague, ForkCondition::Timestamp(0)) + .build(), + ); + + let header = chain_spec.genesis_header().clone(); + let provider = executor_provider(chain_spec); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + + // attempt to execute genesis block, this should not fail + executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { header, body: Default::default() }, + senders: vec![], + }, + U256::ZERO, + ) + .into(), + ) + .expect( + "Executing a block with no transactions while Prague is active should not fail", + ); + + // ensure that the block hash was *not* written to storage, since there are no blocks + // preceding genesis + // + // we load the account first, because revm expects it to be + // loaded + executor.with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap()); + assert!(executor.with_state_mut(|state| state + .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) + .unwrap() + .is_zero())); + } + + #[test] + fn eip_2935_fork_activation_within_window_bounds() { + let fork_activation_block = (BLOCKHASH_SERVE_WINDOW - 10) as u64; + let db = create_state_provider_with_block_hashes(fork_activation_block); + + let chain_spec = Arc::new( + ChainSpecBuilder::from(&*MAINNET) + .shanghai_activated() + .with_fork(EthereumHardfork::Prague, ForkCondition::Timestamp(1)) + .build(), + ); + + let header = Header { + parent_hash: B256::random(), + timestamp: 1, + number: fork_activation_block, + requests_root: Some(EMPTY_ROOT_HASH), + 
..Header::default() + }; + let provider = executor_provider(chain_spec); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + + // attempt to execute the fork activation block, this should not fail + executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { header, body: Default::default() }, + senders: vec![], + }, + U256::ZERO, + ) + .into(), + ) + .expect( + "Executing a block with no transactions while Prague is active should not fail", + ); + + // the hash for the ancestor of the fork activation block should be present + assert!(executor + .with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some())); + assert_ne!( + executor.with_state_mut(|state| state + .storage(HISTORY_STORAGE_ADDRESS, U256::from(fork_activation_block - 1)) + .unwrap()), + U256::ZERO + ); + + // the hash of the block itself should not be in storage + assert!(executor.with_state_mut(|state| state + .storage(HISTORY_STORAGE_ADDRESS, U256::from(fork_activation_block)) + .unwrap() + .is_zero())); + } + + #[test] + fn eip_2935_fork_activation_outside_window_bounds() { + let fork_activation_block = (BLOCKHASH_SERVE_WINDOW + 256) as u64; + let db = create_state_provider_with_block_hashes(fork_activation_block); + + let chain_spec = Arc::new( + ChainSpecBuilder::from(&*MAINNET) + .shanghai_activated() + .with_fork(EthereumHardfork::Prague, ForkCondition::Timestamp(1)) + .build(), + ); + + let provider = executor_provider(chain_spec); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + + let header = Header { + parent_hash: B256::random(), + timestamp: 1, + number: fork_activation_block, + requests_root: Some(EMPTY_ROOT_HASH), + ..Header::default() + }; + + // attempt to execute the fork activation block, this should not fail + executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { header, body: Default::default() }, + senders: vec![], + }, + U256::ZERO, + ) + .into(), + ) + .expect( + "Executing a block with no transactions while Prague is active should not fail", + ); + + // the hash for the ancestor of the fork activation block should be present + assert!(executor + .with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some())); + assert_ne!( + executor.with_state_mut(|state| state + .storage( + HISTORY_STORAGE_ADDRESS, + U256::from(fork_activation_block % BLOCKHASH_SERVE_WINDOW as u64 - 1) + ) + .unwrap()), + U256::ZERO + ); + } + + #[test] + fn eip_2935_state_transition_inside_fork() { + let db = create_state_provider_with_block_hashes(2); + + let chain_spec = Arc::new( + ChainSpecBuilder::from(&*MAINNET) + .shanghai_activated() + .with_fork(EthereumHardfork::Prague, ForkCondition::Timestamp(0)) + .build(), + ); + + let mut header = chain_spec.genesis_header().clone(); + header.requests_root = Some(EMPTY_ROOT_HASH); + let header_hash = header.hash_slow(); + + let provider = executor_provider(chain_spec); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + + // attempt to execute the genesis block, this should not fail + executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { header, body: Default::default() }, + senders: vec![], + }, + U256::ZERO, + ) + .into(), + ) + .expect( + "Executing a block with no transactions while Prague is active should not fail", + ); + + // nothing should be written as the genesis has no ancestors + // + // we load the account first, because revm expects it to be + // loaded + 
executor.with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap()); + assert!(executor.with_state_mut(|state| state + .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) + .unwrap() + .is_zero())); + + // attempt to execute block 1, this should not fail + let header = Header { + parent_hash: header_hash, + timestamp: 1, + number: 1, + requests_root: Some(EMPTY_ROOT_HASH), + ..Header::default() + }; + let header_hash = header.hash_slow(); + + executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { header, body: Default::default() }, + senders: vec![], + }, + U256::ZERO, + ) + .into(), + ) + .expect( + "Executing a block with no transactions while Prague is active should not fail", + ); + + // the block hash of genesis should now be in storage, but not block 1 + assert!(executor + .with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some())); + assert_ne!( + executor.with_state_mut(|state| state + .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) + .unwrap()), + U256::ZERO + ); + assert!(executor.with_state_mut(|state| state + .storage(HISTORY_STORAGE_ADDRESS, U256::from(1)) + .unwrap() + .is_zero())); + + // attempt to execute block 2, this should not fail + let header = Header { + parent_hash: header_hash, + timestamp: 1, + number: 2, + requests_root: Some(EMPTY_ROOT_HASH), + ..Header::default() + }; + + executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { header, body: Default::default() }, + senders: vec![], + }, + U256::ZERO, + ) + .into(), + ) + .expect( + "Executing a block with no transactions while Prague is active should not fail", + ); + + // the block hash of genesis and block 1 should now be in storage, but not block 2 + assert!(executor + .with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some())); + assert_ne!( + executor.with_state_mut(|state| state + .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) + .unwrap()), + U256::ZERO + ); + assert_ne!( + executor.with_state_mut(|state| state + .storage(HISTORY_STORAGE_ADDRESS, U256::from(1)) + .unwrap()), + U256::ZERO + ); + assert!(executor.with_state_mut(|state| state + .storage(HISTORY_STORAGE_ADDRESS, U256::from(2)) + .unwrap() + .is_zero())); + } + + #[test] + fn eip_7002() { + let chain_spec = Arc::new( + ChainSpecBuilder::from(&*MAINNET) + .shanghai_activated() + .with_fork(EthereumHardfork::Prague, ForkCondition::Timestamp(0)) + .build(), + ); + + let mut db = create_state_provider_with_withdrawal_requests_contract(); + + let secp = Secp256k1::new(); + let sender_key_pair = Keypair::new(&secp, &mut generators::rng()); + let sender_address = public_key_to_address(sender_key_pair.public_key()); + + db.insert_account( + sender_address, + Account { nonce: 1, balance: U256::from(ETH_TO_WEI), bytecode_hash: None }, + None, + HashMap::default(), + ); + + // https://github.com/lightclient/7002asm/blob/e0d68e04d15f25057af7b6d180423d94b6b3bdb3/test/Contract.t.sol.in#L49-L64 + let validator_public_key = fixed_bytes!("111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111"); + let withdrawal_amount = fixed_bytes!("2222222222222222"); + let input: Bytes = [&validator_public_key[..], &withdrawal_amount[..]].concat().into(); + assert_eq!(input.len(), 56); + + let mut header = chain_spec.genesis_header().clone(); + header.gas_limit = 1_500_000; + header.gas_used = 134_807; + header.receipts_root = + b256!("b31a3e47b902e9211c4d349af4e4c5604ce388471e79ca008907ae4616bb0ed3"); + + let tx = sign_tx_with_key_pair( + 
sender_key_pair, + Transaction::Legacy(TxLegacy { + chain_id: Some(chain_spec.chain.id()), + nonce: 1, + gas_price: header.base_fee_per_gas.unwrap().into(), + gas_limit: 134_807, + to: TxKind::Call(WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS), + // `MIN_WITHDRAWAL_REQUEST_FEE` + value: U256::from(1), + input, + }), + ); + + let provider = executor_provider(chain_spec); + + let executor = provider.executor(StateProviderDatabase::new(&db)); + + let BlockExecutionOutput { receipts, requests, .. } = executor + .execute( + ( + &Block { + header, + body: BlockBody { transactions: vec![tx], ..Default::default() }, + } + .with_recovered_senders() + .unwrap(), + U256::ZERO, + ) + .into(), + ) + .unwrap(); + + let receipt = receipts.first().unwrap(); + assert!(receipt.success); + + let request = requests.first().unwrap(); + let withdrawal_request = request.as_withdrawal_request().unwrap(); + assert_eq!(withdrawal_request.source_address, sender_address); + assert_eq!(withdrawal_request.validator_pubkey, validator_public_key); + assert_eq!(withdrawal_request.amount, u64::from_be_bytes(withdrawal_amount.into())); + } + + #[test] + fn block_gas_limit_error() { + // Create a chain specification with fork conditions set for Prague + let chain_spec = Arc::new( + ChainSpecBuilder::from(&*MAINNET) + .shanghai_activated() + .with_fork(EthereumHardfork::Prague, ForkCondition::Timestamp(0)) + .build(), + ); + + // Create a state provider with the withdrawal requests contract pre-deployed + let mut db = create_state_provider_with_withdrawal_requests_contract(); + + // Initialize Secp256k1 for key pair generation + let secp = Secp256k1::new(); + // Generate a new key pair for the sender + let sender_key_pair = Keypair::new(&secp, &mut generators::rng()); + // Get the sender's address from the public key + let sender_address = public_key_to_address(sender_key_pair.public_key()); + + // Insert the sender account into the state with a nonce of 1 and a balance of 1 ETH in Wei + db.insert_account( + sender_address, + Account { nonce: 1, balance: U256::from(ETH_TO_WEI), bytecode_hash: None }, + None, + HashMap::default(), + ); + + // Define the validator public key and withdrawal amount as fixed bytes + let validator_public_key = fixed_bytes!("111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111"); + let withdrawal_amount = fixed_bytes!("2222222222222222"); + // Concatenate the validator public key and withdrawal amount into a single byte array + let input: Bytes = [&validator_public_key[..], &withdrawal_amount[..]].concat().into(); + // Ensure the input length is 56 bytes + assert_eq!(input.len(), 56); + + // Create a genesis block header with a specified gas limit and gas used + let mut header = chain_spec.genesis_header().clone(); + header.gas_limit = 1_500_000; + header.gas_used = 134_807; + header.receipts_root = + b256!("b31a3e47b902e9211c4d349af4e4c5604ce388471e79ca008907ae4616bb0ed3"); + + // Create a transaction with a gas limit higher than the block gas limit + let tx = sign_tx_with_key_pair( + sender_key_pair, + Transaction::Legacy(TxLegacy { + chain_id: Some(chain_spec.chain.id()), + nonce: 1, + gas_price: header.base_fee_per_gas.unwrap().into(), + gas_limit: 2_500_000, // higher than block gas limit + to: TxKind::Call(WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS), + value: U256::from(1), + input, + }), + ); + + // Create an executor from the state provider + let executor = executor_provider(chain_spec).executor(StateProviderDatabase::new(&db)); + + // Execute the block and 
capture the result + let exec_result = executor.execute( + ( + &Block { header, body: BlockBody { transactions: vec![tx], ..Default::default() } } + .with_recovered_senders() + .unwrap(), + U256::ZERO, + ) + .into(), + ); + + // Check if the execution result is an error and assert the specific error type + match exec_result { + Ok(_) => panic!("Expected block gas limit error"), + Err(err) => assert_eq!( + *err.as_validation().unwrap(), + BlockValidationError::TransactionGasLimitMoreThanAvailableBlockGas { + transaction_gas_limit: 2_500_000, + block_available_gas: 1_500_000, + } + ), + } + } +} diff --git a/crates/evm/src/execute.rs b/crates/evm/src/execute.rs index 9413c709d..859dacd8a 100644 --- a/crates/evm/src/execute.rs +++ b/crates/evm/src/execute.rs @@ -8,10 +8,9 @@ pub use reth_execution_types::{BlockExecutionInput, BlockExecutionOutput, Execut pub use reth_storage_errors::provider::ProviderError; use crate::system_calls::OnStateHook; -use alloc::{boxed::Box, sync::Arc, vec::Vec}; +use alloc::{boxed::Box, vec::Vec}; use alloy_primitives::BlockNumber; use core::{fmt::Display, marker::PhantomData}; -use reth_chainspec::ChainSpec; use reth_consensus::ConsensusError; use reth_primitives::{BlockWithSenders, Receipt, Request}; use reth_prune_types::PruneModes; @@ -205,9 +204,6 @@ pub trait BlockExecutionStrategy { /// Returns the final bundle state. fn finish(&mut self) -> BundleState; - /// Returns the strategy chain spec. - fn chain_spec(&self) -> Arc; - /// Validate a block with regard to execution results. fn validate_block_post_execution( &self, @@ -629,10 +625,6 @@ mod tests { self.finish_result.clone() } - fn chain_spec(&self) -> Arc { - MAINNET.clone() - } - fn validate_block_post_execution( &self, _block: &BlockWithSenders, From e454b2402b0f9fe936620d9302edc94f9f8d5782 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 16 Oct 2024 11:44:15 +0200 Subject: [PATCH 075/425] chore: use highest known nonce (#11784) --- crates/transaction-pool/src/pool/txpool.rs | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index c470faf3a..c89aca830 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -110,13 +110,21 @@ impl TxPool { /// Returns the transaction with the highest nonce that is executable given the on chain nonce. /// + /// If the pool already tracks a higher nonce for the given sender, then this nonce is used + /// instead. + /// /// Note: The next pending pooled transaction must have the on chain nonce. pub(crate) fn get_highest_consecutive_transaction_by_sender( &self, - on_chain: TransactionId, + mut on_chain: TransactionId, ) -> Option>> { let mut last_consecutive_tx = None; + // ensure this operates on the most recent + if let Some(current) = self.sender_info.get(&on_chain.sender) { + on_chain.nonce = on_chain.nonce.max(current.state_nonce); + } + let mut next_expected_nonce = on_chain.nonce; for (id, tx) in self.all().descendant_txs_inclusive(&on_chain) { if next_expected_nonce != id.nonce { @@ -2784,7 +2792,7 @@ mod tests { // Create transactions with nonces 0, 1, 2, 4, 5. 
let sender = Address::random(); - let txs: Vec<_> = vec![0, 1, 2, 4, 5]; + let txs: Vec<_> = vec![0, 1, 2, 4, 5, 8, 9]; for nonce in txs { let mut mock_tx = MockTransaction::eip1559(); mock_tx.set_sender(sender); @@ -2804,6 +2812,13 @@ mod tests { let next_tx = pool.get_highest_consecutive_transaction_by_sender(sender_id.into_id(5)); assert_eq!(next_tx.map(|tx| tx.nonce()), Some(5), "Expected nonce 5 for on-chain nonce 5"); + + // update the tracked nonce + let mut info = SenderInfo::default(); + info.update(8, U256::ZERO); + pool.sender_info.insert(sender_id, info); + let next_tx = pool.get_highest_consecutive_transaction_by_sender(sender_id.into_id(5)); + assert_eq!(next_tx.map(|tx| tx.nonce()), Some(9), "Expected nonce 9 for on-chain nonce 8"); } #[test] From f49a4ae185d0f02e548af4434b9cfc14df79af85 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Wed, 16 Oct 2024 12:13:46 +0200 Subject: [PATCH 076/425] feat: add OpExecutionStrategy (#11761) --- Cargo.lock | 1 + crates/evm/src/execute.rs | 2 +- crates/evm/src/test_utils.rs | 7 +- crates/optimism/evm/Cargo.toml | 2 + crates/optimism/evm/src/lib.rs | 2 + crates/optimism/evm/src/strategy.rs | 491 ++++++++++++++++++++++++++++ 6 files changed, 503 insertions(+), 2 deletions(-) create mode 100644 crates/optimism/evm/src/strategy.rs diff --git a/Cargo.lock b/Cargo.lock index eb0bb2a3d..4d23c5b42 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8105,6 +8105,7 @@ dependencies = [ "alloy-primitives", "op-alloy-consensus", "reth-chainspec", + "reth-consensus", "reth-ethereum-forks", "reth-evm", "reth-execution-errors", diff --git a/crates/evm/src/execute.rs b/crates/evm/src/execute.rs index 859dacd8a..704b6a23a 100644 --- a/crates/evm/src/execute.rs +++ b/crates/evm/src/execute.rs @@ -375,7 +375,7 @@ where /// Batch execution strategy. pub(crate) strategy: S, /// Keeps track of batch execution receipts and requests. - batch_record: BlockBatchRecord, + pub(crate) batch_record: BlockBatchRecord, _phantom: PhantomData, } diff --git a/crates/evm/src/test_utils.rs b/crates/evm/src/test_utils.rs index a033c8023..5ae7ed45b 100644 --- a/crates/evm/src/test_utils.rs +++ b/crates/evm/src/test_utils.rs @@ -11,7 +11,7 @@ use alloy_primitives::BlockNumber; use parking_lot::Mutex; use reth_execution_errors::BlockExecutionError; use reth_execution_types::ExecutionOutcome; -use reth_primitives::{BlockWithSenders, Receipt}; +use reth_primitives::{BlockWithSenders, Receipt, Receipts}; use reth_prune_types::PruneModes; use reth_storage_errors::provider::ProviderError; use revm::State; @@ -152,4 +152,9 @@ where { f(self.strategy.state_mut()) } + + /// Accessor for batch executor receipts. 
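+    ///
+    /// Primarily useful in tests that want to inspect the receipts recorded for each executed
+    /// block, e.g. after calling `execute_and_verify_one`.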
+ pub const fn receipts(&self) -> &Receipts { + self.batch_record.receipts() + } } diff --git a/crates/optimism/evm/Cargo.toml b/crates/optimism/evm/Cargo.toml index 72231716f..afaca32b6 100644 --- a/crates/optimism/evm/Cargo.toml +++ b/crates/optimism/evm/Cargo.toml @@ -20,6 +20,7 @@ reth-revm.workspace = true reth-execution-errors.workspace = true reth-execution-types.workspace = true reth-prune-types.workspace = true +reth-consensus.workspace = true # ethereum alloy-primitives.workspace = true @@ -41,6 +42,7 @@ tracing.workspace = true [dev-dependencies] alloy-eips.workspace = true +reth-evm = { workspace = true, features = ["test-utils"] } reth-revm = { workspace = true, features = ["test-utils"] } reth-primitives = { workspace = true, features = ["test-utils"] } reth-optimism-chainspec.workspace = true diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index f3de053f7..4d0f9d89f 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -33,6 +33,8 @@ use revm_primitives::{ BlobExcessGasAndPrice, BlockEnv, Bytes, CfgEnv, Env, HandlerCfg, OptimismFields, SpecId, TxKind, }; +pub mod strategy; + /// Optimism-related EVM configuration. #[derive(Debug, Clone)] pub struct OptimismEvmConfig { diff --git a/crates/optimism/evm/src/strategy.rs b/crates/optimism/evm/src/strategy.rs new file mode 100644 index 000000000..4fc06c396 --- /dev/null +++ b/crates/optimism/evm/src/strategy.rs @@ -0,0 +1,491 @@ +//! Optimism block execution strategy, + +use crate::{l1::ensure_create2_deployer, OptimismBlockExecutionError, OptimismEvmConfig}; +use reth_chainspec::EthereumHardforks; +use reth_consensus::ConsensusError; +use reth_evm::{ + execute::{ + BlockExecutionError, BlockExecutionStrategy, BlockExecutionStrategyFactory, + BlockValidationError, ProviderError, + }, + system_calls::{OnStateHook, SystemCaller}, + ConfigureEvm, ConfigureEvmEnv, +}; +use reth_optimism_chainspec::OpChainSpec; +use reth_optimism_consensus::validate_block_post_execution; +use reth_optimism_forks::OptimismHardfork; +use reth_primitives::{BlockWithSenders, Header, Receipt, Request, TxType}; +use reth_revm::{ + db::{states::bundle_state::BundleRetention, BundleState}, + state_change::post_block_balance_increments, + Database, State, +}; +use revm_primitives::{ + db::DatabaseCommit, BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, U256, +}; +use std::{fmt::Display, sync::Arc}; +use tracing::trace; + +/// Factory for [`OpExecutionStrategy`]. +#[derive(Debug, Clone)] +pub struct OpExecutionStrategyFactory { + /// The chainspec + chain_spec: Arc, + /// How to create an EVM. + evm_config: EvmConfig, +} + +impl OpExecutionStrategyFactory { + /// Creates a new default optimism executor strategy factory. + pub fn optimism(chain_spec: Arc) -> Self { + Self::new(chain_spec.clone(), OptimismEvmConfig::new(chain_spec)) + } +} + +impl OpExecutionStrategyFactory { + /// Creates a new executor strategy factory. + pub const fn new(chain_spec: Arc, evm_config: EvmConfig) -> Self { + Self { chain_spec, evm_config } + } +} + +impl BlockExecutionStrategyFactory for OpExecutionStrategyFactory { + type Strategy + Display>> = OpExecutionStrategy; + + fn create_strategy(&self, db: DB) -> Self::Strategy + where + DB: Database + Display>, + { + let state = + State::builder().with_database(db).with_bundle_update().without_state_clear().build(); + OpExecutionStrategy::new(state, self.chain_spec.clone(), self.evm_config.clone()) + } +} + +/// Block execution strategy for Optimism. 
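+///
+/// Usually constructed via [`OpExecutionStrategyFactory::create_strategy`], which wires in the
+/// chain spec and EVM configuration shown above.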
+#[allow(missing_debug_implementations)] +pub struct OpExecutionStrategy { + /// The chainspec + chain_spec: Arc, + /// How to create an EVM. + evm_config: EvmConfig, + /// Current state for block execution. + state: State, + /// Utility to call system smart contracts. + system_caller: SystemCaller, +} + +impl OpExecutionStrategy { + /// Creates a new [`OpExecutionStrategy`] + pub fn new( + state: State, + chain_spec: Arc, + evm_config: OptimismEvmConfig, + ) -> Self { + let system_caller = SystemCaller::new(evm_config.clone(), (*chain_spec).clone()); + Self { state, chain_spec, evm_config, system_caller } + } +} + +impl OpExecutionStrategy { + /// Configures a new evm configuration and block environment for the given block. + /// + /// Caution: this does not initialize the tx environment. + fn evm_env_for_block(&self, header: &Header, total_difficulty: U256) -> EnvWithHandlerCfg { + let mut cfg = CfgEnvWithHandlerCfg::new(Default::default(), Default::default()); + let mut block_env = BlockEnv::default(); + self.evm_config.fill_cfg_and_block_env(&mut cfg, &mut block_env, header, total_difficulty); + + EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, Default::default()) + } +} + +impl BlockExecutionStrategy for OpExecutionStrategy +where + DB: Database + Display>, +{ + type Error = BlockExecutionError; + + fn apply_pre_execution_changes( + &mut self, + block: &BlockWithSenders, + total_difficulty: U256, + ) -> Result<(), Self::Error> { + // Set state clear flag if the block is after the Spurious Dragon hardfork. + let state_clear_flag = + (*self.chain_spec).is_spurious_dragon_active_at_block(block.header.number); + self.state.set_state_clear_flag(state_clear_flag); + + let env = self.evm_env_for_block(&block.header, total_difficulty); + let mut evm = self.evm_config.evm_with_env(&mut self.state, env); + + self.system_caller.apply_beacon_root_contract_call( + block.timestamp, + block.number, + block.parent_beacon_block_root, + &mut evm, + )?; + + // Ensure that the create2deployer is force-deployed at the canyon transition. Optimism + // blocks will always have at least a single transaction in them (the L1 info transaction), + // so we can safely assume that this will always be triggered upon the transition and that + // the above check for empty blocks will never be hit on OP chains. + ensure_create2_deployer(self.chain_spec.clone(), block.timestamp, evm.db_mut()) + .map_err(|_| OptimismBlockExecutionError::ForceCreate2DeployerFail)?; + + Ok(()) + } + + fn execute_transactions( + &mut self, + block: &BlockWithSenders, + total_difficulty: U256, + ) -> Result<(Vec, u64), Self::Error> { + let env = self.evm_env_for_block(&block.header, total_difficulty); + let mut evm = self.evm_config.evm_with_env(&mut self.state, env); + + let is_regolith = + self.chain_spec.fork(OptimismHardfork::Regolith).active_at_timestamp(block.timestamp); + + let mut cumulative_gas_used = 0; + let mut receipts = Vec::with_capacity(block.body.transactions.len()); + for (sender, transaction) in block.transactions_with_sender() { + // The sum of the transaction’s gas limit, Tg, and the gas utilized in this block prior, + // must be no greater than the block’s gasLimit. 
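+            // e.g. a transaction requesting 2_500_000 gas cannot fit into a 1_500_000 gas block
+            // that has already used 134_807 gas, and is rejected with
+            // `TransactionGasLimitMoreThanAvailableBlockGas`.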
+ let block_available_gas = block.header.gas_limit - cumulative_gas_used; + if transaction.gas_limit() > block_available_gas && + (is_regolith || !transaction.is_system_transaction()) + { + return Err(BlockValidationError::TransactionGasLimitMoreThanAvailableBlockGas { + transaction_gas_limit: transaction.gas_limit(), + block_available_gas, + } + .into()) + } + + // An optimism block should never contain blob transactions. + if matches!(transaction.tx_type(), TxType::Eip4844) { + return Err(OptimismBlockExecutionError::BlobTransactionRejected.into()) + } + + // Cache the depositor account prior to the state transition for the deposit nonce. + // + // Note that this *only* needs to be done post-regolith hardfork, as deposit nonces + // were not introduced in Bedrock. In addition, regular transactions don't have deposit + // nonces, so we don't need to touch the DB for those. + let depositor = (is_regolith && transaction.is_deposit()) + .then(|| { + evm.db_mut() + .load_cache_account(*sender) + .map(|acc| acc.account_info().unwrap_or_default()) + }) + .transpose() + .map_err(|_| OptimismBlockExecutionError::AccountLoadFailed(*sender))?; + + self.evm_config.fill_tx_env(evm.tx_mut(), transaction, *sender); + + // Execute transaction. + let result_and_state = evm.transact().map_err(move |err| { + let new_err = err.map_db_err(|e| e.into()); + // Ensure hash is calculated for error log, if not already done + BlockValidationError::EVM { + hash: transaction.recalculate_hash(), + error: Box::new(new_err), + } + })?; + + trace!( + target: "evm", + ?transaction, + "Executed transaction" + ); + self.system_caller.on_state(&result_and_state); + let ResultAndState { result, state } = result_and_state; + evm.db_mut().commit(state); + + // append gas used + cumulative_gas_used += result.gas_used(); + + // Push transaction changeset and calculate header bloom filter for receipt. + receipts.push(Receipt { + tx_type: transaction.tx_type(), + // Success flag was added in `EIP-658: Embedding transaction status code in + // receipts`. + success: result.is_success(), + cumulative_gas_used, + logs: result.into_logs(), + deposit_nonce: depositor.map(|account| account.nonce), + // The deposit receipt version was introduced in Canyon to indicate an update to how + // receipt hashes should be computed when set. The state transition process ensures + // this is only set for post-Canyon deposit transactions. 
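+                // e.g. a deposit transaction in a block whose timestamp activates Canyon gets
+                // `deposit_receipt_version: Some(1)`, while regular transactions and pre-Canyon
+                // deposits keep `None` (the tests at the end of this file assert both cases).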
+ deposit_receipt_version: (transaction.is_deposit() && + self.chain_spec + .is_fork_active_at_timestamp(OptimismHardfork::Canyon, block.timestamp)) + .then_some(1), + }); + } + + Ok((receipts, cumulative_gas_used)) + } + + fn apply_post_execution_changes( + &mut self, + block: &BlockWithSenders, + total_difficulty: U256, + _receipts: &[Receipt], + ) -> Result, Self::Error> { + let balance_increments = + post_block_balance_increments(&self.chain_spec.clone(), block, total_difficulty); + // increment balances + self.state + .increment_balances(balance_increments) + .map_err(|_| BlockValidationError::IncrementBalanceFailed)?; + + Ok(vec![]) + } + + fn state_ref(&self) -> &State { + &self.state + } + + fn state_mut(&mut self) -> &mut State { + &mut self.state + } + + fn with_state_hook(&mut self, hook: Option>) { + self.system_caller.with_state_hook(hook); + } + + fn finish(&mut self) -> BundleState { + self.state.merge_transitions(BundleRetention::Reverts); + self.state.take_bundle() + } + + fn validate_block_post_execution( + &self, + block: &BlockWithSenders, + receipts: &[Receipt], + _requests: &[Request], + ) -> Result<(), ConsensusError> { + validate_block_post_execution(block, &self.chain_spec.clone(), receipts) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::OpChainSpec; + use alloy_consensus::TxEip1559; + use alloy_primitives::{b256, Address, StorageKey, StorageValue}; + use reth_chainspec::MIN_TRANSACTION_GAS; + use reth_evm::execute::{BatchExecutor, BlockExecutorProvider, GenericBlockExecutorProvider}; + use reth_optimism_chainspec::{optimism_deposit_tx_signature, OpChainSpecBuilder}; + use reth_primitives::{Account, Block, BlockBody, Signature, Transaction, TransactionSigned}; + use reth_revm::{ + database::StateProviderDatabase, test_utils::StateProviderTest, L1_BLOCK_CONTRACT, + }; + use std::{collections::HashMap, str::FromStr}; + + fn create_op_state_provider() -> StateProviderTest { + let mut db = StateProviderTest::default(); + + let l1_block_contract_account = + Account { balance: U256::ZERO, bytecode_hash: None, nonce: 1 }; + + let mut l1_block_storage = HashMap::default(); + // base fee + l1_block_storage.insert(StorageKey::with_last_byte(1), StorageValue::from(1000000000)); + // l1 fee overhead + l1_block_storage.insert(StorageKey::with_last_byte(5), StorageValue::from(188)); + // l1 fee scalar + l1_block_storage.insert(StorageKey::with_last_byte(6), StorageValue::from(684000)); + // l1 free scalars post ecotone + l1_block_storage.insert( + StorageKey::with_last_byte(3), + StorageValue::from_str( + "0x0000000000000000000000000000000000001db0000d27300000000000000005", + ) + .unwrap(), + ); + + db.insert_account(L1_BLOCK_CONTRACT, l1_block_contract_account, None, l1_block_storage); + + db + } + + fn executor_provider( + chain_spec: Arc, + ) -> GenericBlockExecutorProvider { + let strategy_factory = + OpExecutionStrategyFactory::new(chain_spec.clone(), OptimismEvmConfig::new(chain_spec)); + + GenericBlockExecutorProvider::new(strategy_factory) + } + + #[test] + fn op_deposit_fields_pre_canyon() { + let header = Header { + timestamp: 1, + number: 1, + gas_limit: 1_000_000, + gas_used: 42_000, + receipts_root: b256!( + "83465d1e7d01578c0d609be33570f91242f013e9e295b0879905346abbd63731" + ), + ..Default::default() + }; + + let mut db = create_op_state_provider(); + + let addr = Address::ZERO; + let account = Account { balance: U256::MAX, ..Account::default() }; + db.insert_account(addr, account, None, HashMap::default()); + + let chain_spec = 
Arc::new(OpChainSpecBuilder::base_mainnet().regolith_activated().build()); + + let tx = TransactionSigned::from_transaction_and_signature( + Transaction::Eip1559(TxEip1559 { + chain_id: chain_spec.chain.id(), + nonce: 0, + gas_limit: MIN_TRANSACTION_GAS, + to: addr.into(), + ..Default::default() + }), + Signature::test_signature(), + ); + + let tx_deposit = TransactionSigned::from_transaction_and_signature( + Transaction::Deposit(op_alloy_consensus::TxDeposit { + from: addr, + to: addr.into(), + gas_limit: MIN_TRANSACTION_GAS, + ..Default::default() + }), + Signature::test_signature(), + ); + + let provider = executor_provider(chain_spec); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + + // make sure the L1 block contract state is preloaded. + executor.with_state_mut(|state| { + state.load_cache_account(L1_BLOCK_CONTRACT).unwrap(); + }); + + // Attempt to execute a block with one deposit and one non-deposit transaction + executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { + header, + body: BlockBody { + transactions: vec![tx, tx_deposit], + ..Default::default() + }, + }, + senders: vec![addr, addr], + }, + U256::ZERO, + ) + .into(), + ) + .unwrap(); + + let receipts = executor.receipts(); + let tx_receipt = receipts[0][0].as_ref().unwrap(); + let deposit_receipt = receipts[0][1].as_ref().unwrap(); + + // deposit_receipt_version is not present in pre canyon transactions + assert!(deposit_receipt.deposit_receipt_version.is_none()); + assert!(tx_receipt.deposit_receipt_version.is_none()); + + // deposit_nonce is present only in deposit transactions + assert!(deposit_receipt.deposit_nonce.is_some()); + assert!(tx_receipt.deposit_nonce.is_none()); + } + + #[test] + fn op_deposit_fields_post_canyon() { + // ensure_create2_deployer will fail if timestamp is set to less then 2 + let header = Header { + timestamp: 2, + number: 1, + gas_limit: 1_000_000, + gas_used: 42_000, + receipts_root: b256!( + "fffc85c4004fd03c7bfbe5491fae98a7473126c099ac11e8286fd0013f15f908" + ), + ..Default::default() + }; + + let mut db = create_op_state_provider(); + let addr = Address::ZERO; + let account = Account { balance: U256::MAX, ..Account::default() }; + + db.insert_account(addr, account, None, HashMap::default()); + + let chain_spec = Arc::new(OpChainSpecBuilder::base_mainnet().canyon_activated().build()); + + let tx = TransactionSigned::from_transaction_and_signature( + Transaction::Eip1559(TxEip1559 { + chain_id: chain_spec.chain.id(), + nonce: 0, + gas_limit: MIN_TRANSACTION_GAS, + to: addr.into(), + ..Default::default() + }), + Signature::test_signature(), + ); + + let tx_deposit = TransactionSigned::from_transaction_and_signature( + Transaction::Deposit(op_alloy_consensus::TxDeposit { + from: addr, + to: addr.into(), + gas_limit: MIN_TRANSACTION_GAS, + ..Default::default() + }), + optimism_deposit_tx_signature(), + ); + + let provider = executor_provider(chain_spec); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + + // make sure the L1 block contract state is preloaded. 
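+        // (the storage seeded in `create_op_state_provider` above, i.e. base fee and fee
+        // scalars, is what the L1 cost computation reads during execution)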
+ executor.with_state_mut(|state| { + state.load_cache_account(L1_BLOCK_CONTRACT).unwrap(); + }); + + // attempt to execute an empty block with parent beacon block root, this should not fail + executor + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { + header, + body: BlockBody { + transactions: vec![tx, tx_deposit], + ..Default::default() + }, + }, + senders: vec![addr, addr], + }, + U256::ZERO, + ) + .into(), + ) + .expect("Executing a block while canyon is active should not fail"); + + let receipts = executor.receipts(); + let tx_receipt = receipts[0][0].as_ref().unwrap(); + let deposit_receipt = receipts[0][1].as_ref().unwrap(); + + // deposit_receipt_version is set to 1 for post canyon deposit transactions + assert_eq!(deposit_receipt.deposit_receipt_version, Some(1)); + assert!(tx_receipt.deposit_receipt_version.is_none()); + + // deposit_nonce is present only in deposit transactions + assert!(deposit_receipt.deposit_nonce.is_some()); + assert!(tx_receipt.deposit_nonce.is_none()); + } +} From d421931b7ec86759a0bf1aa2cb7e9b6263f14c9a Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 16 Oct 2024 12:31:23 +0200 Subject: [PATCH 077/425] trie: simplify usage of `HashedStorage` with default (#11662) --- crates/storage/provider/src/providers/bundle_state_provider.rs | 2 +- crates/trie/parallel/src/parallel_root.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/storage/provider/src/providers/bundle_state_provider.rs b/crates/storage/provider/src/providers/bundle_state_provider.rs index 296dae8c6..be6549033 100644 --- a/crates/storage/provider/src/providers/bundle_state_provider.rs +++ b/crates/storage/provider/src/providers/bundle_state_provider.rs @@ -43,7 +43,7 @@ impl BundleStateProvider account.storage.iter().map(|(slot, value)| (slot, &value.present_value)), ) }) - .unwrap_or_else(|| HashedStorage::new(false)) + .unwrap_or_default() } } diff --git a/crates/trie/parallel/src/parallel_root.rs b/crates/trie/parallel/src/parallel_root.rs index a64b83514..e432b9106 100644 --- a/crates/trie/parallel/src/parallel_root.rs +++ b/crates/trie/parallel/src/parallel_root.rs @@ -320,7 +320,7 @@ mod tests { hashed_state .storages .entry(hashed_address) - .or_insert_with(|| HashedStorage::new(false)) + .or_insert_with(HashedStorage::default) .storage .insert(hashed_slot, *value); } From 248b6b5905e9c44b5e0c2af6abae450898801beb Mon Sep 17 00:00:00 2001 From: greged93 <82421016+greged93@users.noreply.github.com> Date: Wed, 16 Oct 2024 12:39:25 +0200 Subject: [PATCH 078/425] fix: task executor metrics (#11738) --- bin/reth/src/cli/mod.rs | 5 +++++ crates/cli/commands/src/node.rs | 5 ----- crates/tasks/src/lib.rs | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/bin/reth/src/cli/mod.rs b/bin/reth/src/cli/mod.rs index cca801da3..01f8f73e7 100644 --- a/bin/reth/src/cli/mod.rs +++ b/bin/reth/src/cli/mod.rs @@ -18,6 +18,7 @@ use reth_db::DatabaseEnv; use reth_ethereum_cli::chainspec::EthereumChainSpecParser; use reth_node_builder::{NodeBuilder, WithLaunchContext}; use reth_node_ethereum::{EthExecutorProvider, EthereumNode}; +use reth_node_metrics::recorder::install_prometheus_recorder; use reth_tracing::FileWorkerGuard; use std::{ffi::OsString, fmt, future::Future, sync::Arc}; use tracing::info; @@ -145,6 +146,10 @@ impl, Ext: clap::Args + fmt::Debug> Cl let _guard = self.init_tracing()?; info!(target: "reth::cli", "Initialized tracing, debug log directory: {}", 
self.logs.log_file_directory); + // Install the prometheus recorder to be sure to record task + // executor's metrics + let _ = install_prometheus_recorder(); + let runner = CliRunner::default(); match self.command { Commands::Node(command) => { diff --git a/crates/cli/commands/src/node.rs b/crates/cli/commands/src/node.rs index 5b1a87e06..b099a2c05 100644 --- a/crates/cli/commands/src/node.rs +++ b/crates/cli/commands/src/node.rs @@ -16,7 +16,6 @@ use reth_node_core::{ node_config::NodeConfig, version, }; -use reth_node_metrics::recorder::install_prometheus_recorder; use std::{ffi::OsString, fmt, future::Future, net::SocketAddr, path::PathBuf, sync::Arc}; /// Start the node @@ -180,10 +179,6 @@ impl< pruning, }; - // Register the prometheus recorder before creating the database, - // because database init needs it to register metrics. - let _ = install_prometheus_recorder(); - let data_dir = node_config.datadir(); let db_path = data_dir.db(); diff --git a/crates/tasks/src/lib.rs b/crates/tasks/src/lib.rs index a0070698f..28b5eaba9 100644 --- a/crates/tasks/src/lib.rs +++ b/crates/tasks/src/lib.rs @@ -288,7 +288,7 @@ pub struct TaskExecutor { on_shutdown: Shutdown, /// Sender half for sending panic signals to this type panicked_tasks_tx: UnboundedSender, - // Task Executor Metrics + /// Task Executor Metrics metrics: TaskExecutorMetrics, /// How many [`GracefulShutdown`] tasks are currently active graceful_tasks: Arc, From 87399ae2c17d83c9d874eaf3f0b902310414359e Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Wed, 16 Oct 2024 13:20:42 +0200 Subject: [PATCH 079/425] chore: rename executor and provider Generic -> Basic (#11788) --- crates/ethereum/evm/src/strategy.rs | 6 ++--- crates/evm/src/execute.rs | 36 ++++++++++++++--------------- crates/evm/src/test_utils.rs | 8 +++---- crates/optimism/evm/src/strategy.rs | 6 ++--- 4 files changed, 28 insertions(+), 28 deletions(-) diff --git a/crates/ethereum/evm/src/strategy.rs b/crates/ethereum/evm/src/strategy.rs index 52f58a8b0..1c284e068 100644 --- a/crates/ethereum/evm/src/strategy.rs +++ b/crates/ethereum/evm/src/strategy.rs @@ -274,7 +274,7 @@ mod tests { use alloy_primitives::{b256, fixed_bytes, keccak256, Bytes, TxKind, B256}; use reth_chainspec::{ChainSpecBuilder, ForkCondition}; use reth_evm::execute::{ - BatchExecutor, BlockExecutorProvider, Executor, GenericBlockExecutorProvider, + BasicBlockExecutorProvider, BatchExecutor, BlockExecutorProvider, Executor, }; use reth_execution_types::BlockExecutionOutput; use reth_primitives::{ @@ -328,11 +328,11 @@ mod tests { fn executor_provider( chain_spec: Arc, - ) -> GenericBlockExecutorProvider { + ) -> BasicBlockExecutorProvider { let strategy_factory = EthExecutionStrategyFactory::new(chain_spec.clone(), EthEvmConfig::new(chain_spec)); - GenericBlockExecutorProvider::new(strategy_factory) + BasicBlockExecutorProvider::new(strategy_factory) } #[test] diff --git a/crates/evm/src/execute.rs b/crates/evm/src/execute.rs index 704b6a23a..f52325b43 100644 --- a/crates/evm/src/execute.rs +++ b/crates/evm/src/execute.rs @@ -227,7 +227,7 @@ pub trait BlockExecutionStrategyFactory: Send + Sync + Clone + Unpin + 'static { DB: Database + Display>; } -impl Clone for GenericBlockExecutorProvider +impl Clone for BasicBlockExecutorProvider where F: Clone, { @@ -238,33 +238,33 @@ where /// A generic block executor provider that can create executors using a strategy factory. 
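///
/// A minimal usage sketch (assuming some `strategy_factory` implementing
/// [`BlockExecutionStrategyFactory`], a database `db`, and a recovered `block_with_senders`):
///
/// ```ignore
/// let provider = BasicBlockExecutorProvider::new(strategy_factory);
/// let output = provider.executor(db).execute((&block_with_senders, total_difficulty).into())?;
/// ```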
#[allow(missing_debug_implementations)] -pub struct GenericBlockExecutorProvider { +pub struct BasicBlockExecutorProvider { strategy_factory: F, } -impl GenericBlockExecutorProvider { - /// Creates a new `GenericBlockExecutorProvider` with the given strategy factory. +impl BasicBlockExecutorProvider { + /// Creates a new `BasicBlockExecutorProvider` with the given strategy factory. pub const fn new(strategy_factory: F) -> Self { Self { strategy_factory } } } -impl BlockExecutorProvider for GenericBlockExecutorProvider +impl BlockExecutorProvider for BasicBlockExecutorProvider where F: BlockExecutionStrategyFactory, { type Executor + Display>> = - GenericBlockExecutor, DB>; + BasicBlockExecutor, DB>; type BatchExecutor + Display>> = - GenericBatchExecutor, DB>; + BasicBatchExecutor, DB>; fn executor(&self, db: DB) -> Self::Executor where DB: Database + Display>, { let strategy = self.strategy_factory.create_strategy(db); - GenericBlockExecutor::new(strategy) + BasicBlockExecutor::new(strategy) } fn batch_executor(&self, db: DB) -> Self::BatchExecutor @@ -273,14 +273,14 @@ where { let strategy = self.strategy_factory.create_strategy(db); let batch_record = BlockBatchRecord::default(); - GenericBatchExecutor::new(strategy, batch_record) + BasicBatchExecutor::new(strategy, batch_record) } } /// A generic block executor that uses a [`BlockExecutionStrategy`] to /// execute blocks. #[allow(missing_debug_implementations, dead_code)] -pub struct GenericBlockExecutor +pub struct BasicBlockExecutor where S: BlockExecutionStrategy, { @@ -289,17 +289,17 @@ where _phantom: PhantomData, } -impl GenericBlockExecutor +impl BasicBlockExecutor where S: BlockExecutionStrategy, { - /// Creates a new `GenericBlockExecutor` with the given strategy. + /// Creates a new `BasicBlockExecutor` with the given strategy. pub const fn new(strategy: S) -> Self { Self { strategy, _phantom: PhantomData } } } -impl Executor for GenericBlockExecutor +impl Executor for BasicBlockExecutor where S: BlockExecutionStrategy, DB: Database + Display>, @@ -368,7 +368,7 @@ where /// A generic batch executor that uses a [`BlockExecutionStrategy`] to /// execute batches. #[allow(missing_debug_implementations)] -pub struct GenericBatchExecutor +pub struct BasicBatchExecutor where S: BlockExecutionStrategy, { @@ -379,17 +379,17 @@ where _phantom: PhantomData, } -impl GenericBatchExecutor +impl BasicBatchExecutor where S: BlockExecutionStrategy, { - /// Creates a new `GenericBatchExecutor` with the given strategy. + /// Creates a new `BasicBatchExecutor` with the given strategy. 
pub const fn new(strategy: S, batch_record: BlockBatchRecord) -> Self { Self { strategy, batch_record, _phantom: PhantomData } } } -impl BatchExecutor for GenericBatchExecutor +impl BatchExecutor for BasicBatchExecutor where S: BlockExecutionStrategy, DB: Database + Display>, @@ -661,7 +661,7 @@ mod tests { .clone(), finish_result: expected_finish_result.clone(), }; - let provider = GenericBlockExecutorProvider::new(strategy_factory); + let provider = BasicBlockExecutorProvider::new(strategy_factory); let db = CacheDB::>::default(); let executor = provider.executor(db); let result = executor.execute(BlockExecutionInput::new(&Default::default(), U256::ZERO)); diff --git a/crates/evm/src/test_utils.rs b/crates/evm/src/test_utils.rs index 5ae7ed45b..261b36420 100644 --- a/crates/evm/src/test_utils.rs +++ b/crates/evm/src/test_utils.rs @@ -2,8 +2,8 @@ use crate::{ execute::{ - BatchExecutor, BlockExecutionInput, BlockExecutionOutput, BlockExecutionStrategy, - BlockExecutorProvider, Executor, GenericBatchExecutor, GenericBlockExecutor, + BasicBatchExecutor, BasicBlockExecutor, BatchExecutor, BlockExecutionInput, + BlockExecutionOutput, BlockExecutionStrategy, BlockExecutorProvider, Executor, }, system_calls::OnStateHook, }; @@ -112,7 +112,7 @@ impl BatchExecutor for MockExecutorProvider { } } -impl GenericBlockExecutor +impl BasicBlockExecutor where S: BlockExecutionStrategy, { @@ -133,7 +133,7 @@ where } } -impl GenericBatchExecutor +impl BasicBatchExecutor where S: BlockExecutionStrategy, { diff --git a/crates/optimism/evm/src/strategy.rs b/crates/optimism/evm/src/strategy.rs index 4fc06c396..33770714c 100644 --- a/crates/optimism/evm/src/strategy.rs +++ b/crates/optimism/evm/src/strategy.rs @@ -278,7 +278,7 @@ mod tests { use alloy_consensus::TxEip1559; use alloy_primitives::{b256, Address, StorageKey, StorageValue}; use reth_chainspec::MIN_TRANSACTION_GAS; - use reth_evm::execute::{BatchExecutor, BlockExecutorProvider, GenericBlockExecutorProvider}; + use reth_evm::execute::{BasicBlockExecutorProvider, BatchExecutor, BlockExecutorProvider}; use reth_optimism_chainspec::{optimism_deposit_tx_signature, OpChainSpecBuilder}; use reth_primitives::{Account, Block, BlockBody, Signature, Transaction, TransactionSigned}; use reth_revm::{ @@ -315,11 +315,11 @@ mod tests { fn executor_provider( chain_spec: Arc, - ) -> GenericBlockExecutorProvider { + ) -> BasicBlockExecutorProvider { let strategy_factory = OpExecutionStrategyFactory::new(chain_spec.clone(), OptimismEvmConfig::new(chain_spec)); - GenericBlockExecutorProvider::new(strategy_factory) + BasicBlockExecutorProvider::new(strategy_factory) } #[test] From eec861fe9fc787900c18fc2d080f73c89620f5e3 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 16 Oct 2024 13:59:41 +0200 Subject: [PATCH 080/425] chore: rm unused optimism feature (#11794) --- crates/rpc/rpc/Cargo.toml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/crates/rpc/rpc/Cargo.toml b/crates/rpc/rpc/Cargo.toml index 5399e50ce..fe150e36e 100644 --- a/crates/rpc/rpc/Cargo.toml +++ b/crates/rpc/rpc/Cargo.toml @@ -98,8 +98,3 @@ jsonrpsee = { workspace = true, features = ["client"] } [features] js-tracer = ["revm-inspectors/js-tracer", "reth-rpc-eth-types/js-tracer"] -optimism = [ - "reth-primitives/optimism", - "reth-provider/optimism", - "reth-revm/optimism", -] From 6f041108767a76d4ee964411bb8e346364444d47 Mon Sep 17 00:00:00 2001 From: Evan Chipman <42247026+evchip@users.noreply.github.com> Date: Wed, 16 Oct 2024 19:25:27 +0700 Subject: [PATCH 081/425] chore: rename 
SenderId::into_id to SenderId::into_transaction_id (#11793) Co-authored-by: Matthias Seitz --- crates/transaction-pool/src/identifier.rs | 2 +- crates/transaction-pool/src/pool/mod.rs | 5 +++-- crates/transaction-pool/src/pool/txpool.rs | 12 ++++++++---- 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/crates/transaction-pool/src/identifier.rs b/crates/transaction-pool/src/identifier.rs index c50d39ae4..2e5312a21 100644 --- a/crates/transaction-pool/src/identifier.rs +++ b/crates/transaction-pool/src/identifier.rs @@ -61,7 +61,7 @@ impl SenderId { } /// Converts the sender to a [`TransactionId`] with the given nonce. - pub const fn into_id(self, nonce: u64) -> TransactionId { + pub const fn into_transaction_id(self, nonce: u64) -> TransactionId { TransactionId::new(self, nonce) } } diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index 64e6dad67..600a8da93 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -792,8 +792,9 @@ where on_chain_nonce: u64, ) -> Option>> { let sender_id = self.get_sender_id(sender); - self.get_pool_data() - .get_highest_consecutive_transaction_by_sender(sender_id.into_id(on_chain_nonce)) + self.get_pool_data().get_highest_consecutive_transaction_by_sender( + sender_id.into_transaction_id(on_chain_nonce), + ) } /// Returns all transactions that where submitted with the given [`TransactionOrigin`] diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index c89aca830..9d284392d 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -2804,20 +2804,24 @@ mod tests { // Get last consecutive transaction let sender_id = f.ids.sender_id(&sender).unwrap(); - let next_tx = pool.get_highest_consecutive_transaction_by_sender(sender_id.into_id(0)); + let next_tx = + pool.get_highest_consecutive_transaction_by_sender(sender_id.into_transaction_id(0)); assert_eq!(next_tx.map(|tx| tx.nonce()), Some(2), "Expected nonce 2 for on-chain nonce 0"); - let next_tx = pool.get_highest_consecutive_transaction_by_sender(sender_id.into_id(4)); + let next_tx = + pool.get_highest_consecutive_transaction_by_sender(sender_id.into_transaction_id(4)); assert_eq!(next_tx.map(|tx| tx.nonce()), Some(5), "Expected nonce 5 for on-chain nonce 4"); - let next_tx = pool.get_highest_consecutive_transaction_by_sender(sender_id.into_id(5)); + let next_tx = + pool.get_highest_consecutive_transaction_by_sender(sender_id.into_transaction_id(5)); assert_eq!(next_tx.map(|tx| tx.nonce()), Some(5), "Expected nonce 5 for on-chain nonce 5"); // update the tracked nonce let mut info = SenderInfo::default(); info.update(8, U256::ZERO); pool.sender_info.insert(sender_id, info); - let next_tx = pool.get_highest_consecutive_transaction_by_sender(sender_id.into_id(5)); + let next_tx = + pool.get_highest_consecutive_transaction_by_sender(sender_id.into_transaction_id(5)); assert_eq!(next_tx.map(|tx| tx.nonce()), Some(9), "Expected nonce 9 for on-chain nonce 8"); } From c76d3194446b59581f11072e9cb66307e4aa278e Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 16 Oct 2024 14:44:19 +0200 Subject: [PATCH 082/425] chore: rm optimism feature from reth-revm (#11797) --- crates/optimism/evm/Cargo.toml | 2 +- crates/optimism/node/Cargo.toml | 1 - crates/optimism/payload/Cargo.toml | 2 +- crates/revm/Cargo.toml | 1 - 4 files changed, 2 insertions(+), 4 deletions(-) diff --git a/crates/optimism/evm/Cargo.toml 
b/crates/optimism/evm/Cargo.toml index afaca32b6..53f9ae033 100644 --- a/crates/optimism/evm/Cargo.toml +++ b/crates/optimism/evm/Cargo.toml @@ -54,5 +54,5 @@ optimism = [ "reth-primitives/optimism", "reth-execution-types/optimism", "reth-optimism-consensus/optimism", - "reth-revm/optimism", + "revm/optimism", ] diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index f9e038a3d..8e359e602 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -87,7 +87,6 @@ optimism = [ "reth-optimism-evm/optimism", "reth-optimism-payload-builder/optimism", "reth-beacon-consensus/optimism", - "reth-revm/optimism", "revm/optimism", "reth-auto-seal-consensus/optimism", "reth-optimism-rpc/optimism", diff --git a/crates/optimism/payload/Cargo.toml b/crates/optimism/payload/Cargo.toml index 414b2c358..e1d6fe47d 100644 --- a/crates/optimism/payload/Cargo.toml +++ b/crates/optimism/payload/Cargo.toml @@ -52,5 +52,5 @@ optimism = [ "reth-primitives/optimism", "reth-provider/optimism", "reth-optimism-evm/optimism", - "reth-revm/optimism", + "revm/optimism", ] diff --git a/crates/revm/Cargo.toml b/crates/revm/Cargo.toml index 9e4501f62..7ffb06ce9 100644 --- a/crates/revm/Cargo.toml +++ b/crates/revm/Cargo.toml @@ -36,5 +36,4 @@ default = ["std", "c-kzg"] std = [] c-kzg = ["revm/c-kzg"] test-utils = ["dep:reth-trie"] -optimism = ["revm/optimism"] serde = ["revm/serde"] From 6ad1275e6b86b003eb51f794cd70de41559247f7 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Wed, 16 Oct 2024 17:04:23 +0200 Subject: [PATCH 083/425] chore(sdk): move block traits into `reth-primitives-traits` (#11780) --- crates/primitives-traits/src/block.rs | 99 +++++++++++++++++++ .../src}/block/body.rs | 27 +++-- .../src}/block/mod.rs | 76 ++++---------- crates/primitives/src/traits/mod.rs | 7 -- 4 files changed, 135 insertions(+), 74 deletions(-) create mode 100644 crates/primitives-traits/src/block.rs rename crates/{primitives/src/traits => primitives-traits/src}/block/body.rs (84%) rename crates/{primitives/src/traits => primitives-traits/src}/block/mod.rs (58%) delete mode 100644 crates/primitives/src/traits/mod.rs diff --git a/crates/primitives-traits/src/block.rs b/crates/primitives-traits/src/block.rs new file mode 100644 index 000000000..02f581801 --- /dev/null +++ b/crates/primitives-traits/src/block.rs @@ -0,0 +1,99 @@ +//! Block abstraction. + +pub mod body; + +use alloc::fmt; +use core::ops; + +use alloy_consensus::BlockHeader; +use alloy_primitives::{Address, Sealable, B256}; + +use crate::{traits::BlockBody, BlockWithSenders, SealedBlock, SealedHeader}; + +/// Abstraction of block data type. +pub trait Block: + fmt::Debug + + Clone + + PartialEq + + Eq + + Default + + serde::Serialize + + for<'a> serde::Deserialize<'a> + + From<(Self::Header, Self::Body)> + + Into<(Self::Header, Self::Body)> +{ + /// Header part of the block. + type Header: BlockHeader + Sealable; + + /// The block's body contains the transactions in the block. + type Body: BlockBody; + + /// A block and block hash. + type SealedBlock; + + /// A block and addresses of senders of transactions in it. + type BlockWithSenders; + + /// Returns reference to [`BlockHeader`] type. + fn header(&self) -> &Self::Header; + + /// Returns reference to [`BlockBody`] type. + fn body(&self) -> &Self::Body; + + /// Calculate the header hash and seal the block so that it can't be changed. 
+ // todo: can be default impl if sealed block type is made generic over header and body and + // migrated to alloy + fn seal_slow(self) -> Self::SealedBlock; + + /// Seal the block with a known hash. + /// + /// WARNING: This method does not perform validation whether the hash is correct. + // todo: can be default impl if sealed block type is made generic over header and body and + // migrated to alloy + fn seal(self, hash: B256) -> Self::SealedBlock; + + /// Expensive operation that recovers transaction signer. See + /// [`SealedBlockWithSenders`](reth_primitives::SealedBlockWithSenders). + fn senders(&self) -> Option> { + self.body().recover_signers() + } + + /// Transform into a [`BlockWithSenders`]. + /// + /// # Panics + /// + /// If the number of senders does not match the number of transactions in the block + /// and the signer recovery for one of the transactions fails. + /// + /// Note: this is expected to be called with blocks read from disk. + #[track_caller] + fn with_senders_unchecked(self, senders: Vec
<Address>) -> Self::BlockWithSenders {
+        self.try_with_senders_unchecked(senders).expect("stored block is valid")
+    }
+
+    /// Transform into a [`BlockWithSenders`] using the given senders.
+    ///
+    /// If the number of senders does not match the number of transactions in the block, this
+    /// falls back to manual recovery, but _without ensuring that the signature has a low `s`
+    /// value_. See also [`TransactionSigned::recover_signer_unchecked`]
+    ///
+    /// Returns an error if a signature is invalid.
+    // todo: can be default impl if block with senders type is made generic over block and migrated
+    // to alloy
+    #[track_caller]
+    fn try_with_senders_unchecked(
+        self,
+        senders: Vec<Address>
, + ) -> Result; + + /// **Expensive**. Transform into a [`BlockWithSenders`] by recovering senders in the contained + /// transactions. + /// + /// Returns `None` if a transaction is invalid. + // todo: can be default impl if sealed block type is made generic over header and body and + // migrated to alloy + fn with_recovered_senders(self) -> Option; + + /// Calculates a heuristic for the in-memory size of the [`Block`]. + fn size(&self) -> usize; +} diff --git a/crates/primitives/src/traits/block/body.rs b/crates/primitives-traits/src/block/body.rs similarity index 84% rename from crates/primitives/src/traits/block/body.rs rename to crates/primitives-traits/src/block/body.rs index ff8f71b76..03246c68b 100644 --- a/crates/primitives/src/traits/block/body.rs +++ b/crates/primitives-traits/src/block/body.rs @@ -3,10 +3,11 @@ use alloc::fmt; use core::ops; -use alloy_consensus::{BlockHeader, Transaction, TxType}; +use alloy_consensus::{BlockHeader,Request, Transaction, TxType}; use alloy_primitives::{Address, B256}; +use alloy_eips::eip1559::Withdrawal; -use crate::{proofs, traits::Block, Requests, Withdrawals}; +use crate::Block; /// Abstraction for block's body. pub trait BlockBody: @@ -27,18 +28,24 @@ pub trait BlockBody: /// Header type (uncle blocks). type Header: BlockHeader; + /// Withdrawals in block. + type Withdrawals: Iterator; + + /// Requests in block. + type Requests: Iterator; + /// Returns reference to transactions in block. fn transactions(&self) -> &[Self::SignedTransaction]; /// Returns [`Withdrawals`] in the block, if any. // todo: branch out into extension trait - fn withdrawals(&self) -> Option<&Withdrawals>; + fn withdrawals(&self) -> Option<&Self::Withdrawals>; /// Returns reference to uncle block headers. fn ommers(&self) -> &[Self::Header]; /// Returns [`Request`] in block, if any. - fn requests(&self) -> Option<&Requests>; + fn requests(&self) -> Option<&Self::Requests>; /// Create a [`Block`] from the body and its header. fn into_block>(self, header: Self::Header) -> T { @@ -53,15 +60,15 @@ pub trait BlockBody: /// Calculate the withdrawals root for the block body, if withdrawals exist. If there are no /// withdrawals, this will return `None`. - fn calculate_withdrawals_root(&self) -> Option { - Some(proofs::calculate_withdrawals_root(self.withdrawals()?)) - } + // todo: can be default impl if `calculate_withdrawals_root` made into a method on + // `Withdrawals` and `Withdrawals` moved to alloy + fn calculate_withdrawals_root(&self) -> Option; /// Calculate the requests root for the block body, if requests exist. If there are no /// requests, this will return `None`. - fn calculate_requests_root(&self) -> Option { - Some(proofs::calculate_requests_root(self.requests()?)) - } + // todo: can be default impl if `calculate_requests_root` made into a method on + // `Requests` and `Requests` moved to alloy + fn calculate_requests_root(&self) -> Option; /// Recover signer addresses for all transactions in the block body. fn recover_signers(&self) -> Option>; diff --git a/crates/primitives/src/traits/block/mod.rs b/crates/primitives-traits/src/block/mod.rs similarity index 58% rename from crates/primitives/src/traits/block/mod.rs rename to crates/primitives-traits/src/block/mod.rs index 451a54c34..02f581801 100644 --- a/crates/primitives/src/traits/block/mod.rs +++ b/crates/primitives-traits/src/block/mod.rs @@ -28,6 +28,12 @@ pub trait Block: /// The block's body contains the transactions in the block. type Body: BlockBody; + /// A block and block hash. 
+ type SealedBlock; + + /// A block and addresses of senders of transactions in it. + type BlockWithSenders; + /// Returns reference to [`BlockHeader`] type. fn header(&self) -> &Self::Header; @@ -35,20 +41,16 @@ pub trait Block: fn body(&self) -> &Self::Body; /// Calculate the header hash and seal the block so that it can't be changed. - fn seal_slow(self) -> SealedBlock { - let (header, body) = self.into(); - let sealed = header.seal_slow(); - let (header, seal) = sealed.into_parts(); - SealedBlock { header: SealedHeader::new(header, seal), body } - } + // todo: can be default impl if sealed block type is made generic over header and body and + // migrated to alloy + fn seal_slow(self) -> Self::SealedBlock; /// Seal the block with a known hash. /// /// WARNING: This method does not perform validation whether the hash is correct. - fn seal(self, hash: B256) -> SealedBlock { - let (header, body) = self.into(); - SealedBlock { header: SealedHeader::new(header, hash), body } - } + // todo: can be default impl if sealed block type is made generic over header and body and + // migrated to alloy + fn seal(self, hash: B256) -> Self::SealedBlock; /// Expensive operation that recovers transaction signer. See /// [`SealedBlockWithSenders`](reth_primitives::SealedBlockWithSenders). @@ -65,7 +67,7 @@ pub trait Block: /// /// Note: this is expected to be called with blocks read from disk. #[track_caller] - fn with_senders_unchecked(self, senders: Vec
<Address>) -> BlockWithSenders<Self> {
+    fn with_senders_unchecked(self, senders: Vec<Address>
) -> Self::BlockWithSenders {
        self.try_with_senders_unchecked(senders).expect("stored block is valid")
    }
@@ -76,62 +78,22 @@ pub trait Block:
    /// See also [`TransactionSigned::recover_signer_unchecked`]
    ///
    /// Returns an error if a signature is invalid.
+    // todo: can be default impl if block with senders type is made generic over block and migrated
+    // to alloy
    #[track_caller]
    fn try_with_senders_unchecked(
        self,
        senders: Vec<Address>
, - ) -> Result, Self> { - let senders = if self.body().transactions().len() == senders.len() { - senders - } else { - let Some(senders) = self.body().recover_signers() else { return Err(self) }; - senders - }; - - Ok(BlockWithSenders { block: self, senders }) - } + ) -> Result; /// **Expensive**. Transform into a [`BlockWithSenders`] by recovering senders in the contained /// transactions. /// /// Returns `None` if a transaction is invalid. - fn with_recovered_senders(self) -> Option> { - let senders = self.senders()?; - Some(BlockWithSenders { block: self, senders }) - } + // todo: can be default impl if sealed block type is made generic over header and body and + // migrated to alloy + fn with_recovered_senders(self) -> Option; /// Calculates a heuristic for the in-memory size of the [`Block`]. fn size(&self) -> usize; } - -impl Block for T -where - T: ops::Deref - + fmt::Debug - + Clone - + PartialEq - + Eq - + Default - + serde::Serialize - + for<'a> serde::Deserialize<'a> - + From<(::Header, ::Body)> - + Into<(::Header, ::Body)>, -{ - type Header = ::Header; - type Body = ::Body; - - #[inline] - fn header(&self) -> &Self::Header { - self.deref().header() - } - - #[inline] - fn body(&self) -> &Self::Body { - self.deref().body() - } - - #[inline] - fn size(&self) -> usize { - self.deref().size() - } -} diff --git a/crates/primitives/src/traits/mod.rs b/crates/primitives/src/traits/mod.rs deleted file mode 100644 index 8c84c6729..000000000 --- a/crates/primitives/src/traits/mod.rs +++ /dev/null @@ -1,7 +0,0 @@ -//! Abstractions of primitive data types - -pub mod block; - -pub use block::{body::BlockBody, Block}; - -pub use alloy_consensus::BlockHeader; From cb7fd084a673bae8c5b5c0c0fbf46b74fe89a23e Mon Sep 17 00:00:00 2001 From: greged93 <82421016+greged93@users.noreply.github.com> Date: Wed, 16 Oct 2024 17:05:59 +0200 Subject: [PATCH 084/425] chore: remove &self from update_estimated_gas_range (#11799) --- crates/rpc/rpc-eth-api/src/helpers/call.rs | 101 ++++++++++----------- 1 file changed, 50 insertions(+), 51 deletions(-) diff --git a/crates/rpc/rpc-eth-api/src/helpers/call.rs b/crates/rpc/rpc-eth-api/src/helpers/call.rs index 6784f9327..b43b34305 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/call.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/call.rs @@ -855,7 +855,7 @@ pub trait Call: LoadState + SpawnBlocking { // Update the gas used based on the new result. gas_used = res.result.gas_used(); // Update the gas limit estimates (highest and lowest) based on the execution result. - self.update_estimated_gas_range( + update_estimated_gas_range( res.result, optimistic_gas_limit, &mut highest_gas_limit, @@ -900,7 +900,7 @@ pub trait Call: LoadState + SpawnBlocking { // Unpack the result and environment if the transaction was successful. (res, env) = ethres?; // Update the estimated gas range based on the transaction result. - self.update_estimated_gas_range( + update_estimated_gas_range( res.result, mid_gas_limit, &mut highest_gas_limit, @@ -916,55 +916,6 @@ pub trait Call: LoadState + SpawnBlocking { Ok(U256::from(highest_gas_limit)) } - /// Updates the highest and lowest gas limits for binary search based on the execution result. - /// - /// This function refines the gas limit estimates used in a binary search to find the optimal - /// gas limit for a transaction. It adjusts the highest or lowest gas limits depending on - /// whether the execution succeeded, reverted, or halted due to specific reasons. 
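+///
+/// For example, if the call succeeds with gas limit `G`, `G` becomes the new highest bound; if it
+/// reverts (or halts with an out-of-gas or invalid-opcode reason), `G` becomes the new lowest
+/// bound, and the binary search continues between the two:
+///
+/// ```ignore
+/// update_estimated_gas_range(res.result, mid_gas_limit, &mut highest_gas_limit, &mut lowest_gas_limit)?;
+/// ```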
- #[inline] - fn update_estimated_gas_range( - &self, - result: ExecutionResult, - tx_gas_limit: u64, - highest_gas_limit: &mut u64, - lowest_gas_limit: &mut u64, - ) -> Result<(), Self::Error> { - match result { - ExecutionResult::Success { .. } => { - // Cap the highest gas limit with the succeeding gas limit. - *highest_gas_limit = tx_gas_limit; - } - ExecutionResult::Revert { .. } => { - // Increase the lowest gas limit. - *lowest_gas_limit = tx_gas_limit; - } - ExecutionResult::Halt { reason, .. } => { - match reason { - HaltReason::OutOfGas(_) | HaltReason::InvalidFEOpcode => { - // Both `OutOfGas` and `InvalidEFOpcode` can occur dynamically if the gas - // left is too low. Treat this as an out of gas - // condition, knowing that the call succeeds with a - // higher gas limit. - // - // Common usage of invalid opcode in OpenZeppelin: - // - - // Increase the lowest gas limit. - *lowest_gas_limit = tx_gas_limit; - } - err => { - // These cases should be unreachable because we know the transaction - // succeeds, but if they occur, treat them as an - // error. - return Err(RpcInvalidTransactionError::EvmHalt(err).into_eth_err()) - } - } - } - }; - - Ok(()) - } - /// Executes the requests again after an out of gas error to check if the error is gas related /// or not #[inline] @@ -1163,3 +1114,51 @@ pub trait Call: LoadState + SpawnBlocking { Ok(env) } } + +/// Updates the highest and lowest gas limits for binary search based on the execution result. +/// +/// This function refines the gas limit estimates used in a binary search to find the optimal +/// gas limit for a transaction. It adjusts the highest or lowest gas limits depending on +/// whether the execution succeeded, reverted, or halted due to specific reasons. +#[inline] +fn update_estimated_gas_range( + result: ExecutionResult, + tx_gas_limit: u64, + highest_gas_limit: &mut u64, + lowest_gas_limit: &mut u64, +) -> Result<(), EthApiError> { + match result { + ExecutionResult::Success { .. } => { + // Cap the highest gas limit with the succeeding gas limit. + *highest_gas_limit = tx_gas_limit; + } + ExecutionResult::Revert { .. } => { + // Increase the lowest gas limit. + *lowest_gas_limit = tx_gas_limit; + } + ExecutionResult::Halt { reason, .. } => { + match reason { + HaltReason::OutOfGas(_) | HaltReason::InvalidFEOpcode => { + // Both `OutOfGas` and `InvalidEFOpcode` can occur dynamically if the gas + // left is too low. Treat this as an out of gas + // condition, knowing that the call succeeds with a + // higher gas limit. + // + // Common usage of invalid opcode in OpenZeppelin: + // + + // Increase the lowest gas limit. + *lowest_gas_limit = tx_gas_limit; + } + err => { + // These cases should be unreachable because we know the transaction + // succeeds, but if they occur, treat them as an + // error. 
+ return Err(RpcInvalidTransactionError::EvmHalt(err).into_eth_err()) + } + } + } + }; + + Ok(()) +} From 281307fe4c25fef8cebbffcfff6eaf0f4d190d6b Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Wed, 16 Oct 2024 17:47:35 +0200 Subject: [PATCH 085/425] chore(ci): update list of crates excluded from wasm checks (#11787) --- .github/assets/check_wasm.sh | 5 ----- 1 file changed, 5 deletions(-) diff --git a/.github/assets/check_wasm.sh b/.github/assets/check_wasm.sh index 8d53f457a..1b1c0641f 100755 --- a/.github/assets/check_wasm.sh +++ b/.github/assets/check_wasm.sh @@ -30,12 +30,10 @@ exclude_crates=( reth-engine-util reth-eth-wire reth-ethereum-cli - reth-ethereum-engine reth-ethereum-engine-primitives reth-ethereum-payload-builder reth-etl reth-evm-ethereum - reth-execution-errors reth-exex reth-exex-test-utils reth-ipc @@ -49,7 +47,6 @@ exclude_crates=( reth-node-events reth-node-metrics reth-optimism-cli - reth-optimism-evm reth-optimism-node reth-optimism-payload-builder reth-optimism-rpc @@ -63,9 +60,7 @@ exclude_crates=( reth-rpc-eth-api reth-rpc-eth-types reth-rpc-layer - reth-rpc-types reth-stages - reth-storage-errors reth-engine-local # The following are not supposed to be working reth # all of the crates below From 6b2ec42e48394a8ffa0dc471ec7b2a9ba2a697c4 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 16 Oct 2024 18:50:18 +0200 Subject: [PATCH 086/425] docs: clarify max rpc tracing requests (#11796) Co-authored-by: Dan Cline <6798349+Rjected@users.noreply.github.com> --- book/cli/help.rs | 2 +- book/cli/reth/node.md | 4 +++- crates/node/core/src/args/rpc_server.rs | 5 +++++ 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/book/cli/help.rs b/book/cli/help.rs index e347e1ea5..963f53deb 100755 --- a/book/cli/help.rs +++ b/book/cli/help.rs @@ -320,7 +320,7 @@ fn preprocess_help(s: &str) -> Cow<'_, str> { (r"default: reth/.*/\w+", "default: reth//"), // Remove rpc.max-tracing-requests default value ( - r"(rpc.max-tracing-requests \n.*\n.*\n.*)\[default: \d+\]", + r"(rpc.max-tracing-requests \n.*\n.*\n.*\n.*\n.*)\[default: \d+\]", r"$1[default: ]", ), ]; diff --git a/book/cli/reth/node.md b/book/cli/reth/node.md index 73a8063a8..ea10d9522 100644 --- a/book/cli/reth/node.md +++ b/book/cli/reth/node.md @@ -331,7 +331,9 @@ RPC: [default: 500] --rpc.max-tracing-requests - Maximum number of concurrent tracing requests + Maximum number of concurrent tracing requests. + + By default this chooses a sensible value based on the number of available cores. Tracing requests are generally CPU bound. Choosing a value that is higher than the available CPU cores can have a negative impact on the performance of the node and affect the node's ability to maintain sync. [default: ] diff --git a/crates/node/core/src/args/rpc_server.rs b/crates/node/core/src/args/rpc_server.rs index 15771e989..382f22d37 100644 --- a/crates/node/core/src/args/rpc_server.rs +++ b/crates/node/core/src/args/rpc_server.rs @@ -135,6 +135,11 @@ pub struct RpcServerArgs { pub rpc_max_connections: MaxU32, /// Maximum number of concurrent tracing requests. + /// + /// By default this chooses a sensible value based on the number of available cores. + /// Tracing requests are generally CPU bound. + /// Choosing a value that is higher than the available CPU cores can have a negative impact on + /// the performance of the node and affect the node's ability to maintain sync. 
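+    ///
+    /// For example, `reth node --rpc.max-tracing-requests 8` caps the node at eight concurrent
+    /// tracing requests.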
#[arg(long = "rpc.max-tracing-requests", alias = "rpc-max-tracing-requests", value_name = "COUNT", default_value_t = constants::default_max_tracing_requests())] pub rpc_max_tracing_requests: usize, From 12cab204b50a824bcfb55f6b6d46f2e9cda34c31 Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Wed, 16 Oct 2024 19:21:25 +0200 Subject: [PATCH 087/425] fix(witness): branch node children decoding (#11599) --- crates/trie/db/tests/witness.rs | 52 +++++++++++++++++++++++++++++++++ crates/trie/trie/src/witness.rs | 22 ++++++++++---- 2 files changed, 68 insertions(+), 6 deletions(-) diff --git a/crates/trie/db/tests/witness.rs b/crates/trie/db/tests/witness.rs index 59656383d..20f8cfbb9 100644 --- a/crates/trie/db/tests/witness.rs +++ b/crates/trie/db/tests/witness.rs @@ -6,6 +6,8 @@ use alloy_primitives::{ Address, Bytes, B256, U256, }; use alloy_rlp::EMPTY_STRING_CODE; +use reth_db::{cursor::DbCursorRW, tables}; +use reth_db_api::transaction::DbTxMut; use reth_primitives::{constants::EMPTY_ROOT_HASH, Account, StorageEntry}; use reth_provider::{test_utils::create_test_provider_factory, HashingWriter}; use reth_trie::{proof::Proof, witness::TrieWitness, HashedPostState, HashedStorage, StateRoot}; @@ -91,3 +93,53 @@ fn includes_nodes_for_destroyed_storage_nodes() { assert_eq!(witness.get(&keccak256(node)), Some(node)); } } + +#[test] +fn correctly_decodes_branch_node_values() { + let factory = create_test_provider_factory(); + let provider = factory.provider_rw().unwrap(); + + let address = Address::random(); + let hashed_address = keccak256(address); + let hashed_slot1 = B256::with_last_byte(1); + let hashed_slot2 = B256::with_last_byte(2); + + // Insert account and slots into database + provider.insert_account_for_hashing([(address, Some(Account::default()))]).unwrap(); + let mut hashed_storage_cursor = + provider.tx_ref().cursor_dup_write::().unwrap(); + hashed_storage_cursor + .upsert(hashed_address, StorageEntry { key: hashed_slot1, value: U256::from(1) }) + .unwrap(); + hashed_storage_cursor + .upsert(hashed_address, StorageEntry { key: hashed_slot2, value: U256::from(1) }) + .unwrap(); + + let state_root = StateRoot::from_tx(provider.tx_ref()).root().unwrap(); + let multiproof = Proof::from_tx(provider.tx_ref()) + .multiproof(HashMap::from_iter([( + hashed_address, + HashSet::from_iter([hashed_slot1, hashed_slot2]), + )])) + .unwrap(); + + let witness = TrieWitness::from_tx(provider.tx_ref()) + .compute(HashedPostState { + accounts: HashMap::from([(hashed_address, Some(Account::default()))]), + storages: HashMap::from([( + hashed_address, + HashedStorage::from_iter( + false, + [hashed_slot1, hashed_slot2].map(|hashed_slot| (hashed_slot, U256::from(2))), + ), + )]), + }) + .unwrap(); + assert!(witness.contains_key(&state_root)); + for node in multiproof.account_subtree.values() { + assert_eq!(witness.get(&keccak256(node)), Some(node)); + } + for node in multiproof.storages.iter().flat_map(|(_, storage)| storage.subtree.values()) { + assert_eq!(witness.get(&keccak256(node)), Some(node)); + } +} diff --git a/crates/trie/trie/src/witness.rs b/crates/trie/trie/src/witness.rs index 971f10cfb..f3b70e85a 100644 --- a/crates/trie/trie/src/witness.rs +++ b/crates/trie/trie/src/witness.rs @@ -216,9 +216,14 @@ where TrieNode::Branch(branch) => { next_path.push(key[path.len()]); let children = branch_node_children(path.clone(), &branch); - for (child_path, node_hash) in children { + for (child_path, value) in children { if !key.starts_with(&child_path) { - trie_nodes.insert(child_path, 
Either::Left(node_hash)); + let value = if value.len() < B256::len_bytes() { + Either::Right(value.to_vec()) + } else { + Either::Left(B256::from_slice(&value[1..])) + }; + trie_nodes.insert(child_path, value); } } } @@ -312,8 +317,13 @@ where match TrieNode::decode(&mut &node[..])? { TrieNode::Branch(branch) => { let children = branch_node_children(path, &branch); - for (child_path, branch_hash) in children { - hash_builder.add_branch(child_path, branch_hash, false); + for (child_path, value) in children { + if value.len() < B256::len_bytes() { + hash_builder.add_leaf(child_path, value); + } else { + let hash = B256::from_slice(&value[1..]); + hash_builder.add_branch(child_path, hash, false); + } } break } @@ -343,14 +353,14 @@ where } /// Returned branch node children with keys in order. -fn branch_node_children(prefix: Nibbles, node: &BranchNode) -> Vec<(Nibbles, B256)> { +fn branch_node_children(prefix: Nibbles, node: &BranchNode) -> Vec<(Nibbles, &[u8])> { let mut children = Vec::with_capacity(node.state_mask.count_ones() as usize); let mut stack_ptr = node.as_ref().first_child_index(); for index in CHILD_INDEX_RANGE { if node.state_mask.is_bit_set(index) { let mut child_path = prefix.clone(); child_path.push(index); - children.push((child_path, B256::from_slice(&node.stack[stack_ptr][1..]))); + children.push((child_path, &node.stack[stack_ptr][..])); stack_ptr += 1; } } From 099987fc3d60f1c3bc6ca458242ae46988cba5bd Mon Sep 17 00:00:00 2001 From: Kunal Arora <55632507+aroralanuk@users.noreply.github.com> Date: Wed, 16 Oct 2024 23:00:26 +0530 Subject: [PATCH 088/425] chore(cli): add `default_client_version` to rethCli (#11773) Co-authored-by: Matthias Seitz --- Cargo.lock | 1 + crates/cli/cli/Cargo.toml | 2 +- crates/cli/cli/src/lib.rs | 4 ++++ 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 4d23c5b42..3211a92c6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6551,6 +6551,7 @@ dependencies = [ "clap", "eyre", "reth-cli-runner", + "reth-db", "serde_json", "shellexpand", ] diff --git a/crates/cli/cli/Cargo.toml b/crates/cli/cli/Cargo.toml index 7eb1f43b1..5da51a1b2 100644 --- a/crates/cli/cli/Cargo.toml +++ b/crates/cli/cli/Cargo.toml @@ -14,7 +14,7 @@ workspace = true [dependencies] # reth reth-cli-runner.workspace = true - +reth-db.workspace = true alloy-genesis.workspace = true # misc diff --git a/crates/cli/cli/src/lib.rs b/crates/cli/cli/src/lib.rs index 1db5ebf86..f7bf716ea 100644 --- a/crates/cli/cli/src/lib.rs +++ b/crates/cli/cli/src/lib.rs @@ -10,6 +10,7 @@ use clap::{Error, Parser}; use reth_cli_runner::CliRunner; +use reth_db::ClientVersion; use std::{borrow::Cow, ffi::OsString}; /// The chainspec module defines the different chainspecs that can be used by the node. @@ -66,4 +67,7 @@ pub trait RethCli: Sized { Ok(cli.with_runner(f)) } + + /// The client version of the node. 
+ fn client_version() -> ClientVersion; } From e5cd026e036e7c28fd76253f65a18402e8b8b6f2 Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Wed, 16 Oct 2024 19:49:57 +0200 Subject: [PATCH 089/425] deps: `alloy-trie@0.7.2` (#11807) --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3211a92c6..a1e11216b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -741,9 +741,9 @@ dependencies = [ [[package]] name = "alloy-trie" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fa8acead43cb238a7b7f47238c71137f4677a0b8d90e7e3be6e6ca59a28194e" +checksum = "cdd7f8b3a7c65ca09b3c7bdd7c7d72d7423d026f5247eda96af53d24e58315c1" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -4464,7 +4464,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -11378,7 +11378,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.48.0", ] [[package]] From dcaa432155931267a4010ab09d0d30b94ed30a41 Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Wed, 16 Oct 2024 20:31:58 +0200 Subject: [PATCH 090/425] chore(trie): use `RlpNode::as_hash` (#11808) --- crates/trie/sparse/src/trie.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index 1b83e07e4..2edaaf76b 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -267,8 +267,8 @@ impl RevealedSparseTrie { // take the current prefix set. 
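// A standalone sketch of the rule the hunks below replace with `RlpNode::as_hash`: an RLP
// child encoding of exactly 33 bytes (one RLP prefix byte plus a 32-byte digest) already
// is a hash reference, while anything shorter is an embedded node whose encoding must be
// hashed. This helper only illustrates the check; it is not the alloy-trie API.
use alloy_primitives::{keccak256, B256};

fn root_from_rlp(root_rlp: &[u8]) -> B256 {
    if root_rlp.len() == B256::len_bytes() + 1 {
        // Strip the RLP string prefix and take the embedded hash as-is.
        B256::from_slice(&root_rlp[1..])
    } else {
        // Short, inlined node: the root is the keccak256 of its RLP encoding.
        keccak256(root_rlp)
    }
}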
let mut prefix_set = std::mem::take(&mut self.prefix_set).freeze(); let root_rlp = self.rlp_node(Nibbles::default(), &mut prefix_set); - if root_rlp.len() == B256::len_bytes() + 1 { - B256::from_slice(&root_rlp[1..]) + if let Some(root_hash) = root_rlp.as_hash() { + root_hash } else { keccak256(root_rlp) } @@ -338,8 +338,8 @@ impl RevealedSparseTrie { } else { let value = self.values.get(&path).unwrap(); let rlp_node = LeafNodeRef { key, value }.rlp(&mut self.rlp_buf); - if rlp_node.len() == B256::len_bytes() + 1 { - *hash = Some(B256::from_slice(&rlp_node[1..])); + if let Some(node_hash) = rlp_node.as_hash() { + *hash = Some(node_hash); } rlp_node } @@ -353,8 +353,8 @@ impl RevealedSparseTrie { let (_, child) = rlp_node_stack.pop().unwrap(); self.rlp_buf.clear(); let rlp_node = ExtensionNodeRef::new(key, &child).rlp(&mut self.rlp_buf); - if rlp_node.len() == B256::len_bytes() + 1 { - *hash = Some(B256::from_slice(&rlp_node[1..])); + if let Some(node_hash) = rlp_node.as_hash() { + *hash = Some(node_hash); } rlp_node } else { @@ -393,8 +393,8 @@ impl RevealedSparseTrie { self.rlp_buf.clear(); let rlp_node = BranchNodeRef::new(&branch_value_stack_buf, *state_mask) .rlp(&mut self.rlp_buf); - if rlp_node.len() == B256::len_bytes() + 1 { - *hash = Some(B256::from_slice(&rlp_node[1..])); + if let Some(node_hash) = rlp_node.as_hash() { + *hash = Some(node_hash); } rlp_node } From 756a47e4e2a8ab350cfc12d6621981312e12f715 Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Wed, 16 Oct 2024 20:33:29 +0200 Subject: [PATCH 091/425] chore: add `shekhirin` to trie code owners (#11809) --- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index afb3d6776..488e6c90c 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -43,6 +43,6 @@ crates/tasks/ @mattsse crates/tokio-util/ @fgimenez @emhane crates/tracing/ @onbjerg crates/transaction-pool/ @mattsse -crates/trie/ @rkrasiuk @Rjected +crates/trie/ @rkrasiuk @Rjected @shekhirin etc/ @Rjected @onbjerg @shekhirin .github/ @onbjerg @gakonst @DaniPopes From a2249b0b04f7e92ea3c4b632c34269d048d7da05 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Wed, 16 Oct 2024 20:23:15 +0100 Subject: [PATCH 092/425] fix(exex): filter only WAL files when walking the directory (#11802) --- crates/exex/exex/src/wal/storage.rs | 37 ++++++++++++++++++++++++----- 1 file changed, 31 insertions(+), 6 deletions(-) diff --git a/crates/exex/exex/src/wal/storage.rs b/crates/exex/exex/src/wal/storage.rs index af3a590e5..aaa4398fd 100644 --- a/crates/exex/exex/src/wal/storage.rs +++ b/crates/exex/exex/src/wal/storage.rs @@ -9,6 +9,8 @@ use reth_exex_types::ExExNotification; use reth_tracing::tracing::debug; use tracing::instrument; +static FILE_EXTENSION: &str = "wal"; + /// The underlying WAL storage backed by a directory of files. /// /// Each notification is represented by a single file that contains a MessagePack-encoded @@ -29,7 +31,7 @@ impl Storage { } fn file_path(&self, id: u32) -> PathBuf { - self.path.join(format!("{id}.wal")) + self.path.join(format!("{id}.{FILE_EXTENSION}")) } fn parse_filename(filename: &str) -> eyre::Result { @@ -70,11 +72,14 @@ impl Storage { for entry in reth_fs_util::read_dir(&self.path)? 
{ let entry = entry?; - let file_name = entry.file_name(); - let file_id = Self::parse_filename(&file_name.to_string_lossy())?; - min_id = min_id.map_or(Some(file_id), |min_id: u32| Some(min_id.min(file_id))); - max_id = max_id.map_or(Some(file_id), |max_id: u32| Some(max_id.max(file_id))); + if entry.path().extension() == Some(FILE_EXTENSION.as_ref()) { + let file_name = entry.file_name(); + let file_id = Self::parse_filename(&file_name.to_string_lossy())?; + + min_id = min_id.map_or(Some(file_id), |min_id: u32| Some(min_id.min(file_id))); + max_id = max_id.map_or(Some(file_id), |max_id: u32| Some(max_id.max(file_id))); + } } Ok(min_id.zip(max_id).map(|(min_id, max_id)| min_id..=max_id)) @@ -167,7 +172,7 @@ impl Storage { #[cfg(test)] mod tests { - use std::sync::Arc; + use std::{fs::File, sync::Arc}; use eyre::OptionExt; use reth_exex_types::ExExNotification; @@ -206,4 +211,24 @@ mod tests { Ok(()) } + + #[test] + fn test_files_range() -> eyre::Result<()> { + let temp_dir = tempfile::tempdir()?; + let storage = Storage::new(&temp_dir)?; + + // Create WAL files + File::create(storage.file_path(1))?; + File::create(storage.file_path(2))?; + File::create(storage.file_path(3))?; + + // Create non-WAL files that should be ignored + File::create(temp_dir.path().join("0.tmp"))?; + File::create(temp_dir.path().join("4.tmp"))?; + + // Check files range + assert_eq!(storage.files_range()?, Some(1..=3)); + + Ok(()) + } } From b1cc16809b9246f632d9cd8736a283af247b98fb Mon Sep 17 00:00:00 2001 From: Kien Trinh <51135161+kien6034@users.noreply.github.com> Date: Thu, 17 Oct 2024 03:13:16 +0700 Subject: [PATCH 093/425] feat(cli): make pruning block interval an option (#11810) --- book/cli/reth/node.md | 2 -- crates/node/builder/src/launch/common.rs | 2 +- crates/node/core/src/args/pruning.rs | 4 ++-- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/book/cli/reth/node.md b/book/cli/reth/node.md index ea10d9522..34d32209a 100644 --- a/book/cli/reth/node.md +++ b/book/cli/reth/node.md @@ -616,8 +616,6 @@ Pruning: --block-interval Minimum pruning interval measured in blocks - [default: 0] - --prune.senderrecovery.full Prunes all sender recovery data diff --git a/crates/node/builder/src/launch/common.rs b/crates/node/builder/src/launch/common.rs index 3e8f92e70..ac2339fa6 100644 --- a/crates/node/builder/src/launch/common.rs +++ b/crates/node/builder/src/launch/common.rs @@ -1072,7 +1072,7 @@ mod tests { let node_config = NodeConfig { pruning: PruningArgs { full: true, - block_interval: 0, + block_interval: None, sender_recovery_full: false, sender_recovery_distance: None, sender_recovery_before: None, diff --git a/crates/node/core/src/args/pruning.rs b/crates/node/core/src/args/pruning.rs index 2bee5ec16..c0a3ae375 100644 --- a/crates/node/core/src/args/pruning.rs +++ b/crates/node/core/src/args/pruning.rs @@ -17,8 +17,8 @@ pub struct PruningArgs { pub full: bool, /// Minimum pruning interval measured in blocks. - #[arg(long, default_value_t = 0)] - pub block_interval: u64, + #[arg(long, default_value = None)] + pub block_interval: Option, // Sender Recovery /// Prunes all sender recovery data. 
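The exex WAL fix above (PATCH 092/425) only considers `*.wal` files when computing the
on-disk notification id range, so stray temporary files no longer break recovery. A
minimal standalone sketch of the same scan using only the standard library (a
hypothetical helper, not the reth-exex API):

use std::{fs, ops::RangeInclusive, path::Path};

fn files_range(dir: &Path) -> std::io::Result<Option<RangeInclusive<u32>>> {
    let mut min_id: Option<u32> = None;
    let mut max_id: Option<u32> = None;
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        // Skip anything that is not a `.wal` file, e.g. `*.tmp` leftovers.
        if path.extension().and_then(|ext| ext.to_str()) != Some("wal") {
            continue;
        }
        if let Some(id) =
            path.file_stem().and_then(|s| s.to_str()).and_then(|s| s.parse::<u32>().ok())
        {
            min_id = Some(min_id.map_or(id, |min| min.min(id)));
            max_id = Some(max_id.map_or(id, |max| max.max(id)));
        }
    }
    Ok(min_id.zip(max_id).map(|(lo, hi)| lo..=hi))
}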
From 82862fabd762f2a2b00dd77d140c2c6f5b63cadf Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 16 Oct 2024 22:33:57 +0200 Subject: [PATCH 094/425] primitives: rm redundant functions for `Transaction` (#11747) --- Cargo.lock | 3 + crates/chain-state/src/test_utils.rs | 2 +- crates/engine/util/Cargo.toml | 1 + crates/engine/util/src/reorg.rs | 1 + crates/ethereum/evm/Cargo.toml | 2 +- crates/ethereum/evm/src/execute.rs | 1 + crates/ethereum/evm/src/strategy.rs | 1 + crates/optimism/evm/Cargo.toml | 1 + crates/optimism/evm/src/execute.rs | 1 + crates/optimism/evm/src/strategy.rs | 1 + crates/optimism/rpc/Cargo.toml | 1 + crates/optimism/rpc/src/eth/transaction.rs | 1 + crates/primitives/src/transaction/mod.rs | 107 +----------------- crates/rpc/rpc-eth-api/Cargo.toml | 1 + .../rpc-eth-api/src/helpers/transaction.rs | 1 + crates/rpc/rpc-eth-types/src/receipt.rs | 1 + crates/rpc/rpc-eth-types/src/simulate.rs | 2 +- crates/rpc/rpc/src/txpool.rs | 1 + .../provider/src/providers/static_file/mod.rs | 1 + 19 files changed, 21 insertions(+), 109 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a1e11216b..aa7c704b4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7158,6 +7158,7 @@ dependencies = [ name = "reth-engine-util" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-primitives", "alloy-rpc-types-engine", "eyre", @@ -8235,6 +8236,7 @@ dependencies = [ name = "reth-optimism-rpc" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-rpc-types", @@ -8726,6 +8728,7 @@ dependencies = [ name = "reth-rpc-eth-api" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-dyn-abi", "alloy-eips", "alloy-json-rpc", diff --git a/crates/chain-state/src/test_utils.rs b/crates/chain-state/src/test_utils.rs index 4b0bfcdd9..ad5f2dbdb 100644 --- a/crates/chain-state/src/test_utils.rs +++ b/crates/chain-state/src/test_utils.rs @@ -2,7 +2,7 @@ use crate::{ in_memory::ExecutedBlock, CanonStateNotification, CanonStateNotifications, CanonStateSubscriptions, }; -use alloy_consensus::TxEip1559; +use alloy_consensus::{Transaction as _, TxEip1559}; use alloy_primitives::{Address, BlockNumber, Sealable, B256, U256}; use alloy_signer::SignerSync; use alloy_signer_local::PrivateKeySigner; diff --git a/crates/engine/util/Cargo.toml b/crates/engine/util/Cargo.toml index 20a0acb8d..c11948b94 100644 --- a/crates/engine/util/Cargo.toml +++ b/crates/engine/util/Cargo.toml @@ -29,6 +29,7 @@ reth-trie.workspace = true # alloy alloy-primitives.workspace = true alloy-rpc-types-engine.workspace = true +alloy-consensus.workspace = true # async tokio = { workspace = true, default-features = false } diff --git a/crates/engine/util/src/reorg.rs b/crates/engine/util/src/reorg.rs index 611095101..abfa23a57 100644 --- a/crates/engine/util/src/reorg.rs +++ b/crates/engine/util/src/reorg.rs @@ -1,5 +1,6 @@ //! Stream wrapper that simulates reorgs. 
+use alloy_consensus::Transaction; use alloy_primitives::U256; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, diff --git a/crates/ethereum/evm/Cargo.toml b/crates/ethereum/evm/Cargo.toml index a19cbc018..7215efa68 100644 --- a/crates/ethereum/evm/Cargo.toml +++ b/crates/ethereum/evm/Cargo.toml @@ -29,6 +29,7 @@ revm-primitives.workspace = true alloy-primitives.workspace = true alloy-eips.workspace = true alloy-sol-types.workspace = true +alloy-consensus.workspace = true [dev-dependencies] reth-testing-utils.workspace = true @@ -38,7 +39,6 @@ reth-primitives = { workspace = true, features = ["secp256k1"] } secp256k1.workspace = true serde_json.workspace = true alloy-genesis.workspace = true -alloy-consensus.workspace = true [features] default = ["std"] diff --git a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs index 108e1f87c..f712389fe 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -5,6 +5,7 @@ use crate::{ EthEvmConfig, }; use alloc::{boxed::Box, sync::Arc, vec, vec::Vec}; +use alloy_consensus::Transaction as _; use alloy_primitives::{BlockNumber, U256}; use core::fmt::Display; use reth_chainspec::{ChainSpec, EthereumHardforks, MAINNET}; diff --git a/crates/ethereum/evm/src/strategy.rs b/crates/ethereum/evm/src/strategy.rs index 1c284e068..7a297be49 100644 --- a/crates/ethereum/evm/src/strategy.rs +++ b/crates/ethereum/evm/src/strategy.rs @@ -5,6 +5,7 @@ use crate::{ EthEvmConfig, }; use alloc::sync::Arc; +use alloy_consensus::Transaction as _; use core::fmt::Display; use reth_chainspec::{ChainSpec, EthereumHardfork, EthereumHardforks, MAINNET}; use reth_consensus::ConsensusError; diff --git a/crates/optimism/evm/Cargo.toml b/crates/optimism/evm/Cargo.toml index 53f9ae033..0a22dcfdd 100644 --- a/crates/optimism/evm/Cargo.toml +++ b/crates/optimism/evm/Cargo.toml @@ -25,6 +25,7 @@ reth-consensus.workspace = true # ethereum alloy-primitives.workspace = true op-alloy-consensus.workspace = true +alloy-consensus.workspace = true # Optimism reth-optimism-consensus.workspace = true diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index ee0d028e4..f7da1c250 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -3,6 +3,7 @@ use crate::{ l1::ensure_create2_deployer, OpChainSpec, OptimismBlockExecutionError, OptimismEvmConfig, }; +use alloy_consensus::Transaction as _; use alloy_primitives::{BlockNumber, U256}; use reth_chainspec::{ChainSpec, EthereumHardforks}; use reth_evm::{ diff --git a/crates/optimism/evm/src/strategy.rs b/crates/optimism/evm/src/strategy.rs index 33770714c..fe8164cc7 100644 --- a/crates/optimism/evm/src/strategy.rs +++ b/crates/optimism/evm/src/strategy.rs @@ -1,6 +1,7 @@ //! 
Optimism block execution strategy, use crate::{l1::ensure_create2_deployer, OptimismBlockExecutionError, OptimismEvmConfig}; +use alloy_consensus::Transaction as _; use reth_chainspec::EthereumHardforks; use reth_consensus::ConsensusError; use reth_evm::{ diff --git a/crates/optimism/rpc/Cargo.toml b/crates/optimism/rpc/Cargo.toml index 65dce7510..90984998a 100644 --- a/crates/optimism/rpc/Cargo.toml +++ b/crates/optimism/rpc/Cargo.toml @@ -38,6 +38,7 @@ alloy-eips.workspace = true alloy-primitives.workspace = true alloy-rpc-types-eth.workspace = true alloy-rpc-types.workspace = true +alloy-consensus.workspace = true op-alloy-network.workspace = true op-alloy-rpc-types.workspace = true op-alloy-consensus.workspace = true diff --git a/crates/optimism/rpc/src/eth/transaction.rs b/crates/optimism/rpc/src/eth/transaction.rs index e161504f8..b7575c244 100644 --- a/crates/optimism/rpc/src/eth/transaction.rs +++ b/crates/optimism/rpc/src/eth/transaction.rs @@ -1,5 +1,6 @@ //! Loads and formats OP transaction RPC response. +use alloy_consensus::Transaction as _; use alloy_primitives::{Bytes, B256}; use alloy_rpc_types::TransactionInfo; use op_alloy_rpc_types::Transaction; diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 0463cd9ea..aeee4232e 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -249,33 +249,6 @@ impl Transaction { } } - /// Gets the transaction's value field. - pub const fn value(&self) -> U256 { - *match self { - Self::Legacy(TxLegacy { value, .. }) | - Self::Eip2930(TxEip2930 { value, .. }) | - Self::Eip1559(TxEip1559 { value, .. }) | - Self::Eip4844(TxEip4844 { value, .. }) | - Self::Eip7702(TxEip7702 { value, .. }) => value, - #[cfg(feature = "optimism")] - Self::Deposit(TxDeposit { value, .. }) => value, - } - } - - /// Get the transaction's nonce. - pub const fn nonce(&self) -> u64 { - match self { - Self::Legacy(TxLegacy { nonce, .. }) | - Self::Eip2930(TxEip2930 { nonce, .. }) | - Self::Eip1559(TxEip1559 { nonce, .. }) | - Self::Eip4844(TxEip4844 { nonce, .. }) | - Self::Eip7702(TxEip7702 { nonce, .. }) => *nonce, - // Deposit transactions do not have nonces. - #[cfg(feature = "optimism")] - Self::Deposit(_) => 0, - } - } - /// Returns the [`AccessList`] of the transaction. /// /// Returns `None` for legacy transactions. @@ -301,19 +274,6 @@ impl Transaction { } } - /// Get the gas limit of the transaction. - pub const fn gas_limit(&self) -> u64 { - match self { - Self::Legacy(TxLegacy { gas_limit, .. }) | - Self::Eip1559(TxEip1559 { gas_limit, .. }) | - Self::Eip4844(TxEip4844 { gas_limit, .. }) | - Self::Eip7702(TxEip7702 { gas_limit, .. }) | - Self::Eip2930(TxEip2930 { gas_limit, .. }) => *gas_limit, - #[cfg(feature = "optimism")] - Self::Deposit(TxDeposit { gas_limit, .. }) => *gas_limit, - } - } - /// Returns true if the tx supports dynamic fees pub const fn is_dynamic_fee(&self) -> bool { match self { @@ -324,40 +284,6 @@ impl Transaction { } } - /// Max fee per gas for eip1559 transaction, for legacy transactions this is `gas_price`. - /// - /// This is also commonly referred to as the "Gas Fee Cap" (`GasFeeCap`). - pub const fn max_fee_per_gas(&self) -> u128 { - match self { - Self::Legacy(TxLegacy { gas_price, .. }) | - Self::Eip2930(TxEip2930 { gas_price, .. }) => *gas_price, - Self::Eip1559(TxEip1559 { max_fee_per_gas, .. }) | - Self::Eip4844(TxEip4844 { max_fee_per_gas, .. }) | - Self::Eip7702(TxEip7702 { max_fee_per_gas, .. 
}) => *max_fee_per_gas, - // Deposit transactions buy their L2 gas on L1 and, as such, the L2 gas is not - // refundable. - #[cfg(feature = "optimism")] - Self::Deposit(_) => 0, - } - } - - /// Max priority fee per gas for eip1559 transaction, for legacy and eip2930 transactions this - /// is `None` - /// - /// This is also commonly referred to as the "Gas Tip Cap" (`GasTipCap`). - pub const fn max_priority_fee_per_gas(&self) -> Option { - match self { - Self::Legacy(_) | Self::Eip2930(_) => None, - Self::Eip1559(TxEip1559 { max_priority_fee_per_gas, .. }) | - Self::Eip4844(TxEip4844 { max_priority_fee_per_gas, .. }) | - Self::Eip7702(TxEip7702 { max_priority_fee_per_gas, .. }) => { - Some(*max_priority_fee_per_gas) - } - #[cfg(feature = "optimism")] - Self::Deposit(_) => None, - } - } - /// Blob versioned hashes for eip4844 transaction, for legacy, eip1559, eip2930 and eip7702 /// transactions this is `None` /// @@ -373,18 +299,6 @@ impl Transaction { } } - /// Max fee per blob gas for eip4844 transaction [`TxEip4844`]. - /// - /// Returns `None` for non-eip4844 transactions. - /// - /// This is also commonly referred to as the "Blob Gas Fee Cap" (`BlobGasFeeCap`). - pub const fn max_fee_per_blob_gas(&self) -> Option { - match self { - Self::Eip4844(TxEip4844 { max_fee_per_blob_gas, .. }) => Some(*max_fee_per_blob_gas), - _ => None, - } - } - /// Returns the blob gas used for all blobs of the EIP-4844 transaction if it is an EIP-4844 /// transaction. /// @@ -394,25 +308,6 @@ impl Transaction { self.as_eip4844().map(TxEip4844::blob_gas) } - /// Return the max priority fee per gas if the transaction is an EIP-1559 transaction, and - /// otherwise return the gas price. - /// - /// # Warning - /// - /// This is different than the `max_priority_fee_per_gas` method, which returns `None` for - /// non-EIP-1559 transactions. - pub const fn priority_fee_or_price(&self) -> u128 { - match self { - Self::Legacy(TxLegacy { gas_price, .. }) | - Self::Eip2930(TxEip2930 { gas_price, .. }) => *gas_price, - Self::Eip1559(TxEip1559 { max_priority_fee_per_gas, .. }) | - Self::Eip4844(TxEip4844 { max_priority_fee_per_gas, .. }) | - Self::Eip7702(TxEip7702 { max_priority_fee_per_gas, .. }) => *max_priority_fee_per_gas, - #[cfg(feature = "optimism")] - Self::Deposit(_) => 0, - } - } - /// Returns the effective gas price for the given base fee. /// /// If the transaction is a legacy or EIP2930 transaction, the gas price is returned. @@ -923,7 +818,7 @@ impl AlloyTransaction for Transaction { } } - fn value(&self) -> alloy_primitives::U256 { + fn value(&self) -> U256 { match self { Self::Legacy(tx) => tx.value(), Self::Eip2930(tx) => tx.value(), diff --git a/crates/rpc/rpc-eth-api/Cargo.toml b/crates/rpc/rpc-eth-api/Cargo.toml index e59ee39a6..9d0f6cfd8 100644 --- a/crates/rpc/rpc-eth-api/Cargo.toml +++ b/crates/rpc/rpc-eth-api/Cargo.toml @@ -40,6 +40,7 @@ alloy-primitives.workspace = true alloy-rpc-types-eth.workspace = true alloy-rpc-types.workspace = true alloy-rpc-types-mev.workspace = true +alloy-consensus.workspace = true # rpc jsonrpsee = { workspace = true, features = ["server", "macros"] } diff --git a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs index 4c2493717..54d60cb7a 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs @@ -1,6 +1,7 @@ //! Database access for `eth_` transaction RPC methods. Loads transaction and receipt data w.r.t. //! network. 
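// The inherent accessors removed from reth's `Transaction` enum above (nonce, value,
// gas_limit, max_fee_per_gas, ...) are supplied by the `alloy_consensus::Transaction`
// trait, so call sites only need the trait in scope. A minimal sketch over any type
// implementing the trait; nothing here is reth-specific:
use alloy_consensus::Transaction;

fn describe<T: Transaction>(tx: &T) {
    println!("nonce           = {}", tx.nonce());
    println!("value           = {}", tx.value());
    println!("max fee per gas = {}", tx.max_fee_per_gas());
}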
+use alloy_consensus::Transaction; use alloy_dyn_abi::TypedData; use alloy_eips::eip2718::Encodable2718; use alloy_network::TransactionBuilder; diff --git a/crates/rpc/rpc-eth-types/src/receipt.rs b/crates/rpc/rpc-eth-types/src/receipt.rs index e95c92f24..2668291e2 100644 --- a/crates/rpc/rpc-eth-types/src/receipt.rs +++ b/crates/rpc/rpc-eth-types/src/receipt.rs @@ -1,5 +1,6 @@ //! RPC receipt response builder, extends a layer one receipt with layer two data. +use alloy_consensus::Transaction; use alloy_primitives::{Address, TxKind}; use alloy_rpc_types::{ AnyReceiptEnvelope, AnyTransactionReceipt, Log, ReceiptWithBloom, TransactionReceipt, diff --git a/crates/rpc/rpc-eth-types/src/simulate.rs b/crates/rpc/rpc-eth-types/src/simulate.rs index 561aa360d..a673da967 100644 --- a/crates/rpc/rpc-eth-types/src/simulate.rs +++ b/crates/rpc/rpc-eth-types/src/simulate.rs @@ -1,6 +1,6 @@ //! Utilities for serving `eth_simulateV1` -use alloy_consensus::{TxEip4844Variant, TxType, TypedTransaction}; +use alloy_consensus::{Transaction as _, TxEip4844Variant, TxType, TypedTransaction}; use alloy_primitives::Parity; use alloy_rpc_types::{ simulate::{SimCallResult, SimulateError, SimulatedBlock}, diff --git a/crates/rpc/rpc/src/txpool.rs b/crates/rpc/rpc/src/txpool.rs index 5e26935ca..47aaac0bb 100644 --- a/crates/rpc/rpc/src/txpool.rs +++ b/crates/rpc/rpc/src/txpool.rs @@ -1,5 +1,6 @@ use std::{collections::BTreeMap, marker::PhantomData}; +use alloy_consensus::Transaction; use alloy_primitives::Address; use alloy_rpc_types_txpool::{ TxpoolContent, TxpoolContentFrom, TxpoolInspect, TxpoolInspectSummary, TxpoolStatus, diff --git a/crates/storage/provider/src/providers/static_file/mod.rs b/crates/storage/provider/src/providers/static_file/mod.rs index 45b7816af..52eb6ed66 100644 --- a/crates/storage/provider/src/providers/static_file/mod.rs +++ b/crates/storage/provider/src/providers/static_file/mod.rs @@ -56,6 +56,7 @@ impl Deref for LoadedJar { mod tests { use super::*; use crate::{test_utils::create_test_provider_factory, HeaderProvider}; + use alloy_consensus::Transaction; use alloy_primitives::{BlockHash, TxNumber, B256, U256}; use rand::seq::SliceRandom; use reth_db::{ From d2ca8f3a2b4139617fcafb05318447653a2a68af Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 16 Oct 2024 23:59:58 +0200 Subject: [PATCH 095/425] fix: update block interval properly (#11546) --- crates/config/src/config.rs | 49 ++++++++++++++++------------ crates/node/core/src/args/pruning.rs | 9 +++-- 2 files changed, 35 insertions(+), 23 deletions(-) diff --git a/crates/config/src/config.rs b/crates/config/src/config.rs index e4a7fc967..4c4b6066a 100644 --- a/crates/config/src/config.rs +++ b/crates/config/src/config.rs @@ -14,6 +14,9 @@ use std::{ const EXTENSION: &str = "toml"; +/// The default prune block interval +pub const DEFAULT_BLOCK_INTERVAL: usize = 5; + /// Configuration for the reth node. #[derive(Debug, Clone, Default, Deserialize, PartialEq, Eq, Serialize)] #[serde(default)] @@ -383,7 +386,7 @@ pub struct PruneConfig { impl Default for PruneConfig { fn default() -> Self { - Self { block_interval: 5, segments: PruneModes::none() } + Self { block_interval: DEFAULT_BLOCK_INTERVAL, segments: PruneModes::none() } } } @@ -397,27 +400,33 @@ impl PruneConfig { /// if the corresponding value in this config is not set. 
pub fn merge(&mut self, other: Option) { let Some(other) = other else { return }; - - // Merge block_interval - if self.block_interval == 0 { - self.block_interval = other.block_interval; + let Self { + block_interval, + segments: + PruneModes { + sender_recovery, + transaction_lookup, + receipts, + account_history, + storage_history, + receipts_log_filter, + }, + } = other; + + // Merge block_interval, only update if it's the default interval + if self.block_interval == DEFAULT_BLOCK_INTERVAL { + self.block_interval = block_interval; } // Merge the various segment prune modes - self.segments.sender_recovery = - self.segments.sender_recovery.or(other.segments.sender_recovery); - self.segments.transaction_lookup = - self.segments.transaction_lookup.or(other.segments.transaction_lookup); - self.segments.receipts = self.segments.receipts.or(other.segments.receipts); - self.segments.account_history = - self.segments.account_history.or(other.segments.account_history); - self.segments.storage_history = - self.segments.storage_history.or(other.segments.storage_history); - - if self.segments.receipts_log_filter.0.is_empty() && - !other.segments.receipts_log_filter.0.is_empty() - { - self.segments.receipts_log_filter = other.segments.receipts_log_filter; + self.segments.sender_recovery = self.segments.sender_recovery.or(sender_recovery); + self.segments.transaction_lookup = self.segments.transaction_lookup.or(transaction_lookup); + self.segments.receipts = self.segments.receipts.or(receipts); + self.segments.account_history = self.segments.account_history.or(account_history); + self.segments.storage_history = self.segments.storage_history.or(storage_history); + + if self.segments.receipts_log_filter.0.is_empty() && !receipts_log_filter.0.is_empty() { + self.segments.receipts_log_filter = receipts_log_filter; } } } @@ -961,7 +970,7 @@ receipts = 'full' // Check that the configuration has been merged. Any configuration present in config1 // should not be overwritten by config2 - assert_eq!(config1.block_interval, 5); + assert_eq!(config1.block_interval, 10); assert_eq!(config1.segments.sender_recovery, Some(PruneMode::Full)); assert_eq!(config1.segments.transaction_lookup, Some(PruneMode::Full)); assert_eq!(config1.segments.receipts, Some(PruneMode::Distance(1000))); diff --git a/crates/node/core/src/args/pruning.rs b/crates/node/core/src/args/pruning.rs index c0a3ae375..cd852b9e1 100644 --- a/crates/node/core/src/args/pruning.rs +++ b/crates/node/core/src/args/pruning.rs @@ -2,7 +2,7 @@ use crate::args::error::ReceiptsLogError; use alloy_primitives::{Address, BlockNumber}; -use clap::Args; +use clap::{builder::RangedU64ValueParser, Args}; use reth_chainspec::EthChainSpec; use reth_config::config::PruneConfig; use reth_prune_types::{PruneMode, PruneModes, ReceiptsLogPruneConfig, MINIMUM_PRUNING_DISTANCE}; @@ -17,7 +17,7 @@ pub struct PruningArgs { pub full: bool, /// Minimum pruning interval measured in blocks. - #[arg(long, default_value = None)] + #[arg(long, value_parser = RangedU64ValueParser::::new().range(1..),)] pub block_interval: Option, // Sender Recovery @@ -99,7 +99,7 @@ impl PruningArgs { // If --full is set, use full node defaults. if self.full { config = PruneConfig { - block_interval: 5, + block_interval: config.block_interval, segments: PruneModes { sender_recovery: Some(PruneMode::Full), transaction_lookup: None, @@ -123,6 +123,9 @@ impl PruningArgs { } // Override with any explicitly set prune.* flags. 
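// A tiny standalone illustration of the `or()`-based rule in `PruneConfig::merge` above:
// values already set on `self` win and `other` only fills the gaps (hypothetical types,
// not reth's prune configuration).
#[derive(Debug, PartialEq)]
struct SegmentsSketch {
    receipts: Option<u64>,
    account_history: Option<u64>,
}

fn merge(ours: &mut SegmentsSketch, theirs: SegmentsSketch) {
    ours.receipts = ours.receipts.or(theirs.receipts);
    ours.account_history = ours.account_history.or(theirs.account_history);
}
// e.g. merging `{ receipts: Some(1000), account_history: None }` with a file that sets
// both keeps `Some(1000)` and only adopts the file's `account_history`.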
+ if let Some(block_interval) = self.block_interval { + config.block_interval = block_interval as usize; + } if let Some(mode) = self.sender_recovery_prune_mode() { config.segments.sender_recovery = Some(mode); } From 5a82f20a294c817d7070ad8fa0118a18400f7b63 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 17 Oct 2024 00:03:33 +0200 Subject: [PATCH 096/425] chore: rm 1 usage of optimism feature (#11813) --- crates/storage/codecs/src/alloy/transaction/mod.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/crates/storage/codecs/src/alloy/transaction/mod.rs b/crates/storage/codecs/src/alloy/transaction/mod.rs index 86edfee5b..5b1d173a5 100644 --- a/crates/storage/codecs/src/alloy/transaction/mod.rs +++ b/crates/storage/codecs/src/alloy/transaction/mod.rs @@ -15,8 +15,6 @@ mod tests { // this check is to ensure we do not inadvertently add too many fields to a struct which would // expand the flags field and break backwards compatibility - #[cfg(feature = "optimism")] - use crate::alloy::transaction::optimism::TxDeposit; use crate::alloy::transaction::{ eip1559::TxEip1559, eip2930::TxEip2930, eip4844::TxEip4844, eip7702::TxEip7702, legacy::TxLegacy, @@ -34,6 +32,6 @@ mod tests { #[cfg(feature = "optimism")] #[test] fn test_ensure_backwards_compatibility_optimism() { - assert_eq!(TxDeposit::bitflag_encoded_bytes(), 2); + assert_eq!(crate::alloy::transaction::optimism::TxDeposit::bitflag_encoded_bytes(), 2); } } From 025cb3b70e02ee99295a5f98a117906d2938c42d Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 17 Oct 2024 00:07:27 +0200 Subject: [PATCH 097/425] primitive-traits: rm redundant `EMPTY_ROOT_HASH` definition (#11811) Co-authored-by: Matthias Seitz --- Cargo.lock | 2 ++ crates/blockchain-tree/src/blockchain_tree.rs | 4 ++-- crates/chain-state/src/test_utils.rs | 4 ++-- crates/ethereum/evm/src/execute.rs | 5 ++--- crates/primitives-traits/src/constants/mod.rs | 5 +---- crates/primitives/src/proofs.rs | 3 ++- crates/rpc/rpc-eth-api/src/helpers/pending_block.rs | 3 ++- crates/trie/db/Cargo.toml | 2 ++ crates/trie/db/tests/proof.rs | 3 ++- crates/trie/db/tests/trie.rs | 3 ++- crates/trie/db/tests/witness.rs | 3 ++- crates/trie/trie/Cargo.toml | 1 + crates/trie/trie/src/trie.rs | 2 +- crates/trie/trie/src/witness.rs | 2 +- 14 files changed, 24 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index aa7c704b4..bb2577577 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9119,6 +9119,7 @@ dependencies = [ name = "reth-trie" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-primitives", "alloy-rlp", "auto_impl", @@ -9174,6 +9175,7 @@ dependencies = [ name = "reth-trie-db" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-primitives", "alloy-rlp", "auto_impl", diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index 4bed718aa..71a58aa56 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -1376,7 +1376,7 @@ where #[cfg(test)] mod tests { use super::*; - use alloy_consensus::TxEip1559; + use alloy_consensus::{TxEip1559, EMPTY_ROOT_HASH}; use alloy_genesis::{Genesis, GenesisAccount}; use alloy_primitives::{keccak256, Address, Sealable, B256}; use assert_matches::assert_matches; @@ -1388,7 +1388,7 @@ mod tests { use reth_evm::test_utils::MockExecutorProvider; use reth_evm_ethereum::execute::EthExecutorProvider; use reth_primitives::{ - 
constants::{EIP1559_INITIAL_BASE_FEE, EMPTY_ROOT_HASH}, + constants::EIP1559_INITIAL_BASE_FEE, proofs::{calculate_receipt_root, calculate_transaction_root}, revm_primitives::AccountInfo, Account, BlockBody, Header, Signature, Transaction, TransactionSigned, diff --git a/crates/chain-state/src/test_utils.rs b/crates/chain-state/src/test_utils.rs index ad5f2dbdb..a820bb5cf 100644 --- a/crates/chain-state/src/test_utils.rs +++ b/crates/chain-state/src/test_utils.rs @@ -2,7 +2,7 @@ use crate::{ in_memory::ExecutedBlock, CanonStateNotification, CanonStateNotifications, CanonStateSubscriptions, }; -use alloy_consensus::{Transaction as _, TxEip1559}; +use alloy_consensus::{Transaction as _, TxEip1559, EMPTY_ROOT_HASH}; use alloy_primitives::{Address, BlockNumber, Sealable, B256, U256}; use alloy_signer::SignerSync; use alloy_signer_local::PrivateKeySigner; @@ -10,7 +10,7 @@ use rand::{thread_rng, Rng}; use reth_chainspec::{ChainSpec, EthereumHardfork, MIN_TRANSACTION_GAS}; use reth_execution_types::{Chain, ExecutionOutcome}; use reth_primitives::{ - constants::{EIP1559_INITIAL_BASE_FEE, EMPTY_ROOT_HASH}, + constants::EIP1559_INITIAL_BASE_FEE, proofs::{calculate_receipt_root, calculate_transaction_root, calculate_withdrawals_root}, BlockBody, Header, Receipt, Receipts, Requests, SealedBlock, SealedBlockWithSenders, SealedHeader, Transaction, TransactionSigned, TransactionSignedEcRecovered, diff --git a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs index f712389fe..9c7748a56 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -494,7 +494,7 @@ where #[cfg(test)] mod tests { use super::*; - use alloy_consensus::TxLegacy; + use alloy_consensus::{TxLegacy, EMPTY_ROOT_HASH}; use alloy_eips::{ eip2935::{HISTORY_STORAGE_ADDRESS, HISTORY_STORAGE_CODE}, eip4788::{BEACON_ROOTS_ADDRESS, BEACON_ROOTS_CODE, SYSTEM_ADDRESS}, @@ -503,8 +503,7 @@ mod tests { use alloy_primitives::{b256, fixed_bytes, keccak256, Bytes, TxKind, B256}; use reth_chainspec::{ChainSpecBuilder, ForkCondition}; use reth_primitives::{ - constants::{EMPTY_ROOT_HASH, ETH_TO_WEI}, - public_key_to_address, Account, Block, BlockBody, Transaction, + constants::ETH_TO_WEI, public_key_to_address, Account, Block, BlockBody, Transaction, }; use reth_revm::{ database::StateProviderDatabase, test_utils::StateProviderTest, TransitionState, diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index ed5c893dd..d40abdd64 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -1,5 +1,6 @@ //! 
Ethereum protocol-related constants +use alloy_consensus::EMPTY_ROOT_HASH; use alloy_primitives::{address, b256, Address, B256, U256}; use core::time::Duration; @@ -122,10 +123,6 @@ pub const KECCAK_EMPTY: B256 = pub const EMPTY_OMMER_ROOT_HASH: B256 = b256!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"); -/// Root hash of an empty trie: `0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421` -pub const EMPTY_ROOT_HASH: B256 = - b256!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"); - /// From address from Optimism system txs: `0xdeaddeaddeaddeaddeaddeaddeaddeaddead0001` pub const OP_SYSTEM_TX_FROM_ADDR: Address = address!("deaddeaddeaddeaddeaddeaddeaddeaddead0001"); diff --git a/crates/primitives/src/proofs.rs b/crates/primitives/src/proofs.rs index a12a5d6be..dc814804e 100644 --- a/crates/primitives/src/proofs.rs +++ b/crates/primitives/src/proofs.rs @@ -65,7 +65,8 @@ pub fn calculate_ommers_root(ommers: &[Header]) -> B256 { #[cfg(test)] mod tests { use super::*; - use crate::{constants::EMPTY_ROOT_HASH, Block}; + use crate::Block; + use alloy_consensus::EMPTY_ROOT_HASH; use alloy_genesis::GenesisAccount; use alloy_primitives::{b256, hex_literal::hex, Address, U256}; use alloy_rlp::Decodable; diff --git a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs index 26e0ffc74..81c6a5678 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs @@ -5,6 +5,7 @@ use std::time::{Duration, Instant}; use crate::{EthApiTypes, FromEthApiError, FromEvmError}; +use alloy_consensus::EMPTY_ROOT_HASH; use alloy_primitives::{BlockNumber, B256, U256}; use alloy_rpc_types::BlockNumberOrTag; use futures::Future; @@ -12,7 +13,7 @@ use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_evm::{system_calls::SystemCaller, ConfigureEvm, ConfigureEvmEnv}; use reth_execution_types::ExecutionOutcome; use reth_primitives::{ - constants::{eip4844::MAX_DATA_GAS_PER_BLOCK, BEACON_NONCE, EMPTY_ROOT_HASH}, + constants::{eip4844::MAX_DATA_GAS_PER_BLOCK, BEACON_NONCE}, proofs::calculate_transaction_root, revm_primitives::{ BlockEnv, CfgEnv, CfgEnvWithHandlerCfg, EVMError, Env, ExecutionResult, InvalidTransaction, diff --git a/crates/trie/db/Cargo.toml b/crates/trie/db/Cargo.toml index 6d322ba3f..a0e1acbce 100644 --- a/crates/trie/db/Cargo.toml +++ b/crates/trie/db/Cargo.toml @@ -58,6 +58,8 @@ reth-trie-common = { workspace = true, features = ["test-utils", "arbitrary"] } reth-trie = { workspace = true, features = ["test-utils"] } reth-node-types.workspace = true +alloy-consensus.workspace = true + # trie triehash = "0.8" diff --git a/crates/trie/db/tests/proof.rs b/crates/trie/db/tests/proof.rs index 5ffa6729b..79a2ce96f 100644 --- a/crates/trie/db/tests/proof.rs +++ b/crates/trie/db/tests/proof.rs @@ -1,9 +1,10 @@ #![allow(missing_docs)] +use alloy_consensus::EMPTY_ROOT_HASH; use alloy_primitives::{keccak256, Address, Bytes, B256, U256}; use alloy_rlp::EMPTY_STRING_CODE; use reth_chainspec::{Chain, ChainSpec, HOLESKY, MAINNET}; -use reth_primitives::{constants::EMPTY_ROOT_HASH, Account}; +use reth_primitives::Account; use reth_provider::test_utils::{create_test_provider_factory, insert_genesis}; use reth_trie::{proof::Proof, Nibbles}; use reth_trie_common::{AccountProof, StorageProof}; diff --git a/crates/trie/db/tests/trie.rs b/crates/trie/db/tests/trie.rs index 59fffec58..f5823404c 100644 --- a/crates/trie/db/tests/trie.rs +++ 
b/crates/trie/db/tests/trie.rs @@ -1,5 +1,6 @@ #![allow(missing_docs)] +use alloy_consensus::EMPTY_ROOT_HASH; use alloy_primitives::{hex_literal::hex, keccak256, Address, B256, U256}; use proptest::{prelude::ProptestConfig, proptest}; use proptest_arbitrary_interop::arb; @@ -8,7 +9,7 @@ use reth_db_api::{ cursor::{DbCursorRO, DbCursorRW, DbDupCursorRO}, transaction::DbTxMut, }; -use reth_primitives::{constants::EMPTY_ROOT_HASH, Account, StorageEntry}; +use reth_primitives::{Account, StorageEntry}; use reth_provider::{ test_utils::create_test_provider_factory, DatabaseProviderRW, StorageTrieWriter, TrieWriter, }; diff --git a/crates/trie/db/tests/witness.rs b/crates/trie/db/tests/witness.rs index 20f8cfbb9..8e00472b4 100644 --- a/crates/trie/db/tests/witness.rs +++ b/crates/trie/db/tests/witness.rs @@ -1,5 +1,6 @@ #![allow(missing_docs)] +use alloy_consensus::EMPTY_ROOT_HASH; use alloy_primitives::{ keccak256, map::{HashMap, HashSet}, @@ -8,7 +9,7 @@ use alloy_primitives::{ use alloy_rlp::EMPTY_STRING_CODE; use reth_db::{cursor::DbCursorRW, tables}; use reth_db_api::transaction::DbTxMut; -use reth_primitives::{constants::EMPTY_ROOT_HASH, Account, StorageEntry}; +use reth_primitives::{Account, StorageEntry}; use reth_provider::{test_utils::create_test_provider_factory, HashingWriter}; use reth_trie::{proof::Proof, witness::TrieWitness, HashedPostState, HashedStorage, StateRoot}; use reth_trie_db::{DatabaseProof, DatabaseStateRoot, DatabaseTrieWitness}; diff --git a/crates/trie/trie/Cargo.toml b/crates/trie/trie/Cargo.toml index d0f0fa092..31b5ac3e2 100644 --- a/crates/trie/trie/Cargo.toml +++ b/crates/trie/trie/Cargo.toml @@ -24,6 +24,7 @@ revm.workspace = true # alloy alloy-rlp.workspace = true alloy-primitives.workspace = true +alloy-consensus.workspace = true # tracing tracing.workspace = true diff --git a/crates/trie/trie/src/trie.rs b/crates/trie/trie/src/trie.rs index b8aa133d6..1bf8cf1ce 100644 --- a/crates/trie/trie/src/trie.rs +++ b/crates/trie/trie/src/trie.rs @@ -9,10 +9,10 @@ use crate::{ walker::TrieWalker, HashBuilder, Nibbles, TrieAccount, }; +use alloy_consensus::EMPTY_ROOT_HASH; use alloy_primitives::{keccak256, Address, B256}; use alloy_rlp::{BufMut, Encodable}; use reth_execution_errors::{StateRootError, StorageRootError}; -use reth_primitives::constants::EMPTY_ROOT_HASH; use tracing::trace; #[cfg(feature = "metrics")] diff --git a/crates/trie/trie/src/witness.rs b/crates/trie/trie/src/witness.rs index f3b70e85a..39d82a7bd 100644 --- a/crates/trie/trie/src/witness.rs +++ b/crates/trie/trie/src/witness.rs @@ -5,6 +5,7 @@ use crate::{ trie_cursor::TrieCursorFactory, HashedPostState, }; +use alloy_consensus::EMPTY_ROOT_HASH; use alloy_primitives::{ keccak256, map::{HashMap, HashSet}, @@ -13,7 +14,6 @@ use alloy_primitives::{ use alloy_rlp::{BufMut, Decodable, Encodable}; use itertools::{Either, Itertools}; use reth_execution_errors::{StateProofError, TrieWitnessError}; -use reth_primitives::constants::EMPTY_ROOT_HASH; use reth_trie_common::{ BranchNode, HashBuilder, Nibbles, StorageMultiProof, TrieAccount, TrieNode, CHILD_INDEX_RANGE, }; From 24287e8562ab5e70d473e0c0b87f800277a84d8a Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 17 Oct 2024 01:02:44 +0200 Subject: [PATCH 098/425] primitives: use `EMPTY_ROOT_HASH` when possible (#11822) --- Cargo.lock | 1 + crates/chainspec/src/spec.rs | 4 +--- crates/consensus/common/src/validation.rs | 6 ++--- crates/net/eth-wire-types/src/header.rs | 28 ++++++----------------- 
crates/optimism/primitives/Cargo.toml | 1 + crates/optimism/primitives/src/bedrock.rs | 5 ++-- 6 files changed, 16 insertions(+), 29 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bb2577577..740810d1b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8227,6 +8227,7 @@ dependencies = [ name = "reth-optimism-primitives" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-primitives", "reth-primitives", "reth-primitives-traits", diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index e6e8a67d7..a8bae966b 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -1814,9 +1814,7 @@ Post-merge hard forks (timestamp based): hex!("078dc6061b1d8eaa8493384b59c9c65ceb917201221d08b80c4de6770b6ec7e7").into(); assert_eq!(chainspec.genesis_header().state_root, expected_state_root); - let expected_withdrawals_hash: B256 = - hex!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421").into(); - assert_eq!(chainspec.genesis_header().withdrawals_root, Some(expected_withdrawals_hash)); + assert_eq!(chainspec.genesis_header().withdrawals_root, Some(EMPTY_ROOT_HASH)); let expected_hash: B256 = hex!("1fc027d65f820d3eef441ebeec139ebe09e471cf98516dce7b5643ccb27f418c").into(); diff --git a/crates/consensus/common/src/validation.rs b/crates/consensus/common/src/validation.rs index 88a4cabe9..df66a00d1 100644 --- a/crates/consensus/common/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -300,7 +300,7 @@ pub fn validate_against_parent_4844( #[cfg(test)] mod tests { use super::*; - use alloy_consensus::TxEip4844; + use alloy_consensus::{TxEip4844, EMPTY_ROOT_HASH}; use alloy_primitives::{ hex_literal::hex, Address, BlockHash, BlockNumber, Bytes, Parity, Sealable, U256, }; @@ -444,8 +444,8 @@ mod tests { ommers_hash: hex!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347").into(), beneficiary: hex!("4675c7e5baafbffbca748158becba61ef3b0a263").into(), state_root: hex!("8337403406e368b3e40411138f4868f79f6d835825d55fd0c2f6e17b1a3948e9").into(), - transactions_root: hex!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421").into(), - receipts_root: hex!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421").into(), + transactions_root: EMPTY_ROOT_HASH, + receipts_root: EMPTY_ROOT_HASH, logs_bloom: hex!("002400000000004000220000800002000000000000000000000000000000100000000000000000100000000000000021020000000800000006000000002100040000000c0004000000000008000008200000000000000000000000008000000001040000020000020000002000000800000002000020000000022010000000000000010002001000000000020200000000000001000200880000004000000900020000000000020000000040000000000000000000000000000080000000000001000002000000000000012000200020000000000000001000000000000020000010321400000000100000000000000000000000000000400000000000000000").into(), difficulty: U256::ZERO, // total difficulty: 0xc70d815d562d3cfa955).into(), number: 0xf21d20, diff --git a/crates/net/eth-wire-types/src/header.rs b/crates/net/eth-wire-types/src/header.rs index 4075a4a92..7ecfc802d 100644 --- a/crates/net/eth-wire-types/src/header.rs +++ b/crates/net/eth-wire-types/src/header.rs @@ -87,6 +87,7 @@ impl From for bool { #[cfg(test)] mod tests { use super::*; + use alloy_consensus::EMPTY_ROOT_HASH; use alloy_primitives::{address, b256, bloom, bytes, hex, Address, Bytes, B256, U256}; use alloy_rlp::{Decodable, Encodable}; use reth_primitives::Header; @@ -201,10 +202,7 @@ mod tests { gas_used: 0x0125b8, timestamp: 0x079e, extra_data: 
Bytes::from_str("42").unwrap(), - mix_hash: B256::from_str( - "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", - ) - .unwrap(), + mix_hash: EMPTY_ROOT_HASH, base_fee_per_gas: Some(0x09), withdrawals_root: Some( B256::from_str("27f166f1d7c789251299535cb176ba34116e44894476a7886fe5d73d9be5c973") @@ -254,16 +252,10 @@ mod tests { gas_used: 0x02a865, timestamp: 0x079e, extra_data: Bytes::from(vec![0x42]), - mix_hash: B256::from_str( - "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", - ) - .unwrap(), + mix_hash: EMPTY_ROOT_HASH, nonce: 0u64.into(), base_fee_per_gas: Some(9), - withdrawals_root: Some( - B256::from_str("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") - .unwrap(), - ), + withdrawals_root: Some(EMPTY_ROOT_HASH), blob_gas_used: Some(0x020000), excess_blob_gas: Some(0), parent_beacon_block_root: None, @@ -291,12 +283,8 @@ mod tests { ommers_hash: b256!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"), beneficiary: address!("f97e180c050e5ab072211ad2c213eb5aee4df134"), state_root: b256!("ec229dbe85b0d3643ad0f471e6ec1a36bbc87deffbbd970762d22a53b35d068a"), - transactions_root: b256!( - "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" - ), - receipts_root: b256!( - "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" - ), + transactions_root: EMPTY_ROOT_HASH, + receipts_root: EMPTY_ROOT_HASH, logs_bloom: Default::default(), difficulty: U256::from(0), number: 0x30598, @@ -307,9 +295,7 @@ mod tests { mix_hash: b256!("70ccadc40b16e2094954b1064749cc6fbac783c1712f1b271a8aac3eda2f2325"), nonce: 0u64.into(), base_fee_per_gas: Some(7), - withdrawals_root: Some(b256!( - "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" - )), + withdrawals_root: Some(EMPTY_ROOT_HASH), parent_beacon_block_root: None, blob_gas_used: Some(0), excess_blob_gas: Some(0x1600000), diff --git a/crates/optimism/primitives/Cargo.toml b/crates/optimism/primitives/Cargo.toml index 73a3bab1e..2054de730 100644 --- a/crates/optimism/primitives/Cargo.toml +++ b/crates/optimism/primitives/Cargo.toml @@ -15,3 +15,4 @@ workspace = true reth-primitives.workspace = true reth-primitives-traits.workspace = true alloy-primitives.workspace = true +alloy-consensus.workspace = true diff --git a/crates/optimism/primitives/src/bedrock.rs b/crates/optimism/primitives/src/bedrock.rs index 4ece12ad6..1a347aeca 100644 --- a/crates/optimism/primitives/src/bedrock.rs +++ b/crates/optimism/primitives/src/bedrock.rs @@ -1,5 +1,6 @@ //! OP mainnet bedrock related data. 
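// The `EMPTY_ROOT_HASH` constant these patches switch to is the root of an empty trie,
// i.e. the keccak256 of an empty RLP string (`0x80`), which equals the
// 0x56e81f17...b421 literal being replaced. A quick standalone check:
use alloy_consensus::EMPTY_ROOT_HASH;
use alloy_primitives::keccak256;

fn main() {
    assert_eq!(keccak256([0x80u8]), EMPTY_ROOT_HASH);
}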
+use alloy_consensus::EMPTY_ROOT_HASH; use alloy_primitives::{address, b256, bloom, bytes, B256, B64, U256}; use reth_primitives::Header; use reth_primitives_traits::constants::EMPTY_OMMER_ROOT_HASH; @@ -73,10 +74,10 @@ pub const BEDROCK_HEADER: Header = Header { nonce: B64::ZERO, number: 105235063, parent_hash: b256!("21a168dfa5e727926063a28ba16fd5ee84c814e847c81a699c7a0ea551e4ca50"), - receipts_root: b256!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"), + receipts_root: EMPTY_ROOT_HASH, state_root: b256!("920314c198da844a041d63bf6cbe8b59583165fd2229d1b3f599da812fd424cb"), timestamp: 1686068903, - transactions_root: b256!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"), + transactions_root: EMPTY_ROOT_HASH, ommers_hash: EMPTY_OMMER_ROOT_HASH, beneficiary: address!("4200000000000000000000000000000000000011"), withdrawals_root: None, From 1b97b1d942c671831bb81c0ccc53a9018c9febc5 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Thu, 17 Oct 2024 12:00:25 +0900 Subject: [PATCH 099/425] fix(staged-sync): prevent `StaticFileProducer` from running with an unwinded target on legacy engine (#11717) --- .../beacon/src/engine/hooks/static_file.rs | 18 +++++++++++++----- crates/stages/api/src/pipeline/mod.rs | 4 ++++ .../static-file/src/static_file_producer.rs | 14 ++++++++++++-- 3 files changed, 29 insertions(+), 7 deletions(-) diff --git a/crates/consensus/beacon/src/engine/hooks/static_file.rs b/crates/consensus/beacon/src/engine/hooks/static_file.rs index 8a5a28f95..89231ed55 100644 --- a/crates/consensus/beacon/src/engine/hooks/static_file.rs +++ b/crates/consensus/beacon/src/engine/hooks/static_file.rs @@ -9,7 +9,8 @@ use futures::FutureExt; use reth_errors::RethResult; use reth_primitives::static_file::HighestStaticFiles; use reth_provider::{ - BlockReader, DatabaseProviderFactory, StageCheckpointReader, StaticFileProviderFactory, + BlockReader, ChainStateBlockReader, DatabaseProviderFactory, StageCheckpointReader, + StaticFileProviderFactory, }; use reth_static_file::{StaticFileProducer, StaticFileProducerWithResult}; use reth_tasks::TaskSpawner; @@ -31,8 +32,9 @@ pub struct StaticFileHook { impl StaticFileHook where Provider: StaticFileProviderFactory - + DatabaseProviderFactory - + 'static, + + DatabaseProviderFactory< + Provider: StageCheckpointReader + BlockReader + ChainStateBlockReader, + > + 'static, { /// Create a new instance pub fn new( @@ -104,6 +106,11 @@ where return Ok(None) }; + let finalized_block_number = locked_static_file_producer + .last_finalized_block()? 
+ .map(|on_disk| finalized_block_number.min(on_disk)) + .unwrap_or(finalized_block_number); + let targets = locked_static_file_producer.get_static_file_targets(HighestStaticFiles { headers: Some(finalized_block_number), @@ -137,8 +144,9 @@ where impl EngineHook for StaticFileHook where Provider: StaticFileProviderFactory - + DatabaseProviderFactory - + 'static, + + DatabaseProviderFactory< + Provider: StageCheckpointReader + BlockReader + ChainStateBlockReader, + > + 'static, { fn name(&self) -> &'static str { "StaticFile" diff --git a/crates/stages/api/src/pipeline/mod.rs b/crates/stages/api/src/pipeline/mod.rs index 1f6d9341a..14225a595 100644 --- a/crates/stages/api/src/pipeline/mod.rs +++ b/crates/stages/api/src/pipeline/mod.rs @@ -276,6 +276,10 @@ impl Pipeline { // Unwind stages in reverse order of execution let unwind_pipeline = self.stages.iter_mut().rev(); + // Legacy Engine: This prevents a race condition in which the `StaticFileProducer` could + // attempt to proceed with a finalized block which has been unwinded + let _locked_sf_producer = self.static_file_producer.lock(); + let mut provider_rw = self.provider_factory.database_provider_rw()?; for stage in unwind_pipeline { diff --git a/crates/static-file/static-file/src/static_file_producer.rs b/crates/static-file/static-file/src/static_file_producer.rs index 32565fd6d..2c442aedf 100644 --- a/crates/static-file/static-file/src/static_file_producer.rs +++ b/crates/static-file/static-file/src/static_file_producer.rs @@ -5,8 +5,8 @@ use alloy_primitives::BlockNumber; use parking_lot::Mutex; use rayon::prelude::*; use reth_provider::{ - providers::StaticFileWriter, BlockReader, DBProvider, DatabaseProviderFactory, - StageCheckpointReader, StaticFileProviderFactory, + providers::StaticFileWriter, BlockReader, ChainStateBlockReader, DBProvider, + DatabaseProviderFactory, StageCheckpointReader, StaticFileProviderFactory, }; use reth_prune_types::PruneModes; use reth_stages_types::StageId; @@ -106,6 +106,16 @@ impl StaticFileProducerInner { } } +impl StaticFileProducerInner +where + Provider: StaticFileProviderFactory + DatabaseProviderFactory, +{ + /// Returns the last finalized block number on disk. 
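// Sketch of the clamping rule added in the hook above: never hand the static-file
// producer a finalized target beyond what is recorded as finalized on disk, which is
// what guards against racing an unwind (plain integers stand in for provider types).
fn static_file_target(engine_finalized: u64, on_disk_finalized: Option<u64>) -> u64 {
    on_disk_finalized.map_or(engine_finalized, |on_disk| engine_finalized.min(on_disk))
}
// e.g. after an unwind to block 100 while the engine still reports 105, the target is 100.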
+ pub fn last_finalized_block(&self) -> ProviderResult> { + self.provider.database_provider_ro()?.last_finalized_block_number() + } +} + impl StaticFileProducerInner where Provider: StaticFileProviderFactory From b57a5607cbc47f4995fa567c27351f342e08147e Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 17 Oct 2024 05:05:26 +0200 Subject: [PATCH 100/425] test: make provider compile with cargo t (#11817) --- crates/storage/provider/Cargo.toml | 2 ++ .../provider/src/providers/blockchain_provider.rs | 9 ++------- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/crates/storage/provider/Cargo.toml b/crates/storage/provider/Cargo.toml index 7ef827a37..00e1c9f09 100644 --- a/crates/storage/provider/Cargo.toml +++ b/crates/storage/provider/Cargo.toml @@ -73,8 +73,10 @@ rayon.workspace = true [dev-dependencies] reth-db = { workspace = true, features = ["test-utils"] } reth-primitives = { workspace = true, features = ["arbitrary", "test-utils"] } +reth-chain-state = { workspace = true, features = ["test-utils"] } reth-trie = { workspace = true, features = ["test-utils"] } reth-testing-utils.workspace = true +reth-ethereum-engine-primitives.workspace = true parking_lot.workspace = true tempfile.workspace = true diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs b/crates/storage/provider/src/providers/blockchain_provider.rs index 3013be060..9e6f32b33 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -1933,7 +1933,7 @@ mod tests { /// This simulates a RPC method having a different view than when its database transaction was /// created. fn persist_block_after_db_tx_creation( - provider: Arc>, + provider: BlockchainProvider2, block_number: BlockNumber, ) { let hook_provider = provider.clone(); @@ -3142,7 +3142,6 @@ mod tests { ..Default::default() }, )?; - let provider = Arc::new(provider); $( // Since data moves for each tried method, need to recalculate everything @@ -3257,7 +3256,6 @@ mod tests { ..Default::default() }, )?; - let provider = Arc::new(provider); $( // Since data moves for each tried method, need to recalculate everything @@ -3383,7 +3381,6 @@ mod tests { ..Default::default() }, )?; - let provider = Arc::new(provider); let mut in_memory_blocks: std::collections::VecDeque<_> = in_memory_blocks.into(); @@ -3685,8 +3682,6 @@ mod tests { }, )?; - let provider = Arc::new(provider); - // Old implementation was querying the database first. This is problematic, if there are // changes AFTER the database transaction is created. 
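// The corrected helper below consults the canonical in-memory state before the database
// view, so a block persisted after the database transaction was opened is still found.
// A standalone sketch of that lookup order (plain maps stand in for the real providers):
use std::collections::HashMap;

fn find_transaction<'a>(
    hash: &u64,
    in_memory: &'a HashMap<u64, String>,
    database: &'a HashMap<u64, String>,
) -> Option<&'a String> {
    in_memory.get(hash).or_else(|| database.get(hash))
}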
let old_transaction_hash_fn = @@ -3739,7 +3734,7 @@ mod tests { correct_transaction_hash_fn( to_be_persisted_tx.hash(), provider.canonical_in_memory_state(), - provider.database.clone() + provider.database ), Ok(Some(to_be_persisted_tx)) ); From e828b34ed1332832bcebb421a98684a894c49993 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 17 Oct 2024 05:06:23 +0200 Subject: [PATCH 101/425] chore: rm features from test utils (#11816) --- .../storage/provider/src/test_utils/blocks.rs | 140 +++++++++--------- 1 file changed, 70 insertions(+), 70 deletions(-) diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index e6a886187..57e111d67 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -201,20 +201,20 @@ fn block1(number: BlockNumber) -> (SealedBlockWithSenders, ExecutionOutcome) { .revert_account_info(number, account2, Some(None)) .state_storage(account1, HashMap::from_iter([(slot, (U256::ZERO, U256::from(10)))])) .build(), - vec![vec![Some(Receipt { - tx_type: TxType::Eip2930, - success: true, - cumulative_gas_used: 300, - logs: vec![Log::new_unchecked( - Address::new([0x60; 20]), - vec![B256::with_last_byte(1), B256::with_last_byte(2)], - Bytes::default(), - )], - #[cfg(feature = "optimism")] - deposit_nonce: None, - #[cfg(feature = "optimism")] - deposit_receipt_version: None, - })]] + vec![vec![Some( + #[allow(clippy::needless_update)] // side-effect of optimism fields + Receipt { + tx_type: TxType::Eip2930, + success: true, + cumulative_gas_used: 300, + logs: vec![Log::new_unchecked( + Address::new([0x60; 20]), + vec![B256::with_last_byte(1), B256::with_last_byte(2)], + Bytes::default(), + )], + ..Default::default() + }, + )]] .into(), number, Vec::new(), @@ -263,20 +263,20 @@ fn block2( ) .revert_storage(number, account, Vec::from([(slot, U256::from(10))])) .build(), - vec![vec![Some(Receipt { - tx_type: TxType::Eip1559, - success: false, - cumulative_gas_used: 400, - logs: vec![Log::new_unchecked( - Address::new([0x61; 20]), - vec![B256::with_last_byte(3), B256::with_last_byte(4)], - Bytes::default(), - )], - #[cfg(feature = "optimism")] - deposit_nonce: None, - #[cfg(feature = "optimism")] - deposit_receipt_version: None, - })]] + vec![vec![Some( + #[allow(clippy::needless_update)] // side-effect of optimism fields + Receipt { + tx_type: TxType::Eip1559, + success: false, + cumulative_gas_used: 400, + logs: vec![Log::new_unchecked( + Address::new([0x61; 20]), + vec![B256::with_last_byte(3), B256::with_last_byte(4)], + Bytes::default(), + )], + ..Default::default() + }, + )]] .into(), number, Vec::new(), @@ -334,20 +334,20 @@ fn block3( } let execution_outcome = ExecutionOutcome::new( bundle_state_builder.build(), - vec![vec![Some(Receipt { - tx_type: TxType::Eip1559, - success: true, - cumulative_gas_used: 400, - logs: vec![Log::new_unchecked( - Address::new([0x61; 20]), - vec![B256::with_last_byte(3), B256::with_last_byte(4)], - Bytes::default(), - )], - #[cfg(feature = "optimism")] - deposit_nonce: None, - #[cfg(feature = "optimism")] - deposit_receipt_version: None, - })]] + vec![vec![Some( + #[allow(clippy::needless_update)] // side-effect of optimism fields + Receipt { + tx_type: TxType::Eip1559, + success: true, + cumulative_gas_used: 400, + logs: vec![Log::new_unchecked( + Address::new([0x61; 20]), + vec![B256::with_last_byte(3), B256::with_last_byte(4)], + Bytes::default(), + )], + ..Default::default() + }, + )]] .into(), number, Vec::new(), @@ 
-425,20 +425,20 @@ fn block4( } let execution_outcome = ExecutionOutcome::new( bundle_state_builder.build(), - vec![vec![Some(Receipt { - tx_type: TxType::Eip1559, - success: true, - cumulative_gas_used: 400, - logs: vec![Log::new_unchecked( - Address::new([0x61; 20]), - vec![B256::with_last_byte(3), B256::with_last_byte(4)], - Bytes::default(), - )], - #[cfg(feature = "optimism")] - deposit_nonce: None, - #[cfg(feature = "optimism")] - deposit_receipt_version: None, - })]] + vec![vec![Some( + #[allow(clippy::needless_update)] // side-effect of optimism fields + Receipt { + tx_type: TxType::Eip1559, + success: true, + cumulative_gas_used: 400, + logs: vec![Log::new_unchecked( + Address::new([0x61; 20]), + vec![B256::with_last_byte(3), B256::with_last_byte(4)], + Bytes::default(), + )], + ..Default::default() + }, + )]] .into(), number, Vec::new(), @@ -513,20 +513,20 @@ fn block5( } let execution_outcome = ExecutionOutcome::new( bundle_state_builder.build(), - vec![vec![Some(Receipt { - tx_type: TxType::Eip1559, - success: true, - cumulative_gas_used: 400, - logs: vec![Log::new_unchecked( - Address::new([0x61; 20]), - vec![B256::with_last_byte(3), B256::with_last_byte(4)], - Bytes::default(), - )], - #[cfg(feature = "optimism")] - deposit_nonce: None, - #[cfg(feature = "optimism")] - deposit_receipt_version: None, - })]] + vec![vec![Some( + #[allow(clippy::needless_update)] // side-effect of optimism fields + Receipt { + tx_type: TxType::Eip1559, + success: true, + cumulative_gas_used: 400, + logs: vec![Log::new_unchecked( + Address::new([0x61; 20]), + vec![B256::with_last_byte(3), B256::with_last_byte(4)], + Bytes::default(), + )], + ..Default::default() + }, + )]] .into(), number, Vec::new(), From e3e83b7e71c7d9f6a34b9d7b407b72ca3eda9b4e Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Thu, 17 Oct 2024 10:03:25 +0200 Subject: [PATCH 102/425] docs(trie): revealed sparse trie invariants (#11825) --- crates/trie/sparse/src/trie.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index 2edaaf76b..d8f4280e8 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -69,6 +69,13 @@ impl SparseTrie { } /// The representation of revealed sparse trie. +/// +/// ## Invariants +/// +/// - The root node is always present in `nodes` collection. +/// - Each leaf entry in `nodes` collection must have a corresponding entry in `values` collection. +/// The opposite is also true. +/// - All keys in `values` collection are full leaf paths. #[derive(PartialEq, Eq)] pub struct RevealedSparseTrie { /// All trie nodes. 
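The invariants documented in the patch above tie the `nodes` and `values` maps of the revealed sparse trie together. A minimal sketch of what checking them could look like, written as if it were a debug helper on the same struct (it assumes the `nodes: HashMap<Nibbles, SparseNode>` and `values: HashMap<Nibbles, Vec<u8>>` fields and the `Nibbles` helpers that appear elsewhere in this series; it is an illustration only, not code from any of these patches):

    /// Debug-only consistency check for the documented invariants (sketch).
    fn assert_invariants(&self) {
        // 1. The root node is always present in the `nodes` collection.
        debug_assert!(self.nodes.contains_key(&Nibbles::default()));

        // 2. Every leaf in `nodes` has a matching entry in `values`, keyed by
        //    the full leaf path (node path extended with the leaf key).
        for (path, node) in &self.nodes {
            if let SparseNode::Leaf { key, .. } = node {
                let mut full_path = path.clone();
                full_path.extend_from_slice_unchecked(key);
                debug_assert!(self.values.contains_key(&full_path));
            }
        }

        // 3. Conversely, every key in `values` is a full leaf path, i.e. it is
        //    produced by some leaf node in `nodes`.
        for full_path in self.values.keys() {
            let has_leaf = self.nodes.iter().any(|(path, node)| match node {
                SparseNode::Leaf { key, .. } => {
                    let mut candidate = path.clone();
                    candidate.extend_from_slice_unchecked(key);
                    &candidate == full_path
                }
                _ => false,
            });
            debug_assert!(has_leaf);
        }
    }
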
From 63a75fdd95ea71028d9d65c277a514f7e518676e Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Thu, 17 Oct 2024 10:07:45 +0200 Subject: [PATCH 103/425] fix(trie): intermediate trie node hashes (#11826) --- crates/trie/sparse/src/trie.rs | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index d8f4280e8..e83522ca8 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -345,9 +345,7 @@ impl RevealedSparseTrie { } else { let value = self.values.get(&path).unwrap(); let rlp_node = LeafNodeRef { key, value }.rlp(&mut self.rlp_buf); - if let Some(node_hash) = rlp_node.as_hash() { - *hash = Some(node_hash); - } + *hash = rlp_node.as_hash(); rlp_node } } @@ -360,9 +358,7 @@ impl RevealedSparseTrie { let (_, child) = rlp_node_stack.pop().unwrap(); self.rlp_buf.clear(); let rlp_node = ExtensionNodeRef::new(key, &child).rlp(&mut self.rlp_buf); - if let Some(node_hash) = rlp_node.as_hash() { - *hash = Some(node_hash); - } + *hash = rlp_node.as_hash(); rlp_node } else { path_stack.extend([path, child_path]); // need to get rlp node for child first @@ -400,9 +396,7 @@ impl RevealedSparseTrie { self.rlp_buf.clear(); let rlp_node = BranchNodeRef::new(&branch_value_stack_buf, *state_mask) .rlp(&mut self.rlp_buf); - if let Some(node_hash) = rlp_node.as_hash() { - *hash = Some(node_hash); - } + *hash = rlp_node.as_hash(); rlp_node } }; From 491f154c3437d36d3a8add91f76efce8eea28a63 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 17 Oct 2024 11:30:26 +0200 Subject: [PATCH 104/425] primitives-traits: rm redundant definitions of `EMPTY_OMMER_ROOT_HASH` (#11820) --- Cargo.lock | 4 ++++ crates/ethereum/consensus/Cargo.toml | 1 + crates/ethereum/consensus/src/lib.rs | 2 +- crates/ethereum/payload/Cargo.toml | 1 + crates/ethereum/payload/src/lib.rs | 3 ++- crates/optimism/consensus/Cargo.toml | 1 + crates/optimism/consensus/src/lib.rs | 5 ++--- crates/optimism/payload/Cargo.toml | 1 + crates/optimism/payload/src/builder.rs | 3 ++- crates/optimism/primitives/src/bedrock.rs | 3 +-- crates/primitives-traits/src/constants/mod.rs | 4 ---- crates/primitives/src/lib.rs | 4 ++-- crates/primitives/src/proofs.rs | 4 ++-- crates/rpc/rpc-eth-api/src/helpers/pending_block.rs | 4 ++-- crates/rpc/rpc-types-compat/src/engine/payload.rs | 3 ++- 15 files changed, 24 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 740810d1b..e434e80cf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7270,6 +7270,7 @@ dependencies = [ name = "reth-ethereum-consensus" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-primitives", "reth-chainspec", "reth-consensus", @@ -7320,6 +7321,7 @@ dependencies = [ name = "reth-ethereum-payload-builder" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-primitives", "reth-basic-payload-builder", "reth-chain-state", @@ -8086,6 +8088,7 @@ dependencies = [ name = "reth-optimism-consensus" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-primitives", "reth-chainspec", "reth-consensus", @@ -8194,6 +8197,7 @@ dependencies = [ name = "reth-optimism-payload-builder" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-rlp", diff --git a/crates/ethereum/consensus/Cargo.toml b/crates/ethereum/consensus/Cargo.toml index 02d217b63..af934d3e2 100644 --- a/crates/ethereum/consensus/Cargo.toml +++ b/crates/ethereum/consensus/Cargo.toml @@ -19,5 +19,6 
@@ reth-consensus.workspace = true # alloy alloy-primitives.workspace = true +alloy-consensus.workspace = true tracing.workspace = true diff --git a/crates/ethereum/consensus/src/lib.rs b/crates/ethereum/consensus/src/lib.rs index e74f3498f..8f2a8a720 100644 --- a/crates/ethereum/consensus/src/lib.rs +++ b/crates/ethereum/consensus/src/lib.rs @@ -8,6 +8,7 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +use alloy_consensus::EMPTY_OMMER_ROOT_HASH; use alloy_primitives::U256; use reth_chainspec::{EthChainSpec, EthereumHardfork, EthereumHardforks}; use reth_consensus::{Consensus, ConsensusError, PostExecutionInput}; @@ -19,7 +20,6 @@ use reth_consensus_common::validation::{ }; use reth_primitives::{ constants::MINIMUM_GAS_LIMIT, BlockWithSenders, Header, SealedBlock, SealedHeader, - EMPTY_OMMER_ROOT_HASH, }; use std::{fmt::Debug, sync::Arc, time::SystemTime}; diff --git a/crates/ethereum/payload/Cargo.toml b/crates/ethereum/payload/Cargo.toml index f169d58f7..ce37a4f8e 100644 --- a/crates/ethereum/payload/Cargo.toml +++ b/crates/ethereum/payload/Cargo.toml @@ -34,6 +34,7 @@ revm-primitives.workspace = true # alloy alloy-primitives.workspace = true +alloy-consensus.workspace = true # misc tracing.workspace = true diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 248aa3486..dcf54fc02 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -9,6 +9,7 @@ #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] #![allow(clippy::useless_let_if_seq)] +use alloy_consensus::EMPTY_OMMER_ROOT_HASH; use alloy_primitives::U256; use reth_basic_payload_builder::{ commit_withdrawals, is_better_payload, BuildArguments, BuildOutcome, PayloadBuilder, @@ -26,7 +27,7 @@ use reth_primitives::{ constants::{eip4844::MAX_DATA_GAS_PER_BLOCK, BEACON_NONCE}, proofs::{self, calculate_requests_root}, revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg}, - Block, BlockBody, EthereumHardforks, Header, Receipt, EMPTY_OMMER_ROOT_HASH, + Block, BlockBody, EthereumHardforks, Header, Receipt, }; use reth_provider::{ChainSpecProvider, StateProviderFactory}; use reth_revm::database::StateProviderDatabase; diff --git a/crates/optimism/consensus/Cargo.toml b/crates/optimism/consensus/Cargo.toml index f5f061c59..e2520c893 100644 --- a/crates/optimism/consensus/Cargo.toml +++ b/crates/optimism/consensus/Cargo.toml @@ -25,6 +25,7 @@ reth-optimism-chainspec.workspace = true # ethereum alloy-primitives.workspace = true +alloy-consensus.workspace = true tracing.workspace = true diff --git a/crates/optimism/consensus/src/lib.rs b/crates/optimism/consensus/src/lib.rs index fe67ff1bc..16c1d5d37 100644 --- a/crates/optimism/consensus/src/lib.rs +++ b/crates/optimism/consensus/src/lib.rs @@ -9,6 +9,7 @@ // The `optimism` feature must be enabled to use this crate. 
#![cfg(feature = "optimism")] +use alloy_consensus::EMPTY_OMMER_ROOT_HASH; use alloy_primitives::{B64, U256}; use reth_chainspec::EthereumHardforks; use reth_consensus::{Consensus, ConsensusError, PostExecutionInput}; @@ -20,9 +21,7 @@ use reth_consensus_common::validation::{ }; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_forks::OptimismHardforks; -use reth_primitives::{ - BlockWithSenders, GotExpected, Header, SealedBlock, SealedHeader, EMPTY_OMMER_ROOT_HASH, -}; +use reth_primitives::{BlockWithSenders, GotExpected, Header, SealedBlock, SealedHeader}; use std::{sync::Arc, time::SystemTime}; mod proof; diff --git a/crates/optimism/payload/Cargo.toml b/crates/optimism/payload/Cargo.toml index e1d6fe47d..46cc82edb 100644 --- a/crates/optimism/payload/Cargo.toml +++ b/crates/optimism/payload/Cargo.toml @@ -41,6 +41,7 @@ alloy-rlp.workspace = true op-alloy-rpc-types-engine.workspace = true revm-primitives.workspace = true alloy-rpc-types-engine.workspace = true +alloy-consensus.workspace = true # misc tracing.workspace = true diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 0a8dcdb12..e590635f5 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -2,6 +2,7 @@ use std::sync::Arc; +use alloy_consensus::EMPTY_OMMER_ROOT_HASH; use alloy_primitives::U256; use reth_basic_payload_builder::*; use reth_chain_state::ExecutedBlock; @@ -16,7 +17,7 @@ use reth_primitives::{ constants::BEACON_NONCE, proofs, revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg}, - Block, BlockBody, Header, Receipt, TxType, EMPTY_OMMER_ROOT_HASH, + Block, BlockBody, Header, Receipt, TxType, }; use reth_provider::StateProviderFactory; use reth_revm::database::StateProviderDatabase; diff --git a/crates/optimism/primitives/src/bedrock.rs b/crates/optimism/primitives/src/bedrock.rs index 1a347aeca..bd4229858 100644 --- a/crates/optimism/primitives/src/bedrock.rs +++ b/crates/optimism/primitives/src/bedrock.rs @@ -1,9 +1,8 @@ //! OP mainnet bedrock related data. 
-use alloy_consensus::EMPTY_ROOT_HASH; +use alloy_consensus::{EMPTY_OMMER_ROOT_HASH, EMPTY_ROOT_HASH}; use alloy_primitives::{address, b256, bloom, bytes, B256, B64, U256}; use reth_primitives::Header; -use reth_primitives_traits::constants::EMPTY_OMMER_ROOT_HASH; /// Transaction 0x9ed8f713b2cc6439657db52dcd2fdb9cc944915428f3c6e2a7703e242b259cb9 in block 985, /// replayed in blocks: diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index d40abdd64..5d64b911b 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -119,10 +119,6 @@ pub const DEV_GENESIS_HASH: B256 = pub const KECCAK_EMPTY: B256 = b256!("c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"); -/// Ommer root of empty list: `0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347` -pub const EMPTY_OMMER_ROOT_HASH: B256 = - b256!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"); - /// From address from Optimism system txs: `0xdeaddeaddeaddeaddeaddeaddeaddeaddead0001` pub const OP_SYSTEM_TX_FROM_ADDR: Address = address!("deaddeaddeaddeaddeaddeaddeaddeaddead0001"); diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index ec65cbf20..a59e72bbd 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -40,8 +40,8 @@ pub use block::{ #[cfg(feature = "reth-codec")] pub use compression::*; pub use constants::{ - DEV_GENESIS_HASH, EMPTY_OMMER_ROOT_HASH, HOLESKY_GENESIS_HASH, KECCAK_EMPTY, - MAINNET_GENESIS_HASH, SEPOLIA_GENESIS_HASH, + DEV_GENESIS_HASH, HOLESKY_GENESIS_HASH, KECCAK_EMPTY, MAINNET_GENESIS_HASH, + SEPOLIA_GENESIS_HASH, }; pub use receipt::{ gas_spent_by_transactions, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, Receipts, diff --git a/crates/primitives/src/proofs.rs b/crates/primitives/src/proofs.rs index dc814804e..4efbb588e 100644 --- a/crates/primitives/src/proofs.rs +++ b/crates/primitives/src/proofs.rs @@ -1,10 +1,10 @@ //! Helper function for calculating Merkle proofs and hashes. 
use crate::{ - constants::EMPTY_OMMER_ROOT_HASH, Header, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, - Request, TransactionSigned, Withdrawal, + Header, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, Request, TransactionSigned, Withdrawal, }; use alloc::vec::Vec; +use alloy_consensus::EMPTY_OMMER_ROOT_HASH; use alloy_eips::{eip2718::Encodable2718, eip7685::Encodable7685}; use alloy_primitives::{keccak256, B256}; use reth_trie_common::root::{ordered_trie_root, ordered_trie_root_with_encoder}; diff --git a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs index 81c6a5678..832cf1705 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs @@ -5,7 +5,7 @@ use std::time::{Duration, Instant}; use crate::{EthApiTypes, FromEthApiError, FromEvmError}; -use alloy_consensus::EMPTY_ROOT_HASH; +use alloy_consensus::{EMPTY_OMMER_ROOT_HASH, EMPTY_ROOT_HASH}; use alloy_primitives::{BlockNumber, B256, U256}; use alloy_rpc_types::BlockNumberOrTag; use futures::Future; @@ -20,7 +20,7 @@ use reth_primitives::{ ResultAndState, SpecId, }, Block, BlockBody, Header, Receipt, Requests, SealedBlockWithSenders, SealedHeader, - TransactionSignedEcRecovered, EMPTY_OMMER_ROOT_HASH, + TransactionSignedEcRecovered, }; use reth_provider::{ BlockReader, BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, ProviderError, diff --git a/crates/rpc/rpc-types-compat/src/engine/payload.rs b/crates/rpc/rpc-types-compat/src/engine/payload.rs index 84943b60e..e6f2f97ca 100644 --- a/crates/rpc/rpc-types-compat/src/engine/payload.rs +++ b/crates/rpc/rpc-types-compat/src/engine/payload.rs @@ -1,6 +1,7 @@ //! Standalone Conversion Functions for Handling Different Versions of Execution Payloads in //! 
Ethereum's Engine +use alloy_consensus::EMPTY_OMMER_ROOT_HASH; use alloy_eips::eip2718::{Decodable2718, Encodable2718}; use alloy_primitives::{B256, U256}; use alloy_rpc_types_engine::{ @@ -9,7 +10,7 @@ use alloy_rpc_types_engine::{ ExecutionPayloadV3, ExecutionPayloadV4, PayloadError, }; use reth_primitives::{ - constants::{EMPTY_OMMER_ROOT_HASH, MAXIMUM_EXTRA_DATA_SIZE}, + constants::MAXIMUM_EXTRA_DATA_SIZE, proofs::{self}, Block, BlockBody, Header, Request, SealedBlock, TransactionSigned, Withdrawals, }; From bac244ae970f201afda63811b7b8d8b901fb94de Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Thu, 17 Oct 2024 11:20:56 +0100 Subject: [PATCH 105/425] feat(trie): sparse trie leaf removal (#11752) Co-authored-by: Roman Krasiuk --- Cargo.lock | 8 +- crates/trie/sparse/Cargo.toml | 11 +- crates/trie/sparse/src/trie.rs | 708 ++++++++++++++++++++++++++++++--- 3 files changed, 669 insertions(+), 58 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e434e80cf..d31239808 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4464,7 +4464,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -9248,9 +9248,13 @@ dependencies = [ "assert_matches", "criterion", "itertools 0.13.0", + "pretty_assertions", "proptest", + "rand 0.8.5", "rayon", "reth-primitives", + "reth-testing-utils", + "reth-tracing", "reth-trie", "reth-trie-common", "smallvec", @@ -11388,7 +11392,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] diff --git a/crates/trie/sparse/Cargo.toml b/crates/trie/sparse/Cargo.toml index c31bbe2df..4ba6ed0f2 100644 --- a/crates/trie/sparse/Cargo.toml +++ b/crates/trie/sparse/Cargo.toml @@ -15,6 +15,7 @@ workspace = true [dependencies] # reth reth-primitives.workspace = true +reth-tracing.workspace = true reth-trie-common.workspace = true reth-trie.workspace = true @@ -26,18 +27,22 @@ alloy-rlp.workspace = true tracing.workspace = true # misc -thiserror.workspace = true rayon.workspace = true smallvec = { workspace = true, features = ["const_new"] } +thiserror.workspace = true [dev-dependencies] reth-primitives = { workspace = true, features = ["test-utils", "arbitrary"] } -reth-trie-common = { workspace = true, features = ["test-utils", "arbitrary"] } +reth-testing-utils.workspace = true reth-trie = { workspace = true, features = ["test-utils"] } +reth-trie-common = { workspace = true, features = ["test-utils", "arbitrary"] } + assert_matches.workspace = true +criterion.workspace = true itertools.workspace = true +pretty_assertions = "1.4" proptest.workspace = true -criterion.workspace = true +rand.workspace = true [[bench]] name = "root" diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index e83522ca8..0b9ffb5c0 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -1,6 +1,7 @@ use crate::{SparseTrieError, SparseTrieResult}; use alloy_primitives::{hex, keccak256, map::HashMap, B256}; use alloy_rlp::Decodable; +use reth_tracing::tracing::debug; use reth_trie::{ prefix_set::{PrefixSet, PrefixSetMut}, RlpNode, @@ -264,8 +265,244 @@ impl RevealedSparseTrie { } /// Remove leaf node from the trie. 
- pub fn remove_leaf(&mut self, _path: Nibbles) { - unimplemented!() + pub fn remove_leaf(&mut self, path: Nibbles) -> SparseTrieResult<()> { + self.prefix_set.insert(path.clone()); + let existing = self.values.remove(&path); + if existing.is_none() { + // trie structure unchanged, return immediately + return Ok(()) + } + + let mut removed_nodes = self.take_nodes_for_path(&path)?; + debug!(target: "trie::sparse", ?path, ?removed_nodes, "Removed nodes for path"); + // Pop the first node from the stack which is the leaf node we want to remove. + let mut child = removed_nodes.pop().expect("leaf exists"); + #[cfg(debug_assertions)] + { + let mut child_path = child.path.clone(); + let SparseNode::Leaf { key, .. } = &child.node else { panic!("expected leaf node") }; + child_path.extend_from_slice_unchecked(key); + assert_eq!(child_path, path); + } + + // If we don't have any other removed nodes, insert an empty node at the root. + if removed_nodes.is_empty() { + debug_assert!(self.nodes.is_empty()); + self.nodes.insert(Nibbles::default(), SparseNode::Empty); + + return Ok(()) + } + + // Walk the stack of removed nodes from the back and re-insert them back into the trie, + // adjusting the node type as needed. + while let Some(removed_node) = removed_nodes.pop() { + let removed_path = removed_node.path; + + let new_node = match &removed_node.node { + SparseNode::Empty => return Err(SparseTrieError::Blind), + SparseNode::Hash(hash) => { + return Err(SparseTrieError::BlindedNode { path: removed_path, hash: *hash }) + } + SparseNode::Leaf { .. } => { + unreachable!("we already popped the leaf node") + } + SparseNode::Extension { key, .. } => { + // If the node is an extension node, we need to look at its child to see if we + // need to merge them. + match &child.node { + SparseNode::Empty => return Err(SparseTrieError::Blind), + SparseNode::Hash(hash) => { + return Err(SparseTrieError::BlindedNode { + path: child.path, + hash: *hash, + }) + } + // For a leaf node, we collapse the extension node into a leaf node, + // extending the key. While it's impossible to encounter an extension node + // followed by a leaf node in a complete trie, it's possible here because we + // could have downgraded the extension node's child into a leaf node from + // another node type. + SparseNode::Leaf { key: leaf_key, .. } => { + self.nodes.remove(&child.path); + + let mut new_key = key.clone(); + new_key.extend_from_slice_unchecked(leaf_key); + SparseNode::new_leaf(new_key) + } + // For an extension node, we collapse them into one extension node, + // extending the key + SparseNode::Extension { key: extension_key, .. } => { + self.nodes.remove(&child.path); + + let mut new_key = key.clone(); + new_key.extend_from_slice_unchecked(extension_key); + SparseNode::new_ext(new_key) + } + // For a branch node, we just leave the extension node as-is. + SparseNode::Branch { .. } => removed_node.node, + } + } + SparseNode::Branch { mut state_mask, hash: _ } => { + // If the node is a branch node, we need to check the number of children left + // after deleting the child at the given nibble. + + if let Some(removed_nibble) = removed_node.unset_branch_nibble { + state_mask.unset_bit(removed_nibble); + } + + // If only one child is left set in the branch node, we need to collapse it. + if state_mask.count_bits() == 1 { + let child_nibble = + state_mask.first_set_bit_index().expect("state mask is not empty"); + + // Get full path of the only child node left. 
+ let mut child_path = removed_path.clone(); + child_path.push_unchecked(child_nibble); + + // Remove the only child node. + let child = self.nodes.get(&child_path).unwrap(); + + debug!(target: "trie::sparse", ?removed_path, ?child_path, ?child, "Branch node has only one child"); + + let mut delete_child = false; + let new_node = match child { + SparseNode::Empty => return Err(SparseTrieError::Blind), + SparseNode::Hash(hash) => { + return Err(SparseTrieError::BlindedNode { + path: child_path, + hash: *hash, + }) + } + // If the only child is a leaf node, we downgrade the branch node into a + // leaf node, prepending the nibble to the key, and delete the old + // child. + SparseNode::Leaf { key, .. } => { + delete_child = true; + + let mut new_key = Nibbles::from_nibbles_unchecked([child_nibble]); + new_key.extend_from_slice_unchecked(key); + SparseNode::new_leaf(new_key) + } + // If the only child node is an extension node, we downgrade the branch + // node into an even longer extension node, prepending the nibble to the + // key, and delete the old child. + SparseNode::Extension { key, .. } => { + delete_child = true; + + let mut new_key = Nibbles::from_nibbles_unchecked([child_nibble]); + new_key.extend_from_slice_unchecked(key); + SparseNode::new_ext(new_key) + } + // If the only child is a branch node, we downgrade the current branch + // node into a one-nibble extension node. + SparseNode::Branch { .. } => { + SparseNode::new_ext(Nibbles::from_nibbles_unchecked([child_nibble])) + } + }; + + if delete_child { + self.nodes.remove(&child_path); + } + + new_node + } + // If more than one child is left set in the branch, we just re-insert it + // as-is. + else { + SparseNode::new_branch(state_mask) + } + } + }; + + child = RemovedSparseNode { + path: removed_path.clone(), + node: new_node.clone(), + unset_branch_nibble: None, + }; + debug!(target: "trie::sparse", ?removed_path, ?new_node, "Re-inserting the node"); + self.nodes.insert(removed_path, new_node); + } + + Ok(()) + } + + /// Traverse trie nodes down to the leaf node and collect all nodes along the path. + fn take_nodes_for_path(&mut self, path: &Nibbles) -> SparseTrieResult> { + let mut current = Nibbles::default(); // Start traversal from the root + let mut nodes = Vec::new(); // Collect traversed nodes + + while let Some(node) = self.nodes.remove(¤t) { + match &node { + SparseNode::Empty => return Err(SparseTrieError::Blind), + SparseNode::Hash(hash) => { + return Err(SparseTrieError::BlindedNode { path: current, hash: *hash }) + } + SparseNode::Leaf { key: _key, .. } => { + // Leaf node is always the one that we're deleting, and no other leaf nodes can + // be found during traversal. + + #[cfg(debug_assertions)] + { + let mut current = current.clone(); + current.extend_from_slice_unchecked(_key); + assert_eq!(¤t, path); + } + + nodes.push(RemovedSparseNode { + path: current.clone(), + node, + unset_branch_nibble: None, + }); + break + } + SparseNode::Extension { key, .. } => { + #[cfg(debug_assertions)] + { + let mut current = current.clone(); + current.extend_from_slice_unchecked(key); + assert!(path.starts_with(¤t)); + } + + let path = current.clone(); + current.extend_from_slice_unchecked(key); + nodes.push(RemovedSparseNode { path, node, unset_branch_nibble: None }); + } + SparseNode::Branch { state_mask, .. } => { + let nibble = path[current.len()]; + debug_assert!(state_mask.is_bit_set(nibble)); + + // If the branch node has a child that is a leaf node that we're removing, + // we need to unset this nibble. 
+ // Any other branch nodes will not require unsetting the nibble, because + // deleting one leaf node can not remove the whole path + // where the branch node is located. + let mut child_path = + Nibbles::from_nibbles([current.as_slice(), &[nibble]].concat()); + let unset_branch_nibble = self + .nodes + .get(&child_path) + .map_or(false, move |node| match node { + SparseNode::Leaf { key, .. } => { + // Get full path of the leaf node + child_path.extend_from_slice_unchecked(key); + &child_path == path + } + _ => false, + }) + .then_some(nibble); + + nodes.push(RemovedSparseNode { + path: current.clone(), + node, + unset_branch_nibble, + }); + + current.push_unchecked(nibble); + } + } + } + + Ok(nodes) } /// Return the root of the sparse trie. @@ -476,13 +713,87 @@ impl SparseNode { } } +#[derive(Debug)] +struct RemovedSparseNode { + path: Nibbles, + node: SparseNode, + unset_branch_nibble: Option, +} + #[cfg(test)] mod tests { + use std::collections::BTreeMap; + use super::*; use alloy_primitives::U256; use itertools::Itertools; use proptest::prelude::*; - use reth_trie_common::HashBuilder; + use rand::seq::IteratorRandom; + use reth_testing_utils::generators; + use reth_trie::{BranchNode, ExtensionNode, LeafNode}; + use reth_trie_common::{ + proof::{ProofNodes, ProofRetainer}, + HashBuilder, + }; + + /// Calculate the state root by feeding the provided state to the hash builder and retaining the + /// proofs for the provided targets. + /// + /// Returns the state root and the retained proof nodes. + fn hash_builder_root_with_proofs>( + state: impl IntoIterator, + proof_targets: impl IntoIterator, + ) -> (B256, ProofNodes) { + let mut hash_builder = + HashBuilder::default().with_proof_retainer(ProofRetainer::from_iter(proof_targets)); + for (key, value) in state { + hash_builder.add_leaf(key, value.as_ref()); + } + (hash_builder.root(), hash_builder.take_proof_nodes()) + } + + /// Assert that the sparse trie nodes and the proof nodes from the hash builder are equal. + fn assert_eq_sparse_trie_proof_nodes( + sparse_trie: &RevealedSparseTrie, + proof_nodes: ProofNodes, + ) { + let proof_nodes = proof_nodes + .into_nodes_sorted() + .into_iter() + .map(|(path, node)| (path, TrieNode::decode(&mut node.as_ref()).unwrap())); + + let sparse_nodes = sparse_trie.nodes.iter().sorted_by_key(|(path, _)| *path); + + for ((proof_node_path, proof_node), (sparse_node_path, sparse_node)) in + proof_nodes.zip(sparse_nodes) + { + assert_eq!(&proof_node_path, sparse_node_path); + + let equals = match (&proof_node, &sparse_node) { + // Both nodes are empty + (TrieNode::EmptyRoot, SparseNode::Empty) => true, + // Both nodes are branches and have the same state mask + ( + TrieNode::Branch(BranchNode { state_mask: proof_state_mask, .. }), + SparseNode::Branch { state_mask: sparse_state_mask, .. }, + ) => proof_state_mask == sparse_state_mask, + // Both nodes are extensions and have the same key + ( + TrieNode::Extension(ExtensionNode { key: proof_key, .. }), + SparseNode::Extension { key: sparse_key, .. }, + ) | + // Both nodes are leaves and have the same key + ( + TrieNode::Leaf(LeafNode { key: proof_key, .. }), + SparseNode::Leaf { key: sparse_key, .. 
}, + ) => proof_key == sparse_key, + // Empty and hash nodes are specific to the sparse trie, skip them + (_, SparseNode::Empty | SparseNode::Hash(_)) => continue, + _ => false, + }; + assert!(equals, "proof node: {:?}, sparse node: {:?}", proof_node, sparse_node); + } + } #[test] fn sparse_trie_is_blind() { @@ -495,14 +806,15 @@ mod tests { let path = Nibbles::unpack(B256::with_last_byte(42)); let value = alloy_rlp::encode_fixed_size(&U256::from(1)); - let mut hash_builder = HashBuilder::default(); - hash_builder.add_leaf(path.clone(), &value); - let expected = hash_builder.root(); + let (hash_builder_root, hash_builder_proof_nodes) = + hash_builder_root_with_proofs([(path.clone(), &value)], [path.clone()]); let mut sparse = RevealedSparseTrie::default(); sparse.update_leaf(path, value.to_vec()).unwrap(); - let root = sparse.root(); - assert_eq!(root, expected); + let sparse_root = sparse.root(); + + assert_eq!(sparse_root, hash_builder_root); + assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); } #[test] @@ -510,18 +822,19 @@ mod tests { let paths = (0..=16).map(|b| Nibbles::unpack(B256::with_last_byte(b))).collect::>(); let value = alloy_rlp::encode_fixed_size(&U256::from(1)); - let mut hash_builder = HashBuilder::default(); - for path in &paths { - hash_builder.add_leaf(path.clone(), &value); - } - let expected = hash_builder.root(); + let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( + paths.iter().cloned().zip(std::iter::repeat_with(|| value.clone())), + paths.clone(), + ); let mut sparse = RevealedSparseTrie::default(); for path in &paths { sparse.update_leaf(path.clone(), value.to_vec()).unwrap(); } - let root = sparse.root(); - assert_eq!(root, expected); + let sparse_root = sparse.root(); + + assert_eq!(sparse_root, hash_builder_root); + assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); } #[test] @@ -529,18 +842,19 @@ mod tests { let paths = (239..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::>(); let value = alloy_rlp::encode_fixed_size(&U256::from(1)); - let mut hash_builder = HashBuilder::default(); - for path in &paths { - hash_builder.add_leaf(path.clone(), &value); - } - let expected = hash_builder.root(); + let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( + paths.iter().cloned().zip(std::iter::repeat_with(|| value.clone())), + paths.clone(), + ); let mut sparse = RevealedSparseTrie::default(); for path in &paths { sparse.update_leaf(path.clone(), value.to_vec()).unwrap(); } - let root = sparse.root(); - assert_eq!(root, expected); + let sparse_root = sparse.root(); + + assert_eq!(sparse_root, hash_builder_root); + assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); } #[test] @@ -556,18 +870,19 @@ mod tests { .collect::>(); let value = alloy_rlp::encode_fixed_size(&U256::from(1)); - let mut hash_builder = HashBuilder::default(); - for path in paths.iter().sorted_unstable_by_key(|key| *key) { - hash_builder.add_leaf(path.clone(), &value); - } - let expected = hash_builder.root(); + let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( + paths.iter().sorted_unstable().cloned().zip(std::iter::repeat_with(|| value.clone())), + paths.clone(), + ); let mut sparse = RevealedSparseTrie::default(); for path in &paths { sparse.update_leaf(path.clone(), value.to_vec()).unwrap(); } - let root = sparse.root(); - assert_eq!(root, expected); + let sparse_root = sparse.root(); + + assert_eq!(sparse_root, hash_builder_root); 
+ assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); } #[test] @@ -576,52 +891,339 @@ mod tests { let old_value = alloy_rlp::encode_fixed_size(&U256::from(1)); let new_value = alloy_rlp::encode_fixed_size(&U256::from(2)); - let mut hash_builder = HashBuilder::default(); - for path in paths.iter().sorted_unstable_by_key(|key| *key) { - hash_builder.add_leaf(path.clone(), &old_value); - } - let expected = hash_builder.root(); + let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( + paths.iter().cloned().zip(std::iter::repeat_with(|| old_value.clone())), + paths.clone(), + ); let mut sparse = RevealedSparseTrie::default(); for path in &paths { sparse.update_leaf(path.clone(), old_value.to_vec()).unwrap(); } - let root = sparse.root(); - assert_eq!(root, expected); + let sparse_root = sparse.root(); - let mut hash_builder = HashBuilder::default(); - for path in paths.iter().sorted_unstable_by_key(|key| *key) { - hash_builder.add_leaf(path.clone(), &new_value); - } - let expected = hash_builder.root(); + assert_eq!(sparse_root, hash_builder_root); + assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); + + let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( + paths.iter().cloned().zip(std::iter::repeat_with(|| new_value.clone())), + paths.clone(), + ); for path in &paths { sparse.update_leaf(path.clone(), new_value.to_vec()).unwrap(); } - let root = sparse.root(); - assert_eq!(root, expected); + let sparse_root = sparse.root(); + + assert_eq!(sparse_root, hash_builder_root); + assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); + } + + #[test] + fn sparse_trie_remove_leaf() { + reth_tracing::init_test_tracing(); + + let mut sparse = RevealedSparseTrie::default(); + + let value = alloy_rlp::encode_fixed_size(&U256::ZERO).to_vec(); + + sparse + .update_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]), value.clone()) + .unwrap(); + sparse + .update_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]), value.clone()) + .unwrap(); + sparse + .update_leaf(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3]), value.clone()) + .unwrap(); + sparse + .update_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2]), value.clone()) + .unwrap(); + sparse + .update_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2]), value.clone()) + .unwrap(); + sparse.update_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0]), value).unwrap(); + + // Extension (Key = 5) + // └── Branch (Mask = 1011) + // ├── 0 -> Extension (Key = 23) + // │ └── Branch (Mask = 0101) + // │ ├── 1 -> Leaf (Key = 1, Path = 50231) + // │ └── 3 -> Leaf (Key = 3, Path = 50233) + // ├── 2 -> Leaf (Key = 013, Path = 52013) + // └── 3 -> Branch (Mask = 0101) + // ├── 1 -> Leaf (Key = 3102, Path = 53102) + // └── 3 -> Branch (Mask = 1010) + // ├── 0 -> Leaf (Key = 3302, Path = 53302) + // └── 2 -> Leaf (Key = 3320, Path = 53320) + pretty_assertions::assert_eq!( + sparse.nodes.clone().into_iter().collect::>(), + BTreeMap::from_iter([ + (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), + (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1101.into())), + ( + Nibbles::from_nibbles([0x5, 0x0]), + SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3])) + ), + ( + Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]), + SparseNode::new_branch(0b1010.into()) + ), + ( + Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]), + SparseNode::new_leaf(Nibbles::new()) + ), + ( + Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]), + 
SparseNode::new_leaf(Nibbles::new()) + ), + ( + Nibbles::from_nibbles([0x5, 0x2]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x0, 0x1, 0x3])) + ), + (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into())), + ( + Nibbles::from_nibbles([0x5, 0x3, 0x1]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x0, 0x2])) + ), + (Nibbles::from_nibbles([0x5, 0x3, 0x3]), SparseNode::new_branch(0b0101.into())), + ( + Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x2])) + ), + ( + Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x0])) + ) + ]) + ); + + sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3])).unwrap(); + + // Extension (Key = 5) + // └── Branch (Mask = 1001) + // ├── 0 -> Extension (Key = 23) + // │ └── Branch (Mask = 0101) + // │ ├── 1 -> Leaf (Key = 0231, Path = 50231) + // │ └── 3 -> Leaf (Key = 0233, Path = 50233) + // └── 3 -> Branch (Mask = 0101) + // ├── 1 -> Leaf (Key = 3102, Path = 53102) + // └── 3 -> Branch (Mask = 1010) + // ├── 0 -> Leaf (Key = 3302, Path = 53302) + // └── 2 -> Leaf (Key = 3320, Path = 53320) + pretty_assertions::assert_eq!( + sparse.nodes.clone().into_iter().collect::>(), + BTreeMap::from_iter([ + (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), + (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into())), + ( + Nibbles::from_nibbles([0x5, 0x0]), + SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3])) + ), + ( + Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]), + SparseNode::new_branch(0b1010.into()) + ), + ( + Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]), + SparseNode::new_leaf(Nibbles::new()) + ), + ( + Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]), + SparseNode::new_leaf(Nibbles::new()) + ), + (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into())), + ( + Nibbles::from_nibbles([0x5, 0x3, 0x1]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x0, 0x2])) + ), + (Nibbles::from_nibbles([0x5, 0x3, 0x3]), SparseNode::new_branch(0b0101.into())), + ( + Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x2])) + ), + ( + Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x0])) + ) + ]) + ); + + sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])).unwrap(); + + // Extension (Key = 5) + // └── Branch (Mask = 1001) + // ├── 0 -> Leaf (Key = 0233, Path = 50233) + // └── 3 -> Branch (Mask = 0101) + // ├── 1 -> Leaf (Key = 3102, Path = 53102) + // └── 3 -> Branch (Mask = 1010) + // ├── 0 -> Leaf (Key = 3302, Path = 53302) + // └── 2 -> Leaf (Key = 3320, Path = 53320) + pretty_assertions::assert_eq!( + sparse.nodes.clone().into_iter().collect::>(), + BTreeMap::from_iter([ + (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), + (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into())), + ( + Nibbles::from_nibbles([0x5, 0x0]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x2, 0x3, 0x3])) + ), + (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into())), + ( + Nibbles::from_nibbles([0x5, 0x3, 0x1]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x0, 0x2])) + ), + (Nibbles::from_nibbles([0x5, 0x3, 0x3]), SparseNode::new_branch(0b0101.into())), + ( + Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x2])) + ), + ( + Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x0])) + ) + ]) + ); 
+ + sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2])).unwrap(); + + // Extension (Key = 5) + // └── Branch (Mask = 1001) + // ├── 0 -> Leaf (Key = 0233, Path = 50233) + // └── 3 -> Branch (Mask = 1010) + // ├── 0 -> Leaf (Key = 3302, Path = 53302) + // └── 2 -> Leaf (Key = 3320, Path = 53320) + pretty_assertions::assert_eq!( + sparse.nodes.clone().into_iter().collect::>(), + BTreeMap::from_iter([ + (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), + (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into())), + ( + Nibbles::from_nibbles([0x5, 0x0]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x2, 0x3, 0x3])) + ), + ( + Nibbles::from_nibbles([0x5, 0x3]), + SparseNode::new_ext(Nibbles::from_nibbles([0x3])) + ), + (Nibbles::from_nibbles([0x5, 0x3, 0x3]), SparseNode::new_branch(0b0101.into())), + ( + Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x2])) + ), + ( + Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x0])) + ) + ]) + ); + + sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0])).unwrap(); + + // Extension (Key = 5) + // └── Branch (Mask = 1001) + // ├── 0 -> Leaf (Key = 0233, Path = 50233) + // └── 3 -> Leaf (Key = 3302, Path = 53302) + pretty_assertions::assert_eq!( + sparse.nodes.clone().into_iter().collect::>(), + BTreeMap::from_iter([ + (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), + (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into())), + ( + Nibbles::from_nibbles([0x5, 0x0]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x2, 0x3, 0x3])) + ), + ( + Nibbles::from_nibbles([0x5, 0x3]), + SparseNode::new_leaf(Nibbles::from_nibbles([0x3, 0x0, 0x2])) + ), + ]) + ); + + sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3])).unwrap(); + + // Leaf (Key = 53302) + pretty_assertions::assert_eq!( + sparse.nodes.clone().into_iter().collect::>(), + BTreeMap::from_iter([( + Nibbles::new(), + SparseNode::new_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])) + ),]) + ); + + sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])).unwrap(); + + // Empty + pretty_assertions::assert_eq!( + sparse.nodes.clone().into_iter().collect::>(), + BTreeMap::from_iter([(Nibbles::new(), SparseNode::Empty),]) + ); } #[test] - fn sparse_trie_empty_update_fuzz() { + fn sparse_trie_fuzz() { proptest!(ProptestConfig::with_cases(10), |(updates: Vec>)| { - let mut state = std::collections::BTreeMap::default(); + let mut rng = generators::rng(); + + let mut state = BTreeMap::default(); + let mut unpacked_state = BTreeMap::default(); let mut sparse = RevealedSparseTrie::default(); for update in updates { - for (key, value) in &update { - sparse.update_leaf(Nibbles::unpack(key), alloy_rlp::encode_fixed_size(value).to_vec()).unwrap(); + let keys_to_delete_len = update.len() / 2; + + let unpacked_update = update.iter().map(|(key, value)| ( + Nibbles::unpack(key), + alloy_rlp::encode_fixed_size(value).to_vec() + )); + + // Insert state updates into the sparse trie and calculate the root + for (key, value) in unpacked_update.clone() { + sparse.update_leaf(key, value).unwrap(); } - let root = sparse.root(); + let sparse_root = sparse.root(); + // Insert state updates into the hash builder and calculate the root + unpacked_state.extend(unpacked_update); state.extend(update); - let mut hash_builder = HashBuilder::default(); - for (key, value) in &state { - hash_builder.add_leaf(Nibbles::unpack(key), 
&alloy_rlp::encode_fixed_size(value)); + let keys = state.keys().map(Nibbles::unpack).collect::>(); + let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( + unpacked_state.clone(), + keys, + ); + + // Assert that the sparse trie root matches the hash builder root + assert_eq!(sparse_root, hash_builder_root); + // Assert that the sparse trie nodes match the hash builder proof nodes + assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); + + // Delete some keys from both the hash builder and the sparse trie and check + // that the sparse trie root still matches the hash builder root + + let keys_to_delete = state + .keys() + .choose_multiple(&mut rng, keys_to_delete_len) + .into_iter() + .copied() + .collect::>(); + for key in keys_to_delete { + state.remove(&key).unwrap(); + unpacked_state.remove(&Nibbles::unpack(key)).unwrap(); + sparse.remove_leaf(Nibbles::unpack(key)).unwrap(); } - let expected = hash_builder.root(); - assert_eq!(root, expected); + let sparse_root = sparse.root(); + + let keys = state.keys().map(Nibbles::unpack).collect::>(); + let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( + unpacked_state.clone(), + keys, + ); + + // Assert that the sparse trie root matches the hash builder root + assert_eq!(sparse_root, hash_builder_root); + // Assert that the sparse trie nodes match the hash builder proof nodes + assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); } }); } From 4254b80a89630f1cb875d05c4b3c32f39af86b6b Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Thu, 17 Oct 2024 12:27:57 +0200 Subject: [PATCH 106/425] bench(trie): avoid unnecessary clones in hash builder repeated bench (#11827) --- crates/trie/sparse/benches/root.rs | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/crates/trie/sparse/benches/root.rs b/crates/trie/sparse/benches/root.rs index 4078eb7af..248e3caee 100644 --- a/crates/trie/sparse/benches/root.rs +++ b/crates/trie/sparse/benches/root.rs @@ -90,12 +90,23 @@ pub fn calculate_root_from_leaves_repeated(c: &mut Criterion) { }, |(init_storage, storage_updates, mut trie_updates)| { let mut storage = init_storage; - for update in storage_updates { + let mut storage_updates = storage_updates.into_iter().peekable(); + while let Some(update) = storage_updates.next() { storage.extend(&update); let prefix_set = update.construct_prefix_set().freeze(); - let storage_sorted = storage.clone().into_sorted(); - let trie_updates_sorted = trie_updates.clone().into_sorted(); + let (storage_sorted, trie_updates_sorted) = + if storage_updates.peek().is_some() { + ( + storage.clone().into_sorted(), + trie_updates.clone().into_sorted(), + ) + } else { + ( + std::mem::take(&mut storage).into_sorted(), + std::mem::take(&mut trie_updates).into_sorted(), + ) + }; let walker = TrieWalker::new( InMemoryStorageTrieCursor::new( @@ -133,7 +144,9 @@ pub fn calculate_root_from_leaves_repeated(c: &mut Criterion) { } hb.root(); - trie_updates.finalize(node_iter.walker, hb); + if storage_updates.peek().is_some() { + trie_updates.finalize(node_iter.walker, hb); + } } }, ) From b47ce92d9a049f1069162d7d8e5c63b056d3c36e Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Thu, 17 Oct 2024 04:36:25 -0600 Subject: [PATCH 107/425] replace ChainSpec to use EthereumHardforks trait (#11824) --- crates/payload/primitives/src/lib.rs | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git 
a/crates/payload/primitives/src/lib.rs b/crates/payload/primitives/src/lib.rs index 5d1004051..8173cae34 100644 --- a/crates/payload/primitives/src/lib.rs +++ b/crates/payload/primitives/src/lib.rs @@ -26,7 +26,7 @@ pub use traits::{ mod payload; pub use payload::PayloadOrAttributes; -use reth_chainspec::{ChainSpec, EthereumHardforks}; +use reth_chainspec::EthereumHardforks; /// The types that are used by the engine API. pub trait PayloadTypes: Send + Sync + Unpin + core::fmt::Debug + Clone + 'static { /// The built payload type. @@ -125,8 +125,8 @@ pub fn validate_payload_timestamp( /// Validates the presence of the `withdrawals` field according to the payload timestamp. /// After Shanghai, withdrawals field must be [Some]. /// Before Shanghai, withdrawals field must be [None]; -pub fn validate_withdrawals_presence( - chain_spec: &ChainSpec, +pub fn validate_withdrawals_presence( + chain_spec: &T, version: EngineApiMessageVersion, message_validation_kind: MessageValidationKind, timestamp: u64, @@ -210,8 +210,8 @@ pub fn validate_withdrawals_presence( /// `MessageValidationKind::Payload`, then the error code will be `-32602: Invalid params`. If the /// parameter is `MessageValidationKind::PayloadAttributes`, then the error code will be `-38003: /// Invalid payload attributes`. -pub fn validate_parent_beacon_block_root_presence( - chain_spec: &ChainSpec, +pub fn validate_parent_beacon_block_root_presence( + chain_spec: &T, version: EngineApiMessageVersion, validation_kind: MessageValidationKind, timestamp: u64, @@ -298,13 +298,14 @@ impl MessageValidationKind { /// either an execution payload, or payload attributes. /// /// The version is provided by the [`EngineApiMessageVersion`] argument. -pub fn validate_version_specific_fields( - chain_spec: &ChainSpec, +pub fn validate_version_specific_fields( + chain_spec: &T, version: EngineApiMessageVersion, payload_or_attrs: PayloadOrAttributes<'_, Type>, ) -> Result<(), EngineObjectValidationError> where Type: PayloadAttributes, + T: EthereumHardforks, { validate_withdrawals_presence( chain_spec, From f8902b59f5ccc626469e419c273e724a6bb79bae Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 17 Oct 2024 12:54:28 +0200 Subject: [PATCH 108/425] chore: pedantic style change (#11832) --- crates/primitives/src/transaction/mod.rs | 35 +++++++++++------------- 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index aeee4232e..da1a2d871 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1,18 +1,15 @@ //! Transaction types. 
use crate::BlockHashOrNumber; -use alloy_eips::eip7702::SignedAuthorization; -use alloy_primitives::{keccak256, Address, ChainId, TxKind, B256, U256}; - use alloy_consensus::{ - SignableTransaction, Transaction as AlloyTransaction, TxEip1559, TxEip2930, TxEip4844, - TxEip7702, TxLegacy, + SignableTransaction, Transaction as _, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy, }; use alloy_eips::{ eip2718::{Decodable2718, Eip2718Error, Eip2718Result, Encodable2718}, eip2930::AccessList, + eip7702::SignedAuthorization, }; -use alloy_primitives::{Bytes, TxHash}; +use alloy_primitives::{keccak256, Address, Bytes, ChainId, TxHash, TxKind, B256, U256}; use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header}; use core::mem; use derive_more::{AsRef, Deref}; @@ -709,7 +706,7 @@ impl Encodable for Transaction { } } -impl AlloyTransaction for Transaction { +impl alloy_consensus::Transaction for Transaction { fn chain_id(&self) -> Option { match self { Self::Legacy(tx) => tx.chain_id(), @@ -782,18 +779,6 @@ impl AlloyTransaction for Transaction { } } - fn priority_fee_or_price(&self) -> u128 { - match self { - Self::Legacy(tx) => tx.priority_fee_or_price(), - Self::Eip2930(tx) => tx.priority_fee_or_price(), - Self::Eip1559(tx) => tx.priority_fee_or_price(), - Self::Eip4844(tx) => tx.priority_fee_or_price(), - Self::Eip7702(tx) => tx.priority_fee_or_price(), - #[cfg(feature = "optimism")] - Self::Deposit(tx) => tx.priority_fee_or_price(), - } - } - fn max_fee_per_blob_gas(&self) -> Option { match self { Self::Legacy(tx) => tx.max_fee_per_blob_gas(), @@ -806,6 +791,18 @@ impl AlloyTransaction for Transaction { } } + fn priority_fee_or_price(&self) -> u128 { + match self { + Self::Legacy(tx) => tx.priority_fee_or_price(), + Self::Eip2930(tx) => tx.priority_fee_or_price(), + Self::Eip1559(tx) => tx.priority_fee_or_price(), + Self::Eip4844(tx) => tx.priority_fee_or_price(), + Self::Eip7702(tx) => tx.priority_fee_or_price(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.priority_fee_or_price(), + } + } + fn to(&self) -> TxKind { match self { Self::Legacy(tx) => tx.to(), From b77265e61b06cb6402ac210a01396ebf57c55418 Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Thu, 17 Oct 2024 18:59:27 +0800 Subject: [PATCH 109/425] reth-bench: rm redundant clone (#11829) --- bin/reth-bench/src/bench/new_payload_fcu.rs | 3 +-- bin/reth-bench/src/bench/new_payload_only.rs | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/bin/reth-bench/src/bench/new_payload_fcu.rs b/bin/reth-bench/src/bench/new_payload_fcu.rs index a8c18b48a..ca5359fb8 100644 --- a/bin/reth-bench/src/bench/new_payload_fcu.rs +++ b/bin/reth-bench/src/bench/new_payload_fcu.rs @@ -37,9 +37,8 @@ pub struct Command { impl Command { /// Execute `benchmark new-payload-fcu` command pub async fn execute(self, _ctx: CliContext) -> eyre::Result<()> { - let cloned_args = self.benchmark.clone(); let BenchContext { benchmark_mode, block_provider, auth_provider, mut next_block } = - BenchContext::new(&cloned_args, self.rpc_url).await?; + BenchContext::new(&self.benchmark, self.rpc_url).await?; let (sender, mut receiver) = tokio::sync::mpsc::channel(1000); tokio::task::spawn(async move { diff --git a/bin/reth-bench/src/bench/new_payload_only.rs b/bin/reth-bench/src/bench/new_payload_only.rs index e6392318a..85342d1af 100644 --- a/bin/reth-bench/src/bench/new_payload_only.rs +++ b/bin/reth-bench/src/bench/new_payload_only.rs @@ -35,11 +35,10 @@ pub struct Command { impl Command { /// Execute `benchmark new-payload-only` command pub 
async fn execute(self, _ctx: CliContext) -> eyre::Result<()> { - let cloned_args = self.benchmark.clone(); // TODO: this could be just a function I guess, but destructuring makes the code slightly // more readable than a 4 element tuple. let BenchContext { benchmark_mode, block_provider, auth_provider, mut next_block } = - BenchContext::new(&cloned_args, self.rpc_url).await?; + BenchContext::new(&self.benchmark, self.rpc_url).await?; let (sender, mut receiver) = tokio::sync::mpsc::channel(1000); tokio::task::spawn(async move { From 3bc3e7169990b9340e60c394b0e3241728fd841c Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 17 Oct 2024 13:21:40 +0200 Subject: [PATCH 110/425] primitives: use `EMPTY_OMMER_ROOT_HASH` const when possible (#11833) --- crates/consensus/common/src/validation.rs | 4 ++-- crates/net/eth-wire-types/src/header.rs | 11 ++++------- crates/storage/codecs/src/alloy/header.rs | 3 ++- crates/storage/provider/src/test_utils/blocks.rs | 5 ++--- 4 files changed, 10 insertions(+), 13 deletions(-) diff --git a/crates/consensus/common/src/validation.rs b/crates/consensus/common/src/validation.rs index df66a00d1..711e7772b 100644 --- a/crates/consensus/common/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -300,7 +300,7 @@ pub fn validate_against_parent_4844( #[cfg(test)] mod tests { use super::*; - use alloy_consensus::{TxEip4844, EMPTY_ROOT_HASH}; + use alloy_consensus::{TxEip4844, EMPTY_OMMER_ROOT_HASH, EMPTY_ROOT_HASH}; use alloy_primitives::{ hex_literal::hex, Address, BlockHash, BlockNumber, Bytes, Parity, Sealable, U256, }; @@ -441,7 +441,7 @@ mod tests { let header = Header { parent_hash: hex!("859fad46e75d9be177c2584843501f2270c7e5231711e90848290d12d7c6dcdd").into(), - ommers_hash: hex!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347").into(), + ommers_hash: EMPTY_OMMER_ROOT_HASH, beneficiary: hex!("4675c7e5baafbffbca748158becba61ef3b0a263").into(), state_root: hex!("8337403406e368b3e40411138f4868f79f6d835825d55fd0c2f6e17b1a3948e9").into(), transactions_root: EMPTY_ROOT_HASH, diff --git a/crates/net/eth-wire-types/src/header.rs b/crates/net/eth-wire-types/src/header.rs index 7ecfc802d..b25a7568b 100644 --- a/crates/net/eth-wire-types/src/header.rs +++ b/crates/net/eth-wire-types/src/header.rs @@ -87,7 +87,7 @@ impl From for bool { #[cfg(test)] mod tests { use super::*; - use alloy_consensus::EMPTY_ROOT_HASH; + use alloy_consensus::{EMPTY_OMMER_ROOT_HASH, EMPTY_ROOT_HASH}; use alloy_primitives::{address, b256, bloom, bytes, hex, Address, Bytes, B256, U256}; use alloy_rlp::{Decodable, Encodable}; use reth_primitives::Header; @@ -124,7 +124,7 @@ mod tests { .unwrap(); let header = Header { parent_hash: b256!("e0a94a7a3c9617401586b1a27025d2d9671332d22d540e0af72b069170380f2a"), - ommers_hash: b256!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"), + ommers_hash: EMPTY_OMMER_ROOT_HASH, beneficiary: address!("ba5e000000000000000000000000000000000000"), state_root: b256!("ec3c94b18b8a1cff7d60f8d258ec723312932928626b4c9355eb4ab3568ec7f7"), transactions_root: b256!("50f738580ed699f0469702c7ccc63ed2e51bc034be9479b7bff4e68dee84accf"), @@ -228,10 +228,7 @@ mod tests { "3a9b485972e7353edd9152712492f0c58d89ef80623686b6bf947a4a6dce6cb6", ) .unwrap(), - ommers_hash: B256::from_str( - "1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", - ) - .unwrap(), + ommers_hash: EMPTY_OMMER_ROOT_HASH, beneficiary: 
Address::from_str("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba").unwrap(), state_root: B256::from_str( "3c837fc158e3e93eafcaf2e658a02f5d8f99abc9f1c4c66cdea96c0ca26406ae", @@ -280,7 +277,7 @@ mod tests { "13a7ec98912f917b3e804654e37c9866092043c13eb8eab94eb64818e886cff5", ) .unwrap(), - ommers_hash: b256!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"), + ommers_hash: EMPTY_OMMER_ROOT_HASH, beneficiary: address!("f97e180c050e5ab072211ad2c213eb5aee4df134"), state_root: b256!("ec229dbe85b0d3643ad0f471e6ec1a36bbc87deffbbd970762d22a53b35d068a"), transactions_root: EMPTY_ROOT_HASH, diff --git a/crates/storage/codecs/src/alloy/header.rs b/crates/storage/codecs/src/alloy/header.rs index 3a17ed1fd..623eded9e 100644 --- a/crates/storage/codecs/src/alloy/header.rs +++ b/crates/storage/codecs/src/alloy/header.rs @@ -126,12 +126,13 @@ impl Compact for AlloyHeader { #[cfg(test)] mod tests { use super::*; + use alloy_consensus::EMPTY_OMMER_ROOT_HASH; use alloy_primitives::{address, b256, bloom, bytes, hex}; /// Holesky block #1947953 const HOLESKY_BLOCK: Header = Header { parent_hash: b256!("8605e0c46689f66b3deed82598e43d5002b71a929023b665228728f0c6e62a95"), - ommers_hash: b256!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"), + ommers_hash: EMPTY_OMMER_ROOT_HASH, beneficiary: address!("c6e2459991bfe27cca6d86722f35da23a1e4cb97"), state_root: b256!("edad188ca5647d62f4cca417c11a1afbadebce30d23260767f6f587e9b3b9993"), transactions_root: b256!("4daf25dc08a841aa22aa0d3cb3e1f159d4dcaf6a6063d4d36bfac11d3fdb63ee"), diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index 57e111d67..2a70664b1 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -1,6 +1,6 @@ //! Dummy blocks and data for tests use crate::{DatabaseProviderRW, ExecutionOutcome}; -use alloy_consensus::TxLegacy; +use alloy_consensus::{TxLegacy, EMPTY_OMMER_ROOT_HASH}; use alloy_primitives::{ b256, hex_literal::hex, map::HashMap, Address, BlockNumber, Bytes, Log, Parity, Sealable, TxKind, B256, U256, @@ -66,8 +66,7 @@ pub(crate) static TEST_BLOCK: LazyLock = LazyLock::new(|| SealedBlo Header { parent_hash: hex!("c86e8cc0310ae7c531c758678ddbfd16fc51c8cef8cec650b032de9869e8b94f") .into(), - ommers_hash: hex!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347") - .into(), + ommers_hash: EMPTY_OMMER_ROOT_HASH, beneficiary: hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba").into(), state_root: hex!("50554882fbbda2c2fd93fdc466db9946ea262a67f7a76cc169e714f105ab583d") .into(), From 52407b18de50a07efde4d441d9107ab78f1c19ae Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Thu, 17 Oct 2024 15:14:48 +0200 Subject: [PATCH 111/425] chore(sdk): incorporate block module into `reth-primitives-traits` (#11835) --- crates/primitives-traits/src/block.rs | 99 ---------------------- crates/primitives-traits/src/block/body.rs | 72 ++-------------- crates/primitives-traits/src/block/mod.rs | 47 +++++----- crates/primitives-traits/src/lib.rs | 3 + 4 files changed, 39 insertions(+), 182 deletions(-) delete mode 100644 crates/primitives-traits/src/block.rs diff --git a/crates/primitives-traits/src/block.rs b/crates/primitives-traits/src/block.rs deleted file mode 100644 index 02f581801..000000000 --- a/crates/primitives-traits/src/block.rs +++ /dev/null @@ -1,99 +0,0 @@ -//! Block abstraction. 
- -pub mod body; - -use alloc::fmt; -use core::ops; - -use alloy_consensus::BlockHeader; -use alloy_primitives::{Address, Sealable, B256}; - -use crate::{traits::BlockBody, BlockWithSenders, SealedBlock, SealedHeader}; - -/// Abstraction of block data type. -pub trait Block: - fmt::Debug - + Clone - + PartialEq - + Eq - + Default - + serde::Serialize - + for<'a> serde::Deserialize<'a> - + From<(Self::Header, Self::Body)> - + Into<(Self::Header, Self::Body)> -{ - /// Header part of the block. - type Header: BlockHeader + Sealable; - - /// The block's body contains the transactions in the block. - type Body: BlockBody; - - /// A block and block hash. - type SealedBlock; - - /// A block and addresses of senders of transactions in it. - type BlockWithSenders; - - /// Returns reference to [`BlockHeader`] type. - fn header(&self) -> &Self::Header; - - /// Returns reference to [`BlockBody`] type. - fn body(&self) -> &Self::Body; - - /// Calculate the header hash and seal the block so that it can't be changed. - // todo: can be default impl if sealed block type is made generic over header and body and - // migrated to alloy - fn seal_slow(self) -> Self::SealedBlock; - - /// Seal the block with a known hash. - /// - /// WARNING: This method does not perform validation whether the hash is correct. - // todo: can be default impl if sealed block type is made generic over header and body and - // migrated to alloy - fn seal(self, hash: B256) -> Self::SealedBlock; - - /// Expensive operation that recovers transaction signer. See - /// [`SealedBlockWithSenders`](reth_primitives::SealedBlockWithSenders). - fn senders(&self) -> Option> { - self.body().recover_signers() - } - - /// Transform into a [`BlockWithSenders`]. - /// - /// # Panics - /// - /// If the number of senders does not match the number of transactions in the block - /// and the signer recovery for one of the transactions fails. - /// - /// Note: this is expected to be called with blocks read from disk. - #[track_caller] - fn with_senders_unchecked(self, senders: Vec
<Address>) -> Self::BlockWithSenders {
-        self.try_with_senders_unchecked(senders).expect("stored block is valid")
-    }
-
-    /// Transform into a [`BlockWithSenders`] using the given senders.
-    ///
-    /// If the number of senders does not match the number of transactions in the block, this falls
-    /// back to manually recovery, but _without ensuring that the signature has a low `s` value_.
-    /// See also [`TransactionSigned::recover_signer_unchecked`]
-    ///
-    /// Returns an error if a signature is invalid.
-    // todo: can be default impl if block with senders type is made generic over block and migrated
-    // to alloy
-    #[track_caller]
-    fn try_with_senders_unchecked(
-        self,
-        senders: Vec<Address>
, - ) -> Result; - - /// **Expensive**. Transform into a [`BlockWithSenders`] by recovering senders in the contained - /// transactions. - /// - /// Returns `None` if a transaction is invalid. - // todo: can be default impl if sealed block type is made generic over header and body and - // migrated to alloy - fn with_recovered_senders(self) -> Option; - - /// Calculates a heuristic for the in-memory size of the [`Block`]. - fn size(&self) -> usize; -} diff --git a/crates/primitives-traits/src/block/body.rs b/crates/primitives-traits/src/block/body.rs index 03246c68b..85eeda166 100644 --- a/crates/primitives-traits/src/block/body.rs +++ b/crates/primitives-traits/src/block/body.rs @@ -1,11 +1,10 @@ //! Block body abstraction. -use alloc::fmt; -use core::ops; +use alloc::{fmt, vec::Vec}; -use alloy_consensus::{BlockHeader,Request, Transaction, TxType}; +use alloy_consensus::{BlockHeader, Request, Transaction, TxType}; +use alloy_eips::eip4895::Withdrawal; use alloy_primitives::{Address, B256}; -use alloy_eips::eip1559::Withdrawal; use crate::Block; @@ -37,7 +36,7 @@ pub trait BlockBody: /// Returns reference to transactions in block. fn transactions(&self) -> &[Self::SignedTransaction]; - /// Returns [`Withdrawals`] in the block, if any. + /// Returns `Withdrawals` in the block, if any. // todo: branch out into extension trait fn withdrawals(&self) -> Option<&Self::Withdrawals>; @@ -60,13 +59,13 @@ pub trait BlockBody: /// Calculate the withdrawals root for the block body, if withdrawals exist. If there are no /// withdrawals, this will return `None`. - // todo: can be default impl if `calculate_withdrawals_root` made into a method on + // todo: can be default impl if `calculate_withdrawals_root` made into a method on // `Withdrawals` and `Withdrawals` moved to alloy fn calculate_withdrawals_root(&self) -> Option; /// Calculate the requests root for the block body, if requests exist. If there are no /// requests, this will return `None`. - // todo: can be default impl if `calculate_requests_root` made into a method on + // todo: can be default impl if `calculate_requests_root` made into a method on // `Requests` and `Requests` moved to alloy fn calculate_requests_root(&self) -> Option; @@ -75,17 +74,17 @@ pub trait BlockBody: /// Returns whether or not the block body contains any blob transactions. fn has_blob_transactions(&self) -> bool { - self.transactions().iter().any(|tx| tx.ty() as u8 == TxType::Eip4844 as u8) + self.transactions().iter().any(|tx| tx.ty() == TxType::Eip4844 as u8) } /// Returns whether or not the block body contains any EIP-7702 transactions. fn has_eip7702_transactions(&self) -> bool { - self.transactions().iter().any(|tx| tx.ty() as u8 == TxType::Eip7702 as u8) + self.transactions().iter().any(|tx| tx.ty() == TxType::Eip7702 as u8) } /// Returns an iterator over all blob transactions of the block fn blob_transactions_iter(&self) -> impl Iterator + '_ { - self.transactions().iter().filter(|tx| tx.ty() as u8 == TxType::Eip4844 as u8) + self.transactions().iter().filter(|tx| tx.ty() == TxType::Eip4844 as u8) } /// Returns only the blob transactions, if any, from the block body. @@ -104,56 +103,3 @@ pub trait BlockBody: /// Calculates a heuristic for the in-memory size of the [`BlockBody`]. 
fn size(&self) -> usize; } - -impl BlockBody for T -where - T: ops::Deref - + Clone - + fmt::Debug - + PartialEq - + Eq - + Default - + serde::Serialize - + for<'de> serde::Deserialize<'de> - + alloy_rlp::Encodable - + alloy_rlp::Decodable, -{ - type Header = ::Header; - type SignedTransaction = ::SignedTransaction; - - fn transactions(&self) -> &Vec { - self.deref().transactions() - } - - fn withdrawals(&self) -> Option<&Withdrawals> { - self.deref().withdrawals() - } - - fn ommers(&self) -> &Vec { - self.deref().ommers() - } - - fn requests(&self) -> Option<&Requests> { - self.deref().requests() - } - - fn calculate_tx_root(&self) -> B256 { - self.deref().calculate_tx_root() - } - - fn calculate_ommers_root(&self) -> B256 { - self.deref().calculate_ommers_root() - } - - fn recover_signers(&self) -> Option> { - self.deref().recover_signers() - } - - fn blob_versioned_hashes_iter(&self) -> impl Iterator + '_ { - self.deref().blob_versioned_hashes_iter() - } - - fn size(&self) -> usize { - self.deref().size() - } -} diff --git a/crates/primitives-traits/src/block/mod.rs b/crates/primitives-traits/src/block/mod.rs index 02f581801..395cf61df 100644 --- a/crates/primitives-traits/src/block/mod.rs +++ b/crates/primitives-traits/src/block/mod.rs @@ -2,15 +2,22 @@ pub mod body; -use alloc::fmt; -use core::ops; +use alloc::{fmt, vec::Vec}; use alloy_consensus::BlockHeader; use alloy_primitives::{Address, Sealable, B256}; -use crate::{traits::BlockBody, BlockWithSenders, SealedBlock, SealedHeader}; +use crate::BlockBody; + +/// Helper trait, unifies behaviour required of a block header. +pub trait Header: BlockHeader + Sealable {} + +impl Header for T where T: BlockHeader + Sealable {} /// Abstraction of block data type. +// todo: make sealable super-trait, depends on +// todo: make with senders extension trait, so block can be impl by block type already containing +// senders pub trait Block: fmt::Debug + Clone @@ -23,16 +30,16 @@ pub trait Block: + Into<(Self::Header, Self::Body)> { /// Header part of the block. - type Header: BlockHeader + Sealable; + type Header: Header; /// The block's body contains the transactions in the block. type Body: BlockBody; /// A block and block hash. - type SealedBlock; + type SealedBlock; /// A block and addresses of senders of transactions in it. - type BlockWithSenders; + type BlockWithSenders; /// Returns reference to [`BlockHeader`] type. fn header(&self) -> &Self::Header; @@ -41,24 +48,24 @@ pub trait Block: fn body(&self) -> &Self::Body; /// Calculate the header hash and seal the block so that it can't be changed. - // todo: can be default impl if sealed block type is made generic over header and body and + // todo: can be default impl if sealed block type is made generic over header and body and // migrated to alloy - fn seal_slow(self) -> Self::SealedBlock; + fn seal_slow(self) -> Self::SealedBlock; /// Seal the block with a known hash. /// /// WARNING: This method does not perform validation whether the hash is correct. - // todo: can be default impl if sealed block type is made generic over header and body and + // todo: can be default impl if sealed block type is made generic over header and body and // migrated to alloy - fn seal(self, hash: B256) -> Self::SealedBlock; + fn seal(self, hash: B256) -> Self::SealedBlock; /// Expensive operation that recovers transaction signer. See - /// [`SealedBlockWithSenders`](reth_primitives::SealedBlockWithSenders). + /// `SealedBlockWithSenders`. 
fn senders(&self) -> Option> { self.body().recover_signers() } - /// Transform into a [`BlockWithSenders`]. + /// Transform into a `BlockWithSenders`. /// /// # Panics /// @@ -67,32 +74,32 @@ pub trait Block: /// /// Note: this is expected to be called with blocks read from disk. #[track_caller] - fn with_senders_unchecked(self, senders: Vec
<Address>) -> Self::BlockWithSenders {
+    fn with_senders_unchecked(self, senders: Vec<Address>
) -> Self::BlockWithSenders { self.try_with_senders_unchecked(senders).expect("stored block is valid") } - /// Transform into a [`BlockWithSenders`] using the given senders. + /// Transform into a `BlockWithSenders` using the given senders. /// /// If the number of senders does not match the number of transactions in the block, this falls /// back to manually recovery, but _without ensuring that the signature has a low `s` value_. - /// See also [`TransactionSigned::recover_signer_unchecked`] + /// See also `SignedTransaction::recover_signer_unchecked`. /// /// Returns an error if a signature is invalid. - // todo: can be default impl if block with senders type is made generic over block and migrated + // todo: can be default impl if block with senders type is made generic over block and migrated // to alloy #[track_caller] fn try_with_senders_unchecked( self, senders: Vec
, - ) -> Result; + ) -> Result, Self>; - /// **Expensive**. Transform into a [`BlockWithSenders`] by recovering senders in the contained + /// **Expensive**. Transform into a `BlockWithSenders` by recovering senders in the contained /// transactions. /// /// Returns `None` if a transaction is invalid. - // todo: can be default impl if sealed block type is made generic over header and body and + // todo: can be default impl if sealed block type is made generic over header and body and // migrated to alloy - fn with_recovered_senders(self) -> Option; + fn with_recovered_senders(self) -> Option>; /// Calculates a heuristic for the in-memory size of the [`Block`]. fn size(&self) -> usize; diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index dd68607f5..8c54bd68c 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -32,6 +32,9 @@ pub use integer_list::{IntegerList, IntegerListError}; pub mod request; pub use request::{Request, Requests}; +pub mod block; +pub use block::{body::BlockBody, Block}; + mod withdrawal; pub use withdrawal::{Withdrawal, Withdrawals}; From 6ba4bbe4aad49e483b0670903bdb0328d9ca1b57 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 17 Oct 2024 15:54:32 +0200 Subject: [PATCH 112/425] chore: make op-evm compile with no-std (#11834) --- Cargo.lock | 2 +- crates/optimism/evm/Cargo.toml | 4 +++- crates/optimism/evm/src/error.rs | 13 ++++++++----- crates/optimism/evm/src/execute.rs | 3 ++- crates/optimism/evm/src/l1.rs | 2 +- crates/optimism/evm/src/lib.rs | 6 +++++- crates/optimism/evm/src/strategy.rs | 3 ++- 7 files changed, 22 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d31239808..855e3de5e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8108,6 +8108,7 @@ dependencies = [ "alloy-eips", "alloy-genesis", "alloy-primitives", + "derive_more 1.0.0", "op-alloy-consensus", "reth-chainspec", "reth-consensus", @@ -8123,7 +8124,6 @@ dependencies = [ "reth-revm", "revm", "revm-primitives", - "thiserror", "tracing", ] diff --git a/crates/optimism/evm/Cargo.toml b/crates/optimism/evm/Cargo.toml index 0a22dcfdd..a1e2021a4 100644 --- a/crates/optimism/evm/Cargo.toml +++ b/crates/optimism/evm/Cargo.toml @@ -37,7 +37,7 @@ revm.workspace = true revm-primitives.workspace = true # misc -thiserror.workspace = true +derive_more.workspace = true tracing.workspace = true [dev-dependencies] @@ -51,6 +51,8 @@ alloy-genesis.workspace = true alloy-consensus.workspace = true [features] +default = ["std"] +std = [] optimism = [ "reth-primitives/optimism", "reth-execution-types/optimism", diff --git a/crates/optimism/evm/src/error.rs b/crates/optimism/evm/src/error.rs index c5c6a0a4a..71f8709e1 100644 --- a/crates/optimism/evm/src/error.rs +++ b/crates/optimism/evm/src/error.rs @@ -1,27 +1,30 @@ //! Error types for the Optimism EVM module. +use alloc::string::String; use reth_evm::execute::BlockExecutionError; /// Optimism Block Executor Errors -#[derive(thiserror::Error, Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, derive_more::Display)] pub enum OptimismBlockExecutionError { /// Error when trying to parse L1 block info - #[error("could not get L1 block info from L2 block: {message:?}")] + #[display("could not get L1 block info from L2 block: {message}")] L1BlockInfoError { /// The inner error message message: String, }, /// Thrown when force deploy of create2deployer code fails. 
- #[error("failed to force create2deployer account code")] + #[display("failed to force create2deployer account code")] ForceCreate2DeployerFail, /// Thrown when a blob transaction is included in a sequencer's block. - #[error("blob transaction included in sequencer block")] + #[display("blob transaction included in sequencer block")] BlobTransactionRejected, /// Thrown when a database account could not be loaded. - #[error("failed to load account {0}")] + #[display("failed to load account {_0}")] AccountLoadFailed(alloy_primitives::Address), } +impl core::error::Error for OptimismBlockExecutionError {} + impl From for BlockExecutionError { fn from(err: OptimismBlockExecutionError) -> Self { Self::other(err) diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index f7da1c250..263420623 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -3,8 +3,10 @@ use crate::{ l1::ensure_create2_deployer, OpChainSpec, OptimismBlockExecutionError, OptimismEvmConfig, }; +use alloc::{boxed::Box, sync::Arc, vec::Vec}; use alloy_consensus::Transaction as _; use alloy_primitives::{BlockNumber, U256}; +use core::fmt::Display; use reth_chainspec::{ChainSpec, EthereumHardforks}; use reth_evm::{ execute::{ @@ -27,7 +29,6 @@ use revm_primitives::{ db::{Database, DatabaseCommit}, BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, }; -use std::{fmt::Display, sync::Arc}; use tracing::trace; /// Provides executors to execute regular optimism blocks diff --git a/crates/optimism/evm/src/l1.rs b/crates/optimism/evm/src/l1.rs index 3412501eb..e0668ab02 100644 --- a/crates/optimism/evm/src/l1.rs +++ b/crates/optimism/evm/src/l1.rs @@ -1,6 +1,7 @@ //! Optimism-specific implementation and utilities for the executor use crate::OptimismBlockExecutionError; +use alloc::{string::ToString, sync::Arc}; use alloy_primitives::{address, b256, hex, Address, Bytes, B256, U256}; use reth_chainspec::ChainSpec; use reth_execution_errors::BlockExecutionError; @@ -11,7 +12,6 @@ use revm::{ primitives::{Bytecode, HashMap, SpecId}, DatabaseCommit, L1BlockInfo, }; -use std::sync::Arc; use tracing::trace; /// The address of the create2 deployer diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index 4d0f9d89f..eb067da32 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -6,9 +6,14 @@ issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] // The `optimism` feature must be enabled to use this crate. #![cfg(feature = "optimism")] +#[macro_use] +extern crate alloc; + +use alloc::{sync::Arc, vec::Vec}; use alloy_primitives::{Address, U256}; use reth_evm::{ConfigureEvm, ConfigureEvmEnv, NextBlockEnvAttributes}; use reth_optimism_chainspec::OpChainSpec; @@ -18,7 +23,6 @@ use reth_primitives::{ Head, Header, TransactionSigned, }; use reth_revm::{inspector_handle_register, Database, Evm, EvmBuilder, GetInspector}; -use std::sync::Arc; mod config; pub use config::{revm_spec, revm_spec_by_timestamp_after_bedrock}; diff --git a/crates/optimism/evm/src/strategy.rs b/crates/optimism/evm/src/strategy.rs index fe8164cc7..9ba43604e 100644 --- a/crates/optimism/evm/src/strategy.rs +++ b/crates/optimism/evm/src/strategy.rs @@ -1,7 +1,9 @@ //! 
Optimism block execution strategy, use crate::{l1::ensure_create2_deployer, OptimismBlockExecutionError, OptimismEvmConfig}; +use alloc::{boxed::Box, sync::Arc, vec::Vec}; use alloy_consensus::Transaction as _; +use core::fmt::Display; use reth_chainspec::EthereumHardforks; use reth_consensus::ConsensusError; use reth_evm::{ @@ -24,7 +26,6 @@ use reth_revm::{ use revm_primitives::{ db::DatabaseCommit, BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, U256, }; -use std::{fmt::Display, sync::Arc}; use tracing::trace; /// Factory for [`OpExecutionStrategy`]. From 2131c87edb8f3c7459a73c19de9034fd1e28a77c Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Thu, 17 Oct 2024 22:40:10 +0800 Subject: [PATCH 113/425] refactor: rm redundant clones in tests (#11840) --- crates/prune/prune/src/segments/receipts.rs | 6 ++++-- crates/prune/prune/src/segments/user/receipts_by_logs.rs | 1 + crates/prune/prune/src/segments/user/sender_recovery.rs | 7 ++++--- crates/prune/prune/src/segments/user/transaction_lookup.rs | 6 ++++-- 4 files changed, 13 insertions(+), 7 deletions(-) diff --git a/crates/prune/prune/src/segments/receipts.rs b/crates/prune/prune/src/segments/receipts.rs index 05482d659..c081bf88c 100644 --- a/crates/prune/prune/src/segments/receipts.rs +++ b/crates/prune/prune/src/segments/receipts.rs @@ -109,12 +109,14 @@ mod tests { let mut receipts = Vec::new(); for block in &blocks { + receipts.reserve_exact(block.body.transactions.len()); for transaction in &block.body.transactions { receipts .push((receipts.len() as u64, random_receipt(&mut rng, transaction, Some(0)))); } } - db.insert_receipts(receipts.clone()).expect("insert receipts"); + let receipts_len = receipts.len(); + db.insert_receipts(receipts).expect("insert receipts"); assert_eq!( db.table::().unwrap().len(), @@ -194,7 +196,7 @@ mod tests { assert_eq!( db.table::().unwrap().len(), - receipts.len() - (last_pruned_tx_number + 1) + receipts_len - (last_pruned_tx_number + 1) ); assert_eq!( db.factory diff --git a/crates/prune/prune/src/segments/user/receipts_by_logs.rs b/crates/prune/prune/src/segments/user/receipts_by_logs.rs index 05bc40b6c..ee2accee1 100644 --- a/crates/prune/prune/src/segments/user/receipts_by_logs.rs +++ b/crates/prune/prune/src/segments/user/receipts_by_logs.rs @@ -263,6 +263,7 @@ mod tests { let (deposit_contract_addr, _) = random_eoa_account(&mut rng); for block in &blocks { + receipts.reserve_exact(block.body.size()); for (txi, transaction) in block.body.transactions.iter().enumerate() { let mut receipt = random_receipt(&mut rng, transaction, Some(1)); receipt.logs.push(random_log( diff --git a/crates/prune/prune/src/segments/user/sender_recovery.rs b/crates/prune/prune/src/segments/user/sender_recovery.rs index bd86f3e65..f189e6c36 100644 --- a/crates/prune/prune/src/segments/user/sender_recovery.rs +++ b/crates/prune/prune/src/segments/user/sender_recovery.rs @@ -110,6 +110,7 @@ mod tests { let mut transaction_senders = Vec::new(); for block in &blocks { + transaction_senders.reserve_exact(block.body.transactions.len()); for transaction in &block.body.transactions { transaction_senders.push(( transaction_senders.len() as u64, @@ -117,8 +118,8 @@ mod tests { )); } } - db.insert_transaction_senders(transaction_senders.clone()) - .expect("insert transaction senders"); + let transaction_senders_len = transaction_senders.len(); + db.insert_transaction_senders(transaction_senders).expect("insert transaction senders"); assert_eq!( db.table::().unwrap().len(), @@ -202,7 +203,7 @@ mod tests { assert_eq!( 
db.table::().unwrap().len(), - transaction_senders.len() - (last_pruned_tx_number + 1) + transaction_senders_len - (last_pruned_tx_number + 1) ); assert_eq!( db.factory diff --git a/crates/prune/prune/src/segments/user/transaction_lookup.rs b/crates/prune/prune/src/segments/user/transaction_lookup.rs index bb8196cdb..2df8cccf3 100644 --- a/crates/prune/prune/src/segments/user/transaction_lookup.rs +++ b/crates/prune/prune/src/segments/user/transaction_lookup.rs @@ -140,11 +140,13 @@ mod tests { let mut tx_hash_numbers = Vec::new(); for block in &blocks { + tx_hash_numbers.reserve_exact(block.body.transactions.len()); for transaction in &block.body.transactions { tx_hash_numbers.push((transaction.hash, tx_hash_numbers.len() as u64)); } } - db.insert_tx_hash_numbers(tx_hash_numbers.clone()).expect("insert tx hash numbers"); + let tx_hash_numbers_len = tx_hash_numbers.len(); + db.insert_tx_hash_numbers(tx_hash_numbers).expect("insert tx hash numbers"); assert_eq!( db.table::().unwrap().len(), @@ -228,7 +230,7 @@ mod tests { assert_eq!( db.table::().unwrap().len(), - tx_hash_numbers.len() - (last_pruned_tx_number + 1) + tx_hash_numbers_len - (last_pruned_tx_number + 1) ); assert_eq!( db.factory From 9db28d91a47e309960f554479a5fbfa61a56dc4e Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Thu, 17 Oct 2024 18:29:17 +0200 Subject: [PATCH 114/425] chore(sdk): Impl `alloy_consensus::Transaction` for `TransactionSigned` (#11843) --- crates/primitives/src/transaction/mod.rs | 66 ++++++++++++++++++- .../rpc-types-compat/src/transaction/mod.rs | 4 +- crates/rpc/rpc/src/eth/helpers/types.rs | 5 +- crates/rpc/rpc/src/txpool.rs | 2 +- crates/transaction-pool/src/traits.rs | 4 +- 5 files changed, 71 insertions(+), 10 deletions(-) diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index da1a2d871..10fddcb4c 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1351,6 +1351,68 @@ impl TransactionSigned { } } +impl alloy_consensus::Transaction for TransactionSigned { + fn chain_id(&self) -> Option { + self.deref().chain_id() + } + + fn nonce(&self) -> u64 { + self.deref().nonce() + } + + fn gas_limit(&self) -> u64 { + self.deref().gas_limit() + } + + fn gas_price(&self) -> Option { + self.deref().gas_price() + } + + fn max_fee_per_gas(&self) -> u128 { + self.deref().max_fee_per_gas() + } + + fn max_priority_fee_per_gas(&self) -> Option { + self.deref().max_priority_fee_per_gas() + } + + fn max_fee_per_blob_gas(&self) -> Option { + self.deref().max_fee_per_blob_gas() + } + + fn priority_fee_or_price(&self) -> u128 { + self.deref().priority_fee_or_price() + } + + fn to(&self) -> TxKind { + alloy_consensus::Transaction::to(self.deref()) + } + + fn value(&self) -> U256 { + self.deref().value() + } + + fn input(&self) -> &[u8] { + self.deref().input() + } + + fn ty(&self) -> u8 { + self.deref().ty() + } + + fn access_list(&self) -> Option<&AccessList> { + self.deref().access_list() + } + + fn blob_versioned_hashes(&self) -> Option<&[B256]> { + alloy_consensus::Transaction::blob_versioned_hashes(self.deref()) + } + + fn authorization_list(&self) -> Option<&[SignedAuthorization]> { + self.deref().authorization_list() + } +} + impl From for TransactionSigned { fn from(recovered: TransactionSignedEcRecovered) -> Self { recovered.signed_transaction @@ -2181,8 +2243,8 @@ mod tests { let tx = TransactionSigned::decode_2718(&mut data.as_slice()).unwrap(); let sender = tx.recover_signer().unwrap(); assert_eq!(sender, 
address!("001e2b7dE757bA469a57bF6b23d982458a07eFcE")); - assert_eq!(tx.to(), Some(address!("D9e1459A7A482635700cBc20BBAF52D495Ab9C96"))); - assert_eq!(tx.input().as_ref(), hex!("1b55ba3a")); + assert_eq!(tx.to(), Some(address!("D9e1459A7A482635700cBc20BBAF52D495Ab9C96")).into()); + assert_eq!(tx.input(), hex!("1b55ba3a")); let encoded = tx.encoded_2718(); assert_eq!(encoded.as_ref(), data.to_vec()); } diff --git a/crates/rpc/rpc-types-compat/src/transaction/mod.rs b/crates/rpc/rpc-types-compat/src/transaction/mod.rs index a489a5886..7ffd48cb1 100644 --- a/crates/rpc/rpc-types-compat/src/transaction/mod.rs +++ b/crates/rpc/rpc-types-compat/src/transaction/mod.rs @@ -48,9 +48,7 @@ pub trait TransactionCompat: Send + Sync + Unpin + Clone + fmt::Debug { // baseFee` let gas_price = base_fee .and_then(|base_fee| { - signed_tx - .effective_tip_per_gas(Some(base_fee)) - .map(|tip| tip + base_fee as u128) + signed_tx.effective_tip_per_gas(base_fee).map(|tip| tip + base_fee as u128) }) .unwrap_or_else(|| signed_tx.max_fee_per_gas()); diff --git a/crates/rpc/rpc/src/eth/helpers/types.rs b/crates/rpc/rpc/src/eth/helpers/types.rs index 982afdcac..ab7b1a268 100644 --- a/crates/rpc/rpc/src/eth/helpers/types.rs +++ b/crates/rpc/rpc/src/eth/helpers/types.rs @@ -37,8 +37,9 @@ where let GasPrice { gas_price, max_fee_per_gas } = Self::gas_price(&signed_tx, base_fee.map(|fee| fee as u64)); + let input = signed_tx.input().to_vec().into(); let chain_id = signed_tx.chain_id(); - let blob_versioned_hashes = signed_tx.blob_versioned_hashes(); + let blob_versioned_hashes = signed_tx.blob_versioned_hashes().map(|hs| hs.to_vec()); let access_list = signed_tx.access_list().cloned(); let authorization_list = signed_tx.authorization_list().map(|l| l.to_vec()); @@ -60,7 +61,7 @@ where max_priority_fee_per_gas: signed_tx.max_priority_fee_per_gas(), signature: Some(signature), gas: signed_tx.gas_limit(), - input: signed_tx.input().clone(), + input, chain_id, access_list, transaction_type: Some(signed_tx.tx_type() as u8), diff --git a/crates/rpc/rpc/src/txpool.rs b/crates/rpc/rpc/src/txpool.rs index 47aaac0bb..d40cdc5cd 100644 --- a/crates/rpc/rpc/src/txpool.rs +++ b/crates/rpc/rpc/src/txpool.rs @@ -101,7 +101,7 @@ where entry.insert( tx.nonce().to_string(), TxpoolInspectSummary { - to: tx.to(), + to: tx.to().into(), value: tx.value(), gas: tx.gas_limit() as u128, gas_price: tx.transaction.max_fee_per_gas(), diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index d19381935..ee4a5ada3 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -1183,7 +1183,7 @@ impl PoolTransaction for EthPooledTransaction { /// For EIP-1559 transactions: `min(max_fee_per_gas - base_fee, max_priority_fee_per_gas)`. /// For legacy transactions: `gas_price - base_fee`. fn effective_tip_per_gas(&self, base_fee: u64) -> Option { - self.transaction.effective_tip_per_gas(Some(base_fee)) + self.transaction.effective_tip_per_gas(base_fee) } /// Returns the max priority fee per gas if the transaction is an EIP-1559 transaction, and @@ -1199,7 +1199,7 @@ impl PoolTransaction for EthPooledTransaction { } fn input(&self) -> &[u8] { - self.transaction.input().as_ref() + self.transaction.input() } /// Returns a measurement of the heap usage of this type and all its internals. 
From 1aa3ce1a5a99d9ffd050d18b8a2fc621c22e80b2 Mon Sep 17 00:00:00 2001 From: Kunal Arora <55632507+aroralanuk@users.noreply.github.com> Date: Thu, 17 Oct 2024 22:02:29 +0530 Subject: [PATCH 115/425] feat(cli): add ChainSpecParser type to rethCli (#11772) --- crates/cli/cli/src/lib.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/cli/cli/src/lib.rs b/crates/cli/cli/src/lib.rs index f7bf716ea..e2c55057a 100644 --- a/crates/cli/cli/src/lib.rs +++ b/crates/cli/cli/src/lib.rs @@ -15,6 +15,7 @@ use std::{borrow::Cow, ffi::OsString}; /// The chainspec module defines the different chainspecs that can be used by the node. pub mod chainspec; +use crate::chainspec::ChainSpecParser; /// Reth based node cli. /// @@ -23,6 +24,9 @@ pub mod chainspec; /// It provides commonly used functionality for running commands and information about the CL, such /// as the name and version. pub trait RethCli: Sized { + /// The associated `ChainSpecParser` type + type ChainSpecParser: ChainSpecParser; + /// The name of the implementation, eg. `reth`, `op-reth`, etc. fn name(&self) -> Cow<'static, str>; From 0a1473b6e7949875f480043fa6874bdc583786d2 Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Fri, 18 Oct 2024 00:40:59 +0800 Subject: [PATCH 116/425] perf(blockchain-tree:) use `Vec::reserve_exact` (#11839) --- crates/blockchain-tree/src/blockchain_tree.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index 71a58aa56..db43dffcd 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -902,6 +902,7 @@ where // check unconnected block buffer for children of the chains let mut all_chain_blocks = Vec::new(); for chain in self.state.chains.values() { + all_chain_blocks.reserve_exact(chain.blocks().len()); for (&number, block) in chain.blocks() { all_chain_blocks.push(BlockNumHash { number, hash: block.hash() }) } From 76edc388237c18a586038924bc39dcbc6c72eaac Mon Sep 17 00:00:00 2001 From: Dan Cline <6798349+Rjected@users.noreply.github.com> Date: Thu, 17 Oct 2024 13:48:05 -0400 Subject: [PATCH 117/425] fix(rpc): apply beacon root contract call in debug_traceTransaction (#11845) --- crates/rpc/rpc/src/debug.rs | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index acf215b3b..164f402e4 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -16,6 +16,7 @@ use jsonrpsee::core::RpcResult; use reth_chainspec::EthereumHardforks; use reth_evm::{ execute::{BlockExecutorProvider, Executor}, + system_calls::SystemCaller, ConfigureEvmEnv, }; use reth_primitives::{Block, BlockId, BlockNumberOrTag, TransactionSignedEcRecovered}; @@ -26,7 +27,7 @@ use reth_provider::{ use reth_revm::database::StateProviderDatabase; use reth_rpc_api::DebugApiServer; use reth_rpc_eth_api::{ - helpers::{Call, EthApiSpec, EthTransactions, TraceExt}, + helpers::{Call, EthApiSpec, EthTransactions, LoadState, TraceExt}, EthApiTypes, FromEthApiError, }; use reth_rpc_eth_types::{EthApiError, StateCacheDb}; @@ -245,6 +246,7 @@ where // block the transaction is included in let state_at: BlockId = block.parent_hash.into(); let block_hash = block.hash(); + let parent_beacon_block_root = block.parent_beacon_block_root; let this = self.clone(); self.eth_api() @@ -255,6 +257,26 @@ where let tx = transaction.into_recovered(); let mut db = CacheDB::new(StateProviderDatabase::new(state)); + 
+ // apply relevant system calls + let mut system_caller = SystemCaller::new( + Call::evm_config(this.eth_api()).clone(), + LoadState::provider(this.eth_api()).chain_spec(), + ); + + system_caller + .pre_block_beacon_root_contract_call( + &mut db, + &cfg, + &block_env, + parent_beacon_block_root, + ) + .map_err(|_| { + EthApiError::EvmCustom( + "failed to apply 4788 beacon root system call".to_string(), + ) + })?; + // replay all transactions prior to the targeted transaction let index = this.eth_api().replay_transactions_until( &mut db, From 52848a352a241cb1d16ef5040ef2c4b16b0cac15 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 17 Oct 2024 20:13:05 +0200 Subject: [PATCH 118/425] fix: check for prague timestmap on pool init (#11847) --- crates/transaction-pool/src/validate/eth.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/transaction-pool/src/validate/eth.rs b/crates/transaction-pool/src/validate/eth.rs index 49165f189..2594e569a 100644 --- a/crates/transaction-pool/src/validate/eth.rs +++ b/crates/transaction-pool/src/validate/eth.rs @@ -639,6 +639,7 @@ impl EthTransactionValidatorBuilder { pub fn with_head_timestamp(mut self, timestamp: u64) -> Self { self.cancun = self.chain_spec.is_cancun_active_at_timestamp(timestamp); self.shanghai = self.chain_spec.is_shanghai_active_at_timestamp(timestamp); + self.prague = self.chain_spec.is_prague_active_at_timestamp(timestamp); self } From a6c8bda029eb32703ae77a8eea4e69aa0ec00a10 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 17 Oct 2024 21:44:04 +0200 Subject: [PATCH 119/425] primitives: use alloy `MAINNET_GENESIS_HASH` constant (#11848) --- Cargo.lock | 5 +++++ crates/chainspec/Cargo.toml | 1 + crates/chainspec/src/spec.rs | 3 ++- crates/ethereum-forks/Cargo.toml | 1 + crates/ethereum-forks/src/forkid.rs | 11 ++++------- crates/ethereum/node/Cargo.toml | 1 + crates/ethereum/node/tests/e2e/blobs.rs | 8 +++----- crates/net/eth-wire-types/src/status.rs | 13 ++++--------- crates/primitives-traits/src/constants/mod.rs | 5 ----- crates/primitives/src/lib.rs | 5 +---- crates/storage/db-common/Cargo.toml | 1 + crates/storage/db-common/src/init.rs | 3 ++- examples/manual-p2p/Cargo.toml | 2 ++ examples/manual-p2p/src/main.rs | 3 ++- 14 files changed, 29 insertions(+), 33 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 855e3de5e..600d61e97 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2857,6 +2857,7 @@ dependencies = [ name = "example-manual-p2p" version = "0.0.0" dependencies = [ + "alloy-consensus", "eyre", "futures", "reth-chainspec", @@ -6528,6 +6529,7 @@ name = "reth-chainspec" version = "1.1.0" dependencies = [ "alloy-chains", + "alloy-consensus", "alloy-eips", "alloy-genesis", "alloy-primitives", @@ -6813,6 +6815,7 @@ dependencies = [ name = "reth-db-common" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-genesis", "alloy-primitives", "boyer-moore-magiclen", @@ -7303,6 +7306,7 @@ name = "reth-ethereum-forks" version = "1.1.0" dependencies = [ "alloy-chains", + "alloy-consensus", "alloy-primitives", "alloy-rlp", "arbitrary", @@ -7931,6 +7935,7 @@ dependencies = [ name = "reth-node-ethereum" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-genesis", "alloy-primitives", "eyre", diff --git a/crates/chainspec/Cargo.toml b/crates/chainspec/Cargo.toml index b44a606b6..2864427c2 100644 --- a/crates/chainspec/Cargo.toml +++ b/crates/chainspec/Cargo.toml @@ -23,6 +23,7 @@ alloy-eips = { workspace = true, features = ["serde"] } 
alloy-genesis.workspace = true alloy-primitives = { workspace = true, features = ["rand", "rlp"] } alloy-trie.workspace = true +alloy-consensus.workspace = true # misc auto_impl.workspace = true diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index a8bae966b..deaca188a 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -7,6 +7,7 @@ use alloy_primitives::{address, b256, Address, BlockNumber, B256, U256}; use alloy_trie::EMPTY_ROOT_HASH; use derive_more::From; +use alloy_consensus::constants::MAINNET_GENESIS_HASH; use reth_ethereum_forks::{ ChainHardforks, DisplayHardforks, EthereumHardfork, EthereumHardforks, ForkCondition, ForkFilter, ForkFilterKey, ForkHash, ForkId, Hardfork, Hardforks, Head, DEV_HARDFORKS, @@ -18,7 +19,7 @@ use reth_network_peers::{ use reth_primitives_traits::{ constants::{ DEV_GENESIS_HASH, EIP1559_INITIAL_BASE_FEE, EMPTY_WITHDRAWALS, ETHEREUM_BLOCK_GAS_LIMIT, - HOLESKY_GENESIS_HASH, MAINNET_GENESIS_HASH, SEPOLIA_GENESIS_HASH, + HOLESKY_GENESIS_HASH, SEPOLIA_GENESIS_HASH, }, Header, SealedHeader, }; diff --git a/crates/ethereum-forks/Cargo.toml b/crates/ethereum-forks/Cargo.toml index 08a0bc98d..62ea234cd 100644 --- a/crates/ethereum-forks/Cargo.toml +++ b/crates/ethereum-forks/Cargo.toml @@ -36,6 +36,7 @@ auto_impl.workspace = true [dev-dependencies] arbitrary = { workspace = true, features = ["derive"] } proptest.workspace = true +alloy-consensus.workspace = true [features] default = ["std", "serde", "rustc-hash"] diff --git a/crates/ethereum-forks/src/forkid.rs b/crates/ethereum-forks/src/forkid.rs index 6876d0eb9..b612f3b0b 100644 --- a/crates/ethereum-forks/src/forkid.rs +++ b/crates/ethereum-forks/src/forkid.rs @@ -446,15 +446,12 @@ impl Cache { #[cfg(test)] mod tests { use super::*; - use alloy_primitives::b256; - - const GENESIS_HASH: B256 = - b256!("d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3"); + use alloy_consensus::constants::MAINNET_GENESIS_HASH; // EIP test vectors. 
#[test] fn forkhash() { - let mut fork_hash = ForkHash::from(GENESIS_HASH); + let mut fork_hash = ForkHash::from(MAINNET_GENESIS_HASH); assert_eq!(fork_hash.0, hex!("fc64ec04")); fork_hash += 1_150_000u64; @@ -468,7 +465,7 @@ mod tests { fn compatibility_check() { let mut filter = ForkFilter::new( Head { number: 0, ..Default::default() }, - GENESIS_HASH, + MAINNET_GENESIS_HASH, 0, vec![ ForkFilterKey::Block(1_150_000), @@ -727,7 +724,7 @@ mod tests { let mut fork_filter = ForkFilter::new( Head { number: 0, ..Default::default() }, - GENESIS_HASH, + MAINNET_GENESIS_HASH, 0, vec![ForkFilterKey::Block(b1), ForkFilterKey::Block(b2)], ); diff --git a/crates/ethereum/node/Cargo.toml b/crates/ethereum/node/Cargo.toml index 7a323f91d..213071cbf 100644 --- a/crates/ethereum/node/Cargo.toml +++ b/crates/ethereum/node/Cargo.toml @@ -52,6 +52,7 @@ alloy-genesis.workspace = true tokio.workspace = true futures-util.workspace = true serde_json.workspace = true +alloy-consensus.workspace = true [features] default = [] diff --git a/crates/ethereum/node/tests/e2e/blobs.rs b/crates/ethereum/node/tests/e2e/blobs.rs index 9390b34f4..b4d9a532a 100644 --- a/crates/ethereum/node/tests/e2e/blobs.rs +++ b/crates/ethereum/node/tests/e2e/blobs.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use alloy_consensus::constants::MAINNET_GENESIS_HASH; use alloy_genesis::Genesis; -use alloy_primitives::b256; use reth::{ args::RpcServerArgs, builder::{NodeBuilder, NodeConfig, NodeHandle}, @@ -75,13 +75,11 @@ async fn can_handle_blobs() -> eyre::Result<()> { .submit_payload(blob_payload, blob_attr, PayloadStatusEnum::Valid, versioned_hashes.clone()) .await?; - let genesis_hash = b256!("d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3"); - let (_, _) = tokio::join!( // send fcu with blob hash - node.engine_api.update_forkchoice(genesis_hash, blob_block_hash), + node.engine_api.update_forkchoice(MAINNET_GENESIS_HASH, blob_block_hash), // send fcu with normal hash - node.engine_api.update_forkchoice(genesis_hash, payload.block().hash()) + node.engine_api.update_forkchoice(MAINNET_GENESIS_HASH, payload.block().hash()) ); // submit normal payload diff --git a/crates/net/eth-wire-types/src/status.rs b/crates/net/eth-wire-types/src/status.rs index a5e7530ec..90e1731c9 100644 --- a/crates/net/eth-wire-types/src/status.rs +++ b/crates/net/eth-wire-types/src/status.rs @@ -138,10 +138,10 @@ impl Default for Status { /// /// # Example /// ``` +/// use alloy_consensus::constants::MAINNET_GENESIS_HASH; /// use alloy_primitives::{B256, U256}; /// use reth_chainspec::{Chain, EthereumHardfork, MAINNET}; /// use reth_eth_wire_types::{EthVersion, Status}; -/// use reth_primitives::MAINNET_GENESIS_HASH; /// /// // this is just an example status message! 
/// let status = Status::builder() @@ -216,6 +216,7 @@ impl StatusBuilder { #[cfg(test)] mod tests { use crate::{EthVersion, Status}; + use alloy_consensus::constants::MAINNET_GENESIS_HASH; use alloy_genesis::Genesis; use alloy_primitives::{hex, B256, U256}; use alloy_rlp::{Decodable, Encodable}; @@ -235,10 +236,7 @@ mod tests { "feb27336ca7923f8fab3bd617fcb6e75841538f71c1bcfc267d7838489d9e13d", ) .unwrap(), - genesis: B256::from_str( - "d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3", - ) - .unwrap(), + genesis: MAINNET_GENESIS_HASH, forkid: ForkId { hash: ForkHash([0xb7, 0x15, 0x07, 0x7d]), next: 0 }, }; @@ -258,10 +256,7 @@ mod tests { "feb27336ca7923f8fab3bd617fcb6e75841538f71c1bcfc267d7838489d9e13d", ) .unwrap(), - genesis: B256::from_str( - "d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3", - ) - .unwrap(), + genesis: MAINNET_GENESIS_HASH, forkid: ForkId { hash: ForkHash([0xb7, 0x15, 0x07, 0x7d]), next: 0 }, }; let status = Status::decode(&mut &data[..]).unwrap(); diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index 5d64b911b..890287f8b 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -98,11 +98,6 @@ pub const FINNEY_TO_WEI: u128 = (GWEI_TO_WEI as u128) * 1_000_000; /// Multiplier for converting ether to wei. pub const ETH_TO_WEI: u128 = FINNEY_TO_WEI * 1000; -/// The Ethereum mainnet genesis hash: -/// `0x0d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3` -pub const MAINNET_GENESIS_HASH: B256 = - b256!("d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3"); - /// Sepolia genesis hash: `0x25a5cc106eea7138acab33231d7160d69cb777ee0c2c553fcddf5138993e6dd9` pub const SEPOLIA_GENESIS_HASH: B256 = b256!("25a5cc106eea7138acab33231d7160d69cb777ee0c2c553fcddf5138993e6dd9"); diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index a59e72bbd..4a8d812ef 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -39,10 +39,7 @@ pub use block::{ }; #[cfg(feature = "reth-codec")] pub use compression::*; -pub use constants::{ - DEV_GENESIS_HASH, HOLESKY_GENESIS_HASH, KECCAK_EMPTY, MAINNET_GENESIS_HASH, - SEPOLIA_GENESIS_HASH, -}; +pub use constants::{DEV_GENESIS_HASH, HOLESKY_GENESIS_HASH, KECCAK_EMPTY, SEPOLIA_GENESIS_HASH}; pub use receipt::{ gas_spent_by_transactions, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, Receipts, }; diff --git a/crates/storage/db-common/Cargo.toml b/crates/storage/db-common/Cargo.toml index 7fc487969..9e4954357 100644 --- a/crates/storage/db-common/Cargo.toml +++ b/crates/storage/db-common/Cargo.toml @@ -42,6 +42,7 @@ tracing.workspace = true [dev-dependencies] reth-primitives-traits.workspace = true reth-provider = { workspace = true, features = ["test-utils"] } +alloy-consensus.workspace = true [lints] workspace = true diff --git a/crates/storage/db-common/src/init.rs b/crates/storage/db-common/src/init.rs index 3962dfd69..f0695421e 100644 --- a/crates/storage/db-common/src/init.rs +++ b/crates/storage/db-common/src/init.rs @@ -581,6 +581,7 @@ struct GenesisAccountWithAddress { #[cfg(test)] mod tests { use super::*; + use alloy_consensus::constants::MAINNET_GENESIS_HASH; use alloy_genesis::Genesis; use reth_chainspec::{Chain, ChainSpec, HOLESKY, MAINNET, SEPOLIA}; use reth_db::DatabaseEnv; @@ -591,7 +592,7 @@ mod tests { transaction::DbTx, Database, }; - use reth_primitives::{HOLESKY_GENESIS_HASH, MAINNET_GENESIS_HASH, 
SEPOLIA_GENESIS_HASH}; + use reth_primitives::{HOLESKY_GENESIS_HASH, SEPOLIA_GENESIS_HASH}; use reth_primitives_traits::IntegerList; use reth_provider::{ test_utils::{create_test_provider_factory_with_chain_spec, MockNodeTypesWithDB}, diff --git a/examples/manual-p2p/Cargo.toml b/examples/manual-p2p/Cargo.toml index 2303bfbfe..b1642f66c 100644 --- a/examples/manual-p2p/Cargo.toml +++ b/examples/manual-p2p/Cargo.toml @@ -14,6 +14,8 @@ reth-eth-wire.workspace = true reth-ecies.workspace = true reth-network-peers.workspace = true +alloy-consensus.workspace = true + secp256k1 = { workspace = true, features = ["global-context", "rand-std", "recovery"] } futures.workspace = true diff --git a/examples/manual-p2p/src/main.rs b/examples/manual-p2p/src/main.rs index edaaf8584..857a8a1c1 100644 --- a/examples/manual-p2p/src/main.rs +++ b/examples/manual-p2p/src/main.rs @@ -8,6 +8,7 @@ use std::time::Duration; +use alloy_consensus::constants::MAINNET_GENESIS_HASH; use futures::StreamExt; use reth_chainspec::{Chain, MAINNET}; use reth_discv4::{DiscoveryUpdate, Discv4, Discv4ConfigBuilder, DEFAULT_DISCOVERY_ADDRESS}; @@ -17,7 +18,7 @@ use reth_eth_wire::{ }; use reth_network::config::rng_secret_key; use reth_network_peers::{mainnet_nodes, pk2id, NodeRecord}; -use reth_primitives::{EthereumHardfork, Head, MAINNET_GENESIS_HASH}; +use reth_primitives::{EthereumHardfork, Head}; use secp256k1::{SecretKey, SECP256K1}; use std::sync::LazyLock; use tokio::net::TcpStream; From 8eb5d4f04782ae6d22bc242fc0ec7db63115b315 Mon Sep 17 00:00:00 2001 From: "0xriazaka.eth" <168359025+0xriazaka@users.noreply.github.com> Date: Thu, 17 Oct 2024 21:28:13 +0100 Subject: [PATCH 120/425] Remove unsafe from impl Compact for ClientVersion (#11318) Co-authored-by: Emilia Hane Co-authored-by: DaniPopes <57450786+DaniPopes@users.noreply.github.com> --- crates/storage/codecs/src/lib.rs | 15 +++++++++++++++ crates/storage/db-models/src/client_version.rs | 18 +++++++----------- 2 files changed, 22 insertions(+), 11 deletions(-) diff --git a/crates/storage/codecs/src/lib.rs b/crates/storage/codecs/src/lib.rs index 8608c5eb8..54ca046cb 100644 --- a/crates/storage/codecs/src/lib.rs +++ b/crates/storage/codecs/src/lib.rs @@ -78,6 +78,21 @@ pub trait Compact: Sized { } } +impl Compact for alloc::string::String { + fn to_compact(&self, buf: &mut B) -> usize + where + B: bytes::BufMut + AsMut<[u8]>, + { + self.as_bytes().to_compact(buf) + } + + fn from_compact(buf: &[u8], len: usize) -> (Self, &[u8]) { + let (vec, buf) = Vec::::from_compact(buf, len); + let string = Self::from_utf8(vec).unwrap(); // Safe conversion + (string, buf) + } +} + impl Compact for &T { fn to_compact(&self, buf: &mut B) -> usize where diff --git a/crates/storage/db-models/src/client_version.rs b/crates/storage/db-models/src/client_version.rs index de074ac88..a28e7385f 100644 --- a/crates/storage/db-models/src/client_version.rs +++ b/crates/storage/db-models/src/client_version.rs @@ -28,20 +28,16 @@ impl Compact for ClientVersion { where B: bytes::BufMut + AsMut<[u8]>, { - self.version.as_bytes().to_compact(buf); - self.git_sha.as_bytes().to_compact(buf); - self.build_timestamp.as_bytes().to_compact(buf) + self.version.to_compact(buf); + self.git_sha.to_compact(buf); + self.build_timestamp.to_compact(buf) } fn from_compact(buf: &[u8], len: usize) -> (Self, &[u8]) { - let (version, buf) = Vec::::from_compact(buf, len); - let (git_sha, buf) = Vec::::from_compact(buf, len); - let (build_timestamp, buf) = Vec::::from_compact(buf, len); - let client_version = Self { - 
version: unsafe { String::from_utf8_unchecked(version) }, - git_sha: unsafe { String::from_utf8_unchecked(git_sha) }, - build_timestamp: unsafe { String::from_utf8_unchecked(build_timestamp) }, - }; + let (version, buf) = String::from_compact(buf, len); + let (git_sha, buf) = String::from_compact(buf, len); + let (build_timestamp, buf) = String::from_compact(buf, len); + let client_version = Self { version, git_sha, build_timestamp }; (client_version, buf) } } From b57cbfd21ba4a39d31c64cf4c7bbc5449a86afb5 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 17 Oct 2024 22:29:31 +0200 Subject: [PATCH 121/425] primitives: use alloy `DEV_GENESIS_HASH` constant (#11849) --- Cargo.lock | 1 + crates/chainspec/src/spec.rs | 4 ++-- crates/optimism/chainspec/Cargo.toml | 1 + crates/optimism/chainspec/src/dev.rs | 2 +- crates/primitives-traits/src/constants/mod.rs | 4 ---- crates/primitives/src/lib.rs | 2 +- 6 files changed, 6 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 600d61e97..1d9f74a20 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8032,6 +8032,7 @@ name = "reth-optimism-chainspec" version = "1.1.0" dependencies = [ "alloy-chains", + "alloy-consensus", "alloy-genesis", "alloy-primitives", "derive_more 1.0.0", diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index deaca188a..adfee564d 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -7,7 +7,7 @@ use alloy_primitives::{address, b256, Address, BlockNumber, B256, U256}; use alloy_trie::EMPTY_ROOT_HASH; use derive_more::From; -use alloy_consensus::constants::MAINNET_GENESIS_HASH; +use alloy_consensus::constants::{DEV_GENESIS_HASH, MAINNET_GENESIS_HASH}; use reth_ethereum_forks::{ ChainHardforks, DisplayHardforks, EthereumHardfork, EthereumHardforks, ForkCondition, ForkFilter, ForkFilterKey, ForkHash, ForkId, Hardfork, Hardforks, Head, DEV_HARDFORKS, @@ -18,7 +18,7 @@ use reth_network_peers::{ }; use reth_primitives_traits::{ constants::{ - DEV_GENESIS_HASH, EIP1559_INITIAL_BASE_FEE, EMPTY_WITHDRAWALS, ETHEREUM_BLOCK_GAS_LIMIT, + EIP1559_INITIAL_BASE_FEE, EMPTY_WITHDRAWALS, ETHEREUM_BLOCK_GAS_LIMIT, HOLESKY_GENESIS_HASH, SEPOLIA_GENESIS_HASH, }, Header, SealedHeader, diff --git a/crates/optimism/chainspec/Cargo.toml b/crates/optimism/chainspec/Cargo.toml index c9f951c8d..efc9bf0b0 100644 --- a/crates/optimism/chainspec/Cargo.toml +++ b/crates/optimism/chainspec/Cargo.toml @@ -25,6 +25,7 @@ reth-optimism-forks.workspace = true alloy-chains.workspace = true alloy-genesis.workspace = true alloy-primitives.workspace = true +alloy-consensus.workspace = true # op op-alloy-rpc-types.workspace = true diff --git a/crates/optimism/chainspec/src/dev.rs b/crates/optimism/chainspec/src/dev.rs index cb8163dfc..eae25f73e 100644 --- a/crates/optimism/chainspec/src/dev.rs +++ b/crates/optimism/chainspec/src/dev.rs @@ -3,10 +3,10 @@ use alloc::sync::Arc; use alloy_chains::Chain; +use alloy_consensus::constants::DEV_GENESIS_HASH; use alloy_primitives::U256; use reth_chainspec::{once_cell_set, BaseFeeParams, BaseFeeParamsKind, ChainSpec}; use reth_optimism_forks::DEV_HARDFORKS; -use reth_primitives_traits::constants::DEV_GENESIS_HASH; use crate::{LazyLock, OpChainSpec}; diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index 890287f8b..eade39989 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -106,10 +106,6 @@ pub 
const SEPOLIA_GENESIS_HASH: B256 = pub const HOLESKY_GENESIS_HASH: B256 = b256!("b5f7f912443c940f21fd611f12828d75b534364ed9e95ca4e307729a4661bde4"); -/// Testnet genesis hash: `0x2f980576711e3617a5e4d83dd539548ec0f7792007d505a3d2e9674833af2d7c` -pub const DEV_GENESIS_HASH: B256 = - b256!("2f980576711e3617a5e4d83dd539548ec0f7792007d505a3d2e9674833af2d7c"); - /// Keccak256 over empty array: `0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470` pub const KECCAK_EMPTY: B256 = b256!("c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"); diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 4a8d812ef..a39139349 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -39,7 +39,7 @@ pub use block::{ }; #[cfg(feature = "reth-codec")] pub use compression::*; -pub use constants::{DEV_GENESIS_HASH, HOLESKY_GENESIS_HASH, KECCAK_EMPTY, SEPOLIA_GENESIS_HASH}; +pub use constants::{HOLESKY_GENESIS_HASH, KECCAK_EMPTY, SEPOLIA_GENESIS_HASH}; pub use receipt::{ gas_spent_by_transactions, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, Receipts, }; From bc43613be35f316304f7ce4a2225855ac26b5923 Mon Sep 17 00:00:00 2001 From: DaniPopes <57450786+DaniPopes@users.noreply.github.com> Date: Thu, 17 Oct 2024 22:33:37 +0200 Subject: [PATCH 122/425] chore: disable SC2034 in check_wasm.sh (#11854) --- .github/assets/check_wasm.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/assets/check_wasm.sh b/.github/assets/check_wasm.sh index 1b1c0641f..52d700941 100755 --- a/.github/assets/check_wasm.sh +++ b/.github/assets/check_wasm.sh @@ -3,7 +3,10 @@ set +e # Disable immediate exit on error # Array of crates to compile crates=($(cargo metadata --format-version=1 --no-deps | jq -r '.packages[].name' | grep '^reth' | sort)) + # Array of crates to exclude +# Used with the `contains` function. +# shellcheck disable=SC2034 exclude_crates=( # The following are not working yet, but known to be fixable reth-exex-types # https://github.com/paradigmxyz/reth/issues/9946 From 96ad6d5bd525c821ea95122290c76cf133f5d36d Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 18 Oct 2024 00:40:15 +0200 Subject: [PATCH 123/425] chore: rm unused reth-revm c-kzg feature (#11860) --- crates/revm/Cargo.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/revm/Cargo.toml b/crates/revm/Cargo.toml index 7ffb06ce9..b2d3bccde 100644 --- a/crates/revm/Cargo.toml +++ b/crates/revm/Cargo.toml @@ -32,8 +32,7 @@ reth-ethereum-forks.workspace = true alloy-primitives.workspace = true [features] -default = ["std", "c-kzg"] +default = ["std"] std = [] -c-kzg = ["revm/c-kzg"] test-utils = ["dep:reth-trie"] serde = ["revm/serde"] From f3c0dda0d34960b6c28e27b41bdc2c86b712c09e Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 18 Oct 2024 00:40:33 +0200 Subject: [PATCH 124/425] perf: use existing block hash functions (#11858) --- crates/storage/storage-api/src/block_id.rs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/crates/storage/storage-api/src/block_id.rs b/crates/storage/storage-api/src/block_id.rs index 3d9df2e32..55cd6ab1c 100644 --- a/crates/storage/storage-api/src/block_id.rs +++ b/crates/storage/storage-api/src/block_id.rs @@ -82,11 +82,10 @@ pub trait BlockIdReader: BlockNumReader + Send + Sync { BlockNumberOrTag::Pending => self .pending_block_num_hash() .map(|res_opt| res_opt.map(|num_hash| num_hash.hash)), - _ => self - .convert_block_number(num)? 
- .map(|num| self.block_hash(num)) - .transpose() - .map(|maybe_hash| maybe_hash.flatten()), + BlockNumberOrTag::Finalized => self.finalized_block_hash(), + BlockNumberOrTag::Safe => self.safe_block_hash(), + BlockNumberOrTag::Earliest => self.block_hash(0), + BlockNumberOrTag::Number(num) => self.block_hash(num), }, } } From 62e7625b16549530beafef1e0c6a6d9ed2bde649 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Fri, 18 Oct 2024 00:41:59 +0200 Subject: [PATCH 125/425] primitives: use alloy `*_TX_TYPE_ID` constants (#11853) --- crates/primitives/src/lib.rs | 2 -- crates/primitives/src/receipt.rs | 8 ++++---- crates/primitives/src/transaction/mod.rs | 7 +++---- crates/primitives/src/transaction/pooled.rs | 3 ++- crates/primitives/src/transaction/sidecar.rs | 6 ++++-- crates/primitives/src/transaction/tx_type.rs | 20 ++++--------------- crates/transaction-pool/src/config.rs | 6 ++---- crates/transaction-pool/src/pool/txpool.rs | 10 +++++----- .../transaction-pool/src/test_utils/mock.rs | 6 ++++-- crates/transaction-pool/src/traits.rs | 6 ++++-- crates/transaction-pool/src/validate/eth.rs | 6 ++++-- 11 files changed, 36 insertions(+), 44 deletions(-) diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index a39139349..796090d79 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -61,8 +61,6 @@ pub use transaction::{ util::secp256k1::{public_key_to_address, recover_signer_unchecked, sign_message}, InvalidTransactionError, Signature, Transaction, TransactionMeta, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, TxHashOrNumber, TxType, - EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, - LEGACY_TX_TYPE_ID, }; // Re-exports diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index cfd831ed0..9006a067e 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -1,10 +1,10 @@ #[cfg(feature = "reth-codec")] use crate::compression::{RECEIPT_COMPRESSOR, RECEIPT_DECOMPRESSOR}; -use crate::{ - logs_bloom, TxType, EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, - EIP7702_TX_TYPE_ID, -}; +use crate::{logs_bloom, TxType}; use alloc::{vec, vec::Vec}; +use alloy_consensus::constants::{ + EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, +}; use alloy_primitives::{Bloom, Bytes, Log, B256}; use alloy_rlp::{length_of_length, Decodable, Encodable, RlpDecodable, RlpEncodable}; use bytes::{Buf, BufMut}; diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 10fddcb4c..b7eeeadc8 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1,6 +1,8 @@ //! Transaction types. 
use crate::BlockHashOrNumber; +#[cfg(any(test, feature = "reth-codec"))] +use alloy_consensus::constants::{EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID}; use alloy_consensus::{ SignableTransaction, Transaction as _, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy, }; @@ -37,10 +39,7 @@ pub use compat::FillTxEnv; pub use signature::{ extract_chain_id, legacy_parity, recover_signer, recover_signer_unchecked, Signature, }; -pub use tx_type::{ - TxType, EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, - LEGACY_TX_TYPE_ID, -}; +pub use tx_type::TxType; pub use variant::TransactionSignedVariant; pub(crate) mod access_list; diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index ec49f44a6..32d4da659 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -8,9 +8,10 @@ use super::{ }; use crate::{ BlobTransaction, BlobTransactionSidecar, Signature, Transaction, TransactionSigned, - TransactionSignedEcRecovered, EIP4844_TX_TYPE_ID, + TransactionSignedEcRecovered, }; use alloy_consensus::{ + constants::EIP4844_TX_TYPE_ID, transaction::{TxEip1559, TxEip2930, TxEip4844, TxLegacy}, SignableTransaction, TxEip4844WithSidecar, }; diff --git a/crates/primitives/src/transaction/sidecar.rs b/crates/primitives/src/transaction/sidecar.rs index 87b8c1fbf..edc1427d1 100644 --- a/crates/primitives/src/transaction/sidecar.rs +++ b/crates/primitives/src/transaction/sidecar.rs @@ -1,7 +1,9 @@ #![cfg_attr(docsrs, doc(cfg(feature = "c-kzg")))] -use crate::{Signature, Transaction, TransactionSigned, EIP4844_TX_TYPE_ID}; -use alloy_consensus::{transaction::TxEip4844, TxEip4844WithSidecar}; +use crate::{Signature, Transaction, TransactionSigned}; +use alloy_consensus::{ + constants::EIP4844_TX_TYPE_ID, transaction::TxEip4844, TxEip4844WithSidecar, +}; use alloy_primitives::{keccak256, TxHash}; use alloy_rlp::{Decodable, Error as RlpError, Header}; use serde::{Deserialize, Serialize}; diff --git a/crates/primitives/src/transaction/tx_type.rs b/crates/primitives/src/transaction/tx_type.rs index c55e0d3c6..b6e1ecbf2 100644 --- a/crates/primitives/src/transaction/tx_type.rs +++ b/crates/primitives/src/transaction/tx_type.rs @@ -1,3 +1,7 @@ +use alloy_consensus::constants::{ + EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, + LEGACY_TX_TYPE_ID, +}; use alloy_primitives::{U64, U8}; use alloy_rlp::{Decodable, Encodable}; use serde::{Deserialize, Serialize}; @@ -23,22 +27,6 @@ pub(crate) const COMPACT_IDENTIFIER_EIP1559: usize = 2; #[cfg(any(test, feature = "reth-codec"))] pub(crate) const COMPACT_EXTENDED_IDENTIFIER_FLAG: usize = 3; -/// Identifier for legacy transaction, however [`TxLegacy`](alloy_consensus::TxLegacy) this is -/// technically not typed. -pub const LEGACY_TX_TYPE_ID: u8 = 0; - -/// Identifier for [`TxEip2930`](alloy_consensus::TxEip2930) transaction. -pub const EIP2930_TX_TYPE_ID: u8 = 1; - -/// Identifier for [`TxEip1559`](alloy_consensus::TxEip1559) transaction. -pub const EIP1559_TX_TYPE_ID: u8 = 2; - -/// Identifier for [`TxEip4844`](alloy_consensus::TxEip4844) transaction. -pub const EIP4844_TX_TYPE_ID: u8 = 3; - -/// Identifier for [`TxEip7702`](alloy_consensus::TxEip7702) transaction. -pub const EIP7702_TX_TYPE_ID: u8 = 4; - /// Identifier for [`TxDeposit`](op_alloy_consensus::TxDeposit) transaction. 
#[cfg(feature = "optimism")] pub const DEPOSIT_TX_TYPE_ID: u8 = 126; diff --git a/crates/transaction-pool/src/config.rs b/crates/transaction-pool/src/config.rs index 1b4b010a8..30703f888 100644 --- a/crates/transaction-pool/src/config.rs +++ b/crates/transaction-pool/src/config.rs @@ -2,11 +2,9 @@ use crate::{ pool::{NEW_TX_LISTENER_BUFFER_SIZE, PENDING_TX_LISTENER_BUFFER_SIZE}, PoolSize, TransactionOrigin, }; +use alloy_consensus::constants::EIP4844_TX_TYPE_ID; use alloy_primitives::Address; -use reth_primitives::{ - constants::{ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE}, - EIP4844_TX_TYPE_ID, -}; +use reth_primitives::constants::{ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE}; use std::{collections::HashSet, ops::Mul}; /// Guarantees max transactions for one sender, compatible with geth/erigon diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index 9d284392d..a85a9a185 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -18,14 +18,14 @@ use crate::{ PoolConfig, PoolResult, PoolTransaction, PriceBumpConfig, TransactionOrdering, ValidPoolTransaction, U256, }; -use alloy_primitives::{Address, TxHash, B256}; -use reth_primitives::{ - constants::{ - eip4844::BLOB_TX_MIN_BLOB_GASPRICE, ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE, - }, +use alloy_consensus::constants::{ EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, LEGACY_TX_TYPE_ID, }; +use alloy_primitives::{Address, TxHash, B256}; +use reth_primitives::constants::{ + eip4844::BLOB_TX_MIN_BLOB_GASPRICE, ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE, +}; use rustc_hash::FxHashMap; use smallvec::SmallVec; use std::{ diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index e2b5f373e..474cf5cc8 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -7,7 +7,10 @@ use crate::{ CoinbaseTipOrdering, EthBlobTransactionSidecar, EthPoolTransaction, PoolTransaction, ValidPoolTransaction, }; -use alloy_consensus::{TxEip1559, TxEip2930, TxEip4844, TxLegacy}; +use alloy_consensus::{ + constants::{EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, LEGACY_TX_TYPE_ID}, + TxEip1559, TxEip2930, TxEip4844, TxLegacy, +}; use alloy_eips::eip2930::AccessList; use alloy_primitives::{Address, Bytes, ChainId, TxHash, TxKind, B256, U256}; use paste::paste; @@ -20,7 +23,6 @@ use reth_primitives::{ transaction::TryFromRecoveredTransactionError, BlobTransactionSidecar, BlobTransactionValidationError, PooledTransactionsElementEcRecovered, Signature, Transaction, TransactionSigned, TransactionSignedEcRecovered, TxType, - EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, LEGACY_TX_TYPE_ID, }; use std::{ops::Range, sync::Arc, time::Instant, vec::IntoIter}; diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index ee4a5ada3..fcfdae4ed 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -7,7 +7,10 @@ use crate::{ validate::ValidPoolTransaction, AllTransactionsEvents, }; -use alloy_consensus::Transaction as _; +use alloy_consensus::{ + constants::{EIP1559_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID}, + Transaction as _, +}; use alloy_eips::{eip2718::Encodable2718, eip2930::AccessList, eip4844::BlobAndProofV1}; use alloy_primitives::{Address, TxHash, TxKind, B256, U256}; use futures_util::{ready, 
Stream}; @@ -17,7 +20,6 @@ use reth_primitives::{ kzg::KzgSettings, transaction::TryFromRecoveredTransactionError, BlobTransactionSidecar, BlobTransactionValidationError, PooledTransactionsElement, PooledTransactionsElementEcRecovered, SealedBlock, Transaction, TransactionSignedEcRecovered, - EIP1559_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, }; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; diff --git a/crates/transaction-pool/src/validate/eth.rs b/crates/transaction-pool/src/validate/eth.rs index 2594e569a..22744c58a 100644 --- a/crates/transaction-pool/src/validate/eth.rs +++ b/crates/transaction-pool/src/validate/eth.rs @@ -11,11 +11,13 @@ use crate::{ EthBlobTransactionSidecar, EthPoolTransaction, LocalTransactionConfig, PoolTransaction, TransactionValidationOutcome, TransactionValidationTaskExecutor, TransactionValidator, }; +use alloy_consensus::constants::{ + EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, + LEGACY_TX_TYPE_ID, +}; use reth_chainspec::{ChainSpec, EthereumHardforks}; use reth_primitives::{ constants::eip4844::MAX_BLOBS_PER_BLOCK, GotExpected, InvalidTransactionError, SealedBlock, - EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, - LEGACY_TX_TYPE_ID, }; use reth_storage_api::{AccountReader, StateProviderFactory}; use reth_tasks::TaskSpawner; From dfcaad4608797ed05a8b896fcfad83a83a6292af Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 18 Oct 2024 00:42:24 +0200 Subject: [PATCH 126/425] chore: remove some cfg imports (#11864) --- crates/primitives/src/block.rs | 4 +--- crates/primitives/src/receipt.rs | 6 +++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index de0817fb0..b7e11f7b9 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -10,8 +10,6 @@ use alloy_primitives::{Address, Bytes, Sealable, B256}; use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable}; use derive_more::{Deref, DerefMut}; #[cfg(any(test, feature = "arbitrary"))] -use proptest::prelude::prop_compose; -#[cfg(any(test, feature = "arbitrary"))] pub use reth_primitives_traits::test_utils::{generate_valid_header, valid_header_strategy}; use reth_primitives_traits::Requests; use serde::{Deserialize, Serialize}; @@ -20,7 +18,7 @@ use serde::{Deserialize, Serialize}; // a block with `None` withdrawals and `Some` requests, in which case we end up trying to decode the // requests as withdrawals #[cfg(any(feature = "arbitrary", test))] -prop_compose! { +proptest::prelude::prop_compose! { pub fn empty_requests_strategy()(_ in 0..1) -> Option { None } diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index 9006a067e..b117f8d96 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -11,14 +11,14 @@ use bytes::{Buf, BufMut}; use core::{cmp::Ordering, ops::Deref}; use derive_more::{DerefMut, From, IntoIterator}; #[cfg(feature = "reth-codec")] -use reth_codecs::{Compact, CompactZstd}; +use reth_codecs::Compact; use serde::{Deserialize, Serialize}; /// Receipt containing result of transaction execution. 
#[derive( Clone, Debug, PartialEq, Eq, Default, RlpEncodable, RlpDecodable, Serialize, Deserialize, )] -#[cfg_attr(any(test, feature = "reth-codec"), derive(CompactZstd))] +#[cfg_attr(any(test, feature = "reth-codec"), derive(reth_codecs::CompactZstd))] #[cfg_attr(any(test, feature = "reth-codec"), reth_codecs::add_arbitrary_tests)] #[rlp(trailing)] pub struct Receipt { @@ -130,7 +130,7 @@ impl From for ReceiptWithBloom { /// [`Receipt`] with calculated bloom filter. #[derive(Clone, Debug, PartialEq, Eq, Default, Serialize, Deserialize)] #[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))] -#[cfg_attr(any(test, feature = "reth-codec"), derive(Compact))] +#[cfg_attr(any(test, feature = "reth-codec"), derive(reth_codecs::Compact))] #[cfg_attr(any(test, feature = "reth-codec"), reth_codecs::add_arbitrary_tests(compact))] pub struct ReceiptWithBloom { /// Bloom filter build from logs. From 0c70f6bd3519d5c5bce85481bea728df89fdabf3 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Fri, 18 Oct 2024 10:25:22 +0200 Subject: [PATCH 127/425] primitives: use alloy `KECCAK_EMPTY` constant (#11851) --- Cargo.lock | 1 + crates/ethereum/evm/src/lib.rs | 3 ++- crates/optimism/evm/src/lib.rs | 3 ++- crates/primitives-traits/src/constants/mod.rs | 4 ---- crates/primitives/src/lib.rs | 2 +- crates/rpc/rpc-eth-api/src/helpers/state.rs | 3 ++- crates/storage/storage-api/Cargo.toml | 1 + crates/storage/storage-api/src/state.rs | 3 ++- crates/trie/common/src/proofs.rs | 5 +++-- 9 files changed, 14 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1d9f74a20..2aa5675cd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9002,6 +9002,7 @@ dependencies = [ name = "reth-storage-api" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "auto_impl", diff --git a/crates/ethereum/evm/src/lib.rs b/crates/ethereum/evm/src/lib.rs index ed18a24fb..ac9bb5a0b 100644 --- a/crates/ethereum/evm/src/lib.rs +++ b/crates/ethereum/evm/src/lib.rs @@ -194,13 +194,14 @@ impl ConfigureEvm for EthEvmConfig { #[cfg(test)] mod tests { use super::*; + use alloy_consensus::constants::KECCAK_EMPTY; use alloy_genesis::Genesis; use alloy_primitives::{B256, U256}; use reth_chainspec::{Chain, ChainSpec, MAINNET}; use reth_evm::execute::ProviderError; use reth_primitives::{ revm_primitives::{BlockEnv, CfgEnv, SpecId}, - Header, KECCAK_EMPTY, + Header, }; use reth_revm::{ db::{CacheDB, EmptyDBTyped}, diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index eb067da32..3eda2878c 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -208,6 +208,7 @@ impl ConfigureEvm for OptimismEvmConfig { #[cfg(test)] mod tests { use super::*; + use alloy_consensus::constants::KECCAK_EMPTY; use alloy_genesis::Genesis; use alloy_primitives::{B256, U256}; use reth_chainspec::ChainSpec; @@ -216,7 +217,7 @@ mod tests { use reth_optimism_chainspec::BASE_MAINNET; use reth_primitives::{ revm_primitives::{BlockEnv, CfgEnv, SpecId}, - Header, Receipt, Receipts, SealedBlockWithSenders, TxType, KECCAK_EMPTY, + Header, Receipt, Receipts, SealedBlockWithSenders, TxType, }; use reth_revm::{ db::{CacheDB, EmptyDBTyped}, diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index eade39989..33101b2c0 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -106,10 +106,6 @@ pub const 
SEPOLIA_GENESIS_HASH: B256 = pub const HOLESKY_GENESIS_HASH: B256 = b256!("b5f7f912443c940f21fd611f12828d75b534364ed9e95ca4e307729a4661bde4"); -/// Keccak256 over empty array: `0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470` -pub const KECCAK_EMPTY: B256 = - b256!("c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"); - /// From address from Optimism system txs: `0xdeaddeaddeaddeaddeaddeaddeaddeaddead0001` pub const OP_SYSTEM_TX_FROM_ADDR: Address = address!("deaddeaddeaddeaddeaddeaddeaddeaddead0001"); diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 796090d79..a9e8c0820 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -39,7 +39,7 @@ pub use block::{ }; #[cfg(feature = "reth-codec")] pub use compression::*; -pub use constants::{HOLESKY_GENESIS_HASH, KECCAK_EMPTY, SEPOLIA_GENESIS_HASH}; +pub use constants::{HOLESKY_GENESIS_HASH, SEPOLIA_GENESIS_HASH}; pub use receipt::{ gas_spent_by_transactions, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, Receipts, }; diff --git a/crates/rpc/rpc-eth-api/src/helpers/state.rs b/crates/rpc/rpc-eth-api/src/helpers/state.rs index 7b11ce6af..f2fc13f5d 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/state.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/state.rs @@ -1,13 +1,14 @@ //! Loads a pending block from database. Helper trait for `eth_` block, transaction, call and trace //! RPC methods. +use alloy_consensus::constants::KECCAK_EMPTY; use alloy_primitives::{Address, Bytes, B256, U256}; use alloy_rpc_types::{serde_helpers::JsonStorageKey, Account, EIP1186AccountProofResponse}; use futures::Future; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_errors::RethError; use reth_evm::ConfigureEvmEnv; -use reth_primitives::{BlockId, Header, KECCAK_EMPTY}; +use reth_primitives::{BlockId, Header}; use reth_provider::{ BlockIdReader, BlockNumReader, ChainSpecProvider, StateProvider, StateProviderBox, StateProviderFactory, diff --git a/crates/storage/storage-api/Cargo.toml b/crates/storage/storage-api/Cargo.toml index 51d8eabfc..0ae8b2845 100644 --- a/crates/storage/storage-api/Cargo.toml +++ b/crates/storage/storage-api/Cargo.toml @@ -26,5 +26,6 @@ reth-trie.workspace = true # ethereum alloy-eips.workspace = true alloy-primitives.workspace = true +alloy-consensus.workspace = true auto_impl.workspace = true diff --git a/crates/storage/storage-api/src/state.rs b/crates/storage/storage-api/src/state.rs index 9a3b855ff..d37940f04 100644 --- a/crates/storage/storage-api/src/state.rs +++ b/crates/storage/storage-api/src/state.rs @@ -2,11 +2,12 @@ use super::{ AccountReader, BlockHashReader, BlockIdReader, StateProofProvider, StateRootProvider, StorageRootProvider, }; +use alloy_consensus::constants::KECCAK_EMPTY; use alloy_eips::{BlockId, BlockNumHash, BlockNumberOrTag}; use alloy_primitives::{Address, BlockHash, BlockNumber, StorageKey, StorageValue, B256, U256}; use auto_impl::auto_impl; use reth_execution_types::ExecutionOutcome; -use reth_primitives::{Bytecode, KECCAK_EMPTY}; +use reth_primitives::Bytecode; use reth_storage_errors::provider::{ProviderError, ProviderResult}; /// Type alias of boxed [`StateProvider`]. diff --git a/crates/trie/common/src/proofs.rs b/crates/trie/common/src/proofs.rs index 8aca67f8d..a94b2b96f 100644 --- a/crates/trie/common/src/proofs.rs +++ b/crates/trie/common/src/proofs.rs @@ -1,6 +1,7 @@ //! Merkle trie proofs. 
use crate::{Nibbles, TrieAccount}; +use alloy_consensus::constants::KECCAK_EMPTY; use alloy_primitives::{keccak256, Address, Bytes, B256, U256}; use alloy_rlp::{encode_fixed_size, Decodable, EMPTY_STRING_CODE}; use alloy_trie::{ @@ -9,13 +10,13 @@ use alloy_trie::{ EMPTY_ROOT_HASH, }; use itertools::Itertools; -use reth_primitives_traits::{constants::KECCAK_EMPTY, Account}; +use reth_primitives_traits::Account; use serde::{Deserialize, Serialize}; use std::collections::HashMap; /// The state multiproof of target accounts and multiproofs of their storage tries. /// Multiproof is effectively a state subtrie that only contains the nodes -/// in the paths of target accounts. +/// in the paths of target accounts. #[derive(Clone, Default, Debug)] pub struct MultiProof { /// State trie multiproof for requested accounts. From 5859f93c56352a619aaae6598ee91dfcc11cd38d Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Fri, 18 Oct 2024 10:53:21 +0200 Subject: [PATCH 128/425] primitives: use alloy `EMPTY_` constants (#11852) --- Cargo.lock | 1 + crates/chainspec/src/spec.rs | 5 +++-- crates/payload/basic/Cargo.toml | 1 + crates/payload/basic/src/lib.rs | 3 ++- crates/primitives-traits/src/constants/mod.rs | 10 ---------- crates/primitives/src/alloy_compat.rs | 8 +++++--- 6 files changed, 12 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2aa5675cd..2f8a10571 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6340,6 +6340,7 @@ dependencies = [ name = "reth-basic-payload-builder" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-primitives", "alloy-rlp", "futures-core", diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index adfee564d..59e1a5ce1 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -2,6 +2,7 @@ pub use alloy_eips::eip1559::BaseFeeParams; use alloc::{boxed::Box, sync::Arc, vec::Vec}; use alloy_chains::{Chain, NamedChain}; +use alloy_consensus::constants::EMPTY_WITHDRAWALS; use alloy_genesis::Genesis; use alloy_primitives::{address, b256, Address, BlockNumber, B256, U256}; use alloy_trie::EMPTY_ROOT_HASH; @@ -18,8 +19,8 @@ use reth_network_peers::{ }; use reth_primitives_traits::{ constants::{ - EIP1559_INITIAL_BASE_FEE, EMPTY_WITHDRAWALS, ETHEREUM_BLOCK_GAS_LIMIT, - HOLESKY_GENESIS_HASH, SEPOLIA_GENESIS_HASH, + EIP1559_INITIAL_BASE_FEE, ETHEREUM_BLOCK_GAS_LIMIT, HOLESKY_GENESIS_HASH, + SEPOLIA_GENESIS_HASH, }, Header, SealedHeader, }; diff --git a/crates/payload/basic/Cargo.toml b/crates/payload/basic/Cargo.toml index 939eb5b54..f201df0c1 100644 --- a/crates/payload/basic/Cargo.toml +++ b/crates/payload/basic/Cargo.toml @@ -26,6 +26,7 @@ reth-tasks.workspace = true alloy-rlp.workspace = true alloy-primitives.workspace = true revm.workspace = true +alloy-consensus.workspace = true # async tokio = { workspace = true, features = ["sync", "time"] } diff --git a/crates/payload/basic/src/lib.rs b/crates/payload/basic/src/lib.rs index f9487ec78..835f20f3e 100644 --- a/crates/payload/basic/src/lib.rs +++ b/crates/payload/basic/src/lib.rs @@ -9,6 +9,7 @@ #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] use crate::metrics::PayloadBuilderMetrics; +use alloy_consensus::constants::EMPTY_WITHDRAWALS; use alloy_primitives::{Bytes, B256, U256}; use futures_core::ready; use futures_util::FutureExt; @@ -18,7 +19,7 @@ use reth_payload_builder::{ }; use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes, PayloadBuilderError}; use reth_primitives::{ - 
constants::{EMPTY_WITHDRAWALS, RETH_CLIENT_VERSION, SLOT_DURATION}, + constants::{RETH_CLIENT_VERSION, SLOT_DURATION}, proofs, BlockNumberOrTag, SealedBlock, Withdrawals, }; use reth_provider::{ diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index 33101b2c0..a4918137e 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -1,6 +1,5 @@ //! Ethereum protocol-related constants -use alloy_consensus::EMPTY_ROOT_HASH; use alloy_primitives::{address, b256, Address, B256, U256}; use core::time::Duration; @@ -112,15 +111,6 @@ pub const OP_SYSTEM_TX_FROM_ADDR: Address = address!("deaddeaddeaddeaddeaddeadde /// To address from Optimism system txs: `0x4200000000000000000000000000000000000015` pub const OP_SYSTEM_TX_TO_ADDR: Address = address!("4200000000000000000000000000000000000015"); -/// Transactions root of empty receipts set. -pub const EMPTY_RECEIPTS: B256 = EMPTY_ROOT_HASH; - -/// Transactions root of empty transactions set. -pub const EMPTY_TRANSACTIONS: B256 = EMPTY_ROOT_HASH; - -/// Withdrawals root of empty withdrawals set. -pub const EMPTY_WITHDRAWALS: B256 = EMPTY_ROOT_HASH; - /// The number of blocks to unwind during a reorg that already became a part of canonical chain. /// /// In reality, the node can end up in this particular situation very rarely. It would happen only diff --git a/crates/primitives/src/alloy_compat.rs b/crates/primitives/src/alloy_compat.rs index c9bdfad89..0ac1458c5 100644 --- a/crates/primitives/src/alloy_compat.rs +++ b/crates/primitives/src/alloy_compat.rs @@ -1,11 +1,13 @@ //! Common conversions from alloy types. use crate::{ - constants::EMPTY_TRANSACTIONS, transaction::extract_chain_id, Block, BlockBody, Signature, - Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, TxType, + transaction::extract_chain_id, Block, BlockBody, Signature, Transaction, TransactionSigned, + TransactionSignedEcRecovered, TransactionSignedNoHash, TxType, }; use alloc::{string::ToString, vec::Vec}; -use alloy_consensus::{Transaction as _, TxEip1559, TxEip2930, TxEip4844, TxLegacy}; +use alloy_consensus::{ + constants::EMPTY_TRANSACTIONS, Transaction as _, TxEip1559, TxEip2930, TxEip4844, TxLegacy, +}; use alloy_primitives::{Parity, TxKind}; use alloy_rlp::Error as RlpError; use alloy_serde::WithOtherFields; From cb604826b74859c41e2b39d5bfdcc1d4a12046e2 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Fri, 18 Oct 2024 11:23:25 +0200 Subject: [PATCH 129/425] chore(sdk): Define `NodePrimitives::Block` (#11399) --- Cargo.lock | 2 ++ crates/ethereum/node/src/node.rs | 16 +++++++++++++--- crates/node/types/Cargo.toml | 4 +++- crates/node/types/src/lib.rs | 13 +++++++++---- crates/optimism/node/src/node.rs | 14 +++++++++++--- 5 files changed, 38 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2f8a10571..c8b4f7632 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8026,6 +8026,8 @@ dependencies = [ "reth-chainspec", "reth-db-api", "reth-engine-primitives", + "reth-primitives", + "reth-primitives-traits", ] [[package]] diff --git a/crates/ethereum/node/src/node.rs b/crates/ethereum/node/src/node.rs index 82f313fbb..a890810b0 100644 --- a/crates/ethereum/node/src/node.rs +++ b/crates/ethereum/node/src/node.rs @@ -11,7 +11,9 @@ use reth_ethereum_engine_primitives::{ }; use reth_evm_ethereum::execute::EthExecutorProvider; use reth_network::NetworkHandle; -use reth_node_api::{ConfigureEvm, EngineValidator, 
FullNodeComponents, NodeTypesWithDB}; +use reth_node_api::{ + ConfigureEvm, EngineValidator, FullNodeComponents, NodePrimitives, NodeTypesWithDB, +}; use reth_node_builder::{ components::{ ComponentsBuilder, ConsensusBuilder, EngineValidatorBuilder, ExecutorBuilder, @@ -22,7 +24,7 @@ use reth_node_builder::{ BuilderContext, Node, NodeAdapter, NodeComponentsBuilder, PayloadBuilderConfig, PayloadTypes, }; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; -use reth_primitives::Header; +use reth_primitives::{Block, Header}; use reth_provider::CanonStateSubscriptions; use reth_rpc::EthApi; use reth_tracing::tracing::{debug, info}; @@ -33,6 +35,14 @@ use reth_transaction_pool::{ use crate::{EthEngineTypes, EthEvmConfig}; +/// Ethereum primitive types. +#[derive(Debug)] +pub struct EthPrimitives; + +impl NodePrimitives for EthPrimitives { + type Block = Block; +} + /// Type configuration for a regular Ethereum node. #[derive(Debug, Default, Clone, Copy)] #[non_exhaustive] @@ -69,7 +79,7 @@ impl EthereumNode { } impl NodeTypes for EthereumNode { - type Primitives = (); + type Primitives = EthPrimitives; type ChainSpec = ChainSpec; } diff --git a/crates/node/types/Cargo.toml b/crates/node/types/Cargo.toml index f04925d9c..b28dcfba5 100644 --- a/crates/node/types/Cargo.toml +++ b/crates/node/types/Cargo.toml @@ -14,4 +14,6 @@ workspace = true # reth reth-chainspec.workspace = true reth-db-api.workspace = true -reth-engine-primitives.workspace = true \ No newline at end of file +reth-engine-primitives.workspace = true +reth-primitives.workspace = true +reth-primitives-traits.workspace = true \ No newline at end of file diff --git a/crates/node/types/src/lib.rs b/crates/node/types/src/lib.rs index 2c72e02d3..5ba03e679 100644 --- a/crates/node/types/src/lib.rs +++ b/crates/node/types/src/lib.rs @@ -8,6 +8,8 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +pub use reth_primitives_traits::{Block, BlockBody}; + use std::marker::PhantomData; use reth_chainspec::EthChainSpec; @@ -18,11 +20,14 @@ use reth_db_api::{ use reth_engine_primitives::EngineTypes; /// Configures all the primitive types of the node. -// TODO(mattsse): this is currently a placeholder -pub trait NodePrimitives {} +pub trait NodePrimitives { + /// Block primitive. + type Block; +} -// TODO(mattsse): Placeholder -impl NodePrimitives for () {} +impl NodePrimitives for () { + type Block = reth_primitives::Block; +} /// The type that configures the essential types of an Ethereum-like node. 
/// diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 648da85d0..c2576d318 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -6,7 +6,7 @@ use reth_basic_payload_builder::{BasicPayloadJobGenerator, BasicPayloadJobGenera use reth_chainspec::{EthChainSpec, Hardforks}; use reth_evm::ConfigureEvm; use reth_network::{NetworkConfig, NetworkHandle, NetworkManager}; -use reth_node_api::{EngineValidator, FullNodeComponents, NodeAddOns}; +use reth_node_api::{EngineValidator, FullNodeComponents, NodeAddOns, NodePrimitives}; use reth_node_builder::{ components::{ ComponentsBuilder, ConsensusBuilder, EngineValidatorBuilder, ExecutorBuilder, @@ -21,7 +21,7 @@ use reth_optimism_consensus::OptimismBeaconConsensus; use reth_optimism_evm::{OpExecutorProvider, OptimismEvmConfig}; use reth_optimism_rpc::OpEthApi; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; -use reth_primitives::Header; +use reth_primitives::{Block, Header}; use reth_provider::CanonStateSubscriptions; use reth_tracing::tracing::{debug, info}; use reth_transaction_pool::{ @@ -36,6 +36,14 @@ use crate::{ OptimismEngineTypes, }; +/// Optimism primitive types. +#[derive(Debug)] +pub struct OpPrimitives; + +impl NodePrimitives for OpPrimitives { + type Block = Block; +} + /// Type configuration for a regular Optimism node. #[derive(Debug, Default, Clone)] #[non_exhaustive] @@ -113,7 +121,7 @@ where } impl NodeTypes for OptimismNode { - type Primitives = (); + type Primitives = OpPrimitives; type ChainSpec = OpChainSpec; } From cfd066c0714e7b8375e667995a9acc812f7dfd15 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Fri, 18 Oct 2024 11:43:23 +0200 Subject: [PATCH 130/425] chore(sdk): `SignedTransaction` abstraction (#11432) Co-authored-by: Matthias Seitz --- crates/primitives-traits/src/lib.rs | 2 +- crates/primitives-traits/src/mod.rs | 0 .../{transaction.rs => transaction/mod.rs} | 2 + .../src/transaction/signed.rs | 72 +++++++++++++++++++ crates/primitives/src/traits/mod.rs | 9 +++ 5 files changed, 84 insertions(+), 1 deletion(-) create mode 100644 crates/primitives-traits/src/mod.rs rename crates/primitives-traits/src/{transaction.rs => transaction/mod.rs} (97%) create mode 100644 crates/primitives-traits/src/transaction/signed.rs create mode 100644 crates/primitives/src/traits/mod.rs diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index 8c54bd68c..dd10ac9c5 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -24,7 +24,7 @@ pub mod receipt; pub use receipt::Receipt; pub mod transaction; -pub use transaction::Transaction; +pub use transaction::{signed::SignedTransaction, Transaction}; mod integer_list; pub use integer_list::{IntegerList, IntegerListError}; diff --git a/crates/primitives-traits/src/mod.rs b/crates/primitives-traits/src/mod.rs new file mode 100644 index 000000000..e69de29bb diff --git a/crates/primitives-traits/src/transaction.rs b/crates/primitives-traits/src/transaction/mod.rs similarity index 97% rename from crates/primitives-traits/src/transaction.rs rename to crates/primitives-traits/src/transaction/mod.rs index 93645ead8..a306c5f76 100644 --- a/crates/primitives-traits/src/transaction.rs +++ b/crates/primitives-traits/src/transaction/mod.rs @@ -1,5 +1,7 @@ //! 
Transaction abstraction +pub mod signed; + use alloc::fmt; use reth_codecs::Compact; diff --git a/crates/primitives-traits/src/transaction/signed.rs b/crates/primitives-traits/src/transaction/signed.rs new file mode 100644 index 000000000..1bc8308b1 --- /dev/null +++ b/crates/primitives-traits/src/transaction/signed.rs @@ -0,0 +1,72 @@ +//! API of a signed transaction. + +use alloc::fmt; +use core::hash::Hash; + +use alloy_consensus::Transaction; +use alloy_eips::eip2718::{Decodable2718, Encodable2718}; +use alloy_primitives::{keccak256, Address, TxHash, B256}; + +/// A signed transaction. +pub trait SignedTransaction: + fmt::Debug + + Clone + + PartialEq + + Eq + + Hash + + Send + + Sync + + serde::Serialize + + for<'a> serde::Deserialize<'a> + + alloy_rlp::Encodable + + alloy_rlp::Decodable + + Encodable2718 + + Decodable2718 +{ + /// Transaction type that is signed. + type Transaction: Transaction; + + /// Signature type that results from signing transaction. + type Signature; + + /// Returns reference to transaction hash. + fn tx_hash(&self) -> &TxHash; + + /// Returns reference to transaction. + fn transaction(&self) -> &Self::Transaction; + + /// Returns reference to signature. + fn signature(&self) -> &Self::Signature; + + /// Recover signer from signature and hash. + /// + /// Returns `None` if the transaction's signature is invalid following [EIP-2](https://eips.ethereum.org/EIPS/eip-2), see also `reth_primitives::transaction::recover_signer`. + /// + /// Note: + /// + /// This can fail for some early ethereum mainnet transactions pre EIP-2, use + /// [`Self::recover_signer_unchecked`] if you want to recover the signer without ensuring that + /// the signature has a low `s` value. + fn recover_signer(&self) -> Option
<Address>; + + /// Recover signer from signature and hash _without ensuring that the signature has a low `s` + /// value_. + /// + /// Returns `None` if the transaction's signature is invalid, see also + /// `reth_primitives::transaction::recover_signer_unchecked`. + fn recover_signer_unchecked(&self) -> Option<Address>
; + + /// Create a new signed transaction from a transaction and its signature. + /// + /// This will also calculate the transaction hash using its encoding. + fn from_transaction_and_signature( + transaction: Self::Transaction, + signature: Self::Signature, + ) -> Self; + + /// Calculate transaction hash, eip2728 transaction does not contain rlp header and start with + /// tx type. + fn recalculate_hash(&self) -> B256 { + keccak256(self.encoded_2718()) + } +} diff --git a/crates/primitives/src/traits/mod.rs b/crates/primitives/src/traits/mod.rs new file mode 100644 index 000000000..49fb73ea5 --- /dev/null +++ b/crates/primitives/src/traits/mod.rs @@ -0,0 +1,9 @@ +//! Abstractions of primitive data types + +pub mod block; +pub mod transaction; + +pub use block::{body::BlockBody, Block}; +pub use transaction::signed::SignedTransaction; + +pub use alloy_consensus::BlockHeader; From 8d32fd788bfa8c2e041bd1b93ecca37093a09c5a Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 18 Oct 2024 14:45:51 +0400 Subject: [PATCH 131/425] feat: allow awaiting payload in progress (#11823) Co-authored-by: Matthias Seitz --- crates/e2e-test-utils/src/payload.rs | 2 +- crates/engine/local/src/miner.rs | 9 ++- crates/payload/basic/src/lib.rs | 15 +++- crates/payload/builder/src/lib.rs | 6 +- crates/payload/builder/src/noop.rs | 2 +- crates/payload/builder/src/service.rs | 79 +++++++++++----------- crates/payload/builder/src/test_utils.rs | 7 +- crates/payload/builder/src/traits.rs | 20 +++++- crates/payload/primitives/src/lib.rs | 22 ++++++ crates/payload/primitives/src/traits.rs | 40 +++++------ examples/custom-payload-builder/src/job.rs | 6 +- 11 files changed, 127 insertions(+), 81 deletions(-) diff --git a/crates/e2e-test-utils/src/payload.rs b/crates/e2e-test-utils/src/payload.rs index 1f9a89307..946d9af57 100644 --- a/crates/e2e-test-utils/src/payload.rs +++ b/crates/e2e-test-utils/src/payload.rs @@ -28,7 +28,7 @@ impl PayloadTestContext { ) -> eyre::Result { self.timestamp += 1; let attributes: E::PayloadBuilderAttributes = attributes_generator(self.timestamp); - self.payload_builder.new_payload(attributes.clone()).await.unwrap(); + self.payload_builder.send_new_payload(attributes.clone()).await.unwrap()?; Ok(attributes) } diff --git a/crates/engine/local/src/miner.rs b/crates/engine/local/src/miner.rs index f20d70b14..8bcb7083a 100644 --- a/crates/engine/local/src/miner.rs +++ b/crates/engine/local/src/miner.rs @@ -9,7 +9,7 @@ use reth_chainspec::EthereumHardforks; use reth_engine_primitives::EngineTypes; use reth_payload_builder::PayloadBuilderHandle; use reth_payload_primitives::{ - BuiltPayload, PayloadAttributesBuilder, PayloadBuilder, PayloadTypes, + BuiltPayload, PayloadAttributesBuilder, PayloadBuilder, PayloadKind, PayloadTypes, }; use reth_provider::{BlockReader, ChainSpecProvider}; use reth_rpc_types_compat::engine::payload::block_to_payload; @@ -202,10 +202,9 @@ where let payload_id = res.payload_id.ok_or_eyre("No payload id")?; - // wait for some time to let the payload be built - tokio::time::sleep(Duration::from_millis(200)).await; - - let Some(Ok(payload)) = self.payload_builder.best_payload(payload_id).await else { + let Some(Ok(payload)) = + self.payload_builder.resolve_kind(payload_id, PayloadKind::WaitForPending).await + else { eyre::bail!("No payload") }; diff --git a/crates/payload/basic/src/lib.rs b/crates/payload/basic/src/lib.rs index 835f20f3e..7416283c1 100644 --- a/crates/payload/basic/src/lib.rs +++ b/crates/payload/basic/src/lib.rs @@ -17,7 +17,9 @@ use 
reth_chainspec::{ChainSpec, EthereumHardforks}; use reth_payload_builder::{ database::CachedReads, KeepPayloadJobAlive, PayloadId, PayloadJob, PayloadJobGenerator, }; -use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes, PayloadBuilderError}; +use reth_payload_primitives::{ + BuiltPayload, PayloadBuilderAttributes, PayloadBuilderError, PayloadKind, +}; use reth_primitives::{ constants::{RETH_CLIENT_VERSION, SLOT_DURATION}, proofs, BlockNumberOrTag, SealedBlock, Withdrawals, @@ -474,7 +476,10 @@ where Ok(self.config.attributes.clone()) } - fn resolve(&mut self) -> (Self::ResolvePayloadFuture, KeepPayloadJobAlive) { + fn resolve_kind( + &mut self, + kind: PayloadKind, + ) -> (Self::ResolvePayloadFuture, KeepPayloadJobAlive) { let best_payload = self.best_payload.take(); if best_payload.is_none() && self.pending_block.is_none() { @@ -530,7 +535,11 @@ where }; } - let fut = ResolveBestPayload { best_payload, maybe_better, empty_payload }; + let fut = ResolveBestPayload { + best_payload, + maybe_better, + empty_payload: empty_payload.filter(|_| kind != PayloadKind::WaitForPending), + }; (fut, KeepPayloadJobAlive::No) } diff --git a/crates/payload/builder/src/lib.rs b/crates/payload/builder/src/lib.rs index 70b4296da..0df15f5b0 100644 --- a/crates/payload/builder/src/lib.rs +++ b/crates/payload/builder/src/lib.rs @@ -28,7 +28,7 @@ //! use std::pin::Pin; //! use std::task::{Context, Poll}; //! use alloy_primitives::U256; -//! use reth_payload_builder::{EthBuiltPayload, PayloadBuilderError, KeepPayloadJobAlive, EthPayloadBuilderAttributes, PayloadJob, PayloadJobGenerator}; +//! use reth_payload_builder::{EthBuiltPayload, PayloadBuilderError, KeepPayloadJobAlive, EthPayloadBuilderAttributes, PayloadJob, PayloadJobGenerator, PayloadKind}; //! use reth_primitives::{Block, Header}; //! //! /// The generator type that creates new jobs that builds empty blocks. @@ -73,7 +73,7 @@ //! Ok(self.attributes.clone()) //! } //! -//! fn resolve(&mut self) -> (Self::ResolvePayloadFuture, KeepPayloadJobAlive) { +//! fn resolve_kind(&mut self, _kind: PayloadKind) -> (Self::ResolvePayloadFuture, KeepPayloadJobAlive) { //! let payload = self.best_payload(); //! (futures_util::future::ready(payload), KeepPayloadJobAlive::No) //! 
} @@ -112,7 +112,7 @@ pub mod noop; pub mod test_utils; pub use alloy_rpc_types::engine::PayloadId; -pub use reth_payload_primitives::PayloadBuilderError; +pub use reth_payload_primitives::{PayloadBuilderError, PayloadKind}; pub use service::{ PayloadBuilderHandle, PayloadBuilderService, PayloadServiceCommand, PayloadStore, }; diff --git a/crates/payload/builder/src/noop.rs b/crates/payload/builder/src/noop.rs index 06da7dcfa..cbf21f1ce 100644 --- a/crates/payload/builder/src/noop.rs +++ b/crates/payload/builder/src/noop.rs @@ -51,7 +51,7 @@ where } PayloadServiceCommand::BestPayload(_, tx) => tx.send(None).ok(), PayloadServiceCommand::PayloadAttributes(_, tx) => tx.send(None).ok(), - PayloadServiceCommand::Resolve(_, tx) => tx.send(None).ok(), + PayloadServiceCommand::Resolve(_, _, tx) => tx.send(None).ok(), PayloadServiceCommand::Subscribe(_) => None, }; } diff --git a/crates/payload/builder/src/service.rs b/crates/payload/builder/src/service.rs index 1ebf6770c..853c69e90 100644 --- a/crates/payload/builder/src/service.rs +++ b/crates/payload/builder/src/service.rs @@ -11,7 +11,7 @@ use alloy_rpc_types::engine::PayloadId; use futures_util::{future::FutureExt, Stream, StreamExt}; use reth_payload_primitives::{ BuiltPayload, Events, PayloadBuilder, PayloadBuilderAttributes, PayloadBuilderError, - PayloadEvents, PayloadTypes, + PayloadEvents, PayloadKind, PayloadTypes, }; use reth_provider::CanonStateNotification; use std::{ @@ -45,11 +45,20 @@ where /// /// Note: depending on the installed [`PayloadJobGenerator`], this may or may not terminate the /// job, See [`PayloadJob::resolve`]. + pub async fn resolve_kind( + &self, + id: PayloadId, + kind: PayloadKind, + ) -> Option> { + self.inner.resolve_kind(id, kind).await + } + + /// Resolves the payload job and returns the best payload that has been built so far. pub async fn resolve( &self, id: PayloadId, ) -> Option> { - self.inner.resolve(id).await + self.resolve_kind(id, PayloadKind::Earliest).await } /// Returns the best payload for the given identifier. @@ -110,16 +119,13 @@ where type PayloadType = T; type Error = PayloadBuilderError; - async fn send_and_resolve_payload( + fn send_new_payload( &self, attr: ::PayloadBuilderAttributes, - ) -> Result::BuiltPayload>, Self::Error> { - let rx = self.send_new_payload(attr); - let id = rx.await??; - + ) -> Receiver> { let (tx, rx) = oneshot::channel(); - let _ = self.to_service.send(PayloadServiceCommand::Resolve(id, tx)); - rx.await?.ok_or(PayloadBuilderError::MissingPayload) + let _ = self.to_service.send(PayloadServiceCommand::BuildNewPayload(attr, tx)); + rx } /// Note: this does not resolve the job if it's still in progress. @@ -132,21 +138,17 @@ where rx.await.ok()? } - fn send_new_payload( + async fn resolve_kind( &self, - attr: ::PayloadBuilderAttributes, - ) -> Receiver> { + id: PayloadId, + kind: PayloadKind, + ) -> Option> { let (tx, rx) = oneshot::channel(); - let _ = self.to_service.send(PayloadServiceCommand::BuildNewPayload(attr, tx)); - rx - } - - /// Note: if there's already payload in progress with same identifier, it will be returned. - async fn new_payload( - &self, - attr: ::PayloadBuilderAttributes, - ) -> Result { - self.send_new_payload(attr).await? + self.to_service.send(PayloadServiceCommand::Resolve(id, kind, tx)).ok()?; + match rx.await.transpose()? 
{ + Ok(fut) => Some(fut.await), + Err(e) => Some(Err(e.into())), + } } async fn subscribe(&self) -> Result, Self::Error> { @@ -168,19 +170,6 @@ where Self { to_service } } - /// Resolves the payload job and returns the best payload that has been built so far. - /// - /// Note: depending on the installed [`PayloadJobGenerator`], this may or may not terminate the - /// job, See [`PayloadJob::resolve`]. - async fn resolve(&self, id: PayloadId) -> Option> { - let (tx, rx) = oneshot::channel(); - self.to_service.send(PayloadServiceCommand::Resolve(id, tx)).ok()?; - match rx.await.transpose()? { - Ok(fut) => Some(fut.await), - Err(e) => Some(Err(e.into())), - } - } - /// Returns the payload attributes associated with the given identifier. /// /// Note: this returns the attributes of the payload and does not resolve the job. @@ -296,11 +285,15 @@ where /// Returns the best payload for the given identifier that has been built so far and terminates /// the job if requested. - fn resolve(&mut self, id: PayloadId) -> Option> { + fn resolve( + &mut self, + id: PayloadId, + kind: PayloadKind, + ) -> Option> { trace!(%id, "resolving payload job"); let job = self.payload_jobs.iter().position(|(_, job_id)| *job_id == id)?; - let (fut, keep_alive) = self.payload_jobs[job].0.resolve(); + let (fut, keep_alive) = self.payload_jobs[job].0.resolve_kind(kind); if keep_alive == KeepPayloadJobAlive::No { let (_, id) = self.payload_jobs.swap_remove(job); @@ -437,8 +430,8 @@ where let attributes = this.payload_attributes(id); let _ = tx.send(attributes); } - PayloadServiceCommand::Resolve(id, tx) => { - let _ = tx.send(this.resolve(id)); + PayloadServiceCommand::Resolve(id, strategy, tx) => { + let _ = tx.send(this.resolve(id, strategy)); } PayloadServiceCommand::Subscribe(tx) => { let new_rx = this.payload_events.subscribe(); @@ -469,7 +462,11 @@ pub enum PayloadServiceCommand { oneshot::Sender>>, ), /// Resolve the payload and return the payload - Resolve(PayloadId, oneshot::Sender>>), + Resolve( + PayloadId, + /* kind: */ PayloadKind, + oneshot::Sender>>, + ), /// Payload service events Subscribe(oneshot::Sender>>), } @@ -489,7 +486,7 @@ where Self::PayloadAttributes(f0, f1) => { f.debug_tuple("PayloadAttributes").field(&f0).field(&f1).finish() } - Self::Resolve(f0, _f1) => f.debug_tuple("Resolve").field(&f0).finish(), + Self::Resolve(f0, f1, _f2) => f.debug_tuple("Resolve").field(&f0).field(&f1).finish(), Self::Subscribe(f0) => f.debug_tuple("Subscribe").field(&f0).finish(), } } diff --git a/crates/payload/builder/src/test_utils.rs b/crates/payload/builder/src/test_utils.rs index 55b9b84f4..6990dc9b1 100644 --- a/crates/payload/builder/src/test_utils.rs +++ b/crates/payload/builder/src/test_utils.rs @@ -7,7 +7,7 @@ use crate::{ use alloy_primitives::U256; use reth_chain_state::ExecutedBlock; -use reth_payload_primitives::{PayloadBuilderError, PayloadTypes}; +use reth_payload_primitives::{PayloadBuilderError, PayloadKind, PayloadTypes}; use reth_primitives::Block; use reth_provider::CanonStateNotification; use std::{ @@ -96,7 +96,10 @@ impl PayloadJob for TestPayloadJob { Ok(self.attr.clone()) } - fn resolve(&mut self) -> (Self::ResolvePayloadFuture, KeepPayloadJobAlive) { + fn resolve_kind( + &mut self, + _kind: PayloadKind, + ) -> (Self::ResolvePayloadFuture, KeepPayloadJobAlive) { let fut = futures_util::future::ready(self.best_payload()); (fut, KeepPayloadJobAlive::No) } diff --git a/crates/payload/builder/src/traits.rs b/crates/payload/builder/src/traits.rs index 8d448eeff..62dadeb45 100644 --- 
a/crates/payload/builder/src/traits.rs +++ b/crates/payload/builder/src/traits.rs @@ -1,6 +1,8 @@ //! Trait abstractions used by the payload crate. -use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes, PayloadBuilderError}; +use reth_payload_primitives::{ + BuiltPayload, PayloadBuilderAttributes, PayloadBuilderError, PayloadKind, +}; use reth_provider::CanonStateNotification; use std::future::Future; @@ -53,7 +55,21 @@ pub trait PayloadJob: Future> + Send + /// If this returns [`KeepPayloadJobAlive::Yes`], then the [`PayloadJob`] will be polled /// once more. If this returns [`KeepPayloadJobAlive::No`] then the [`PayloadJob`] will be /// dropped after this call. - fn resolve(&mut self) -> (Self::ResolvePayloadFuture, KeepPayloadJobAlive); + /// + /// The [`PayloadKind`] determines how the payload should be resolved in the + /// `ResolvePayloadFuture`. [`PayloadKind::Earliest`] should return the earliest available + /// payload (as fast as possible), e.g. racing an empty payload job against a pending job if + /// there's no payload available yet. [`PayloadKind::WaitForPending`] is allowed to wait + /// until a built payload is available. + fn resolve_kind( + &mut self, + kind: PayloadKind, + ) -> (Self::ResolvePayloadFuture, KeepPayloadJobAlive); + + /// Resolves the payload as fast as possible. + fn resolve(&mut self) -> (Self::ResolvePayloadFuture, KeepPayloadJobAlive) { + self.resolve_kind(PayloadKind::Earliest) + } } /// Whether the payload job should be kept alive or terminated after the payload was requested by diff --git a/crates/payload/primitives/src/lib.rs b/crates/payload/primitives/src/lib.rs index 8173cae34..08aa42800 100644 --- a/crates/payload/primitives/src/lib.rs +++ b/crates/payload/primitives/src/lib.rs @@ -342,6 +342,28 @@ pub enum EngineApiMessageVersion { V4, } +/// Determines how we should choose the payload to return. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub enum PayloadKind { + /// Returns the next best available payload (the earliest available payload). + /// This does not wait for a real for pending job to finish if there's no best payload yet and + /// is allowed to race various payload jobs (empty, pending best) against each other and + /// returns whichever job finishes faster. + /// + /// This should be used when it's more important to return a valid payload as fast as possible. + /// For example, the engine API timeout for `engine_getPayload` is 1s and clients should rather + /// return an empty payload than indefinitely waiting for the pending payload job to finish and + /// risk missing the deadline. + #[default] + Earliest, + /// Only returns once we have at least one built payload. + /// + /// Compared to [`PayloadKind::Earliest`] this does not race an empty payload job against the + /// already in progress one, and returns the best available built payload or awaits the job in + /// progress. 
+ WaitForPending, +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/payload/primitives/src/traits.rs b/crates/payload/primitives/src/traits.rs index 494ed68aa..ce98fcad3 100644 --- a/crates/payload/primitives/src/traits.rs +++ b/crates/payload/primitives/src/traits.rs @@ -1,4 +1,4 @@ -use crate::{PayloadBuilderError, PayloadEvents, PayloadTypes}; +use crate::{PayloadEvents, PayloadKind, PayloadTypes}; use alloy_primitives::{Address, B256, U256}; use alloy_rpc_types::{ engine::{PayloadAttributes as EthPayloadAttributes, PayloadId}, @@ -7,12 +7,8 @@ use alloy_rpc_types::{ use op_alloy_rpc_types_engine::OpPayloadAttributes; use reth_chain_state::ExecutedBlock; use reth_primitives::{SealedBlock, Withdrawals}; -use std::{future::Future, pin::Pin}; use tokio::sync::oneshot; -pub(crate) type PayloadFuture
<P>
= - Pin> + Send + Sync>>; - /// A type that can request, subscribe to and resolve payloads. #[async_trait::async_trait] pub trait PayloadBuilder: Send + Unpin { @@ -21,12 +17,13 @@ pub trait PayloadBuilder: Send + Unpin { /// The error type returned by the builder. type Error; - /// Sends a message to the service to start building a new payload for the given payload - /// attributes and returns a future that resolves to the payload. - async fn send_and_resolve_payload( + /// Sends a message to the service to start building a new payload for the given payload. + /// + /// Returns a receiver that will receive the payload id. + fn send_new_payload( &self, attr: ::PayloadBuilderAttributes, - ) -> Result::BuiltPayload>, Self::Error>; + ) -> oneshot::Receiver>; /// Returns the best payload for the given identifier. async fn best_payload( @@ -34,22 +31,21 @@ pub trait PayloadBuilder: Send + Unpin { id: PayloadId, ) -> Option::BuiltPayload, Self::Error>>; - /// Sends a message to the service to start building a new payload for the given payload. - /// - /// This is the same as [`PayloadBuilder::new_payload`] but does not wait for the result - /// and returns the receiver instead - fn send_new_payload( + /// Resolves the payload job and returns the best payload that has been built so far. + async fn resolve_kind( &self, - attr: ::PayloadBuilderAttributes, - ) -> oneshot::Receiver>; + id: PayloadId, + kind: PayloadKind, + ) -> Option::BuiltPayload, Self::Error>>; - /// Starts building a new payload for the given payload attributes. - /// - /// Returns the identifier of the payload. - async fn new_payload( + /// Resolves the payload job as fast and possible and returns the best payload that has been + /// built so far. + async fn resolve( &self, - attr: ::PayloadBuilderAttributes, - ) -> Result; + id: PayloadId, + ) -> Option::BuiltPayload, Self::Error>> { + self.resolve_kind(id, PayloadKind::Earliest).await + } /// Sends a message to the service to subscribe to payload events. /// Returns a receiver that will receive them. 
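For context, a minimal usage sketch of the resolver API added by this patch (#11823), not part of the diffs themselves: it assumes a `PayloadBuilderHandle<T>` and a known `PayloadId` are already in scope and uses `eyre` for error handling, mirroring the local-miner hunk above; the helper name is illustrative only.

use reth_payload_builder::{PayloadBuilderHandle, PayloadId};
use reth_payload_primitives::{PayloadKind, PayloadTypes};

// Hedged sketch: wait for a genuinely built payload instead of racing an empty one.
async fn wait_for_built_payload<T: PayloadTypes>(
    payload_builder: &PayloadBuilderHandle<T>,
    payload_id: PayloadId,
) -> eyre::Result<T::BuiltPayload> {
    // `PayloadKind::WaitForPending` awaits the in-progress job; `PayloadKind::Earliest`
    // keeps the old behaviour of returning the first available (possibly empty) payload.
    match payload_builder.resolve_kind(payload_id, PayloadKind::WaitForPending).await {
        Some(Ok(payload)) => Ok(payload),
        Some(Err(err)) => Err(err.into()),
        // `None` means no payload job is registered for this id.
        None => eyre::bail!("no payload job for {payload_id:?}"),
    }
}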
diff --git a/examples/custom-payload-builder/src/job.rs b/examples/custom-payload-builder/src/job.rs index 26b594be9..014198259 100644 --- a/examples/custom-payload-builder/src/job.rs +++ b/examples/custom-payload-builder/src/job.rs @@ -3,6 +3,7 @@ use reth::{ providers::StateProviderFactory, tasks::TaskSpawner, transaction_pool::TransactionPool, }; use reth_basic_payload_builder::{PayloadBuilder, PayloadConfig}; +use reth_node_api::PayloadKind; use reth_payload_builder::{KeepPayloadJobAlive, PayloadBuilderError, PayloadJob}; use std::{ @@ -52,7 +53,10 @@ where Ok(self.config.attributes.clone()) } - fn resolve(&mut self) -> (Self::ResolvePayloadFuture, KeepPayloadJobAlive) { + fn resolve_kind( + &mut self, + _kind: PayloadKind, + ) -> (Self::ResolvePayloadFuture, KeepPayloadJobAlive) { let payload = self.best_payload(); (futures_util::future::ready(payload), KeepPayloadJobAlive::No) } From 9c8f5d89d884e3a4c0b2e8120b5f4c6121e626f1 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 18 Oct 2024 13:47:56 +0200 Subject: [PATCH 132/425] chore: rm v2 get bodies functions (#11870) --- crates/rpc/rpc-api/src/engine.rs | 23 ++------ crates/rpc/rpc-engine-api/src/capabilities.rs | 2 - crates/rpc/rpc-engine-api/src/engine_api.rs | 52 ++----------------- crates/rpc/rpc-engine-api/src/metrics.rs | 4 -- .../rpc-types-compat/src/engine/payload.rs | 50 +----------------- 5 files changed, 8 insertions(+), 123 deletions(-) diff --git a/crates/rpc/rpc-api/src/engine.rs b/crates/rpc/rpc-api/src/engine.rs index 50181d23a..eedada8ff 100644 --- a/crates/rpc/rpc-api/src/engine.rs +++ b/crates/rpc/rpc-api/src/engine.rs @@ -10,9 +10,9 @@ use alloy_rpc_types::{ state::StateOverride, BlockOverrides, EIP1186AccountProofResponse, Filter, Log, SyncStatus, }; use alloy_rpc_types_engine::{ - ClientVersionV1, ExecutionPayloadBodiesV1, ExecutionPayloadBodiesV2, ExecutionPayloadInputV2, - ExecutionPayloadV1, ExecutionPayloadV3, ExecutionPayloadV4, ForkchoiceState, ForkchoiceUpdated, - PayloadId, PayloadStatus, TransitionConfiguration, + ClientVersionV1, ExecutionPayloadBodiesV1, ExecutionPayloadInputV2, ExecutionPayloadV1, + ExecutionPayloadV3, ExecutionPayloadV4, ForkchoiceState, ForkchoiceUpdated, PayloadId, + PayloadStatus, TransitionConfiguration, }; use alloy_rpc_types_eth::transaction::TransactionRequest; use alloy_serde::JsonStorageKey; @@ -146,13 +146,6 @@ pub trait EngineApi { block_hashes: Vec, ) -> RpcResult; - /// See also - #[method(name = "getPayloadBodiesByHashV2")] - async fn get_payload_bodies_by_hash_v2( - &self, - block_hashes: Vec, - ) -> RpcResult; - /// See also /// /// Returns the execution payload bodies by the range starting at `start`, containing `count` @@ -172,16 +165,6 @@ pub trait EngineApi { count: U64, ) -> RpcResult; - /// See also - /// - /// Similar to `getPayloadBodiesByRangeV1`, but returns [`ExecutionPayloadBodiesV2`] - #[method(name = "getPayloadBodiesByRangeV2")] - async fn get_payload_bodies_by_range_v2( - &self, - start: U64, - count: U64, - ) -> RpcResult; - /// See also /// /// Note: This method will be deprecated after the cancun hardfork: diff --git a/crates/rpc/rpc-engine-api/src/capabilities.rs b/crates/rpc/rpc-engine-api/src/capabilities.rs index de4d96231..af0609b0d 100644 --- a/crates/rpc/rpc-engine-api/src/capabilities.rs +++ b/crates/rpc/rpc-engine-api/src/capabilities.rs @@ -17,8 +17,6 @@ pub const CAPABILITIES: &[&str] = &[ "engine_newPayloadV4", "engine_getPayloadBodiesByHashV1", "engine_getPayloadBodiesByRangeV1", - "engine_getPayloadBodiesByHashV2", - 
"engine_getPayloadBodiesByRangeV2", "engine_getBlobsV1", ]; diff --git a/crates/rpc/rpc-engine-api/src/engine_api.rs b/crates/rpc/rpc-engine-api/src/engine_api.rs index 252808c14..fb7f98ed2 100644 --- a/crates/rpc/rpc-engine-api/src/engine_api.rs +++ b/crates/rpc/rpc-engine-api/src/engine_api.rs @@ -5,9 +5,8 @@ use alloy_eips::eip4844::BlobAndProofV1; use alloy_primitives::{BlockHash, BlockNumber, B256, U64}; use alloy_rpc_types_engine::{ CancunPayloadFields, ClientVersionV1, ExecutionPayload, ExecutionPayloadBodiesV1, - ExecutionPayloadBodiesV2, ExecutionPayloadInputV2, ExecutionPayloadV1, ExecutionPayloadV3, - ExecutionPayloadV4, ForkchoiceState, ForkchoiceUpdated, PayloadId, PayloadStatus, - TransitionConfiguration, + ExecutionPayloadInputV2, ExecutionPayloadV1, ExecutionPayloadV3, ExecutionPayloadV4, + ForkchoiceState, ForkchoiceUpdated, PayloadId, PayloadStatus, TransitionConfiguration, }; use async_trait::async_trait; use jsonrpsee_core::RpcResult; @@ -23,7 +22,7 @@ use reth_payload_primitives::{ use reth_primitives::{Block, BlockHashOrNumber, EthereumHardfork}; use reth_rpc_api::EngineApiServer; use reth_rpc_types_compat::engine::payload::{ - convert_payload_input_v2_to_payload, convert_to_payload_body_v1, convert_to_payload_body_v2, + convert_payload_input_v2_to_payload, convert_to_payload_body_v1, }; use reth_storage_api::{BlockReader, HeaderProvider, StateProviderFactory}; use reth_tasks::TaskSpawner; @@ -451,18 +450,6 @@ where self.get_payload_bodies_by_range_with(start, count, convert_to_payload_body_v1).await } - /// Returns the execution payload bodies by the range starting at `start`, containing `count` - /// blocks. - /// - /// Same as [`Self::get_payload_bodies_by_range_v1`] but as [`ExecutionPayloadBodiesV2`]. - pub async fn get_payload_bodies_by_range_v2( - &self, - start: BlockNumber, - count: u64, - ) -> EngineApiResult { - self.get_payload_bodies_by_range_with(start, count, convert_to_payload_body_v2).await - } - /// Called to retrieve execution payload bodies by hashes. async fn get_payload_bodies_by_hash_with( &self, @@ -509,16 +496,6 @@ where self.get_payload_bodies_by_hash_with(hashes, convert_to_payload_body_v1).await } - /// Called to retrieve execution payload bodies by hashes. - /// - /// Same as [`Self::get_payload_bodies_by_hash_v1`] but as [`ExecutionPayloadBodiesV2`]. - pub async fn get_payload_bodies_by_hash_v2( - &self, - hashes: Vec, - ) -> EngineApiResult { - self.get_payload_bodies_by_hash_with(hashes, convert_to_payload_body_v2).await - } - /// Called to verify network configuration parameters and ensure that Consensus and Execution /// layers are using the latest configuration. pub fn exchange_transition_configuration( @@ -846,17 +823,6 @@ where Ok(res.await?) } - async fn get_payload_bodies_by_hash_v2( - &self, - block_hashes: Vec, - ) -> RpcResult { - trace!(target: "rpc::engine", "Serving engine_getPayloadBodiesByHashV2"); - let start = Instant::now(); - let res = Self::get_payload_bodies_by_hash_v2(self, block_hashes); - self.inner.metrics.latency.get_payload_bodies_by_hash_v2.record(start.elapsed()); - Ok(res.await?) - } - /// Handler for `engine_getPayloadBodiesByRangeV1` /// /// See also @@ -885,18 +851,6 @@ where Ok(res?) 
} - async fn get_payload_bodies_by_range_v2( - &self, - start: U64, - count: U64, - ) -> RpcResult { - trace!(target: "rpc::engine", "Serving engine_getPayloadBodiesByRangeV2"); - let start_time = Instant::now(); - let res = Self::get_payload_bodies_by_range_v2(self, start.to(), count.to()).await; - self.inner.metrics.latency.get_payload_bodies_by_range_v2.record(start_time.elapsed()); - Ok(res?) - } - /// Handler for `engine_exchangeTransitionConfigurationV1` /// See also async fn exchange_transition_configuration( diff --git a/crates/rpc/rpc-engine-api/src/metrics.rs b/crates/rpc/rpc-engine-api/src/metrics.rs index 2c4216664..8d0106f9d 100644 --- a/crates/rpc/rpc-engine-api/src/metrics.rs +++ b/crates/rpc/rpc-engine-api/src/metrics.rs @@ -44,12 +44,8 @@ pub(crate) struct EngineApiLatencyMetrics { pub(crate) get_payload_v4: Histogram, /// Latency for `engine_getPayloadBodiesByRangeV1` pub(crate) get_payload_bodies_by_range_v1: Histogram, - /// Latency for `engine_getPayloadBodiesByRangeV2` - pub(crate) get_payload_bodies_by_range_v2: Histogram, /// Latency for `engine_getPayloadBodiesByHashV1` pub(crate) get_payload_bodies_by_hash_v1: Histogram, - /// Latency for `engine_getPayloadBodiesByHashV2` - pub(crate) get_payload_bodies_by_hash_v2: Histogram, /// Latency for `engine_exchangeTransitionConfigurationV1` pub(crate) exchange_transition_configuration: Histogram, } diff --git a/crates/rpc/rpc-types-compat/src/engine/payload.rs b/crates/rpc/rpc-types-compat/src/engine/payload.rs index e6f2f97ca..cd9ce1cbf 100644 --- a/crates/rpc/rpc-types-compat/src/engine/payload.rs +++ b/crates/rpc/rpc-types-compat/src/engine/payload.rs @@ -6,8 +6,8 @@ use alloy_eips::eip2718::{Decodable2718, Encodable2718}; use alloy_primitives::{B256, U256}; use alloy_rpc_types_engine::{ payload::{ExecutionPayloadBodyV1, ExecutionPayloadFieldV2, ExecutionPayloadInputV2}, - ExecutionPayload, ExecutionPayloadBodyV2, ExecutionPayloadV1, ExecutionPayloadV2, - ExecutionPayloadV3, ExecutionPayloadV4, PayloadError, + ExecutionPayload, ExecutionPayloadV1, ExecutionPayloadV2, ExecutionPayloadV3, + ExecutionPayloadV4, PayloadError, }; use reth_primitives::{ constants::MAXIMUM_EXTRA_DATA_SIZE, @@ -381,52 +381,6 @@ pub fn convert_to_payload_body_v1(value: Block) -> ExecutionPayloadBodyV1 { } } -/// Converts [`Block`] to [`ExecutionPayloadBodyV2`] -pub fn convert_to_payload_body_v2(value: Block) -> ExecutionPayloadBodyV2 { - let transactions = value.body.transactions.into_iter().map(|tx| { - let mut out = Vec::new(); - tx.encode_2718(&mut out); - out.into() - }); - - let mut payload = ExecutionPayloadBodyV2 { - transactions: transactions.collect(), - withdrawals: value.body.withdrawals.map(Withdrawals::into_inner), - deposit_requests: None, - withdrawal_requests: None, - consolidation_requests: None, - }; - - if let Some(requests) = value.body.requests { - let (deposit_requests, withdrawal_requests, consolidation_requests) = - requests.into_iter().fold( - (Vec::new(), Vec::new(), Vec::new()), - |(mut deposits, mut withdrawals, mut consolidation_requests), request| { - match request { - Request::DepositRequest(r) => { - deposits.push(r); - } - Request::WithdrawalRequest(r) => { - withdrawals.push(r); - } - Request::ConsolidationRequest(r) => { - consolidation_requests.push(r); - } - _ => {} - }; - - (deposits, withdrawals, consolidation_requests) - }, - ); - - payload.deposit_requests = Some(deposit_requests); - payload.withdrawal_requests = Some(withdrawal_requests); - payload.consolidation_requests = Some(consolidation_requests); 
- } - - payload -} - /// Transforms a [`SealedBlock`] into a [`ExecutionPayloadV1`] pub fn execution_payload_from_sealed_block(value: SealedBlock) -> ExecutionPayloadV1 { let transactions = value.raw_transactions(); From 587c91f1cf988cf9f99bb251bf5da7f86ff5600d Mon Sep 17 00:00:00 2001 From: Ayodeji Akinola Date: Fri, 18 Oct 2024 16:17:11 +0100 Subject: [PATCH 133/425] Optimize Sender Recovery Process (#11385) --- .../stages/src/stages/sender_recovery.rs | 146 +++++++++++------- 1 file changed, 86 insertions(+), 60 deletions(-) diff --git a/crates/stages/stages/src/stages/sender_recovery.rs b/crates/stages/stages/src/stages/sender_recovery.rs index a85b0bc60..a4eda6394 100644 --- a/crates/stages/stages/src/stages/sender_recovery.rs +++ b/crates/stages/stages/src/stages/sender_recovery.rs @@ -29,6 +29,9 @@ const BATCH_SIZE: usize = 100_000; /// Maximum number of senders to recover per rayon worker job. const WORKER_CHUNK_SIZE: usize = 100; +/// Type alias for a sender that transmits the result of sender recovery. +type RecoveryResultSender = mpsc::Sender>>; + /// The sender recovery stage iterates over existing transactions, /// recovers the transaction signer and stores them /// in [`TransactionSenders`][reth_db::tables::TransactionSenders] table. @@ -100,8 +103,10 @@ where .map(|start| start..std::cmp::min(start + BATCH_SIZE as u64, tx_range.end)) .collect::>>(); + let tx_batch_sender = setup_range_recovery(provider); + for range in batch { - recover_range(range, provider, &mut senders_cursor)?; + recover_range(range, provider, tx_batch_sender.clone(), &mut senders_cursor)?; } Ok(ExecOutput { @@ -136,15 +141,16 @@ where fn recover_range( tx_range: Range, provider: &Provider, + tx_batch_sender: mpsc::Sender, RecoveryResultSender)>>, senders_cursor: &mut CURSOR, ) -> Result<(), StageError> where Provider: DBProvider + HeaderProvider + StaticFileProviderFactory, CURSOR: DbCursorRW, { - debug!(target: "sync::stages::sender_recovery", ?tx_range, "Recovering senders batch"); + debug!(target: "sync::stages::sender_recovery", ?tx_range, "Sending batch for processing"); - // Preallocate channels + // Preallocate channels for each chunks in the batch let (chunks, receivers): (Vec<_>, Vec<_>) = tx_range .clone() .step_by(WORKER_CHUNK_SIZE) @@ -156,62 +162,9 @@ where }) .unzip(); - let static_file_provider = provider.static_file_provider(); - - // We do not use `tokio::task::spawn_blocking` because, during a shutdown, - // there will be a timeout grace period in which Tokio does not allow spawning - // additional blocking tasks. This would cause this function to return - // `SenderRecoveryStageError::RecoveredSendersMismatch` at the end. - // - // However, using `std::thread::spawn` allows us to utilize the timeout grace - // period to complete some work without throwing errors during the shutdown. - std::thread::spawn(move || { - for (chunk_range, recovered_senders_tx) in chunks { - // Read the raw value, and let the rayon worker to decompress & decode. - let chunk = match static_file_provider.fetch_range_with_predicate( - StaticFileSegment::Transactions, - chunk_range.clone(), - |cursor, number| { - Ok(cursor - .get_one::>>( - number.into(), - )? - .map(|tx| (number, tx))) - }, - |_| true, - ) { - Ok(chunk) => chunk, - Err(err) => { - // We exit early since we could not process this chunk. 
- let _ = recovered_senders_tx - .send(Err(Box::new(SenderRecoveryStageError::StageError(err.into())))); - break - } - }; - - // Spawn the task onto the global rayon pool - // This task will send the results through the channel after it has read the transaction - // and calculated the sender. - rayon::spawn(move || { - let mut rlp_buf = Vec::with_capacity(128); - for (number, tx) in chunk { - let res = tx - .value() - .map_err(|err| Box::new(SenderRecoveryStageError::StageError(err.into()))) - .and_then(|tx| recover_sender((number, tx), &mut rlp_buf)); - - let is_err = res.is_err(); - - let _ = recovered_senders_tx.send(res); - - // Finish early - if is_err { - break - } - } - }); - } - }); + if let Some(err) = tx_batch_sender.send(chunks).err() { + return Err(StageError::Fatal(err.into())); + } debug!(target: "sync::stages::sender_recovery", ?tx_range, "Appending recovered senders to the database"); @@ -235,6 +188,7 @@ where provider.sealed_header(block_number)?.ok_or_else(|| { ProviderError::HeaderNotFound(block_number.into()) })?; + Err(StageError::Block { block: Box::new(sealed_header), error: BlockErrorKind::Validation( @@ -269,10 +223,82 @@ where .into(), )); } - Ok(()) } +/// Spawns a thread to handle the recovery of transaction senders for +/// specified chunks of a given batch. It processes incoming ranges, fetching and recovering +/// transactions in parallel using global rayon pool +fn setup_range_recovery( + provider: &Provider, +) -> mpsc::Sender, RecoveryResultSender)>> +where + Provider: DBProvider + HeaderProvider + StaticFileProviderFactory, +{ + let (tx_sender, tx_receiver) = mpsc::channel::, RecoveryResultSender)>>(); + let static_file_provider = provider.static_file_provider(); + + // We do not use `tokio::task::spawn_blocking` because, during a shutdown, + // there will be a timeout grace period in which Tokio does not allow spawning + // additional blocking tasks. This would cause this function to return + // `SenderRecoveryStageError::RecoveredSendersMismatch` at the end. + // + // However, using `std::thread::spawn` allows us to utilize the timeout grace + // period to complete some work without throwing errors during the shutdown. + std::thread::spawn(move || { + while let Ok(chunks) = tx_receiver.recv() { + for (chunk_range, recovered_senders_tx) in chunks { + // Read the raw value, and let the rayon worker to decompress & decode. + let chunk = match static_file_provider.fetch_range_with_predicate( + StaticFileSegment::Transactions, + chunk_range.clone(), + |cursor, number| { + Ok(cursor + .get_one::>>( + number.into(), + )? + .map(|tx| (number, tx))) + }, + |_| true, + ) { + Ok(chunk) => chunk, + Err(err) => { + // We exit early since we could not process this chunk. + let _ = recovered_senders_tx + .send(Err(Box::new(SenderRecoveryStageError::StageError(err.into())))); + break + } + }; + + // Spawn the task onto the global rayon pool + // This task will send the results through the channel after it has read the + // transaction and calculated the sender. 
+ rayon::spawn(move || { + let mut rlp_buf = Vec::with_capacity(128); + for (number, tx) in chunk { + let res = tx + .value() + .map_err(|err| { + Box::new(SenderRecoveryStageError::StageError(err.into())) + }) + .and_then(|tx| recover_sender((number, tx), &mut rlp_buf)); + + let is_err = res.is_err(); + + let _ = recovered_senders_tx.send(res); + + // Finish early + if is_err { + break + } + } + }); + } + } + }); + tx_sender +} + #[inline] fn recover_sender( (tx_id, tx): (TxNumber, TransactionSignedNoHash), From a908f977736ebd0d609fc3d2f4a6904a14f0bd73 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 18 Oct 2024 20:21:55 +0200 Subject: [PATCH 134/425] chore: simplify update fn (#11880) --- crates/transaction-pool/src/pool/pending.rs | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/crates/transaction-pool/src/pool/pending.rs b/crates/transaction-pool/src/pool/pending.rs index ff3ecf65a..ff5269014 100644 --- a/crates/transaction-pool/src/pool/pending.rs +++ b/crates/transaction-pool/src/pool/pending.rs @@ -197,7 +197,7 @@ impl PendingPool { } } else { self.size_of += tx.transaction.size(); - self.update_independents_and_highest_nonces(&tx, &id); + self.update_independents_and_highest_nonces(&tx); self.all.insert(tx.clone()); self.by_id.insert(id, tx); } @@ -243,7 +243,7 @@ impl PendingPool { tx.priority = self.ordering.priority(&tx.transaction.transaction, base_fee); self.size_of += tx.transaction.size(); - self.update_independents_and_highest_nonces(&tx, &id); + self.update_independents_and_highest_nonces(&tx); self.all.insert(tx.clone()); self.by_id.insert(id, tx); } @@ -254,12 +254,8 @@ impl PendingPool { /// Updates the independent transaction and highest nonces set, assuming the given transaction /// is being _added_ to the pool. 
- fn update_independents_and_highest_nonces( - &mut self, - tx: &PendingTransaction, - tx_id: &TransactionId, - ) { - let ancestor_id = tx_id.unchecked_ancestor(); + fn update_independents_and_highest_nonces(&mut self, tx: &PendingTransaction) { + let ancestor_id = tx.transaction.id().unchecked_ancestor(); if let Some(ancestor) = ancestor_id.and_then(|id| self.by_id.get(&id)) { // the transaction already has an ancestor, so we only need to ensure that the // highest nonces set actually contains the highest nonce for that sender @@ -305,7 +301,7 @@ impl PendingPool { let priority = self.ordering.priority(&tx.transaction, base_fee); let tx = PendingTransaction { submission_id, transaction: tx, priority }; - self.update_independents_and_highest_nonces(&tx, &tx_id); + self.update_independents_and_highest_nonces(&tx); self.all.insert(tx.clone()); // send the new transaction to any existing pendingpool static file iterators From eee5e0d41f601dca62339b09b666432377717bd0 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Fri, 18 Oct 2024 22:08:09 +0200 Subject: [PATCH 135/425] bump rust to 1.82 (#11876) --- .github/workflows/lint.yml | 4 ++-- Cargo.toml | 6 ++---- README.md | 2 +- clippy.toml | 18 ++++++++++++++++-- .../src/providers/static_file/manager.rs | 1 + 5 files changed, 22 insertions(+), 9 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index efa38857e..65c01c3a4 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -103,7 +103,7 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@master with: - toolchain: "1.81" # MSRV + toolchain: "1.82" # MSRV - uses: Swatinem/rust-cache@v2 with: cache-on-failure: true @@ -163,7 +163,7 @@ jobs: - uses: dtolnay/rust-toolchain@nightly - uses: dtolnay/rust-toolchain@master with: - toolchain: "1.81" # MSRV + toolchain: "1.82" # MSRV - uses: Swatinem/rust-cache@v2 with: cache-on-failure: true diff --git a/Cargo.toml b/Cargo.toml index e3ec1c1fb..541110969 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,7 @@ [workspace.package] version = "1.1.0" edition = "2021" -rust-version = "1.81" +rust-version = "1.82" license = "MIT OR Apache-2.0" homepage = "https://paradigmxyz.github.io/reth" repository = "https://github.com/paradigmxyz/reth" @@ -410,9 +410,7 @@ reth-trie-db = { path = "crates/trie/db" } reth-trie-parallel = { path = "crates/trie/parallel" } # revm -revm = { version = "14.0.3", features = [ - "std", -], default-features = false } +revm = { version = "14.0.3", features = ["std"], default-features = false } revm-inspectors = "0.8.1" revm-primitives = { version = "10.0.0", features = [ "std", diff --git a/README.md b/README.md index 7fae4d0b6..8a6b8ddb4 100644 --- a/README.md +++ b/README.md @@ -87,7 +87,7 @@ When updating this, also update: - .github/workflows/lint.yml --> -The Minimum Supported Rust Version (MSRV) of this project is [1.81.0](https://blog.rust-lang.org/2024/09/05/Rust-1.81.0.html). +The Minimum Supported Rust Version (MSRV) of this project is [1.82.0](https://blog.rust-lang.org/2024/10/17/Rust-1.82.0.html). See the book for detailed instructions on how to [build from source](https://paradigmxyz.github.io/reth/installation/source.html). 
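Rust 1.82 also stabilizes `Option::is_none_or`, which the clippy shipped alongside it starts suggesting (under the `nonminimal_bool` lint) in place of negated `is_some_and` chains; that is presumably — this is an assumption, the patch itself does not say — why the static-file watcher hunk further below adds `#[allow(clippy::nonminimal_bool)]` instead of rewriting the condition. A minimal sketch of the equivalence, assuming a 1.82 toolchain:

    fn main() {
        // Mirrors the watcher check: "this path does not have a `.conf` extension".
        let ext: Option<&str> = Some("conf");
        let skip = !ext.is_some_and(|s| s == "conf");
        // The rewrite clippy would suggest, using the newly stable `is_none_or`.
        let skip_alt = ext.is_none_or(|s| s != "conf");
        assert_eq!(skip, skip_alt);
    }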
diff --git a/clippy.toml b/clippy.toml index cdfa4bc93..862c56863 100644 --- a/clippy.toml +++ b/clippy.toml @@ -1,3 +1,17 @@ -msrv = "1.81" +msrv = "1.82" too-large-for-stack = 128 -doc-valid-idents = ["P2P", "ExEx", "ExExes", "IPv4", "IPv6", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "WAL", "MessagePack"] +doc-valid-idents = [ + "P2P", + "ExEx", + "ExExes", + "IPv4", + "IPv6", + "KiB", + "MiB", + "GiB", + "TiB", + "PiB", + "EiB", + "WAL", + "MessagePack", +] diff --git a/crates/storage/provider/src/providers/static_file/manager.rs b/crates/storage/provider/src/providers/static_file/manager.rs index e233332a0..6f5cf07c9 100644 --- a/crates/storage/provider/src/providers/static_file/manager.rs +++ b/crates/storage/provider/src/providers/static_file/manager.rs @@ -143,6 +143,7 @@ impl StaticFileProvider { // appending/truncating rows for segment in event.paths { // Ensure it's a file with the .conf extension + #[allow(clippy::nonminimal_bool)] if !segment .extension() .is_some_and(|s| s.to_str() == Some(CONFIG_FILE_EXTENSION)) From 655fc1a55a6dae5dfd522f1981142524c0ef6924 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Sat, 19 Oct 2024 00:13:02 +0200 Subject: [PATCH 136/425] rpc: add unit tests for `RpcModuleSelection` (#11883) --- crates/rpc/rpc-server-types/src/module.rs | 235 +++++++++++++++++++++- 1 file changed, 232 insertions(+), 3 deletions(-) diff --git a/crates/rpc/rpc-server-types/src/module.rs b/crates/rpc/rpc-server-types/src/module.rs index 72a5e7c85..56417dda7 100644 --- a/crates/rpc/rpc-server-types/src/module.rs +++ b/crates/rpc/rpc-server-types/src/module.rs @@ -199,9 +199,12 @@ impl FromStr for RpcModuleSelection { } let mut modules = s.split(',').map(str::trim).peekable(); let first = modules.peek().copied().ok_or(ParseError::VariantNotFound)?; - match first { - "all" | "All" => Ok(Self::All), - "none" | "None" => Ok(Self::Selection(Default::default())), + // We convert to lowercase to make the comparison case-insensitive + // + // This is a way to allow typing "all" and "ALL" and "All" and "aLl" etc. 
+ match first.to_lowercase().as_str() { + "all" => Ok(Self::All), + "none" => Ok(Self::Selection(Default::default())), _ => Self::try_from_selection(modules), } } @@ -329,3 +332,229 @@ impl Serialize for RethRpcModule { s.serialize_str(self.as_ref()) } } + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_all_modules() { + let all_modules = RpcModuleSelection::all_modules(); + assert_eq!(all_modules.len(), RethRpcModule::variant_count()); + } + + #[test] + fn test_standard_modules() { + let standard_modules = RpcModuleSelection::standard_modules(); + let expected_modules: HashSet = + HashSet::from([RethRpcModule::Eth, RethRpcModule::Net, RethRpcModule::Web3]); + assert_eq!(standard_modules, expected_modules); + } + + #[test] + fn test_default_ipc_modules() { + let default_ipc_modules = RpcModuleSelection::default_ipc_modules(); + assert_eq!(default_ipc_modules, RpcModuleSelection::all_modules()); + } + + #[test] + fn test_try_from_selection_success() { + let selection = vec!["eth", "admin"]; + let config = RpcModuleSelection::try_from_selection(selection).unwrap(); + assert_eq!(config, RpcModuleSelection::from([RethRpcModule::Eth, RethRpcModule::Admin])); + } + + #[test] + fn test_rpc_module_selection_len() { + let all_modules = RpcModuleSelection::All; + let standard = RpcModuleSelection::Standard; + let selection = RpcModuleSelection::from([RethRpcModule::Eth, RethRpcModule::Admin]); + + assert_eq!(all_modules.len(), RethRpcModule::variant_count()); + assert_eq!(standard.len(), 3); + assert_eq!(selection.len(), 2); + } + + #[test] + fn test_rpc_module_selection_is_empty() { + let empty_selection = RpcModuleSelection::from(HashSet::new()); + assert!(empty_selection.is_empty()); + + let non_empty_selection = RpcModuleSelection::from([RethRpcModule::Eth]); + assert!(!non_empty_selection.is_empty()); + } + + #[test] + fn test_rpc_module_selection_iter_selection() { + let all_modules = RpcModuleSelection::All; + let standard = RpcModuleSelection::Standard; + let selection = RpcModuleSelection::from([RethRpcModule::Eth, RethRpcModule::Admin]); + + assert_eq!(all_modules.iter_selection().count(), RethRpcModule::variant_count()); + assert_eq!(standard.iter_selection().count(), 3); + assert_eq!(selection.iter_selection().count(), 2); + } + + #[test] + fn test_rpc_module_selection_to_selection() { + let all_modules = RpcModuleSelection::All; + let standard = RpcModuleSelection::Standard; + let selection = RpcModuleSelection::from([RethRpcModule::Eth, RethRpcModule::Admin]); + + assert_eq!(all_modules.to_selection(), RpcModuleSelection::all_modules()); + assert_eq!(standard.to_selection(), RpcModuleSelection::standard_modules()); + assert_eq!( + selection.to_selection(), + HashSet::from([RethRpcModule::Eth, RethRpcModule::Admin]) + ); + } + + #[test] + fn test_rpc_module_selection_are_identical() { + // Test scenario: both selections are `All` + // + // Since both selections include all possible RPC modules, they should be considered + // identical. + let all_modules = RpcModuleSelection::All; + assert!(RpcModuleSelection::are_identical(Some(&all_modules), Some(&all_modules))); + + // Test scenario: both `http` and `ws` are `None` + // + // When both arguments are `None`, the function should return `true` because no modules are + // selected. 
+ assert!(RpcModuleSelection::are_identical(None, None)); + + // Test scenario: both selections contain identical sets of specific modules + // + // In this case, both selections contain the same modules (`Eth` and `Admin`), + // so they should be considered identical. + let selection1 = RpcModuleSelection::from([RethRpcModule::Eth, RethRpcModule::Admin]); + let selection2 = RpcModuleSelection::from([RethRpcModule::Eth, RethRpcModule::Admin]); + assert!(RpcModuleSelection::are_identical(Some(&selection1), Some(&selection2))); + + // Test scenario: one selection is `All`, the other is `Standard` + // + // `All` includes all possible modules, while `Standard` includes a specific set of modules. + // Since `Standard` does not cover all modules, these two selections should not be + // considered identical. + let standard = RpcModuleSelection::Standard; + assert!(!RpcModuleSelection::are_identical(Some(&all_modules), Some(&standard))); + + // Test scenario: one is `None`, the other is an empty selection + // + // When one selection is `None` and the other is an empty selection (no modules), + // they should be considered identical because neither selects any modules. + let empty_selection = RpcModuleSelection::Selection(HashSet::new()); + assert!(RpcModuleSelection::are_identical(None, Some(&empty_selection))); + assert!(RpcModuleSelection::are_identical(Some(&empty_selection), None)); + + // Test scenario: one is `None`, the other is a non-empty selection + // + // If one selection is `None` and the other contains modules, they should not be considered + // identical because `None` represents no selection, while the other explicitly + // selects modules. + let non_empty_selection = RpcModuleSelection::from([RethRpcModule::Eth]); + assert!(!RpcModuleSelection::are_identical(None, Some(&non_empty_selection))); + assert!(!RpcModuleSelection::are_identical(Some(&non_empty_selection), None)); + + // Test scenario: `All` vs. non-full selection + // + // If one selection is `All` (which includes all modules) and the other contains only a + // subset of modules, they should not be considered identical. + let partial_selection = RpcModuleSelection::from([RethRpcModule::Eth, RethRpcModule::Net]); + assert!(!RpcModuleSelection::are_identical(Some(&all_modules), Some(&partial_selection))); + + // Test scenario: full selection vs `All` + // + // If the other selection explicitly selects all available modules, it should be identical + // to `All`. + let full_selection = + RpcModuleSelection::from(RethRpcModule::modules().into_iter().collect::>()); + assert!(RpcModuleSelection::are_identical(Some(&all_modules), Some(&full_selection))); + + // Test scenario: different non-empty selections + // + // If the two selections contain different sets of modules, they should not be considered + // identical. + let selection3 = RpcModuleSelection::from([RethRpcModule::Eth, RethRpcModule::Net]); + let selection4 = RpcModuleSelection::from([RethRpcModule::Eth, RethRpcModule::Web3]); + assert!(!RpcModuleSelection::are_identical(Some(&selection3), Some(&selection4))); + + // Test scenario: `Standard` vs an equivalent selection + // The `Standard` selection includes a predefined set of modules. If we explicitly create + // a selection with the same set of modules, they should be considered identical. 
+ let matching_standard = + RpcModuleSelection::from([RethRpcModule::Eth, RethRpcModule::Net, RethRpcModule::Web3]); + assert!(RpcModuleSelection::are_identical(Some(&standard), Some(&matching_standard))); + + // Test scenario: `Standard` vs non-matching selection + // + // If the selection does not match the modules included in `Standard`, they should not be + // considered identical. + let non_matching_standard = + RpcModuleSelection::from([RethRpcModule::Eth, RethRpcModule::Net]); + assert!(!RpcModuleSelection::are_identical(Some(&standard), Some(&non_matching_standard))); + } + + #[test] + fn test_rpc_module_selection_from_str() { + // Test empty string returns default selection + let result = RpcModuleSelection::from_str(""); + assert!(result.is_ok()); + assert_eq!(result.unwrap(), RpcModuleSelection::Selection(Default::default())); + + // Test "all" (case insensitive) returns All variant + let result = RpcModuleSelection::from_str("all"); + assert!(result.is_ok()); + assert_eq!(result.unwrap(), RpcModuleSelection::All); + + let result = RpcModuleSelection::from_str("All"); + assert!(result.is_ok()); + assert_eq!(result.unwrap(), RpcModuleSelection::All); + + let result = RpcModuleSelection::from_str("ALL"); + assert!(result.is_ok()); + assert_eq!(result.unwrap(), RpcModuleSelection::All); + + // Test "none" (case insensitive) returns empty selection + let result = RpcModuleSelection::from_str("none"); + assert!(result.is_ok()); + assert_eq!(result.unwrap(), RpcModuleSelection::Selection(Default::default())); + + let result = RpcModuleSelection::from_str("None"); + assert!(result.is_ok()); + assert_eq!(result.unwrap(), RpcModuleSelection::Selection(Default::default())); + + let result = RpcModuleSelection::from_str("NONE"); + assert!(result.is_ok()); + assert_eq!(result.unwrap(), RpcModuleSelection::Selection(Default::default())); + + // Test valid selections: "eth,admin" + let result = RpcModuleSelection::from_str("eth,admin"); + assert!(result.is_ok()); + let expected_selection = + RpcModuleSelection::from([RethRpcModule::Eth, RethRpcModule::Admin]); + assert_eq!(result.unwrap(), expected_selection); + + // Test valid selection with extra spaces: " eth , admin " + let result = RpcModuleSelection::from_str(" eth , admin "); + assert!(result.is_ok()); + assert_eq!(result.unwrap(), expected_selection); + + // Test invalid selection should return error + let result = RpcModuleSelection::from_str("invalid,unknown"); + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), ParseError::VariantNotFound); + + // Test single valid selection: "eth" + let result = RpcModuleSelection::from_str("eth"); + assert!(result.is_ok()); + let expected_selection = RpcModuleSelection::from([RethRpcModule::Eth]); + assert_eq!(result.unwrap(), expected_selection); + + // Test single invalid selection: "unknown" + let result = RpcModuleSelection::from_str("unknown"); + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), ParseError::VariantNotFound); + } +} From a4126b3a53f900483fe8946c8067aade68cf5cad Mon Sep 17 00:00:00 2001 From: greged93 <82421016+greged93@users.noreply.github.com> Date: Sat, 19 Oct 2024 00:15:08 +0200 Subject: [PATCH 137/425] feat: tasks executor metrics in grafana (#11815) Co-authored-by: Dan Cline <6798349+Rjected@users.noreply.github.com> --- etc/docker-compose.yml | 2 +- etc/grafana/dashboards/overview.json | 399 +++++++++++++++++++++------ 2 files changed, 315 insertions(+), 86 deletions(-) diff --git a/etc/docker-compose.yml b/etc/docker-compose.yml index 
618aa6f5a..cd7dd6dd2 100644 --- a/etc/docker-compose.yml +++ b/etc/docker-compose.yml @@ -65,7 +65,7 @@ services: sh -c "cp -r /etc/grafana/provisioning_temp/dashboards/. /etc/grafana/provisioning/dashboards && find /etc/grafana/provisioning/dashboards/ -name '*.json' -exec sed -i 's/$${DS_PROMETHEUS}/Prometheus/g' {} \+ && /run.sh" - + volumes: mainnet_data: driver: local diff --git a/etc/grafana/dashboards/overview.json b/etc/grafana/dashboards/overview.json index 15786764f..8c77f5979 100644 --- a/etc/grafana/dashboards/overview.json +++ b/etc/grafana/dashboards/overview.json @@ -1007,13 +1007,242 @@ "title": "Sync progress (stage progress as highest block number reached)", "type": "timeseries" }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "description": "Tracks the number of critical tasks currently ran by the executor.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "semi-dark-red", + "value": 0 + } + ] + }, + "unit": "tasks" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 20 + }, + "id": 248, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "expr": "reth_executor_spawn_critical_tasks_total{instance=\"$instance\"}- reth_executor_spawn_finished_critical_tasks_total{instance=\"$instance\"}", + "hide": false, + "instant": false, + "legendFormat": "Tasks running", + "range": true, + "refId": "C" + } + ], + "title": "Task Executor critical tasks", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "description": "Tracks the number of regular tasks currently ran by the executor.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "semi-dark-red", + "value": 80 + } + ] + }, + 
"unit": "tasks/s" + }, + "overrides": [ + { + "matcher": { + "id": "byFrameRefID", + "options": "C" + }, + "properties": [ + { + "id": "unit", + "value": "tasks" + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 20 + }, + "id": 247, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "disableTextWrap": false, + "editorMode": "builder", + "exemplar": false, + "expr": "rate(reth_executor_spawn_regular_tasks_total{instance=\"$instance\"}[$__rate_interval])", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "Tasks started", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "expr": "reth_executor_spawn_regular_tasks_total{instance=\"$instance\"}- reth_executor_spawn_finished_regular_tasks_total{instance=\"$instance\"}", + "hide": false, + "instant": false, + "legendFormat": "Tasks running", + "range": true, + "refId": "C" + } + ], + "title": "Task Executor regular tasks", + "type": "timeseries" + }, { "collapsed": false, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 20 + "y": 28 }, "id": 38, "panels": [], @@ -1085,7 +1314,7 @@ "h": 8, "w": 12, "x": 0, - "y": 21 + "y": 29 }, "id": 40, "options": { @@ -1145,7 +1374,7 @@ "h": 8, "w": 12, "x": 12, - "y": 21 + "y": 29 }, "id": 42, "maxDataPoints": 25, @@ -1273,7 +1502,7 @@ "h": 8, "w": 12, "x": 0, - "y": 29 + "y": 37 }, "id": 117, "options": { @@ -1370,7 +1599,7 @@ "h": 8, "w": 12, "x": 12, - "y": 29 + "y": 37 }, "id": 116, "options": { @@ -1471,7 +1700,7 @@ "h": 8, "w": 12, "x": 0, - "y": 37 + "y": 45 }, "id": 119, "options": { @@ -1572,7 +1801,7 @@ "h": 8, "w": 12, "x": 12, - "y": 37 + "y": 45 }, "id": 118, "options": { @@ -1634,7 +1863,7 @@ "h": 8, "w": 12, "x": 0, - "y": 45 + "y": 53 }, "id": 48, "options": { @@ -1746,7 +1975,7 @@ "h": 8, "w": 12, "x": 12, - "y": 45 + "y": 53 }, "id": 52, "options": { @@ -1804,7 +2033,7 @@ "h": 8, "w": 12, "x": 0, - "y": 53 + "y": 61 }, "id": 50, "options": { @@ -1972,7 +2201,7 @@ "h": 8, "w": 12, "x": 12, - "y": 53 + "y": 61 }, "id": 58, "options": { @@ -2073,7 +2302,7 @@ "h": 8, "w": 12, "x": 0, - "y": 61 + "y": 69 }, "id": 113, "options": { @@ -2110,7 +2339,7 @@ "h": 1, "w": 24, "x": 0, - "y": 69 + "y": 77 }, "id": 203, "panels": [], @@ -2144,7 +2373,7 @@ "h": 8, "w": 8, "x": 0, - "y": 70 + "y": 78 }, "id": 202, "options": { @@ -2305,7 +2534,7 @@ "h": 8, "w": 8, "x": 8, - "y": 70 + "y": 78 }, "id": 204, "options": { @@ -2455,7 +2684,7 @@ "h": 8, "w": 8, "x": 16, - "y": 70 + "y": 78 }, "id": 205, "options": { @@ -2556,7 +2785,7 @@ "h": 8, "w": 12, "x": 0, - "y": 78 + "y": 86 }, "id": 206, "options": { @@ -2653,7 +2882,7 @@ "h": 8, "w": 12, "x": 12, - "y": 78 + "y": 86 }, "id": 207, "options": { @@ -2690,7 +2919,7 @@ "h": 1, "w": 24, "x": 0, - "y": 86 + "y": 94 }, "id": 46, "panels": [], @@ -2761,7 +2990,7 @@ "h": 8, "w": 24, "x": 0, - "y": 87 + "y": 95 }, "id": 56, "options": { @@ -2857,7 +3086,7 @@ "h": 11, "w": 24, "x": 0, - "y": 95 + "y": 103 }, "id": 240, "options": { @@ -2916,7 +3145,7 @@ "h": 1, "w": 24, "x": 0, - "y": 106 + "y": 114 }, "id": 24, "panels": [], @@ -3014,7 +3243,7 @@ "h": 8, "w": 12, "x": 0, - "y": 107 + "y": 115 }, "id": 26, "options": { @@ -3148,7 +3377,7 @@ "h": 8, 
"w": 12, "x": 12, - "y": 107 + "y": 115 }, "id": 33, "options": { @@ -3268,7 +3497,7 @@ "h": 8, "w": 12, "x": 0, - "y": 115 + "y": 123 }, "id": 36, "options": { @@ -3317,7 +3546,7 @@ "h": 1, "w": 24, "x": 0, - "y": 123 + "y": 131 }, "id": 32, "panels": [], @@ -3425,7 +3654,7 @@ "h": 8, "w": 12, "x": 0, - "y": 124 + "y": 132 }, "id": 30, "options": { @@ -3591,7 +3820,7 @@ "h": 8, "w": 12, "x": 12, - "y": 124 + "y": 132 }, "id": 28, "options": { @@ -3711,7 +3940,7 @@ "h": 8, "w": 12, "x": 0, - "y": 132 + "y": 140 }, "id": 35, "options": { @@ -3837,7 +4066,7 @@ "h": 8, "w": 12, "x": 12, - "y": 132 + "y": 140 }, "id": 73, "options": { @@ -3964,7 +4193,7 @@ "h": 8, "w": 12, "x": 0, - "y": 140 + "y": 148 }, "id": 102, "options": { @@ -4027,7 +4256,7 @@ "h": 1, "w": 24, "x": 0, - "y": 148 + "y": 156 }, "id": 79, "panels": [], @@ -4101,7 +4330,7 @@ "h": 8, "w": 12, "x": 0, - "y": 149 + "y": 157 }, "id": 74, "options": { @@ -4198,7 +4427,7 @@ "h": 8, "w": 12, "x": 12, - "y": 149 + "y": 157 }, "id": 80, "options": { @@ -4295,7 +4524,7 @@ "h": 8, "w": 12, "x": 0, - "y": 157 + "y": 165 }, "id": 81, "options": { @@ -4392,7 +4621,7 @@ "h": 8, "w": 12, "x": 12, - "y": 157 + "y": 165 }, "id": 114, "options": { @@ -4489,7 +4718,7 @@ "h": 8, "w": 12, "x": 12, - "y": 165 + "y": 173 }, "id": 190, "options": { @@ -4527,7 +4756,7 @@ "h": 1, "w": 24, "x": 0, - "y": 173 + "y": 181 }, "id": 87, "panels": [], @@ -4601,7 +4830,7 @@ "h": 8, "w": 12, "x": 0, - "y": 174 + "y": 182 }, "id": 83, "options": { @@ -4697,7 +4926,7 @@ "h": 8, "w": 12, "x": 12, - "y": 174 + "y": 182 }, "id": 84, "options": { @@ -4805,7 +5034,7 @@ "h": 8, "w": 12, "x": 0, - "y": 182 + "y": 190 }, "id": 85, "options": { @@ -4902,7 +5131,7 @@ "h": 8, "w": 12, "x": 12, - "y": 182 + "y": 190 }, "id": 210, "options": { @@ -5227,7 +5456,7 @@ "h": 8, "w": 12, "x": 0, - "y": 190 + "y": 198 }, "id": 211, "options": { @@ -5552,7 +5781,7 @@ "h": 8, "w": 12, "x": 12, - "y": 190 + "y": 198 }, "id": 212, "options": { @@ -5775,9 +6004,9 @@ "h": 8, "w": 24, "x": 0, - "y": 198 + "y": 206 }, - "id": 213, + "id": 213, "options": { "legend": { "calcs": [], @@ -5811,7 +6040,7 @@ "h": 1, "w": 24, "x": 0, - "y": 198 + "y": 214 }, "id": 214, "panels": [], @@ -5883,7 +6112,7 @@ "h": 8, "w": 12, "x": 0, - "y": 199 + "y": 215 }, "id": 215, "options": { @@ -5979,7 +6208,7 @@ "h": 8, "w": 12, "x": 12, - "y": 199 + "y": 215 }, "id": 216, "options": { @@ -6030,7 +6259,7 @@ "h": 1, "w": 24, "x": 0, - "y": 207 + "y": 223 }, "id": 68, "panels": [], @@ -6104,7 +6333,7 @@ "h": 8, "w": 12, "x": 0, - "y": 208 + "y": 224 }, "id": 60, "options": { @@ -6200,7 +6429,7 @@ "h": 8, "w": 12, "x": 12, - "y": 208 + "y": 224 }, "id": 62, "options": { @@ -6296,7 +6525,7 @@ "h": 8, "w": 12, "x": 0, - "y": 216 + "y": 232 }, "id": 64, "options": { @@ -6333,7 +6562,7 @@ "h": 1, "w": 24, "x": 0, - "y": 224 + "y": 240 }, "id": 97, "panels": [], @@ -6418,7 +6647,7 @@ "h": 8, "w": 12, "x": 0, - "y": 225 + "y": 241 }, "id": 98, "options": { @@ -6581,7 +6810,7 @@ "h": 8, "w": 12, "x": 12, - "y": 225 + "y": 241 }, "id": 101, "options": { @@ -6679,7 +6908,7 @@ "h": 8, "w": 12, "x": 0, - "y": 233 + "y": 249 }, "id": 99, "options": { @@ -6777,7 +7006,7 @@ "h": 8, "w": 12, "x": 12, - "y": 233 + "y": 249 }, "id": 100, "options": { @@ -6815,7 +7044,7 @@ "h": 1, "w": 24, "x": 0, - "y": 241 + "y": 257 }, "id": 105, "panels": [], @@ -6888,7 +7117,7 @@ "h": 8, "w": 12, "x": 0, - "y": 242 + "y": 258 }, "id": 106, "options": { @@ -6986,7 +7215,7 @@ "h": 8, "w": 12, "x": 12, - "y": 242 + "y": 258 }, "id": 107, 
"options": { @@ -7083,7 +7312,7 @@ "h": 8, "w": 12, "x": 0, - "y": 250 + "y": 266 }, "id": 217, "options": { @@ -7121,7 +7350,7 @@ "h": 1, "w": 24, "x": 0, - "y": 258 + "y": 274 }, "id": 108, "panels": [], @@ -7219,7 +7448,7 @@ "h": 8, "w": 12, "x": 0, - "y": 259 + "y": 275 }, "id": 109, "options": { @@ -7281,7 +7510,7 @@ "h": 8, "w": 12, "x": 12, - "y": 259 + "y": 275 }, "id": 111, "maxDataPoints": 25, @@ -7411,7 +7640,7 @@ "h": 8, "w": 12, "x": 0, - "y": 267 + "y": 283 }, "id": 120, "options": { @@ -7469,7 +7698,7 @@ "h": 8, "w": 12, "x": 12, - "y": 267 + "y": 283 }, "id": 112, "maxDataPoints": 25, @@ -7623,7 +7852,7 @@ "h": 8, "w": 12, "x": 0, - "y": 275 + "y": 291 }, "id": 198, "options": { @@ -7809,9 +8038,9 @@ "h": 8, "w": 12, "x": 12, - "y": 275 + "y": 291 }, - "id": 213, + "id": 246, "options": { "legend": { "calcs": [], @@ -7848,7 +8077,7 @@ "h": 1, "w": 24, "x": 0, - "y": 283 + "y": 299 }, "id": 236, "panels": [], @@ -7920,7 +8149,7 @@ "h": 8, "w": 12, "x": 0, - "y": 284 + "y": 300 }, "id": 237, "options": { @@ -8017,7 +8246,7 @@ "h": 8, "w": 12, "x": 12, - "y": 284 + "y": 300 }, "id": 238, "options": { @@ -8114,7 +8343,7 @@ "h": 8, "w": 12, "x": 0, - "y": 292 + "y": 308 }, "id": 239, "options": { @@ -8223,7 +8452,7 @@ "h": 8, "w": 12, "x": 12, - "y": 292 + "y": 308 }, "id": 219, "options": { @@ -8288,7 +8517,7 @@ "h": 8, "w": 12, "x": 0, - "y": 300 + "y": 316 }, "id": 220, "options": { @@ -8332,7 +8561,7 @@ "h": 1, "w": 24, "x": 0, - "y": 308 + "y": 324 }, "id": 241, "panels": [], @@ -8405,7 +8634,7 @@ "h": 8, "w": 12, "x": 0, - "y": 309 + "y": 325 }, "id": 243, "options": { @@ -8517,7 +8746,7 @@ "h": 8, "w": 12, "x": 12, - "y": 309 + "y": 325 }, "id": 244, "options": { @@ -8630,7 +8859,7 @@ "h": 8, "w": 12, "x": 0, - "y": 317 + "y": 333 }, "id": 245, "options": { @@ -8669,7 +8898,7 @@ "h": 1, "w": 24, "x": 0, - "y": 325 + "y": 341 }, "id": 226, "panels": [], @@ -8767,7 +8996,7 @@ "h": 8, "w": 12, "x": 0, - "y": 326 + "y": 342 }, "id": 225, "options": { @@ -8896,7 +9125,7 @@ "h": 8, "w": 12, "x": 12, - "y": 326 + "y": 342 }, "id": 227, "options": { @@ -9025,7 +9254,7 @@ "h": 8, "w": 12, "x": 0, - "y": 334 + "y": 350 }, "id": 235, "options": { @@ -9154,7 +9383,7 @@ "h": 8, "w": 12, "x": 12, - "y": 334 + "y": 350 }, "id": 234, "options": { From a6daafc6a4f087101a849a28a334df34275ffcb6 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Sat, 19 Oct 2024 10:27:29 +0200 Subject: [PATCH 138/425] refactor(txpool): small refactor for `InMemoryBlobStore` impl (#11886) --- crates/transaction-pool/src/blobstore/mem.rs | 28 +++++--------------- 1 file changed, 6 insertions(+), 22 deletions(-) diff --git a/crates/transaction-pool/src/blobstore/mem.rs b/crates/transaction-pool/src/blobstore/mem.rs index 15160c2c3..c98a01b88 100644 --- a/crates/transaction-pool/src/blobstore/mem.rs +++ b/crates/transaction-pool/src/blobstore/mem.rs @@ -76,42 +76,26 @@ impl BlobStore for InMemoryBlobStore { // Retrieves the decoded blob data for the given transaction hash. 
fn get(&self, tx: B256) -> Result, BlobStoreError> { - let store = self.inner.store.read(); - Ok(store.get(&tx).cloned()) + Ok(self.inner.store.read().get(&tx).cloned()) } fn contains(&self, tx: B256) -> Result { - let store = self.inner.store.read(); - Ok(store.contains_key(&tx)) + Ok(self.inner.store.read().contains_key(&tx)) } fn get_all( &self, txs: Vec, ) -> Result, BlobStoreError> { - let mut items = Vec::with_capacity(txs.len()); let store = self.inner.store.read(); - for tx in txs { - if let Some(item) = store.get(&tx) { - items.push((tx, item.clone())); - } - } - - Ok(items) + Ok(txs.into_iter().filter_map(|tx| store.get(&tx).map(|item| (tx, item.clone()))).collect()) } fn get_exact(&self, txs: Vec) -> Result, BlobStoreError> { - let mut items = Vec::with_capacity(txs.len()); let store = self.inner.store.read(); - for tx in txs { - if let Some(item) = store.get(&tx) { - items.push(item.clone()); - } else { - return Err(BlobStoreError::MissingSidecar(tx)) - } - } - - Ok(items) + txs.into_iter() + .map(|tx| store.get(&tx).cloned().ok_or_else(|| BlobStoreError::MissingSidecar(tx))) + .collect() } fn get_by_versioned_hashes( From 2f559c62bf94e0fed23e500336ceed1e941e0ed5 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Sat, 19 Oct 2024 10:28:10 +0200 Subject: [PATCH 139/425] primitives: use alloy `MAXIMUM_EXTRA_DATA_SIZE` constant (#11881) --- Cargo.lock | 1 + crates/consensus/common/Cargo.toml | 1 + crates/consensus/common/src/validation.rs | 6 ++---- crates/node/core/Cargo.toml | 5 ++--- crates/node/core/src/args/payload_builder.rs | 5 ++--- crates/primitives-traits/src/constants/mod.rs | 3 --- crates/rpc/rpc-types-compat/src/engine/payload.rs | 3 +-- 7 files changed, 9 insertions(+), 15 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c8b4f7632..a97c01e01 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7886,6 +7886,7 @@ dependencies = [ name = "reth-node-core" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-primitives", "alloy-rpc-types-engine", "clap", diff --git a/crates/consensus/common/Cargo.toml b/crates/consensus/common/Cargo.toml index 66a92270d..eaae1301b 100644 --- a/crates/consensus/common/Cargo.toml +++ b/crates/consensus/common/Cargo.toml @@ -19,6 +19,7 @@ reth-consensus.workspace = true # ethereum alloy-primitives.workspace = true revm-primitives.workspace = true +alloy-consensus.workspace = true [dev-dependencies] reth-storage-api.workspace = true diff --git a/crates/consensus/common/src/validation.rs b/crates/consensus/common/src/validation.rs index 711e7772b..4c2e6b192 100644 --- a/crates/consensus/common/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -1,12 +1,10 @@ //! Collection of methods for block validation. 
+use alloy_consensus::constants::MAXIMUM_EXTRA_DATA_SIZE; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_consensus::ConsensusError; use reth_primitives::{ - constants::{ - eip4844::{DATA_GAS_PER_BLOB, MAX_DATA_GAS_PER_BLOCK}, - MAXIMUM_EXTRA_DATA_SIZE, - }, + constants::eip4844::{DATA_GAS_PER_BLOB, MAX_DATA_GAS_PER_BLOCK}, EthereumHardfork, GotExpected, Header, SealedBlock, SealedHeader, }; use revm_primitives::calc_excess_blob_gas; diff --git a/crates/node/core/Cargo.toml b/crates/node/core/Cargo.toml index 3ac90a888..d7d95751c 100644 --- a/crates/node/core/Cargo.toml +++ b/crates/node/core/Cargo.toml @@ -39,6 +39,7 @@ reth-stages-types.workspace = true # ethereum alloy-primitives.workspace = true alloy-rpc-types-engine = { workspace = true, features = ["jwt"] } +alloy-consensus.workspace = true # misc eyre.workspace = true @@ -76,9 +77,7 @@ tokio.workspace = true tempfile.workspace = true [features] -optimism = [ - "reth-primitives/optimism" -] +optimism = ["reth-primitives/optimism"] # Features for vergen to generate correct env vars jemalloc = [] asm-keccak = [] diff --git a/crates/node/core/src/args/payload_builder.rs b/crates/node/core/src/args/payload_builder.rs index aec35253a..4a18fd5b0 100644 --- a/crates/node/core/src/args/payload_builder.rs +++ b/crates/node/core/src/args/payload_builder.rs @@ -1,12 +1,11 @@ use crate::{cli::config::PayloadBuilderConfig, version::default_extradata}; +use alloy_consensus::constants::MAXIMUM_EXTRA_DATA_SIZE; use clap::{ builder::{RangedU64ValueParser, TypedValueParser}, Arg, Args, Command, }; use reth_cli_util::{parse_duration_from_secs, parse_duration_from_secs_or_ms}; -use reth_primitives::constants::{ - ETHEREUM_BLOCK_GAS_LIMIT, MAXIMUM_EXTRA_DATA_SIZE, SLOT_DURATION, -}; +use reth_primitives::constants::{ETHEREUM_BLOCK_GAS_LIMIT, SLOT_DURATION}; use std::{borrow::Cow, ffi::OsStr, time::Duration}; /// Parameters for configuring the Payload Builder diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index a4918137e..482852bdc 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -13,9 +13,6 @@ pub const RETH_CLIENT_VERSION: &str = concat!("reth/v", env!("CARGO_PKG_VERSION" /// The first four bytes of the call data for a function call specifies the function to be called. pub const SELECTOR_LEN: usize = 4; -/// Maximum extra data size in a block after genesis -pub const MAXIMUM_EXTRA_DATA_SIZE: usize = 32; - /// An EPOCH is a series of 32 slots. pub const EPOCH_SLOTS: u64 = 32; diff --git a/crates/rpc/rpc-types-compat/src/engine/payload.rs b/crates/rpc/rpc-types-compat/src/engine/payload.rs index cd9ce1cbf..3bbee2b00 100644 --- a/crates/rpc/rpc-types-compat/src/engine/payload.rs +++ b/crates/rpc/rpc-types-compat/src/engine/payload.rs @@ -1,7 +1,7 @@ //! Standalone Conversion Functions for Handling Different Versions of Execution Payloads in //! 
Ethereum's Engine -use alloy_consensus::EMPTY_OMMER_ROOT_HASH; +use alloy_consensus::{constants::MAXIMUM_EXTRA_DATA_SIZE, EMPTY_OMMER_ROOT_HASH}; use alloy_eips::eip2718::{Decodable2718, Encodable2718}; use alloy_primitives::{B256, U256}; use alloy_rpc_types_engine::{ @@ -10,7 +10,6 @@ use alloy_rpc_types_engine::{ ExecutionPayloadV4, PayloadError, }; use reth_primitives::{ - constants::MAXIMUM_EXTRA_DATA_SIZE, proofs::{self}, Block, BlockBody, Header, Request, SealedBlock, TransactionSigned, Withdrawals, }; From da5079d11fa7f1595323b5636b928fb61f1e61cf Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Sat, 19 Oct 2024 10:28:42 +0200 Subject: [PATCH 140/425] test(txpool): add unit test for `BlobStoreCanonTracker` (#11885) --- crates/transaction-pool/Cargo.toml | 6 +- .../transaction-pool/src/blobstore/tracker.rs | 88 +++++++++++++++++++ 2 files changed, 93 insertions(+), 1 deletion(-) diff --git a/crates/transaction-pool/Cargo.toml b/crates/transaction-pool/Cargo.toml index 887543b52..cdac6a1aa 100644 --- a/crates/transaction-pool/Cargo.toml +++ b/crates/transaction-pool/Cargo.toml @@ -74,7 +74,11 @@ serde_json.workspace = true default = ["serde"] serde = ["dep:serde"] test-utils = ["rand", "paste", "serde"] -arbitrary = ["proptest", "reth-primitives/arbitrary", "proptest-arbitrary-interop"] +arbitrary = [ + "proptest", + "reth-primitives/arbitrary", + "proptest-arbitrary-interop", +] [[bench]] name = "truncate" diff --git a/crates/transaction-pool/src/blobstore/tracker.rs b/crates/transaction-pool/src/blobstore/tracker.rs index e6041fa12..f22dcf570 100644 --- a/crates/transaction-pool/src/blobstore/tracker.rs +++ b/crates/transaction-pool/src/blobstore/tracker.rs @@ -81,6 +81,13 @@ pub enum BlobStoreUpdates { #[cfg(test)] mod tests { + use alloy_consensus::Header; + use reth_execution_types::Chain; + use reth_primitives::{ + BlockBody, SealedBlock, SealedBlockWithSenders, SealedHeader, Transaction, + TransactionSigned, + }; + use super::*; #[test] @@ -101,4 +108,85 @@ mod tests { BlobStoreUpdates::Finalized(block2.into_iter().chain(block3).collect::>()) ); } + + #[test] + fn test_add_new_chain_blocks() { + let mut tracker = BlobStoreCanonTracker::default(); + + // Create sample transactions + let tx1_hash = B256::random(); // EIP-4844 transaction + let tx2_hash = B256::random(); // EIP-4844 transaction + let tx3_hash = B256::random(); // Non-EIP-4844 transaction + + // Creating a first block with EIP-4844 transactions + let block1 = SealedBlockWithSenders { + block: SealedBlock { + header: SealedHeader::new( + Header { number: 10, ..Default::default() }, + B256::random(), + ), + body: BlockBody { + transactions: vec![ + TransactionSigned { + hash: tx1_hash, + transaction: Transaction::Eip4844(Default::default()), + ..Default::default() + }, + TransactionSigned { + hash: tx2_hash, + transaction: Transaction::Eip4844(Default::default()), + ..Default::default() + }, + // Another transaction that is not EIP-4844 + TransactionSigned { + hash: B256::random(), + transaction: Transaction::Eip7702(Default::default()), + ..Default::default() + }, + ], + ..Default::default() + }, + }, + ..Default::default() + }; + + // Creating a second block with EIP-1559 and EIP-2930 transactions + // Note: This block does not contain any EIP-4844 transactions + let block2 = SealedBlockWithSenders { + block: SealedBlock { + header: SealedHeader::new( + Header { number: 11, ..Default::default() }, + B256::random(), + ), + body: BlockBody { + transactions: vec![ + 
TransactionSigned { + hash: tx3_hash, + transaction: Transaction::Eip1559(Default::default()), + ..Default::default() + }, + TransactionSigned { + hash: tx2_hash, + transaction: Transaction::Eip2930(Default::default()), + ..Default::default() + }, + ], + ..Default::default() + }, + }, + ..Default::default() + }; + + // Extract blocks from the chain + let chain = Chain::new(vec![block1, block2], Default::default(), None); + let blocks = chain.into_inner().0; + + // Add new chain blocks to the tracker + tracker.add_new_chain_blocks(&blocks); + + // Tx1 and tx2 should be in the block containing EIP-4844 transactions + assert_eq!(tracker.blob_txs_in_blocks.get(&10).unwrap(), &vec![tx1_hash, tx2_hash]); + // No transactions should be in the block containing non-EIP-4844 transactions + assert!(tracker.blob_txs_in_blocks.get(&11).unwrap().is_empty()); + } } From 2ae93682b4978bf80bf27c777334ead30d3e04f5 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Sat, 19 Oct 2024 14:08:34 +0400 Subject: [PATCH 141/425] refactor: move `EngineValidator` setup to `RpcAddOns` (#11850) --- Cargo.lock | 1 - crates/ethereum/node/src/node.rs | 19 ++-- crates/exex/test-utils/Cargo.toml | 1 - crates/exex/test-utils/src/lib.rs | 11 +- crates/node/api/src/node.rs | 7 -- crates/node/builder/src/builder/states.rs | 5 - crates/node/builder/src/components/builder.rs | 104 ++++-------------- crates/node/builder/src/components/engine.rs | 38 ------- crates/node/builder/src/components/mod.rs | 29 +---- crates/node/builder/src/rpc.rs | 70 ++++++++++-- crates/optimism/node/src/node.rs | 37 ++++--- examples/custom-engine-types/src/main.rs | 37 +++++-- 12 files changed, 140 insertions(+), 219 deletions(-) delete mode 100644 crates/node/builder/src/components/engine.rs diff --git a/Cargo.lock b/Cargo.lock index a97c01e01..98bfe1f8e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7495,7 +7495,6 @@ dependencies = [ "reth-consensus", "reth-db", "reth-db-common", - "reth-ethereum-engine-primitives", "reth-evm", "reth-execution-types", "reth-exex", diff --git a/crates/ethereum/node/src/node.rs b/crates/ethereum/node/src/node.rs index a890810b0..d3301b208 100644 --- a/crates/ethereum/node/src/node.rs +++ b/crates/ethereum/node/src/node.rs @@ -12,15 +12,16 @@ use reth_ethereum_engine_primitives::{ use reth_evm_ethereum::execute::EthExecutorProvider; use reth_network::NetworkHandle; use reth_node_api::{ - ConfigureEvm, EngineValidator, FullNodeComponents, NodePrimitives, NodeTypesWithDB, + AddOnsContext, ConfigureEvm, EngineValidator, FullNodeComponents, NodePrimitives, + NodeTypesWithDB, }; use reth_node_builder::{ components::{ - ComponentsBuilder, ConsensusBuilder, EngineValidatorBuilder, ExecutorBuilder, - NetworkBuilder, PayloadServiceBuilder, PoolBuilder, + ComponentsBuilder, ConsensusBuilder, ExecutorBuilder, NetworkBuilder, + PayloadServiceBuilder, PoolBuilder, }, node::{FullNodeTypes, NodeTypes, NodeTypesWithEngine}, - rpc::RpcAddOns, + rpc::{EngineValidatorBuilder, RpcAddOns}, BuilderContext, Node, NodeAdapter, NodeComponentsBuilder, PayloadBuilderConfig, PayloadTypes, }; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; @@ -57,7 +58,6 @@ impl EthereumNode { EthereumNetworkBuilder, EthereumExecutorBuilder, EthereumConsensusBuilder, - EthereumEngineValidatorBuilder, > where Node: FullNodeTypes>, @@ -74,7 +74,6 @@ impl EthereumNode { .network(EthereumNetworkBuilder::default()) .executor(EthereumExecutorBuilder::default()) .consensus(EthereumConsensusBuilder::default()) - 
.engine_validator(EthereumEngineValidatorBuilder::default()) } } @@ -96,6 +95,7 @@ pub type EthereumAddOns = RpcAddOns< NetworkHandle, ::Evm, >, + EthereumEngineValidatorBuilder, >; impl Node for EthereumNode @@ -110,7 +110,6 @@ where EthereumNetworkBuilder, EthereumExecutorBuilder, EthereumConsensusBuilder, - EthereumEngineValidatorBuilder, >; type AddOns = EthereumAddOns< @@ -347,12 +346,12 @@ pub struct EthereumEngineValidatorBuilder; impl EngineValidatorBuilder for EthereumEngineValidatorBuilder where Types: NodeTypesWithEngine, - Node: FullNodeTypes, + Node: FullNodeComponents, EthereumEngineValidator: EngineValidator, { type Validator = EthereumEngineValidator; - async fn build_validator(self, ctx: &BuilderContext) -> eyre::Result { - Ok(EthereumEngineValidator::new(ctx.chain_spec())) + async fn build(self, ctx: &AddOnsContext<'_, Node>) -> eyre::Result { + Ok(EthereumEngineValidator::new(ctx.config.chain.clone())) } } diff --git a/crates/exex/test-utils/Cargo.toml b/crates/exex/test-utils/Cargo.toml index 8488cdb8b..b850295a3 100644 --- a/crates/exex/test-utils/Cargo.toml +++ b/crates/exex/test-utils/Cargo.toml @@ -31,7 +31,6 @@ reth-primitives.workspace = true reth-provider = { workspace = true, features = ["test-utils"] } reth-tasks.workspace = true reth-transaction-pool = { workspace = true, features = ["test-utils"] } -reth-ethereum-engine-primitives.workspace = true ## async futures-util.workspace = true diff --git a/crates/exex/test-utils/src/lib.rs b/crates/exex/test-utils/src/lib.rs index 9e17013c4..9b86da7c7 100644 --- a/crates/exex/test-utils/src/lib.rs +++ b/crates/exex/test-utils/src/lib.rs @@ -24,7 +24,6 @@ use reth_db::{ DatabaseEnv, }; use reth_db_common::init::init_genesis; -use reth_ethereum_engine_primitives::EthereumEngineValidator; use reth_evm::test_utils::MockExecutorProvider; use reth_execution_types::Chain; use reth_exex::{ExExContext, ExExEvent, ExExNotification, ExExNotifications, Wal}; @@ -41,10 +40,7 @@ use reth_node_builder::{ }; use reth_node_core::node_config::NodeConfig; use reth_node_ethereum::{ - node::{ - EthereumAddOns, EthereumEngineValidatorBuilder, EthereumNetworkBuilder, - EthereumPayloadBuilder, - }, + node::{EthereumAddOns, EthereumNetworkBuilder, EthereumPayloadBuilder}, EthEngineTypes, EthEvmConfig, }; use reth_payload_builder::noop::NoopPayloadBuilderService; @@ -140,7 +136,6 @@ where EthereumNetworkBuilder, TestExecutorBuilder, TestConsensusBuilder, - EthereumEngineValidatorBuilder, >; type AddOns = EthereumAddOns< NodeAdapter>::Components>, @@ -154,7 +149,6 @@ where .network(EthereumNetworkBuilder::default()) .executor(TestExecutorBuilder::default()) .consensus(TestConsensusBuilder::default()) - .engine_validator(EthereumEngineValidatorBuilder::default()) } fn add_ons(&self) -> Self::AddOns { @@ -284,8 +278,6 @@ pub async fn test_exex_context_with_chain_spec( let tasks = TaskManager::current(); let task_executor = tasks.executor(); - let engine_validator = EthereumEngineValidator::new(chain_spec.clone()); - let components = NodeAdapter::, _>, _> { components: Components { transaction_pool, @@ -294,7 +286,6 @@ pub async fn test_exex_context_with_chain_spec( consensus, network, payload_builder, - engine_validator, }, task_executor, provider, diff --git a/crates/node/api/src/node.rs b/crates/node/api/src/node.rs index 40c2a3a60..3173fd2b3 100644 --- a/crates/node/api/src/node.rs +++ b/crates/node/api/src/node.rs @@ -5,7 +5,6 @@ use std::{future::Future, marker::PhantomData}; use alloy_rpc_types_engine::JwtSecret; use 
reth_beacon_consensus::BeaconConsensusEngineHandle; use reth_consensus::Consensus; -use reth_engine_primitives::EngineValidator; use reth_evm::execute::BlockExecutorProvider; use reth_network_api::FullNetwork; use reth_node_core::node_config::NodeConfig; @@ -64,9 +63,6 @@ pub trait FullNodeComponents: FullNodeTypes + Clone + 'static { /// Network API. type Network: FullNetwork; - /// Validator for the engine API. - type EngineValidator: EngineValidator<::Engine>; - /// Returns the transaction pool of the node. fn pool(&self) -> &Self::Pool; @@ -87,9 +83,6 @@ pub trait FullNodeComponents: FullNodeTypes + Clone + 'static { &self, ) -> &PayloadBuilderHandle<::Engine>; - /// Returns the engine validator. - fn engine_validator(&self) -> &Self::EngineValidator; - /// Returns the provider of the node. fn provider(&self) -> &Self::Provider; diff --git a/crates/node/builder/src/builder/states.rs b/crates/node/builder/src/builder/states.rs index c4da466f2..e75a07802 100644 --- a/crates/node/builder/src/builder/states.rs +++ b/crates/node/builder/src/builder/states.rs @@ -97,7 +97,6 @@ impl> FullNodeComponents for NodeAdapter< type Executor = C::Executor; type Network = C::Network; type Consensus = C::Consensus; - type EngineValidator = C::EngineValidator; fn pool(&self) -> &Self::Pool { self.components.pool() @@ -130,10 +129,6 @@ impl> FullNodeComponents for NodeAdapter< fn consensus(&self) -> &Self::Consensus { self.components.consensus() } - - fn engine_validator(&self) -> &Self::EngineValidator { - self.components.engine_validator() - } } impl> Clone for NodeAdapter { diff --git a/crates/node/builder/src/components/builder.rs b/crates/node/builder/src/components/builder.rs index ab8e29929..48a0ba9b5 100644 --- a/crates/node/builder/src/components/builder.rs +++ b/crates/node/builder/src/components/builder.rs @@ -4,7 +4,6 @@ use std::{future::Future, marker::PhantomData}; use reth_consensus::Consensus; use reth_evm::execute::BlockExecutorProvider; -use reth_node_api::{EngineValidator, NodeTypesWithEngine}; use reth_primitives::Header; use reth_transaction_pool::TransactionPool; @@ -16,8 +15,6 @@ use crate::{ BuilderContext, ConfigureEvm, FullNodeTypes, }; -use super::EngineValidatorBuilder; - /// A generic, general purpose and customizable [`NodeComponentsBuilder`] implementation. /// /// This type is stateful and captures the configuration of the node's components. @@ -38,23 +35,22 @@ use super::EngineValidatorBuilder; /// All component builders are captured in the builder state and will be consumed once the node is /// launched. #[derive(Debug)] -pub struct ComponentsBuilder { +pub struct ComponentsBuilder { pool_builder: PoolB, payload_builder: PayloadB, network_builder: NetworkB, executor_builder: ExecB, consensus_builder: ConsB, - engine_validator_builder: EVB, _marker: PhantomData, } -impl - ComponentsBuilder +impl + ComponentsBuilder { /// Configures the node types. 
pub fn node_types( self, - ) -> ComponentsBuilder + ) -> ComponentsBuilder where Types: FullNodeTypes, { @@ -64,7 +60,6 @@ impl network_builder, executor_builder: evm_builder, consensus_builder, - engine_validator_builder, _marker, } = self; ComponentsBuilder { @@ -73,7 +68,6 @@ impl payload_builder, network_builder, consensus_builder, - engine_validator_builder, _marker: Default::default(), } } @@ -86,7 +80,6 @@ impl network_builder: self.network_builder, executor_builder: self.executor_builder, consensus_builder: self.consensus_builder, - engine_validator_builder: self.engine_validator_builder, _marker: self._marker, } } @@ -99,7 +92,6 @@ impl network_builder: self.network_builder, executor_builder: self.executor_builder, consensus_builder: self.consensus_builder, - engine_validator_builder: self.engine_validator_builder, _marker: self._marker, } } @@ -112,7 +104,6 @@ impl network_builder: f(self.network_builder), executor_builder: self.executor_builder, consensus_builder: self.consensus_builder, - engine_validator_builder: self.engine_validator_builder, _marker: self._marker, } } @@ -125,7 +116,6 @@ impl network_builder: self.network_builder, executor_builder: f(self.executor_builder), consensus_builder: self.consensus_builder, - engine_validator_builder: self.engine_validator_builder, _marker: self._marker, } } @@ -138,14 +128,13 @@ impl network_builder: self.network_builder, executor_builder: self.executor_builder, consensus_builder: f(self.consensus_builder), - engine_validator_builder: self.engine_validator_builder, _marker: self._marker, } } } -impl - ComponentsBuilder +impl + ComponentsBuilder where Node: FullNodeTypes, { @@ -156,7 +145,7 @@ where pub fn pool( self, pool_builder: PB, - ) -> ComponentsBuilder + ) -> ComponentsBuilder where PB: PoolBuilder, { @@ -166,7 +155,6 @@ where network_builder, executor_builder: evm_builder, consensus_builder, - engine_validator_builder, _marker, } = self; ComponentsBuilder { @@ -175,14 +163,13 @@ where network_builder, executor_builder: evm_builder, consensus_builder, - engine_validator_builder, _marker, } } } -impl - ComponentsBuilder +impl + ComponentsBuilder where Node: FullNodeTypes, PoolB: PoolBuilder, @@ -194,7 +181,7 @@ where pub fn network( self, network_builder: NB, - ) -> ComponentsBuilder + ) -> ComponentsBuilder where NB: NetworkBuilder, { @@ -204,7 +191,6 @@ where network_builder: _, executor_builder: evm_builder, consensus_builder, - engine_validator_builder, _marker, } = self; ComponentsBuilder { @@ -213,7 +199,6 @@ where network_builder, executor_builder: evm_builder, consensus_builder, - engine_validator_builder, _marker, } } @@ -225,7 +210,7 @@ where pub fn payload( self, payload_builder: PB, - ) -> ComponentsBuilder + ) -> ComponentsBuilder where PB: PayloadServiceBuilder, { @@ -235,7 +220,6 @@ where network_builder, executor_builder: evm_builder, consensus_builder, - engine_validator_builder, _marker, } = self; ComponentsBuilder { @@ -244,7 +228,6 @@ where network_builder, executor_builder: evm_builder, consensus_builder, - engine_validator_builder, _marker, } } @@ -256,7 +239,7 @@ where pub fn executor( self, executor_builder: EB, - ) -> ComponentsBuilder + ) -> ComponentsBuilder where EB: ExecutorBuilder, { @@ -266,7 +249,6 @@ where network_builder, executor_builder: _, consensus_builder, - engine_validator_builder, _marker, } = self; ComponentsBuilder { @@ -275,7 +257,6 @@ where network_builder, executor_builder, consensus_builder, - engine_validator_builder, _marker, } } @@ -287,7 +268,7 @@ where pub fn consensus( 
self, consensus_builder: CB, - ) -> ComponentsBuilder + ) -> ComponentsBuilder where CB: ConsensusBuilder, { @@ -297,38 +278,7 @@ where network_builder, executor_builder, consensus_builder: _, - engine_validator_builder, - _marker, - } = self; - ComponentsBuilder { - pool_builder, - payload_builder, - network_builder, - executor_builder, - consensus_builder, - engine_validator_builder, - _marker, - } - } - /// Configures the consensus builder. - /// - /// This accepts a [`ConsensusBuilder`] instance that will be used to create the node's - /// components for consensus. - pub fn engine_validator( - self, - engine_validator_builder: EngineVB, - ) -> ComponentsBuilder - where - EngineVB: EngineValidatorBuilder, - { - let Self { - pool_builder, - payload_builder, - network_builder, - executor_builder, - consensus_builder, - engine_validator_builder: _, _marker, } = self; ComponentsBuilder { @@ -337,14 +287,13 @@ where network_builder, executor_builder, consensus_builder, - engine_validator_builder, _marker, } } } -impl NodeComponentsBuilder - for ComponentsBuilder +impl NodeComponentsBuilder + for ComponentsBuilder where Node: FullNodeTypes, PoolB: PoolBuilder, @@ -352,16 +301,8 @@ where PayloadB: PayloadServiceBuilder, ExecB: ExecutorBuilder, ConsB: ConsensusBuilder, - EVB: EngineValidatorBuilder, { - type Components = Components< - Node, - PoolB::Pool, - ExecB::EVM, - ExecB::Executor, - ConsB::Consensus, - EVB::Validator, - >; + type Components = Components; async fn build_components( self, @@ -373,7 +314,6 @@ where network_builder, executor_builder: evm_builder, consensus_builder, - engine_validator_builder, _marker, } = self; @@ -382,7 +322,6 @@ where let network = network_builder.build_network(context, pool.clone()).await?; let payload_builder = payload_builder.spawn_payload_service(context, pool.clone()).await?; let consensus = consensus_builder.build_consensus(context).await?; - let engine_validator = engine_validator_builder.build_validator(context).await?; Ok(Components { transaction_pool: pool, @@ -391,12 +330,11 @@ where payload_builder, executor, consensus, - engine_validator, }) } } -impl Default for ComponentsBuilder<(), (), (), (), (), (), ()> { +impl Default for ComponentsBuilder<(), (), (), (), (), ()> { fn default() -> Self { Self { pool_builder: (), @@ -404,7 +342,6 @@ impl Default for ComponentsBuilder<(), (), (), (), (), (), ()> { network_builder: (), executor_builder: (), consensus_builder: (), - engine_validator_builder: (), _marker: Default::default(), } } @@ -430,18 +367,17 @@ pub trait NodeComponentsBuilder: Send { ) -> impl Future> + Send; } -impl NodeComponentsBuilder for F +impl NodeComponentsBuilder for F where Node: FullNodeTypes, F: FnOnce(&BuilderContext) -> Fut + Send, - Fut: Future>> + Send, + Fut: Future>> + Send, Pool: TransactionPool + Unpin + 'static, EVM: ConfigureEvm
<Header = Header>
, Executor: BlockExecutorProvider, Cons: Consensus + Clone + Unpin + 'static, - Val: EngineValidator<::Engine> + Clone + Unpin + 'static, { - type Components = Components; + type Components = Components; fn build_components( self, diff --git a/crates/node/builder/src/components/engine.rs b/crates/node/builder/src/components/engine.rs deleted file mode 100644 index b3ee7cbbb..000000000 --- a/crates/node/builder/src/components/engine.rs +++ /dev/null @@ -1,38 +0,0 @@ -//! Consensus component for the node builder. -use reth_node_api::{EngineValidator, NodeTypesWithEngine}; - -use crate::{BuilderContext, FullNodeTypes}; -use std::future::Future; - -/// A type that knows how to build the engine validator. -pub trait EngineValidatorBuilder: Send { - /// The consensus implementation to build. - type Validator: EngineValidator<::Engine> - + Clone - + Unpin - + 'static; - - /// Creates the engine validator. - fn build_validator( - self, - ctx: &BuilderContext, - ) -> impl Future> + Send; -} - -impl EngineValidatorBuilder for F -where - Node: FullNodeTypes, - Validator: - EngineValidator<::Engine> + Clone + Unpin + 'static, - F: FnOnce(&BuilderContext) -> Fut + Send, - Fut: Future> + Send, -{ - type Validator = Validator; - - fn build_validator( - self, - ctx: &BuilderContext, - ) -> impl Future> { - self(ctx) - } -} diff --git a/crates/node/builder/src/components/mod.rs b/crates/node/builder/src/components/mod.rs index ff1646593..42001fc10 100644 --- a/crates/node/builder/src/components/mod.rs +++ b/crates/node/builder/src/components/mod.rs @@ -9,7 +9,6 @@ mod builder; mod consensus; -mod engine; mod execute; mod network; mod payload; @@ -17,7 +16,6 @@ mod pool; pub use builder::*; pub use consensus::*; -pub use engine::*; pub use execute::*; pub use network::*; pub use payload::*; @@ -27,7 +25,7 @@ use reth_consensus::Consensus; use reth_evm::execute::BlockExecutorProvider; use reth_network::NetworkHandle; use reth_network_api::FullNetwork; -use reth_node_api::{EngineValidator, NodeTypesWithEngine}; +use reth_node_api::NodeTypesWithEngine; use reth_payload_builder::PayloadBuilderHandle; use reth_primitives::Header; use reth_transaction_pool::TransactionPool; @@ -55,9 +53,6 @@ pub trait NodeComponents: Clone + Unpin + Send + Sync + 'stati /// Network API. type Network: FullNetwork; - /// Validator for the engine API. - type EngineValidator: EngineValidator<::Engine>; - /// Returns the transaction pool of the node. fn pool(&self) -> &Self::Pool; @@ -75,16 +70,13 @@ pub trait NodeComponents: Clone + Unpin + Send + Sync + 'stati /// Returns the handle to the payload builder service. fn payload_builder(&self) -> &PayloadBuilderHandle<::Engine>; - - /// Returns the engine validator. - fn engine_validator(&self) -> &Self::EngineValidator; } /// All the components of the node. /// /// This provides access to all the components of the node. #[derive(Debug)] -pub struct Components { +pub struct Components { /// The transaction pool of the node. pub transaction_pool: Pool, /// The node's EVM configuration, defining settings for the Ethereum Virtual Machine. @@ -97,26 +89,22 @@ pub struct Components::Engine>, - /// The validator for the engine API. - pub engine_validator: Validator, } -impl NodeComponents - for Components +impl NodeComponents + for Components where Node: FullNodeTypes, Pool: TransactionPool + Unpin + 'static, EVM: ConfigureEvm
, Executor: BlockExecutorProvider, Cons: Consensus + Clone + Unpin + 'static, - Val: EngineValidator<::Engine> + Clone + Unpin + 'static, { type Pool = Pool; type Evm = EVM; type Executor = Executor; type Consensus = Cons; type Network = NetworkHandle; - type EngineValidator = Val; fn pool(&self) -> &Self::Pool { &self.transaction_pool @@ -143,21 +131,15 @@ where ) -> &PayloadBuilderHandle<::Engine> { &self.payload_builder } - - fn engine_validator(&self) -> &Self::EngineValidator { - &self.engine_validator - } } -impl Clone - for Components +impl Clone for Components where Node: FullNodeTypes, Pool: TransactionPool, EVM: ConfigureEvm
, Executor: BlockExecutorProvider, Cons: Consensus + Clone, - Val: EngineValidator<::Engine>, { fn clone(&self) -> Self { Self { @@ -167,7 +149,6 @@ where consensus: self.consensus.clone(), network: self.network.clone(), payload_builder: self.payload_builder.clone(), - engine_validator: self.engine_validator.clone(), } } } diff --git a/crates/node/builder/src/rpc.rs b/crates/node/builder/src/rpc.rs index d8cce9217..18293118d 100644 --- a/crates/node/builder/src/rpc.rs +++ b/crates/node/builder/src/rpc.rs @@ -2,6 +2,7 @@ use std::{ fmt::{self, Debug}, + future::Future, marker::PhantomData, ops::{Deref, DerefMut}, }; @@ -9,7 +10,7 @@ use std::{ use alloy_rpc_types::engine::ClientVersionV1; use futures::TryFutureExt; use reth_node_api::{ - AddOnsContext, FullNodeComponents, NodeAddOns, NodeTypes, NodeTypesWithEngine, + AddOnsContext, EngineValidator, FullNodeComponents, NodeAddOns, NodeTypes, NodeTypesWithEngine, }; use reth_node_core::{ node_config::NodeConfig, @@ -327,31 +328,38 @@ where /// Node add-ons containing RPC server configuration, with customizable eth API handler. #[allow(clippy::type_complexity)] -pub struct RpcAddOns { +pub struct RpcAddOns { /// Additional RPC add-ons. pub hooks: RpcHooks, /// Builder for `EthApi` eth_api_builder: Box) -> EthApi + Send + Sync>, + /// Engine validator + engine_validator_builder: EV, _pd: PhantomData<(Node, EthApi)>, } -impl Debug for RpcAddOns { +impl Debug + for RpcAddOns +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RpcAddOns") .field("hooks", &self.hooks) .field("eth_api_builder", &"...") + .field("engine_validator_builder", &self.engine_validator_builder) .finish() } } -impl RpcAddOns { +impl RpcAddOns { /// Creates a new instance of the RPC add-ons. pub fn new( eth_api_builder: impl FnOnce(&EthApiBuilderCtx) -> EthApi + Send + Sync + 'static, + engine_validator_builder: EV, ) -> Self { Self { hooks: RpcHooks::default(), eth_api_builder: Box::new(eth_api_builder), + engine_validator_builder, _pd: PhantomData, } } @@ -377,23 +385,28 @@ impl RpcAddOns { } } -impl> Default - for RpcAddOns +impl Default for RpcAddOns +where + Node: FullNodeComponents, + EthApi: EthApiTypes + EthApiBuilder, + EV: Default, { fn default() -> Self { - Self::new(EthApi::build) + Self::new(EthApi::build, EV::default()) } } -impl NodeAddOns for RpcAddOns +impl NodeAddOns for RpcAddOns where N: FullNodeComponents, EthApi: EthApiTypes + FullEthApiServer + AddDevSigners + Unpin + 'static, + EV: EngineValidatorBuilder, { type Handle = RpcHandle; async fn launch_add_ons(self, ctx: AddOnsContext<'_, N>) -> eyre::Result { let AddOnsContext { node, config, beacon_engine_handle, jwt_secret } = ctx; + let Self { eth_api_builder, engine_validator_builder, hooks, _pd: _ } = self; let client = ClientVersionV1 { code: CLIENT_CODE, @@ -411,7 +424,7 @@ where Box::new(node.task_executor().clone()), client, EngineCapabilities::default(), - node.engine_validator().clone(), + engine_validator_builder.build(&ctx).await?, ); info!(target: "reth::cli", "Engine API handler initialized"); @@ -427,7 +440,7 @@ where .with_executor(node.task_executor().clone()) .with_evm_config(node.evm_config().clone()) .with_block_executor(node.block_executor().clone()) - .build_with_auth_server(module_config, engine_api, self.eth_api_builder); + .build_with_auth_server(module_config, engine_api, eth_api_builder); // in dev mode we generate 20 random dev-signer accounts if config.dev.dev { @@ -443,7 +456,7 @@ where auth_module: &mut auth_module, }; - let RpcHooks { 
on_rpc_started, extend_rpc_modules } = self.hooks; + let RpcHooks { on_rpc_started, extend_rpc_modules } = hooks; extend_rpc_modules.extend_rpc_modules(ctx)?; @@ -503,7 +516,7 @@ pub trait RethRpcAddOns: fn hooks_mut(&mut self) -> &mut RpcHooks; } -impl RethRpcAddOns for RpcAddOns +impl RethRpcAddOns for RpcAddOns where Self: NodeAddOns>, { @@ -525,3 +538,36 @@ impl EthApiBuilder for EthApi: Send { + /// The consensus implementation to build. + type Validator: EngineValidator<::Engine> + + Clone + + Unpin + + 'static; + + /// Creates the engine validator. + fn build( + self, + ctx: &AddOnsContext<'_, Node>, + ) -> impl Future> + Send; +} + +impl EngineValidatorBuilder for F +where + Node: FullNodeComponents, + Validator: + EngineValidator<::Engine> + Clone + Unpin + 'static, + F: FnOnce(&AddOnsContext<'_, Node>) -> Fut + Send, + Fut: Future> + Send, +{ + type Validator = Validator; + + fn build( + self, + ctx: &AddOnsContext<'_, Node>, + ) -> impl Future> { + self(ctx) + } +} diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index c2576d318..175b2d4bf 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -6,14 +6,16 @@ use reth_basic_payload_builder::{BasicPayloadJobGenerator, BasicPayloadJobGenera use reth_chainspec::{EthChainSpec, Hardforks}; use reth_evm::ConfigureEvm; use reth_network::{NetworkConfig, NetworkHandle, NetworkManager}; -use reth_node_api::{EngineValidator, FullNodeComponents, NodeAddOns, NodePrimitives}; +use reth_node_api::{ + AddOnsContext, EngineValidator, FullNodeComponents, NodeAddOns, NodePrimitives, +}; use reth_node_builder::{ components::{ - ComponentsBuilder, ConsensusBuilder, EngineValidatorBuilder, ExecutorBuilder, - NetworkBuilder, PayloadServiceBuilder, PoolBuilder, PoolBuilderConfigOverrides, + ComponentsBuilder, ConsensusBuilder, ExecutorBuilder, NetworkBuilder, + PayloadServiceBuilder, PoolBuilder, PoolBuilderConfigOverrides, }, node::{FullNodeTypes, NodeTypes, NodeTypesWithEngine}, - rpc::{RethRpcAddOns, RpcAddOns, RpcHandle}, + rpc::{EngineValidatorBuilder, RethRpcAddOns, RpcAddOns, RpcHandle}, BuilderContext, Node, NodeAdapter, NodeComponentsBuilder, PayloadBuilderConfig, }; use reth_optimism_chainspec::OpChainSpec; @@ -68,7 +70,6 @@ impl OptimismNode { OptimismNetworkBuilder, OptimismExecutorBuilder, OptimismConsensusBuilder, - OptimismEngineValidatorBuilder, > where Node: FullNodeTypes< @@ -86,7 +87,6 @@ impl OptimismNode { }) .executor(OptimismExecutorBuilder::default()) .consensus(OptimismConsensusBuilder::default()) - .engine_validator(OptimismEngineValidatorBuilder::default()) } } @@ -103,7 +103,6 @@ where OptimismNetworkBuilder, OptimismExecutorBuilder, OptimismConsensusBuilder, - OptimismEngineValidatorBuilder, >; type AddOns = OptimismAddOns< @@ -131,7 +130,9 @@ impl NodeTypesWithEngine for OptimismNode { /// Add-ons w.r.t. optimism. #[derive(Debug)] -pub struct OptimismAddOns(pub RpcAddOns>); +pub struct OptimismAddOns( + pub RpcAddOns, OptimismEngineValidatorBuilder>, +); impl Default for OptimismAddOns { fn default() -> Self { @@ -142,12 +143,14 @@ impl Default for OptimismAddOns { impl OptimismAddOns { /// Create a new instance with the given `sequencer_http` URL. 
pub fn new(sequencer_http: Option) -> Self { - Self(RpcAddOns::new(move |ctx| OpEthApi::new(ctx, sequencer_http))) + Self(RpcAddOns::new(move |ctx| OpEthApi::new(ctx, sequencer_http), Default::default())) } } -impl>> NodeAddOns - for OptimismAddOns +impl NodeAddOns for OptimismAddOns +where + N: FullNodeComponents>, + OptimismEngineValidator: EngineValidator<::Engine>, { type Handle = RpcHandle>; @@ -159,8 +162,10 @@ impl>> NodeAddOn } } -impl>> RethRpcAddOns - for OptimismAddOns +impl RethRpcAddOns for OptimismAddOns +where + N: FullNodeComponents>, + OptimismEngineValidator: EngineValidator<::Engine>, { type EthApi = OpEthApi; @@ -458,12 +463,12 @@ pub struct OptimismEngineValidatorBuilder; impl EngineValidatorBuilder for OptimismEngineValidatorBuilder where Types: NodeTypesWithEngine, - Node: FullNodeTypes, + Node: FullNodeComponents, OptimismEngineValidator: EngineValidator, { type Validator = OptimismEngineValidator; - async fn build_validator(self, ctx: &BuilderContext) -> eyre::Result { - Ok(OptimismEngineValidator::new(ctx.chain_spec())) + async fn build(self, ctx: &AddOnsContext<'_, Node>) -> eyre::Result { + Ok(OptimismEngineValidator::new(ctx.config.chain.clone())) } } diff --git a/examples/custom-engine-types/src/main.rs b/examples/custom-engine-types/src/main.rs index f833da862..135c4f3f2 100644 --- a/examples/custom-engine-types/src/main.rs +++ b/examples/custom-engine-types/src/main.rs @@ -34,12 +34,15 @@ use alloy_rpc_types::{ use reth::{ api::PayloadTypes, builder::{ - components::{ComponentsBuilder, EngineValidatorBuilder, PayloadServiceBuilder}, + components::{ComponentsBuilder, PayloadServiceBuilder}, node::{NodeTypes, NodeTypesWithEngine}, + rpc::{EngineValidatorBuilder, RpcAddOns}, BuilderContext, FullNodeTypes, Node, NodeAdapter, NodeBuilder, NodeComponentsBuilder, PayloadBuilderConfig, }, + network::NetworkHandle, providers::{CanonStateSubscriptions, StateProviderFactory}, + rpc::eth::EthApi, tasks::TaskManager, transaction_pool::TransactionPool, }; @@ -50,13 +53,13 @@ use reth_basic_payload_builder::{ use reth_chainspec::{Chain, ChainSpec, ChainSpecProvider}; use reth_node_api::{ payload::{EngineApiMessageVersion, EngineObjectValidationError, PayloadOrAttributes}, - validate_version_specific_fields, EngineTypes, EngineValidator, PayloadAttributes, - PayloadBuilderAttributes, + validate_version_specific_fields, AddOnsContext, EngineTypes, EngineValidator, + FullNodeComponents, PayloadAttributes, PayloadBuilderAttributes, }; use reth_node_core::{args::RpcServerArgs, node_config::NodeConfig}; use reth_node_ethereum::{ node::{ - EthereumAddOns, EthereumConsensusBuilder, EthereumExecutorBuilder, EthereumNetworkBuilder, + EthereumConsensusBuilder, EthereumExecutorBuilder, EthereumNetworkBuilder, EthereumPoolBuilder, }, EthEvmConfig, @@ -202,12 +205,14 @@ pub struct CustomEngineValidatorBuilder; impl EngineValidatorBuilder for CustomEngineValidatorBuilder where - N: FullNodeTypes>, + N: FullNodeComponents< + Types: NodeTypesWithEngine, + >, { type Validator = CustomEngineValidator; - async fn build_validator(self, ctx: &BuilderContext) -> eyre::Result { - Ok(CustomEngineValidator { chain_spec: ctx.chain_spec() }) + async fn build(self, ctx: &AddOnsContext<'_, N>) -> eyre::Result { + Ok(CustomEngineValidator { chain_spec: ctx.config.chain.clone() }) } } @@ -226,6 +231,18 @@ impl NodeTypesWithEngine for MyCustomNode { type Engine = CustomEngineTypes; } +/// Custom addons configuring RPC types +pub type MyNodeAddOns = RpcAddOns< + N, + EthApi< + ::Provider, + ::Pool, + 
NetworkHandle, + ::Evm, + >, + CustomEngineValidatorBuilder, +>; + /// Implement the Node trait for the custom node /// /// This provides a preset configuration for the node @@ -240,9 +257,8 @@ where EthereumNetworkBuilder, EthereumExecutorBuilder, EthereumConsensusBuilder, - CustomEngineValidatorBuilder, >; - type AddOns = EthereumAddOns< + type AddOns = MyNodeAddOns< NodeAdapter>::Components>, >; @@ -254,11 +270,10 @@ where .network(EthereumNetworkBuilder::default()) .executor(EthereumExecutorBuilder::default()) .consensus(EthereumConsensusBuilder::default()) - .engine_validator(CustomEngineValidatorBuilder::default()) } fn add_ons(&self) -> Self::AddOns { - EthereumAddOns::default() + MyNodeAddOns::default() } } From 3bd695ee630f3979a2ea53f2308bdb1c9b2f18c1 Mon Sep 17 00:00:00 2001 From: Oliver Date: Sat, 19 Oct 2024 14:48:35 +0200 Subject: [PATCH 142/425] feat: update el requests for devnet 4 (#11865) Co-authored-by: Matthias Seitz --- Cargo.lock | 229 ++++++++++++------ Cargo.toml | 127 +++++----- bin/reth-bench/src/authenticated_transport.rs | 3 +- bin/reth-bench/src/valid_payload.rs | 8 - .../src/commands/debug_cmd/replay_engine.rs | 4 +- crates/blockchain-tree/src/blockchain_tree.rs | 1 - crates/chain-state/src/in_memory.rs | 3 +- crates/chain-state/src/test_utils.rs | 6 +- crates/chainspec/Cargo.toml | 1 - crates/chainspec/src/spec.rs | 10 +- crates/cli/commands/src/stage/drop.rs | 1 - crates/consensus/auto-seal/Cargo.toml | 1 + crates/consensus/auto-seal/src/lib.rs | 10 +- crates/consensus/beacon/src/engine/handle.rs | 11 +- crates/consensus/beacon/src/engine/message.rs | 5 + crates/consensus/beacon/src/engine/mod.rs | 23 +- .../consensus/beacon/src/engine/test_utils.rs | 2 +- crates/consensus/common/src/validation.rs | 27 +-- crates/consensus/consensus/Cargo.toml | 1 + crates/consensus/consensus/src/lib.rs | 27 ++- crates/e2e-test-utils/src/transaction.rs | 3 +- .../engine/invalid-block-hooks/src/witness.rs | 2 +- crates/engine/local/src/miner.rs | 2 + crates/engine/tree/src/tree/mod.rs | 26 +- crates/engine/util/src/engine_store.rs | 8 +- crates/engine/util/src/reorg.rs | 74 +++--- crates/engine/util/src/skip_new_payload.rs | 14 +- crates/ethereum/consensus/Cargo.toml | 1 + crates/ethereum/consensus/src/lib.rs | 8 +- crates/ethereum/consensus/src/validation.rs | 19 +- .../ethereum/engine-primitives/src/payload.rs | 19 +- crates/ethereum/evm/src/eip6110.rs | 91 +++---- crates/ethereum/evm/src/execute.rs | 61 +++-- crates/ethereum/evm/src/strategy.rs | 52 ++-- crates/ethereum/payload/Cargo.toml | 3 +- crates/ethereum/payload/src/lib.rs | 17 +- crates/evm/execution-types/Cargo.toml | 7 +- crates/evm/execution-types/src/execute.rs | 6 +- .../execution-types/src/execution_outcome.rs | 86 ++----- crates/evm/src/execute.rs | 20 +- crates/evm/src/system_calls/eip7002.rs | 49 +--- crates/evm/src/system_calls/eip7251.rs | 53 +--- crates/evm/src/system_calls/mod.rs | 19 +- crates/evm/src/test_utils.rs | 6 +- crates/exex/exex/src/backfill/test_utils.rs | 4 +- crates/net/eth-wire-types/src/blocks.rs | 10 +- crates/net/eth-wire-types/src/header.rs | 6 +- crates/net/p2p/src/full_block.rs | 16 -- crates/optimism/evm/Cargo.toml | 3 +- crates/optimism/evm/src/execute.rs | 7 +- crates/optimism/evm/src/lib.rs | 1 - crates/optimism/evm/src/strategy.rs | 9 +- crates/optimism/payload/src/builder.rs | 4 +- crates/optimism/payload/src/payload.rs | 6 +- crates/optimism/primitives/src/bedrock.rs | 2 +- crates/optimism/rpc/src/eth/receipt.rs | 2 - crates/optimism/storage/src/lib.rs | 3 +- 
crates/payload/validator/Cargo.toml | 2 + crates/payload/validator/src/lib.rs | 13 +- crates/primitives-traits/Cargo.toml | 2 +- crates/primitives-traits/src/block/body.rs | 17 +- .../src/header/test_utils.rs | 2 +- crates/primitives-traits/src/lib.rs | 3 - crates/primitives-traits/src/mod.rs | 0 crates/primitives-traits/src/request.rs | 58 ----- crates/primitives/Cargo.toml | 2 - crates/primitives/src/alloy_compat.rs | 3 - crates/primitives/src/block.rs | 71 +----- crates/primitives/src/lib.rs | 2 +- crates/primitives/src/proofs.rs | 11 +- crates/primitives/src/transaction/mod.rs | 40 +-- crates/revm/Cargo.toml | 3 + crates/revm/src/batch.rs | 7 +- crates/rpc/rpc-api/src/engine.rs | 7 +- crates/rpc/rpc-engine-api/src/engine_api.rs | 42 +++- crates/rpc/rpc-engine-api/tests/it/payload.rs | 15 +- .../rpc-eth-api/src/helpers/pending_block.rs | 20 +- crates/rpc/rpc-eth-types/src/receipt.rs | 2 - crates/rpc/rpc-types-compat/src/block.rs | 4 +- .../rpc-types-compat/src/engine/payload.rs | 137 ++--------- crates/rpc/rpc/src/debug.rs | 2 +- crates/rpc/rpc/src/otterscan.rs | 1 - crates/rpc/rpc/src/txpool.rs | 2 +- crates/stages/stages/src/stages/bodies.rs | 14 -- .../codecs/src/alloy/authorization_list.rs | 4 +- crates/storage/codecs/src/alloy/header.rs | 10 +- crates/storage/codecs/src/alloy/mod.rs | 3 - crates/storage/codecs/src/alloy/request.rs | 40 --- crates/storage/db-api/src/models/mod.rs | 4 +- crates/storage/db/src/tables/mod.rs | 7 +- .../src/providers/blockchain_provider.rs | 55 +---- .../src/providers/database/metrics.rs | 4 - .../provider/src/providers/database/mod.rs | 14 +- .../src/providers/database/provider.rs | 150 +++--------- crates/storage/provider/src/providers/mod.rs | 12 +- .../src/providers/static_file/manager.rs | 15 +- .../storage/provider/src/test_utils/blocks.rs | 1 - .../storage/provider/src/test_utils/mock.rs | 16 +- .../storage/provider/src/test_utils/noop.rs | 12 +- crates/storage/storage-api/src/block.rs | 5 +- crates/storage/storage-api/src/lib.rs | 3 - crates/storage/storage-api/src/requests.rs | 14 -- docs/crates/db.md | 4 +- testing/ef-tests/src/models.rs | 4 +- testing/testing-utils/Cargo.toml | 4 +- testing/testing-utils/src/generators.rs | 47 +--- 106 files changed, 800 insertions(+), 1329 deletions(-) delete mode 100644 crates/primitives-traits/src/mod.rs delete mode 100644 crates/primitives-traits/src/request.rs delete mode 100644 crates/storage/codecs/src/alloy/request.rs delete mode 100644 crates/storage/storage-api/src/requests.rs diff --git a/Cargo.lock b/Cargo.lock index 98bfe1f8e..82f42b07b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -112,9 +112,9 @@ dependencies = [ [[package]] name = "alloy-consensus" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "705687d5bfd019fee57cf9e206b27b30a9a9617535d5590a02b171e813208f8e" +checksum = "42642aed67f938363d9c7543e5ca4163cfb4205d9ec15fe933dc4e865d2932dd" dependencies = [ "alloy-eips", "alloy-primitives", @@ -161,9 +161,9 @@ dependencies = [ [[package]] name = "alloy-eip7702" -version = "0.1.1" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea59dc42102bc9a1905dc57901edc6dd48b9f38115df86c7d252acba70d71d04" +checksum = "eeffd2590ce780ddfaa9d0ae340eb2b4e08627650c4676eef537cef0b4bf535d" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -176,9 +176,9 @@ dependencies = [ [[package]] name = "alloy-eips" -version = "0.4.2" +version = "0.5.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ffb906284a1e1f63c4607da2068c8197458a352d0b3e9796e67353d72a9be85" +checksum = "9fbc52a30df46f9831ed74557dfad0d94b12420393662a8b9ef90e2d6c8cb4b0" dependencies = [ "alloy-eip2930", "alloy-eip7702", @@ -188,6 +188,8 @@ dependencies = [ "arbitrary", "c-kzg", "derive_more 1.0.0", + "ethereum_ssz", + "ethereum_ssz_derive", "once_cell", "serde", "sha2 0.10.8", @@ -195,9 +197,9 @@ dependencies = [ [[package]] name = "alloy-genesis" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8429cf4554eed9b40feec7f4451113e76596086447550275e3def933faf47ce3" +checksum = "0787d1688b9806290313cc335d416cc7ee39b11e3245f3d218544c62572d92ba" dependencies = [ "alloy-primitives", "alloy-serde", @@ -218,9 +220,9 @@ dependencies = [ [[package]] name = "alloy-json-rpc" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8fa8a1a3c4cbd221f2b8e3693aeb328fca79a757fe556ed08e47bbbc2a70db7" +checksum = "d55a16a5f9ca498a217c060414bcd1c43e934235dc8058b31b87dcd69ff4f105" dependencies = [ "alloy-primitives", "alloy-sol-types", @@ -232,9 +234,9 @@ dependencies = [ [[package]] name = "alloy-network" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85fa23a6a9d612b52e402c995f2d582c25165ec03ac6edf64c861a76bc5b87cd" +checksum = "3d236a8c3e1d5adc09b1b63c81815fc9b757d9a4ba9482cc899f9679b55dd437" dependencies = [ "alloy-consensus", "alloy-eips", @@ -253,9 +255,9 @@ dependencies = [ [[package]] name = "alloy-network-primitives" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "801492711d4392b2ccf5fc0bc69e299fa1aab15167d74dcaa9aab96a54f684bd" +checksum = "cd15a0990fa8a56d85a42d6a689719aa4eebf5e2f1a5c5354658c0bfc52cac9a" dependencies = [ "alloy-consensus", "alloy-eips", @@ -266,9 +268,9 @@ dependencies = [ [[package]] name = "alloy-node-bindings" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f1334a738aa1710cb8227441b3fcc319202ce78e967ef37406940242df4a454" +checksum = "2249f3c3ce446cf4063fe3d1aa7530823643c2706a1cc63045e0683ebc497a0a" dependencies = [ "alloy-genesis", "alloy-primitives", @@ -315,9 +317,9 @@ dependencies = [ [[package]] name = "alloy-provider" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcfaa4ffec0af04e3555686b8aacbcdf7d13638133a0672749209069750f78a6" +checksum = "316f522bb6f9ac3805132112197957013b570e20cfdad058e8339dae6030c849" dependencies = [ "alloy-chains", "alloy-consensus", @@ -341,21 +343,24 @@ dependencies = [ "futures", "futures-utils-wasm", "lru", + "parking_lot 0.12.3", "pin-project", "reqwest", + "schnellru", "serde", "serde_json", "thiserror", "tokio", "tracing", "url", + "wasmtimer", ] [[package]] name = "alloy-pubsub" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f32cef487122ae75c91eb50154c70801d71fabdb976fec6c49e0af5e6486ab15" +checksum = "222cd9b17b1c5ad48de51a88ffbdb17f17145170288f22662f80ac88739125e6" dependencies = [ "alloy-json-rpc", "alloy-primitives", @@ -394,9 +399,9 @@ dependencies = [ [[package]] name = "alloy-rpc-client" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"370143ed581aace6e663342d21d209c6b2e34ee6142f7d6675adb518deeaf0dc" +checksum = "5b2ab59712c594c9624aaa69e38e4d38f180cb569f1fa46cdaf8c21fd50793e5" dependencies = [ "alloy-json-rpc", "alloy-primitives", @@ -414,13 +419,14 @@ dependencies = [ "tower 0.5.1", "tracing", "url", + "wasmtimer", ] [[package]] name = "alloy-rpc-types" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ffc534b7919e18f35e3aa1f507b6f3d9d92ec298463a9f6beaac112809d8d06" +checksum = "ba21284319e12d053baa204d438db6c1577aedd94c1298e4becefdac1f9cec87" dependencies = [ "alloy-primitives", "alloy-rpc-types-engine", @@ -431,9 +437,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-admin" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb520ed46cc5b7d8c014a73fdd77b6a310383a2a5c0a5ae3c9b8055881f062b7" +checksum = "416cc9f391d0b876c4c8da85f7131e771a88a55b917cc9a35e1724d9409e3b1c" dependencies = [ "alloy-genesis", "alloy-primitives", @@ -443,9 +449,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-anvil" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d780adaa5d95b07ad92006b2feb68ecfa7e2015f7d5976ceaac4c906c73ebd07" +checksum = "ba40bea86c3102b9ed9b3be579e32e0b3e54e766248d873de5fc0437238c8df2" dependencies = [ "alloy-primitives", "alloy-serde", @@ -454,9 +460,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-beacon" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a8dc5980fe30203d698627cddb5f0cedc57f900c8b5e1229c8b9448e37acb4a" +checksum = "b535781fe224c101c3d957b514cb9f438d165ff0280e5c0b2f87a0d9a2950593" dependencies = [ "alloy-eips", "alloy-primitives", @@ -468,9 +474,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-debug" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59d8f8c5bfb160081a772f1f68eb9a37e8929c4ef74e5d01f5b78c2b645a5c5e" +checksum = "4303deacf4cbf12ed4431a5a1bbc3284f0defb4b8b72d9aa2b888656cc5ae657" dependencies = [ "alloy-primitives", "serde", @@ -478,9 +484,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-engine" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0285c4c09f838ab830048b780d7f4a4f460f309aa1194bb049843309524c64c" +checksum = "44848fced3b42260b9cb61f22102246636dfe5a2d0132f8d10a617df3cb1a74b" dependencies = [ "alloy-consensus", "alloy-eips", @@ -488,6 +494,8 @@ dependencies = [ "alloy-rlp", "alloy-serde", "derive_more 1.0.0", + "ethereum_ssz", + "ethereum_ssz_derive", "jsonrpsee-types", "jsonwebtoken", "rand 0.8.5", @@ -497,9 +505,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-eth" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413f4aa3ccf2c3e4234a047c5fa4727916d7daf25a89f9b765df0ba09784fd87" +checksum = "35894711990019fafff0012b82b9176cbb744516eb2a9bbe6b8e5cae522163ee" dependencies = [ "alloy-consensus", "alloy-eips", @@ -517,9 +525,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-mev" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cec23ce56c869eec5f6b6fd6a8a92b5aa0cfaf8d7be3a96502e537554dc7430" +checksum = "cac6250cad380a005ecb5ffc6d2facf03df0e72628d819a63dd8c3ade7a766ff" dependencies = [ "alloy-eips", 
"alloy-primitives", @@ -530,9 +538,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-trace" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "017cad3e5793c5613588c1f9732bcbad77e820ba7d0feaba3527749f856fdbc5" +checksum = "f568c5624881896d8a25e19acbdcbabadd8df339427ea2f10b2ee447d57c4509" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -544,9 +552,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-txpool" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b230e321c416be7f50530159392b4c41a45596d40d97e185575bcd0b545e521" +checksum = "d4a37d2e1ed9b7daf20ad0b3e0092613cbae46737e0e988b23caa556c7067ce6" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -556,9 +564,9 @@ dependencies = [ [[package]] name = "alloy-serde" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dff0ab1cdd43ca001e324dc27ee0e8606bd2161d6623c63e0e0b8c4dfc13600" +checksum = "2843c195675f06b29c09a4315cccdc233ab5bdc7c0a3775909f9f0cab5e9ae0f" dependencies = [ "alloy-primitives", "arbitrary", @@ -568,9 +576,9 @@ dependencies = [ [[package]] name = "alloy-signer" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fd4e0ad79c81a27ca659be5d176ca12399141659fef2bcbfdc848da478f4504" +checksum = "88b2a00d9803dfef99963303ffe41a7bf2221f3342f0a503d6741a9f4a18e5e5" dependencies = [ "alloy-primitives", "async-trait", @@ -582,9 +590,9 @@ dependencies = [ [[package]] name = "alloy-signer-local" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "494e0a256f3e99f2426f994bcd1be312c02cb8f88260088dacb33a8b8936475f" +checksum = "5a2505d4f8c98dcae86152d58d549cb4bcf953f8352fca903410e0a0ef535571" dependencies = [ "alloy-consensus", "alloy-network", @@ -670,9 +678,9 @@ dependencies = [ [[package]] name = "alloy-transport" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ac3e97dad3d31770db0fc89bd6a63b789fbae78963086733f960cf32c483904" +checksum = "9dc2c8f6b8c227ef0398f702d954c4ab572c2ead3c1ed4a5157aa1cbaf959747" dependencies = [ "alloy-json-rpc", "base64 0.22.1", @@ -685,13 +693,14 @@ dependencies = [ "tower 0.5.1", "tracing", "url", + "wasmtimer", ] [[package]] name = "alloy-transport-http" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b367dcccada5b28987c2296717ee04b9a5637aacd78eacb1726ef211678b5212" +checksum = "dd328e990d57f4c4e63899fb2c26877597d6503f8e0022a3d71b2d753ecbfc0c" dependencies = [ "alloy-json-rpc", "alloy-transport", @@ -704,9 +713,9 @@ dependencies = [ [[package]] name = "alloy-transport-ipc" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b90cf9cde7f2fce617da52768ee28f522264b282d148384a4ca0ea85af04fa3a" +checksum = "89aea26aaf1d67904a7ff95ec4a24ddd5e7d419a6945f641b885962d7c2803e2" dependencies = [ "alloy-json-rpc", "alloy-pubsub", @@ -723,9 +732,9 @@ dependencies = [ [[package]] name = "alloy-transport-ws" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7153b88690de6a50bba81c11e1d706bc41dbb90126d607404d60b763f6a3947f" +checksum = "e222e950ecc4ea12fbfb524b9a2275cac2cd5f57c8ce25bcaf1bd3ff80dd8fc8" dependencies = 
[ "alloy-pubsub", "alloy-transport", @@ -2665,6 +2674,47 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "ethereum_serde_utils" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70cbccfccf81d67bff0ab36e591fa536c8a935b078a7b0e58c1d00d418332fc9" +dependencies = [ + "alloy-primitives", + "hex", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "ethereum_ssz" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfbba28f4f3f32d92c06a64f5bf6c4537b5d4e21f28c689bd2bbaecfea4e0d3e" +dependencies = [ + "alloy-primitives", + "derivative", + "ethereum_serde_utils", + "itertools 0.13.0", + "serde", + "serde_derive", + "smallvec", + "typenum", +] + +[[package]] +name = "ethereum_ssz_derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d37845ba7c16bf4be8be4b5786f03a2ba5f2fda0d7f9e7cb2282f69cff420d7" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.79", +] + [[package]] name = "event-listener" version = "2.5.3" @@ -5157,9 +5207,9 @@ checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "op-alloy-consensus" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ea7162170c6f3cad8f67f4dd7108e3f78349fd553da5b8bebff1e7ef8f38896" +checksum = "99d49163f952491820088dd0e66f3a35d63337c3066eceff0a931bf83a8e2101" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5175,9 +5225,9 @@ dependencies = [ [[package]] name = "op-alloy-genesis" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f3d31dfbbd8dd898c7512f8ce7d30103980485416f668566100b0ed0994b958" +checksum = "8e46c2ab105f679f0cbfbc3fb762f3456d4b8556c841e667fc8f3c2226eb6c1e" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5189,9 +5239,9 @@ dependencies = [ [[package]] name = "op-alloy-network" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d113b325527ba7da271a8793f1c14bdf7f035ce9e0611e668c36fc6812568c7f" +checksum = "75ff1ea317441b9eb6317b24d13f9088e3b14ef48b15bfb6a125ca404df036d8" dependencies = [ "alloy-consensus", "alloy-network", @@ -5203,9 +5253,9 @@ dependencies = [ [[package]] name = "op-alloy-protocol" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "310873e4fbfc41986716c4fb6000a8b49d025d932d2c261af58271c434b05288" +checksum = "6c439457b2a1791325603fc18a94cc175e0b4b1127f11ff8a45071f05d044dcb" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5220,9 +5270,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "323c65880e2561aa87f74f8af260fd15b9cc930c448c88a60ae95af86c88c634" +checksum = "9c9556293835232b019ec9c6fd84e4265a3151111af60ea09b5b513e3dbed41c" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5237,16 +5287,18 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types-engine" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349e7b420f45d1a00216ec4c65fcf3f0057a841bc39732c405c85ae782b94121" +checksum = "8a42a5ac4e07ed226b6a2aeefaad9b2cc7ec160e372ba626a4214d681a355fc2" dependencies = [ "alloy-primitives", "alloy-rpc-types-engine", "alloy-serde", 
"derive_more 1.0.0", + "ethereum_ssz", "op-alloy-protocol", "serde", + "snap", ] [[package]] @@ -6310,6 +6362,7 @@ dependencies = [ name = "reth-auto-seal-consensus" version = "1.1.0" dependencies = [ + "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", "futures-util", @@ -6700,6 +6753,7 @@ dependencies = [ name = "reth-consensus" version = "1.1.0" dependencies = [ + "alloy-eips", "alloy-primitives", "auto_impl", "derive_more 1.0.0", @@ -7275,6 +7329,7 @@ name = "reth-ethereum-consensus" version = "1.1.0" dependencies = [ "alloy-consensus", + "alloy-eips", "alloy-primitives", "reth-chainspec", "reth-consensus", @@ -7327,6 +7382,7 @@ name = "reth-ethereum-payload-builder" version = "1.1.0" dependencies = [ "alloy-consensus", + "alloy-eips", "alloy-primitives", "reth-basic-payload-builder", "reth-chain-state", @@ -8343,6 +8399,8 @@ dependencies = [ name = "reth-payload-validator" version = "1.1.0" dependencies = [ + "alloy-eips", + "alloy-primitives", "alloy-rpc-types", "reth-chainspec", "reth-primitives", @@ -8524,6 +8582,7 @@ dependencies = [ name = "reth-revm" version = "1.1.0" dependencies = [ + "alloy-eips", "alloy-primitives", "reth-chainspec", "reth-consensus-common", @@ -9274,9 +9333,9 @@ dependencies = [ [[package]] name = "revm" -version = "14.0.3" +version = "16.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "641702b12847f9ed418d552f4fcabe536d867a2c980e96b6e7e25d7b992f929f" +checksum = "34e44692d5736cc44c697a372e507890f8797f06d1541c5f4b9bec594d90fd8a" dependencies = [ "auto_impl", "cfg-if", @@ -9289,9 +9348,9 @@ dependencies = [ [[package]] name = "revm-inspectors" -version = "0.8.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43c44af0bf801f48d25f7baf25cf72aff4c02d610f83b428175228162fef0246" +checksum = "a64e2246ad480167548724eb9c9c66945241b867c7d50894de3ca860c9823a45" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -9308,9 +9367,9 @@ dependencies = [ [[package]] name = "revm-interpreter" -version = "10.0.3" +version = "12.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e5e14002afae20b5bf1566f22316122f42f57517000e559c55b25bf7a49cba2" +checksum = "6f89940d17d5d077570de1977f52f69049595322e237cb6c754c3d47f668f023" dependencies = [ "revm-primitives", "serde", @@ -9318,9 +9377,9 @@ dependencies = [ [[package]] name = "revm-precompile" -version = "11.0.3" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3198c06247e8d4ad0d1312591edf049b0de4ddffa9fecb625c318fd67db8639b" +checksum = "d8f816aaea3245cbdbe7fdd84955df33597f9322c7912c3e3ba7bc855e03211f" dependencies = [ "aurora-engine-modexp", "blst", @@ -9338,9 +9397,9 @@ dependencies = [ [[package]] name = "revm-primitives" -version = "10.0.0" +version = "12.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f1525851a03aff9a9d6a1d018b414d76252d6802ab54695b27093ecd7e7a101" +checksum = "532411bbde45a46707c1d434dcdc29866cf261c1b748fb01b303ce3b4310b361" dependencies = [ "alloy-eip2930", "alloy-eip7702", @@ -11355,6 +11414,20 @@ dependencies = [ "web-sys", ] +[[package]] +name = "wasmtimer" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7ed9d8b15c7fb594d72bfb4b5a276f3d2029333cd93a932f376f5937f6f80ee" +dependencies = [ + "futures", + "js-sys", + "parking_lot 0.12.3", + "pin-utils", + "slab", + "wasm-bindgen", +] + [[package]] name = "web-sys" version = "0.3.72" diff --git 
a/Cargo.toml b/Cargo.toml index 541110969..6c66e501e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -410,9 +410,9 @@ reth-trie-db = { path = "crates/trie/db" } reth-trie-parallel = { path = "crates/trie/parallel" } # revm -revm = { version = "14.0.3", features = ["std"], default-features = false } -revm-inspectors = "0.8.1" -revm-primitives = { version = "10.0.0", features = [ +revm = { version = "16.0.0", features = ["std"], default-features = false } +revm-inspectors = "0.9.0" +revm-primitives = { version = "12.0.0", features = [ "std", ], default-features = false } @@ -424,45 +424,45 @@ alloy-rlp = "0.3.4" alloy-sol-types = "0.8.0" alloy-trie = { version = "0.7", default-features = false } -alloy-consensus = { version = "0.4.2", default-features = false } -alloy-eips = { version = "0.4.2", default-features = false } -alloy-genesis = { version = "0.4.2", default-features = false } -alloy-json-rpc = { version = "0.4.2", default-features = false } -alloy-network = { version = "0.4.2", default-features = false } -alloy-network-primitives = { version = "0.4.2", default-features = false } -alloy-node-bindings = { version = "0.4.2", default-features = false } -alloy-provider = { version = "0.4.2", features = [ +alloy-consensus = { version = "0.5.2", default-features = false } +alloy-eips = { version = "0.5.2", default-features = false } +alloy-genesis = { version = "0.5.2", default-features = false } +alloy-json-rpc = { version = "0.5.2", default-features = false } +alloy-network = { version = "0.5.2", default-features = false } +alloy-network-primitives = { version = "0.5.2", default-features = false } +alloy-node-bindings = { version = "0.5.2", default-features = false } +alloy-provider = { version = "0.5.2", features = [ "reqwest", ], default-features = false } -alloy-pubsub = { version = "0.4.2", default-features = false } -alloy-rpc-client = { version = "0.4.2", default-features = false } -alloy-rpc-types = { version = "0.4.2", features = [ +alloy-pubsub = { version = "0.5.2", default-features = false } +alloy-rpc-client = { version = "0.5.2", default-features = false } +alloy-rpc-types = { version = "0.5.2", features = [ "eth", ], default-features = false } -alloy-rpc-types-admin = { version = "0.4.2", default-features = false } -alloy-rpc-types-anvil = { version = "0.4.2", default-features = false } -alloy-rpc-types-beacon = { version = "0.4.2", default-features = false } -alloy-rpc-types-debug = { version = "0.4.2", default-features = false } -alloy-rpc-types-engine = { version = "0.4.2", default-features = false } -alloy-rpc-types-eth = { version = "0.4.2", default-features = false } -alloy-rpc-types-mev = { version = "0.4.2", default-features = false } -alloy-rpc-types-trace = { version = "0.4.2", default-features = false } -alloy-rpc-types-txpool = { version = "0.4.2", default-features = false } -alloy-serde = { version = "0.4.2", default-features = false } -alloy-signer = { version = "0.4.2", default-features = false } -alloy-signer-local = { version = "0.4.2", default-features = false } -alloy-transport = { version = "0.4.2" } -alloy-transport-http = { version = "0.4.2", features = [ +alloy-rpc-types-admin = { version = "0.5.2", default-features = false } +alloy-rpc-types-anvil = { version = "0.5.2", default-features = false } +alloy-rpc-types-beacon = { version = "0.5.2", default-features = false } +alloy-rpc-types-debug = { version = "0.5.2", default-features = false } +alloy-rpc-types-engine = { version = "0.5.2", default-features = false } +alloy-rpc-types-eth = { version = 
"0.5.2", default-features = false } +alloy-rpc-types-mev = { version = "0.5.2", default-features = false } +alloy-rpc-types-trace = { version = "0.5.2", default-features = false } +alloy-rpc-types-txpool = { version = "0.5.2", default-features = false } +alloy-serde = { version = "0.5.2", default-features = false } +alloy-signer = { version = "0.5.2", default-features = false } +alloy-signer-local = { version = "0.5.2", default-features = false } +alloy-transport = { version = "0.5.2" } +alloy-transport-http = { version = "0.5.2", features = [ "reqwest-rustls-tls", ], default-features = false } -alloy-transport-ipc = { version = "0.4.2", default-features = false } -alloy-transport-ws = { version = "0.4.2", default-features = false } +alloy-transport-ipc = { version = "0.5.2", default-features = false } +alloy-transport-ws = { version = "0.5.2", default-features = false } # op -op-alloy-rpc-types = "0.4" -op-alloy-rpc-types-engine = "0.4" -op-alloy-network = "0.4" -op-alloy-consensus = "0.4" +op-alloy-rpc-types = "0.5" +op-alloy-rpc-types-engine = "0.5" +op-alloy-network = "0.5" +op-alloy-consensus = "0.5" # misc aquamarine = "0.5" @@ -593,30 +593,35 @@ tikv-jemalloc-ctl = "0.6" tikv-jemallocator = "0.6" tracy-client = "0.17.3" -[patch.crates-io] -#alloy-consensus = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-eips = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-genesis = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-json-rpc = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-network = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-node-bindings = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-provider = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-pubsub = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-rpc-client = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-rpc-types = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-rpc-types-admin = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-rpc-types-anvil = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-rpc-types-beacon = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-rpc-types-debug = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-rpc-types-engine = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-rpc-types-eth = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-rpc-types-mev = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-rpc-types-trace = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-rpc-types-txpool = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-serde = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-signer = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-signer-local = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-transport = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-transport-http = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-transport-ipc = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} -#alloy-transport-ws = { git = "https://github.com/alloy-rs/alloy", rev = "8c499409"} +#[patch.crates-io] +#alloy-consensus = { git = 
"https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-eips = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-genesis = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-json-rpc = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-network = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-node-bindings = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-provider = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-pubsub = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-rpc-client = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-rpc-types = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-rpc-types-admin = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-rpc-types-anvil = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-rpc-types-beacon = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-rpc-types-debug = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-rpc-types-engine = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-rpc-types-eth = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-rpc-types-mev = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-rpc-types-trace = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-rpc-types-txpool = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-serde = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-signer = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-signer-local = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-transport = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-transport-http = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-transport-ipc = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } +#alloy-transport-ws = { git = "https://github.com/alloy-rs/alloy", rev = "a971b3a" } + +#op-alloy-rpc-types = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } +#op-alloy-rpc-types-engine = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } +#op-alloy-network = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } +#op-alloy-consensus = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } diff --git a/bin/reth-bench/src/authenticated_transport.rs b/bin/reth-bench/src/authenticated_transport.rs index c946d244d..72c4fd298 100644 --- a/bin/reth-bench/src/authenticated_transport.rs +++ b/bin/reth-bench/src/authenticated_transport.rs @@ -84,7 +84,8 @@ impl InnerTransport { let (auth, claims) = build_auth(jwt).map_err(|e| AuthenticatedTransportError::InvalidJwt(e.to_string()))?; - let inner = WsConnect { url: url.to_string(), auth: Some(auth) } + let inner = WsConnect::new(url.clone()) + .with_auth(auth) .into_service() .await .map(Self::Ws) diff --git a/bin/reth-bench/src/valid_payload.rs b/bin/reth-bench/src/valid_payload.rs index 6353aea71..b00f4ddcd 100644 --- a/bin/reth-bench/src/valid_payload.rs +++ b/bin/reth-bench/src/valid_payload.rs @@ -215,14 +215,6 @@ pub(crate) async fn call_new_payload>( versioned_hashes: Vec, ) -> TransportResult { match payload { - ExecutionPayload::V4(_payload) => { - todo!("V4 payloads not supported yet"); - // auth_provider - // 
.new_payload_v4_wait(payload, versioned_hashes, parent_beacon_block_root, ...) - // .await?; - // - // Ok(EngineApiMessageVersion::V4) - } ExecutionPayload::V3(payload) => { // We expect the caller let parent_beacon_block_root = parent_beacon_block_root diff --git a/bin/reth/src/commands/debug_cmd/replay_engine.rs b/bin/reth/src/commands/debug_cmd/replay_engine.rs index cbffa1f0e..de497cbe0 100644 --- a/bin/reth/src/commands/debug_cmd/replay_engine.rs +++ b/bin/reth/src/commands/debug_cmd/replay_engine.rs @@ -171,7 +171,9 @@ impl> Command { debug!(target: "reth::cli", ?response, "Received for forkchoice updated"); } StoredEngineApiMessage::NewPayload { payload, cancun_fields } => { - let response = beacon_engine_handle.new_payload(payload, cancun_fields).await?; + // todo: prague (last arg) + let response = + beacon_engine_handle.new_payload(payload, cancun_fields, None).await?; debug!(target: "reth::cli", ?response, "Received for new payload"); } }; diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index db43dffcd..1e2ed2a4a 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -1635,7 +1635,6 @@ mod tests { transactions: body.clone().into_iter().map(|tx| tx.into_signed()).collect(), ommers: Vec::new(), withdrawals: Some(Withdrawals::default()), - requests: None, }, }, body.iter().map(|tx| tx.signer()).collect(), diff --git a/crates/chain-state/src/in_memory.rs b/crates/chain-state/src/in_memory.rs index f157da5ff..be33e1fd7 100644 --- a/crates/chain-state/src/in_memory.rs +++ b/crates/chain-state/src/in_memory.rs @@ -865,10 +865,11 @@ impl NewCanonicalChain { mod tests { use super::*; use crate::test_utils::TestBlockBuilder; + use alloy_eips::eip7685::Requests; use alloy_primitives::{map::HashSet, BlockNumber, Bytes, StorageKey, StorageValue}; use rand::Rng; use reth_errors::ProviderResult; - use reth_primitives::{Account, Bytecode, Receipt, Requests}; + use reth_primitives::{Account, Bytecode, Receipt}; use reth_storage_api::{ AccountReader, BlockHashReader, StateProofProvider, StateProvider, StateRootProvider, StorageRootProvider, diff --git a/crates/chain-state/src/test_utils.rs b/crates/chain-state/src/test_utils.rs index a820bb5cf..f1648ab6b 100644 --- a/crates/chain-state/src/test_utils.rs +++ b/crates/chain-state/src/test_utils.rs @@ -3,6 +3,7 @@ use crate::{ CanonStateSubscriptions, }; use alloy_consensus::{Transaction as _, TxEip1559, EMPTY_ROOT_HASH}; +use alloy_eips::eip7685::Requests; use alloy_primitives::{Address, BlockNumber, Sealable, B256, U256}; use alloy_signer::SignerSync; use alloy_signer_local::PrivateKeySigner; @@ -12,8 +13,8 @@ use reth_execution_types::{Chain, ExecutionOutcome}; use reth_primitives::{ constants::EIP1559_INITIAL_BASE_FEE, proofs::{calculate_receipt_root, calculate_transaction_root, calculate_withdrawals_root}, - BlockBody, Header, Receipt, Receipts, Requests, SealedBlock, SealedBlockWithSenders, - SealedHeader, Transaction, TransactionSigned, TransactionSignedEcRecovered, + BlockBody, Header, Receipt, Receipts, SealedBlock, SealedBlockWithSenders, SealedHeader, + Transaction, TransactionSigned, TransactionSignedEcRecovered, }; use reth_trie::{root::state_root_unhashed, updates::TrieUpdates, HashedPostState}; use revm::{db::BundleState, primitives::AccountInfo}; @@ -169,7 +170,6 @@ impl TestBlockBuilder { transactions: transactions.into_iter().map(|tx| tx.into_signed()).collect(), ommers: Vec::new(), withdrawals: Some(vec![].into()), 
- requests: None, }, }; diff --git a/crates/chainspec/Cargo.toml b/crates/chainspec/Cargo.toml index 2864427c2..87df28322 100644 --- a/crates/chainspec/Cargo.toml +++ b/crates/chainspec/Cargo.toml @@ -22,7 +22,6 @@ alloy-chains = { workspace = true, features = ["serde", "rlp"] } alloy-eips = { workspace = true, features = ["serde"] } alloy-genesis.workspace = true alloy-primitives = { workspace = true, features = ["rand", "rlp"] } -alloy-trie.workspace = true alloy-consensus.workspace = true # misc diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index 59e1a5ce1..a7f45727d 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -3,9 +3,9 @@ pub use alloy_eips::eip1559::BaseFeeParams; use alloc::{boxed::Box, sync::Arc, vec::Vec}; use alloy_chains::{Chain, NamedChain}; use alloy_consensus::constants::EMPTY_WITHDRAWALS; +use alloy_eips::eip7685::EMPTY_REQUESTS_HASH; use alloy_genesis::Genesis; use alloy_primitives::{address, b256, Address, BlockNumber, B256, U256}; -use alloy_trie::EMPTY_ROOT_HASH; use derive_more::From; use alloy_consensus::constants::{DEV_GENESIS_HASH, MAINNET_GENESIS_HASH}; @@ -284,8 +284,9 @@ impl ChainSpec { }; // If Prague is activated at genesis we set requests root to an empty trie root. - let requests_root = - self.is_prague_active_at_timestamp(self.genesis.timestamp).then_some(EMPTY_ROOT_HASH); + let requests_hash = self + .is_prague_active_at_timestamp(self.genesis.timestamp) + .then_some(EMPTY_REQUESTS_HASH); Header { gas_limit: self.genesis.gas_limit, @@ -301,7 +302,7 @@ impl ChainSpec { parent_beacon_block_root, blob_gas_used: blob_gas_used.map(Into::into), excess_blob_gas: excess_blob_gas.map(Into::into), - requests_root, + requests_hash, ..Default::default() } } @@ -940,6 +941,7 @@ mod tests { use alloy_chains::Chain; use alloy_genesis::{ChainConfig, GenesisAccount}; use alloy_primitives::{b256, hex}; + use alloy_trie::EMPTY_ROOT_HASH; use reth_ethereum_forks::{ForkCondition, ForkHash, ForkId, Head}; use reth_trie_common::TrieAccount; diff --git a/crates/cli/commands/src/stage/drop.rs b/crates/cli/commands/src/stage/drop.rs index 9e0396404..3a277cabd 100644 --- a/crates/cli/commands/src/stage/drop.rs +++ b/crates/cli/commands/src/stage/drop.rs @@ -81,7 +81,6 @@ impl> Command tx.clear::()?; tx.clear::()?; tx.clear::()?; - tx.clear::()?; reset_stage_checkpoint(tx, StageId::Bodies)?; insert_genesis_header(&provider_rw.0, &static_file_provider, &self.env.chain)?; diff --git a/crates/consensus/auto-seal/Cargo.toml b/crates/consensus/auto-seal/Cargo.toml index b4b281230..249858711 100644 --- a/crates/consensus/auto-seal/Cargo.toml +++ b/crates/consensus/auto-seal/Cargo.toml @@ -31,6 +31,7 @@ reth-tokio-util.workspace = true reth-trie.workspace = true # ethereum +alloy-eips.workspace = true alloy-primitives.workspace = true revm-primitives.workspace = true alloy-rpc-types-engine.workspace = true diff --git a/crates/consensus/auto-seal/src/lib.rs b/crates/consensus/auto-seal/src/lib.rs index 261227f10..16299e19b 100644 --- a/crates/consensus/auto-seal/src/lib.rs +++ b/crates/consensus/auto-seal/src/lib.rs @@ -15,6 +15,7 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +use alloy_eips::eip7685::Requests; use alloy_primitives::{BlockHash, BlockNumber, Bloom, B256, U256}; use reth_beacon_consensus::BeaconEngineMessage; use reth_chainspec::{EthChainSpec, EthereumHardforks}; @@ -25,7 +26,7 @@ use reth_execution_errors::{ }; use 
reth_execution_types::ExecutionOutcome; use reth_primitives::{ - proofs, Block, BlockBody, BlockHashOrNumber, BlockWithSenders, Header, Requests, SealedBlock, + proofs, Block, BlockBody, BlockHashOrNumber, BlockWithSenders, Header, SealedBlock, SealedHeader, TransactionSigned, Withdrawals, }; use reth_provider::{BlockReaderIdExt, StateProviderFactory, StateRootProvider}; @@ -301,7 +302,7 @@ impl StorageInner { timestamp, base_fee_per_gas, blob_gas_used, - requests_root: requests.map(|r| proofs::calculate_requests_root(&r.0)), + requests_hash: requests.map(|r| r.requests_hash()), ..Default::default() }; @@ -366,7 +367,6 @@ impl StorageInner { transactions, ommers: ommers.clone(), withdrawals: withdrawals.clone(), - requests: requests.clone(), }, } .with_recovered_senders() @@ -390,7 +390,7 @@ impl StorageInner { // root here let Block { mut header, body, .. } = block.block; - let body = BlockBody { transactions: body.transactions, ommers, withdrawals, requests }; + let body = BlockBody { transactions: body.transactions, ommers, withdrawals }; trace!(target: "consensus::auto", ?execution_outcome, ?header, ?body, "executed block, calculating state root and completing header"); @@ -682,7 +682,7 @@ mod tests { timestamp, base_fee_per_gas: None, blob_gas_used: Some(0), - requests_root: None, + requests_hash: None, excess_blob_gas: Some(0), ..Default::default() } diff --git a/crates/consensus/beacon/src/engine/handle.rs b/crates/consensus/beacon/src/engine/handle.rs index 65b7c38df..1f4445901 100644 --- a/crates/consensus/beacon/src/engine/handle.rs +++ b/crates/consensus/beacon/src/engine/handle.rs @@ -4,6 +4,7 @@ use crate::{ engine::message::OnForkChoiceUpdated, BeaconConsensusEngineEvent, BeaconEngineMessage, BeaconForkChoiceUpdateError, BeaconOnNewPayloadError, }; +use alloy_primitives::Bytes; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ForkchoiceState, ForkchoiceUpdated, PayloadStatus, }; @@ -47,9 +48,17 @@ where &self, payload: ExecutionPayload, cancun_fields: Option, + execution_requests: Option>, ) -> Result { let (tx, rx) = oneshot::channel(); - let _ = self.to_engine.send(BeaconEngineMessage::NewPayload { payload, cancun_fields, tx }); + // HACK(onbjerg): We should have a pectra payload fields struct, this is just a temporary + // workaround. + let _ = self.to_engine.send(BeaconEngineMessage::NewPayload { + payload, + cancun_fields, + execution_requests, + tx, + }); rx.await.map_err(|_| BeaconOnNewPayloadError::EngineUnavailable)? } diff --git a/crates/consensus/beacon/src/engine/message.rs b/crates/consensus/beacon/src/engine/message.rs index fdaad0cc4..45d0c57f4 100644 --- a/crates/consensus/beacon/src/engine/message.rs +++ b/crates/consensus/beacon/src/engine/message.rs @@ -1,4 +1,5 @@ use crate::engine::{error::BeaconOnNewPayloadError, forkchoice::ForkchoiceStatus}; +use alloy_primitives::Bytes; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ForkChoiceUpdateResult, ForkchoiceState, ForkchoiceUpdateError, ForkchoiceUpdated, PayloadId, PayloadStatus, PayloadStatusEnum, @@ -146,6 +147,10 @@ pub enum BeaconEngineMessage { payload: ExecutionPayload, /// The cancun-related newPayload fields, if any. cancun_fields: Option, + // HACK(onbjerg): We should have a pectra payload fields struct, this is just a temporary + // workaround. + /// The pectra EIP-7685 execution requests. + execution_requests: Option>, /// The sender for returning payload status result. 
tx: oneshot::Sender>, }, diff --git a/crates/consensus/beacon/src/engine/mod.rs b/crates/consensus/beacon/src/engine/mod.rs index ccea982bf..edd3d6db3 100644 --- a/crates/consensus/beacon/src/engine/mod.rs +++ b/crates/consensus/beacon/src/engine/mod.rs @@ -1,4 +1,4 @@ -use alloy_primitives::{BlockNumber, B256}; +use alloy_primitives::{BlockNumber, Bytes, B256}; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, PayloadStatusEnum, PayloadValidationError, @@ -1085,6 +1085,9 @@ where &mut self, payload: ExecutionPayload, cancun_fields: Option, + // HACK(onbjerg): We should have a pectra payload fields struct, this is just a temporary + // workaround. + execution_requests: Option>, ) -> Result, BeaconOnNewPayloadError> { self.metrics.new_payload_messages.increment(1); @@ -1114,10 +1117,11 @@ where // // This validation **MUST** be instantly run in all cases even during active sync process. let parent_hash = payload.parent_hash(); - let block = match self - .payload_validator - .ensure_well_formed_payload(payload, cancun_fields.into()) - { + let block = match self.payload_validator.ensure_well_formed_payload( + payload, + cancun_fields.into(), + execution_requests, + ) { Ok(block) => block, Err(error) => { error!(target: "consensus::engine", %error, "Invalid payload"); @@ -1862,8 +1866,13 @@ where BeaconEngineMessage::ForkchoiceUpdated { state, payload_attrs, tx } => { this.on_forkchoice_updated(state, payload_attrs, tx); } - BeaconEngineMessage::NewPayload { payload, cancun_fields, tx } => { - match this.on_new_payload(payload, cancun_fields) { + BeaconEngineMessage::NewPayload { + payload, + cancun_fields, + execution_requests, + tx, + } => { + match this.on_new_payload(payload, cancun_fields, execution_requests) { Ok(Either::Right(block)) => { this.set_blockchain_tree_action( BlockchainTreeAction::InsertNewPayload { block, tx }, diff --git a/crates/consensus/beacon/src/engine/test_utils.rs b/crates/consensus/beacon/src/engine/test_utils.rs index 633ae03d8..7e9e1ec6b 100644 --- a/crates/consensus/beacon/src/engine/test_utils.rs +++ b/crates/consensus/beacon/src/engine/test_utils.rs @@ -70,7 +70,7 @@ impl TestEnv { payload: T, cancun_fields: Option, ) -> Result { - self.engine_handle.new_payload(payload.into(), cancun_fields).await + self.engine_handle.new_payload(payload.into(), cancun_fields, None).await } /// Sends the `ExecutionPayload` message to the consensus engine and retries if the engine diff --git a/crates/consensus/common/src/validation.rs b/crates/consensus/common/src/validation.rs index 4c2e6b192..dabb8c3c3 100644 --- a/crates/consensus/common/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -75,24 +75,6 @@ pub fn validate_cancun_gas(block: &SealedBlock) -> Result<(), ConsensusError> { Ok(()) } -/// Validate that requests root is present if Prague is active. 
-/// -/// See [EIP-7685]: General purpose execution layer requests -/// -/// [EIP-7685]: https://eips.ethereum.org/EIPS/eip-7685 -#[inline] -pub fn validate_prague_request(block: &SealedBlock) -> Result<(), ConsensusError> { - let requests_root = - block.body.calculate_requests_root().ok_or(ConsensusError::BodyRequestsMissing)?; - let header_requests_root = block.requests_root.ok_or(ConsensusError::RequestsRootMissing)?; - if requests_root != *header_requests_root { - return Err(ConsensusError::BodyRequestsRootDiff( - GotExpected { got: requests_root, expected: header_requests_root }.into(), - )); - } - Ok(()) -} - /// Validate a block without regard for state: /// /// - Compares the ommer hash in the block header to the block body @@ -125,10 +107,6 @@ pub fn validate_block_pre_execution( validate_cancun_gas(block)?; } - if chain_spec.is_prague_active_at_timestamp(block.timestamp) { - validate_prague_request(block)?; - } - Ok(()) } @@ -458,7 +436,7 @@ mod tests { blob_gas_used: None, excess_blob_gas: None, parent_beacon_block_root: None, - requests_root: None + requests_hash: None }; // size: 0x9b5 @@ -478,7 +456,7 @@ mod tests { ( SealedBlock { header: SealedHeader::new(header, seal), - body: BlockBody { transactions, ommers, withdrawals: None, requests: None }, + body: BlockBody { transactions, ommers, withdrawals: None }, }, parent, ) @@ -550,7 +528,6 @@ mod tests { transactions: vec![transaction], ommers: vec![], withdrawals: Some(Withdrawals::default()), - requests: None, }; let block = SealedBlock::new(header, body); diff --git a/crates/consensus/consensus/Cargo.toml b/crates/consensus/consensus/Cargo.toml index 660b43865..1736caab5 100644 --- a/crates/consensus/consensus/Cargo.toml +++ b/crates/consensus/consensus/Cargo.toml @@ -15,6 +15,7 @@ workspace = true reth-primitives.workspace = true # ethereum +alloy-eips.workspace = true alloy-primitives.workspace = true # misc diff --git a/crates/consensus/consensus/src/lib.rs b/crates/consensus/consensus/src/lib.rs index 91b93c8a7..4bf5da3b1 100644 --- a/crates/consensus/consensus/src/lib.rs +++ b/crates/consensus/consensus/src/lib.rs @@ -12,10 +12,11 @@ extern crate alloc; use alloc::{fmt::Debug, vec::Vec}; +use alloy_eips::eip7685::Requests; use alloy_primitives::{BlockHash, BlockNumber, Bloom, B256, U256}; use reth_primitives::{ constants::MINIMUM_GAS_LIMIT, BlockWithSenders, GotExpected, GotExpectedBoxed, Header, - InvalidTransactionError, Receipt, Request, SealedBlock, SealedHeader, + InvalidTransactionError, Receipt, SealedBlock, SealedHeader, }; /// A consensus implementation that does nothing. @@ -31,12 +32,12 @@ pub struct PostExecutionInput<'a> { /// Receipts of the block. pub receipts: &'a [Receipt], /// EIP-7685 requests of the block. - pub requests: &'a [Request], + pub requests: &'a Requests, } impl<'a> PostExecutionInput<'a> { /// Creates a new instance of `PostExecutionInput`. - pub const fn new(receipts: &'a [Receipt], requests: &'a [Request]) -> Self { + pub const fn new(receipts: &'a [Receipt], requests: &'a Requests) -> Self { Self { receipts, requests } } } @@ -170,10 +171,10 @@ pub enum ConsensusError { #[display("mismatched block withdrawals root: {_0}")] BodyWithdrawalsRootDiff(GotExpectedBoxed), - /// Error when the requests root in the block is different from the expected requests - /// root. - #[display("mismatched block requests root: {_0}")] - BodyRequestsRootDiff(GotExpectedBoxed), + /// Error when the requests hash in the block is different from the expected requests + /// hash. 
+ #[display("mismatched block requests hash: {_0}")] + BodyRequestsHashDiff(GotExpectedBoxed), /// Error when a block with a specific hash and number is already known. #[display("block with [hash={hash}, number={number}] is already known")] @@ -248,17 +249,17 @@ pub enum ConsensusError { #[display("missing withdrawals root")] WithdrawalsRootMissing, - /// Error when the requests root is missing. - #[display("missing requests root")] - RequestsRootMissing, + /// Error when the requests hash is missing. + #[display("missing requests hash")] + RequestsHashMissing, /// Error when an unexpected withdrawals root is encountered. #[display("unexpected withdrawals root")] WithdrawalsRootUnexpected, - /// Error when an unexpected requests root is encountered. - #[display("unexpected requests root")] - RequestsRootUnexpected, + /// Error when an unexpected requests hash is encountered. + #[display("unexpected requests hash")] + RequestsHashUnexpected, /// Error when withdrawals are missing. #[display("missing withdrawals")] diff --git a/crates/e2e-test-utils/src/transaction.rs b/crates/e2e-test-utils/src/transaction.rs index 049603044..58a25dc12 100644 --- a/crates/e2e-test-utils/src/transaction.rs +++ b/crates/e2e-test-utils/src/transaction.rs @@ -56,8 +56,7 @@ impl TransactionTestContext { delegate_to: Address, wallet: PrivateKeySigner, ) -> TxEnvelope { - let authorization = - Authorization { chain_id: U256::from(chain_id), address: delegate_to, nonce: 0 }; + let authorization = Authorization { chain_id, address: delegate_to, nonce: 0 }; let signature = wallet .sign_hash_sync(&authorization.signature_hash()) .expect("could not sign authorization"); diff --git a/crates/engine/invalid-block-hooks/src/witness.rs b/crates/engine/invalid-block-hooks/src/witness.rs index bb227e304..ab73a8190 100644 --- a/crates/engine/invalid-block-hooks/src/witness.rs +++ b/crates/engine/invalid-block-hooks/src/witness.rs @@ -162,7 +162,7 @@ where let response = ExecutionWitness { state: HashMap::from_iter(state), codes: Default::default(), - keys: Some(state_preimages), + keys: state_preimages, }; let re_executed_witness_path = self.save_file( format!("{}_{}.witness.re_executed.json", block.number, block.hash()), diff --git a/crates/engine/local/src/miner.rs b/crates/engine/local/src/miner.rs index 8bcb7083a..552cbd047 100644 --- a/crates/engine/local/src/miner.rs +++ b/crates/engine/local/src/miner.rs @@ -222,6 +222,8 @@ where self.to_engine.send(BeaconEngineMessage::NewPayload { payload: block_to_payload(payload.block().clone()), cancun_fields, + // todo: prague + execution_requests: None, tx, })?; diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index 3eadbbd52..c63c8fbe2 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -7,7 +7,7 @@ use crate::{ use alloy_eips::BlockNumHash; use alloy_primitives::{ map::{HashMap, HashSet}, - BlockNumber, B256, U256, + BlockNumber, Bytes, B256, U256, }; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, PayloadStatusEnum, @@ -721,6 +721,7 @@ where &mut self, payload: ExecutionPayload, cancun_fields: Option, + execution_requests: Option>, ) -> Result, InsertBlockFatalError> { trace!(target: "engine::tree", "invoked new payload"); self.metrics.engine.new_payload_messages.increment(1); @@ -751,10 +752,11 @@ where // // This validation **MUST** be instantly run in all cases even during active sync process. 
let parent_hash = payload.parent_hash(); - let block = match self - .payload_validator - .ensure_well_formed_payload(payload, cancun_fields.into()) - { + let block = match self.payload_validator.ensure_well_formed_payload( + payload, + cancun_fields.into(), + execution_requests, + ) { Ok(block) => block, Err(error) => { error!(target: "engine::tree", %error, "Invalid payload"); @@ -1236,8 +1238,14 @@ where error!(target: "engine::tree", "Failed to send event: {err:?}"); } } - BeaconEngineMessage::NewPayload { payload, cancun_fields, tx } => { - let output = self.on_new_payload(payload, cancun_fields); + BeaconEngineMessage::NewPayload { + payload, + cancun_fields, + execution_requests, + tx, + } => { + let output = + self.on_new_payload(payload, cancun_fields, execution_requests); if let Err(err) = tx.send(output.map(|o| o.outcome).map_err(|e| { reth_beacon_consensus::BeaconOnNewPayloadError::Internal( Box::new(e), @@ -2852,6 +2860,7 @@ mod tests { parent_beacon_block_root: block.parent_beacon_block_root.unwrap(), versioned_hashes: vec![], }), + None, ) .unwrap(); } @@ -3114,7 +3123,7 @@ mod tests { let mut test_harness = TestHarness::new(HOLESKY.clone()); - let outcome = test_harness.tree.on_new_payload(payload.into(), None).unwrap(); + let outcome = test_harness.tree.on_new_payload(payload.into(), None, None).unwrap(); assert!(outcome.outcome.is_syncing()); // ensure block is buffered @@ -3159,6 +3168,7 @@ mod tests { BeaconEngineMessage::NewPayload { payload: payload.clone().into(), cancun_fields: None, + execution_requests: None, tx, } .into(), diff --git a/crates/engine/util/src/engine_store.rs b/crates/engine/util/src/engine_store.rs index 1f3445199..de193bf3b 100644 --- a/crates/engine/util/src/engine_store.rs +++ b/crates/engine/util/src/engine_store.rs @@ -73,7 +73,13 @@ impl EngineMessageStore { })?, )?; } - BeaconEngineMessage::NewPayload { payload, cancun_fields, tx: _tx } => { + // todo(onbjerg): execution requests + BeaconEngineMessage::NewPayload { + payload, + cancun_fields, + execution_requests: _, + tx: _tx, + } => { let filename = format!("{}-new_payload-{}.json", timestamp, payload.block_hash()); fs::write( self.path.join(filename), diff --git a/crates/engine/util/src/reorg.rs b/crates/engine/util/src/reorg.rs index abfa23a57..90b5c90aa 100644 --- a/crates/engine/util/src/reorg.rs +++ b/crates/engine/util/src/reorg.rs @@ -1,7 +1,7 @@ //! Stream wrapper that simulates reorgs. use alloy_consensus::Transaction; -use alloy_primitives::U256; +use alloy_primitives::{Bytes, U256}; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, }; @@ -147,7 +147,12 @@ where let next = ready!(this.stream.poll_next_unpin(cx)); let item = match (next, &this.last_forkchoice_state) { ( - Some(BeaconEngineMessage::NewPayload { payload, cancun_fields, tx }), + Some(BeaconEngineMessage::NewPayload { + payload, + cancun_fields, + execution_requests, + tx, + }), Some(last_forkchoice_state), ) if this.forkchoice_states_forwarded > this.frequency && // Only enter reorg state if new payload attaches to current head. @@ -162,26 +167,29 @@ where // forkchoice state. We will rely on CL to reorg us back to canonical chain. // TODO: This is an expensive blocking operation, ideally it's spawned as a task // so that the stream could yield the control back. 
- let (reorg_payload, reorg_cancun_fields) = match create_reorg_head( - this.provider, - this.evm_config, - this.payload_validator, - *this.depth, - payload.clone(), - cancun_fields.clone(), - ) { - Ok(result) => result, - Err(error) => { - error!(target: "engine::stream::reorg", %error, "Error attempting to create reorg head"); - // Forward the payload and attempt to create reorg on top of - // the next one - return Poll::Ready(Some(BeaconEngineMessage::NewPayload { - payload, - cancun_fields, - tx, - })) - } - }; + let (reorg_payload, reorg_cancun_fields, reorg_execution_requests) = + match create_reorg_head( + this.provider, + this.evm_config, + this.payload_validator, + *this.depth, + payload.clone(), + cancun_fields.clone(), + execution_requests.clone(), + ) { + Ok(result) => result, + Err(error) => { + error!(target: "engine::stream::reorg", %error, "Error attempting to create reorg head"); + // Forward the payload and attempt to create reorg on top of + // the next one + return Poll::Ready(Some(BeaconEngineMessage::NewPayload { + payload, + cancun_fields, + execution_requests, + tx, + })) + } + }; let reorg_forkchoice_state = ForkchoiceState { finalized_block_hash: last_forkchoice_state.finalized_block_hash, safe_block_hash: last_forkchoice_state.safe_block_hash, @@ -197,11 +205,17 @@ where let queue = VecDeque::from([ // Current payload - BeaconEngineMessage::NewPayload { payload, cancun_fields, tx }, + BeaconEngineMessage::NewPayload { + payload, + cancun_fields, + execution_requests, + tx, + }, // Reorg payload BeaconEngineMessage::NewPayload { payload: reorg_payload, cancun_fields: reorg_cancun_fields, + execution_requests: reorg_execution_requests, tx: reorg_payload_tx, }, // Reorg forkchoice state @@ -236,7 +250,8 @@ fn create_reorg_head( mut depth: usize, next_payload: ExecutionPayload, next_cancun_fields: Option, -) -> RethResult<(ExecutionPayload, Option)> + next_execution_requests: Option>, +) -> RethResult<(ExecutionPayload, Option, Option>)> where Provider: BlockReader + StateProviderFactory, Evm: ConfigureEvm
, @@ -246,7 +261,11 @@ where // Ensure next payload is valid. let next_block = payload_validator - .ensure_well_formed_payload(next_payload, next_cancun_fields.into()) + .ensure_well_formed_payload( + next_payload, + next_cancun_fields.into(), + next_execution_requests, + ) .map_err(RethError::msg)?; // Fetch reorg target block depending on its depth and its parent. @@ -401,7 +420,7 @@ where transactions_root: proofs::calculate_transaction_root(&transactions), receipts_root: outcome.receipts_root_slow(reorg_target.header.number).unwrap(), logs_bloom: outcome.block_logs_bloom(reorg_target.header.number).unwrap(), - requests_root: None, // TODO(prague) + requests_hash: None, // TODO(prague) gas_used: cumulative_gas_used, blob_gas_used: blob_gas_used.map(Into::into), excess_blob_gas: excess_blob_gas.map(Into::into), @@ -411,7 +430,6 @@ where transactions, ommers: reorg_target.body.ommers, withdrawals: reorg_target.body.withdrawals, - requests: None, // TODO(prague) }, } .seal_slow(); @@ -422,5 +440,7 @@ where .header .parent_beacon_block_root .map(|root| CancunPayloadFields { parent_beacon_block_root: root, versioned_hashes }), + // todo(prague) + None, )) } diff --git a/crates/engine/util/src/skip_new_payload.rs b/crates/engine/util/src/skip_new_payload.rs index d2450711e..47c48282e 100644 --- a/crates/engine/util/src/skip_new_payload.rs +++ b/crates/engine/util/src/skip_new_payload.rs @@ -41,7 +41,12 @@ where loop { let next = ready!(this.stream.poll_next_unpin(cx)); let item = match next { - Some(BeaconEngineMessage::NewPayload { payload, cancun_fields, tx }) => { + Some(BeaconEngineMessage::NewPayload { + payload, + cancun_fields, + execution_requests, + tx, + }) => { if this.skipped < this.threshold { *this.skipped += 1; tracing::warn!( @@ -56,7 +61,12 @@ where continue } *this.skipped = 0; - Some(BeaconEngineMessage::NewPayload { payload, cancun_fields, tx }) + Some(BeaconEngineMessage::NewPayload { + payload, + cancun_fields, + execution_requests, + tx, + }) } next => next, }; diff --git a/crates/ethereum/consensus/Cargo.toml b/crates/ethereum/consensus/Cargo.toml index af934d3e2..bace4195c 100644 --- a/crates/ethereum/consensus/Cargo.toml +++ b/crates/ethereum/consensus/Cargo.toml @@ -18,6 +18,7 @@ reth-primitives.workspace = true reth-consensus.workspace = true # alloy +alloy-eips.workspace = true alloy-primitives.workspace = true alloy-consensus.workspace = true diff --git a/crates/ethereum/consensus/src/lib.rs b/crates/ethereum/consensus/src/lib.rs index 8f2a8a720..dd286584a 100644 --- a/crates/ethereum/consensus/src/lib.rs +++ b/crates/ethereum/consensus/src/lib.rs @@ -121,11 +121,11 @@ impl Consensu } if self.chain_spec.is_prague_active_at_timestamp(header.timestamp) { - if header.requests_root.is_none() { - return Err(ConsensusError::RequestsRootMissing) + if header.requests_hash.is_none() { + return Err(ConsensusError::RequestsHashMissing) } - } else if header.requests_root.is_some() { - return Err(ConsensusError::RequestsRootUnexpected) + } else if header.requests_hash.is_some() { + return Err(ConsensusError::RequestsHashUnexpected) } Ok(()) diff --git a/crates/ethereum/consensus/src/validation.rs b/crates/ethereum/consensus/src/validation.rs index e510a91ab..f990ecc57 100644 --- a/crates/ethereum/consensus/src/validation.rs +++ b/crates/ethereum/consensus/src/validation.rs @@ -1,7 +1,8 @@ +use alloy_eips::eip7685::Requests; use alloy_primitives::{Bloom, B256}; use reth_chainspec::EthereumHardforks; use reth_consensus::ConsensusError; -use 
reth_primitives::{gas_spent_by_transactions, BlockWithSenders, GotExpected, Receipt, Request};
+use reth_primitives::{gas_spent_by_transactions, BlockWithSenders, GotExpected, Receipt};
 
 /// Validate a block with regard to execution results:
 ///
@@ -11,7 +12,7 @@ pub fn validate_block_post_execution(
     block: &BlockWithSenders,
     chain_spec: &ChainSpec,
     receipts: &[Receipt],
-    requests: &[Request],
+    requests: &Requests,
 ) -> Result<(), ConsensusError> {
     // Check if gas used matches the value set in header.
     let cumulative_gas_used =
@@ -36,15 +37,15 @@ pub fn validate_block_post_execution(
         }
     }
 
-    // Validate that the header requests root matches the calculated requests root
+    // Validate that the header requests hash matches the calculated requests hash
    if chain_spec.is_prague_active_at_timestamp(block.timestamp) {
-        let Some(header_requests_root) = block.header.requests_root else {
-            return Err(ConsensusError::RequestsRootMissing)
+        let Some(header_requests_hash) = block.header.requests_hash else {
+            return Err(ConsensusError::RequestsHashMissing)
         };
-        let requests_root = reth_primitives::proofs::calculate_requests_root(requests);
-        if requests_root != header_requests_root {
-            return Err(ConsensusError::BodyRequestsRootDiff(
-                GotExpected::new(requests_root, header_requests_root).into(),
+        let requests_hash = requests.requests_hash();
+        if requests_hash != header_requests_hash {
+            return Err(ConsensusError::BodyRequestsHashDiff(
+                GotExpected::new(requests_hash, header_requests_hash).into(),
             ))
         }
     }
diff --git a/crates/ethereum/engine-primitives/src/payload.rs b/crates/ethereum/engine-primitives/src/payload.rs
index ae370fdb9..1ad9c5450 100644
--- a/crates/ethereum/engine-primitives/src/payload.rs
+++ b/crates/ethereum/engine-primitives/src/payload.rs
@@ -1,6 +1,6 @@
 //! Contains types required for building a payload.
 
-use alloy_eips::eip4844::BlobTransactionSidecar;
+use alloy_eips::{eip4844::BlobTransactionSidecar, eip7685::Requests};
 use alloy_primitives::{Address, B256, U256};
 use alloy_rlp::Encodable;
 use alloy_rpc_types_engine::{
@@ -11,8 +11,7 @@ use reth_chain_state::ExecutedBlock;
 use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes};
 use reth_primitives::{SealedBlock, Withdrawals};
 use reth_rpc_types_compat::engine::payload::{
-    block_to_payload_v1, block_to_payload_v3, block_to_payload_v4,
-    convert_block_to_payload_field_v2,
+    block_to_payload_v1, block_to_payload_v3, convert_block_to_payload_field_v2,
 };
 use std::convert::Infallible;
 
@@ -142,10 +141,17 @@ impl From<EthBuiltPayload> for ExecutionPayloadEnvelopeV3 {
 
 impl From<EthBuiltPayload> for ExecutionPayloadEnvelopeV4 {
     fn from(value: EthBuiltPayload) -> Self {
-        let EthBuiltPayload { block, fees, sidecars, .. } = value;
-
+        let EthBuiltPayload { block, fees, sidecars, executed_block, .. } = value;
+
+        // If we have an executed block, we pop off the first set of requests from the
+        // execution outcome. The assumption here is that the execution outcome always
+        // contains exactly one block.
+        let execution_requests = executed_block
+            .and_then(|block| block.execution_outcome().requests.first().cloned())
+            .map(Requests::take)
+            .unwrap_or_default();
         Self {
-            execution_payload: block_to_payload_v4(block),
+            execution_payload: block_to_payload_v3(block),
             block_value: fees,
             // From the engine API spec:
             //
@@ -157,6 +163,7 @@
             //
             should_override_builder: false,
             blobs_bundle: sidecars.into_iter().map(Into::into).collect::<Vec<_>>().into(),
+            execution_requests,
         }
     }
 }
diff --git a/crates/ethereum/evm/src/eip6110.rs b/crates/ethereum/evm/src/eip6110.rs
index e78becd96..4cf1c6ae9 100644
--- a/crates/ethereum/evm/src/eip6110.rs
+++ b/crates/ethereum/evm/src/eip6110.rs
@@ -1,11 +1,11 @@
 //! EIP-6110 deposit requests parsing
 use alloc::{string::ToString, vec::Vec};
-use alloy_eips::eip6110::{DepositRequest, MAINNET_DEPOSIT_CONTRACT_ADDRESS};
-use alloy_primitives::Log;
+use alloy_eips::eip6110::MAINNET_DEPOSIT_CONTRACT_ADDRESS;
+use alloy_primitives::{Bytes, Log};
 use alloy_sol_types::{sol, SolEvent};
 use reth_chainspec::ChainSpec;
 use reth_evm::execute::BlockValidationError;
-use reth_primitives::{Receipt, Request};
+use reth_primitives::Receipt;
 
 sol! {
     #[allow(missing_docs)]
@@ -20,73 +20,57 @@
 /// Parse [deposit contract](https://etherscan.io/address/0x00000000219ab540356cbb839cbe05303d7705fa)
 /// (address is from the passed [`ChainSpec`]) deposits from receipts, and return them as a
-/// [vector](Vec) of (requests)[Request].
+/// [`Bytes`] value of concatenated [requests](alloy_eips::eip7685::Requests) data.
 pub fn parse_deposits_from_receipts<'a, I>(
     chain_spec: &ChainSpec,
     receipts: I,
-) -> Result<Vec<Request>, BlockValidationError>
+) -> Result<Bytes, BlockValidationError>
 where
     I: IntoIterator<Item = &'a Receipt>,
 {
+    let mut requests = Vec::new();
     let deposit_contract_address = chain_spec
         .deposit_contract
         .as_ref()
         .map_or(MAINNET_DEPOSIT_CONTRACT_ADDRESS, |contract| contract.address);
-    receipts
         .into_iter()
-        .flat_map(|receipt| receipt.logs.iter())
-        // No need to filter for topic because there's only one event and that's the Deposit event
-        // in the deposit contract.
+    let logs: Vec<_> = receipts
+        .into_iter()
+        .flat_map(|receipt| &receipt.logs)
+        // No need to filter for topic because there's only one event and that's the Deposit
+        // event in the deposit contract.
         .filter(|log| log.address == deposit_contract_address)
-        .map(|log| {
-            let decoded_log = DepositEvent::decode_log(log, false)?;
-            let deposit = parse_deposit_from_log(&decoded_log);
-            Ok(Request::DepositRequest(deposit))
-        })
-        .collect::<Result<Vec<_>, _>>()
-        .map_err(|err: alloy_sol_types::Error| {
-            BlockValidationError::DepositRequestDecode(err.to_string())
-        })
+        .collect();
+
+    for log in &logs {
+        let decoded_log =
+            DepositEvent::decode_log(log, false).map_err(|err: alloy_sol_types::Error| {
+                BlockValidationError::DepositRequestDecode(err.to_string())
+            })?;
+        requests.extend(parse_deposit_from_log(&decoded_log).as_ref())
+    }
+
+    Ok(requests.into())
 }
 
-fn parse_deposit_from_log(log: &Log) -> DepositRequest {
+fn parse_deposit_from_log(log: &Log) -> Bytes {
     // SAFETY: These `expect` https://github.com/ethereum/consensus-specs/blob/5f48840f4d768bf0e0a8156a3ed06ec333589007/solidity_deposit_contract/deposit_contract.sol#L107-L110
     // are safe because the `DepositEvent` is the only event in the deposit contract and the length
     // checks are done there.
- DepositRequest { - pubkey: log - .pubkey - .as_ref() - .try_into() - .expect("pubkey length should be enforced in deposit contract"), - withdrawal_credentials: log - .withdrawal_credentials - .as_ref() - .try_into() - .expect("withdrawal_credentials length should be enforced in deposit contract"), - amount: u64::from_le_bytes( - log.amount - .as_ref() - .try_into() - .expect("amount length should be enforced in deposit contract"), - ), - signature: log - .signature - .as_ref() - .try_into() - .expect("signature length should be enforced in deposit contract"), - index: u64::from_le_bytes( - log.index - .as_ref() - .try_into() - .expect("deposit index length should be enforced in deposit contract"), - ), - } + [ + log.pubkey.as_ref(), + log.withdrawal_credentials.as_ref(), + log.amount.as_ref(), + log.signature.as_ref(), + log.index.as_ref(), + ] + .concat() + .into() } #[cfg(test)] mod tests { use super::*; + use alloy_primitives::bytes; use reth_chainspec::MAINNET; use reth_primitives::TxType; @@ -119,9 +103,12 @@ mod tests { }, ]; - let requests = parse_deposits_from_receipts(&MAINNET, &receipts).unwrap(); - assert_eq!(requests.len(), 2); - assert_eq!(requests[0].as_deposit_request().unwrap().amount, 32e9 as u64); - assert_eq!(requests[1].as_deposit_request().unwrap().amount, 32e9 as u64); + let request_data = parse_deposits_from_receipts(&MAINNET, &receipts).unwrap(); + assert_eq!( + request_data, + bytes!( + "998c8086669bf65e24581cda47d8537966e9f5066fc6ffdcba910a1bfb91eae7a4873fcce166a1c4ea217e6b1afd396201000000000000000000000001c340fb72ed14d4eaa71f7633ee9e33b88d4f39004059730700000098ddbffd700c1aac324cfdf0492ff289223661eb26718ce3651ba2469b22f480d56efab432ed91af05a006bde0c1ea68134e0acd8cacca0c13ad1f716db874b44abfcc966368019753174753bca3af2ea84bc569c46f76592a91e97f311eddece474160000000000a1a2ba870a90e889aa594a0cc1c6feffb94c2d8f65646c937f1f456a315ef649533e25a4614d8f4f66ebdb06481b90af0100000000000000000000000a0f04a231efbc29e1db7d086300ff550211c2f60040597307000000ad416d590e1a7f52baff770a12835b68904efad22cc9f8ba531e50cbbd26f32b9c7373cf6538a0577f501e4d3e3e63e208767bcccaae94e1e3720bfb734a286f9c017d17af46536545ccb7ca94d71f295e71f6d25bf978c09ada6f8d3f7ba039e374160000000000" + ) + ); } } diff --git a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs index 9c7748a56..428458fcd 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -4,8 +4,9 @@ use crate::{ dao_fork::{DAO_HARDFORK_BENEFICIARY, DAO_HARDKFORK_ACCOUNTS}, EthEvmConfig, }; -use alloc::{boxed::Box, sync::Arc, vec, vec::Vec}; +use alloc::{boxed::Box, sync::Arc, vec::Vec}; use alloy_consensus::Transaction as _; +use alloy_eips::eip7685::Requests; use alloy_primitives::{BlockNumber, U256}; use core::fmt::Display; use reth_chainspec::{ChainSpec, EthereumHardforks, MAINNET}; @@ -19,7 +20,7 @@ use reth_evm::{ ConfigureEvm, }; use reth_execution_types::ExecutionOutcome; -use reth_primitives::{BlockWithSenders, EthereumHardfork, Header, Receipt, Request}; +use reth_primitives::{BlockWithSenders, EthereumHardfork, Header, Receipt}; use reth_prune_types::PruneModes; use reth_revm::{ batch::BlockBatchRecord, @@ -104,7 +105,7 @@ where #[derive(Debug, Clone)] struct EthExecuteOutput { receipts: Vec, - requests: Vec, + requests: Requests, gas_used: u64, } @@ -122,7 +123,7 @@ where EvmConfig: ConfigureEvm
, { /// Executes the transactions in the block and returns the receipts of the transactions in the - /// block, the total gas used and the list of EIP-7685 [requests](Request). + /// block, the total gas used and the list of EIP-7685 [requests](Requests). /// /// This applies the pre-execution and post-execution changes that require an [EVM](Evm), and /// executes the transactions. @@ -205,11 +206,11 @@ where let deposit_requests = crate::eip6110::parse_deposits_from_receipts(&self.chain_spec, &receipts)?; - let post_execution_requests = system_caller.apply_post_execution_changes(&mut evm)?; - - [deposit_requests, post_execution_requests].concat() + let mut requests = Requests::new(vec![deposit_requests]); + requests.extend(system_caller.apply_post_execution_changes(&mut evm)?); + requests } else { - vec![] + Requests::default() }; Ok(EthExecuteOutput { receipts, requests, gas_used: cumulative_gas_used }) @@ -283,7 +284,7 @@ where /// Execute a single block and apply the state changes to the internal state. /// /// Returns the receipts of the transactions in the block, the total gas used and the list of - /// EIP-7685 [requests](Request). + /// EIP-7685 [requests](Requests). /// /// Returns an error if execution fails. fn execute_without_verification_with_state_hook( @@ -494,11 +495,12 @@ where #[cfg(test)] mod tests { use super::*; - use alloy_consensus::{TxLegacy, EMPTY_ROOT_HASH}; + use alloy_consensus::TxLegacy; use alloy_eips::{ eip2935::{HISTORY_STORAGE_ADDRESS, HISTORY_STORAGE_CODE}, eip4788::{BEACON_ROOTS_ADDRESS, BEACON_ROOTS_CODE, SYSTEM_ADDRESS}, eip7002::{WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS, WITHDRAWAL_REQUEST_PREDEPLOY_CODE}, + eip7685::EMPTY_REQUESTS_HASH, }; use alloy_primitives::{b256, fixed_bytes, keccak256, Bytes, TxKind, B256}; use reth_chainspec::{ChainSpecBuilder, ForkCondition}; @@ -583,7 +585,6 @@ mod tests { transactions: vec![], ommers: vec![], withdrawals: None, - requests: None, }, }, senders: vec![], @@ -612,12 +613,7 @@ mod tests { &BlockWithSenders { block: Block { header: header.clone(), - body: BlockBody { - transactions: vec![], - ommers: vec![], - withdrawals: None, - requests: None, - }, + body: BlockBody { transactions: vec![], ommers: vec![], withdrawals: None }, }, senders: vec![], }, @@ -684,7 +680,6 @@ mod tests { transactions: vec![], ommers: vec![], withdrawals: None, - requests: None, }, }, senders: vec![], @@ -739,7 +734,6 @@ mod tests { transactions: vec![], ommers: vec![], withdrawals: None, - requests: None, }, }, senders: vec![], @@ -1016,7 +1010,7 @@ mod tests { parent_hash: B256::random(), timestamp: 1, number: fork_activation_block, - requests_root: Some(EMPTY_ROOT_HASH), + requests_hash: Some(EMPTY_REQUESTS_HASH), ..Header::default() }; let provider = executor_provider(chain_spec); @@ -1075,7 +1069,7 @@ mod tests { parent_hash: B256::random(), timestamp: 1, number: fork_activation_block, - requests_root: Some(EMPTY_ROOT_HASH), + requests_hash: Some(EMPTY_REQUESTS_HASH), ..Header::default() }; @@ -1121,7 +1115,7 @@ mod tests { ); let mut header = chain_spec.genesis_header().clone(); - header.requests_root = Some(EMPTY_ROOT_HASH); + header.requests_hash = Some(EMPTY_REQUESTS_HASH); let header_hash = header.hash_slow(); let provider = executor_provider(chain_spec); @@ -1159,7 +1153,7 @@ mod tests { parent_hash: header_hash, timestamp: 1, number: 1, - requests_root: Some(EMPTY_ROOT_HASH), + requests_hash: Some(EMPTY_REQUESTS_HASH), ..Header::default() }; let header_hash = header.hash_slow(); @@ -1196,7 +1190,7 @@ mod tests { parent_hash: 
header_hash, timestamp: 1, number: 2, - requests_root: Some(EMPTY_ROOT_HASH), + requests_hash: Some(EMPTY_REQUESTS_HASH), ..Header::default() }; @@ -1254,15 +1248,16 @@ mod tests { HashMap::default(), ); - // https://github.com/lightclient/7002asm/blob/e0d68e04d15f25057af7b6d180423d94b6b3bdb3/test/Contract.t.sol.in#L49-L64 + // https://github.com/lightclient/sys-asm/blob/9282bdb9fd64e024e27f60f507486ffb2183cba2/test/Withdrawal.t.sol.in#L36 let validator_public_key = fixed_bytes!("111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111"); - let withdrawal_amount = fixed_bytes!("2222222222222222"); + let withdrawal_amount = fixed_bytes!("0203040506070809"); let input: Bytes = [&validator_public_key[..], &withdrawal_amount[..]].concat().into(); assert_eq!(input.len(), 56); let mut header = chain_spec.genesis_header().clone(); header.gas_limit = 1_500_000; - header.gas_used = 134_807; + // measured + header.gas_used = 135_856; header.receipts_root = b256!("b31a3e47b902e9211c4d349af4e4c5604ce388471e79ca008907ae4616bb0ed3"); @@ -1272,10 +1267,10 @@ mod tests { chain_id: Some(chain_spec.chain.id()), nonce: 1, gas_price: header.base_fee_per_gas.unwrap().into(), - gas_limit: 134_807, + gas_limit: header.gas_used, to: TxKind::Call(WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS), // `MIN_WITHDRAWAL_REQUEST_FEE` - value: U256::from(1), + value: U256::from(2), input, }), ); @@ -1302,11 +1297,9 @@ mod tests { let receipt = receipts.first().unwrap(); assert!(receipt.success); - let request = requests.first().unwrap(); - let withdrawal_request = request.as_withdrawal_request().unwrap(); - assert_eq!(withdrawal_request.source_address, sender_address); - assert_eq!(withdrawal_request.validator_pubkey, validator_public_key); - assert_eq!(withdrawal_request.amount, u64::from_be_bytes(withdrawal_amount.into())); + assert!(requests[0].is_empty(), "there should be no deposits"); + assert!(!requests[1].is_empty(), "there should be a withdrawal"); + assert!(requests[2].is_empty(), "there should be no consolidations"); } #[test] diff --git a/crates/ethereum/evm/src/strategy.rs b/crates/ethereum/evm/src/strategy.rs index 7a297be49..714f673c8 100644 --- a/crates/ethereum/evm/src/strategy.rs +++ b/crates/ethereum/evm/src/strategy.rs @@ -6,6 +6,7 @@ use crate::{ }; use alloc::sync::Arc; use alloy_consensus::Transaction as _; +use alloy_eips::eip7685::Requests; use core::fmt::Display; use reth_chainspec::{ChainSpec, EthereumHardfork, EthereumHardforks, MAINNET}; use reth_consensus::ConsensusError; @@ -18,7 +19,7 @@ use reth_evm::{ system_calls::{OnStateHook, SystemCaller}, ConfigureEvm, ConfigureEvmEnv, }; -use reth_primitives::{BlockWithSenders, Header, Receipt, Request}; +use reth_primitives::{BlockWithSenders, Header, Receipt}; use reth_revm::{ db::{states::bundle_state::BundleRetention, BundleState}, state_change::post_block_balance_increments, @@ -194,7 +195,7 @@ where block: &BlockWithSenders, total_difficulty: U256, receipts: &[Receipt], - ) -> Result, Self::Error> { + ) -> Result { let env = self.evm_env_for_block(&block.header, total_difficulty); let mut evm = self.evm_config.evm_with_env(&mut self.state, env); @@ -203,12 +204,11 @@ where let deposit_requests = crate::eip6110::parse_deposits_from_receipts(&self.chain_spec, receipts)?; - let post_execution_requests = - self.system_caller.apply_post_execution_changes(&mut evm)?; - - [deposit_requests, post_execution_requests].concat() + let mut requests = Requests::new(vec![deposit_requests]); + 
requests.extend(self.system_caller.apply_post_execution_changes(&mut evm)?); + requests } else { - vec![] + Requests::default() }; drop(evm); @@ -257,7 +257,7 @@ where &self, block: &BlockWithSenders, receipts: &[Receipt], - requests: &[Request], + requests: &Requests, ) -> Result<(), ConsensusError> { validate_block_post_execution(block, &self.chain_spec.clone(), receipts, requests) } @@ -266,11 +266,12 @@ where #[cfg(test)] mod tests { use super::*; - use alloy_consensus::{TxLegacy, EMPTY_ROOT_HASH}; + use alloy_consensus::TxLegacy; use alloy_eips::{ eip2935::{HISTORY_STORAGE_ADDRESS, HISTORY_STORAGE_CODE}, eip4788::{BEACON_ROOTS_ADDRESS, BEACON_ROOTS_CODE, SYSTEM_ADDRESS}, eip7002::{WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS, WITHDRAWAL_REQUEST_PREDEPLOY_CODE}, + eip7685::EMPTY_REQUESTS_HASH, }; use alloy_primitives::{b256, fixed_bytes, keccak256, Bytes, TxKind, B256}; use reth_chainspec::{ChainSpecBuilder, ForkCondition}; @@ -365,7 +366,6 @@ mod tests { transactions: vec![], ommers: vec![], withdrawals: None, - requests: None, }, }, senders: vec![], @@ -397,7 +397,6 @@ mod tests { transactions: vec![], ommers: vec![], withdrawals: None, - requests: None, }, }, senders: vec![], @@ -468,7 +467,6 @@ mod tests { transactions: vec![], ommers: vec![], withdrawals: None, - requests: None, }, }, senders: vec![], @@ -523,7 +521,6 @@ mod tests { transactions: vec![], ommers: vec![], withdrawals: None, - requests: None, }, }, senders: vec![], @@ -797,7 +794,7 @@ mod tests { parent_hash: B256::random(), timestamp: 1, number: fork_activation_block, - requests_root: Some(EMPTY_ROOT_HASH), + requests_hash: Some(EMPTY_REQUESTS_HASH), ..Header::default() }; let provider = executor_provider(chain_spec); @@ -855,7 +852,7 @@ mod tests { parent_hash: B256::random(), timestamp: 1, number: fork_activation_block, - requests_root: Some(EMPTY_ROOT_HASH), + requests_hash: Some(EMPTY_REQUESTS_HASH), ..Header::default() }; @@ -901,7 +898,7 @@ mod tests { ); let mut header = chain_spec.genesis_header().clone(); - header.requests_root = Some(EMPTY_ROOT_HASH); + header.requests_hash = Some(EMPTY_REQUESTS_HASH); let header_hash = header.hash_slow(); let provider = executor_provider(chain_spec); @@ -938,7 +935,7 @@ mod tests { parent_hash: header_hash, timestamp: 1, number: 1, - requests_root: Some(EMPTY_ROOT_HASH), + requests_hash: Some(EMPTY_REQUESTS_HASH), ..Header::default() }; let header_hash = header.hash_slow(); @@ -977,7 +974,7 @@ mod tests { parent_hash: header_hash, timestamp: 1, number: 2, - requests_root: Some(EMPTY_ROOT_HASH), + requests_hash: Some(EMPTY_REQUESTS_HASH), ..Header::default() }; @@ -1039,15 +1036,16 @@ mod tests { HashMap::default(), ); - // https://github.com/lightclient/7002asm/blob/e0d68e04d15f25057af7b6d180423d94b6b3bdb3/test/Contract.t.sol.in#L49-L64 + // https://github.com/lightclient/sys-asm/blob/9282bdb9fd64e024e27f60f507486ffb2183cba2/test/Withdrawal.t.sol.in#L36 let validator_public_key = fixed_bytes!("111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111"); - let withdrawal_amount = fixed_bytes!("2222222222222222"); + let withdrawal_amount = fixed_bytes!("0203040506070809"); let input: Bytes = [&validator_public_key[..], &withdrawal_amount[..]].concat().into(); assert_eq!(input.len(), 56); let mut header = chain_spec.genesis_header().clone(); header.gas_limit = 1_500_000; - header.gas_used = 134_807; + // measured + header.gas_used = 135_856; header.receipts_root = b256!("b31a3e47b902e9211c4d349af4e4c5604ce388471e79ca008907ae4616bb0ed3"); @@ 
-1057,10 +1055,10 @@ mod tests { chain_id: Some(chain_spec.chain.id()), nonce: 1, gas_price: header.base_fee_per_gas.unwrap().into(), - gas_limit: 134_807, + gas_limit: header.gas_used, to: TxKind::Call(WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS), // `MIN_WITHDRAWAL_REQUEST_FEE` - value: U256::from(1), + value: U256::from(2), input, }), ); @@ -1087,11 +1085,9 @@ mod tests { let receipt = receipts.first().unwrap(); assert!(receipt.success); - let request = requests.first().unwrap(); - let withdrawal_request = request.as_withdrawal_request().unwrap(); - assert_eq!(withdrawal_request.source_address, sender_address); - assert_eq!(withdrawal_request.validator_pubkey, validator_public_key); - assert_eq!(withdrawal_request.amount, u64::from_be_bytes(withdrawal_amount.into())); + assert!(requests[0].is_empty(), "there should be no deposits"); + assert!(!requests[1].is_empty(), "there should be a withdrawal"); + assert!(requests[2].is_empty(), "there should be no consolidations"); } #[test] diff --git a/crates/ethereum/payload/Cargo.toml b/crates/ethereum/payload/Cargo.toml index ce37a4f8e..443e837b2 100644 --- a/crates/ethereum/payload/Cargo.toml +++ b/crates/ethereum/payload/Cargo.toml @@ -33,8 +33,9 @@ revm.workspace = true revm-primitives.workspace = true # alloy -alloy-primitives.workspace = true +alloy-eips.workspace = true alloy-consensus.workspace = true +alloy-primitives.workspace = true # misc tracing.workspace = true diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index dcf54fc02..e09228302 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -10,6 +10,7 @@ #![allow(clippy::useless_let_if_seq)] use alloy_consensus::EMPTY_OMMER_ROOT_HASH; +use alloy_eips::eip7685::Requests; use alloy_primitives::U256; use reth_basic_payload_builder::{ commit_withdrawals, is_better_payload, BuildArguments, BuildOutcome, PayloadBuilder, @@ -25,7 +26,7 @@ use reth_payload_builder::{EthBuiltPayload, EthPayloadBuilderAttributes}; use reth_payload_primitives::{PayloadBuilderAttributes, PayloadBuilderError}; use reth_primitives::{ constants::{eip4844::MAX_DATA_GAS_PER_BLOCK, BEACON_NONCE}, - proofs::{self, calculate_requests_root}, + proofs::{self}, revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg}, Block, BlockBody, EthereumHardforks, Header, Receipt, }; @@ -308,9 +309,7 @@ where } // calculate the requests and the requests root - let (requests, requests_root) = if chain_spec - .is_prague_active_at_timestamp(attributes.timestamp) - { + let requests = if chain_spec.is_prague_active_at_timestamp(attributes.timestamp) { let deposit_requests = parse_deposits_from_receipts(&chain_spec, receipts.iter().flatten()) .map_err(|err| PayloadBuilderError::Internal(RethError::Execution(err.into())))?; let withdrawal_requests = system_caller @@ -328,11 +327,9 @@ where ) .map_err(|err| PayloadBuilderError::Internal(err.into()))?; - let requests = [deposit_requests, withdrawal_requests, consolidation_requests].concat(); - let requests_root = calculate_requests_root(&requests); - (Some(requests.into()), Some(requests_root)) + Some(Requests::new(vec![deposit_requests, withdrawal_requests, consolidation_requests])) } else { - (None, None) + None }; let WithdrawalsOutcome { withdrawals_root, withdrawals } = @@ -414,13 +411,13 @@ where parent_beacon_block_root: attributes.parent_beacon_block_root, blob_gas_used: blob_gas_used.map(Into::into), excess_blob_gas: excess_blob_gas.map(Into::into), - requests_root, + requests_hash: requests.map(|r| r.requests_hash()), }; 
// seal the block let block = Block { header, - body: BlockBody { transactions: executed_txs, ommers: vec![], withdrawals, requests }, + body: BlockBody { transactions: executed_txs, ommers: vec![], withdrawals }, }; let sealed_block = block.seal_slow(); diff --git a/crates/evm/execution-types/Cargo.toml b/crates/evm/execution-types/Cargo.toml index 9bd653732..49e962302 100644 --- a/crates/evm/execution-types/Cargo.toml +++ b/crates/evm/execution-types/Cargo.toml @@ -25,7 +25,6 @@ serde = { workspace = true, optional = true } serde_with = { workspace = true, optional = true } [dev-dependencies] -alloy-eips.workspace = true arbitrary.workspace = true bincode.workspace = true rand.workspace = true @@ -35,5 +34,9 @@ reth-primitives = { workspace = true, features = ["arbitrary", "test-utils"] } default = ["std"] optimism = ["reth-primitives/optimism", "revm/optimism"] serde = ["dep:serde", "reth-trie/serde", "revm/serde"] -serde-bincode-compat = ["reth-primitives/serde-bincode-compat", "reth-trie/serde-bincode-compat", "serde_with"] +serde-bincode-compat = [ + "reth-primitives/serde-bincode-compat", + "reth-trie/serde-bincode-compat", + "serde_with", +] std = [] diff --git a/crates/evm/execution-types/src/execute.rs b/crates/evm/execution-types/src/execute.rs index 0cf5d7050..ae5ad2c0b 100644 --- a/crates/evm/execution-types/src/execute.rs +++ b/crates/evm/execution-types/src/execute.rs @@ -1,5 +1,5 @@ +use alloy_eips::eip7685::Requests; use alloy_primitives::U256; -use reth_primitives::Request; use revm::db::BundleState; /// A helper type for ethereum block inputs that consists of a block and the total difficulty. @@ -33,8 +33,8 @@ pub struct BlockExecutionOutput { pub state: BundleState, /// All the receipts of the transactions in the block. pub receipts: Vec, - /// All the EIP-7685 requests of the transactions in the block. - pub requests: Vec, + /// All the EIP-7685 requests in the block. + pub requests: Requests, /// The total gas used by the block. 
pub gas_used: u64, } diff --git a/crates/evm/execution-types/src/execution_outcome.rs b/crates/evm/execution-types/src/execution_outcome.rs index 08ddf9e41..0fde01547 100644 --- a/crates/evm/execution-types/src/execution_outcome.rs +++ b/crates/evm/execution-types/src/execution_outcome.rs @@ -1,6 +1,7 @@ use crate::BlockExecutionOutput; +use alloy_eips::eip7685::Requests; use alloy_primitives::{Address, BlockNumber, Bloom, Log, B256, U256}; -use reth_primitives::{logs_bloom, Account, Bytecode, Receipt, Receipts, Requests, StorageEntry}; +use reth_primitives::{logs_bloom, Account, Bytecode, Receipt, Receipts, StorageEntry}; use reth_trie::HashedPostState; use revm::{ db::{states::BundleState, BundleAccount}, @@ -357,7 +358,7 @@ impl From<(BlockExecutionOutput, BlockNumber)> for ExecutionOutcome { bundle: value.0.state, receipts: Receipts::from(value.0.receipts), first_block: value.1, - requests: vec![Requests::from(value.0.requests)], + requests: vec![value.0.requests], } } } @@ -365,9 +366,9 @@ impl From<(BlockExecutionOutput, BlockNumber)> for ExecutionOutcome { #[cfg(test)] mod tests { use super::*; - use alloy_eips::{eip6110::DepositRequest, eip7002::WithdrawalRequest}; - use alloy_primitives::{Address, FixedBytes, LogData, B256}; - use reth_primitives::{Receipts, Request, Requests, TxType}; + use alloy_eips::eip7685::Requests; + use alloy_primitives::{bytes, Address, LogData, B256}; + use reth_primitives::{Receipts, TxType}; use std::collections::HashMap; #[test] @@ -393,29 +394,8 @@ mod tests { })]], }; - // Create a Requests object with a vector of requests, including DepositRequest and - // WithdrawalRequest - let requests = vec![Requests(vec![ - Request::DepositRequest(DepositRequest { - pubkey: FixedBytes::<48>::from([1; 48]), - withdrawal_credentials: B256::from([0; 32]), - amount: 1111, - signature: FixedBytes::<96>::from([2; 96]), - index: 222, - }), - Request::DepositRequest(DepositRequest { - pubkey: FixedBytes::<48>::from([23; 48]), - withdrawal_credentials: B256::from([0; 32]), - amount: 34343, - signature: FixedBytes::<96>::from([43; 96]), - index: 1212, - }), - Request::WithdrawalRequest(WithdrawalRequest { - source_address: Address::from([1; 20]), - validator_pubkey: FixedBytes::<48>::from([10; 48]), - amount: 72, - }), - ])]; + // Create a Requests object with a vector of requests + let requests = vec![Requests::new(vec![bytes!("dead"), bytes!("beef"), bytes!("beebee")])]; // Define the first block number let first_block = 123; @@ -657,17 +637,12 @@ mod tests { // Define the first block number let first_block = 123; - // Create a DepositRequest object with specific attributes. - let request = Request::DepositRequest(DepositRequest { - pubkey: FixedBytes::<48>::from([1; 48]), - withdrawal_credentials: B256::from([0; 32]), - amount: 1111, - signature: FixedBytes::<96>::from([2; 96]), - index: 222, - }); + // Create a request. + let request = bytes!("deadbeef"); // Create a vector of Requests containing the request. - let requests = vec![Requests(vec![request]), Requests(vec![request])]; + let requests = + vec![Requests::new(vec![request.clone()]), Requests::new(vec![request.clone()])]; // Create a ExecutionOutcome object with the created bundle, receipts, requests, and // first_block @@ -681,7 +656,7 @@ mod tests { assert_eq!(exec_res.receipts, Receipts { receipt_vec: vec![vec![Some(receipt)]] }); // Assert that the requests are properly cut after reverting to the initial block number. 
- assert_eq!(exec_res.requests, vec![Requests(vec![request])]); + assert_eq!(exec_res.requests, vec![Requests::new(vec![request])]); // Assert that the revert_to method returns false when attempting to revert to a block // number greater than the initial block number. @@ -709,17 +684,11 @@ mod tests { // Create a Receipts object containing the receipt. let receipts = Receipts { receipt_vec: vec![vec![Some(receipt.clone())]] }; - // Create a DepositRequest object with specific attributes. - let request = Request::DepositRequest(DepositRequest { - pubkey: FixedBytes::<48>::from([1; 48]), - withdrawal_credentials: B256::from([0; 32]), - amount: 1111, - signature: FixedBytes::<96>::from([2; 96]), - index: 222, - }); + // Create a request. + let request = bytes!("deadbeef"); // Create a vector of Requests containing the request. - let requests = vec![Requests(vec![request])]; + let requests = vec![Requests::new(vec![request.clone()])]; // Define the initial block number. let first_block = 123; @@ -739,7 +708,7 @@ mod tests { receipts: Receipts { receipt_vec: vec![vec![Some(receipt.clone())], vec![Some(receipt)]] }, - requests: vec![Requests(vec![request]), Requests(vec![request])], + requests: vec![Requests::new(vec![request.clone()]), Requests::new(vec![request])], first_block: 123, } ); @@ -771,18 +740,15 @@ mod tests { // Define the first block number let first_block = 123; - // Create a DepositRequest object with specific attributes. - let request = Request::DepositRequest(DepositRequest { - pubkey: FixedBytes::<48>::from([1; 48]), - withdrawal_credentials: B256::from([0; 32]), - amount: 1111, - signature: FixedBytes::<96>::from([2; 96]), - index: 222, - }); + // Create a request. + let request = bytes!("deadbeef"); // Create a vector of Requests containing the request. 
- let requests = - vec![Requests(vec![request]), Requests(vec![request]), Requests(vec![request])]; + let requests = vec![ + Requests::new(vec![request.clone()]), + Requests::new(vec![request.clone()]), + Requests::new(vec![request.clone()]), + ]; // Create a ExecutionOutcome object with the created bundle, receipts, requests, and // first_block @@ -796,7 +762,7 @@ mod tests { let lower_execution_outcome = ExecutionOutcome { bundle: Default::default(), receipts: Receipts { receipt_vec: vec![vec![Some(receipt.clone())]] }, - requests: vec![Requests(vec![request])], + requests: vec![Requests::new(vec![request.clone()])], first_block, }; @@ -806,7 +772,7 @@ mod tests { receipts: Receipts { receipt_vec: vec![vec![Some(receipt.clone())], vec![Some(receipt)]], }, - requests: vec![Requests(vec![request]), Requests(vec![request])], + requests: vec![Requests::new(vec![request.clone()]), Requests::new(vec![request])], first_block: 124, }; diff --git a/crates/evm/src/execute.rs b/crates/evm/src/execute.rs index f52325b43..d7c8590ee 100644 --- a/crates/evm/src/execute.rs +++ b/crates/evm/src/execute.rs @@ -9,10 +9,11 @@ pub use reth_storage_errors::provider::ProviderError; use crate::system_calls::OnStateHook; use alloc::{boxed::Box, vec::Vec}; +use alloy_eips::eip7685::Requests; use alloy_primitives::BlockNumber; use core::{fmt::Display, marker::PhantomData}; use reth_consensus::ConsensusError; -use reth_primitives::{BlockWithSenders, Receipt, Request}; +use reth_primitives::{BlockWithSenders, Receipt}; use reth_prune_types::PruneModes; use reth_revm::batch::BlockBatchRecord; use revm::{db::BundleState, State}; @@ -190,7 +191,7 @@ pub trait BlockExecutionStrategy { block: &BlockWithSenders, total_difficulty: U256, receipts: &[Receipt], - ) -> Result, Self::Error>; + ) -> Result; /// Returns a reference to the current state. 
fn state_ref(&self) -> &State; @@ -209,7 +210,7 @@ pub trait BlockExecutionStrategy { &self, block: &BlockWithSenders, receipts: &[Receipt], - requests: &[Request], + requests: &Requests, ) -> Result<(), ConsensusError>; } @@ -450,10 +451,10 @@ where #[cfg(test)] mod tests { use super::*; - use alloy_eips::eip6110::DepositRequest; use alloy_primitives::U256; use reth_chainspec::{ChainSpec, MAINNET}; use revm::db::{CacheDB, EmptyDBTyped}; + use revm_primitives::bytes; use std::sync::Arc; #[derive(Clone, Default)] @@ -545,14 +546,14 @@ mod tests { _evm_config: EvmConfig, state: State, execute_transactions_result: (Vec, u64), - apply_post_execution_changes_result: Vec, + apply_post_execution_changes_result: Requests, finish_result: BundleState, } #[derive(Clone)] struct TestExecutorStrategyFactory { execute_transactions_result: (Vec, u64), - apply_post_execution_changes_result: Vec, + apply_post_execution_changes_result: Requests, finish_result: BundleState, } @@ -607,7 +608,7 @@ mod tests { _block: &BlockWithSenders, _total_difficulty: U256, _receipts: &[Receipt], - ) -> Result, Self::Error> { + ) -> Result { Ok(self.apply_post_execution_changes_result.clone()) } @@ -629,7 +630,7 @@ mod tests { &self, _block: &BlockWithSenders, _receipts: &[Receipt], - _requests: &[Request], + _requests: &Requests, ) -> Result<(), ConsensusError> { Ok(()) } @@ -651,8 +652,7 @@ mod tests { let expected_gas_used = 10; let expected_receipts = vec![Receipt::default()]; let expected_execute_transactions_result = (expected_receipts.clone(), expected_gas_used); - let expected_apply_post_execution_changes_result = - vec![Request::DepositRequest(DepositRequest::default())]; + let expected_apply_post_execution_changes_result = Requests::new(vec![bytes!("deadbeef")]); let expected_finish_result = BundleState::default(); let strategy_factory = TestExecutorStrategyFactory { diff --git a/crates/evm/src/system_calls/eip7002.rs b/crates/evm/src/system_calls/eip7002.rs index 9af944e42..5e36f2bde 100644 --- a/crates/evm/src/system_calls/eip7002.rs +++ b/crates/evm/src/system_calls/eip7002.rs @@ -1,10 +1,10 @@ //! [EIP-7002](https://eips.ethereum.org/EIPS/eip-7002) system call implementation. use crate::ConfigureEvm; -use alloc::{boxed::Box, format, string::ToString, vec::Vec}; -use alloy_eips::eip7002::{WithdrawalRequest, WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS}; -use alloy_primitives::{bytes::Buf, Address, Bytes, FixedBytes}; +use alloc::{boxed::Box, format}; +use alloy_eips::eip7002::WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS; +use alloy_primitives::Bytes; use reth_execution_errors::{BlockExecutionError, BlockValidationError}; -use reth_primitives::{Header, Request}; +use reth_primitives::Header; use revm::{interpreter::Host, Database, Evm}; use revm_primitives::{ExecutionResult, ResultAndState}; @@ -62,52 +62,23 @@ where Ok(res) } -/// Parses the withdrawal requests from the execution output. +/// Calls the withdrawals requests system contract, and returns the requests from the execution +/// output. #[inline] -pub(crate) fn post_commit(result: ExecutionResult) -> Result, BlockExecutionError> { - let mut data = match result { +pub(crate) fn post_commit(result: ExecutionResult) -> Result { + match result { ExecutionResult::Success { output, .. } => Ok(output.into_data()), ExecutionResult::Revert { output, .. } => { Err(BlockValidationError::WithdrawalRequestsContractCall { message: format!("execution reverted: {output}"), - }) + } + .into()) } ExecutionResult::Halt { reason, .. 
} => { Err(BlockValidationError::WithdrawalRequestsContractCall { message: format!("execution halted: {reason:?}"), - }) - } - }?; - - // Withdrawals are encoded as a series of withdrawal requests, each with the following - // format: - // - // +------+--------+--------+ - // | addr | pubkey | amount | - // +------+--------+--------+ - // 20 48 8 - - const WITHDRAWAL_REQUEST_SIZE: usize = 20 + 48 + 8; - let mut withdrawal_requests = Vec::with_capacity(data.len() / WITHDRAWAL_REQUEST_SIZE); - while data.has_remaining() { - if data.remaining() < WITHDRAWAL_REQUEST_SIZE { - return Err(BlockValidationError::WithdrawalRequestsContractCall { - message: "invalid withdrawal request length".to_string(), } .into()) } - - let mut source_address = Address::ZERO; - data.copy_to_slice(source_address.as_mut_slice()); - - let mut validator_pubkey = FixedBytes::<48>::ZERO; - data.copy_to_slice(validator_pubkey.as_mut_slice()); - - let amount = data.get_u64(); - - withdrawal_requests - .push(WithdrawalRequest { source_address, validator_pubkey, amount }.into()); } - - Ok(withdrawal_requests) } diff --git a/crates/evm/src/system_calls/eip7251.rs b/crates/evm/src/system_calls/eip7251.rs index f09d4be81..7a55c7a5a 100644 --- a/crates/evm/src/system_calls/eip7251.rs +++ b/crates/evm/src/system_calls/eip7251.rs @@ -1,10 +1,10 @@ //! [EIP-7251](https://eips.ethereum.org/EIPS/eip-7251) system call implementation. use crate::ConfigureEvm; -use alloc::{boxed::Box, format, string::ToString, vec::Vec}; -use alloy_eips::eip7251::{ConsolidationRequest, CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS}; -use alloy_primitives::{bytes::Buf, Address, Bytes, FixedBytes}; +use alloc::{boxed::Box, format}; +use alloy_eips::eip7251::CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS; +use alloy_primitives::Bytes; use reth_execution_errors::{BlockExecutionError, BlockValidationError}; -use reth_primitives::{Header, Request}; +use reth_primitives::Header; use revm::{interpreter::Host, Database, Evm}; use revm_primitives::{ExecutionResult, ResultAndState}; @@ -64,56 +64,23 @@ where Ok(res) } -/// Parses the consolidation requests from the execution output. +/// Calls the consolidation requests system contract, and returns the requests from the execution +/// output. #[inline] -pub(crate) fn post_commit(result: ExecutionResult) -> Result, BlockExecutionError> { - let mut data = match result { +pub(crate) fn post_commit(result: ExecutionResult) -> Result { + match result { ExecutionResult::Success { output, .. } => Ok(output.into_data()), ExecutionResult::Revert { output, .. } => { Err(BlockValidationError::ConsolidationRequestsContractCall { message: format!("execution reverted: {output}"), - }) + } + .into()) } ExecutionResult::Halt { reason, .. 
} => { Err(BlockValidationError::ConsolidationRequestsContractCall { message: format!("execution halted: {reason:?}"), - }) - } - }?; - - // Consolidations are encoded as a series of consolidation requests, each with the following - // format: - // - // +------+--------+---------------+ - // | addr | pubkey | target pubkey | - // +------+--------+---------------+ - // 20 48 48 - - const CONSOLIDATION_REQUEST_SIZE: usize = 20 + 48 + 48; - let mut consolidation_requests = Vec::with_capacity(data.len() / CONSOLIDATION_REQUEST_SIZE); - while data.has_remaining() { - if data.remaining() < CONSOLIDATION_REQUEST_SIZE { - return Err(BlockValidationError::ConsolidationRequestsContractCall { - message: "invalid consolidation request length".to_string(), } .into()) } - - let mut source_address = Address::ZERO; - data.copy_to_slice(source_address.as_mut_slice()); - - let mut source_pubkey = FixedBytes::<48>::ZERO; - data.copy_to_slice(source_pubkey.as_mut_slice()); - - let mut target_pubkey = FixedBytes::<48>::ZERO; - data.copy_to_slice(target_pubkey.as_mut_slice()); - - consolidation_requests.push(Request::ConsolidationRequest(ConsolidationRequest { - source_address, - source_pubkey, - target_pubkey, - })); } - - Ok(consolidation_requests) } diff --git a/crates/evm/src/system_calls/mod.rs b/crates/evm/src/system_calls/mod.rs index 5dc3f35bd..d71dcfeda 100644 --- a/crates/evm/src/system_calls/mod.rs +++ b/crates/evm/src/system_calls/mod.rs @@ -1,11 +1,13 @@ //! System contract call functions. use crate::ConfigureEvm; -use alloc::{boxed::Box, vec::Vec}; +use alloc::{boxed::Box, vec}; +use alloy_eips::eip7685::Requests; +use alloy_primitives::Bytes; use core::fmt::Display; use reth_chainspec::EthereumHardforks; use reth_execution_errors::BlockExecutionError; -use reth_primitives::{Block, Header, Request}; +use reth_primitives::{Block, Header}; use revm::{Database, DatabaseCommit, Evm}; use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, B256}; @@ -119,17 +121,18 @@ where pub fn apply_post_execution_changes( &mut self, evm: &mut Evm<'_, Ext, DB>, - ) -> Result, BlockExecutionError> + ) -> Result where DB: Database + DatabaseCommit, DB::Error: Display, { + // todo // Collect all EIP-7685 requests let withdrawal_requests = self.apply_withdrawal_requests_contract_call(evm)?; // Collect all EIP-7251 requests let consolidation_requests = self.apply_consolidation_requests_contract_call(evm)?; - Ok([withdrawal_requests, consolidation_requests].concat()) + Ok(Requests::new(vec![withdrawal_requests, consolidation_requests])) } /// Applies the pre-block call to the EIP-2935 blockhashes contract. 
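`post_commit` for both EIP-7002 and EIP-7251 now returns the raw contract output as `Bytes`; the typed parsing shown in the deleted lines was dropped rather than relocated. If a downstream consumer still needs structured withdrawal requests, the fixed-width layout documented in the removed code (20-byte source address, 48-byte validator pubkey, 8-byte big-endian amount) can be decoded separately. A rough sketch under that assumption, mirroring the deleted parser; `decode_withdrawal_requests` is a hypothetical helper, not part of this change:

    use alloy_primitives::{bytes::Buf, Address, Bytes, FixedBytes};

    // Each EIP-7002 withdrawal request is encoded as:
    // +------+--------+--------+
    // | addr | pubkey | amount |
    // +------+--------+--------+
    //    20      48       8
    const WITHDRAWAL_REQUEST_SIZE: usize = 20 + 48 + 8;

    /// Hypothetical helper: decodes raw system-contract output into
    /// (source address, validator pubkey, amount) tuples. Trailing bytes that do not
    /// form a full request are ignored here, whereas the deleted parser treated a
    /// short remainder as an error.
    fn decode_withdrawal_requests(mut data: Bytes) -> Vec<(Address, FixedBytes<48>, u64)> {
        let mut out = Vec::with_capacity(data.len() / WITHDRAWAL_REQUEST_SIZE);
        while data.remaining() >= WITHDRAWAL_REQUEST_SIZE {
            let mut source_address = Address::ZERO;
            data.copy_to_slice(source_address.as_mut_slice());

            let mut validator_pubkey = FixedBytes::<48>::ZERO;
            data.copy_to_slice(validator_pubkey.as_mut_slice());

            // `get_u64` reads big-endian, matching the removed implementation.
            let amount = data.get_u64();

            out.push((source_address, validator_pubkey, amount));
        }
        out
    }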
@@ -247,7 +250,7 @@ where db: &mut DB, initialized_cfg: &CfgEnvWithHandlerCfg, initialized_block_env: &BlockEnv, - ) -> Result, BlockExecutionError> + ) -> Result where DB: Database + DatabaseCommit, DB::Error: Display, @@ -263,7 +266,7 @@ where pub fn apply_withdrawal_requests_contract_call( &mut self, evm: &mut Evm<'_, Ext, DB>, - ) -> Result, BlockExecutionError> + ) -> Result where DB: Database + DatabaseCommit, DB::Error: Display, @@ -285,7 +288,7 @@ where db: &mut DB, initialized_cfg: &CfgEnvWithHandlerCfg, initialized_block_env: &BlockEnv, - ) -> Result, BlockExecutionError> + ) -> Result where DB: Database + DatabaseCommit, DB::Error: Display, @@ -301,7 +304,7 @@ where pub fn apply_consolidation_requests_contract_call( &mut self, evm: &mut Evm<'_, Ext, DB>, - ) -> Result, BlockExecutionError> + ) -> Result where DB: Database + DatabaseCommit, DB::Error: Display, diff --git a/crates/evm/src/test_utils.rs b/crates/evm/src/test_utils.rs index 261b36420..c20f43dca 100644 --- a/crates/evm/src/test_utils.rs +++ b/crates/evm/src/test_utils.rs @@ -7,6 +7,7 @@ use crate::{ }, system_calls::OnStateHook, }; +use alloy_eips::eip7685::Requests; use alloy_primitives::BlockNumber; use parking_lot::Mutex; use reth_execution_errors::BlockExecutionError; @@ -62,7 +63,10 @@ impl Executor for MockExecutorProvider { Ok(BlockExecutionOutput { state: bundle, receipts: receipts.into_iter().flatten().flatten().collect(), - requests: requests.into_iter().flatten().collect(), + requests: requests.into_iter().fold(Requests::default(), |mut reqs, req| { + reqs.extend(req); + reqs + }), gas_used: 0, }) } diff --git a/crates/exex/exex/src/backfill/test_utils.rs b/crates/exex/exex/src/backfill/test_utils.rs index 1c793975c..5eaf92bfe 100644 --- a/crates/exex/exex/src/backfill/test_utils.rs +++ b/crates/exex/exex/src/backfill/test_utils.rs @@ -10,7 +10,7 @@ use reth_evm::execute::{ }; use reth_evm_ethereum::execute::EthExecutorProvider; use reth_primitives::{ - constants::ETH_TO_WEI, Block, BlockBody, BlockWithSenders, Header, Receipt, Requests, + constants::ETH_TO_WEI, Block, BlockBody, BlockWithSenders, Header, Receipt, SealedBlockWithSenders, Transaction, }; use reth_provider::{ @@ -29,7 +29,7 @@ pub(crate) fn to_execution_outcome( bundle: block_execution_output.state.clone(), receipts: block_execution_output.receipts.clone().into(), first_block: block_number, - requests: vec![Requests(block_execution_output.requests.clone())], + requests: vec![block_execution_output.requests.clone()], } } diff --git a/crates/net/eth-wire-types/src/blocks.rs b/crates/net/eth-wire-types/src/blocks.rs index 6e5483f3a..d60c63fc1 100644 --- a/crates/net/eth-wire-types/src/blocks.rs +++ b/crates/net/eth-wire-types/src/blocks.rs @@ -277,7 +277,7 @@ mod tests { blob_gas_used: None, excess_blob_gas: None, parent_beacon_block_root: None, - requests_root: None + requests_hash: None }, ]), }.encode(&mut data); @@ -312,7 +312,7 @@ mod tests { blob_gas_used: None, excess_blob_gas: None, parent_beacon_block_root: None, - requests_root: None + requests_hash: None }, ]), }; @@ -412,11 +412,10 @@ mod tests { blob_gas_used: None, excess_blob_gas: None, parent_beacon_block_root: None, - requests_root: None + requests_hash: None }, ], withdrawals: None, - requests: None } ]), }; @@ -488,11 +487,10 @@ mod tests { blob_gas_used: None, excess_blob_gas: None, parent_beacon_block_root: None, - requests_root: None + requests_hash: None }, ], withdrawals: None, - requests: None } ]), }; diff --git a/crates/net/eth-wire-types/src/header.rs 
b/crates/net/eth-wire-types/src/header.rs index b25a7568b..8c11bfa82 100644 --- a/crates/net/eth-wire-types/src/header.rs +++ b/crates/net/eth-wire-types/src/header.rs @@ -143,7 +143,7 @@ mod tests { blob_gas_used: None, excess_blob_gas: None, parent_beacon_block_root: None, - requests_root: None + requests_hash: None }; assert_eq!(header.hash_slow(), expected_hash); } @@ -256,7 +256,7 @@ mod tests { blob_gas_used: Some(0x020000), excess_blob_gas: Some(0), parent_beacon_block_root: None, - requests_root: None, + requests_hash: None, }; let header = Header::decode(&mut data.as_slice()).unwrap(); @@ -296,7 +296,7 @@ mod tests { parent_beacon_block_root: None, blob_gas_used: Some(0), excess_blob_gas: Some(0x1600000), - requests_root: None, + requests_hash: None, }; let header = Header::decode(&mut data.as_slice()).unwrap(); diff --git a/crates/net/p2p/src/full_block.rs b/crates/net/p2p/src/full_block.rs index 0116f1348..e5129b686 100644 --- a/crates/net/p2p/src/full_block.rs +++ b/crates/net/p2p/src/full_block.rs @@ -347,22 +347,6 @@ fn ensure_valid_body_response( _ => return Err(ConsensusError::WithdrawalsRootUnexpected), } - match (header.requests_root, &block.requests) { - (Some(header_requests_root), Some(requests)) => { - let requests = requests.0.as_slice(); - let requests_root = reth_primitives::proofs::calculate_requests_root(requests); - if requests_root != header_requests_root { - return Err(ConsensusError::BodyRequestsRootDiff( - GotExpected { got: requests_root, expected: header_requests_root }.into(), - )) - } - } - (None, None) => { - // this is ok because we assume the fork is not active in this case - } - _ => return Err(ConsensusError::RequestsRootUnexpected), - } - Ok(()) } diff --git a/crates/optimism/evm/Cargo.toml b/crates/optimism/evm/Cargo.toml index a1e2021a4..f251347c5 100644 --- a/crates/optimism/evm/Cargo.toml +++ b/crates/optimism/evm/Cargo.toml @@ -23,6 +23,7 @@ reth-prune-types.workspace = true reth-consensus.workspace = true # ethereum +alloy-eips.workspace = true alloy-primitives.workspace = true op-alloy-consensus.workspace = true alloy-consensus.workspace = true @@ -41,8 +42,6 @@ derive_more.workspace = true tracing.workspace = true [dev-dependencies] -alloy-eips.workspace = true - reth-evm = { workspace = true, features = ["test-utils"] } reth-revm = { workspace = true, features = ["test-utils"] } reth-primitives = { workspace = true, features = ["test-utils"] } diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index 263420623..3a86f5bba 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -5,6 +5,7 @@ use crate::{ }; use alloc::{boxed::Box, sync::Arc, vec::Vec}; use alloy_consensus::Transaction as _; +use alloy_eips::eip7685::Requests; use alloy_primitives::{BlockNumber, U256}; use core::fmt::Display; use reth_chainspec::{ChainSpec, EthereumHardforks}; @@ -380,7 +381,7 @@ where Ok(BlockExecutionOutput { state: self.state.take_bundle(), receipts, - requests: vec![], + requests: Requests::default(), gas_used, }) } @@ -403,7 +404,7 @@ where Ok(BlockExecutionOutput { state: self.state.take_bundle(), receipts, - requests: vec![], + requests: Requests::default(), gas_used, }) } @@ -429,7 +430,7 @@ where Ok(BlockExecutionOutput { state: self.state.take_bundle(), receipts, - requests: vec![], + requests: Requests::default(), gas_used, }) } diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index 3eda2878c..ffc82fde4 100644 --- a/crates/optimism/evm/src/lib.rs +++ 
b/crates/optimism/evm/src/lib.rs @@ -10,7 +10,6 @@ // The `optimism` feature must be enabled to use this crate. #![cfg(feature = "optimism")] -#[macro_use] extern crate alloc; use alloc::{sync::Arc, vec::Vec}; diff --git a/crates/optimism/evm/src/strategy.rs b/crates/optimism/evm/src/strategy.rs index 9ba43604e..199a8a4d3 100644 --- a/crates/optimism/evm/src/strategy.rs +++ b/crates/optimism/evm/src/strategy.rs @@ -3,6 +3,7 @@ use crate::{l1::ensure_create2_deployer, OptimismBlockExecutionError, OptimismEvmConfig}; use alloc::{boxed::Box, sync::Arc, vec::Vec}; use alloy_consensus::Transaction as _; +use alloy_eips::eip7685::Requests; use core::fmt::Display; use reth_chainspec::EthereumHardforks; use reth_consensus::ConsensusError; @@ -17,7 +18,7 @@ use reth_evm::{ use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::validate_block_post_execution; use reth_optimism_forks::OptimismHardfork; -use reth_primitives::{BlockWithSenders, Header, Receipt, Request, TxType}; +use reth_primitives::{BlockWithSenders, Header, Receipt, TxType}; use reth_revm::{ db::{states::bundle_state::BundleRetention, BundleState}, state_change::post_block_balance_increments, @@ -235,7 +236,7 @@ where block: &BlockWithSenders, total_difficulty: U256, _receipts: &[Receipt], - ) -> Result, Self::Error> { + ) -> Result { let balance_increments = post_block_balance_increments(&self.chain_spec.clone(), block, total_difficulty); // increment balances @@ -243,7 +244,7 @@ where .increment_balances(balance_increments) .map_err(|_| BlockValidationError::IncrementBalanceFailed)?; - Ok(vec![]) + Ok(Requests::default()) } fn state_ref(&self) -> &State { @@ -267,7 +268,7 @@ where &self, block: &BlockWithSenders, receipts: &[Receipt], - _requests: &[Request], + _requests: &Requests, ) -> Result<(), ConsensusError> { validate_block_post_execution(block, &self.chain_spec.clone(), receipts) } diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index e590635f5..b6ab9b879 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -507,13 +507,13 @@ where parent_beacon_block_root: attributes.payload_attributes.parent_beacon_block_root, blob_gas_used, excess_blob_gas: excess_blob_gas.map(Into::into), - requests_root: None, + requests_hash: None, }; // seal the block let block = Block { header, - body: BlockBody { transactions: executed_txs, ommers: vec![], withdrawals, requests: None }, + body: BlockBody { transactions: executed_txs, ommers: vec![], withdrawals }, }; let sealed_block = block.seal_slow(); diff --git a/crates/optimism/payload/src/payload.rs b/crates/optimism/payload/src/payload.rs index 122c2fde5..d5d1620e5 100644 --- a/crates/optimism/payload/src/payload.rs +++ b/crates/optimism/payload/src/payload.rs @@ -18,8 +18,7 @@ use reth_primitives::{ transaction::WithEncoded, BlobTransactionSidecar, SealedBlock, TransactionSigned, Withdrawals, }; use reth_rpc_types_compat::engine::payload::{ - block_to_payload_v1, block_to_payload_v3, block_to_payload_v4, - convert_block_to_payload_field_v2, + block_to_payload_v1, block_to_payload_v3, convert_block_to_payload_field_v2, }; use std::sync::Arc; @@ -249,7 +248,7 @@ impl From for OpExecutionPayloadEnvelopeV4 { B256::ZERO }; Self { - execution_payload: block_to_payload_v4(block), + execution_payload: block_to_payload_v3(block), block_value: fees, // From the engine API spec: // @@ -262,6 +261,7 @@ impl From for OpExecutionPayloadEnvelopeV4 { should_override_builder: false, blobs_bundle: 
sidecars.into_iter().map(Into::into).collect::>().into(), parent_beacon_block_root, + execution_requests: vec![], } } } diff --git a/crates/optimism/primitives/src/bedrock.rs b/crates/optimism/primitives/src/bedrock.rs index bd4229858..7153ae315 100644 --- a/crates/optimism/primitives/src/bedrock.rs +++ b/crates/optimism/primitives/src/bedrock.rs @@ -85,7 +85,7 @@ pub const BEDROCK_HEADER: Header = Header { blob_gas_used: None, excess_blob_gas: None, parent_beacon_block_root: None, - requests_root: None, + requests_hash: None, }; /// Bedrock total difficulty on Optimism Mainnet. diff --git a/crates/optimism/rpc/src/eth/receipt.rs b/crates/optimism/rpc/src/eth/receipt.rs index 200b626d8..f2f09cdc7 100644 --- a/crates/optimism/rpc/src/eth/receipt.rs +++ b/crates/optimism/rpc/src/eth/receipt.rs @@ -227,7 +227,6 @@ impl OpReceiptBuilder { from, to, contract_address, - state_root, authorization_list, } = core_receipt; @@ -265,7 +264,6 @@ impl OpReceiptBuilder { from, to, contract_address, - state_root, authorization_list, }; diff --git a/crates/optimism/storage/src/lib.rs b/crates/optimism/storage/src/lib.rs index d435ed1d8..347b690c5 100644 --- a/crates/optimism/storage/src/lib.rs +++ b/crates/optimism/storage/src/lib.rs @@ -16,7 +16,7 @@ mod tests { CompactClientVersion, CompactU256, CompactU64, StoredBlockBodyIndices, StoredBlockOmmers, StoredBlockWithdrawals, }; - use reth_primitives::{Account, Receipt, ReceiptWithBloom, Requests, Withdrawals}; + use reth_primitives::{Account, Receipt, ReceiptWithBloom, Withdrawals}; use reth_prune_types::{PruneCheckpoint, PruneMode, PruneSegment}; use reth_stages_types::{ AccountHashingCheckpoint, CheckpointBlockRange, EntitiesCheckpoint, ExecutionCheckpoint, @@ -74,6 +74,5 @@ mod tests { validate_bitflag_backwards_compat!(StoredBlockWithdrawals, UnusedBits::Zero); validate_bitflag_backwards_compat!(StorageHashingCheckpoint, UnusedBits::NotZero); validate_bitflag_backwards_compat!(Withdrawals, UnusedBits::Zero); - validate_bitflag_backwards_compat!(Requests, UnusedBits::Zero); } } diff --git a/crates/payload/validator/Cargo.toml b/crates/payload/validator/Cargo.toml index 2662b987f..a96799d7b 100644 --- a/crates/payload/validator/Cargo.toml +++ b/crates/payload/validator/Cargo.toml @@ -18,4 +18,6 @@ reth-primitives.workspace = true reth-rpc-types-compat.workspace = true # alloy +alloy-eips.workspace = true +alloy-primitives.workspace = true alloy-rpc-types = { workspace = true, features = ["engine"] } diff --git a/crates/payload/validator/src/lib.rs b/crates/payload/validator/src/lib.rs index 55002b0a9..fdcd9244a 100644 --- a/crates/payload/validator/src/lib.rs +++ b/crates/payload/validator/src/lib.rs @@ -8,6 +8,8 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +use alloy_eips::eip7685::Requests; +use alloy_primitives::Bytes; use alloy_rpc_types::engine::{ExecutionPayload, MaybeCancunPayloadFields, PayloadError}; use reth_chainspec::EthereumHardforks; use reth_primitives::SealedBlock; @@ -112,12 +114,17 @@ impl ExecutionPayloadValidator { &self, payload: ExecutionPayload, cancun_fields: MaybeCancunPayloadFields, + execution_requests: Option>, ) -> Result { let expected_hash = payload.block_hash(); // First parse the block - let sealed_block = - try_into_block(payload, cancun_fields.parent_beacon_block_root())?.seal_slow(); + let sealed_block = try_into_block( + payload, + cancun_fields.parent_beacon_block_root(), + execution_requests.map(Requests::new), + )? 
+ .seal_slow(); // Ensure the hash included in the payload matches the block hash if expected_hash != sealed_block.hash() { @@ -162,7 +169,7 @@ impl ExecutionPayloadValidator { let shanghai_active = self.is_shanghai_active_at_timestamp(sealed_block.timestamp); if !shanghai_active && sealed_block.body.withdrawals.is_some() { // shanghai not active but withdrawals present - return Err(PayloadError::PreShanghaiBlockWithWitdrawals) + return Err(PayloadError::PreShanghaiBlockWithWithdrawals) } if !self.is_prague_active_at_timestamp(sealed_block.timestamp) && diff --git a/crates/primitives-traits/Cargo.toml b/crates/primitives-traits/Cargo.toml index b34987327..2fec75666 100644 --- a/crates/primitives-traits/Cargo.toml +++ b/crates/primitives-traits/Cargo.toml @@ -64,4 +64,4 @@ arbitrary = [ "dep:proptest", "dep:proptest-arbitrary-interop", ] -serde-bincode-compat = ["serde_with", "alloy-consensus/serde-bincode-compat"] \ No newline at end of file +serde-bincode-compat = ["serde_with", "alloy-consensus/serde-bincode-compat"] diff --git a/crates/primitives-traits/src/block/body.rs b/crates/primitives-traits/src/block/body.rs index 85eeda166..c9b673ec7 100644 --- a/crates/primitives-traits/src/block/body.rs +++ b/crates/primitives-traits/src/block/body.rs @@ -2,8 +2,8 @@ use alloc::{fmt, vec::Vec}; -use alloy_consensus::{BlockHeader, Request, Transaction, TxType}; -use alloy_eips::eip4895::Withdrawal; +use alloy_consensus::{BlockHeader, Transaction, TxType}; +use alloy_eips::{eip4895::Withdrawal, eip7685::Requests}; use alloy_primitives::{Address, B256}; use crate::Block; @@ -30,9 +30,6 @@ pub trait BlockBody: /// Withdrawals in block. type Withdrawals: Iterator; - /// Requests in block. - type Requests: Iterator; - /// Returns reference to transactions in block. fn transactions(&self) -> &[Self::SignedTransaction]; @@ -43,8 +40,8 @@ pub trait BlockBody: /// Returns reference to uncle block headers. fn ommers(&self) -> &[Self::Header]; - /// Returns [`Request`] in block, if any. - fn requests(&self) -> Option<&Self::Requests>; + /// Returns [`Requests`] in block, if any. + fn requests(&self) -> Option<&Requests>; /// Create a [`Block`] from the body and its header. fn into_block>(self, header: Self::Header) -> T { @@ -63,12 +60,6 @@ pub trait BlockBody: // `Withdrawals` and `Withdrawals` moved to alloy fn calculate_withdrawals_root(&self) -> Option; - /// Calculate the requests root for the block body, if requests exist. If there are no - /// requests, this will return `None`. - // todo: can be default impl if `calculate_requests_root` made into a method on - // `Requests` and `Requests` moved to alloy - fn calculate_requests_root(&self) -> Option; - /// Recover signer addresses for all transactions in the block body. 
fn recover_signers(&self) -> Option>; diff --git a/crates/primitives-traits/src/header/test_utils.rs b/crates/primitives-traits/src/header/test_utils.rs index ef5c0d025..c5f6e86b9 100644 --- a/crates/primitives-traits/src/header/test_utils.rs +++ b/crates/primitives-traits/src/header/test_utils.rs @@ -37,7 +37,7 @@ pub const fn generate_valid_header( } // Placeholder for future EIP adjustments - header.requests_root = None; + header.requests_hash = None; header } diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index dd10ac9c5..a77669ec3 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -29,9 +29,6 @@ pub use transaction::{signed::SignedTransaction, Transaction}; mod integer_list; pub use integer_list::{IntegerList, IntegerListError}; -pub mod request; -pub use request::{Request, Requests}; - pub mod block; pub use block::{body::BlockBody, Block}; diff --git a/crates/primitives-traits/src/mod.rs b/crates/primitives-traits/src/mod.rs deleted file mode 100644 index e69de29bb..000000000 diff --git a/crates/primitives-traits/src/request.rs b/crates/primitives-traits/src/request.rs deleted file mode 100644 index c08af3fd6..000000000 --- a/crates/primitives-traits/src/request.rs +++ /dev/null @@ -1,58 +0,0 @@ -//! EIP-7685 requests. - -use alloc::vec::Vec; -pub use alloy_consensus::Request; -use alloy_eips::eip7685::{Decodable7685, Encodable7685}; -use alloy_rlp::{Decodable, Encodable}; -use derive_more::{Deref, DerefMut, From, IntoIterator}; -use reth_codecs::{add_arbitrary_tests, Compact}; -use revm_primitives::Bytes; -use serde::{Deserialize, Serialize}; - -/// A list of EIP-7685 requests. -#[derive( - Debug, - Clone, - PartialEq, - Eq, - Default, - Hash, - Deref, - DerefMut, - From, - IntoIterator, - Serialize, - Deserialize, - Compact, -)] -#[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))] -#[add_arbitrary_tests(compact)] -pub struct Requests(pub Vec); - -impl Encodable for Requests { - fn encode(&self, out: &mut dyn bytes::BufMut) { - let mut h = alloy_rlp::Header { list: true, payload_length: 0 }; - - let mut encoded = Vec::new(); - for req in &self.0 { - let encoded_req = req.encoded_7685(); - h.payload_length += encoded_req.len(); - encoded.push(Bytes::from(encoded_req)); - } - - h.encode(out); - for req in encoded { - req.encode(out); - } - } -} - -impl Decodable for Requests { - fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { - Ok( as Decodable>::decode(buf)? - .into_iter() - .map(|bytes| Request::decode_7685(&mut bytes.as_ref())) - .collect::, alloy_eips::eip7685::Eip7685Error>>() - .map(Self)?) 
- } -} diff --git a/crates/primitives/Cargo.toml b/crates/primitives/Cargo.toml index 8596f8d76..05ccd9081 100644 --- a/crates/primitives/Cargo.toml +++ b/crates/primitives/Cargo.toml @@ -60,7 +60,6 @@ zstd = { workspace = true, features = ["experimental"], optional = true } # arbitrary utils arbitrary = { workspace = true, features = ["derive"], optional = true } -proptest = { workspace = true, optional = true } [dev-dependencies] # eth @@ -97,7 +96,6 @@ reth-codec = ["dep:reth-codecs", "dep:zstd", "dep:modular-bitfield", "std"] asm-keccak = ["alloy-primitives/asm-keccak"] arbitrary = [ "dep:arbitrary", - "dep:proptest", "alloy-eips/arbitrary", "rand", "reth-codec", diff --git a/crates/primitives/src/alloy_compat.rs b/crates/primitives/src/alloy_compat.rs index 0ac1458c5..917baef66 100644 --- a/crates/primitives/src/alloy_compat.rs +++ b/crates/primitives/src/alloy_compat.rs @@ -48,9 +48,6 @@ impl TryFrom Option { - None - } -} - /// Ethereum full block. /// /// Withdrawals can be optionally included at the end of the RLP encoded message. @@ -120,7 +109,6 @@ mod block_rlp { transactions: Vec, ommers: Vec
, withdrawals: Option, - requests: Option, } #[derive(RlpEncodable)] @@ -130,50 +118,34 @@ mod block_rlp { transactions: &'a Vec, ommers: &'a Vec
, withdrawals: Option<&'a Withdrawals>, - requests: Option<&'a Requests>, } impl<'a> From<&'a Block> for HelperRef<'a, Header> { fn from(block: &'a Block) -> Self { - let Block { header, body: BlockBody { transactions, ommers, withdrawals, requests } } = - block; - Self { - header, - transactions, - ommers, - withdrawals: withdrawals.as_ref(), - requests: requests.as_ref(), - } + let Block { header, body: BlockBody { transactions, ommers, withdrawals } } = block; + Self { header, transactions, ommers, withdrawals: withdrawals.as_ref() } } } impl<'a> From<&'a SealedBlock> for HelperRef<'a, SealedHeader> { fn from(block: &'a SealedBlock) -> Self { - let SealedBlock { - header, - body: BlockBody { transactions, ommers, withdrawals, requests }, - } = block; - Self { - header, - transactions, - ommers, - withdrawals: withdrawals.as_ref(), - requests: requests.as_ref(), - } + let SealedBlock { header, body: BlockBody { transactions, ommers, withdrawals } } = + block; + Self { header, transactions, ommers, withdrawals: withdrawals.as_ref() } } } impl Decodable for Block { fn decode(b: &mut &[u8]) -> alloy_rlp::Result { - let Helper { header, transactions, ommers, withdrawals, requests } = Helper::decode(b)?; - Ok(Self { header, body: BlockBody { transactions, ommers, withdrawals, requests } }) + let Helper { header, transactions, ommers, withdrawals } = Helper::decode(b)?; + Ok(Self { header, body: BlockBody { transactions, ommers, withdrawals } }) } } impl Decodable for SealedBlock { fn decode(b: &mut &[u8]) -> alloy_rlp::Result { - let Helper { header, transactions, ommers, withdrawals, requests } = Helper::decode(b)?; - Ok(Self { header, body: BlockBody { transactions, ommers, withdrawals, requests } }) + let Helper { header, transactions, ommers, withdrawals } = Helper::decode(b)?; + Ok(Self { header, body: BlockBody { transactions, ommers, withdrawals } }) } } @@ -215,13 +187,7 @@ impl<'a> arbitrary::Arbitrary<'a> for Block { Ok(Self { header: u.arbitrary()?, - body: BlockBody { - transactions, - ommers, - // for now just generate empty requests, see HACK above - requests: u.arbitrary()?, - withdrawals: u.arbitrary()?, - }, + body: BlockBody { transactions, ommers, withdrawals: u.arbitrary()? }, }) } } @@ -570,8 +536,6 @@ pub struct BlockBody { pub ommers: Vec
, /// Withdrawals in the block. pub withdrawals: Option, - /// Requests in the block. - pub requests: Option, } impl BlockBody { @@ -596,12 +560,6 @@ impl BlockBody { self.withdrawals.as_ref().map(|w| crate::proofs::calculate_withdrawals_root(w)) } - /// Calculate the requests root for the block body, if requests exist. If there are no - /// requests, this will return `None`. - pub fn calculate_requests_root(&self) -> Option { - self.requests.as_ref().map(|r| crate::proofs::calculate_requests_root(&r.0)) - } - /// Recover signer addresses for all transactions in the block body. pub fn recover_signers(&self) -> Option> { TransactionSigned::recover_signers(&self.transactions, self.transactions.len()) @@ -670,7 +628,6 @@ impl From for BlockBody { transactions: block.body.transactions, ommers: block.body.ommers, withdrawals: block.body.withdrawals, - requests: block.body.requests, } } } @@ -692,8 +649,7 @@ impl<'a> arbitrary::Arbitrary<'a> for BlockBody { }) .collect::>>()?; - // for now just generate empty requests, see HACK above - Ok(Self { transactions, ommers, requests: None, withdrawals: u.arbitrary()? }) + Ok(Self { transactions, ommers, withdrawals: u.arbitrary()? }) } } @@ -703,7 +659,7 @@ pub(super) mod serde_bincode_compat { use alloc::{borrow::Cow, vec::Vec}; use alloy_consensus::serde_bincode_compat::Header; use alloy_primitives::Address; - use reth_primitives_traits::{serde_bincode_compat::SealedHeader, Requests, Withdrawals}; + use reth_primitives_traits::{serde_bincode_compat::SealedHeader, Withdrawals}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde_with::{DeserializeAs, SerializeAs}; @@ -729,7 +685,6 @@ pub(super) mod serde_bincode_compat { transactions: Vec>, ommers: Vec>, withdrawals: Cow<'a, Option>, - requests: Cow<'a, Option>, } impl<'a> From<&'a super::BlockBody> for BlockBody<'a> { @@ -738,7 +693,6 @@ pub(super) mod serde_bincode_compat { transactions: value.transactions.iter().map(Into::into).collect(), ommers: value.ommers.iter().map(Into::into).collect(), withdrawals: Cow::Borrowed(&value.withdrawals), - requests: Cow::Borrowed(&value.requests), } } } @@ -749,7 +703,6 @@ pub(super) mod serde_bincode_compat { transactions: value.transactions.into_iter().map(Into::into).collect(), ommers: value.ommers.into_iter().map(Into::into).collect(), withdrawals: value.withdrawals.into_owned(), - requests: value.requests.into_owned(), } } } diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index a9e8c0820..7a9a6bf45 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -45,7 +45,7 @@ pub use receipt::{ }; pub use reth_primitives_traits::{ logs_bloom, Account, Bytecode, GotExpected, GotExpectedBoxed, Header, HeaderError, Log, - LogData, Request, Requests, SealedHeader, StorageEntry, Withdrawal, Withdrawals, + LogData, SealedHeader, StorageEntry, Withdrawal, Withdrawals, }; pub use static_file::StaticFileSegment; diff --git a/crates/primitives/src/proofs.rs b/crates/primitives/src/proofs.rs index 4efbb588e..169724670 100644 --- a/crates/primitives/src/proofs.rs +++ b/crates/primitives/src/proofs.rs @@ -1,11 +1,11 @@ //! Helper function for calculating Merkle proofs and hashes. 
use crate::{ - Header, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, Request, TransactionSigned, Withdrawal, + Header, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, TransactionSigned, Withdrawal, }; use alloc::vec::Vec; use alloy_consensus::EMPTY_OMMER_ROOT_HASH; -use alloy_eips::{eip2718::Encodable2718, eip7685::Encodable7685}; +use alloy_eips::eip2718::Encodable2718; use alloy_primitives::{keccak256, B256}; use reth_trie_common::root::{ordered_trie_root, ordered_trie_root_with_encoder}; @@ -29,13 +29,6 @@ pub fn calculate_receipt_root(receipts: &[ReceiptWithBloom]) -> B256 { ordered_trie_root_with_encoder(receipts, |r, buf| r.encode_inner(buf, false)) } -/// Calculate [EIP-7685](https://eips.ethereum.org/EIPS/eip-7685) requests root. -/// -/// NOTE: The requests are encoded as `id + request` -pub fn calculate_requests_root(requests: &[Request]) -> B256 { - ordered_trie_root_with_encoder(requests, |item, buf| item.encode_7685(buf)) -} - /// Calculates the receipt root for a header. pub fn calculate_receipt_root_ref(receipts: &[ReceiptWithBloomRef<'_>]) -> B256 { ordered_trie_root_with_encoder(receipts, |r, buf| r.encode_inner(buf, false)) diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index b7eeeadc8..1d410da1e 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -802,18 +802,6 @@ impl alloy_consensus::Transaction for Transaction { } } - fn to(&self) -> TxKind { - match self { - Self::Legacy(tx) => tx.to(), - Self::Eip2930(tx) => tx.to(), - Self::Eip1559(tx) => tx.to(), - Self::Eip4844(tx) => tx.to(), - Self::Eip7702(tx) => tx.to(), - #[cfg(feature = "optimism")] - Self::Deposit(tx) => tx.to(), - } - } - fn value(&self) -> U256 { match self { Self::Legacy(tx) => tx.value(), @@ -826,7 +814,7 @@ impl alloy_consensus::Transaction for Transaction { } } - fn input(&self) -> &[u8] { + fn input(&self) -> &Bytes { match self { Self::Legacy(tx) => tx.input(), Self::Eip2930(tx) => tx.input(), @@ -885,6 +873,18 @@ impl alloy_consensus::Transaction for Transaction { Self::Deposit(tx) => tx.authorization_list(), } } + + fn kind(&self) -> TxKind { + match self { + Self::Legacy(tx) => tx.kind(), + Self::Eip2930(tx) => tx.kind(), + Self::Eip1559(tx) => tx.kind(), + Self::Eip4844(tx) => tx.kind(), + Self::Eip7702(tx) => tx.kind(), + #[cfg(feature = "optimism")] + Self::Deposit(tx) => tx.kind(), + } + } } /// Signed transaction without its Hash. Used type for inserting into the DB. 
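The hunks above track the updated `alloy_consensus::Transaction` interface: `kind()` is now the required accessor returning `TxKind`, `to()` acts as an `Option<Address>` shorthand (as the adjusted test shows), and `input()` returns `&Bytes` rather than `&[u8]`. A small usage sketch under those assumptions; `describe` is a hypothetical helper for illustration:

    use alloy_consensus::Transaction;
    use alloy_primitives::TxKind;

    /// Hypothetical helper that works over any type implementing the updated trait,
    /// e.g. `reth_primitives::TransactionSigned`.
    fn describe<T: Transaction>(tx: &T) {
        match tx.kind() {
            // `kind()` distinguishes calls from contract creation.
            TxKind::Call(to) => println!("call to {to}"),
            TxKind::Create => println!("contract creation"),
        }
        // `to()` is the Option-returning shorthand used in the updated tests.
        assert_eq!(tx.to().is_some(), matches!(tx.kind(), TxKind::Call(_)));
        // `input()` now yields `&Bytes`; use `as_ref()` for a byte-slice view.
        let _calldata: &[u8] = tx.input().as_ref();
    }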
@@ -1383,15 +1383,11 @@ impl alloy_consensus::Transaction for TransactionSigned { self.deref().priority_fee_or_price() } - fn to(&self) -> TxKind { - alloy_consensus::Transaction::to(self.deref()) - } - fn value(&self) -> U256 { self.deref().value() } - fn input(&self) -> &[u8] { + fn input(&self) -> &Bytes { self.deref().input() } @@ -1410,6 +1406,10 @@ impl alloy_consensus::Transaction for TransactionSigned { fn authorization_list(&self) -> Option<&[SignedAuthorization]> { self.deref().authorization_list() } + + fn kind(&self) -> TxKind { + self.deref().kind() + } } impl From for TransactionSigned { @@ -2242,8 +2242,8 @@ mod tests { let tx = TransactionSigned::decode_2718(&mut data.as_slice()).unwrap(); let sender = tx.recover_signer().unwrap(); assert_eq!(sender, address!("001e2b7dE757bA469a57bF6b23d982458a07eFcE")); - assert_eq!(tx.to(), Some(address!("D9e1459A7A482635700cBc20BBAF52D495Ab9C96")).into()); - assert_eq!(tx.input(), hex!("1b55ba3a")); + assert_eq!(tx.to(), Some(address!("D9e1459A7A482635700cBc20BBAF52D495Ab9C96"))); + assert_eq!(tx.input().as_ref(), hex!("1b55ba3a")); let encoded = tx.encoded_2718(); assert_eq!(encoded.as_ref(), data.to_vec()); } diff --git a/crates/revm/Cargo.toml b/crates/revm/Cargo.toml index b2d3bccde..5772af0dc 100644 --- a/crates/revm/Cargo.toml +++ b/crates/revm/Cargo.toml @@ -21,6 +21,9 @@ reth-consensus-common.workspace = true reth-prune-types.workspace = true reth-storage-api.workspace = true reth-trie = { workspace = true, optional = true } + +# alloy +alloy-eips.workspace = true alloy-primitives.workspace = true # revm diff --git a/crates/revm/src/batch.rs b/crates/revm/src/batch.rs index a63681aa1..be3ef0a37 100644 --- a/crates/revm/src/batch.rs +++ b/crates/revm/src/batch.rs @@ -1,9 +1,10 @@ //! Helper for handling execution of multiple blocks. use alloc::vec::Vec; +use alloy_eips::eip7685::Requests; use alloy_primitives::{map::HashSet, Address, BlockNumber}; use reth_execution_errors::{BlockExecutionError, InternalBlockExecutionError}; -use reth_primitives::{Receipt, Receipts, Request, Requests}; +use reth_primitives::{Receipt, Receipts}; use reth_prune_types::{PruneMode, PruneModes, PruneSegmentError, MINIMUM_PRUNING_DISTANCE}; use revm::db::states::bundle_state::BundleRetention; @@ -170,8 +171,8 @@ impl BlockBatchRecord { } /// Save EIP-7685 requests to the executor. 
- pub fn save_requests(&mut self, requests: Vec) { - self.requests.push(requests.into()); + pub fn save_requests(&mut self, requests: Requests) { + self.requests.push(requests); } } diff --git a/crates/rpc/rpc-api/src/engine.rs b/crates/rpc/rpc-api/src/engine.rs index eedada8ff..e89f7b8d3 100644 --- a/crates/rpc/rpc-api/src/engine.rs +++ b/crates/rpc/rpc-api/src/engine.rs @@ -11,8 +11,8 @@ use alloy_rpc_types::{ }; use alloy_rpc_types_engine::{ ClientVersionV1, ExecutionPayloadBodiesV1, ExecutionPayloadInputV2, ExecutionPayloadV1, - ExecutionPayloadV3, ExecutionPayloadV4, ForkchoiceState, ForkchoiceUpdated, PayloadId, - PayloadStatus, TransitionConfiguration, + ExecutionPayloadV3, ForkchoiceState, ForkchoiceUpdated, PayloadId, PayloadStatus, + TransitionConfiguration, }; use alloy_rpc_types_eth::transaction::TransactionRequest; use alloy_serde::JsonStorageKey; @@ -54,9 +54,10 @@ pub trait EngineApi { #[method(name = "newPayloadV4")] async fn new_payload_v4( &self, - payload: ExecutionPayloadV4, + payload: ExecutionPayloadV3, versioned_hashes: Vec, parent_beacon_block_root: B256, + execution_requests: Vec, ) -> RpcResult; /// See also diff --git a/crates/rpc/rpc-engine-api/src/engine_api.rs b/crates/rpc/rpc-engine-api/src/engine_api.rs index fb7f98ed2..d0b19a7b4 100644 --- a/crates/rpc/rpc-engine-api/src/engine_api.rs +++ b/crates/rpc/rpc-engine-api/src/engine_api.rs @@ -2,11 +2,11 @@ use crate::{ capabilities::EngineCapabilities, metrics::EngineApiMetrics, EngineApiError, EngineApiResult, }; use alloy_eips::eip4844::BlobAndProofV1; -use alloy_primitives::{BlockHash, BlockNumber, B256, U64}; +use alloy_primitives::{BlockHash, BlockNumber, Bytes, B256, U64}; use alloy_rpc_types_engine::{ CancunPayloadFields, ClientVersionV1, ExecutionPayload, ExecutionPayloadBodiesV1, - ExecutionPayloadInputV2, ExecutionPayloadV1, ExecutionPayloadV3, ExecutionPayloadV4, - ForkchoiceState, ForkchoiceUpdated, PayloadId, PayloadStatus, TransitionConfiguration, + ExecutionPayloadInputV2, ExecutionPayloadV1, ExecutionPayloadV3, ForkchoiceState, + ForkchoiceUpdated, PayloadId, PayloadStatus, TransitionConfiguration, }; use async_trait::async_trait; use jsonrpsee_core::RpcResult; @@ -140,7 +140,7 @@ where self.inner .validator .validate_version_specific_fields(EngineApiMessageVersion::V1, payload_or_attrs)?; - Ok(self.inner.beacon_consensus.new_payload(payload, None).await?) + Ok(self.inner.beacon_consensus.new_payload(payload, None, None).await?) } /// See also @@ -156,7 +156,7 @@ where self.inner .validator .validate_version_specific_fields(EngineApiMessageVersion::V2, payload_or_attrs)?; - Ok(self.inner.beacon_consensus.new_payload(payload, None).await?) + Ok(self.inner.beacon_consensus.new_payload(payload, None, None).await?) } /// See also @@ -178,15 +178,18 @@ where let cancun_fields = CancunPayloadFields { versioned_hashes, parent_beacon_block_root }; - Ok(self.inner.beacon_consensus.new_payload(payload, Some(cancun_fields)).await?) + Ok(self.inner.beacon_consensus.new_payload(payload, Some(cancun_fields), None).await?) } /// See also pub async fn new_payload_v4( &self, - payload: ExecutionPayloadV4, + payload: ExecutionPayloadV3, versioned_hashes: Vec, parent_beacon_block_root: B256, + // TODO(onbjerg): Figure out why we even get these here, since we'll check the requests + // from execution against the requests root in the header. 
+ execution_requests: Vec, ) -> EngineApiResult { let payload = ExecutionPayload::from(payload); let payload_or_attrs = @@ -200,7 +203,13 @@ where let cancun_fields = CancunPayloadFields { versioned_hashes, parent_beacon_block_root }; - Ok(self.inner.beacon_consensus.new_payload(payload, Some(cancun_fields)).await?) + // HACK(onbjerg): We should have a pectra payload fields struct, this is just a temporary + // workaround. + Ok(self + .inner + .beacon_consensus + .new_payload(payload, Some(cancun_fields), Some(execution_requests)) + .await?) } /// Sends a message to the beacon consensus engine to update the fork choice _without_ @@ -370,7 +379,7 @@ where .map_err(|_| EngineApiError::UnknownPayload)? .try_into() .map_err(|_| { - warn!("could not transform built payload into ExecutionPayloadV4"); + warn!("could not transform built payload into ExecutionPayloadV3"); EngineApiError::UnknownPayload }) } @@ -665,15 +674,22 @@ where /// See also async fn new_payload_v4( &self, - payload: ExecutionPayloadV4, + payload: ExecutionPayloadV3, versioned_hashes: Vec, parent_beacon_block_root: B256, + execution_requests: Vec, ) -> RpcResult { trace!(target: "rpc::engine", "Serving engine_newPayloadV4"); let start = Instant::now(); - let gas_used = payload.payload_inner.payload_inner.payload_inner.gas_used; - let res = - Self::new_payload_v4(self, payload, versioned_hashes, parent_beacon_block_root).await; + let gas_used = payload.payload_inner.payload_inner.gas_used; + let res = Self::new_payload_v4( + self, + payload, + versioned_hashes, + parent_beacon_block_root, + execution_requests, + ) + .await; let elapsed = start.elapsed(); self.inner.metrics.latency.new_payload_v4.record(elapsed); self.inner.metrics.new_payload_response.update_response_metrics(&res, gas_used, elapsed); diff --git a/crates/rpc/rpc-engine-api/tests/it/payload.rs b/crates/rpc/rpc-engine-api/tests/it/payload.rs index c08c30c1d..007a62db0 100644 --- a/crates/rpc/rpc-engine-api/tests/it/payload.rs +++ b/crates/rpc/rpc-engine-api/tests/it/payload.rs @@ -75,7 +75,7 @@ fn payload_validation() { b }); - assert_matches!(try_into_sealed_block(block_with_valid_extra_data, None), Ok(_)); + assert_matches!(try_into_sealed_block(block_with_valid_extra_data, None, None), Ok(_)); // Invalid extra data let block_with_invalid_extra_data = Bytes::from_static(&[0; 33]); @@ -84,7 +84,7 @@ fn payload_validation() { b }); assert_matches!( - try_into_sealed_block(invalid_extra_data_block,None), + try_into_sealed_block(invalid_extra_data_block, None, None), Err(PayloadError::ExtraData(data)) if data == block_with_invalid_extra_data ); @@ -94,8 +94,7 @@ fn payload_validation() { b }); assert_matches!( - - try_into_sealed_block(block_with_zero_base_fee,None), + try_into_sealed_block(block_with_zero_base_fee, None, None), Err(PayloadError::BaseFee(val)) if val.is_zero() ); @@ -114,8 +113,7 @@ fn payload_validation() { b }); assert_matches!( - try_into_sealed_block(block_with_ommers.clone(),None), - + try_into_sealed_block(block_with_ommers.clone(), None, None), Err(PayloadError::BlockHash { consensus, .. }) if consensus == block_with_ommers.block_hash() ); @@ -126,9 +124,8 @@ fn payload_validation() { b }); assert_matches!( - try_into_sealed_block(block_with_difficulty.clone(),None), + try_into_sealed_block(block_with_difficulty.clone(), None, None), Err(PayloadError::BlockHash { consensus, .. 
}) if consensus == block_with_difficulty.block_hash() - ); // None zero nonce @@ -137,7 +134,7 @@ fn payload_validation() { b }); assert_matches!( - try_into_sealed_block(block_with_nonce.clone(),None), + try_into_sealed_block(block_with_nonce.clone(), None, None), Err(PayloadError::BlockHash { consensus, .. }) if consensus == block_with_nonce.block_hash() ); diff --git a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs index 832cf1705..5e12e41e5 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs @@ -5,7 +5,8 @@ use std::time::{Duration, Instant}; use crate::{EthApiTypes, FromEthApiError, FromEvmError}; -use alloy_consensus::{EMPTY_OMMER_ROOT_HASH, EMPTY_ROOT_HASH}; +use alloy_consensus::EMPTY_OMMER_ROOT_HASH; +use alloy_eips::eip7685::EMPTY_REQUESTS_HASH; use alloy_primitives::{BlockNumber, B256, U256}; use alloy_rpc_types::BlockNumberOrTag; use futures::Future; @@ -19,7 +20,7 @@ use reth_primitives::{ BlockEnv, CfgEnv, CfgEnvWithHandlerCfg, EVMError, Env, ExecutionResult, InvalidTransaction, ResultAndState, SpecId, }, - Block, BlockBody, Header, Receipt, Requests, SealedBlockWithSenders, SealedHeader, + Block, BlockBody, Header, Receipt, SealedBlockWithSenders, SealedHeader, TransactionSignedEcRecovered, }; use reth_provider::{ @@ -417,14 +418,9 @@ pub trait LoadPendingBlock: EthApiTypes { let blob_gas_used = (cfg.handler_cfg.spec_id >= SpecId::CANCUN).then_some(sum_blob_gas_used); - // note(onbjerg): the rpc spec has not been changed to include requests, so for now we just - // set these to empty - let (requests, requests_root) = - if chain_spec.is_prague_active_at_timestamp(block_env.timestamp.to::()) { - (Some(Requests::default()), Some(EMPTY_ROOT_HASH)) - } else { - (None, None) - }; + let requests_hash = chain_spec + .is_prague_active_at_timestamp(block_env.timestamp.to::()) + .then_some(EMPTY_REQUESTS_HASH); let header = Header { parent_hash, @@ -447,7 +443,7 @@ pub trait LoadPendingBlock: EthApiTypes { excess_blob_gas: block_env.get_blob_excess_gas().map(Into::into), extra_data: Default::default(), parent_beacon_block_root, - requests_root, + requests_hash, }; // Convert Vec> to Vec @@ -456,7 +452,7 @@ pub trait LoadPendingBlock: EthApiTypes { // seal the block let block = Block { header, - body: BlockBody { transactions: executed_txs, ommers: vec![], withdrawals, requests }, + body: BlockBody { transactions: executed_txs, ommers: vec![], withdrawals }, }; Ok((SealedBlockWithSenders { block: block.seal_slow(), senders }, receipts)) } diff --git a/crates/rpc/rpc-eth-types/src/receipt.rs b/crates/rpc/rpc-eth-types/src/receipt.rs index 2668291e2..c3232f238 100644 --- a/crates/rpc/rpc-eth-types/src/receipt.rs +++ b/crates/rpc/rpc-eth-types/src/receipt.rs @@ -102,8 +102,6 @@ impl ReceiptBuilder { gas_used: gas_used as u128, contract_address, effective_gas_price: transaction.effective_gas_price(meta.base_fee), - // TODO pre-byzantium receipts have a post-transaction state root - state_root: None, // EIP-4844 fields blob_gas_price, blob_gas_used: blob_gas_used.map(u128::from), diff --git a/crates/rpc/rpc-types-compat/src/block.rs b/crates/rpc/rpc-types-compat/src/block.rs index fc8ea9e1c..a650a69c1 100644 --- a/crates/rpc/rpc-types-compat/src/block.rs +++ b/crates/rpc/rpc-types-compat/src/block.rs @@ -124,7 +124,7 @@ pub fn from_primitive_with_hash(primitive_header: reth_primitives::SealedHeader) blob_gas_used, excess_blob_gas, parent_beacon_block_root, - 
requests_root, + requests_hash, } = header; Header { @@ -150,7 +150,7 @@ pub fn from_primitive_with_hash(primitive_header: reth_primitives::SealedHeader) excess_blob_gas, parent_beacon_block_root, total_difficulty: None, - requests_root, + requests_hash, } } diff --git a/crates/rpc/rpc-types-compat/src/engine/payload.rs b/crates/rpc/rpc-types-compat/src/engine/payload.rs index 3bbee2b00..b63b7453a 100644 --- a/crates/rpc/rpc-types-compat/src/engine/payload.rs +++ b/crates/rpc/rpc-types-compat/src/engine/payload.rs @@ -2,16 +2,18 @@ //! Ethereum's Engine use alloy_consensus::{constants::MAXIMUM_EXTRA_DATA_SIZE, EMPTY_OMMER_ROOT_HASH}; -use alloy_eips::eip2718::{Decodable2718, Encodable2718}; +use alloy_eips::{ + eip2718::{Decodable2718, Encodable2718}, + eip7685::Requests, +}; use alloy_primitives::{B256, U256}; use alloy_rpc_types_engine::{ payload::{ExecutionPayloadBodyV1, ExecutionPayloadFieldV2, ExecutionPayloadInputV2}, - ExecutionPayload, ExecutionPayloadV1, ExecutionPayloadV2, ExecutionPayloadV3, - ExecutionPayloadV4, PayloadError, + ExecutionPayload, ExecutionPayloadV1, ExecutionPayloadV2, ExecutionPayloadV3, PayloadError, }; use reth_primitives::{ proofs::{self}, - Block, BlockBody, Header, Request, SealedBlock, TransactionSigned, Withdrawals, + Block, BlockBody, Header, SealedBlock, TransactionSigned, Withdrawals, }; /// Converts [`ExecutionPayloadV1`] to [`Block`] @@ -67,7 +69,7 @@ pub fn try_payload_v1_to_block(payload: ExecutionPayloadV1) -> Result Result Result { - let ExecutionPayloadV4 { - payload_inner, - deposit_requests, - withdrawal_requests, - consolidation_requests, - } = payload; - let mut block = try_payload_v3_to_block(payload_inner)?; - - // attach requests with asc type identifiers - let requests = deposit_requests - .into_iter() - .map(Request::DepositRequest) - .chain(withdrawal_requests.into_iter().map(Request::WithdrawalRequest)) - .chain(consolidation_requests.into_iter().map(Request::ConsolidationRequest)) - .collect::>(); - - let requests_root = proofs::calculate_requests_root(&requests); - block.header.requests_root = Some(requests_root); - block.body.requests = Some(requests.into()); - - Ok(block) -} - /// Converts [`SealedBlock`] to [`ExecutionPayload`] pub fn block_to_payload(value: SealedBlock) -> ExecutionPayload { - if value.header.requests_root.is_some() { + if value.header.requests_hash.is_some() { // block with requests root: V3 - ExecutionPayload::V4(block_to_payload_v4(value)) + ExecutionPayload::V3(block_to_payload_v3(value)) } else if value.header.parent_beacon_block_root.is_some() { // block with parent beacon block root: V3 ExecutionPayload::V3(block_to_payload_v3(value)) @@ -217,37 +194,6 @@ pub fn block_to_payload_v3(value: SealedBlock) -> ExecutionPayloadV3 { } } -/// Converts [`SealedBlock`] to [`ExecutionPayloadV4`] -pub fn block_to_payload_v4(mut value: SealedBlock) -> ExecutionPayloadV4 { - let (deposit_requests, withdrawal_requests, consolidation_requests) = - value.body.requests.take().unwrap_or_default().into_iter().fold( - (Vec::new(), Vec::new(), Vec::new()), - |(mut deposits, mut withdrawals, mut consolidation_requests), request| { - match request { - Request::DepositRequest(r) => { - deposits.push(r); - } - Request::WithdrawalRequest(r) => { - withdrawals.push(r); - } - Request::ConsolidationRequest(r) => { - consolidation_requests.push(r); - } - _ => {} - }; - - (deposits, withdrawals, consolidation_requests) - }, - ); - - ExecutionPayloadV4 { - deposit_requests, - withdrawal_requests, - consolidation_requests, - 
payload_inner: block_to_payload_v3(value), - } -} - /// Converts [`SealedBlock`] to [`ExecutionPayloadFieldV2`] pub fn convert_block_to_payload_field_v2(value: SealedBlock) -> ExecutionPayloadFieldV2 { // if there are withdrawals, return V2 @@ -312,15 +258,16 @@ pub fn convert_block_to_payload_input_v2(value: SealedBlock) -> ExecutionPayload pub fn try_into_block( value: ExecutionPayload, parent_beacon_block_root: Option, + execution_requests: Option, ) -> Result { let mut base_payload = match value { ExecutionPayload::V1(payload) => try_payload_v1_to_block(payload)?, ExecutionPayload::V2(payload) => try_payload_v2_to_block(payload)?, ExecutionPayload::V3(payload) => try_payload_v3_to_block(payload)?, - ExecutionPayload::V4(payload) => try_payload_v4_to_block(payload)?, }; base_payload.header.parent_beacon_block_root = parent_beacon_block_root; + base_payload.header.requests_hash = execution_requests.map(|reqs| reqs.requests_hash()); Ok(base_payload) } @@ -338,9 +285,10 @@ pub fn try_into_block( pub fn try_into_sealed_block( payload: ExecutionPayload, parent_beacon_block_root: Option, + execution_requests: Option, ) -> Result { let block_hash = payload.block_hash(); - let base_payload = try_into_block(payload, parent_beacon_block_root)?; + let base_payload = try_into_block(payload, parent_beacon_block_root, execution_requests)?; // validate block hash and return validate_block_hash(block_hash, base_payload) @@ -404,13 +352,12 @@ pub fn execution_payload_from_sealed_block(value: SealedBlock) -> ExecutionPaylo #[cfg(test)] mod tests { use super::{ - block_to_payload_v3, try_into_block, try_payload_v3_to_block, try_payload_v4_to_block, - validate_block_hash, + block_to_payload_v3, try_into_block, try_payload_v3_to_block, validate_block_hash, }; use alloy_primitives::{b256, hex, Bytes, U256}; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ExecutionPayloadV1, ExecutionPayloadV2, - ExecutionPayloadV3, ExecutionPayloadV4, + ExecutionPayloadV3, }; #[test] @@ -628,60 +575,10 @@ mod tests { let cancun_fields = CancunPayloadFields { parent_beacon_block_root, versioned_hashes }; // convert into block - let block = try_into_block(payload, Some(cancun_fields.parent_beacon_block_root)).unwrap(); + let block = + try_into_block(payload, Some(cancun_fields.parent_beacon_block_root), None).unwrap(); // Ensure the actual hash is calculated if we set the fields to what they should be validate_block_hash(block_hash_with_blob_fee_fields, block).unwrap(); } - - #[test] - fn parse_payload_v4() { - let s = r#"{ - "baseFeePerGas": "0x2ada43", - "blobGasUsed": "0x0", - "blockHash": "0x86eeb2a4b656499f313b601e1dcaedfeacccab27131b6d4ea99bc69a57607f7d", - "blockNumber": "0x2c", - "depositRequests": [ - { - "amount": "0xe8d4a51000", - "index": "0x0", - "pubkey": "0xaab5f2b3aad5c2075faf0c1d8937c7de51a53b765a21b4173eb2975878cea05d9ed3428b77f16a981716aa32af74c464", - "signature": "0xa889cd238be2dae44f2a3c24c04d686c548f6f82eb44d4604e1bc455b6960efb72b117e878068a8f2cfb91ad84b7ebce05b9254207aa51a1e8a3383d75b5a5bd2439f707636ea5b17b2b594b989c93b000b33e5dff6e4bed9d53a6d2d6889b0c", - "withdrawalCredentials": "0x00ab9364f8bf7561862ea0fc3b69c424c94ace406c4dc36ddfbf8a9d72051c80" - }, - { - "amount": "0xe8d4a51000", - "index": "0x1", - "pubkey": "0xb0b1b3b51cf688ead965a954c5cc206ba4e76f3f8efac60656ae708a9aad63487a2ca1fb30ccaf2ebe1028a2b2886b1b", - "signature": 
"0xb9759766e9bb191b1c457ae1da6cdf71a23fb9d8bc9f845eaa49ee4af280b3b9720ac4d81e64b1b50a65db7b8b4e76f1176a12e19d293d75574600e99fbdfecc1ab48edaeeffb3226cd47691d24473821dad0c6ff3973f03e4aa89f418933a56", - "withdrawalCredentials": "0x002d2b75f4a27f78e585a4735a40ab2437eceb12ec39938a94dc785a54d62513" - } - ], - "excessBlobGas": "0x0", - "extraData": "0x726574682f76302e322e302d626574612e372f6c696e7578", - "feeRecipient": "0x8943545177806ed17b9f23f0a21ee5948ecaa776", - "gasLimit": "0x1855e85", - "gasUsed": "0x25f98", - "logsBloom": "0x10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000", - "parentHash": "0xd753194ef19b5c566b7eca6e9ebcca03895b548e1e93a20a23d922ba0bc210d4", - "prevRandao": "0x8c52256fd491776dc32f531ad4c0dc1444684741bca15f54c9cd40c60142df90", - "receiptsRoot": "0x510e7fb94279897e5dcd6c1795f6137d8fe02e49e871bfea7999fd21a89f66aa", - "stateRoot": "0x59ae0706a2b47162666fc7af3e30ff7aa34154954b68cc6aed58c3af3d58c9c2", - "timestamp": "0x6643c5a9", - "transactions": [ - "0x02f9021e8330182480843b9aca0085174876e80083030d40944242424242424242424242424242424242424242893635c9adc5dea00000b901a422895118000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000012049f42823819771c6bbbd9cb6649850083fd3b6e5d0beb1069342c32d65a3b0990000000000000000000000000000000000000000000000000000000000000030aab5f2b3aad5c2075faf0c1d8937c7de51a53b765a21b4173eb2975878cea05d9ed3428b77f16a981716aa32af74c46400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000ab9364f8bf7561862ea0fc3b69c424c94ace406c4dc36ddfbf8a9d72051c800000000000000000000000000000000000000000000000000000000000000060a889cd238be2dae44f2a3c24c04d686c548f6f82eb44d4604e1bc455b6960efb72b117e878068a8f2cfb91ad84b7ebce05b9254207aa51a1e8a3383d75b5a5bd2439f707636ea5b17b2b594b989c93b000b33e5dff6e4bed9d53a6d2d6889b0cc080a0db786f0d89923949e533680524f003cebd66f32fbd30429a6b6bfbd3258dcf60a05241c54e05574765f7ddc1a742ae06b044edfe02bffb202bf172be97397eeca9", - 
"0x02f9021e8330182401843b9aca0085174876e80083030d40944242424242424242424242424242424242424242893635c9adc5dea00000b901a422895118000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000120d694d6a0b0103651aafd87db6c88297175d7317c6e6da53ccf706c3c991c91fd0000000000000000000000000000000000000000000000000000000000000030b0b1b3b51cf688ead965a954c5cc206ba4e76f3f8efac60656ae708a9aad63487a2ca1fb30ccaf2ebe1028a2b2886b1b000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020002d2b75f4a27f78e585a4735a40ab2437eceb12ec39938a94dc785a54d625130000000000000000000000000000000000000000000000000000000000000060b9759766e9bb191b1c457ae1da6cdf71a23fb9d8bc9f845eaa49ee4af280b3b9720ac4d81e64b1b50a65db7b8b4e76f1176a12e19d293d75574600e99fbdfecc1ab48edaeeffb3226cd47691d24473821dad0c6ff3973f03e4aa89f418933a56c080a099dc5b94a51e9b91a6425b1fed9792863006496ab71a4178524819d7db0c5e88a0119748e62700234079d91ae80f4676f9e0f71b260e9b46ef9b4aff331d3c2318" - ], - "withdrawalRequests": [], - "withdrawals": [], - "consolidationRequests": [] - }"#; - - let payload = serde_json::from_str::(s).unwrap(); - let mut block = try_payload_v4_to_block(payload).unwrap(); - block.header.parent_beacon_block_root = - Some(b256!("d9851db05fa63593f75e2b12c4bba9f47740613ca57da3b523a381b8c27f3297")); - let hash = block.seal_slow().hash(); - assert_eq!(hash, b256!("86eeb2a4b656499f313b601e1dcaedfeacccab27131b6d4ea99bc69a57607f7d")) - } } diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index 164f402e4..66465ef47 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -686,7 +686,7 @@ where let state = state_provider.witness(Default::default(), hashed_state).map_err(Into::into)?; - Ok(ExecutionWitness { state: state.into_iter().collect(), codes, keys: Some(keys) }) + Ok(ExecutionWitness { state: state.into_iter().collect(), codes, keys }) }) .await } diff --git a/crates/rpc/rpc/src/otterscan.rs b/crates/rpc/rpc/src/otterscan.rs index 45722978f..024bdd172 100644 --- a/crates/rpc/rpc/src/otterscan.rs +++ b/crates/rpc/rpc/src/otterscan.rs @@ -261,7 +261,6 @@ where from: receipt.from(), to: receipt.to(), contract_address: receipt.contract_address(), - state_root: receipt.state_root(), authorization_list: receipt .authorization_list() .map(<[SignedAuthorization]>::to_vec), diff --git a/crates/rpc/rpc/src/txpool.rs b/crates/rpc/rpc/src/txpool.rs index d40cdc5cd..47aaac0bb 100644 --- a/crates/rpc/rpc/src/txpool.rs +++ b/crates/rpc/rpc/src/txpool.rs @@ -101,7 +101,7 @@ where entry.insert( tx.nonce().to_string(), TxpoolInspectSummary { - to: tx.to().into(), + to: tx.to(), value: tx.value(), gas: tx.gas_limit() as u128, gas_price: tx.transaction.max_fee_per_gas(), diff --git a/crates/stages/stages/src/stages/bodies.rs b/crates/stages/stages/src/stages/bodies.rs index 2d441dee2..93d8a1229 100644 --- a/crates/stages/stages/src/stages/bodies.rs +++ b/crates/stages/stages/src/stages/bodies.rs @@ -121,7 +121,6 @@ where let mut tx_block_cursor = tx.cursor_write::()?; let mut ommers_cursor = tx.cursor_write::()?; let mut withdrawals_cursor = tx.cursor_write::()?; - let mut requests_cursor = tx.cursor_write::()?; // Get id for the next tx_num of zero if there are no transactions. 
let mut next_tx_num = tx_block_cursor.last()?.map(|(id, _)| id + 1).unwrap_or_default(); @@ -234,13 +233,6 @@ where .append(block_number, StoredBlockWithdrawals { withdrawals })?; } } - - // Write requests if any - if let Some(requests) = block.body.requests { - if !requests.0.is_empty() { - requests_cursor.append(block_number, requests)?; - } - } } BlockResponse::Empty(_) => {} }; @@ -276,7 +268,6 @@ where let mut body_cursor = tx.cursor_write::()?; let mut ommers_cursor = tx.cursor_write::()?; let mut withdrawals_cursor = tx.cursor_write::()?; - let mut requests_cursor = tx.cursor_write::()?; // Cursors to unwind transitions let mut tx_block_cursor = tx.cursor_write::()?; @@ -296,11 +287,6 @@ where withdrawals_cursor.delete_current()?; } - // Delete the requests entry if any - if requests_cursor.seek_exact(number)?.is_some() { - requests_cursor.delete_current()?; - } - // Delete all transaction to block values. if !block_meta.is_empty() && tx_block_cursor.seek_exact(block_meta.last_tx_num())?.is_some() diff --git a/crates/storage/codecs/src/alloy/authorization_list.rs b/crates/storage/codecs/src/alloy/authorization_list.rs index 3efe13590..2b013c0d3 100644 --- a/crates/storage/codecs/src/alloy/authorization_list.rs +++ b/crates/storage/codecs/src/alloy/authorization_list.rs @@ -13,7 +13,7 @@ use reth_codecs_derive::add_arbitrary_tests; #[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] #[add_arbitrary_tests(compact)] pub(crate) struct Authorization { - chain_id: U256, + chain_id: u64, address: Address, nonce: u64, } @@ -78,7 +78,7 @@ mod tests { #[test] fn test_roundtrip_compact_authorization_list_item() { let authorization = AlloyAuthorization { - chain_id: U256::from(1), + chain_id: 1u64, address: address!("dac17f958d2ee523a2206206994597c13d831ec7"), nonce: 1, } diff --git a/crates/storage/codecs/src/alloy/header.rs b/crates/storage/codecs/src/alloy/header.rs index 623eded9e..90e67b1e3 100644 --- a/crates/storage/codecs/src/alloy/header.rs +++ b/crates/storage/codecs/src/alloy/header.rs @@ -45,7 +45,7 @@ pub(crate) struct Header { #[cfg_attr(test, derive(serde::Serialize, serde::Deserialize))] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] pub(crate) struct HeaderExt { - requests_root: Option, + requests_hash: Option, } impl HeaderExt { @@ -53,7 +53,7 @@ impl HeaderExt { /// /// Required since [`Header`] uses `Option` as a field. 
const fn into_option(self) -> Option { - if self.requests_root.is_some() { + if self.requests_hash.is_some() { Some(self) } else { None @@ -66,7 +66,7 @@ impl Compact for AlloyHeader { where B: bytes::BufMut + AsMut<[u8]>, { - let extra_fields = HeaderExt { requests_root: self.requests_root }; + let extra_fields = HeaderExt { requests_hash: self.requests_hash }; let header = Header { parent_hash: self.parent_hash, @@ -116,7 +116,7 @@ impl Compact for AlloyHeader { blob_gas_used: header.blob_gas_used, excess_blob_gas: header.excess_blob_gas, parent_beacon_block_root: header.parent_beacon_block_root, - requests_root: header.extra_fields.and_then(|h| h.requests_root), + requests_hash: header.extra_fields.and_then(|h| h.requests_hash), extra_data: header.extra_data, }; (alloy_header, buf) @@ -176,7 +176,7 @@ mod tests { #[test] fn test_extra_fields() { let mut header = HOLESKY_BLOCK; - header.extra_fields = Some(HeaderExt { requests_root: Some(B256::random()) }); + header.extra_fields = Some(HeaderExt { requests_hash: Some(B256::random()) }); let mut encoded_header = vec![]; let len = header.to_compact(&mut encoded_header); diff --git a/crates/storage/codecs/src/alloy/mod.rs b/crates/storage/codecs/src/alloy/mod.rs index 942258d06..ed77876c5 100644 --- a/crates/storage/codecs/src/alloy/mod.rs +++ b/crates/storage/codecs/src/alloy/mod.rs @@ -3,7 +3,6 @@ mod authorization_list; mod genesis_account; mod header; mod log; -mod request; mod signature; mod transaction; mod trie; @@ -14,7 +13,6 @@ mod withdrawal; mod tests { use crate::{ alloy::{ - authorization_list::Authorization, genesis_account::{GenesisAccount, GenesisAccountRef, StorageEntries, StorageEntry}, header::{Header, HeaderExt}, transaction::{ @@ -38,7 +36,6 @@ mod tests { validate_bitflag_backwards_compat!(StorageEntries, UnusedBits::Zero); validate_bitflag_backwards_compat!(StorageEntry, UnusedBits::Zero); - validate_bitflag_backwards_compat!(Authorization, UnusedBits::NotZero); validate_bitflag_backwards_compat!(GenesisAccountRef<'_>, UnusedBits::NotZero); validate_bitflag_backwards_compat!(GenesisAccount, UnusedBits::NotZero); validate_bitflag_backwards_compat!(TxEip1559, UnusedBits::NotZero); diff --git a/crates/storage/codecs/src/alloy/request.rs b/crates/storage/codecs/src/alloy/request.rs deleted file mode 100644 index 2447160be..000000000 --- a/crates/storage/codecs/src/alloy/request.rs +++ /dev/null @@ -1,40 +0,0 @@ -//! Native Compact codec impl for EIP-7685 requests. - -use crate::Compact; -use alloy_consensus::Request; -use alloy_eips::eip7685::{Decodable7685, Encodable7685}; -use alloy_primitives::Bytes; -use bytes::BufMut; - -impl Compact for Request { - fn to_compact(&self, buf: &mut B) -> usize - where - B: BufMut + AsMut<[u8]>, - { - let encoded: Bytes = self.encoded_7685().into(); - encoded.to_compact(buf) - } - - fn from_compact(buf: &[u8], _: usize) -> (Self, &[u8]) { - let (raw, buf) = Bytes::from_compact(buf, buf.len()); - - (Self::decode_7685(&mut raw.as_ref()).expect("invalid eip-7685 request in db"), buf) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use proptest::proptest; - use proptest_arbitrary_interop::arb; - - proptest! 
{ - #[test] - fn roundtrip(request in arb::()) { - let mut buf = Vec::::new(); - request.to_compact(&mut buf); - let (decoded, _) = Request::from_compact(&buf, buf.len()); - assert_eq!(request, decoded); - } - } -} diff --git a/crates/storage/db-api/src/models/mod.rs b/crates/storage/db-api/src/models/mod.rs index 0f35a558a..b077027f2 100644 --- a/crates/storage/db-api/src/models/mod.rs +++ b/crates/storage/db-api/src/models/mod.rs @@ -8,7 +8,7 @@ use alloy_genesis::GenesisAccount; use alloy_primitives::{Address, Bytes, Log, B256, U256}; use reth_codecs::{add_arbitrary_tests, Compact}; use reth_primitives::{ - Account, Bytecode, Header, Receipt, Requests, StorageEntry, TransactionSignedNoHash, TxType, + Account, Bytecode, Header, Receipt, StorageEntry, TransactionSignedNoHash, TxType, }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::StageCheckpoint; @@ -230,7 +230,6 @@ impl_compression_for_compact!( StageCheckpoint, PruneCheckpoint, ClientVersion, - Requests, // Non-DB GenesisAccount ); @@ -366,6 +365,5 @@ mod tests { validate_bitflag_backwards_compat!(StoredBlockWithdrawals, UnusedBits::Zero); validate_bitflag_backwards_compat!(StorageHashingCheckpoint, UnusedBits::NotZero); validate_bitflag_backwards_compat!(Withdrawals, UnusedBits::Zero); - validate_bitflag_backwards_compat!(Requests, UnusedBits::Zero); } } diff --git a/crates/storage/db/src/tables/mod.rs b/crates/storage/db/src/tables/mod.rs index 83a063903..27f58f8a1 100644 --- a/crates/storage/db/src/tables/mod.rs +++ b/crates/storage/db/src/tables/mod.rs @@ -30,9 +30,7 @@ use reth_db_api::{ }, table::{Decode, DupSort, Encode, Table}, }; -use reth_primitives::{ - Account, Bytecode, Header, Receipt, Requests, StorageEntry, TransactionSignedNoHash, -}; +use reth_primitives::{Account, Bytecode, Header, Receipt, StorageEntry, TransactionSignedNoHash}; use reth_primitives_traits::IntegerList; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::StageCheckpoint; @@ -404,9 +402,6 @@ tables! { /// Stores the history of client versions that have accessed the database with write privileges by unix timestamp in seconds. table VersionHistory; - /// Stores EIP-7685 EL -> CL requests, indexed by block number. - table BlockRequests; - /// Stores generic chain state info, like the last finalized block. 
table ChainState; } diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs b/crates/storage/provider/src/providers/blockchain_provider.rs index 9e6f32b33..1866610e3 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -4,7 +4,7 @@ use crate::{ CanonStateSubscriptions, ChainSpecProvider, ChainStateBlockReader, ChangeSetReader, DatabaseProviderFactory, DatabaseProviderRO, EvmEnvProvider, HeaderProvider, ProviderError, ProviderFactory, PruneCheckpointReader, ReceiptProvider, ReceiptProviderIdExt, - RequestsProvider, StageCheckpointReader, StateProviderBox, StateProviderFactory, StateReader, + StageCheckpointReader, StateProviderBox, StateProviderFactory, StateReader, StaticFileProviderFactory, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag, HashOrNumber}; @@ -1200,24 +1200,6 @@ impl WithdrawalsProvider for BlockchainProvider2 { } } -impl RequestsProvider for BlockchainProvider2 { - fn requests_by_block( - &self, - id: BlockHashOrNumber, - timestamp: u64, - ) -> ProviderResult> { - if !self.chain_spec().is_prague_active_at_timestamp(timestamp) { - return Ok(None) - } - - self.get_in_memory_or_storage_by_block( - id, - |db_provider| db_provider.requests_by_block(id, timestamp), - |block_state| Ok(block_state.block_ref().block().body.requests.clone()), - ) - } -} - impl StageCheckpointReader for BlockchainProvider2 { fn get_stage_checkpoint(&self, id: StageId) -> ProviderResult> { self.database.provider()?.get_stage_checkpoint(id) @@ -1747,8 +1729,8 @@ mod tests { use reth_storage_api::{ BlockHashReader, BlockIdReader, BlockNumReader, BlockReader, BlockReaderIdExt, BlockSource, ChangeSetReader, DatabaseProviderFactory, HeaderProvider, ReceiptProvider, - ReceiptProviderIdExt, RequestsProvider, StateProviderFactory, TransactionVariant, - TransactionsProvider, WithdrawalsProvider, + ReceiptProviderIdExt, StateProviderFactory, TransactionVariant, TransactionsProvider, + WithdrawalsProvider, }; use reth_testing_utils::generators::{ self, random_block, random_block_range, random_changeset_range, random_eoa_accounts, @@ -2849,37 +2831,6 @@ mod tests { Ok(()) } - #[test] - fn test_requests_provider() -> eyre::Result<()> { - let mut rng = generators::rng(); - let chain_spec = Arc::new(ChainSpecBuilder::mainnet().prague_activated().build()); - let (provider, database_blocks, in_memory_blocks, _) = - provider_with_chain_spec_and_random_blocks( - &mut rng, - chain_spec.clone(), - TEST_BLOCKS_COUNT, - TEST_BLOCKS_COUNT, - BlockRangeParams { requests_count: Some(1..2), ..Default::default() }, - )?; - - let database_block = database_blocks.first().unwrap().clone(); - let in_memory_block = in_memory_blocks.last().unwrap().clone(); - - let prague_timestamp = - chain_spec.hardforks.fork(EthereumHardfork::Prague).as_timestamp().unwrap(); - - assert_eq!( - provider.requests_by_block(database_block.number.into(), prague_timestamp,)?, - database_block.body.requests.clone() - ); - assert_eq!( - provider.requests_by_block(in_memory_block.number.into(), prague_timestamp,)?, - in_memory_block.body.requests.clone() - ); - - Ok(()) - } - #[test] fn test_state_provider_factory() -> eyre::Result<()> { let mut rng = generators::rng(); diff --git a/crates/storage/provider/src/providers/database/metrics.rs b/crates/storage/provider/src/providers/database/metrics.rs index ba43298c3..7e9ee7202 100644 --- 
a/crates/storage/provider/src/providers/database/metrics.rs +++ b/crates/storage/provider/src/providers/database/metrics.rs @@ -61,7 +61,6 @@ pub(crate) enum Action { InsertTransactions, InsertTransactionHashNumbers, InsertBlockWithdrawals, - InsertBlockRequests, InsertBlockBodyIndices, InsertTransactionBlocks, GetNextTxNum, @@ -106,8 +105,6 @@ struct DatabaseProviderMetrics { insert_tx_hash_numbers: Histogram, /// Duration of insert block withdrawals insert_block_withdrawals: Histogram, - /// Duration of insert block requests - insert_block_requests: Histogram, /// Duration of insert block body indices insert_block_body_indices: Histogram, /// Duration of insert transaction blocks @@ -139,7 +136,6 @@ impl DatabaseProviderMetrics { Action::InsertTransactions => self.insert_transactions.record(duration), Action::InsertTransactionHashNumbers => self.insert_tx_hash_numbers.record(duration), Action::InsertBlockWithdrawals => self.insert_block_withdrawals.record(duration), - Action::InsertBlockRequests => self.insert_block_requests.record(duration), Action::InsertBlockBodyIndices => self.insert_block_body_indices.record(duration), Action::InsertTransactionBlocks => self.insert_tx_blocks.record(duration), Action::GetNextTxNum => self.get_next_tx_num.record(duration), diff --git a/crates/storage/provider/src/providers/database/mod.rs b/crates/storage/provider/src/providers/database/mod.rs index 520b51452..54186dca6 100644 --- a/crates/storage/provider/src/providers/database/mod.rs +++ b/crates/storage/provider/src/providers/database/mod.rs @@ -4,8 +4,8 @@ use crate::{ traits::{BlockSource, ReceiptProvider}, BlockHashReader, BlockNumReader, BlockReader, ChainSpecProvider, DatabaseProviderFactory, EvmEnvProvider, HeaderProvider, HeaderSyncGap, HeaderSyncGapProvider, ProviderError, - PruneCheckpointReader, RequestsProvider, StageCheckpointReader, StateProviderBox, - StaticFileProviderFactory, TransactionVariant, TransactionsProvider, WithdrawalsProvider, + PruneCheckpointReader, StageCheckpointReader, StateProviderBox, StaticFileProviderFactory, + TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; use alloy_eips::BlockHashOrNumber; use alloy_primitives::{Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; @@ -519,16 +519,6 @@ impl WithdrawalsProvider for ProviderFactory { } } -impl RequestsProvider for ProviderFactory { - fn requests_by_block( - &self, - id: BlockHashOrNumber, - timestamp: u64, - ) -> ProviderResult> { - self.provider()?.requests_by_block(id, timestamp) - } -} - impl StageCheckpointReader for ProviderFactory { fn get_stage_checkpoint(&self, id: StageId) -> ProviderResult> { self.provider()?.get_stage_checkpoint(id) diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 8140700fa..308fa364a 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -11,10 +11,10 @@ use crate::{ DBProvider, EvmEnvProvider, HashingWriter, HeaderProvider, HeaderSyncGap, HeaderSyncGapProvider, HistoricalStateProvider, HistoricalStateProviderRef, HistoryWriter, LatestStateProvider, LatestStateProviderRef, OriginalValuesKnown, ProviderError, - PruneCheckpointReader, PruneCheckpointWriter, RequestsProvider, RevertsInit, - StageCheckpointReader, StateChangeWriter, StateProviderBox, StateReader, StateWriter, - StaticFileProviderFactory, StatsReader, StorageReader, StorageTrieWriter, TransactionVariant, - 
TransactionsProvider, TransactionsProviderExt, TrieWriter, WithdrawalsProvider, + PruneCheckpointReader, PruneCheckpointWriter, RevertsInit, StageCheckpointReader, + StateChangeWriter, StateProviderBox, StateReader, StateWriter, StaticFileProviderFactory, + StatsReader, StorageReader, StorageTrieWriter, TransactionVariant, TransactionsProvider, + TransactionsProviderExt, TrieWriter, WithdrawalsProvider, }; use alloy_eips::BlockHashOrNumber; use alloy_primitives::{keccak256, Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; @@ -40,7 +40,7 @@ use reth_evm::ConfigureEvmEnv; use reth_execution_types::{Chain, ExecutionOutcome}; use reth_network_p2p::headers::downloader::SyncTarget; use reth_primitives::{ - Account, Block, BlockBody, BlockWithSenders, Bytecode, GotExpected, Header, Receipt, Requests, + Account, Block, BlockBody, BlockWithSenders, Bytecode, GotExpected, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, StaticFileSegment, StorageEntry, TransactionMeta, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, Withdrawal, Withdrawals, @@ -500,7 +500,6 @@ impl DatabaseProvider { Vec
, Vec
, Option, - Option, ) -> ProviderResult>, { let Some(block_number) = self.convert_hash_or_number(id)? else { return Ok(None) }; @@ -509,7 +508,6 @@ impl DatabaseProvider { let ommers = self.ommers(block_number.into())?.unwrap_or_default(); let withdrawals = self.withdrawals_by_block(block_number.into(), header.as_ref().timestamp)?; - let requests = self.requests_by_block(block_number.into(), header.as_ref().timestamp)?; // Get the block body // @@ -540,7 +538,7 @@ impl DatabaseProvider { }) .collect(); - construct_block(header, body, senders, ommers, withdrawals, requests) + construct_block(header, body, senders, ommers, withdrawals) } /// Returns a range of blocks from the database. @@ -551,7 +549,6 @@ impl DatabaseProvider { /// - Range of transaction numbers /// – Ommers /// – Withdrawals - /// – Requests /// – Senders fn block_range( &self, @@ -563,13 +560,7 @@ impl DatabaseProvider { Spec: EthereumHardforks, H: AsRef
, HF: FnOnce(RangeInclusive) -> ProviderResult>, - F: FnMut( - H, - Range, - Vec
, - Option, - Option, - ) -> ProviderResult, + F: FnMut(H, Range, Vec
, Option) -> ProviderResult, { if range.is_empty() { return Ok(Vec::new()) @@ -581,7 +572,6 @@ impl DatabaseProvider { let headers = headers_range(range)?; let mut ommers_cursor = self.tx.cursor_read::()?; let mut withdrawals_cursor = self.tx.cursor_read::()?; - let mut requests_cursor = self.tx.cursor_read::()?; let mut block_body_cursor = self.tx.cursor_read::()?; for header in headers { @@ -608,13 +598,6 @@ impl DatabaseProvider { } else { None }; - let requests = - if self.chain_spec.is_prague_active_at_timestamp(header_ref.timestamp) { - (requests_cursor.seek_exact(header_ref.number)?.unwrap_or_default().1) - .into() - } else { - None - }; let ommers = if self.chain_spec.final_paris_total_difficulty(header_ref.number).is_some() { Vec::new() @@ -625,7 +608,7 @@ impl DatabaseProvider { .unwrap_or_default() }; - if let Ok(b) = assemble_block(header, tx_range, ommers, withdrawals, requests) { + if let Ok(b) = assemble_block(header, tx_range, ommers, withdrawals) { blocks.push(b); } } @@ -643,7 +626,6 @@ impl DatabaseProvider { /// - Transactions /// – Ommers /// – Withdrawals - /// – Requests /// – Senders fn block_with_senders_range( &self, @@ -660,14 +642,13 @@ impl DatabaseProvider { Vec, Vec
, Option, - Option, Vec
, ) -> ProviderResult, { let mut tx_cursor = self.tx.cursor_read::()?; let mut senders_cursor = self.tx.cursor_read::()?; - self.block_range(range, headers_range, |header, tx_range, ommers, withdrawals, requests| { + self.block_range(range, headers_range, |header, tx_range, ommers, withdrawals| { let (body, senders) = if tx_range.is_empty() { (Vec::new(), Vec::new()) } else { @@ -699,7 +680,7 @@ impl DatabaseProvider { (body, senders) }; - assemble_block(header, body, ommers, withdrawals, requests, senders) + assemble_block(header, body, ommers, withdrawals, senders) }) } @@ -781,7 +762,6 @@ impl DatabaseProvider { // - Bodies (transactions) // - Uncles/ommers // - Withdrawals - // - Requests // - Signers let block_headers = self.get::(range.clone())?; @@ -792,7 +772,6 @@ impl DatabaseProvider { let block_header_hashes = self.get::(range.clone())?; let block_ommers = self.get::(range.clone())?; let block_withdrawals = self.get::(range.clone())?; - let block_requests = self.get::(range.clone())?; let block_tx = self.get_block_transaction_range(range)?; let mut blocks = Vec::with_capacity(block_headers.len()); @@ -805,10 +784,8 @@ impl DatabaseProvider { // Ommers can be empty for some blocks let mut block_ommers_iter = block_ommers.into_iter(); let mut block_withdrawals_iter = block_withdrawals.into_iter(); - let mut block_requests_iter = block_requests.into_iter(); let mut block_ommers = block_ommers_iter.next(); let mut block_withdrawals = block_withdrawals_iter.next(); - let mut block_requests = block_requests_iter.next(); for ((main_block_number, header), (_, header_hash), (_, tx)) in izip!(block_header_iter, block_header_hashes_iter, block_tx_iter) @@ -841,24 +818,10 @@ impl DatabaseProvider { withdrawals = None } - // requests can be missing - let prague_is_active = self.chain_spec.is_prague_active_at_timestamp(header.timestamp); - let mut requests = Some(Requests::default()); - if prague_is_active { - if let Some((block_number, _)) = block_requests.as_ref() { - if *block_number == main_block_number { - requests = Some(block_requests.take().unwrap().1); - block_requests = block_requests_iter.next(); - } - } - } else { - requests = None; - } - blocks.push(SealedBlockWithSenders { block: SealedBlock { header, - body: BlockBody { transactions, ommers, withdrawals, requests }, + body: BlockBody { transactions, ommers, withdrawals }, }, senders, }) @@ -1222,7 +1185,6 @@ impl DatabaseProvider { /// * [`CanonicalHeaders`](tables::CanonicalHeaders) /// * [`BlockOmmers`](tables::BlockOmmers) /// * [`BlockWithdrawals`](tables::BlockWithdrawals) - /// * [`BlockRequests`](tables::BlockRequests) /// * [`HeaderTerminalDifficulties`](tables::HeaderTerminalDifficulties) /// /// This will also remove transaction data according to @@ -1242,7 +1204,6 @@ impl DatabaseProvider { self.remove::(range.clone())?; self.remove::(range.clone())?; self.remove::(range.clone())?; - self.remove::(range.clone())?; self.remove_block_transaction_range(range.clone())?; self.remove::(range)?; @@ -1256,7 +1217,6 @@ impl DatabaseProvider { /// * [`CanonicalHeaders`](tables::CanonicalHeaders) /// * [`BlockOmmers`](tables::BlockOmmers) /// * [`BlockWithdrawals`](tables::BlockWithdrawals) - /// * [`BlockRequests`](tables::BlockRequests) /// * [`HeaderTerminalDifficulties`](tables::HeaderTerminalDifficulties) /// /// This will also remove transaction data according to @@ -1274,7 +1234,6 @@ impl DatabaseProvider { // - Bodies (transactions) // - Uncles/ommers // - Withdrawals - // - Requests // - Signers let block_headers = 
self.take::(range.clone())?; @@ -1288,7 +1247,6 @@ impl DatabaseProvider { let block_header_hashes = self.take::(range.clone())?; let block_ommers = self.take::(range.clone())?; let block_withdrawals = self.take::(range.clone())?; - let block_requests = self.take::(range.clone())?; let block_tx = self.take_block_transaction_range(range.clone())?; let mut blocks = Vec::with_capacity(block_headers.len()); @@ -1304,10 +1262,8 @@ impl DatabaseProvider { // Ommers can be empty for some blocks let mut block_ommers_iter = block_ommers.into_iter(); let mut block_withdrawals_iter = block_withdrawals.into_iter(); - let mut block_requests_iter = block_requests.into_iter(); let mut block_ommers = block_ommers_iter.next(); let mut block_withdrawals = block_withdrawals_iter.next(); - let mut block_requests = block_requests_iter.next(); for ((main_block_number, header), (_, header_hash), (_, tx)) in izip!(block_header_iter, block_header_hashes_iter, block_tx_iter) @@ -1340,24 +1296,10 @@ impl DatabaseProvider { withdrawals = None } - // requests can be missing - let prague_is_active = self.chain_spec.is_prague_active_at_timestamp(header.timestamp); - let mut requests = Some(Requests::default()); - if prague_is_active { - if let Some((block_number, _)) = block_requests.as_ref() { - if *block_number == main_block_number { - requests = Some(block_requests.take().unwrap().1); - block_requests = block_requests_iter.next(); - } - } - } else { - requests = None; - } - blocks.push(SealedBlockWithSenders { block: SealedBlock { header, - body: BlockBody { transactions, ommers, withdrawals, requests }, + body: BlockBody { transactions, ommers, withdrawals }, }, senders, }) @@ -1726,7 +1668,6 @@ impl BlockReader for DatabasePr if let Some(header) = self.header_by_number(number)? { let withdrawals = self.withdrawals_by_block(number.into(), header.timestamp)?; let ommers = self.ommers(number.into())?.unwrap_or_default(); - let requests = self.requests_by_block(number.into(), header.timestamp)?; // If the body indices are not found, this means that the transactions either do not // exist in the database yet, or they do exit but are not indexed. 
// If they exist but are not indexed, we don't have enough @@ -1738,7 +1679,7 @@ impl BlockReader for DatabasePr return Ok(Some(Block { header, - body: BlockBody { transactions, ommers, withdrawals, requests }, + body: BlockBody { transactions, ommers, withdrawals }, })) } } @@ -1798,8 +1739,8 @@ impl BlockReader for DatabasePr id, transaction_kind, |block_number| self.header_by_number(block_number), - |header, transactions, senders, ommers, withdrawals, requests| { - Block { header, body: BlockBody { transactions, ommers, withdrawals, requests } } + |header, transactions, senders, ommers, withdrawals| { + Block { header, body: BlockBody { transactions, ommers, withdrawals } } // Note: we're using unchecked here because we know the block contains valid txs // wrt to its height and can ignore the s value check so pre // EIP-2 txs are allowed @@ -1819,17 +1760,14 @@ impl BlockReader for DatabasePr id, transaction_kind, |block_number| self.sealed_header(block_number), - |header, transactions, senders, ommers, withdrawals, requests| { - SealedBlock { - header, - body: BlockBody { transactions, ommers, withdrawals, requests }, - } - // Note: we're using unchecked here because we know the block contains valid txs - // wrt to its height and can ignore the s value check so pre - // EIP-2 txs are allowed - .try_with_senders_unchecked(senders) - .map(Some) - .map_err(|_| ProviderError::SenderRecoveryError) + |header, transactions, senders, ommers, withdrawals| { + SealedBlock { header, body: BlockBody { transactions, ommers, withdrawals } } + // Note: we're using unchecked here because we know the block contains valid txs + // wrt to its height and can ignore the s value check so pre + // EIP-2 txs are allowed + .try_with_senders_unchecked(senders) + .map(Some) + .map_err(|_| ProviderError::SenderRecoveryError) }, ) } @@ -1839,7 +1777,7 @@ impl BlockReader for DatabasePr self.block_range( range, |range| self.headers_range(range), - |header, tx_range, ommers, withdrawals, requests| { + |header, tx_range, ommers, withdrawals| { let transactions = if tx_range.is_empty() { Vec::new() } else { @@ -1848,10 +1786,7 @@ impl BlockReader for DatabasePr .map(Into::into) .collect() }; - Ok(Block { - header, - body: BlockBody { transactions, ommers, withdrawals, requests }, - }) + Ok(Block { header, body: BlockBody { transactions, ommers, withdrawals } }) }, ) } @@ -1863,8 +1798,8 @@ impl BlockReader for DatabasePr self.block_with_senders_range( range, |range| self.headers_range(range), - |header, transactions, ommers, withdrawals, requests, senders| { - Block { header, body: BlockBody { transactions, ommers, withdrawals, requests } } + |header, transactions, ommers, withdrawals, senders| { + Block { header, body: BlockBody { transactions, ommers, withdrawals } } .try_with_senders_unchecked(senders) .map_err(|_| ProviderError::SenderRecoveryError) }, @@ -1878,12 +1813,9 @@ impl BlockReader for DatabasePr self.block_with_senders_range( range, |range| self.sealed_headers_range(range), - |header, transactions, ommers, withdrawals, requests, senders| { + |header, transactions, ommers, withdrawals, senders| { SealedBlockWithSenders::new( - SealedBlock { - header, - body: BlockBody { transactions, ommers, withdrawals, requests }, - }, + SealedBlock { header, body: BlockBody { transactions, ommers, withdrawals } }, senders, ) .ok_or(ProviderError::SenderRecoveryError) @@ -2200,24 +2132,6 @@ impl WithdrawalsProvider } } -impl RequestsProvider - for DatabaseProvider -{ - fn requests_by_block( - &self, - id: 
BlockHashOrNumber, - timestamp: u64, - ) -> ProviderResult> { - if self.chain_spec.is_prague_active_at_timestamp(timestamp) { - if let Some(number) = self.convert_hash_or_number(id)? { - let requests = self.tx.get::(number)?; - return Ok(requests) - } - } - Ok(None) - } -} - impl EvmEnvProvider for DatabaseProvider { @@ -3413,7 +3327,6 @@ impl(block_number, requests)?; - durations_recorder.record_relative(metrics::Action::InsertBlockRequests); - } - let block_indices = StoredBlockBodyIndices { first_tx_num, tx_count }; self.tx.put::(block_number, block_indices.clone())?; durations_recorder.record_relative(metrics::Action::InsertBlockBodyIndices); diff --git a/crates/storage/provider/src/providers/mod.rs b/crates/storage/provider/src/providers/mod.rs index 561e1d974..b98bbf5be 100644 --- a/crates/storage/provider/src/providers/mod.rs +++ b/crates/storage/provider/src/providers/mod.rs @@ -3,7 +3,7 @@ use crate::{ BlockSource, BlockchainTreePendingStateProvider, CanonChainTracker, CanonStateNotifications, CanonStateSubscriptions, ChainSpecProvider, ChainStateBlockReader, ChangeSetReader, DatabaseProviderFactory, EvmEnvProvider, FullExecutionDataProvider, HeaderProvider, - ProviderError, PruneCheckpointReader, ReceiptProvider, ReceiptProviderIdExt, RequestsProvider, + ProviderError, PruneCheckpointReader, ReceiptProvider, ReceiptProviderIdExt, StageCheckpointReader, StateProviderBox, StateProviderFactory, StaticFileProviderFactory, TransactionVariant, TransactionsProvider, TreeViewer, WithdrawalsProvider, }; @@ -504,16 +504,6 @@ impl WithdrawalsProvider for BlockchainProvider { } } -impl RequestsProvider for BlockchainProvider { - fn requests_by_block( - &self, - id: BlockHashOrNumber, - timestamp: u64, - ) -> ProviderResult> { - self.database.requests_by_block(id, timestamp) - } -} - impl StageCheckpointReader for BlockchainProvider { fn get_stage_checkpoint(&self, id: StageId) -> ProviderResult> { self.database.provider()?.get_stage_checkpoint(id) diff --git a/crates/storage/provider/src/providers/static_file/manager.rs b/crates/storage/provider/src/providers/static_file/manager.rs index 6f5cf07c9..20d6a1b18 100644 --- a/crates/storage/provider/src/providers/static_file/manager.rs +++ b/crates/storage/provider/src/providers/static_file/manager.rs @@ -4,8 +4,8 @@ use super::{ }; use crate::{ to_range, BlockHashReader, BlockNumReader, BlockReader, BlockSource, HeaderProvider, - ReceiptProvider, RequestsProvider, StageCheckpointReader, StatsReader, TransactionVariant, - TransactionsProvider, TransactionsProviderExt, WithdrawalsProvider, + ReceiptProvider, StageCheckpointReader, StatsReader, TransactionVariant, TransactionsProvider, + TransactionsProviderExt, WithdrawalsProvider, }; use alloy_eips::BlockHashOrNumber; use alloy_primitives::{keccak256, Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; @@ -1642,17 +1642,6 @@ impl WithdrawalsProvider for StaticFileProvider { } } -impl RequestsProvider for StaticFileProvider { - fn requests_by_block( - &self, - _id: BlockHashOrNumber, - _timestamp: u64, - ) -> ProviderResult> { - // Required data not present in static_files - Err(ProviderError::UnsupportedProvider) - } -} - impl StatsReader for StaticFileProvider { fn count_entries(&self) -> ProviderResult { match T::NAME { diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index 2a70664b1..07486f555 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -40,7 
+40,6 @@ pub fn assert_genesis_block( ); assert_eq!(tx.table::().unwrap(), vec![]); assert_eq!(tx.table::().unwrap(), vec![]); - assert_eq!(tx.table::().unwrap(), vec![]); assert_eq!(tx.table::().unwrap(), vec![]); assert_eq!(tx.table::().unwrap(), vec![]); assert_eq!(tx.table::().unwrap(), vec![]); diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs index c7c94b939..08530acf0 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -2,9 +2,9 @@ use crate::{ traits::{BlockSource, ReceiptProvider}, AccountReader, BlockExecutionReader, BlockHashReader, BlockIdReader, BlockNumReader, BlockReader, BlockReaderIdExt, ChainSpecProvider, ChangeSetReader, DatabaseProvider, - EvmEnvProvider, HeaderProvider, ReceiptProviderIdExt, RequestsProvider, StateProvider, - StateProviderBox, StateProviderFactory, StateReader, StateRootProvider, TransactionVariant, - TransactionsProvider, WithdrawalsProvider, + EvmEnvProvider, HeaderProvider, ReceiptProviderIdExt, StateProvider, StateProviderBox, + StateProviderFactory, StateReader, StateRootProvider, TransactionVariant, TransactionsProvider, + WithdrawalsProvider, }; use alloy_consensus::constants::EMPTY_ROOT_HASH; use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumberOrTag}; @@ -809,16 +809,6 @@ impl WithdrawalsProvider for MockEthProvider { } } -impl RequestsProvider for MockEthProvider { - fn requests_by_block( - &self, - _id: BlockHashOrNumber, - _timestamp: u64, - ) -> ProviderResult> { - Ok(None) - } -} - impl ChangeSetReader for MockEthProvider { fn account_block_changeset( &self, diff --git a/crates/storage/provider/src/test_utils/noop.rs b/crates/storage/provider/src/test_utils/noop.rs index 0a205389c..f6f7e185d 100644 --- a/crates/storage/provider/src/test_utils/noop.rs +++ b/crates/storage/provider/src/test_utils/noop.rs @@ -37,7 +37,7 @@ use crate::{ traits::{BlockSource, ReceiptProvider}, AccountReader, BlockHashReader, BlockIdReader, BlockNumReader, BlockReader, BlockReaderIdExt, ChainSpecProvider, ChangeSetReader, EvmEnvProvider, HeaderProvider, PruneCheckpointReader, - ReceiptProviderIdExt, RequestsProvider, StageCheckpointReader, StateProvider, StateProviderBox, + ReceiptProviderIdExt, StageCheckpointReader, StateProvider, StateProviderBox, StateProviderFactory, StateRootProvider, StaticFileProviderFactory, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; @@ -539,16 +539,6 @@ impl WithdrawalsProvider for NoopProvider { } } -impl RequestsProvider for NoopProvider { - fn requests_by_block( - &self, - _id: BlockHashOrNumber, - _timestamp: u64, - ) -> ProviderResult> { - Ok(None) - } -} - impl PruneCheckpointReader for NoopProvider { fn get_prune_checkpoint( &self, diff --git a/crates/storage/storage-api/src/block.rs b/crates/storage/storage-api/src/block.rs index a3b0cc743..01238be74 100644 --- a/crates/storage/storage-api/src/block.rs +++ b/crates/storage/storage-api/src/block.rs @@ -1,6 +1,6 @@ use crate::{ - BlockNumReader, HeaderProvider, ReceiptProvider, ReceiptProviderIdExt, RequestsProvider, - TransactionVariant, TransactionsProvider, WithdrawalsProvider, + BlockNumReader, HeaderProvider, ReceiptProvider, ReceiptProviderIdExt, TransactionVariant, + TransactionsProvider, WithdrawalsProvider, }; use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumberOrTag}; use alloy_primitives::{BlockNumber, Sealable, B256}; @@ -52,7 +52,6 @@ pub trait BlockReader: + HeaderProvider + TransactionsProvider + ReceiptProvider - 
+ RequestsProvider + WithdrawalsProvider + Send + Sync diff --git a/crates/storage/storage-api/src/lib.rs b/crates/storage/storage-api/src/lib.rs index 3f93bbbde..4e589242a 100644 --- a/crates/storage/storage-api/src/lib.rs +++ b/crates/storage/storage-api/src/lib.rs @@ -31,9 +31,6 @@ pub use prune_checkpoint::*; mod receipts; pub use receipts::*; -mod requests; -pub use requests::*; - mod stage_checkpoint; pub use stage_checkpoint::*; diff --git a/crates/storage/storage-api/src/requests.rs b/crates/storage/storage-api/src/requests.rs deleted file mode 100644 index 02818c429..000000000 --- a/crates/storage/storage-api/src/requests.rs +++ /dev/null @@ -1,14 +0,0 @@ -use alloy_eips::BlockHashOrNumber; -use reth_primitives::Requests; -use reth_storage_errors::provider::ProviderResult; - -/// Client trait for fetching EIP-7685 [Requests] for blocks. -#[auto_impl::auto_impl(&, Arc)] -pub trait RequestsProvider: Send + Sync { - /// Get withdrawals by block id. - fn requests_by_block( - &self, - id: BlockHashOrNumber, - timestamp: u64, - ) -> ProviderResult>; -} diff --git a/docs/crates/db.md b/docs/crates/db.md index 3ccfb72e3..79eeae5ee 100644 --- a/docs/crates/db.md +++ b/docs/crates/db.md @@ -61,7 +61,6 @@ There are many tables within the node, all used to store different types of data - StageCheckpointProgresses - PruneCheckpoints - VersionHistory -- BlockRequests - ChainState
@@ -283,7 +282,6 @@ fn unwind(&mut self, provider: &DatabaseProviderRW, input: UnwindInput) { let mut body_cursor = tx.cursor_write::()?; let mut ommers_cursor = tx.cursor_write::()?; let mut withdrawals_cursor = tx.cursor_write::()?; - let mut requests_cursor = tx.cursor_write::()?; // Cursors to unwind transitions let mut tx_block_cursor = tx.cursor_write::()?; @@ -322,7 +320,7 @@ fn unwind(&mut self, provider: &DatabaseProviderRW, input: UnwindInput) { } ``` -This function first grabs a mutable cursor for the `BlockBodyIndices`, `BlockOmmers`, `BlockWithdrawals`, `BlockRequests`, `TransactionBlocks` tables. +This function first grabs a mutable cursor for the `BlockBodyIndices`, `BlockOmmers`, `BlockWithdrawals`, `TransactionBlocks` tables. Then it gets a walker of the block body cursor, and then walk backwards through the cursor to delete the block body entries from the last block number to the block number specified in the `UnwindInput` struct. diff --git a/testing/ef-tests/src/models.rs b/testing/ef-tests/src/models.rs index 3f3df1536..30e5e5bb2 100644 --- a/testing/ef-tests/src/models.rs +++ b/testing/ef-tests/src/models.rs @@ -87,7 +87,7 @@ pub struct Header { /// Parent beacon block root. pub parent_beacon_block_root: Option, /// Requests root. - pub requests_root: Option, + pub requests_hash: Option, } impl From
for SealedHeader { @@ -113,7 +113,7 @@ impl From
for SealedHeader { blob_gas_used: value.blob_gas_used.map(|v| v.to::()), excess_blob_gas: value.excess_blob_gas.map(|v| v.to::()), parent_beacon_block_root: value.parent_beacon_block_root, - requests_root: value.requests_root, + requests_hash: value.requests_hash, }; Self::new(header, value.hash) } diff --git a/testing/testing-utils/Cargo.toml b/testing/testing-utils/Cargo.toml index 49a59ecf6..98bfeabdf 100644 --- a/testing/testing-utils/Cargo.toml +++ b/testing/testing-utils/Cargo.toml @@ -14,10 +14,12 @@ workspace = true [dependencies] reth-primitives = { workspace = true, features = ["secp256k1"] } -alloy-eips.workspace = true alloy-genesis.workspace = true alloy-primitives.workspace = true alloy-consensus.workspace = true rand.workspace = true secp256k1 = { workspace = true, features = ["rand"] } + +[dev-dependencies] +alloy-eips.workspace = true diff --git a/testing/testing-utils/src/generators.rs b/testing/testing-utils/src/generators.rs index d07af00ce..571727cb2 100644 --- a/testing/testing-utils/src/generators.rs +++ b/testing/testing-utils/src/generators.rs @@ -1,17 +1,14 @@ //! Generators for different data structures like block headers, block bodies and ranges of those. use alloy_consensus::{Transaction as _, TxLegacy}; -use alloy_eips::{ - eip6110::DepositRequest, eip7002::WithdrawalRequest, eip7251::ConsolidationRequest, -}; use alloy_primitives::{Address, BlockNumber, Bytes, Parity, Sealable, TxKind, B256, U256}; pub use rand::Rng; use rand::{ distributions::uniform::SampleRange, rngs::StdRng, seq::SliceRandom, thread_rng, SeedableRng, }; use reth_primitives::{ - proofs, sign_message, Account, BlockBody, Header, Log, Receipt, Request, Requests, SealedBlock, - SealedHeader, StorageEntry, Transaction, TransactionSigned, Withdrawal, Withdrawals, + proofs, sign_message, Account, BlockBody, Header, Log, Receipt, SealedBlock, SealedHeader, + StorageEntry, Transaction, TransactionSigned, Withdrawal, Withdrawals, }; use secp256k1::{Keypair, Secp256k1}; use std::{ @@ -201,11 +198,6 @@ pub fn random_block(rng: &mut R, number: u64, block_params: BlockParams) let transactions_root = proofs::calculate_transaction_root(&transactions); let ommers_hash = proofs::calculate_ommers_root(&ommers); - let requests = block_params - .requests_count - .map(|count| (0..count).map(|_| random_request(rng)).collect::>()); - let requests_root = requests.as_ref().map(|requests| proofs::calculate_requests_root(requests)); - let withdrawals = block_params.withdrawals_count.map(|count| { (0..count) .map(|i| Withdrawal { @@ -226,7 +218,8 @@ pub fn random_block(rng: &mut R, number: u64, block_params: BlockParams) transactions_root, ommers_hash, base_fee_per_gas: Some(rng.gen()), - requests_root, + // TODO(onbjerg): Proper EIP-7685 request support + requests_hash: None, withdrawals_root, ..Default::default() } @@ -236,12 +229,7 @@ pub fn random_block(rng: &mut R, number: u64, block_params: BlockParams) SealedBlock { header: SealedHeader::new(header, seal), - body: BlockBody { - transactions, - ommers, - withdrawals: withdrawals.map(Withdrawals::new), - requests: requests.map(Requests), - }, + body: BlockBody { transactions, ommers, withdrawals: withdrawals.map(Withdrawals::new) }, } } @@ -470,31 +458,6 @@ pub fn random_log(rng: &mut R, address: Option
, topics_count: O ) } -/// Generate random request -pub fn random_request(rng: &mut R) -> Request { - let request_type = rng.gen_range(0..3); - match request_type { - 0 => Request::DepositRequest(DepositRequest { - pubkey: rng.gen(), - withdrawal_credentials: rng.gen(), - amount: rng.gen(), - signature: rng.gen(), - index: rng.gen(), - }), - 1 => Request::WithdrawalRequest(WithdrawalRequest { - source_address: rng.gen(), - validator_pubkey: rng.gen(), - amount: rng.gen(), - }), - 2 => Request::ConsolidationRequest(ConsolidationRequest { - source_address: rng.gen(), - source_pubkey: rng.gen(), - target_pubkey: rng.gen(), - }), - _ => panic!("invalid request type"), - } -} - #[cfg(test)] mod tests { use super::*; From 3793b907eadc620687f0b65327cdbd4c4e6b1abf Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 19 Oct 2024 15:05:53 +0200 Subject: [PATCH 143/425] chore: better start finish persisted block logs (#11893) Co-authored-by: Oliver --- crates/engine/tree/src/tree/mod.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index c63c8fbe2..02802dff6 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -1145,6 +1145,7 @@ where if blocks_to_persist.is_empty() { debug!(target: "engine::tree", "Returned empty set of blocks to persist"); } else { + debug!(target: "engine::tree", blocks = ?blocks_to_persist.iter().map(|block| block.block.num_hash()).collect::>(), "Persisting blocks"); let (tx, rx) = oneshot::channel(); let _ = self.persistence.save_blocks(blocks_to_persist, tx); self.persistence_state.start(rx); @@ -1173,7 +1174,7 @@ where return Ok(()) }; - trace!(target: "engine::tree", ?last_persisted_block_hash, ?last_persisted_block_number, "Finished persisting, calling finish"); + debug!(target: "engine::tree", ?last_persisted_block_hash, ?last_persisted_block_number, "Finished persisting, calling finish"); self.persistence_state .finish(last_persisted_block_hash, last_persisted_block_number); self.on_new_persisted_block()?; From ddc5ac3fa750580eda224d37c086c4a7443728d4 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Sat, 19 Oct 2024 15:12:28 +0200 Subject: [PATCH 144/425] refactor(rpc): small refactor in `trace_filter` (#11894) --- crates/rpc/rpc/src/trace.rs | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/crates/rpc/rpc/src/trace.rs b/crates/rpc/rpc/src/trace.rs index 8ac532ff3..b9b15b536 100644 --- a/crates/rpc/rpc/src/trace.rs +++ b/crates/rpc/rpc/src/trace.rs @@ -314,13 +314,15 @@ where // add reward traces for all blocks for block in &blocks { if let Some(base_block_reward) = self.calculate_base_block_reward(&block.header)? 
{ - let mut traces = self.extract_reward_traces( - &block.header, - &block.body.ommers, - base_block_reward, + all_traces.extend( + self.extract_reward_traces( + &block.header, + &block.body.ommers, + base_block_reward, + ) + .into_iter() + .filter(|trace| matcher.matches(&trace.trace)), ); - traces.retain(|trace| matcher.matches(&trace.trace)); - all_traces.extend(traces); } else { // no block reward, means we're past the Paris hardfork and don't expect any rewards // because the blocks in ascending order From 1a1aa2f8c3acb4ae32b0d1dadc9221fdf75fabdd Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 19 Oct 2024 15:18:20 +0200 Subject: [PATCH 145/425] feat: add map_pool fn (#11890) --- crates/payload/basic/src/lib.rs | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/crates/payload/basic/src/lib.rs b/crates/payload/basic/src/lib.rs index 7416283c1..70d22250d 100644 --- a/crates/payload/basic/src/lib.rs +++ b/crates/payload/basic/src/lib.rs @@ -799,6 +799,21 @@ impl BuildArguments(self, f: F) -> BuildArguments + where + F: FnOnce(Pool) -> P, + { + BuildArguments { + client: self.client, + pool: f(self.pool), + cached_reads: self.cached_reads, + config: self.config, + cancel: self.cancel, + best_payload: self.best_payload, + } + } } /// A trait for building payloads that encapsulate Ethereum transactions. From 1efa764b34f163e70890bbd54eac64ad2b6adcce Mon Sep 17 00:00:00 2001 From: Dan Cline <6798349+Rjected@users.noreply.github.com> Date: Sat, 19 Oct 2024 09:29:29 -0400 Subject: [PATCH 146/425] chore(engine): rename enveloped associated types to envelope (#11812) Co-authored-by: Matthias Seitz --- crates/e2e-test-utils/src/engine_api.rs | 6 ++-- crates/e2e-test-utils/src/node.rs | 6 ++-- crates/engine/primitives/src/lib.rs | 30 ++++++++++++++++---- crates/ethereum/engine-primitives/src/lib.rs | 6 ++-- crates/optimism/node/src/engine.rs | 6 ++-- crates/rpc/rpc-api/src/engine.rs | 15 ++++++++-- crates/rpc/rpc-engine-api/src/engine_api.rs | 12 ++++---- examples/custom-engine-types/src/main.rs | 6 ++-- 8 files changed, 57 insertions(+), 30 deletions(-) diff --git a/crates/e2e-test-utils/src/engine_api.rs b/crates/e2e-test-utils/src/engine_api.rs index 1b0ff9b54..f4aa8fdf5 100644 --- a/crates/e2e-test-utils/src/engine_api.rs +++ b/crates/e2e-test-utils/src/engine_api.rs @@ -29,7 +29,7 @@ impl EngineApiTestContext { pub async fn get_payload_v3( &self, payload_id: PayloadId, - ) -> eyre::Result { + ) -> eyre::Result { Ok(EngineApiClient::::get_payload_v3(&self.engine_api_client, payload_id).await?) 
} @@ -50,10 +50,10 @@ impl EngineApiTestContext { versioned_hashes: Vec, ) -> eyre::Result where - E::ExecutionPayloadV3: From + PayloadEnvelopeExt, + E::ExecutionPayloadEnvelopeV3: From + PayloadEnvelopeExt, { // setup payload for submission - let envelope_v3: ::ExecutionPayloadV3 = payload.into(); + let envelope_v3: ::ExecutionPayloadEnvelopeV3 = payload.into(); // submit payload to engine api let submission = EngineApiClient::::new_payload_v3( diff --git a/crates/e2e-test-utils/src/node.rs b/crates/e2e-test-utils/src/node.rs index c22913ba2..776a437a5 100644 --- a/crates/e2e-test-utils/src/node.rs +++ b/crates/e2e-test-utils/src/node.rs @@ -88,7 +88,7 @@ where attributes_generator: impl Fn(u64) -> Engine::PayloadBuilderAttributes + Copy, ) -> eyre::Result> where - Engine::ExecutionPayloadV3: From + PayloadEnvelopeExt, + Engine::ExecutionPayloadEnvelopeV3: From + PayloadEnvelopeExt, AddOns::EthApi: EthApiSpec + EthTransactions + TraceExt + FullEthApiTypes, { let mut chain = Vec::with_capacity(length as usize); @@ -113,7 +113,7 @@ where attributes_generator: impl Fn(u64) -> Engine::PayloadBuilderAttributes, ) -> eyre::Result<(Engine::BuiltPayload, Engine::PayloadBuilderAttributes)> where - ::ExecutionPayloadV3: + ::ExecutionPayloadEnvelopeV3: From + PayloadEnvelopeExt, { // trigger new payload building draining the pool @@ -135,7 +135,7 @@ where attributes_generator: impl Fn(u64) -> Engine::PayloadBuilderAttributes, ) -> eyre::Result<(Engine::BuiltPayload, Engine::PayloadBuilderAttributes)> where - ::ExecutionPayloadV3: + ::ExecutionPayloadEnvelopeV3: From + PayloadEnvelopeExt, { let (payload, eth_attr) = self.new_payload(attributes_generator).await?; diff --git a/crates/engine/primitives/src/lib.rs b/crates/engine/primitives/src/lib.rs index 2cf1366eb..fab96b0d1 100644 --- a/crates/engine/primitives/src/lib.rs +++ b/crates/engine/primitives/src/lib.rs @@ -24,9 +24,9 @@ use serde::{de::DeserializeOwned, ser::Serialize}; pub trait EngineTypes: PayloadTypes< BuiltPayload: TryInto - + TryInto - + TryInto - + TryInto, + + TryInto + + TryInto + + TryInto, > + DeserializeOwned + Serialize + 'static @@ -34,11 +34,29 @@ pub trait EngineTypes: /// Execution Payload V1 type. type ExecutionPayloadV1: DeserializeOwned + Serialize + Clone + Unpin + Send + Sync + 'static; /// Execution Payload V2 type. - type ExecutionPayloadV2: DeserializeOwned + Serialize + Clone + Unpin + Send + Sync + 'static; + type ExecutionPayloadEnvelopeV2: DeserializeOwned + + Serialize + + Clone + + Unpin + + Send + + Sync + + 'static; /// Execution Payload V3 type. - type ExecutionPayloadV3: DeserializeOwned + Serialize + Clone + Unpin + Send + Sync + 'static; + type ExecutionPayloadEnvelopeV3: DeserializeOwned + + Serialize + + Clone + + Unpin + + Send + + Sync + + 'static; /// Execution Payload V4 type. - type ExecutionPayloadV4: DeserializeOwned + Serialize + Clone + Unpin + Send + Sync + 'static; + type ExecutionPayloadEnvelopeV4: DeserializeOwned + + Serialize + + Clone + + Unpin + + Send + + Sync + + 'static; } /// Type that validates the payloads sent to the engine. 
diff --git a/crates/ethereum/engine-primitives/src/lib.rs b/crates/ethereum/engine-primitives/src/lib.rs index 034a8c6bf..20a558836 100644 --- a/crates/ethereum/engine-primitives/src/lib.rs +++ b/crates/ethereum/engine-primitives/src/lib.rs @@ -44,9 +44,9 @@ where + TryInto, { type ExecutionPayloadV1 = ExecutionPayloadV1; - type ExecutionPayloadV2 = ExecutionPayloadEnvelopeV2; - type ExecutionPayloadV3 = ExecutionPayloadEnvelopeV3; - type ExecutionPayloadV4 = ExecutionPayloadEnvelopeV4; + type ExecutionPayloadEnvelopeV2 = ExecutionPayloadEnvelopeV2; + type ExecutionPayloadEnvelopeV3 = ExecutionPayloadEnvelopeV3; + type ExecutionPayloadEnvelopeV4 = ExecutionPayloadEnvelopeV4; } /// A default payload type for [`EthEngineTypes`] diff --git a/crates/optimism/node/src/engine.rs b/crates/optimism/node/src/engine.rs index a83f4c696..cec609671 100644 --- a/crates/optimism/node/src/engine.rs +++ b/crates/optimism/node/src/engine.rs @@ -38,9 +38,9 @@ where + TryInto, { type ExecutionPayloadV1 = ExecutionPayloadV1; - type ExecutionPayloadV2 = ExecutionPayloadEnvelopeV2; - type ExecutionPayloadV3 = OpExecutionPayloadEnvelopeV3; - type ExecutionPayloadV4 = OpExecutionPayloadEnvelopeV4; + type ExecutionPayloadEnvelopeV2 = ExecutionPayloadEnvelopeV2; + type ExecutionPayloadEnvelopeV3 = OpExecutionPayloadEnvelopeV3; + type ExecutionPayloadEnvelopeV4 = OpExecutionPayloadEnvelopeV4; } /// A default payload type for [`OptimismEngineTypes`] diff --git a/crates/rpc/rpc-api/src/engine.rs b/crates/rpc/rpc-api/src/engine.rs index e89f7b8d3..458768c38 100644 --- a/crates/rpc/rpc-api/src/engine.rs +++ b/crates/rpc/rpc-api/src/engine.rs @@ -118,7 +118,10 @@ pub trait EngineApi { /// payload build process at the time of receiving this call. Note: /// > Provider software MAY stop the corresponding build process after serving this call. #[method(name = "getPayloadV2")] - async fn get_payload_v2(&self, payload_id: PayloadId) -> RpcResult; + async fn get_payload_v2( + &self, + payload_id: PayloadId, + ) -> RpcResult; /// Post Cancun payload handler which also returns a blobs bundle. /// @@ -128,7 +131,10 @@ pub trait EngineApi { /// payload build process at the time of receiving this call. Note: /// > Provider software MAY stop the corresponding build process after serving this call. #[method(name = "getPayloadV3")] - async fn get_payload_v3(&self, payload_id: PayloadId) -> RpcResult; + async fn get_payload_v3( + &self, + payload_id: PayloadId, + ) -> RpcResult; /// Post Prague payload handler. /// @@ -138,7 +144,10 @@ pub trait EngineApi { /// payload build process at the time of receiving this call. Note: /// > Provider software MAY stop the corresponding build process after serving this call. 
#[method(name = "getPayloadV4")] - async fn get_payload_v4(&self, payload_id: PayloadId) -> RpcResult; + async fn get_payload_v4( + &self, + payload_id: PayloadId, + ) -> RpcResult; /// See also #[method(name = "getPayloadBodiesByHashV1")] diff --git a/crates/rpc/rpc-engine-api/src/engine_api.rs b/crates/rpc/rpc-engine-api/src/engine_api.rs index d0b19a7b4..9450b2c92 100644 --- a/crates/rpc/rpc-engine-api/src/engine_api.rs +++ b/crates/rpc/rpc-engine-api/src/engine_api.rs @@ -289,7 +289,7 @@ where pub async fn get_payload_v2( &self, payload_id: PayloadId, - ) -> EngineApiResult { + ) -> EngineApiResult { // First we fetch the payload attributes to check the timestamp let attributes = self.get_payload_attributes(payload_id).await?; @@ -324,7 +324,7 @@ where pub async fn get_payload_v3( &self, payload_id: PayloadId, - ) -> EngineApiResult { + ) -> EngineApiResult { // First we fetch the payload attributes to check the timestamp let attributes = self.get_payload_attributes(payload_id).await?; @@ -359,7 +359,7 @@ where pub async fn get_payload_v4( &self, payload_id: PayloadId, - ) -> EngineApiResult { + ) -> EngineApiResult { // First we fetch the payload attributes to check the timestamp let attributes = self.get_payload_attributes(payload_id).await?; @@ -778,7 +778,7 @@ where async fn get_payload_v2( &self, payload_id: PayloadId, - ) -> RpcResult { + ) -> RpcResult { trace!(target: "rpc::engine", "Serving engine_getPayloadV2"); let start = Instant::now(); let res = Self::get_payload_v2(self, payload_id).await; @@ -798,7 +798,7 @@ where async fn get_payload_v3( &self, payload_id: PayloadId, - ) -> RpcResult { + ) -> RpcResult { trace!(target: "rpc::engine", "Serving engine_getPayloadV3"); let start = Instant::now(); let res = Self::get_payload_v3(self, payload_id).await; @@ -818,7 +818,7 @@ where async fn get_payload_v4( &self, payload_id: PayloadId, - ) -> RpcResult { + ) -> RpcResult { trace!(target: "rpc::engine", "Serving engine_getPayloadV4"); let start = Instant::now(); let res = Self::get_payload_v4(self, payload_id).await; diff --git a/examples/custom-engine-types/src/main.rs b/examples/custom-engine-types/src/main.rs index 135c4f3f2..46a7d7d9a 100644 --- a/examples/custom-engine-types/src/main.rs +++ b/examples/custom-engine-types/src/main.rs @@ -157,9 +157,9 @@ impl PayloadTypes for CustomEngineTypes { impl EngineTypes for CustomEngineTypes { type ExecutionPayloadV1 = ExecutionPayloadV1; - type ExecutionPayloadV2 = ExecutionPayloadEnvelopeV2; - type ExecutionPayloadV3 = ExecutionPayloadEnvelopeV3; - type ExecutionPayloadV4 = ExecutionPayloadEnvelopeV4; + type ExecutionPayloadEnvelopeV2 = ExecutionPayloadEnvelopeV2; + type ExecutionPayloadEnvelopeV3 = ExecutionPayloadEnvelopeV3; + type ExecutionPayloadEnvelopeV4 = ExecutionPayloadEnvelopeV4; } /// Custom engine validator From c803012085d65eb3111e942eea1c7af42ca75114 Mon Sep 17 00:00:00 2001 From: Oliver Date: Sat, 19 Oct 2024 17:17:14 +0200 Subject: [PATCH 147/425] chore: use `Requests` instead of `Vec` (#11895) --- Cargo.lock | 3 ++- crates/consensus/beacon/Cargo.toml | 1 + crates/consensus/beacon/src/engine/handle.rs | 4 ++-- crates/consensus/beacon/src/engine/message.rs | 4 ++-- crates/consensus/beacon/src/engine/mod.rs | 5 +++-- crates/engine/tree/src/tree/mod.rs | 5 +++-- crates/engine/util/Cargo.toml | 5 ++--- crates/engine/util/src/reorg.rs | 7 ++++--- crates/payload/validator/Cargo.toml | 1 - crates/payload/validator/src/lib.rs | 12 ++++-------- crates/rpc/rpc-api/src/engine.rs | 4 ++-- 
crates/rpc/rpc-engine-api/src/engine_api.rs | 8 ++++---- 12 files changed, 29 insertions(+), 30 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 82f42b07b..b3a39cb58 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6417,6 +6417,7 @@ dependencies = [ name = "reth-beacon-consensus" version = "1.1.0" dependencies = [ + "alloy-eips", "alloy-genesis", "alloy-primitives", "alloy-rpc-types-engine", @@ -7217,6 +7218,7 @@ name = "reth-engine-util" version = "1.1.0" dependencies = [ "alloy-consensus", + "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", "eyre", @@ -8400,7 +8402,6 @@ name = "reth-payload-validator" version = "1.1.0" dependencies = [ "alloy-eips", - "alloy-primitives", "alloy-rpc-types", "reth-chainspec", "reth-primitives", diff --git a/crates/consensus/beacon/Cargo.toml b/crates/consensus/beacon/Cargo.toml index f13668126..192ae2b93 100644 --- a/crates/consensus/beacon/Cargo.toml +++ b/crates/consensus/beacon/Cargo.toml @@ -31,6 +31,7 @@ reth-node-types.workspace = true reth-chainspec = { workspace = true, optional = true } # ethereum +alloy-eips.workspace = true alloy-primitives.workspace = true alloy-rpc-types-engine.workspace = true diff --git a/crates/consensus/beacon/src/engine/handle.rs b/crates/consensus/beacon/src/engine/handle.rs index 1f4445901..bb5c4dee1 100644 --- a/crates/consensus/beacon/src/engine/handle.rs +++ b/crates/consensus/beacon/src/engine/handle.rs @@ -4,7 +4,7 @@ use crate::{ engine::message::OnForkChoiceUpdated, BeaconConsensusEngineEvent, BeaconEngineMessage, BeaconForkChoiceUpdateError, BeaconOnNewPayloadError, }; -use alloy_primitives::Bytes; +use alloy_eips::eip7685::Requests; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ForkchoiceState, ForkchoiceUpdated, PayloadStatus, }; @@ -48,7 +48,7 @@ where &self, payload: ExecutionPayload, cancun_fields: Option, - execution_requests: Option>, + execution_requests: Option, ) -> Result { let (tx, rx) = oneshot::channel(); // HACK(onbjerg): We should have a pectra payload fields struct, this is just a temporary diff --git a/crates/consensus/beacon/src/engine/message.rs b/crates/consensus/beacon/src/engine/message.rs index 45d0c57f4..56328f03d 100644 --- a/crates/consensus/beacon/src/engine/message.rs +++ b/crates/consensus/beacon/src/engine/message.rs @@ -1,5 +1,5 @@ use crate::engine::{error::BeaconOnNewPayloadError, forkchoice::ForkchoiceStatus}; -use alloy_primitives::Bytes; +use alloy_eips::eip7685::Requests; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ForkChoiceUpdateResult, ForkchoiceState, ForkchoiceUpdateError, ForkchoiceUpdated, PayloadId, PayloadStatus, PayloadStatusEnum, @@ -150,7 +150,7 @@ pub enum BeaconEngineMessage { // HACK(onbjerg): We should have a pectra payload fields struct, this is just a temporary // workaround. /// The pectra EIP-7685 execution requests. - execution_requests: Option>, + execution_requests: Option, /// The sender for returning payload status result. 
tx: oneshot::Sender>, }, diff --git a/crates/consensus/beacon/src/engine/mod.rs b/crates/consensus/beacon/src/engine/mod.rs index edd3d6db3..cff648b28 100644 --- a/crates/consensus/beacon/src/engine/mod.rs +++ b/crates/consensus/beacon/src/engine/mod.rs @@ -1,4 +1,5 @@ -use alloy_primitives::{BlockNumber, Bytes, B256}; +use alloy_eips::eip7685::Requests; +use alloy_primitives::{BlockNumber, B256}; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, PayloadStatusEnum, PayloadValidationError, @@ -1087,7 +1088,7 @@ where cancun_fields: Option, // HACK(onbjerg): We should have a pectra payload fields struct, this is just a temporary // workaround. - execution_requests: Option>, + execution_requests: Option, ) -> Result, BeaconOnNewPayloadError> { self.metrics.new_payload_messages.increment(1); diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index 02802dff6..a2abd3f53 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -7,7 +7,7 @@ use crate::{ use alloy_eips::BlockNumHash; use alloy_primitives::{ map::{HashMap, HashSet}, - BlockNumber, Bytes, B256, U256, + BlockNumber, B256, U256, }; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, PayloadStatusEnum, @@ -70,6 +70,7 @@ use crate::{ engine::{EngineApiKind, EngineApiRequest}, tree::metrics::EngineApiMetrics, }; +use alloy_eips::eip7685::Requests; pub use config::TreeConfig; pub use invalid_block_hook::{InvalidBlockHooks, NoopInvalidBlockHook}; pub use persistence_state::PersistenceState; @@ -721,7 +722,7 @@ where &mut self, payload: ExecutionPayload, cancun_fields: Option, - execution_requests: Option>, + execution_requests: Option, ) -> Result, InsertBlockFatalError> { trace!(target: "engine::tree", "invoked new payload"); self.metrics.engine.new_payload_messages.increment(1); diff --git a/crates/engine/util/Cargo.toml b/crates/engine/util/Cargo.toml index c11948b94..35a7e74bb 100644 --- a/crates/engine/util/Cargo.toml +++ b/crates/engine/util/Cargo.toml @@ -27,6 +27,7 @@ revm-primitives.workspace = true reth-trie.workspace = true # alloy +alloy-eips.workspace = true alloy-primitives.workspace = true alloy-rpc-types-engine.workspace = true alloy-consensus.workspace = true @@ -49,6 +50,4 @@ itertools.workspace = true tracing.workspace = true [features] -optimism = [ - "reth-beacon-consensus/optimism", -] +optimism = ["reth-beacon-consensus/optimism"] diff --git a/crates/engine/util/src/reorg.rs b/crates/engine/util/src/reorg.rs index 90b5c90aa..e07b18b92 100644 --- a/crates/engine/util/src/reorg.rs +++ b/crates/engine/util/src/reorg.rs @@ -1,7 +1,8 @@ //! Stream wrapper that simulates reorgs. use alloy_consensus::Transaction; -use alloy_primitives::{Bytes, U256}; +use alloy_eips::eip7685::Requests; +use alloy_primitives::U256; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, }; @@ -250,8 +251,8 @@ fn create_reorg_head( mut depth: usize, next_payload: ExecutionPayload, next_cancun_fields: Option, - next_execution_requests: Option>, -) -> RethResult<(ExecutionPayload, Option, Option>)> + next_execution_requests: Option, +) -> RethResult<(ExecutionPayload, Option, Option)> where Provider: BlockReader + StateProviderFactory, Evm: ConfigureEvm
, diff --git a/crates/payload/validator/Cargo.toml b/crates/payload/validator/Cargo.toml index a96799d7b..619b99f28 100644 --- a/crates/payload/validator/Cargo.toml +++ b/crates/payload/validator/Cargo.toml @@ -19,5 +19,4 @@ reth-rpc-types-compat.workspace = true # alloy alloy-eips.workspace = true -alloy-primitives.workspace = true alloy-rpc-types = { workspace = true, features = ["engine"] } diff --git a/crates/payload/validator/src/lib.rs b/crates/payload/validator/src/lib.rs index fdcd9244a..3ec7b206a 100644 --- a/crates/payload/validator/src/lib.rs +++ b/crates/payload/validator/src/lib.rs @@ -9,7 +9,6 @@ #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] use alloy_eips::eip7685::Requests; -use alloy_primitives::Bytes; use alloy_rpc_types::engine::{ExecutionPayload, MaybeCancunPayloadFields, PayloadError}; use reth_chainspec::EthereumHardforks; use reth_primitives::SealedBlock; @@ -114,17 +113,14 @@ impl ExecutionPayloadValidator { &self, payload: ExecutionPayload, cancun_fields: MaybeCancunPayloadFields, - execution_requests: Option>, + execution_requests: Option, ) -> Result { let expected_hash = payload.block_hash(); // First parse the block - let sealed_block = try_into_block( - payload, - cancun_fields.parent_beacon_block_root(), - execution_requests.map(Requests::new), - )? - .seal_slow(); + let sealed_block = + try_into_block(payload, cancun_fields.parent_beacon_block_root(), execution_requests)? + .seal_slow(); // Ensure the hash included in the payload matches the block hash if expected_hash != sealed_block.hash() { diff --git a/crates/rpc/rpc-api/src/engine.rs b/crates/rpc/rpc-api/src/engine.rs index 458768c38..ddf6d8461 100644 --- a/crates/rpc/rpc-api/src/engine.rs +++ b/crates/rpc/rpc-api/src/engine.rs @@ -3,7 +3,7 @@ //! This contains the `engine_` namespace and the subset of the `eth_` namespace that is exposed to //! the consensus client. -use alloy_eips::{eip4844::BlobAndProofV1, BlockId, BlockNumberOrTag}; +use alloy_eips::{eip4844::BlobAndProofV1, eip7685::Requests, BlockId, BlockNumberOrTag}; use alloy_json_rpc::RpcObject; use alloy_primitives::{Address, BlockHash, Bytes, B256, U256, U64}; use alloy_rpc_types::{ @@ -57,7 +57,7 @@ pub trait EngineApi { payload: ExecutionPayloadV3, versioned_hashes: Vec, parent_beacon_block_root: B256, - execution_requests: Vec, + execution_requests: Requests, ) -> RpcResult; /// See also diff --git a/crates/rpc/rpc-engine-api/src/engine_api.rs b/crates/rpc/rpc-engine-api/src/engine_api.rs index 9450b2c92..ca055a77e 100644 --- a/crates/rpc/rpc-engine-api/src/engine_api.rs +++ b/crates/rpc/rpc-engine-api/src/engine_api.rs @@ -1,8 +1,8 @@ use crate::{ capabilities::EngineCapabilities, metrics::EngineApiMetrics, EngineApiError, EngineApiResult, }; -use alloy_eips::eip4844::BlobAndProofV1; -use alloy_primitives::{BlockHash, BlockNumber, Bytes, B256, U64}; +use alloy_eips::{eip4844::BlobAndProofV1, eip7685::Requests}; +use alloy_primitives::{BlockHash, BlockNumber, B256, U64}; use alloy_rpc_types_engine::{ CancunPayloadFields, ClientVersionV1, ExecutionPayload, ExecutionPayloadBodiesV1, ExecutionPayloadInputV2, ExecutionPayloadV1, ExecutionPayloadV3, ForkchoiceState, @@ -189,7 +189,7 @@ where parent_beacon_block_root: B256, // TODO(onbjerg): Figure out why we even get these here, since we'll check the requests // from execution against the requests root in the header. 
- execution_requests: Vec, + execution_requests: Requests, ) -> EngineApiResult { let payload = ExecutionPayload::from(payload); let payload_or_attrs = @@ -677,7 +677,7 @@ where payload: ExecutionPayloadV3, versioned_hashes: Vec, parent_beacon_block_root: B256, - execution_requests: Vec, + execution_requests: Requests, ) -> RpcResult { trace!(target: "rpc::engine", "Serving engine_newPayloadV4"); let start = Instant::now(); From a78de201b349c10afef2403ea4e6e57fe075f7bf Mon Sep 17 00:00:00 2001 From: Gerson <71728860+Gerson2102@users.noreply.github.com> Date: Sat, 19 Oct 2024 10:01:26 -0600 Subject: [PATCH 148/425] Refactor of state_change functionality (#11878) Co-authored-by: Matthias Seitz --- Cargo.lock | 6 +++--- crates/engine/invalid-block-hooks/src/witness.rs | 5 +++-- crates/engine/util/src/reorg.rs | 6 ++++-- crates/ethereum/evm/src/execute.rs | 2 +- crates/ethereum/evm/src/strategy.rs | 2 +- crates/evm/Cargo.toml | 2 ++ crates/evm/src/lib.rs | 1 + crates/{revm => evm}/src/state_change.rs | 2 ++ crates/optimism/evm/src/execute.rs | 6 ++---- crates/optimism/evm/src/strategy.rs | 2 +- crates/payload/basic/Cargo.toml | 2 +- crates/payload/basic/src/lib.rs | 2 +- crates/revm/Cargo.toml | 2 -- crates/revm/src/lib.rs | 3 --- crates/rpc/rpc-eth-api/src/helpers/pending_block.rs | 11 ++++++----- 15 files changed, 28 insertions(+), 26 deletions(-) rename crates/{revm => evm}/src/state_change.rs (99%) diff --git a/Cargo.lock b/Cargo.lock index b3a39cb58..57dc9176b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6400,12 +6400,12 @@ dependencies = [ "futures-util", "metrics", "reth-chainspec", + "reth-evm", "reth-metrics", "reth-payload-builder", "reth-payload-primitives", "reth-primitives", "reth-provider", - "reth-revm", "reth-tasks", "reth-transaction-pool", "revm", @@ -7427,6 +7427,8 @@ dependencies = [ "parking_lot 0.12.3", "reth-chainspec", "reth-consensus", + "reth-consensus-common", + "reth-ethereum-forks", "reth-execution-errors", "reth-execution-types", "reth-metrics", @@ -8585,8 +8587,6 @@ version = "1.1.0" dependencies = [ "alloy-eips", "alloy-primitives", - "reth-chainspec", - "reth-consensus-common", "reth-ethereum-forks", "reth-execution-errors", "reth-primitives", diff --git a/crates/engine/invalid-block-hooks/src/witness.rs b/crates/engine/invalid-block-hooks/src/witness.rs index ab73a8190..416c4adb4 100644 --- a/crates/engine/invalid-block-hooks/src/witness.rs +++ b/crates/engine/invalid-block-hooks/src/witness.rs @@ -6,14 +6,15 @@ use eyre::OptionExt; use pretty_assertions::Comparison; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_engine_primitives::InvalidBlockHook; -use reth_evm::{system_calls::SystemCaller, ConfigureEvm}; +use reth_evm::{ + state_change::post_block_balance_increments, system_calls::SystemCaller, ConfigureEvm, +}; use reth_primitives::{Header, Receipt, SealedBlockWithSenders, SealedHeader}; use reth_provider::{BlockExecutionOutput, ChainSpecProvider, StateProviderFactory}; use reth_revm::{ database::StateProviderDatabase, db::states::bundle_state::BundleRetention, primitives::{BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg}, - state_change::post_block_balance_increments, DatabaseCommit, StateBuilder, }; use reth_rpc_api::DebugApiClient; diff --git a/crates/engine/util/src/reorg.rs b/crates/engine/util/src/reorg.rs index e07b18b92..85216e32f 100644 --- a/crates/engine/util/src/reorg.rs +++ b/crates/engine/util/src/reorg.rs @@ -12,14 +12,16 @@ use reth_beacon_consensus::{BeaconEngineMessage, BeaconOnNewPayloadError, OnFork use 
reth_engine_primitives::EngineTypes; use reth_errors::{BlockExecutionError, BlockValidationError, RethError, RethResult}; use reth_ethereum_forks::EthereumHardforks; -use reth_evm::{system_calls::SystemCaller, ConfigureEvm}; +use reth_evm::{ + state_change::post_block_withdrawals_balance_increments, system_calls::SystemCaller, + ConfigureEvm, +}; use reth_payload_validator::ExecutionPayloadValidator; use reth_primitives::{proofs, Block, BlockBody, Header, Receipt, Receipts}; use reth_provider::{BlockReader, ExecutionOutcome, ProviderError, StateProviderFactory}; use reth_revm::{ database::StateProviderDatabase, db::{states::bundle_state::BundleRetention, State}, - state_change::post_block_withdrawals_balance_increments, DatabaseCommit, }; use reth_rpc_types_compat::engine::payload::block_to_payload; diff --git a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs index 428458fcd..b4a90d409 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -16,6 +16,7 @@ use reth_evm::{ BatchExecutor, BlockExecutionError, BlockExecutionInput, BlockExecutionOutput, BlockExecutorProvider, BlockValidationError, Executor, ProviderError, }, + state_change::post_block_balance_increments, system_calls::{NoopHook, OnStateHook, SystemCaller}, ConfigureEvm, }; @@ -25,7 +26,6 @@ use reth_prune_types::PruneModes; use reth_revm::{ batch::BlockBatchRecord, db::{states::bundle_state::BundleRetention, State}, - state_change::post_block_balance_increments, Evm, }; use revm_primitives::{ diff --git a/crates/ethereum/evm/src/strategy.rs b/crates/ethereum/evm/src/strategy.rs index 714f673c8..55fbfffc8 100644 --- a/crates/ethereum/evm/src/strategy.rs +++ b/crates/ethereum/evm/src/strategy.rs @@ -16,13 +16,13 @@ use reth_evm::{ BlockExecutionError, BlockExecutionStrategy, BlockExecutionStrategyFactory, BlockValidationError, ProviderError, }, + state_change::post_block_balance_increments, system_calls::{OnStateHook, SystemCaller}, ConfigureEvm, ConfigureEvmEnv, }; use reth_primitives::{BlockWithSenders, Header, Receipt}; use reth_revm::{ db::{states::bundle_state::BundleRetention, BundleState}, - state_change::post_block_balance_increments, Database, DatabaseCommit, State, }; use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, U256}; diff --git a/crates/evm/Cargo.toml b/crates/evm/Cargo.toml index 6081eae42..6c16973b2 100644 --- a/crates/evm/Cargo.toml +++ b/crates/evm/Cargo.toml @@ -14,6 +14,7 @@ workspace = true # reth reth-chainspec.workspace = true reth-consensus.workspace = true +reth-consensus-common.workspace = true reth-execution-errors.workspace = true reth-execution-types.workspace = true reth-metrics = { workspace = true, optional = true } @@ -37,6 +38,7 @@ parking_lot = { workspace = true, optional = true } [dev-dependencies] parking_lot.workspace = true +reth-ethereum-forks.workspace = true [features] default = ["std"] diff --git a/crates/evm/src/lib.rs b/crates/evm/src/lib.rs index 66026a07c..b75feea83 100644 --- a/crates/evm/src/lib.rs +++ b/crates/evm/src/lib.rs @@ -31,6 +31,7 @@ pub mod execute; pub mod metrics; pub mod noop; pub mod provider; +pub mod state_change; pub mod system_calls; #[cfg(any(test, feature = "test-utils"))] diff --git a/crates/revm/src/state_change.rs b/crates/evm/src/state_change.rs similarity index 99% rename from crates/revm/src/state_change.rs rename to crates/evm/src/state_change.rs index afe92561b..2d5209015 100644 --- a/crates/revm/src/state_change.rs +++ b/crates/evm/src/state_change.rs 
@@ -1,3 +1,5 @@ +//! State changes that are not related to transactions. + use alloy_primitives::{map::HashMap, Address, U256}; use reth_chainspec::EthereumHardforks; use reth_consensus_common::calc; diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index 3a86f5bba..d15cdee13 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -14,6 +14,7 @@ use reth_evm::{ BatchExecutor, BlockExecutionError, BlockExecutionInput, BlockExecutionOutput, BlockExecutorProvider, BlockValidationError, Executor, ProviderError, }, + state_change::post_block_balance_increments, system_calls::{NoopHook, OnStateHook, SystemCaller}, ConfigureEvm, }; @@ -22,10 +23,7 @@ use reth_optimism_consensus::validate_block_post_execution; use reth_optimism_forks::OptimismHardfork; use reth_primitives::{BlockWithSenders, Header, Receipt, Receipts, TxType}; use reth_prune_types::PruneModes; -use reth_revm::{ - batch::BlockBatchRecord, db::states::bundle_state::BundleRetention, - state_change::post_block_balance_increments, Evm, State, -}; +use reth_revm::{batch::BlockBatchRecord, db::states::bundle_state::BundleRetention, Evm, State}; use revm_primitives::{ db::{Database, DatabaseCommit}, BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, diff --git a/crates/optimism/evm/src/strategy.rs b/crates/optimism/evm/src/strategy.rs index 199a8a4d3..c626bb665 100644 --- a/crates/optimism/evm/src/strategy.rs +++ b/crates/optimism/evm/src/strategy.rs @@ -12,6 +12,7 @@ use reth_evm::{ BlockExecutionError, BlockExecutionStrategy, BlockExecutionStrategyFactory, BlockValidationError, ProviderError, }, + state_change::post_block_balance_increments, system_calls::{OnStateHook, SystemCaller}, ConfigureEvm, ConfigureEvmEnv, }; @@ -21,7 +22,6 @@ use reth_optimism_forks::OptimismHardfork; use reth_primitives::{BlockWithSenders, Header, Receipt, TxType}; use reth_revm::{ db::{states::bundle_state::BundleRetention, BundleState}, - state_change::post_block_balance_increments, Database, State, }; use revm_primitives::{ diff --git a/crates/payload/basic/Cargo.toml b/crates/payload/basic/Cargo.toml index f201df0c1..904776889 100644 --- a/crates/payload/basic/Cargo.toml +++ b/crates/payload/basic/Cargo.toml @@ -15,12 +15,12 @@ workspace = true # reth reth-chainspec.workspace = true reth-primitives.workspace = true -reth-revm.workspace = true reth-transaction-pool.workspace = true reth-provider.workspace = true reth-payload-builder.workspace = true reth-payload-primitives.workspace = true reth-tasks.workspace = true +reth-evm.workspace = true # ethereum alloy-rlp.workspace = true diff --git a/crates/payload/basic/src/lib.rs b/crates/payload/basic/src/lib.rs index 70d22250d..fcc8be9a8 100644 --- a/crates/payload/basic/src/lib.rs +++ b/crates/payload/basic/src/lib.rs @@ -14,6 +14,7 @@ use alloy_primitives::{Bytes, B256, U256}; use futures_core::ready; use futures_util::FutureExt; use reth_chainspec::{ChainSpec, EthereumHardforks}; +use reth_evm::state_change::post_block_withdrawals_balance_increments; use reth_payload_builder::{ database::CachedReads, KeepPayloadJobAlive, PayloadId, PayloadJob, PayloadJobGenerator, }; @@ -27,7 +28,6 @@ use reth_primitives::{ use reth_provider::{ BlockReaderIdExt, BlockSource, CanonStateNotification, ProviderError, StateProviderFactory, }; -use reth_revm::state_change::post_block_withdrawals_balance_increments; use reth_tasks::TaskSpawner; use reth_transaction_pool::TransactionPool; use revm::{Database, State}; diff --git 
a/crates/revm/Cargo.toml b/crates/revm/Cargo.toml index 5772af0dc..668abb79e 100644 --- a/crates/revm/Cargo.toml +++ b/crates/revm/Cargo.toml @@ -13,11 +13,9 @@ workspace = true [dependencies] # reth -reth-chainspec.workspace = true reth-primitives.workspace = true reth-storage-errors.workspace = true reth-execution-errors.workspace = true -reth-consensus-common.workspace = true reth-prune-types.workspace = true reth-storage-api.workspace = true reth-trie = { workspace = true, optional = true } diff --git a/crates/revm/src/lib.rs b/crates/revm/src/lib.rs index 5515357d0..02eb182ee 100644 --- a/crates/revm/src/lib.rs +++ b/crates/revm/src/lib.rs @@ -16,9 +16,6 @@ pub mod database; pub mod batch; -/// State changes that are not related to transactions. -pub mod state_change; - /// Common test helpers #[cfg(any(test, feature = "test-utils"))] pub mod test_utils; diff --git a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs index 5e12e41e5..407ddf187 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs @@ -11,7 +11,10 @@ use alloy_primitives::{BlockNumber, B256, U256}; use alloy_rpc_types::BlockNumberOrTag; use futures::Future; use reth_chainspec::{EthChainSpec, EthereumHardforks}; -use reth_evm::{system_calls::SystemCaller, ConfigureEvm, ConfigureEvmEnv}; +use reth_evm::{ + state_change::post_block_withdrawals_balance_increments, system_calls::SystemCaller, + ConfigureEvm, ConfigureEvmEnv, +}; use reth_execution_types::ExecutionOutcome; use reth_primitives::{ constants::{eip4844::MAX_DATA_GAS_PER_BLOCK, BEACON_NONCE}, @@ -27,9 +30,7 @@ use reth_provider::{ BlockReader, BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, ProviderError, ReceiptProvider, StateProviderFactory, }; -use reth_revm::{ - database::StateProviderDatabase, state_change::post_block_withdrawals_balance_increments, -}; +use reth_revm::database::StateProviderDatabase; use reth_rpc_eth_types::{EthApiError, PendingBlock, PendingBlockEnv, PendingBlockEnvOrigin}; use reth_transaction_pool::{BestTransactionsAttributes, TransactionPool}; use reth_trie::HashedPostState; @@ -157,7 +158,7 @@ pub trait LoadPendingBlock: EthApiTypes { pending.origin.header().hash() == pending_block.block.parent_hash && now <= pending_block.expires_at { - return Ok(Some((pending_block.block.clone(), pending_block.receipts.clone()))) + return Ok(Some((pending_block.block.clone(), pending_block.receipts.clone()))); } } From f8969cbbc2afc9c77b02ac2711f2ba7fc83e2c95 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 19 Oct 2024 18:14:02 +0200 Subject: [PATCH 149/425] docs: add hardfork checklist (#11897) Co-authored-by: Oliver --- HARDFORK-CHECKLIST.md | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 HARDFORK-CHECKLIST.md diff --git a/HARDFORK-CHECKLIST.md b/HARDFORK-CHECKLIST.md new file mode 100644 index 000000000..80ebfc20c --- /dev/null +++ b/HARDFORK-CHECKLIST.md @@ -0,0 +1,21 @@ +# Non-exhaustive checklist for integrating new changes for an upcoming hard fork/devnet + +## Introducing new EIP types or changes to primitive types + +- Make required changes to primitive data structures on [alloy](https://github.com/alloy-rs/alloy) +- All new EIP data structures/constants/helpers etc. go into the `alloy-eips` crate at first. 
+- New transaction types go into `alloy-consensus` +- If there are changes to existing data structures, such as `Header` or `Block`, apply them to the types in `alloy-consensus` (e.g. new `request_hashes` field in Prague) + +## Engine API + +- If there are changes to the engine API (e.g. a new `engine_newPayloadVx` and `engine_getPayloadVx` pair) add the new types to the `alloy-rpc-types-engine` crate. +- If there are new parameters to the `engine_newPayloadVx` endpoint, add them to the `ExecutionPayloadSidecar` container type. This types contains all additional parameters that are required to convert an `ExecutionPayload` to an EL block. + +## Reth changes + +### Updates to the engine API + +- Add new endpoints to the `EngineApi` trait and implement endpoints. +- Update the `ExceuctionPayload` + `ExecutionPayloadSidecar` to `Block` conversion if there are any additional parameters. +- Update version specific validation checks in the `EngineValidator` trait. \ No newline at end of file From e2ecb6224dae896653ac2f624abc46b328b7a88b Mon Sep 17 00:00:00 2001 From: Hai | RISE <150876604+hai-rise@users.noreply.github.com> Date: Sat, 19 Oct 2024 23:54:07 +0700 Subject: [PATCH 150/425] chore: remove unused deps (#11898) --- Cargo.lock | 52 --------------------- bin/reth/Cargo.toml | 1 - book/sources/exex/remote/Cargo.toml | 10 ++-- crates/e2e-test-utils/Cargo.toml | 8 +--- crates/engine/tree/Cargo.toml | 3 +- crates/ethereum-forks/Cargo.toml | 1 - crates/ethereum/node/Cargo.toml | 2 - crates/exex/exex/Cargo.toml | 6 ++- crates/net/network-types/Cargo.toml | 2 +- crates/node/core/Cargo.toml | 1 - crates/node/metrics/Cargo.toml | 1 - crates/optimism/node/Cargo.toml | 13 ------ crates/optimism/primitives/Cargo.toml | 1 - crates/primitives/Cargo.toml | 1 - crates/rpc/rpc-api/Cargo.toml | 5 +- crates/rpc/rpc-builder/Cargo.toml | 2 - crates/stages/stages/Cargo.toml | 7 ++- crates/static-file/static-file/Cargo.toml | 3 -- crates/storage/codecs/Cargo.toml | 2 - crates/storage/db-models/Cargo.toml | 8 +--- crates/storage/db/Cargo.toml | 1 - crates/trie/db/Cargo.toml | 11 ----- crates/trie/parallel/Cargo.toml | 7 ++- crates/trie/sparse/Cargo.toml | 6 --- crates/trie/trie/Cargo.toml | 7 --- examples/custom-rlpx-subprotocol/Cargo.toml | 2 - examples/polygon-p2p/Cargo.toml | 1 - 27 files changed, 23 insertions(+), 141 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 57dc9176b..62ff16bb9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2881,8 +2881,6 @@ dependencies = [ "reth-network", "reth-network-api", "reth-node-ethereum", - "reth-primitives", - "reth-provider", "tokio", "tokio-stream", "tracing", @@ -2972,7 +2970,6 @@ dependencies = [ "reth-discv4", "reth-network", "reth-primitives", - "reth-provider", "reth-tracing", "secp256k1", "serde_json", @@ -6312,7 +6309,6 @@ dependencies = [ "reth-consensus-common", "reth-db", "reth-db-api", - "reth-discv4", "reth-downloaders", "reth-engine-util", "reth-errors", @@ -6708,7 +6704,6 @@ dependencies = [ "alloy-eips", "alloy-genesis", "alloy-primitives", - "alloy-rlp", "alloy-trie", "arbitrary", "bytes", @@ -6716,7 +6711,6 @@ dependencies = [ "op-alloy-consensus", "proptest", "proptest-arbitrary-interop", - "rand 0.8.5", "reth-codecs-derive", "serde", "serde_json", @@ -6817,7 +6811,6 @@ dependencies = [ "paste", "pprof", "proptest", - "rand 0.8.5", "reth-db-api", "reth-fs-util", "reth-libmdbx", @@ -7041,22 +7034,16 @@ dependencies = [ "eyre", "futures-util", "jsonrpsee", - "jsonrpsee-types", "op-alloy-rpc-types-engine", "reth", "reth-chainspec", "reth-db", - 
"reth-engine-local", "reth-network-peers", "reth-node-builder", - "reth-node-ethereum", "reth-payload-builder", "reth-payload-primitives", - "reth-primitives", "reth-provider", - "reth-rpc", "reth-rpc-layer", - "reth-rpc-types-compat", "reth-stages-types", "reth-tokio-util", "reth-tracing", @@ -7177,7 +7164,6 @@ dependencies = [ "assert_matches", "futures", "metrics", - "rand 0.8.5", "reth-beacon-consensus", "reth-blockchain-tree", "reth-blockchain-tree-api", @@ -7515,7 +7501,6 @@ dependencies = [ "reth-chain-state", "reth-chainspec", "reth-config", - "reth-db-api", "reth-db-common", "reth-evm", "reth-evm-ethereum", @@ -7984,7 +7969,6 @@ dependencies = [ "serde", "shellexpand", "strum", - "tempfile", "thiserror", "tokio", "toml", @@ -8001,7 +7985,6 @@ dependencies = [ "alloy-primitives", "eyre", "futures", - "futures-util", "reth", "reth-auto-seal-consensus", "reth-basic-payload-builder", @@ -8017,7 +8000,6 @@ dependencies = [ "reth-network", "reth-node-api", "reth-node-builder", - "reth-node-core", "reth-payload-builder", "reth-primitives", "reth-provider", @@ -8066,7 +8048,6 @@ dependencies = [ "metrics-util", "procfs 0.16.0", "reqwest", - "reth-chainspec", "reth-db-api", "reth-metrics", "reth-provider", @@ -8215,15 +8196,11 @@ dependencies = [ "alloy-genesis", "alloy-primitives", "alloy-rpc-types-engine", - "async-trait", "clap", "eyre", - "jsonrpsee", - "jsonrpsee-types", "op-alloy-consensus", "op-alloy-rpc-types-engine", "parking_lot 0.12.3", - "reqwest", "reth", "reth-auto-seal-consensus", "reth-basic-payload-builder", @@ -8231,7 +8208,6 @@ dependencies = [ "reth-chainspec", "reth-consensus", "reth-db", - "reth-discv5", "reth-e2e-test-utils", "reth-engine-local", "reth-evm", @@ -8248,18 +8224,12 @@ dependencies = [ "reth-primitives", "reth-provider", "reth-revm", - "reth-rpc", - "reth-rpc-eth-api", - "reth-rpc-eth-types", - "reth-rpc-types-compat", "reth-tracing", "reth-transaction-pool", "revm", "serde", "serde_json", - "thiserror", "tokio", - "tracing", ] [[package]] @@ -8303,7 +8273,6 @@ dependencies = [ "alloy-consensus", "alloy-primitives", "reth-primitives", - "reth-primitives-traits", ] [[package]] @@ -8688,7 +8657,6 @@ dependencies = [ "reth-network-peers", "reth-primitives", "reth-rpc-eth-api", - "serde_json", ] [[package]] @@ -8736,7 +8704,6 @@ dependencies = [ "reth-metrics", "reth-network-api", "reth-network-peers", - "reth-node-api", "reth-node-core", "reth-payload-builder", "reth-primitives", @@ -8750,7 +8717,6 @@ dependencies = [ "reth-rpc-server-types", "reth-rpc-types-compat", "reth-tasks", - "reth-tokio-util", "reth-tracing", "reth-transaction-pool", "serde", @@ -8973,7 +8939,6 @@ dependencies = [ "reth-testing-utils", "reth-trie", "reth-trie-db", - "serde_json", "tempfile", "thiserror", "tokio", @@ -9033,11 +8998,8 @@ dependencies = [ "assert_matches", "parking_lot 0.12.3", "rayon", - "reth-chainspec", "reth-db", "reth-db-api", - "reth-nippy-jar", - "reth-node-types", "reth-provider", "reth-prune-types", "reth-stages", @@ -9200,13 +9162,11 @@ dependencies = [ "auto_impl", "bincode", "criterion", - "derive_more 1.0.0", "itertools 0.13.0", "metrics", "proptest", "proptest-arbitrary-interop", "rayon", - "reth-chainspec", "reth-execution-errors", "reth-metrics", "reth-primitives", @@ -9217,7 +9177,6 @@ dependencies = [ "serde", "serde_json", "serde_with", - "tokio", "tracing", "triehash", ] @@ -9253,22 +9212,17 @@ dependencies = [ "alloy-consensus", "alloy-primitives", "alloy-rlp", - "auto_impl", "derive_more 1.0.0", - "itertools 0.13.0", "metrics", "proptest", 
"proptest-arbitrary-interop", - "rayon", "reth-chainspec", "reth-db", "reth-db-api", "reth-execution-errors", "reth-metrics", - "reth-node-types", "reth-primitives", "reth-provider", - "reth-stages-types", "reth-storage-errors", "reth-trie", "reth-trie-common", @@ -9276,8 +9230,6 @@ dependencies = [ "serde", "serde_json", "similar-asserts", - "tokio", - "tokio-stream", "tracing", "triehash", ] @@ -9297,7 +9249,6 @@ dependencies = [ "rand 0.8.5", "rayon", "reth-db", - "reth-db-api", "reth-execution-errors", "reth-metrics", "reth-primitives", @@ -9321,15 +9272,12 @@ dependencies = [ "pretty_assertions", "proptest", "rand 0.8.5", - "rayon", - "reth-primitives", "reth-testing-utils", "reth-tracing", "reth-trie", "reth-trie-common", "smallvec", "thiserror", - "tracing", ] [[package]] diff --git a/bin/reth/Cargo.toml b/bin/reth/Cargo.toml index 476f9cd5c..8b2b77dd6 100644 --- a/bin/reth/Cargo.toml +++ b/bin/reth/Cargo.toml @@ -96,7 +96,6 @@ backon.workspace = true similar-asserts.workspace = true [dev-dependencies] -reth-discv4.workspace = true tempfile.workspace = true [features] diff --git a/book/sources/exex/remote/Cargo.toml b/book/sources/exex/remote/Cargo.toml index 6eeb848ca..6cca3a841 100644 --- a/book/sources/exex/remote/Cargo.toml +++ b/book/sources/exex/remote/Cargo.toml @@ -6,9 +6,11 @@ edition = "2021" [dependencies] # reth reth = { git = "https://github.com/paradigmxyz/reth.git" } -reth-exex = { git = "https://github.com/paradigmxyz/reth.git", features = ["serde"] } -reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth.git"} -reth-node-api = { git = "https://github.com/paradigmxyz/reth.git"} +reth-exex = { git = "https://github.com/paradigmxyz/reth.git", features = [ + "serde", +] } +reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth.git" } +reth-node-api = { git = "https://github.com/paradigmxyz/reth.git" } reth-tracing = { git = "https://github.com/paradigmxyz/reth.git" } # async @@ -49,4 +51,4 @@ path = "src/exex.rs" [[bin]] name = "consumer" -path = "src/consumer.rs" \ No newline at end of file +path = "src/consumer.rs" diff --git a/crates/e2e-test-utils/Cargo.toml b/crates/e2e-test-utils/Cargo.toml index 2742d7040..9fa3e2b60 100644 --- a/crates/e2e-test-utils/Cargo.toml +++ b/crates/e2e-test-utils/Cargo.toml @@ -13,11 +13,8 @@ workspace = true [dependencies] reth.workspace = true reth-chainspec.workspace = true -reth-engine-local.workspace = true -reth-primitives.workspace = true reth-tracing.workspace = true reth-db = { workspace = true, features = ["test-utils"] } -reth-rpc.workspace = true reth-rpc-layer.workspace = true reth-payload-builder = { workspace = true, features = ["test-utils"] } reth-payload-primitives.workspace = true @@ -26,11 +23,8 @@ reth-node-builder = { workspace = true, features = ["test-utils"] } reth-tokio-util.workspace = true reth-stages-types.workspace = true reth-network-peers.workspace = true -reth-node-ethereum.workspace = true -reth-rpc-types-compat.workspace = true # rpc -jsonrpsee-types.workspace = true jsonrpsee.workspace = true # ethereum @@ -48,4 +42,4 @@ alloy-signer-local = { workspace = true, features = ["mnemonic"] } alloy-rpc-types.workspace = true alloy-network.workspace = true alloy-consensus = { workspace = true, features = ["kzg"] } -tracing.workspace = true \ No newline at end of file +tracing.workspace = true diff --git a/crates/engine/tree/Cargo.toml b/crates/engine/tree/Cargo.toml index 91c9cd542..3a618f4fd 100644 --- a/crates/engine/tree/Cargo.toml +++ b/crates/engine/tree/Cargo.toml @@ -76,7 +76,6 
@@ reth-chainspec.workspace = true alloy-rlp.workspace = true assert_matches.workspace = true -rand.workspace = true [features] test-utils = [ @@ -86,5 +85,5 @@ test-utils = [ "reth-prune-types", "reth-stages/test-utils", "reth-static-file", - "reth-tracing" + "reth-tracing", ] diff --git a/crates/ethereum-forks/Cargo.toml b/crates/ethereum-forks/Cargo.toml index 62ea234cd..7b4b6c53c 100644 --- a/crates/ethereum-forks/Cargo.toml +++ b/crates/ethereum-forks/Cargo.toml @@ -35,7 +35,6 @@ auto_impl.workspace = true [dev-dependencies] arbitrary = { workspace = true, features = ["derive"] } -proptest.workspace = true alloy-consensus.workspace = true [features] diff --git a/crates/ethereum/node/Cargo.toml b/crates/ethereum/node/Cargo.toml index 213071cbf..29093adc8 100644 --- a/crates/ethereum/node/Cargo.toml +++ b/crates/ethereum/node/Cargo.toml @@ -43,14 +43,12 @@ reth-chainspec.workspace = true reth-db.workspace = true reth-exex.workspace = true reth-node-api.workspace = true -reth-node-core.workspace = true reth-e2e-test-utils.workspace = true reth-tasks.workspace = true futures.workspace = true alloy-primitives.workspace = true alloy-genesis.workspace = true tokio.workspace = true -futures-util.workspace = true serde_json.workspace = true alloy-consensus.workspace = true diff --git a/crates/exex/exex/Cargo.toml b/crates/exex/exex/Cargo.toml index 6a3815e40..27a9d1576 100644 --- a/crates/exex/exex/Cargo.toml +++ b/crates/exex/exex/Cargo.toml @@ -17,7 +17,10 @@ reth-chain-state.workspace = true reth-chainspec.workspace = true reth-config.workspace = true reth-evm.workspace = true -reth-exex-types = { workspace = true, features = ["serde", "serde-bincode-compat"] } +reth-exex-types = { workspace = true, features = [ + "serde", + "serde-bincode-compat", +] } reth-fs-util.workspace = true reth-metrics.workspace = true reth-node-api.workspace = true @@ -51,7 +54,6 @@ tracing.workspace = true [dev-dependencies] reth-blockchain-tree.workspace = true -reth-db-api.workspace = true reth-db-common.workspace = true reth-evm-ethereum.workspace = true reth-node-api.workspace = true diff --git a/crates/net/network-types/Cargo.toml b/crates/net/network-types/Cargo.toml index 97c8e65cb..c9b8fdd5b 100644 --- a/crates/net/network-types/Cargo.toml +++ b/crates/net/network-types/Cargo.toml @@ -22,7 +22,7 @@ serde = { workspace = true, optional = true } humantime-serde = { workspace = true, optional = true } serde_json = { workspace = true } -# misc +# misc tracing.workspace = true [features] diff --git a/crates/node/core/Cargo.toml b/crates/node/core/Cargo.toml index d7d95751c..a6ae1db5e 100644 --- a/crates/node/core/Cargo.toml +++ b/crates/node/core/Cargo.toml @@ -74,7 +74,6 @@ futures.workspace = true # test vectors generation proptest.workspace = true tokio.workspace = true -tempfile.workspace = true [features] optimism = ["reth-primitives/optimism"] diff --git a/crates/node/metrics/Cargo.toml b/crates/node/metrics/Cargo.toml index 76a3a7f66..9efdbd495 100644 --- a/crates/node/metrics/Cargo.toml +++ b/crates/node/metrics/Cargo.toml @@ -35,7 +35,6 @@ procfs = "0.16.0" [dev-dependencies] reqwest.workspace = true -reth-chainspec.workspace = true socket2 = { version = "0.5", default-features = false } reth-provider = { workspace = true, features = ["test-utils"] } diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index 8e359e602..fbe787d6e 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -19,7 +19,6 @@ reth-payload-builder.workspace = true 
reth-auto-seal-consensus.workspace = true reth-basic-payload-builder.workspace = true reth-consensus.workspace = true -reth-rpc-types-compat.workspace = true reth-node-api.workspace = true reth-node-builder.workspace = true reth-tracing.workspace = true @@ -29,10 +28,6 @@ reth-network.workspace = true reth-evm.workspace = true reth-revm = { workspace = true, features = ["std"] } reth-beacon-consensus.workspace = true -reth-discv5.workspace = true -reth-rpc-eth-types.workspace = true -reth-rpc-eth-api.workspace = true -reth-rpc.workspace = true # op-reth reth-optimism-payload-builder.workspace = true @@ -51,21 +46,13 @@ alloy-primitives.workspace = true op-alloy-rpc-types-engine.workspace = true alloy-rpc-types-engine.workspace = true -# async -async-trait.workspace = true -reqwest = { workspace = true, features = ["rustls-tls-native-roots"] } -tracing.workspace = true - # misc clap.workspace = true serde.workspace = true eyre.workspace = true parking_lot.workspace = true -thiserror.workspace = true # rpc -jsonrpsee.workspace = true -jsonrpsee-types.workspace = true serde_json.workspace = true [dev-dependencies] diff --git a/crates/optimism/primitives/Cargo.toml b/crates/optimism/primitives/Cargo.toml index 2054de730..a2d4c20a8 100644 --- a/crates/optimism/primitives/Cargo.toml +++ b/crates/optimism/primitives/Cargo.toml @@ -13,6 +13,5 @@ workspace = true [dependencies] reth-primitives.workspace = true -reth-primitives-traits.workspace = true alloy-primitives.workspace = true alloy-consensus.workspace = true diff --git a/crates/primitives/Cargo.toml b/crates/primitives/Cargo.toml index 05ccd9081..5661fb8f8 100644 --- a/crates/primitives/Cargo.toml +++ b/crates/primitives/Cargo.toml @@ -75,7 +75,6 @@ alloy-genesis.workspace = true arbitrary = { workspace = true, features = ["derive"] } assert_matches.workspace = true bincode.workspace = true -modular-bitfield.workspace = true proptest-arbitrary-interop.workspace = true proptest.workspace = true rand.workspace = true diff --git a/crates/rpc/rpc-api/Cargo.toml b/crates/rpc/rpc-api/Cargo.toml index 6e9e469ec..363e22955 100644 --- a/crates/rpc/rpc-api/Cargo.toml +++ b/crates/rpc/rpc-api/Cargo.toml @@ -37,12 +37,9 @@ alloy-rpc-types-engine.workspace = true # misc jsonrpsee = { workspace = true, features = ["server", "macros"] } -[dev-dependencies] -serde_json.workspace = true - [features] client = [ "jsonrpsee/client", "jsonrpsee/async-client", - "reth-rpc-eth-api/client" + "reth-rpc-eth-api/client", ] diff --git a/crates/rpc/rpc-builder/Cargo.toml b/crates/rpc/rpc-builder/Cargo.toml index 817d2a3d7..cc72c2ebf 100644 --- a/crates/rpc/rpc-builder/Cargo.toml +++ b/crates/rpc/rpc-builder/Cargo.toml @@ -64,8 +64,6 @@ reth-rpc-api = { workspace = true, features = ["client"] } reth-rpc-engine-api.workspace = true reth-tracing.workspace = true reth-transaction-pool = { workspace = true, features = ["test-utils"] } -reth-tokio-util.workspace = true -reth-node-api.workspace = true reth-rpc-types-compat.workspace = true alloy-primitives.workspace = true diff --git a/crates/stages/stages/Cargo.toml b/crates/stages/stages/Cargo.toml index 3d4227d8a..81f35a4b3 100644 --- a/crates/stages/stages/Cargo.toml +++ b/crates/stages/stages/Cargo.toml @@ -24,7 +24,9 @@ reth-evm.workspace = true reth-exex.workspace = true reth-network-p2p.workspace = true reth-primitives = { workspace = true, features = ["secp256k1"] } -reth-primitives-traits = { workspace = true, features = ["serde-bincode-compat"] } +reth-primitives-traits = { workspace = true, features = [ + 
"serde-bincode-compat", +] } reth-provider.workspace = true reth-execution-types.workspace = true reth-prune.workspace = true @@ -82,9 +84,6 @@ tempfile.workspace = true # Stage benchmarks criterion = { workspace = true, features = ["async_tokio"] } -# io -serde_json.workspace = true - [target.'cfg(not(target_os = "windows"))'.dev-dependencies] pprof = { workspace = true, features = [ "flamegraph", diff --git a/crates/static-file/static-file/Cargo.toml b/crates/static-file/static-file/Cargo.toml index 8fa89e12e..d22b116cd 100644 --- a/crates/static-file/static-file/Cargo.toml +++ b/crates/static-file/static-file/Cargo.toml @@ -13,17 +13,14 @@ workspace = true [dependencies] # reth -reth-chainspec.workspace = true reth-db.workspace = true reth-db-api.workspace = true reth-provider.workspace = true reth-storage-errors.workspace = true -reth-nippy-jar.workspace = true reth-tokio-util.workspace = true reth-prune-types.workspace = true reth-static-file-types.workspace = true reth-stages-types.workspace = true -reth-node-types.workspace = true alloy-primitives.workspace = true diff --git a/crates/storage/codecs/Cargo.toml b/crates/storage/codecs/Cargo.toml index 640ec8c95..21a1897f1 100644 --- a/crates/storage/codecs/Cargo.toml +++ b/crates/storage/codecs/Cargo.toml @@ -39,8 +39,6 @@ alloy-primitives = { workspace = true, features = [ "rand", ] } alloy-consensus = { workspace = true, features = ["arbitrary"] } -alloy-rlp.workspace = true -rand.workspace = true test-fuzz.workspace = true serde_json.workspace = true diff --git a/crates/storage/db-models/Cargo.toml b/crates/storage/db-models/Cargo.toml index 9bcd54f38..492178775 100644 --- a/crates/storage/db-models/Cargo.toml +++ b/crates/storage/db-models/Cargo.toml @@ -35,15 +35,9 @@ proptest = { workspace = true, optional = true } reth-primitives = { workspace = true, features = ["arbitrary"] } reth-codecs.workspace = true -arbitrary = { workspace = true, features = ["derive"] } proptest-arbitrary-interop.workspace = true -proptest.workspace = true test-fuzz.workspace = true [features] test-utils = ["arbitrary"] -arbitrary = [ - "reth-primitives/arbitrary", - "dep:arbitrary", - "dep:proptest", -] +arbitrary = ["reth-primitives/arbitrary", "dep:arbitrary", "dep:proptest"] diff --git a/crates/storage/db/Cargo.toml b/crates/storage/db/Cargo.toml index a075f7724..356672f25 100644 --- a/crates/storage/db/Cargo.toml +++ b/crates/storage/db/Cargo.toml @@ -58,7 +58,6 @@ strum = { workspace = true, features = ["derive"], optional = true } [dev-dependencies] # reth libs with arbitrary reth-primitives = { workspace = true, features = ["arbitrary"] } -rand.workspace = true serde_json.workspace = true tempfile.workspace = true test-fuzz.workspace = true diff --git a/crates/trie/db/Cargo.toml b/crates/trie/db/Cargo.toml index a0e1acbce..e75b0456e 100644 --- a/crates/trie/db/Cargo.toml +++ b/crates/trie/db/Cargo.toml @@ -17,7 +17,6 @@ reth-primitives.workspace = true reth-execution-errors.workspace = true reth-db.workspace = true reth-db-api.workspace = true -reth-stages-types.workspace = true reth-storage-errors.workspace = true reth-trie-common.workspace = true reth-trie.workspace = true @@ -32,10 +31,7 @@ alloy-primitives.workspace = true tracing.workspace = true # misc -rayon.workspace = true derive_more.workspace = true -auto_impl.workspace = true -itertools.workspace = true # `metrics` feature reth-metrics = { workspace = true, optional = true } @@ -56,7 +52,6 @@ reth-provider = { workspace = true, features = ["test-utils"] } 
reth-storage-errors.workspace = true reth-trie-common = { workspace = true, features = ["test-utils", "arbitrary"] } reth-trie = { workspace = true, features = ["test-utils"] } -reth-node-types.workspace = true alloy-consensus.workspace = true @@ -66,12 +61,6 @@ triehash = "0.8" # misc proptest.workspace = true proptest-arbitrary-interop.workspace = true -tokio = { workspace = true, default-features = false, features = [ - "sync", - "rt", - "macros", -] } -tokio-stream.workspace = true serde_json.workspace = true similar-asserts.workspace = true diff --git a/crates/trie/parallel/Cargo.toml b/crates/trie/parallel/Cargo.toml index 64a4644bd..cc35fe9f9 100644 --- a/crates/trie/parallel/Cargo.toml +++ b/crates/trie/parallel/Cargo.toml @@ -15,7 +15,6 @@ workspace = true # reth reth-primitives.workspace = true reth-db.workspace = true -reth-db-api.workspace = true reth-trie.workspace = true reth-trie-db.workspace = true reth-execution-errors.workspace = true @@ -46,7 +45,11 @@ reth-trie = { workspace = true, features = ["test-utils"] } # misc rand.workspace = true -tokio = { workspace = true, default-features = false, features = ["sync", "rt", "macros"] } +tokio = { workspace = true, default-features = false, features = [ + "sync", + "rt", + "macros", +] } rayon.workspace = true criterion = { workspace = true, features = ["async_tokio"] } proptest.workspace = true diff --git a/crates/trie/sparse/Cargo.toml b/crates/trie/sparse/Cargo.toml index 4ba6ed0f2..26d036f57 100644 --- a/crates/trie/sparse/Cargo.toml +++ b/crates/trie/sparse/Cargo.toml @@ -14,7 +14,6 @@ workspace = true [dependencies] # reth -reth-primitives.workspace = true reth-tracing.workspace = true reth-trie-common.workspace = true reth-trie.workspace = true @@ -23,16 +22,11 @@ reth-trie.workspace = true alloy-primitives.workspace = true alloy-rlp.workspace = true -# tracing -tracing.workspace = true - # misc -rayon.workspace = true smallvec = { workspace = true, features = ["const_new"] } thiserror.workspace = true [dev-dependencies] -reth-primitives = { workspace = true, features = ["test-utils", "arbitrary"] } reth-testing-utils.workspace = true reth-trie = { workspace = true, features = ["test-utils"] } reth-trie-common = { workspace = true, features = ["test-utils", "arbitrary"] } diff --git a/crates/trie/trie/Cargo.toml b/crates/trie/trie/Cargo.toml index 31b5ac3e2..77fc57397 100644 --- a/crates/trie/trie/Cargo.toml +++ b/crates/trie/trie/Cargo.toml @@ -31,7 +31,6 @@ tracing.workspace = true # misc rayon.workspace = true -derive_more.workspace = true auto_impl.workspace = true itertools.workspace = true @@ -50,7 +49,6 @@ serde_with = { workspace = true, optional = true } [dev-dependencies] # reth -reth-chainspec.workspace = true reth-primitives = { workspace = true, features = ["test-utils", "arbitrary"] } reth-trie-common = { workspace = true, features = ["test-utils", "arbitrary"] } @@ -60,11 +58,6 @@ triehash = "0.8" # misc proptest.workspace = true proptest-arbitrary-interop.workspace = true -tokio = { workspace = true, default-features = false, features = [ - "sync", - "rt", - "macros", -] } serde_json.workspace = true criterion.workspace = true bincode.workspace = true diff --git a/examples/custom-rlpx-subprotocol/Cargo.toml b/examples/custom-rlpx-subprotocol/Cargo.toml index d59d16f35..18c136671 100644 --- a/examples/custom-rlpx-subprotocol/Cargo.toml +++ b/examples/custom-rlpx-subprotocol/Cargo.toml @@ -13,8 +13,6 @@ reth-eth-wire.workspace = true reth-network.workspace = true reth-network-api.workspace = true 
reth-node-ethereum.workspace = true -reth-provider = { workspace = true, features = ["test-utils"] } -reth-primitives.workspace = true reth.workspace = true tokio-stream.workspace = true eyre.workspace = true diff --git a/examples/polygon-p2p/Cargo.toml b/examples/polygon-p2p/Cargo.toml index bdf9a27ce..e18f32a64 100644 --- a/examples/polygon-p2p/Cargo.toml +++ b/examples/polygon-p2p/Cargo.toml @@ -20,6 +20,5 @@ reth-primitives.workspace = true serde_json.workspace = true reth-tracing.workspace = true tokio-stream.workspace = true -reth-provider = { workspace = true, features = ["test-utils"] } reth-discv4 = { workspace = true, features = ["test-utils"] } alloy-primitives.workspace = true From d0ac83394616e97ccc559c92e0f594b18cfa4440 Mon Sep 17 00:00:00 2001 From: greged93 <82421016+greged93@users.noreply.github.com> Date: Sat, 19 Oct 2024 18:56:48 +0200 Subject: [PATCH 151/425] perf: avoid cloning in payload builder (#11899) --- crates/ethereum/payload/src/lib.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index e09228302..7f94acf72 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -339,11 +339,12 @@ where // and 4788 contract call db.merge_transitions(BundleRetention::Reverts); + let requests_hash = requests.as_ref().map(|requests| requests.requests_hash()); let execution_outcome = ExecutionOutcome::new( db.take_bundle(), - vec![receipts.clone()].into(), + vec![receipts].into(), block_number, - vec![requests.clone().unwrap_or_default()], + vec![requests.unwrap_or_default()], ); let receipts_root = execution_outcome.receipts_root_slow(block_number).expect("Number is in range"); @@ -411,7 +412,7 @@ where parent_beacon_block_root: attributes.parent_beacon_block_root, blob_gas_used: blob_gas_used.map(Into::into), excess_blob_gas: excess_blob_gas.map(Into::into), - requests_hash: requests.map(|r| r.requests_hash()), + requests_hash, }; // seal the block From cd828c06d9fa9638951060c6ca3772d9a7c619a6 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Sat, 19 Oct 2024 19:59:32 +0200 Subject: [PATCH 152/425] feat: switch to composable executor for Ethereum (#11838) --- Cargo.lock | 2 +- crates/ethereum/evm/Cargo.toml | 3 +- crates/ethereum/evm/src/execute.rs | 643 ++++------ crates/ethereum/evm/src/lib.rs | 4 +- crates/ethereum/evm/src/strategy.rs | 1176 ------------------ crates/ethereum/node/Cargo.toml | 1 + crates/ethereum/node/src/evm.rs | 4 +- crates/ethereum/node/src/lib.rs | 4 +- crates/ethereum/node/src/node.rs | 8 +- crates/rpc/rpc-builder/tests/it/utils.rs | 9 +- crates/stages/stages/src/stages/execution.rs | 8 +- examples/custom-evm/src/main.rs | 9 +- examples/stateful-precompile/src/main.rs | 15 +- 13 files changed, 277 insertions(+), 1609 deletions(-) delete mode 100644 crates/ethereum/evm/src/strategy.rs diff --git a/Cargo.lock b/Cargo.lock index 62ff16bb9..25a62d0c0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7443,7 +7443,6 @@ dependencies = [ "reth-evm", "reth-execution-types", "reth-primitives", - "reth-prune-types", "reth-revm", "reth-testing-utils", "revm-primitives", @@ -7995,6 +7994,7 @@ dependencies = [ "reth-e2e-test-utils", "reth-ethereum-engine-primitives", "reth-ethereum-payload-builder", + "reth-evm", "reth-evm-ethereum", "reth-exex", "reth-network", diff --git a/crates/ethereum/evm/Cargo.toml b/crates/ethereum/evm/Cargo.toml index 7215efa68..8cbc92f90 100644 --- a/crates/ethereum/evm/Cargo.toml +++ 
b/crates/ethereum/evm/Cargo.toml @@ -18,8 +18,6 @@ reth-evm.workspace = true reth-primitives = { workspace = true, features = ["reth-codec"] } reth-revm.workspace = true reth-ethereum-consensus.workspace = true -reth-prune-types.workspace = true -reth-execution-types.workspace = true reth-consensus.workspace = true # Ethereum @@ -36,6 +34,7 @@ reth-testing-utils.workspace = true reth-evm = { workspace = true, features = ["test-utils"] } reth-revm = { workspace = true, features = ["test-utils"] } reth-primitives = { workspace = true, features = ["secp256k1"] } +reth-execution-types.workspace = true secp256k1.workspace = true serde_json.workspace = true alloy-genesis.workspace = true diff --git a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs index b4a90d409..185f351dd 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -1,158 +1,161 @@ -//! Ethereum block executor. +//! Ethereum block execution strategy. use crate::{ dao_fork::{DAO_HARDFORK_BENEFICIARY, DAO_HARDKFORK_ACCOUNTS}, EthEvmConfig, }; -use alloc::{boxed::Box, sync::Arc, vec::Vec}; +use alloc::{boxed::Box, sync::Arc, vec, vec::Vec}; use alloy_consensus::Transaction as _; use alloy_eips::eip7685::Requests; -use alloy_primitives::{BlockNumber, U256}; use core::fmt::Display; -use reth_chainspec::{ChainSpec, EthereumHardforks, MAINNET}; +use reth_chainspec::{ChainSpec, EthereumHardfork, EthereumHardforks, MAINNET}; +use reth_consensus::ConsensusError; use reth_ethereum_consensus::validate_block_post_execution; use reth_evm::{ execute::{ - BatchExecutor, BlockExecutionError, BlockExecutionInput, BlockExecutionOutput, - BlockExecutorProvider, BlockValidationError, Executor, ProviderError, + BasicBlockExecutorProvider, BlockExecutionError, BlockExecutionStrategy, + BlockExecutionStrategyFactory, BlockValidationError, ProviderError, }, state_change::post_block_balance_increments, - system_calls::{NoopHook, OnStateHook, SystemCaller}, + system_calls::{OnStateHook, SystemCaller}, ConfigureEvm, }; -use reth_execution_types::ExecutionOutcome; -use reth_primitives::{BlockWithSenders, EthereumHardfork, Header, Receipt}; -use reth_prune_types::PruneModes; -use reth_revm::{ - batch::BlockBatchRecord, - db::{states::bundle_state::BundleRetention, State}, - Evm, -}; +use reth_primitives::{BlockWithSenders, Receipt}; +use reth_revm::db::{states::bundle_state::BundleRetention, BundleState, State}; use revm_primitives::{ db::{Database, DatabaseCommit}, - BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, + BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, U256, }; -/// Provides executors to execute regular ethereum blocks +/// Factory for [`EthExecutionStrategy`]. #[derive(Debug, Clone)] -pub struct EthExecutorProvider { +pub struct EthExecutionStrategyFactory { + /// The chainspec chain_spec: Arc, + /// How to create an EVM. evm_config: EvmConfig, } -impl EthExecutorProvider { - /// Creates a new default ethereum executor provider. +impl EthExecutionStrategyFactory { + /// Creates a new default ethereum executor strategy factory. pub fn ethereum(chain_spec: Arc) -> Self { Self::new(chain_spec.clone(), EthEvmConfig::new(chain_spec)) } - /// Returns a new provider for the mainnet. + /// Returns a new factory for the mainnet. pub fn mainnet() -> Self { Self::ethereum(MAINNET.clone()) } } -impl EthExecutorProvider { - /// Creates a new executor provider. +impl EthExecutionStrategyFactory { + /// Creates a new executor strategy factory. 
pub const fn new(chain_spec: Arc, evm_config: EvmConfig) -> Self { Self { chain_spec, evm_config } } } -impl EthExecutorProvider -where - EvmConfig: ConfigureEvm
, -{ - fn eth_executor(&self, db: DB) -> EthBlockExecutor - where - DB: Database>, - { - EthBlockExecutor::new( - self.chain_spec.clone(), - self.evm_config.clone(), - State::builder().with_database(db).with_bundle_update().without_state_clear().build(), - ) - } -} - -impl BlockExecutorProvider for EthExecutorProvider +impl BlockExecutionStrategyFactory for EthExecutionStrategyFactory where - EvmConfig: ConfigureEvm
, + EvmConfig: + Clone + Unpin + Sync + Send + 'static + ConfigureEvm
, { - type Executor + Display>> = - EthBlockExecutor; - - type BatchExecutor + Display>> = - EthBatchExecutor; + type Strategy + Display>> = + EthExecutionStrategy; - fn executor(&self, db: DB) -> Self::Executor + fn create_strategy(&self, db: DB) -> Self::Strategy where DB: Database + Display>, { - self.eth_executor(db) + let state = + State::builder().with_database(db).with_bundle_update().without_state_clear().build(); + EthExecutionStrategy::new(state, self.chain_spec.clone(), self.evm_config.clone()) } - - fn batch_executor(&self, db: DB) -> Self::BatchExecutor - where - DB: Database + Display>, - { - let executor = self.eth_executor(db); - EthBatchExecutor { executor, batch_record: BlockBatchRecord::default() } - } -} - -/// Helper type for the output of executing a block. -#[derive(Debug, Clone)] -struct EthExecuteOutput { - receipts: Vec, - requests: Requests, - gas_used: u64, } -/// Helper container type for EVM with chain spec. -#[derive(Debug, Clone)] -struct EthEvmExecutor { +/// Block execution strategy for Ethereum. +#[allow(missing_debug_implementations)] +pub struct EthExecutionStrategy +where + EvmConfig: Clone, +{ /// The chainspec chain_spec: Arc, /// How to create an EVM. evm_config: EvmConfig, + /// Current state for block execution. + state: State, + /// Utility to call system smart contracts. + system_caller: SystemCaller, } -impl EthEvmExecutor +impl EthExecutionStrategy where - EvmConfig: ConfigureEvm
, + EvmConfig: Clone, { - /// Executes the transactions in the block and returns the receipts of the transactions in the - /// block, the total gas used and the list of EIP-7685 [requests](Requests). - /// - /// This applies the pre-execution and post-execution changes that require an [EVM](Evm), and - /// executes the transactions. - /// - /// The optional `state_hook` will be executed with the state changes if present. + /// Creates a new [`EthExecutionStrategy`] + pub fn new(state: State, chain_spec: Arc, evm_config: EvmConfig) -> Self { + let system_caller = SystemCaller::new(evm_config.clone(), (*chain_spec).clone()); + Self { state, chain_spec, evm_config, system_caller } + } +} + +impl EthExecutionStrategy +where + DB: Database + Display>, + EvmConfig: ConfigureEvm
, +{ + /// Configures a new evm configuration and block environment for the given block. /// - /// # Note + /// # Caution /// - /// It does __not__ apply post-execution changes that do not require an [EVM](Evm), for that see - /// [`EthBlockExecutor::post_execution`]. - fn execute_state_transitions( + /// This does not initialize the tx environment. + fn evm_env_for_block( &self, + header: &alloy_consensus::Header, + total_difficulty: U256, + ) -> EnvWithHandlerCfg { + let mut cfg = CfgEnvWithHandlerCfg::new(Default::default(), Default::default()); + let mut block_env = BlockEnv::default(); + self.evm_config.fill_cfg_and_block_env(&mut cfg, &mut block_env, header, total_difficulty); + + EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, Default::default()) + } +} + +impl BlockExecutionStrategy for EthExecutionStrategy +where + DB: Database + Display>, + EvmConfig: ConfigureEvm
, +{ + type Error = BlockExecutionError; + + fn apply_pre_execution_changes( + &mut self, block: &BlockWithSenders, - mut evm: Evm<'_, Ext, &mut State>, - state_hook: Option, - ) -> Result - where - DB: Database, - DB::Error: Into + Display, - F: OnStateHook + 'static, - { - let mut system_caller = SystemCaller::new(self.evm_config.clone(), &self.chain_spec); - if let Some(hook) = state_hook { - system_caller.with_state_hook(Some(Box::new(hook) as Box)); - } + total_difficulty: U256, + ) -> Result<(), Self::Error> { + // Set state clear flag if the block is after the Spurious Dragon hardfork. + let state_clear_flag = + (*self.chain_spec).is_spurious_dragon_active_at_block(block.header.number); + self.state.set_state_clear_flag(state_clear_flag); + + let env = self.evm_env_for_block(&block.header, total_difficulty); + let mut evm = self.evm_config.evm_with_env(&mut self.state, env); + + self.system_caller.apply_pre_execution_changes(block, &mut evm)?; + + Ok(()) + } - system_caller.apply_pre_execution_changes(block, &mut evm)?; + fn execute_transactions( + &mut self, + block: &BlockWithSenders, + total_difficulty: U256, + ) -> Result<(Vec, u64), Self::Error> { + let env = self.evm_env_for_block(&block.header, total_difficulty); + let mut evm = self.evm_config.evm_with_env(&mut self.state, env); - // execute transactions let mut cumulative_gas_used = 0; let mut receipts = Vec::with_capacity(block.body.transactions.len()); for (sender, transaction) in block.transactions_with_sender() { @@ -178,7 +181,7 @@ where error: Box::new(new_err), } })?; - system_caller.on_state(&result_and_state); + self.system_caller.on_state(&result_and_state); let ResultAndState { result, state } = result_and_state; evm.db_mut().commit(state); @@ -200,137 +203,36 @@ where }, ); } + Ok((receipts, cumulative_gas_used)) + } + + fn apply_post_execution_changes( + &mut self, + block: &BlockWithSenders, + total_difficulty: U256, + receipts: &[Receipt], + ) -> Result { + let env = self.evm_env_for_block(&block.header, total_difficulty); + let mut evm = self.evm_config.evm_with_env(&mut self.state, env); let requests = if self.chain_spec.is_prague_active_at_timestamp(block.timestamp) { // Collect all EIP-6110 deposits let deposit_requests = - crate::eip6110::parse_deposits_from_receipts(&self.chain_spec, &receipts)?; + crate::eip6110::parse_deposits_from_receipts(&self.chain_spec, receipts)?; let mut requests = Requests::new(vec![deposit_requests]); - requests.extend(system_caller.apply_post_execution_changes(&mut evm)?); + requests.extend(self.system_caller.apply_post_execution_changes(&mut evm)?); requests } else { Requests::default() }; + drop(evm); - Ok(EthExecuteOutput { receipts, requests, gas_used: cumulative_gas_used }) - } -} - -/// A basic Ethereum block executor. -/// -/// Expected usage: -/// - Create a new instance of the executor. -/// - Execute the block. -#[derive(Debug)] -pub struct EthBlockExecutor { - /// Chain specific evm config that's used to execute a block. - executor: EthEvmExecutor, - /// The state to use for execution - state: State, -} - -impl EthBlockExecutor { - /// Creates a new Ethereum block executor. - pub const fn new(chain_spec: Arc, evm_config: EvmConfig, state: State) -> Self { - Self { executor: EthEvmExecutor { chain_spec, evm_config }, state } - } - - #[inline] - fn chain_spec(&self) -> &ChainSpec { - &self.executor.chain_spec - } - - /// Returns mutable reference to the state that wraps the underlying database. 
- #[allow(unused)] - fn state_mut(&mut self) -> &mut State { - &mut self.state - } -} - -impl EthBlockExecutor -where - EvmConfig: ConfigureEvm
, - DB: Database + Display>, -{ - /// Configures a new evm configuration and block environment for the given block. - /// - /// # Caution - /// - /// This does not initialize the tx environment. - fn evm_env_for_block(&self, header: &Header, total_difficulty: U256) -> EnvWithHandlerCfg { - let mut cfg = CfgEnvWithHandlerCfg::new(Default::default(), Default::default()); - let mut block_env = BlockEnv::default(); - self.executor.evm_config.fill_cfg_and_block_env( - &mut cfg, - &mut block_env, - header, - total_difficulty, - ); - - EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, Default::default()) - } - - /// Convenience method to invoke `execute_without_verification_with_state_hook` setting the - /// state hook as `None`. - fn execute_without_verification( - &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - ) -> Result { - self.execute_without_verification_with_state_hook(block, total_difficulty, None::) - } - - /// Execute a single block and apply the state changes to the internal state. - /// - /// Returns the receipts of the transactions in the block, the total gas used and the list of - /// EIP-7685 [requests](Requests). - /// - /// Returns an error if execution fails. - fn execute_without_verification_with_state_hook( - &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - state_hook: Option, - ) -> Result - where - F: OnStateHook + 'static, - { - // 1. prepare state on new block - self.on_new_block(&block.header); - - // 2. configure the evm and execute - let env = self.evm_env_for_block(&block.header, total_difficulty); - let output = { - let evm = self.executor.evm_config.evm_with_env(&mut self.state, env); - self.executor.execute_state_transitions(block, evm, state_hook) - }?; - - // 3. apply post execution changes - self.post_execution(block, total_difficulty)?; - - Ok(output) - } - - /// Apply settings before a new block is executed. - pub(crate) fn on_new_block(&mut self, header: &Header) { - // Set state clear flag if the block is after the Spurious Dragon hardfork. - let state_clear_flag = self.chain_spec().is_spurious_dragon_active_at_block(header.number); - self.state.set_state_clear_flag(state_clear_flag); - } - - /// Apply post execution state changes that do not require an [EVM](Evm), such as: block - /// rewards, withdrawals, and irregular DAO hardfork state change - pub fn post_execution( - &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - ) -> Result<(), BlockExecutionError> { let mut balance_increments = - post_block_balance_increments(self.chain_spec(), block, total_difficulty); + post_block_balance_increments(&self.chain_spec, block, total_difficulty); // Irregular state change at Ethereum DAO hardfork - if self.chain_spec().fork(EthereumHardfork::Dao).transitions_at_block(block.number) { + if self.chain_spec.fork(EthereumHardfork::Dao).transitions_at_block(block.number) { // drain balances from hardcoded addresses. let drained_balance: u128 = self .state @@ -347,155 +249,59 @@ where .increment_balances(balance_increments) .map_err(|_| BlockValidationError::IncrementBalanceFailed)?; - Ok(()) + Ok(requests) } -} - -impl Executor for EthBlockExecutor -where - EvmConfig: ConfigureEvm
, - DB: Database + Display>, -{ - type Input<'a> = BlockExecutionInput<'a, BlockWithSenders>; - type Output = BlockExecutionOutput; - type Error = BlockExecutionError; - /// Executes the block and commits the changes to the internal state. - /// - /// Returns the receipts of the transactions in the block. - /// - /// Returns an error if the block could not be executed or failed verification. - fn execute(mut self, input: Self::Input<'_>) -> Result { - let BlockExecutionInput { block, total_difficulty } = input; - let EthExecuteOutput { receipts, requests, gas_used } = - self.execute_without_verification(block, total_difficulty)?; - - // NOTE: we need to merge keep the reverts for the bundle retention - self.state.merge_transitions(BundleRetention::Reverts); - - Ok(BlockExecutionOutput { state: self.state.take_bundle(), receipts, requests, gas_used }) + fn state_ref(&self) -> &State { + &self.state } - fn execute_with_state_closure( - mut self, - input: Self::Input<'_>, - mut witness: F, - ) -> Result - where - F: FnMut(&State), - { - let BlockExecutionInput { block, total_difficulty } = input; - let EthExecuteOutput { receipts, requests, gas_used } = - self.execute_without_verification(block, total_difficulty)?; + fn state_mut(&mut self) -> &mut State { + &mut self.state + } - // NOTE: we need to merge keep the reverts for the bundle retention - self.state.merge_transitions(BundleRetention::Reverts); - witness(&self.state); - Ok(BlockExecutionOutput { state: self.state.take_bundle(), receipts, requests, gas_used }) + fn with_state_hook(&mut self, hook: Option>) { + self.system_caller.with_state_hook(hook); } - fn execute_with_state_hook( - mut self, - input: Self::Input<'_>, - state_hook: F, - ) -> Result - where - F: OnStateHook + 'static, - { - let BlockExecutionInput { block, total_difficulty } = input; - let EthExecuteOutput { receipts, requests, gas_used } = self - .execute_without_verification_with_state_hook( - block, - total_difficulty, - Some(state_hook), - )?; - - // NOTE: we need to merge keep the reverts for the bundle retention + fn finish(&mut self) -> BundleState { self.state.merge_transitions(BundleRetention::Reverts); - Ok(BlockExecutionOutput { state: self.state.take_bundle(), receipts, requests, gas_used }) + self.state.take_bundle() } -} -/// An executor for a batch of blocks. -/// -/// State changes are tracked until the executor is finalized. -#[derive(Debug)] -pub struct EthBatchExecutor { - /// The executor used to execute single blocks - /// - /// All state changes are committed to the [State]. - executor: EthBlockExecutor, - /// Keeps track of the batch and records receipts based on the configured prune mode - batch_record: BlockBatchRecord, -} -impl EthBatchExecutor { - /// Returns mutable reference to the state that wraps the underlying database. - #[allow(unused)] - fn state_mut(&mut self) -> &mut State { - self.executor.state_mut() + fn validate_block_post_execution( + &self, + block: &BlockWithSenders, + receipts: &[Receipt], + requests: &Requests, + ) -> Result<(), ConsensusError> { + validate_block_post_execution(block, &self.chain_spec.clone(), receipts, requests) } } -impl BatchExecutor for EthBatchExecutor -where - EvmConfig: ConfigureEvm
, - DB: Database + Display>, -{ - type Input<'a> = BlockExecutionInput<'a, BlockWithSenders>; - type Output = ExecutionOutcome; - type Error = BlockExecutionError; - - fn execute_and_verify_one(&mut self, input: Self::Input<'_>) -> Result<(), Self::Error> { - let BlockExecutionInput { block, total_difficulty } = input; - - if self.batch_record.first_block().is_none() { - self.batch_record.set_first_block(block.number); - } - - let EthExecuteOutput { receipts, requests, gas_used: _ } = - self.executor.execute_without_verification(block, total_difficulty)?; - - validate_block_post_execution(block, self.executor.chain_spec(), &receipts, &requests)?; - - // prepare the state according to the prune mode - let retention = self.batch_record.bundle_retention(block.number); - self.executor.state.merge_transitions(retention); - - // store receipts in the set - self.batch_record.save_receipts(receipts)?; - - // store requests in the set - self.batch_record.save_requests(requests); - - Ok(()) - } - - fn finalize(mut self) -> Self::Output { - ExecutionOutcome::new( - self.executor.state.take_bundle(), - self.batch_record.take_receipts(), - self.batch_record.first_block().unwrap_or_default(), - self.batch_record.take_requests(), - ) - } - - fn set_tip(&mut self, tip: BlockNumber) { - self.batch_record.set_tip(tip); - } +/// Helper type with backwards compatible methods to obtain Ethereum executor +/// providers. +#[derive(Debug)] +pub struct EthExecutorProvider; - fn set_prune_modes(&mut self, prune_modes: PruneModes) { - self.batch_record.set_prune_modes(prune_modes); +impl EthExecutorProvider { + /// Creates a new default ethereum executor provider. + pub fn ethereum( + chain_spec: Arc, + ) -> BasicBlockExecutorProvider { + BasicBlockExecutorProvider::new(EthExecutionStrategyFactory::ethereum(chain_spec)) } - fn size_hint(&self) -> Option { - Some(self.executor.state.bundle_state.size_hint()) + /// Returns a new provider for the mainnet. 
+ pub fn mainnet() -> BasicBlockExecutorProvider { + BasicBlockExecutorProvider::new(EthExecutionStrategyFactory::mainnet()) } } #[cfg(test)] mod tests { use super::*; - use alloy_consensus::TxLegacy; + use alloy_consensus::{Header, TxLegacy}; use alloy_eips::{ eip2935::{HISTORY_STORAGE_ADDRESS, HISTORY_STORAGE_CODE}, eip4788::{BEACON_ROOTS_ADDRESS, BEACON_ROOTS_CODE, SYSTEM_ADDRESS}, @@ -504,6 +310,10 @@ mod tests { }; use alloy_primitives::{b256, fixed_bytes, keccak256, Bytes, TxKind, B256}; use reth_chainspec::{ChainSpecBuilder, ForkCondition}; + use reth_evm::execute::{ + BasicBlockExecutorProvider, BatchExecutor, BlockExecutorProvider, Executor, + }; + use reth_execution_types::BlockExecutionOutput; use reth_primitives::{ constants::ETH_TO_WEI, public_key_to_address, Account, Block, BlockBody, Transaction, }; @@ -553,8 +363,13 @@ mod tests { db } - fn executor_provider(chain_spec: Arc) -> EthExecutorProvider { - EthExecutorProvider { evm_config: EthEvmConfig::new(chain_spec.clone()), chain_spec } + fn executor_provider( + chain_spec: Arc, + ) -> BasicBlockExecutorProvider { + let strategy_factory = + EthExecutionStrategyFactory::new(chain_spec.clone(), EthEvmConfig::new(chain_spec)); + + BasicBlockExecutorProvider::new(strategy_factory) } #[test] @@ -573,10 +388,11 @@ mod tests { let provider = executor_provider(chain_spec); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + // attempt to execute a block without parent beacon block root, expect err - let err = provider - .executor(StateProviderDatabase::new(&db)) - .execute( + let err = executor + .execute_and_verify_one( ( &BlockWithSenders { block: Block { @@ -605,19 +421,24 @@ mod tests { // fix header, set a gas limit header.parent_beacon_block_root = Some(B256::with_last_byte(0x69)); - let mut executor = provider.executor(StateProviderDatabase::new(&db)); - // Now execute a block with the fixed header, ensure that it does not fail executor - .execute_without_verification( - &BlockWithSenders { - block: Block { - header: header.clone(), - body: BlockBody { transactions: vec![], ommers: vec![], withdrawals: None }, + .execute_and_verify_one( + ( + &BlockWithSenders { + block: Block { + header: header.clone(), + body: BlockBody { + transactions: vec![], + ommers: vec![], + withdrawals: None, + }, + }, + senders: vec![], }, - senders: vec![], - }, - U256::ZERO, + U256::ZERO, + ) + .into(), ) .unwrap(); @@ -631,16 +452,17 @@ mod tests { let parent_beacon_block_root_index = timestamp_index % history_buffer_length + history_buffer_length; - // get timestamp storage and compare - let timestamp_storage = - executor.state.storage(BEACON_ROOTS_ADDRESS, U256::from(timestamp_index)).unwrap(); + let timestamp_storage = executor.with_state_mut(|state| { + state.storage(BEACON_ROOTS_ADDRESS, U256::from(timestamp_index)).unwrap() + }); assert_eq!(timestamp_storage, U256::from(header.timestamp)); // get parent beacon block root storage and compare - let parent_beacon_block_root_storage = executor - .state - .storage(BEACON_ROOTS_ADDRESS, U256::from(parent_beacon_block_root_index)) - .expect("storage value should exist"); + let parent_beacon_block_root_storage = executor.with_state_mut(|state| { + state + .storage(BEACON_ROOTS_ADDRESS, U256::from(parent_beacon_block_root_index)) + .expect("storage value should exist") + }); assert_eq!(parent_beacon_block_root_storage, U256::from(0x69)); } @@ -747,7 +569,8 @@ mod tests { ); // ensure that the nonce of the system address account has not changed - let nonce = 
executor.state_mut().basic(SYSTEM_ADDRESS).unwrap().unwrap().nonce; + let nonce = + executor.with_state_mut(|state| state.basic(SYSTEM_ADDRESS).unwrap().unwrap().nonce); assert_eq!(nonce, 0); } @@ -805,11 +628,12 @@ mod tests { // there is no system contract call so there should be NO STORAGE CHANGES // this means we'll check the transition state - let transition_state = executor - .state_mut() - .transition_state - .take() - .expect("the evm should be initialized with bundle updates"); + let transition_state = executor.with_state_mut(|state| { + state + .transition_state + .take() + .expect("the evm should be initialized with bundle updates") + }); // assert that it is the default (empty) transition state assert_eq!(transition_state, TransitionState::default()); @@ -867,17 +691,15 @@ mod tests { timestamp_index % history_buffer_length + history_buffer_length; // get timestamp storage and compare - let timestamp_storage = executor - .state_mut() - .storage(BEACON_ROOTS_ADDRESS, U256::from(timestamp_index)) - .unwrap(); + let timestamp_storage = executor.with_state_mut(|state| { + state.storage(BEACON_ROOTS_ADDRESS, U256::from(timestamp_index)).unwrap() + }); assert_eq!(timestamp_storage, U256::from(header.timestamp)); // get parent beacon block root storage and compare - let parent_beacon_block_root_storage = executor - .state_mut() - .storage(BEACON_ROOTS_ADDRESS, U256::from(parent_beacon_block_root_index)) - .unwrap(); + let parent_beacon_block_root_storage = executor.with_state_mut(|state| { + state.storage(BEACON_ROOTS_ADDRESS, U256::from(parent_beacon_block_root_index)).unwrap() + }); assert_eq!(parent_beacon_block_root_storage, U256::from(0x69)); } @@ -903,7 +725,6 @@ mod tests { db } - #[test] fn eip_2935_pre_fork() { let db = create_state_provider_with_block_hashes(1); @@ -942,12 +763,11 @@ mod tests { // // we load the account first, because revm expects it to be // loaded - executor.state_mut().basic(HISTORY_STORAGE_ADDRESS).unwrap(); - assert!(executor - .state_mut() + executor.with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap()); + assert!(executor.with_state_mut(|state| state .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) .unwrap() - .is_zero()); + .is_zero())); } #[test] @@ -986,12 +806,11 @@ mod tests { // // we load the account first, because revm expects it to be // loaded - executor.state_mut().basic(HISTORY_STORAGE_ADDRESS).unwrap(); - assert!(executor - .state_mut() + executor.with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap()); + assert!(executor.with_state_mut(|state| state .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) .unwrap() - .is_zero()); + .is_zero())); } #[test] @@ -1033,21 +852,20 @@ mod tests { ); // the hash for the ancestor of the fork activation block should be present - assert!(executor.state_mut().basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some()); + assert!(executor + .with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some())); assert_ne!( - executor - .state_mut() + executor.with_state_mut(|state| state .storage(HISTORY_STORAGE_ADDRESS, U256::from(fork_activation_block - 1)) - .unwrap(), + .unwrap()), U256::ZERO ); // the hash of the block itself should not be in storage - assert!(executor - .state_mut() + assert!(executor.with_state_mut(|state| state .storage(HISTORY_STORAGE_ADDRESS, U256::from(fork_activation_block)) .unwrap() - .is_zero()); + .is_zero())); } #[test] @@ -1090,15 +908,15 @@ mod tests { ); // the hash for the ancestor of the fork activation block should be present - 
assert!(executor.state_mut().basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some()); + assert!(executor + .with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some())); assert_ne!( - executor - .state_mut() + executor.with_state_mut(|state| state .storage( HISTORY_STORAGE_ADDRESS, U256::from(fork_activation_block % BLOCKHASH_SERVE_WINDOW as u64 - 1) ) - .unwrap(), + .unwrap()), U256::ZERO ); } @@ -1141,12 +959,11 @@ mod tests { // // we load the account first, because revm expects it to be // loaded - executor.state_mut().basic(HISTORY_STORAGE_ADDRESS).unwrap(); - assert!(executor - .state_mut() + executor.with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap()); + assert!(executor.with_state_mut(|state| state .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) .unwrap() - .is_zero()); + .is_zero())); // attempt to execute block 1, this should not fail let header = Header { @@ -1174,16 +991,18 @@ mod tests { ); // the block hash of genesis should now be in storage, but not block 1 - assert!(executor.state_mut().basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some()); + assert!(executor + .with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some())); assert_ne!( - executor.state_mut().storage(HISTORY_STORAGE_ADDRESS, U256::ZERO).unwrap(), + executor.with_state_mut(|state| state + .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) + .unwrap()), U256::ZERO ); - assert!(executor - .state_mut() + assert!(executor.with_state_mut(|state| state .storage(HISTORY_STORAGE_ADDRESS, U256::from(1)) .unwrap() - .is_zero()); + .is_zero())); // attempt to execute block 2, this should not fail let header = Header { @@ -1210,20 +1029,24 @@ mod tests { ); // the block hash of genesis and block 1 should now be in storage, but not block 2 - assert!(executor.state_mut().basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some()); + assert!(executor + .with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some())); assert_ne!( - executor.state_mut().storage(HISTORY_STORAGE_ADDRESS, U256::ZERO).unwrap(), + executor.with_state_mut(|state| state + .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) + .unwrap()), U256::ZERO ); assert_ne!( - executor.state_mut().storage(HISTORY_STORAGE_ADDRESS, U256::from(1)).unwrap(), + executor.with_state_mut(|state| state + .storage(HISTORY_STORAGE_ADDRESS, U256::from(1)) + .unwrap()), U256::ZERO ); - assert!(executor - .state_mut() + assert!(executor.with_state_mut(|state| state .storage(HISTORY_STORAGE_ADDRESS, U256::from(2)) .unwrap() - .is_zero()); + .is_zero())); } #[test] diff --git a/crates/ethereum/evm/src/lib.rs b/crates/ethereum/evm/src/lib.rs index ac9bb5a0b..9abb11976 100644 --- a/crates/ethereum/evm/src/lib.rs +++ b/crates/ethereum/evm/src/lib.rs @@ -17,7 +17,7 @@ extern crate alloc; -use alloc::vec::Vec; +use alloc::{sync::Arc, vec::Vec}; use alloy_primitives::{Address, Bytes, TxKind, U256}; use reth_chainspec::{ChainSpec, Head}; use reth_evm::{ConfigureEvm, ConfigureEvmEnv, NextBlockEnvAttributes}; @@ -25,7 +25,6 @@ use reth_primitives::{transaction::FillTxEnv, Header, TransactionSigned}; use revm_primitives::{ AnalysisKind, BlobExcessGasAndPrice, BlockEnv, CfgEnv, CfgEnvWithHandlerCfg, Env, SpecId, TxEnv, }; -use std::sync::Arc; mod config; pub use config::{revm_spec, revm_spec_by_timestamp_after_merge}; @@ -33,7 +32,6 @@ use reth_ethereum_forks::EthereumHardfork; use reth_primitives::constants::EIP1559_INITIAL_BASE_FEE; pub mod execute; -pub mod strategy; /// Ethereum DAO hardfork state change data. 
pub mod dao_fork; diff --git a/crates/ethereum/evm/src/strategy.rs b/crates/ethereum/evm/src/strategy.rs deleted file mode 100644 index 55fbfffc8..000000000 --- a/crates/ethereum/evm/src/strategy.rs +++ /dev/null @@ -1,1176 +0,0 @@ -//! Ethereum block execution strategy, - -use crate::{ - dao_fork::{DAO_HARDFORK_BENEFICIARY, DAO_HARDKFORK_ACCOUNTS}, - EthEvmConfig, -}; -use alloc::sync::Arc; -use alloy_consensus::Transaction as _; -use alloy_eips::eip7685::Requests; -use core::fmt::Display; -use reth_chainspec::{ChainSpec, EthereumHardfork, EthereumHardforks, MAINNET}; -use reth_consensus::ConsensusError; -use reth_ethereum_consensus::validate_block_post_execution; -use reth_evm::{ - execute::{ - BlockExecutionError, BlockExecutionStrategy, BlockExecutionStrategyFactory, - BlockValidationError, ProviderError, - }, - state_change::post_block_balance_increments, - system_calls::{OnStateHook, SystemCaller}, - ConfigureEvm, ConfigureEvmEnv, -}; -use reth_primitives::{BlockWithSenders, Header, Receipt}; -use reth_revm::{ - db::{states::bundle_state::BundleRetention, BundleState}, - Database, DatabaseCommit, State, -}; -use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, U256}; - -/// Factory for [`EthExecutionStrategy`]. -#[derive(Debug, Clone)] -pub struct EthExecutionStrategyFactory { - /// The chainspec - chain_spec: Arc, - /// How to create an EVM. - evm_config: EvmConfig, -} - -impl EthExecutionStrategyFactory { - /// Creates a new default ethereum executor strategy factory. - pub fn ethereum(chain_spec: Arc) -> Self { - Self::new(chain_spec.clone(), EthEvmConfig::new(chain_spec)) - } - - /// Returns a new factory for the mainnet. - pub fn mainnet() -> Self { - Self::ethereum(MAINNET.clone()) - } -} - -impl EthExecutionStrategyFactory { - /// Creates a new executor strategy factory. - pub const fn new(chain_spec: Arc, evm_config: EvmConfig) -> Self { - Self { chain_spec, evm_config } - } -} - -impl BlockExecutionStrategyFactory for EthExecutionStrategyFactory { - type Strategy + Display>> = EthExecutionStrategy; - - fn create_strategy(&self, db: DB) -> Self::Strategy - where - DB: Database + Display>, - { - let state = - State::builder().with_database(db).with_bundle_update().without_state_clear().build(); - EthExecutionStrategy::new(state, self.chain_spec.clone(), self.evm_config.clone()) - } -} - -/// Block execution strategy for Ethereum. -#[allow(missing_debug_implementations)] -pub struct EthExecutionStrategy { - /// The chainspec - chain_spec: Arc, - /// How to create an EVM. - evm_config: EvmConfig, - /// Current state for block execution. - state: State, - /// Utility to call system smart contracts. - system_caller: SystemCaller, -} - -impl EthExecutionStrategy { - /// Creates a new [`EthExecutionStrategy`] - pub fn new(state: State, chain_spec: Arc, evm_config: EthEvmConfig) -> Self { - let system_caller = SystemCaller::new(evm_config.clone(), (*chain_spec).clone()); - Self { state, chain_spec, evm_config, system_caller } - } -} - -impl EthExecutionStrategy -where - DB: Database + Display>, - EvmConfig: ConfigureEvm
, -{ - /// Configures a new evm configuration and block environment for the given block. - /// - /// # Caution - /// - /// This does not initialize the tx environment. - fn evm_env_for_block(&self, header: &Header, total_difficulty: U256) -> EnvWithHandlerCfg { - let mut cfg = CfgEnvWithHandlerCfg::new(Default::default(), Default::default()); - let mut block_env = BlockEnv::default(); - self.evm_config.fill_cfg_and_block_env(&mut cfg, &mut block_env, header, total_difficulty); - - EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, Default::default()) - } -} - -impl BlockExecutionStrategy for EthExecutionStrategy -where - DB: Database + Display>, -{ - type Error = BlockExecutionError; - - fn apply_pre_execution_changes( - &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - ) -> Result<(), Self::Error> { - // Set state clear flag if the block is after the Spurious Dragon hardfork. - let state_clear_flag = - (*self.chain_spec).is_spurious_dragon_active_at_block(block.header.number); - self.state.set_state_clear_flag(state_clear_flag); - - let env = self.evm_env_for_block(&block.header, total_difficulty); - let mut evm = self.evm_config.evm_with_env(&mut self.state, env); - - self.system_caller.apply_pre_execution_changes(block, &mut evm)?; - - Ok(()) - } - - fn execute_transactions( - &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - ) -> Result<(Vec, u64), Self::Error> { - let env = self.evm_env_for_block(&block.header, total_difficulty); - let mut evm = self.evm_config.evm_with_env(&mut self.state, env); - - let mut cumulative_gas_used = 0; - let mut receipts = Vec::with_capacity(block.body.transactions.len()); - for (sender, transaction) in block.transactions_with_sender() { - // The sum of the transaction’s gas limit, Tg, and the gas utilized in this block prior, - // must be no greater than the block’s gasLimit. - let block_available_gas = block.header.gas_limit - cumulative_gas_used; - if transaction.gas_limit() > block_available_gas { - return Err(BlockValidationError::TransactionGasLimitMoreThanAvailableBlockGas { - transaction_gas_limit: transaction.gas_limit(), - block_available_gas, - } - .into()) - } - - self.evm_config.fill_tx_env(evm.tx_mut(), transaction, *sender); - - // Execute transaction. - let result_and_state = evm.transact().map_err(move |err| { - let new_err = err.map_db_err(|e| e.into()); - // Ensure hash is calculated for error log, if not already done - BlockValidationError::EVM { - hash: transaction.recalculate_hash(), - error: Box::new(new_err), - } - })?; - self.system_caller.on_state(&result_and_state); - let ResultAndState { result, state } = result_and_state; - evm.db_mut().commit(state); - - // append gas used - cumulative_gas_used += result.gas_used(); - - // Push transaction changeset and calculate header bloom filter for receipt. - receipts.push( - #[allow(clippy::needless_update)] // side-effect of optimism fields - Receipt { - tx_type: transaction.tx_type(), - // Success flag was added in `EIP-658: Embedding transaction status code in - // receipts`. 
- success: result.is_success(), - cumulative_gas_used, - // convert to reth log - logs: result.into_logs(), - ..Default::default() - }, - ); - } - Ok((receipts, cumulative_gas_used)) - } - - fn apply_post_execution_changes( - &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - receipts: &[Receipt], - ) -> Result { - let env = self.evm_env_for_block(&block.header, total_difficulty); - let mut evm = self.evm_config.evm_with_env(&mut self.state, env); - - let requests = if self.chain_spec.is_prague_active_at_timestamp(block.timestamp) { - // Collect all EIP-6110 deposits - let deposit_requests = - crate::eip6110::parse_deposits_from_receipts(&self.chain_spec, receipts)?; - - let mut requests = Requests::new(vec![deposit_requests]); - requests.extend(self.system_caller.apply_post_execution_changes(&mut evm)?); - requests - } else { - Requests::default() - }; - drop(evm); - - let mut balance_increments = - post_block_balance_increments(&self.chain_spec, block, total_difficulty); - - // Irregular state change at Ethereum DAO hardfork - if self.chain_spec.fork(EthereumHardfork::Dao).transitions_at_block(block.number) { - // drain balances from hardcoded addresses. - let drained_balance: u128 = self - .state - .drain_balances(DAO_HARDKFORK_ACCOUNTS) - .map_err(|_| BlockValidationError::IncrementBalanceFailed)? - .into_iter() - .sum(); - - // return balance to DAO beneficiary. - *balance_increments.entry(DAO_HARDFORK_BENEFICIARY).or_default() += drained_balance; - } - // increment balances - self.state - .increment_balances(balance_increments) - .map_err(|_| BlockValidationError::IncrementBalanceFailed)?; - - Ok(requests) - } - - fn state_ref(&self) -> &State { - &self.state - } - - fn state_mut(&mut self) -> &mut State { - &mut self.state - } - - fn with_state_hook(&mut self, hook: Option>) { - self.system_caller.with_state_hook(hook); - } - - fn finish(&mut self) -> BundleState { - self.state.merge_transitions(BundleRetention::Reverts); - self.state.take_bundle() - } - - fn validate_block_post_execution( - &self, - block: &BlockWithSenders, - receipts: &[Receipt], - requests: &Requests, - ) -> Result<(), ConsensusError> { - validate_block_post_execution(block, &self.chain_spec.clone(), receipts, requests) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_consensus::TxLegacy; - use alloy_eips::{ - eip2935::{HISTORY_STORAGE_ADDRESS, HISTORY_STORAGE_CODE}, - eip4788::{BEACON_ROOTS_ADDRESS, BEACON_ROOTS_CODE, SYSTEM_ADDRESS}, - eip7002::{WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS, WITHDRAWAL_REQUEST_PREDEPLOY_CODE}, - eip7685::EMPTY_REQUESTS_HASH, - }; - use alloy_primitives::{b256, fixed_bytes, keccak256, Bytes, TxKind, B256}; - use reth_chainspec::{ChainSpecBuilder, ForkCondition}; - use reth_evm::execute::{ - BasicBlockExecutorProvider, BatchExecutor, BlockExecutorProvider, Executor, - }; - use reth_execution_types::BlockExecutionOutput; - use reth_primitives::{ - constants::ETH_TO_WEI, public_key_to_address, Account, Block, BlockBody, Transaction, - }; - use reth_revm::{ - database::StateProviderDatabase, test_utils::StateProviderTest, TransitionState, - }; - use reth_testing_utils::generators::{self, sign_tx_with_key_pair}; - use revm_primitives::BLOCKHASH_SERVE_WINDOW; - use secp256k1::{Keypair, Secp256k1}; - use std::collections::HashMap; - - fn create_state_provider_with_beacon_root_contract() -> StateProviderTest { - let mut db = StateProviderTest::default(); - - let beacon_root_contract_account = Account { - balance: U256::ZERO, - bytecode_hash: 
Some(keccak256(BEACON_ROOTS_CODE.clone())), - nonce: 1, - }; - - db.insert_account( - BEACON_ROOTS_ADDRESS, - beacon_root_contract_account, - Some(BEACON_ROOTS_CODE.clone()), - HashMap::default(), - ); - - db - } - - fn create_state_provider_with_withdrawal_requests_contract() -> StateProviderTest { - let mut db = StateProviderTest::default(); - - let withdrawal_requests_contract_account = Account { - nonce: 1, - balance: U256::ZERO, - bytecode_hash: Some(keccak256(WITHDRAWAL_REQUEST_PREDEPLOY_CODE.clone())), - }; - - db.insert_account( - WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS, - withdrawal_requests_contract_account, - Some(WITHDRAWAL_REQUEST_PREDEPLOY_CODE.clone()), - HashMap::default(), - ); - - db - } - - fn executor_provider( - chain_spec: Arc, - ) -> BasicBlockExecutorProvider { - let strategy_factory = - EthExecutionStrategyFactory::new(chain_spec.clone(), EthEvmConfig::new(chain_spec)); - - BasicBlockExecutorProvider::new(strategy_factory) - } - - #[test] - fn eip_4788_non_genesis_call() { - let mut header = - Header { timestamp: 1, number: 1, excess_blob_gas: Some(0), ..Header::default() }; - - let db = create_state_provider_with_beacon_root_contract(); - - let chain_spec = Arc::new( - ChainSpecBuilder::from(&*MAINNET) - .shanghai_activated() - .with_fork(EthereumHardfork::Cancun, ForkCondition::Timestamp(1)) - .build(), - ); - - let provider = executor_provider(chain_spec); - - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); - - // attempt to execute a block without parent beacon block root, expect err - let err = executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { - header: header.clone(), - body: BlockBody { - transactions: vec![], - ommers: vec![], - withdrawals: None, - }, - }, - senders: vec![], - }, - U256::ZERO, - ) - .into(), - ) - .expect_err( - "Executing cancun block without parent beacon block root field should fail", - ); - - assert_eq!( - err.as_validation().unwrap().clone(), - BlockValidationError::MissingParentBeaconBlockRoot - ); - - // fix header, set a gas limit - header.parent_beacon_block_root = Some(B256::with_last_byte(0x69)); - - // Now execute a block with the fixed header, ensure that it does not fail - executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { - header: header.clone(), - body: BlockBody { - transactions: vec![], - ommers: vec![], - withdrawals: None, - }, - }, - senders: vec![], - }, - U256::ZERO, - ) - .into(), - ) - .unwrap(); - - // check the actual storage of the contract - it should be: - // * The storage value at header.timestamp % HISTORY_BUFFER_LENGTH should be - // header.timestamp - // * The storage value at header.timestamp % HISTORY_BUFFER_LENGTH + HISTORY_BUFFER_LENGTH - // // should be parent_beacon_block_root - let history_buffer_length = 8191u64; - let timestamp_index = header.timestamp % history_buffer_length; - let parent_beacon_block_root_index = - timestamp_index % history_buffer_length + history_buffer_length; - - let timestamp_storage = executor.with_state_mut(|state| { - state.storage(BEACON_ROOTS_ADDRESS, U256::from(timestamp_index)).unwrap() - }); - assert_eq!(timestamp_storage, U256::from(header.timestamp)); - - // get parent beacon block root storage and compare - let parent_beacon_block_root_storage = executor.with_state_mut(|state| { - state - .storage(BEACON_ROOTS_ADDRESS, U256::from(parent_beacon_block_root_index)) - .expect("storage value should exist") - }); - assert_eq!(parent_beacon_block_root_storage, U256::from(0x69)); - } - - 
#[test] - fn eip_4788_no_code_cancun() { - // This test ensures that we "silently fail" when cancun is active and there is no code at - // // BEACON_ROOTS_ADDRESS - let header = Header { - timestamp: 1, - number: 1, - parent_beacon_block_root: Some(B256::with_last_byte(0x69)), - excess_blob_gas: Some(0), - ..Header::default() - }; - - let db = StateProviderTest::default(); - - // DON'T deploy the contract at genesis - let chain_spec = Arc::new( - ChainSpecBuilder::from(&*MAINNET) - .shanghai_activated() - .with_fork(EthereumHardfork::Cancun, ForkCondition::Timestamp(1)) - .build(), - ); - - let provider = executor_provider(chain_spec); - - // attempt to execute an empty block with parent beacon block root, this should not fail - provider - .batch_executor(StateProviderDatabase::new(&db)) - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { - header, - body: BlockBody { - transactions: vec![], - ommers: vec![], - withdrawals: None, - }, - }, - senders: vec![], - }, - U256::ZERO, - ) - .into(), - ) - .expect( - "Executing a block with no transactions while cancun is active should not fail", - ); - } - - #[test] - fn eip_4788_empty_account_call() { - // This test ensures that we do not increment the nonce of an empty SYSTEM_ADDRESS account - // // during the pre-block call - - let mut db = create_state_provider_with_beacon_root_contract(); - - // insert an empty SYSTEM_ADDRESS - db.insert_account(SYSTEM_ADDRESS, Account::default(), None, HashMap::default()); - - let chain_spec = Arc::new( - ChainSpecBuilder::from(&*MAINNET) - .shanghai_activated() - .with_fork(EthereumHardfork::Cancun, ForkCondition::Timestamp(1)) - .build(), - ); - - let provider = executor_provider(chain_spec); - - // construct the header for block one - let header = Header { - timestamp: 1, - number: 1, - parent_beacon_block_root: Some(B256::with_last_byte(0x69)), - excess_blob_gas: Some(0), - ..Header::default() - }; - - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); - - // attempt to execute an empty block with parent beacon block root, this should not fail - executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { - header, - body: BlockBody { - transactions: vec![], - ommers: vec![], - withdrawals: None, - }, - }, - senders: vec![], - }, - U256::ZERO, - ) - .into(), - ) - .expect( - "Executing a block with no transactions while cancun is active should not fail", - ); - - // ensure that the nonce of the system address account has not changed - let nonce = - executor.with_state_mut(|state| state.basic(SYSTEM_ADDRESS).unwrap().unwrap().nonce); - assert_eq!(nonce, 0); - } - - #[test] - fn eip_4788_genesis_call() { - let db = create_state_provider_with_beacon_root_contract(); - - // activate cancun at genesis - let chain_spec = Arc::new( - ChainSpecBuilder::from(&*MAINNET) - .shanghai_activated() - .with_fork(EthereumHardfork::Cancun, ForkCondition::Timestamp(0)) - .build(), - ); - - let mut header = chain_spec.genesis_header().clone(); - let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); - - // attempt to execute the genesis block with non-zero parent beacon block root, expect err - header.parent_beacon_block_root = Some(B256::with_last_byte(0x69)); - let _err = executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { header: header.clone(), body: Default::default() }, - senders: vec![], - }, - U256::ZERO, - ) - .into(), - ) - .expect_err( - "Executing genesis 
cancun block with non-zero parent beacon block root field - should fail", - ); - - // fix header - header.parent_beacon_block_root = Some(B256::ZERO); - - // now try to process the genesis block again, this time ensuring that a system contract - // call does not occur - executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { header, body: Default::default() }, - senders: vec![], - }, - U256::ZERO, - ) - .into(), - ) - .unwrap(); - - // there is no system contract call so there should be NO STORAGE CHANGES - // this means we'll check the transition state - let transition_state = executor.with_state_mut(|state| { - state - .transition_state - .take() - .expect("the evm should be initialized with bundle updates") - }); - - // assert that it is the default (empty) transition state - assert_eq!(transition_state, TransitionState::default()); - } - - #[test] - fn eip_4788_high_base_fee() { - // This test ensures that if we have a base fee, then we don't return an error when the - // system contract is called, due to the gas price being less than the base fee. - let header = Header { - timestamp: 1, - number: 1, - parent_beacon_block_root: Some(B256::with_last_byte(0x69)), - base_fee_per_gas: Some(u64::MAX), - excess_blob_gas: Some(0), - ..Header::default() - }; - - let db = create_state_provider_with_beacon_root_contract(); - - let chain_spec = Arc::new( - ChainSpecBuilder::from(&*MAINNET) - .shanghai_activated() - .with_fork(EthereumHardfork::Cancun, ForkCondition::Timestamp(1)) - .build(), - ); - - let provider = executor_provider(chain_spec); - - // execute header - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); - - // Now execute a block with the fixed header, ensure that it does not fail - executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { header: header.clone(), body: Default::default() }, - senders: vec![], - }, - U256::ZERO, - ) - .into(), - ) - .unwrap(); - - // check the actual storage of the contract - it should be: - // * The storage value at header.timestamp % HISTORY_BUFFER_LENGTH should be - // header.timestamp - // * The storage value at header.timestamp % HISTORY_BUFFER_LENGTH + HISTORY_BUFFER_LENGTH - // // should be parent_beacon_block_root - let history_buffer_length = 8191u64; - let timestamp_index = header.timestamp % history_buffer_length; - let parent_beacon_block_root_index = - timestamp_index % history_buffer_length + history_buffer_length; - - // get timestamp storage and compare - let timestamp_storage = executor.with_state_mut(|state| { - state.storage(BEACON_ROOTS_ADDRESS, U256::from(timestamp_index)).unwrap() - }); - assert_eq!(timestamp_storage, U256::from(header.timestamp)); - - // get parent beacon block root storage and compare - let parent_beacon_block_root_storage = executor.with_state_mut(|state| { - state.storage(BEACON_ROOTS_ADDRESS, U256::from(parent_beacon_block_root_index)).unwrap() - }); - assert_eq!(parent_beacon_block_root_storage, U256::from(0x69)); - } - - /// Create a state provider with blockhashes and the EIP-2935 system contract. 
- fn create_state_provider_with_block_hashes(latest_block: u64) -> StateProviderTest { - let mut db = StateProviderTest::default(); - for block_number in 0..=latest_block { - db.insert_block_hash(block_number, keccak256(block_number.to_string())); - } - - let blockhashes_contract_account = Account { - balance: U256::ZERO, - bytecode_hash: Some(keccak256(HISTORY_STORAGE_CODE.clone())), - nonce: 1, - }; - - db.insert_account( - HISTORY_STORAGE_ADDRESS, - blockhashes_contract_account, - Some(HISTORY_STORAGE_CODE.clone()), - HashMap::default(), - ); - - db - } - #[test] - fn eip_2935_pre_fork() { - let db = create_state_provider_with_block_hashes(1); - - let chain_spec = Arc::new( - ChainSpecBuilder::from(&*MAINNET) - .shanghai_activated() - .with_fork(EthereumHardfork::Prague, ForkCondition::Never) - .build(), - ); - - let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); - - // construct the header for block one - let header = Header { timestamp: 1, number: 1, ..Header::default() }; - - // attempt to execute an empty block, this should not fail - executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { header, body: Default::default() }, - senders: vec![], - }, - U256::ZERO, - ) - .into(), - ) - .expect( - "Executing a block with no transactions while Prague is active should not fail", - ); - - // ensure that the block hash was *not* written to storage, since this is before the fork - // was activated - // - // we load the account first, because revm expects it to be - // loaded - executor.with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap()); - assert!(executor.with_state_mut(|state| state - .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) - .unwrap() - .is_zero())); - } - - #[test] - fn eip_2935_fork_activation_genesis() { - let db = create_state_provider_with_block_hashes(0); - - let chain_spec = Arc::new( - ChainSpecBuilder::from(&*MAINNET) - .shanghai_activated() - .with_fork(EthereumHardfork::Prague, ForkCondition::Timestamp(0)) - .build(), - ); - - let header = chain_spec.genesis_header().clone(); - let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); - - // attempt to execute genesis block, this should not fail - executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { header, body: Default::default() }, - senders: vec![], - }, - U256::ZERO, - ) - .into(), - ) - .expect( - "Executing a block with no transactions while Prague is active should not fail", - ); - - // ensure that the block hash was *not* written to storage, since there are no blocks - // preceding genesis - // - // we load the account first, because revm expects it to be - // loaded - executor.with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap()); - assert!(executor.with_state_mut(|state| state - .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) - .unwrap() - .is_zero())); - } - - #[test] - fn eip_2935_fork_activation_within_window_bounds() { - let fork_activation_block = (BLOCKHASH_SERVE_WINDOW - 10) as u64; - let db = create_state_provider_with_block_hashes(fork_activation_block); - - let chain_spec = Arc::new( - ChainSpecBuilder::from(&*MAINNET) - .shanghai_activated() - .with_fork(EthereumHardfork::Prague, ForkCondition::Timestamp(1)) - .build(), - ); - - let header = Header { - parent_hash: B256::random(), - timestamp: 1, - number: fork_activation_block, - requests_hash: Some(EMPTY_REQUESTS_HASH), - 
..Header::default() - }; - let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); - - // attempt to execute the fork activation block, this should not fail - executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { header, body: Default::default() }, - senders: vec![], - }, - U256::ZERO, - ) - .into(), - ) - .expect( - "Executing a block with no transactions while Prague is active should not fail", - ); - - // the hash for the ancestor of the fork activation block should be present - assert!(executor - .with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some())); - assert_ne!( - executor.with_state_mut(|state| state - .storage(HISTORY_STORAGE_ADDRESS, U256::from(fork_activation_block - 1)) - .unwrap()), - U256::ZERO - ); - - // the hash of the block itself should not be in storage - assert!(executor.with_state_mut(|state| state - .storage(HISTORY_STORAGE_ADDRESS, U256::from(fork_activation_block)) - .unwrap() - .is_zero())); - } - - #[test] - fn eip_2935_fork_activation_outside_window_bounds() { - let fork_activation_block = (BLOCKHASH_SERVE_WINDOW + 256) as u64; - let db = create_state_provider_with_block_hashes(fork_activation_block); - - let chain_spec = Arc::new( - ChainSpecBuilder::from(&*MAINNET) - .shanghai_activated() - .with_fork(EthereumHardfork::Prague, ForkCondition::Timestamp(1)) - .build(), - ); - - let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); - - let header = Header { - parent_hash: B256::random(), - timestamp: 1, - number: fork_activation_block, - requests_hash: Some(EMPTY_REQUESTS_HASH), - ..Header::default() - }; - - // attempt to execute the fork activation block, this should not fail - executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { header, body: Default::default() }, - senders: vec![], - }, - U256::ZERO, - ) - .into(), - ) - .expect( - "Executing a block with no transactions while Prague is active should not fail", - ); - - // the hash for the ancestor of the fork activation block should be present - assert!(executor - .with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some())); - assert_ne!( - executor.with_state_mut(|state| state - .storage( - HISTORY_STORAGE_ADDRESS, - U256::from(fork_activation_block % BLOCKHASH_SERVE_WINDOW as u64 - 1) - ) - .unwrap()), - U256::ZERO - ); - } - - #[test] - fn eip_2935_state_transition_inside_fork() { - let db = create_state_provider_with_block_hashes(2); - - let chain_spec = Arc::new( - ChainSpecBuilder::from(&*MAINNET) - .shanghai_activated() - .with_fork(EthereumHardfork::Prague, ForkCondition::Timestamp(0)) - .build(), - ); - - let mut header = chain_spec.genesis_header().clone(); - header.requests_hash = Some(EMPTY_REQUESTS_HASH); - let header_hash = header.hash_slow(); - - let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); - - // attempt to execute the genesis block, this should not fail - executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { header, body: Default::default() }, - senders: vec![], - }, - U256::ZERO, - ) - .into(), - ) - .expect( - "Executing a block with no transactions while Prague is active should not fail", - ); - - // nothing should be written as the genesis has no ancestors - // - // we load the account first, because revm expects it to be - // loaded - 
executor.with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap()); - assert!(executor.with_state_mut(|state| state - .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) - .unwrap() - .is_zero())); - - // attempt to execute block 1, this should not fail - let header = Header { - parent_hash: header_hash, - timestamp: 1, - number: 1, - requests_hash: Some(EMPTY_REQUESTS_HASH), - ..Header::default() - }; - let header_hash = header.hash_slow(); - - executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { header, body: Default::default() }, - senders: vec![], - }, - U256::ZERO, - ) - .into(), - ) - .expect( - "Executing a block with no transactions while Prague is active should not fail", - ); - - // the block hash of genesis should now be in storage, but not block 1 - assert!(executor - .with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some())); - assert_ne!( - executor.with_state_mut(|state| state - .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) - .unwrap()), - U256::ZERO - ); - assert!(executor.with_state_mut(|state| state - .storage(HISTORY_STORAGE_ADDRESS, U256::from(1)) - .unwrap() - .is_zero())); - - // attempt to execute block 2, this should not fail - let header = Header { - parent_hash: header_hash, - timestamp: 1, - number: 2, - requests_hash: Some(EMPTY_REQUESTS_HASH), - ..Header::default() - }; - - executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { header, body: Default::default() }, - senders: vec![], - }, - U256::ZERO, - ) - .into(), - ) - .expect( - "Executing a block with no transactions while Prague is active should not fail", - ); - - // the block hash of genesis and block 1 should now be in storage, but not block 2 - assert!(executor - .with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some())); - assert_ne!( - executor.with_state_mut(|state| state - .storage(HISTORY_STORAGE_ADDRESS, U256::ZERO) - .unwrap()), - U256::ZERO - ); - assert_ne!( - executor.with_state_mut(|state| state - .storage(HISTORY_STORAGE_ADDRESS, U256::from(1)) - .unwrap()), - U256::ZERO - ); - assert!(executor.with_state_mut(|state| state - .storage(HISTORY_STORAGE_ADDRESS, U256::from(2)) - .unwrap() - .is_zero())); - } - - #[test] - fn eip_7002() { - let chain_spec = Arc::new( - ChainSpecBuilder::from(&*MAINNET) - .shanghai_activated() - .with_fork(EthereumHardfork::Prague, ForkCondition::Timestamp(0)) - .build(), - ); - - let mut db = create_state_provider_with_withdrawal_requests_contract(); - - let secp = Secp256k1::new(); - let sender_key_pair = Keypair::new(&secp, &mut generators::rng()); - let sender_address = public_key_to_address(sender_key_pair.public_key()); - - db.insert_account( - sender_address, - Account { nonce: 1, balance: U256::from(ETH_TO_WEI), bytecode_hash: None }, - None, - HashMap::default(), - ); - - // https://github.com/lightclient/sys-asm/blob/9282bdb9fd64e024e27f60f507486ffb2183cba2/test/Withdrawal.t.sol.in#L36 - let validator_public_key = fixed_bytes!("111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111"); - let withdrawal_amount = fixed_bytes!("0203040506070809"); - let input: Bytes = [&validator_public_key[..], &withdrawal_amount[..]].concat().into(); - assert_eq!(input.len(), 56); - - let mut header = chain_spec.genesis_header().clone(); - header.gas_limit = 1_500_000; - // measured - header.gas_used = 135_856; - header.receipts_root = - b256!("b31a3e47b902e9211c4d349af4e4c5604ce388471e79ca008907ae4616bb0ed3"); - - let tx = 
sign_tx_with_key_pair( - sender_key_pair, - Transaction::Legacy(TxLegacy { - chain_id: Some(chain_spec.chain.id()), - nonce: 1, - gas_price: header.base_fee_per_gas.unwrap().into(), - gas_limit: header.gas_used, - to: TxKind::Call(WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS), - // `MIN_WITHDRAWAL_REQUEST_FEE` - value: U256::from(2), - input, - }), - ); - - let provider = executor_provider(chain_spec); - - let executor = provider.executor(StateProviderDatabase::new(&db)); - - let BlockExecutionOutput { receipts, requests, .. } = executor - .execute( - ( - &Block { - header, - body: BlockBody { transactions: vec![tx], ..Default::default() }, - } - .with_recovered_senders() - .unwrap(), - U256::ZERO, - ) - .into(), - ) - .unwrap(); - - let receipt = receipts.first().unwrap(); - assert!(receipt.success); - - assert!(requests[0].is_empty(), "there should be no deposits"); - assert!(!requests[1].is_empty(), "there should be a withdrawal"); - assert!(requests[2].is_empty(), "there should be no consolidations"); - } - - #[test] - fn block_gas_limit_error() { - // Create a chain specification with fork conditions set for Prague - let chain_spec = Arc::new( - ChainSpecBuilder::from(&*MAINNET) - .shanghai_activated() - .with_fork(EthereumHardfork::Prague, ForkCondition::Timestamp(0)) - .build(), - ); - - // Create a state provider with the withdrawal requests contract pre-deployed - let mut db = create_state_provider_with_withdrawal_requests_contract(); - - // Initialize Secp256k1 for key pair generation - let secp = Secp256k1::new(); - // Generate a new key pair for the sender - let sender_key_pair = Keypair::new(&secp, &mut generators::rng()); - // Get the sender's address from the public key - let sender_address = public_key_to_address(sender_key_pair.public_key()); - - // Insert the sender account into the state with a nonce of 1 and a balance of 1 ETH in Wei - db.insert_account( - sender_address, - Account { nonce: 1, balance: U256::from(ETH_TO_WEI), bytecode_hash: None }, - None, - HashMap::default(), - ); - - // Define the validator public key and withdrawal amount as fixed bytes - let validator_public_key = fixed_bytes!("111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111"); - let withdrawal_amount = fixed_bytes!("2222222222222222"); - // Concatenate the validator public key and withdrawal amount into a single byte array - let input: Bytes = [&validator_public_key[..], &withdrawal_amount[..]].concat().into(); - // Ensure the input length is 56 bytes - assert_eq!(input.len(), 56); - - // Create a genesis block header with a specified gas limit and gas used - let mut header = chain_spec.genesis_header().clone(); - header.gas_limit = 1_500_000; - header.gas_used = 134_807; - header.receipts_root = - b256!("b31a3e47b902e9211c4d349af4e4c5604ce388471e79ca008907ae4616bb0ed3"); - - // Create a transaction with a gas limit higher than the block gas limit - let tx = sign_tx_with_key_pair( - sender_key_pair, - Transaction::Legacy(TxLegacy { - chain_id: Some(chain_spec.chain.id()), - nonce: 1, - gas_price: header.base_fee_per_gas.unwrap().into(), - gas_limit: 2_500_000, // higher than block gas limit - to: TxKind::Call(WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS), - value: U256::from(1), - input, - }), - ); - - // Create an executor from the state provider - let executor = executor_provider(chain_spec).executor(StateProviderDatabase::new(&db)); - - // Execute the block and capture the result - let exec_result = executor.execute( - ( - &Block { header, body: BlockBody { 
transactions: vec![tx], ..Default::default() } } - .with_recovered_senders() - .unwrap(), - U256::ZERO, - ) - .into(), - ); - - // Check if the execution result is an error and assert the specific error type - match exec_result { - Ok(_) => panic!("Expected block gas limit error"), - Err(err) => assert_eq!( - *err.as_validation().unwrap(), - BlockValidationError::TransactionGasLimitMoreThanAvailableBlockGas { - transaction_gas_limit: 2_500_000, - block_available_gas: 1_500_000, - } - ), - } - } -} diff --git a/crates/ethereum/node/Cargo.toml b/crates/ethereum/node/Cargo.toml index 29093adc8..a3fe5ed45 100644 --- a/crates/ethereum/node/Cargo.toml +++ b/crates/ethereum/node/Cargo.toml @@ -21,6 +21,7 @@ reth-tracing.workspace = true reth-provider.workspace = true reth-transaction-pool.workspace = true reth-network.workspace = true +reth-evm.workspace = true reth-evm-ethereum.workspace = true reth-consensus.workspace = true reth-auto-seal-consensus.workspace = true diff --git a/crates/ethereum/node/src/evm.rs b/crates/ethereum/node/src/evm.rs index d710d8d8d..bcdcaac6b 100644 --- a/crates/ethereum/node/src/evm.rs +++ b/crates/ethereum/node/src/evm.rs @@ -1,6 +1,8 @@ //! Ethereum EVM support #[doc(inline)] -pub use reth_evm_ethereum::execute::EthExecutorProvider; +pub use reth_evm::execute::BasicBlockExecutorProvider; +#[doc(inline)] +pub use reth_evm_ethereum::execute::{EthExecutionStrategyFactory, EthExecutorProvider}; #[doc(inline)] pub use reth_evm_ethereum::EthEvmConfig; diff --git a/crates/ethereum/node/src/lib.rs b/crates/ethereum/node/src/lib.rs index 37ebc33c2..421cee37f 100644 --- a/crates/ethereum/node/src/lib.rs +++ b/crates/ethereum/node/src/lib.rs @@ -14,7 +14,9 @@ use revm as _; pub use reth_ethereum_engine_primitives::EthEngineTypes; pub mod evm; -pub use evm::{EthEvmConfig, EthExecutorProvider}; +pub use evm::{ + BasicBlockExecutorProvider, EthEvmConfig, EthExecutionStrategyFactory, EthExecutorProvider, +}; pub mod node; pub use node::EthereumNode; diff --git a/crates/ethereum/node/src/node.rs b/crates/ethereum/node/src/node.rs index d3301b208..3df46b485 100644 --- a/crates/ethereum/node/src/node.rs +++ b/crates/ethereum/node/src/node.rs @@ -9,7 +9,8 @@ use reth_chainspec::ChainSpec; use reth_ethereum_engine_primitives::{ EthBuiltPayload, EthPayloadAttributes, EthPayloadBuilderAttributes, EthereumEngineValidator, }; -use reth_evm_ethereum::execute::EthExecutorProvider; +use reth_evm::execute::BasicBlockExecutorProvider; +use reth_evm_ethereum::execute::EthExecutionStrategyFactory; use reth_network::NetworkHandle; use reth_node_api::{ AddOnsContext, ConfigureEvm, EngineValidator, FullNodeComponents, NodePrimitives, @@ -136,7 +137,7 @@ where Node: FullNodeTypes, { type EVM = EthEvmConfig; - type Executor = EthExecutorProvider; + type Executor = BasicBlockExecutorProvider; async fn build_evm( self, @@ -144,7 +145,8 @@ where ) -> eyre::Result<(Self::EVM, Self::Executor)> { let chain_spec = ctx.chain_spec(); let evm_config = EthEvmConfig::new(ctx.chain_spec()); - let executor = EthExecutorProvider::new(chain_spec, evm_config.clone()); + let strategy_factory = EthExecutionStrategyFactory::new(chain_spec, evm_config.clone()); + let executor = BasicBlockExecutorProvider::new(strategy_factory); Ok((evm_config, executor)) } diff --git a/crates/rpc/rpc-builder/tests/it/utils.rs b/crates/rpc/rpc-builder/tests/it/utils.rs index 847de9956..44614ea49 100644 --- a/crates/rpc/rpc-builder/tests/it/utils.rs +++ b/crates/rpc/rpc-builder/tests/it/utils.rs @@ -4,7 +4,8 @@ use 
alloy_rpc_types_engine::{ClientCode, ClientVersionV1}; use reth_beacon_consensus::BeaconConsensusEngineHandle; use reth_chainspec::MAINNET; use reth_ethereum_engine_primitives::{EthEngineTypes, EthereumEngineValidator}; -use reth_evm_ethereum::{execute::EthExecutorProvider, EthEvmConfig}; +use reth_evm::execute::BasicBlockExecutorProvider; +use reth_evm_ethereum::{execute::EthExecutionStrategyFactory, EthEvmConfig}; use reth_network_api::noop::NoopNetwork; use reth_payload_builder::test_utils::spawn_test_payload_service; use reth_provider::test_utils::{NoopProvider, TestCanonStateSubscriptions}; @@ -124,7 +125,7 @@ pub fn test_rpc_builder() -> RpcModuleBuilder< TokioTaskExecutor, TestCanonStateSubscriptions, EthEvmConfig, - EthExecutorProvider, + BasicBlockExecutorProvider, > { RpcModuleBuilder::default() .with_provider(NoopProvider::default()) @@ -133,5 +134,7 @@ pub fn test_rpc_builder() -> RpcModuleBuilder< .with_executor(TokioTaskExecutor::default()) .with_events(TestCanonStateSubscriptions::default()) .with_evm_config(EthEvmConfig::new(MAINNET.clone())) - .with_block_executor(EthExecutorProvider::ethereum(MAINNET.clone())) + .with_block_executor( + BasicBlockExecutorProvider::new(EthExecutionStrategyFactory::mainnet()), + ) } diff --git a/crates/stages/stages/src/stages/execution.rs b/crates/stages/stages/src/stages/execution.rs index 7bb6ebc59..47cd9d044 100644 --- a/crates/stages/stages/src/stages/execution.rs +++ b/crates/stages/stages/src/stages/execution.rs @@ -667,7 +667,8 @@ mod tests { use assert_matches::assert_matches; use reth_chainspec::ChainSpecBuilder; use reth_db_api::{models::AccountBeforeTx, transaction::DbTxMut}; - use reth_evm_ethereum::execute::EthExecutorProvider; + use reth_evm::execute::BasicBlockExecutorProvider; + use reth_evm_ethereum::execute::EthExecutionStrategyFactory; use reth_execution_errors::BlockValidationError; use reth_primitives::{Account, Bytecode, SealedBlock, StorageEntry}; use reth_provider::{ @@ -678,10 +679,11 @@ mod tests { use reth_stages_api::StageUnitCheckpoint; use std::collections::BTreeMap; - fn stage() -> ExecutionStage { - let executor_provider = EthExecutorProvider::ethereum(Arc::new( + fn stage() -> ExecutionStage> { + let strategy_factory = EthExecutionStrategyFactory::ethereum(Arc::new( ChainSpecBuilder::mainnet().berlin_activated().build(), )); + let executor_provider = BasicBlockExecutorProvider::new(strategy_factory); ExecutionStage::new( executor_provider, ExecutionStageThresholds { diff --git a/examples/custom-evm/src/main.rs b/examples/custom-evm/src/main.rs index 9c421f9c6..55063fc9b 100644 --- a/examples/custom-evm/src/main.rs +++ b/examples/custom-evm/src/main.rs @@ -31,7 +31,7 @@ use reth_node_api::{ use reth_node_core::{args::RpcServerArgs, node_config::NodeConfig}; use reth_node_ethereum::{ node::{EthereumAddOns, EthereumPayloadBuilder}, - EthExecutorProvider, EthereumNode, + BasicBlockExecutorProvider, EthExecutionStrategyFactory, EthereumNode, }; use reth_primitives::{ revm_primitives::{CfgEnvWithHandlerCfg, TxEnv}, @@ -158,7 +158,7 @@ where Node: FullNodeTypes>, { type EVM = MyEvmConfig; - type Executor = EthExecutorProvider; + type Executor = BasicBlockExecutorProvider>; async fn build_evm( self, @@ -166,7 +166,10 @@ where ) -> eyre::Result<(Self::EVM, Self::Executor)> { Ok(( MyEvmConfig::new(ctx.chain_spec()), - EthExecutorProvider::new(ctx.chain_spec(), MyEvmConfig::new(ctx.chain_spec())), + BasicBlockExecutorProvider::new(EthExecutionStrategyFactory::new( + ctx.chain_spec(), + 
MyEvmConfig::new(ctx.chain_spec()), + )), )) } } diff --git a/examples/stateful-precompile/src/main.rs b/examples/stateful-precompile/src/main.rs index 26ebdfe41..b0165e4de 100644 --- a/examples/stateful-precompile/src/main.rs +++ b/examples/stateful-precompile/src/main.rs @@ -20,7 +20,10 @@ use reth::{ use reth_chainspec::{Chain, ChainSpec}; use reth_node_api::{ConfigureEvm, ConfigureEvmEnv, FullNodeTypes, NodeTypes}; use reth_node_core::{args::RpcServerArgs, node_config::NodeConfig}; -use reth_node_ethereum::{node::EthereumAddOns, EthEvmConfig, EthExecutorProvider, EthereumNode}; +use reth_node_ethereum::{ + node::EthereumAddOns, BasicBlockExecutorProvider, EthEvmConfig, EthExecutionStrategyFactory, + EthereumNode, +}; use reth_primitives::{ revm_primitives::{SpecId, StatefulPrecompileMut}, Header, TransactionSigned, @@ -224,7 +227,7 @@ where Node: FullNodeTypes>, { type EVM = MyEvmConfig; - type Executor = EthExecutorProvider; + type Executor = BasicBlockExecutorProvider>; async fn build_evm( self, @@ -234,7 +237,13 @@ where inner: EthEvmConfig::new(ctx.chain_spec()), precompile_cache: self.precompile_cache.clone(), }; - Ok((evm_config.clone(), EthExecutorProvider::new(ctx.chain_spec(), evm_config))) + Ok(( + evm_config.clone(), + BasicBlockExecutorProvider::new(EthExecutionStrategyFactory::new( + ctx.chain_spec(), + evm_config, + )), + )) } } From 6c026daf920b28cd7e97e63ef20fea6c58af0f45 Mon Sep 17 00:00:00 2001 From: greged93 <82421016+greged93@users.noreply.github.com> Date: Sat, 19 Oct 2024 22:20:23 +0200 Subject: [PATCH 153/425] docs: explain how to add metrics to grafana (#11875) --- etc/README.md | 79 ++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 75 insertions(+), 4 deletions(-) diff --git a/etc/README.md b/etc/README.md index f80b5b774..28c71b046 100644 --- a/etc/README.md +++ b/etc/README.md @@ -2,7 +2,8 @@ This directory contains miscellaneous files, such as example Grafana dashboards and Prometheus configuration. -The files in this directory may undergo a lot of changes while reth is unstable, so do not expect them to necessarily be up to date. +The files in this directory may undergo a lot of changes while reth is unstable, so do not expect them to necessarily be +up to date. ### Overview @@ -11,8 +12,78 @@ The files in this directory may undergo a lot of changes while reth is unstable, ### Docker Compose -To run Reth, Grafana or Prometheus with Docker Compose, refer to the [docker docs](/book/installation/docker.md#using-docker-compose). +To run Reth, Grafana or Prometheus with Docker Compose, refer to +the [docker docs](/book/installation/docker.md#using-docker-compose). -### Import Grafana dashboards +### Grafana -Running Grafana in Docker makes it possible to import existing dashboards, refer to [docs on how to run only Grafana in Docker](/book/installation/docker.md#using-docker-compose#run-only-grafana-in-docker). \ No newline at end of file +#### Adding a new metric to Grafana + +To set up a new metric in Reth and its Grafana dashboard: + +1. Add the metric to the codebase following the [metrics section](../docs/design/metrics.md#creating-metrics) + documentation. + +2. Build the Reth image: + + ```bash + docker build . -t reth:local + ``` + + Modify the [docker-compose](./docker-compose.yml) file to use your locally built image for the Reth service. + +3. Run Docker Compose: + + ```bash + docker compose -f etc/docker-compose.yml -f etc/lighthouse.yml up -d + ``` + +4. 
Access Grafana: + + - Open `http://localhost:3000/` in a browser + - Log in with username and password `admin` + - Navigate to the `Dashboards` tab + +5. Create or modify a dashboard: + + - Select an existing dashboard or create a new one + - Click `Add` > `Visualization` to create a new panel + +6. Configure your metric panel: + + - Set a panel title and description + - Select metric(s) from the `Metrics browser` or use the `PromQL` terminal + - Document your metric(s) by setting units, legends, etc. + - When adding multiple metrics, use field overwrites if needed + +7. Save and arrange: + + - Click `Apply` to save the panel + - Drag the panel to desired position on the dashboard + +8. Export the dashboard: + + - Click `Share` > `Export` + - Toggle `Export for sharing externally` + - Click `Save to file` + +9. Update dashboard file: + - Replace the content of the corresponding file in the [dashboards folder](./grafana/dashboards) with the exported + JSON + +Your new metric is now integrated into the Reth Grafana dashboard. + +#### Import Grafana dashboards + +In order to import new Grafana dashboards or update a dashboard: + +1. Go to `Home` > `Dashboards` + +2. Click `New` > `Import` + +3. Drag the JSON dashboard file to import it + +4. If updating an existing dashboard, you will need to change the name and UID of the imported dashboard in order to + avoid conflict + +5. Delete the old dashboard From cf4a4454ecec03bab4be9cf3d8bccfbd0bb215df Mon Sep 17 00:00:00 2001 From: liamaharon Date: Sun, 20 Oct 2024 07:36:11 +1100 Subject: [PATCH 154/425] fix: feature propagation (#11888) Co-authored-by: Matthias Seitz Co-authored-by: Oliver --- .config/zepter.yaml | 40 +++++++++++++++++ .github/workflows/lint.yml | 17 ++++++++ Cargo.lock | 27 +++++++++++- Cargo.toml | 2 +- bin/reth-bench/Cargo.toml | 13 ++++-- bin/reth/Cargo.toml | 11 ++++- crates/blockchain-tree/Cargo.toml | 23 +++++++++- crates/chain-state/Cargo.toml | 14 +++--- crates/chainspec/Cargo.toml | 29 ++++++++++--- crates/consensus/auto-seal/Cargo.toml | 10 ++++- crates/consensus/beacon/Cargo.toml | 10 +++-- crates/consensus/consensus/Cargo.toml | 10 ++++- crates/engine/local/Cargo.toml | 6 ++- crates/engine/tree/Cargo.toml | 24 ++++++++--- crates/engine/util/Cargo.toml | 7 ++- crates/ethereum-forks/Cargo.toml | 27 +++++++++--- crates/ethereum/evm/Cargo.toml | 12 +++++- crates/ethereum/node/Cargo.toml | 15 ++++++- crates/evm/Cargo.toml | 23 +++++++++- crates/evm/execution-errors/Cargo.toml | 7 ++- crates/evm/execution-types/Cargo.toml | 24 ++++++++--- crates/exex/exex/Cargo.toml | 12 +++++- crates/exex/types/Cargo.toml | 15 ++++++- crates/net/discv4/Cargo.toml | 11 ++++- crates/net/dns/Cargo.toml | 13 +++++- crates/net/downloaders/Cargo.toml | 16 ++++--- crates/net/eth-wire-types/Cargo.toml | 24 ++++++++--- crates/net/eth-wire/Cargo.toml | 19 +++++++-- crates/net/network-api/Cargo.toml | 8 +++- crates/net/network/Cargo.toml | 32 +++++++++++++- crates/net/p2p/Cargo.toml | 14 +++++- crates/node/builder/Cargo.toml | 19 ++++++++- crates/node/core/Cargo.toml | 14 ++++-- crates/optimism/bin/Cargo.toml | 10 ++++- crates/optimism/chainspec/Cargo.toml | 18 ++++---- crates/optimism/cli/Cargo.toml | 13 +++--- crates/optimism/evm/Cargo.toml | 21 ++++++--- crates/optimism/hardforks/Cargo.toml | 12 +++++- crates/optimism/node/Cargo.toml | 42 +++++++++++++----- crates/optimism/payload/Cargo.toml | 11 +++-- crates/optimism/rpc/Cargo.toml | 9 ++-- crates/optimism/storage/Cargo.toml | 6 ++- crates/payload/builder/Cargo.toml | 8 +++- 
 crates/primitives-traits/Cargo.toml | 34 +++++++++++----
 crates/primitives/Cargo.toml | 59 +++++++++++++++++++-------
 crates/revm/Cargo.toml | 21 +++++++--
 crates/stages/api/Cargo.toml | 7 ++-
 crates/stages/stages/Cargo.toml | 25 ++++++++---
 crates/storage/codecs/Cargo.toml | 10 ++++-
 crates/storage/db-api/Cargo.toml | 26 +++++++++---
 crates/storage/db-models/Cargo.toml | 13 +++++-
 crates/storage/db/Cargo.toml | 24 +++++++++--
 crates/storage/errors/Cargo.toml | 6 ++-
 crates/storage/provider/Cargo.toml | 45 +++++++++++++++-----
 crates/transaction-pool/Cargo.toml | 40 ++++++++++++++---
 crates/trie/common/Cargo.toml | 17 ++++++--
 crates/trie/db/Cargo.toml | 22 +++++++++-
 crates/trie/trie/Cargo.toml | 20 +++++++--
 testing/ef-tests/Cargo.toml | 6 ++-
 59 files changed, 876 insertions(+), 197 deletions(-)
 create mode 100644 .config/zepter.yaml

diff --git a/.config/zepter.yaml b/.config/zepter.yaml
new file mode 100644
index 000000000..8c6425f4f
--- /dev/null
+++ b/.config/zepter.yaml
@@ -0,0 +1,40 @@
+version:
+  format: 1
+  # Minimum zepter version that is expected to work. This is just for printing a nice error
+  # message when someone tries to use an older version.
+  binary: 0.13.2
+
+# The examples in the following comments assume crate `A` to have a dependency on crate `B`.
+workflows:
+  check:
+    - [
+        "lint",
+        # Check that `A` activates the features of `B`.
+        "propagate-feature",
+        # These are the features to check:
+        "--features=std,optimism,dev,asm-keccak,jemalloc,jemalloc-prof,tracy-allocator,serde-bincode-compat,serde,test-utils,arbitrary,bench",
+        # Do not try to add a new section into `[features]` of `A` only because `B` exposes that feature. There are edge-cases where this is still needed, but we can add them manually.
+        "--left-side-feature-missing=ignore",
+        # Ignore the case that `A` is outside of the workspace. Otherwise it will report errors in external dependencies that we have no influence on.
+        "--left-side-outside-workspace=ignore",
+        # Auxiliary flags:
+        "--offline",
+        "--locked",
+        "--show-path",
+        "--quiet",
+      ]
+  default:
+    # Running `zepter` with no subcommand will check & fix.
+    - [$check.0, "--fix"]
+
+# Will be displayed when any workflow fails:
+help:
+  text: |
+    Reth uses the Zepter CLI to detect abnormalities in Cargo features, e.g. missing propagation.
+
+    It looks like one or more checks failed; please check the console output.
+
+    You can try to automatically address them by installing zepter (`cargo install zepter --locked`) and simply running `zepter` in the workspace root.
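+
+  # For illustration (hypothetical dependency name `b`): the propagation pattern
+  # the check above enforces is that a crate forwards shared features to its
+  # dependencies in its own `[features]` section, as the `Cargo.toml` hunks
+  # below do, e.g.:
+  #
+  #   [dependencies]
+  #   b = { path = "../b", default-features = false }
+  #
+  #   [features]
+  #   std = ["b/std"]
+  #   test-utils = ["b/test-utils"]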
+ links: + - "https://github.com/paradigmxyz/reth/pull/11888" + - "https://github.com/ggwpez/zepter" diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 65c01c3a4..1921859c2 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -219,6 +219,22 @@ jobs: env: RUSTFLAGS: -D warnings + # Check crates correctly propagate features + feature-propagation: + runs-on: ubuntu-latest + timeout-minutes: 20 + steps: + - uses: actions/checkout@v4 + - name: fetch deps + run: | + # Eagerly pull dependencies + time cargo metadata --format-version=1 --locked > /dev/null + - name: run zepter + run: | + cargo install zepter -f --locked + zepter --version + time zepter run check + lint-success: name: lint success runs-on: ubuntu-latest @@ -236,6 +252,7 @@ jobs: - grafana - no-test-deps - features + - feature-propagation timeout-minutes: 30 steps: - name: Decide whether the needed jobs succeeded or failed diff --git a/Cargo.lock b/Cargo.lock index 25a62d0c0..4def3dcab 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -516,6 +516,7 @@ dependencies = [ "alloy-rlp", "alloy-serde", "alloy-sol-types", + "arbitrary", "derive_more 1.0.0", "itertools 0.13.0", "jsonrpsee-types", @@ -1497,6 +1498,17 @@ dependencies = [ "tinyvec", ] +[[package]] +name = "bstr" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" +dependencies = [ + "memchr", + "regex-automata 0.4.8", + "serde", +] + [[package]] name = "bumpalo" version = "3.16.0" @@ -2258,6 +2270,7 @@ dependencies = [ "lock_api", "once_cell", "parking_lot_core 0.9.10", + "serde", ] [[package]] @@ -3318,6 +3331,7 @@ version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ + "serde", "typenum", "version_check", "zeroize", @@ -4512,7 +4526,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -4623,6 +4637,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47186c6da4d81ca383c7c47c1bfc80f4b95f4720514d860a5407aaf4233f9588" dependencies = [ "linked-hash-map", + "serde", ] [[package]] @@ -4645,6 +4660,7 @@ checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", + "serde", ] [[package]] @@ -5009,6 +5025,7 @@ dependencies = [ "libc", "log", "mio 0.8.11", + "serde", "walkdir", "windows-sys 0.48.0", ] @@ -5277,6 +5294,7 @@ dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", "alloy-serde", + "arbitrary", "op-alloy-consensus", "serde", "serde_json", @@ -5370,6 +5388,7 @@ version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" dependencies = [ + "arbitrary", "arrayvec", "bitvec", "byte-slice-cast", @@ -10098,6 +10117,10 @@ name = "similar" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" +dependencies = [ + "bstr", + "unicode-segmentation", +] [[package]] name = "similar-asserts" @@ -10106,6 +10129,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"cfe85670573cd6f0fa97940f26e7e6601213c3b0555246c24234131f88c5709e" dependencies = [ "console", + "serde", "similar", ] @@ -11016,6 +11040,7 @@ dependencies = [ "parking_lot 0.12.3", "rand 0.8.5", "resolv-conf", + "serde", "smallvec", "thiserror", "tokio", diff --git a/Cargo.toml b/Cargo.toml index 6c66e501e..5b6912c33 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -585,7 +585,7 @@ pprof = "0.13" proptest = "1.4" proptest-derive = "0.5" serial_test = { default-features = false, version = "3" } -similar-asserts = { default-features = false, version = "1.5.0" } +similar-asserts = { version = "1.5.0", features = ["serde"] } tempfile = "3.8" test-fuzz = "6" diff --git a/bin/reth-bench/Cargo.toml b/bin/reth-bench/Cargo.toml index e4e40daec..03844633a 100644 --- a/bin/reth-bench/Cargo.toml +++ b/bin/reth-bench/Cargo.toml @@ -76,9 +76,16 @@ reth-tracing.workspace = true [features] default = ["jemalloc"] -asm-keccak = ["reth-primitives/asm-keccak"] - -jemalloc = ["reth-cli-util/jemalloc"] +asm-keccak = [ + "reth-primitives/asm-keccak", + "reth-node-core/asm-keccak", + "alloy-primitives/asm-keccak" +] + +jemalloc = [ + "reth-cli-util/jemalloc", + "reth-node-core/jemalloc" +] jemalloc-prof = ["reth-cli-util/jemalloc-prof"] tracy-allocator = ["reth-cli-util/tracy-allocator"] diff --git a/bin/reth/Cargo.toml b/bin/reth/Cargo.toml index 8b2b77dd6..8380915d4 100644 --- a/bin/reth/Cargo.toml +++ b/bin/reth/Cargo.toml @@ -103,14 +103,21 @@ default = ["jemalloc"] dev = ["reth-cli-commands/dev"] -asm-keccak = ["reth-node-core/asm-keccak", "reth-primitives/asm-keccak"] +asm-keccak = [ + "reth-node-core/asm-keccak", + "reth-primitives/asm-keccak", + "alloy-primitives/asm-keccak" +] jemalloc = [ "reth-cli-util/jemalloc", "reth-node-core/jemalloc", "reth-node-metrics/jemalloc", ] -jemalloc-prof = ["reth-cli-util/jemalloc"] +jemalloc-prof = [ + "reth-cli-util/jemalloc", + "reth-cli-util/jemalloc-prof" +] tracy-allocator = ["reth-cli-util/tracy-allocator"] min-error-logs = ["tracing/release_max_level_error"] diff --git a/crates/blockchain-tree/Cargo.toml b/crates/blockchain-tree/Cargo.toml index cff117c92..aa8fab16f 100644 --- a/crates/blockchain-tree/Cargo.toml +++ b/crates/blockchain-tree/Cargo.toml @@ -63,5 +63,24 @@ alloy-genesis.workspace = true alloy-consensus.workspace = true [features] -test-utils = [] -optimism = ["reth-primitives/optimism", "reth-provider/optimism"] +test-utils = [ + "reth-chainspec/test-utils", + "reth-consensus/test-utils", + "reth-evm/test-utils", + "reth-network/test-utils", + "reth-primitives/test-utils", + "reth-revm/test-utils", + "reth-stages-api/test-utils", + "reth-db/test-utils", + "reth-db-api/test-utils", + "reth-provider/test-utils", + "reth-trie-db/test-utils", + "reth-trie/test-utils" +] +optimism = [ + "reth-primitives/optimism", + "reth-provider/optimism", + "reth-execution-types/optimism", + "reth-db/optimism", + "reth-db-api/optimism" +] diff --git a/crates/chain-state/Cargo.toml b/crates/chain-state/Cargo.toml index c9691bec4..9a88a3c54 100644 --- a/crates/chain-state/Cargo.toml +++ b/crates/chain-state/Cargo.toml @@ -56,9 +56,13 @@ revm.workspace = true [features] test-utils = [ - "alloy-signer", - "alloy-signer-local", - "alloy-consensus", - "rand", - "revm" + "alloy-signer", + "alloy-signer-local", + "alloy-consensus", + "rand", + "revm", + "reth-chainspec/test-utils", + "reth-primitives/test-utils", + "reth-trie/test-utils", + "revm?/test-utils" ] diff --git a/crates/chainspec/Cargo.toml b/crates/chainspec/Cargo.toml index 87df28322..5bac582cd 100644 --- 
a/crates/chainspec/Cargo.toml +++ b/crates/chainspec/Cargo.toml @@ -40,11 +40,26 @@ alloy-genesis.workspace = true [features] default = ["std"] std = [ - "alloy-chains/std", - "alloy-eips/std", - "alloy-genesis/std", - "alloy-primitives/std", - "alloy-trie/std", + "alloy-chains/std", + "alloy-eips/std", + "alloy-genesis/std", + "alloy-primitives/std", + "alloy-trie/std", + "reth-primitives-traits/std", + "alloy-consensus/std", + "once_cell/std" +] +arbitrary = [ + "alloy-chains/arbitrary", + "reth-ethereum-forks/arbitrary", + "reth-primitives-traits/arbitrary", + "reth-trie-common/arbitrary", + "alloy-consensus/arbitrary", + "alloy-eips/arbitrary", + "alloy-primitives/arbitrary", + "alloy-trie/arbitrary" +] +test-utils = [ + "reth-primitives-traits/test-utils", + "reth-trie-common/test-utils" ] -arbitrary = ["alloy-chains/arbitrary"] -test-utils = [] diff --git a/crates/consensus/auto-seal/Cargo.toml b/crates/consensus/auto-seal/Cargo.toml index 249858711..f2bfb43bc 100644 --- a/crates/consensus/auto-seal/Cargo.toml +++ b/crates/consensus/auto-seal/Cargo.toml @@ -46,4 +46,12 @@ tokio-stream.workspace = true tracing.workspace = true [features] -optimism = ["reth-provider/optimism", "reth-optimism-consensus"] +optimism = [ + "reth-provider/optimism", + "reth-optimism-consensus", + "reth-beacon-consensus/optimism", + "reth-execution-types/optimism", + "reth-optimism-consensus?/optimism", + "reth-primitives/optimism", + "revm-primitives/optimism" +] diff --git a/crates/consensus/beacon/Cargo.toml b/crates/consensus/beacon/Cargo.toml index 192ae2b93..dd1e33931 100644 --- a/crates/consensus/beacon/Cargo.toml +++ b/crates/consensus/beacon/Cargo.toml @@ -78,8 +78,10 @@ assert_matches.workspace = true [features] optimism = [ - "reth-chainspec", - "reth-primitives/optimism", - "reth-provider/optimism", - "reth-blockchain-tree/optimism", + "reth-chainspec", + "reth-primitives/optimism", + "reth-provider/optimism", + "reth-blockchain-tree/optimism", + "reth-db/optimism", + "reth-db-api/optimism" ] diff --git a/crates/consensus/consensus/Cargo.toml b/crates/consensus/consensus/Cargo.toml index 1736caab5..2faf3f2ac 100644 --- a/crates/consensus/consensus/Cargo.toml +++ b/crates/consensus/consensus/Cargo.toml @@ -24,5 +24,11 @@ derive_more.workspace = true [features] default = ["std"] -std = [] -test-utils = [] +std = [ + "reth-primitives/std", + "alloy-primitives/std", + "alloy-eips/std" +] +test-utils = [ + "reth-primitives/test-utils" +] diff --git a/crates/engine/local/Cargo.toml b/crates/engine/local/Cargo.toml index f22ab1f8d..d9dc63253 100644 --- a/crates/engine/local/Cargo.toml +++ b/crates/engine/local/Cargo.toml @@ -46,4 +46,8 @@ op-alloy-rpc-types-engine = { workspace = true, optional = true } workspace = true [features] -optimism = ["op-alloy-rpc-types-engine"] +optimism = [ + "op-alloy-rpc-types-engine", + "reth-beacon-consensus/optimism", + "reth-provider/optimism" +] diff --git a/crates/engine/tree/Cargo.toml b/crates/engine/tree/Cargo.toml index 3a618f4fd..6fe741db8 100644 --- a/crates/engine/tree/Cargo.toml +++ b/crates/engine/tree/Cargo.toml @@ -79,11 +79,21 @@ assert_matches.workspace = true [features] test-utils = [ - "reth-db/test-utils", - "reth-chain-state/test-utils", - "reth-network-p2p/test-utils", - "reth-prune-types", - "reth-stages/test-utils", - "reth-static-file", - "reth-tracing", + "reth-db/test-utils", + "reth-chain-state/test-utils", + "reth-network-p2p/test-utils", + "reth-prune-types", + "reth-stages/test-utils", + "reth-static-file", + "reth-tracing", + 
"reth-blockchain-tree/test-utils", + "reth-chainspec/test-utils", + "reth-consensus/test-utils", + "reth-evm/test-utils", + "reth-payload-builder/test-utils", + "reth-primitives/test-utils", + "reth-revm/test-utils", + "reth-stages-api/test-utils", + "reth-provider/test-utils", + "reth-trie/test-utils" ] diff --git a/crates/engine/util/Cargo.toml b/crates/engine/util/Cargo.toml index 35a7e74bb..07aa40165 100644 --- a/crates/engine/util/Cargo.toml +++ b/crates/engine/util/Cargo.toml @@ -50,4 +50,9 @@ itertools.workspace = true tracing.workspace = true [features] -optimism = ["reth-beacon-consensus/optimism"] +optimism = [ + "reth-beacon-consensus/optimism", + "reth-primitives/optimism", + "reth-provider/optimism", + "revm-primitives/optimism" +] diff --git a/crates/ethereum-forks/Cargo.toml b/crates/ethereum-forks/Cargo.toml index 7b4b6c53c..9f7ce7ee8 100644 --- a/crates/ethereum-forks/Cargo.toml +++ b/crates/ethereum-forks/Cargo.toml @@ -39,12 +39,27 @@ alloy-consensus.workspace = true [features] default = ["std", "serde", "rustc-hash"] -arbitrary = ["dep:arbitrary", "dep:proptest", "dep:proptest-derive"] -serde = ["dep:serde"] +arbitrary = [ + "dep:arbitrary", + "dep:proptest", + "dep:proptest-derive", + "alloy-chains/arbitrary", + "alloy-consensus/arbitrary", + "alloy-primitives/arbitrary" +] +serde = [ + "dep:serde", + "alloy-chains/serde", + "alloy-consensus/serde", + "alloy-primitives/serde" +] std = [ - "alloy-chains/std", - "alloy-primitives/std", - "thiserror-no-std/std", - "rustc-hash/std", + "alloy-chains/std", + "alloy-primitives/std", + "thiserror-no-std/std", + "rustc-hash/std", + "alloy-consensus/std", + "once_cell/std", + "serde?/std" ] rustc-hash = ["dep:rustc-hash"] diff --git a/crates/ethereum/evm/Cargo.toml b/crates/ethereum/evm/Cargo.toml index 8cbc92f90..17e870e61 100644 --- a/crates/ethereum/evm/Cargo.toml +++ b/crates/ethereum/evm/Cargo.toml @@ -41,4 +41,14 @@ alloy-genesis.workspace = true [features] default = ["std"] -std = [] +std = [ + "reth-consensus/std", + "reth-primitives/std", + "reth-revm/std", + "alloy-consensus/std", + "alloy-eips/std", + "alloy-genesis/std", + "alloy-primitives/std", + "revm-primitives/std", + "secp256k1/std" +] diff --git a/crates/ethereum/node/Cargo.toml b/crates/ethereum/node/Cargo.toml index a3fe5ed45..11555cdc4 100644 --- a/crates/ethereum/node/Cargo.toml +++ b/crates/ethereum/node/Cargo.toml @@ -55,4 +55,17 @@ alloy-consensus.workspace = true [features] default = [] -test-utils = ["reth-node-builder/test-utils"] +test-utils = [ + "reth-node-builder/test-utils", + "reth-chainspec/test-utils", + "reth-consensus/test-utils", + "reth-network/test-utils", + "reth-payload-builder/test-utils", + "reth-primitives/test-utils", + "reth-revm/test-utils", + "reth-db/test-utils", + "reth-provider/test-utils", + "reth-transaction-pool/test-utils", + "revm/test-utils", + "reth-evm/test-utils" +] diff --git a/crates/evm/Cargo.toml b/crates/evm/Cargo.toml index 6c16973b2..6a1e1fe0d 100644 --- a/crates/evm/Cargo.toml +++ b/crates/evm/Cargo.toml @@ -42,5 +42,24 @@ reth-ethereum-forks.workspace = true [features] default = ["std"] -std = ["dep:metrics", "dep:reth-metrics"] -test-utils = ["dep:parking_lot"] +std = [ + "dep:metrics", + "dep:reth-metrics", + "reth-consensus/std", + "reth-primitives/std", + "reth-primitives-traits/std", + "reth-revm/std", + "alloy-eips/std", + "alloy-primitives/std", + "revm-primitives/std", + "revm/std" +] +test-utils = [ + "dep:parking_lot", + "reth-chainspec/test-utils", + "reth-consensus/test-utils", + 
"reth-primitives/test-utils", + "reth-primitives-traits/test-utils", + "reth-revm/test-utils", + "revm/test-utils" +] diff --git a/crates/evm/execution-errors/Cargo.toml b/crates/evm/execution-errors/Cargo.toml index d4f8534e7..721c80551 100644 --- a/crates/evm/execution-errors/Cargo.toml +++ b/crates/evm/execution-errors/Cargo.toml @@ -26,4 +26,9 @@ derive_more.workspace = true [features] default = ["std"] -std = ["reth-consensus/std"] +std = [ + "reth-consensus/std", + "alloy-eips/std", + "alloy-primitives/std", + "revm-primitives/std" +] diff --git a/crates/evm/execution-types/Cargo.toml b/crates/evm/execution-types/Cargo.toml index 49e962302..b6af3dee9 100644 --- a/crates/evm/execution-types/Cargo.toml +++ b/crates/evm/execution-types/Cargo.toml @@ -33,10 +33,24 @@ reth-primitives = { workspace = true, features = ["arbitrary", "test-utils"] } [features] default = ["std"] optimism = ["reth-primitives/optimism", "revm/optimism"] -serde = ["dep:serde", "reth-trie/serde", "revm/serde"] +serde = [ + "dep:serde", + "reth-trie/serde", + "revm/serde", + "alloy-eips/serde", + "alloy-primitives/serde", + "rand/serde" +] serde-bincode-compat = [ - "reth-primitives/serde-bincode-compat", - "reth-trie/serde-bincode-compat", - "serde_with", + "reth-primitives/serde-bincode-compat", + "reth-trie/serde-bincode-compat", + "serde_with", + "alloy-eips/serde-bincode-compat" +] +std = [ + "reth-primitives/std", + "alloy-eips/std", + "alloy-primitives/std", + "revm/std", + "serde?/std" ] -std = [] diff --git a/crates/exex/exex/Cargo.toml b/crates/exex/exex/Cargo.toml index 27a9d1576..903e11e78 100644 --- a/crates/exex/exex/Cargo.toml +++ b/crates/exex/exex/Cargo.toml @@ -70,4 +70,14 @@ tempfile.workspace = true [features] default = [] -serde = ["reth-provider/serde", "reth-exex-types/serde"] +serde = [ + "reth-provider/serde", + "reth-exex-types/serde", + "reth-revm/serde", + "alloy-consensus/serde", + "alloy-eips/serde", + "alloy-primitives/serde", + "parking_lot/serde", + "rand/serde", + "secp256k1/serde" +] diff --git a/crates/exex/types/Cargo.toml b/crates/exex/types/Cargo.toml index a146cbc22..51097d610 100644 --- a/crates/exex/types/Cargo.toml +++ b/crates/exex/types/Cargo.toml @@ -33,5 +33,16 @@ rand.workspace = true [features] default = [] -serde = ["dep:serde", "reth-execution-types/serde"] -serde-bincode-compat = ["reth-execution-types/serde-bincode-compat", "serde_with"] +serde = [ + "dep:serde", + "reth-execution-types/serde", + "alloy-eips/serde", + "alloy-primitives/serde", + "rand/serde" +] +serde-bincode-compat = [ + "reth-execution-types/serde-bincode-compat", + "serde_with", + "reth-primitives/serde-bincode-compat", + "alloy-eips/serde-bincode-compat" +] diff --git a/crates/net/discv4/Cargo.toml b/crates/net/discv4/Cargo.toml index fde652ef3..f008d03b5 100644 --- a/crates/net/discv4/Cargo.toml +++ b/crates/net/discv4/Cargo.toml @@ -51,5 +51,14 @@ reth-tracing.workspace = true [features] default = ["serde"] -serde = ["dep:serde"] +serde = [ + "dep:serde", + "alloy-primitives/serde", + "discv5/serde", + "enr/serde", + "generic-array/serde", + "parking_lot/serde", + "rand?/serde", + "secp256k1/serde" +] test-utils = ["dep:rand"] diff --git a/crates/net/dns/Cargo.toml b/crates/net/dns/Cargo.toml index 2af72afce..a52f65057 100644 --- a/crates/net/dns/Cargo.toml +++ b/crates/net/dns/Cargo.toml @@ -48,4 +48,15 @@ reth-tracing.workspace = true rand.workspace = true [features] -serde = ["dep:serde", "dep:serde_with"] +serde = [ + "dep:serde", + "dep:serde_with", + "alloy-chains/serde", + 
"alloy-primitives/serde", + "enr/serde", + "linked_hash_set/serde", + "parking_lot/serde", + "rand/serde", + "secp256k1/serde", + "trust-dns-resolver/serde" +] diff --git a/crates/net/downloaders/Cargo.toml b/crates/net/downloaders/Cargo.toml index 5e7f4dd47..272db6fc6 100644 --- a/crates/net/downloaders/Cargo.toml +++ b/crates/net/downloaders/Cargo.toml @@ -71,10 +71,14 @@ tempfile.workspace = true [features] test-utils = [ - "dep:tempfile", - "dep:reth-db-api", - "reth-db/test-utils", - "reth-consensus/test-utils", - "reth-network-p2p/test-utils", - "reth-testing-utils", + "dep:tempfile", + "dep:reth-db-api", + "reth-db/test-utils", + "reth-consensus/test-utils", + "reth-network-p2p/test-utils", + "reth-testing-utils", + "reth-chainspec/test-utils", + "reth-primitives/test-utils", + "reth-db-api?/test-utils", + "reth-provider/test-utils" ] diff --git a/crates/net/eth-wire-types/Cargo.toml b/crates/net/eth-wire-types/Cargo.toml index 82c9fe37a..1d2b54872 100644 --- a/crates/net/eth-wire-types/Cargo.toml +++ b/crates/net/eth-wire-types/Cargo.toml @@ -45,10 +45,22 @@ alloy-consensus.workspace = true [features] arbitrary = [ - "reth-primitives/arbitrary", - "alloy-chains/arbitrary", - "dep:arbitrary", - "dep:proptest", - "dep:proptest-arbitrary-interop", + "reth-primitives/arbitrary", + "alloy-chains/arbitrary", + "dep:arbitrary", + "dep:proptest", + "dep:proptest-arbitrary-interop", + "reth-chainspec/arbitrary", + "alloy-consensus/arbitrary", + "alloy-eips/arbitrary", + "alloy-primitives/arbitrary" +] +serde = [ + "dep:serde", + "alloy-chains/serde", + "alloy-consensus/serde", + "alloy-eips/serde", + "alloy-primitives/serde", + "bytes/serde", + "rand/serde" ] -serde = ["dep:serde"] diff --git a/crates/net/eth-wire/Cargo.toml b/crates/net/eth-wire/Cargo.toml index 6eea4bc4a..b0e256fdf 100644 --- a/crates/net/eth-wire/Cargo.toml +++ b/crates/net/eth-wire/Cargo.toml @@ -66,11 +66,22 @@ alloy-eips.workspace = true [features] arbitrary = [ - "reth-primitives/arbitrary", - "reth-eth-wire-types/arbitrary", - "dep:arbitrary", + "reth-primitives/arbitrary", + "reth-eth-wire-types/arbitrary", + "dep:arbitrary", + "reth-chainspec/arbitrary", + "alloy-eips/arbitrary", + "alloy-primitives/arbitrary" +] +serde = [ + "dep:serde", + "reth-eth-wire-types/serde", + "alloy-eips/serde", + "alloy-primitives/serde", + "bytes/serde", + "rand/serde", + "secp256k1/serde" ] -serde = ["dep:serde", "reth-eth-wire-types/serde"] [[test]] name = "fuzz_roundtrip" diff --git a/crates/net/network-api/Cargo.toml b/crates/net/network-api/Cargo.toml index 650d74904..6d410e9db 100644 --- a/crates/net/network-api/Cargo.toml +++ b/crates/net/network-api/Cargo.toml @@ -40,4 +40,10 @@ derive_more.workspace = true [features] default = ["serde"] -serde = ["dep:serde"] +serde = [ + "dep:serde", + "reth-eth-wire-types/serde", + "reth-network-types/serde", + "alloy-primitives/serde", + "enr/serde" +] diff --git a/crates/net/network/Cargo.toml b/crates/net/network/Cargo.toml index 1d3af517a..f444aa7fe 100644 --- a/crates/net/network/Cargo.toml +++ b/crates/net/network/Cargo.toml @@ -101,8 +101,36 @@ criterion = { workspace = true, features = ["async_tokio", "html_reports"] } [features] default = ["serde"] geth-tests = [] -serde = ["dep:serde", "secp256k1/serde", "enr/serde", "reth-network-types/serde"] -test-utils = ["dep:reth-provider", "reth-provider?/test-utils", "dep:tempfile", "reth-transaction-pool/test-utils", "reth-network-types/test-utils"] +serde = [ + "dep:serde", + "secp256k1/serde", + "enr/serde", + 
"reth-network-types/serde", + "reth-dns-discovery/serde", + "reth-eth-wire/serde", + "reth-provider?/serde", + "alloy-consensus/serde", + "alloy-eips/serde", + "alloy-primitives/serde", + "discv5/serde", + "parking_lot/serde", + "rand/serde", + "smallvec/serde", + "url/serde" +] +test-utils = [ + "dep:reth-provider", + "reth-provider?/test-utils", + "dep:tempfile", + "reth-transaction-pool/test-utils", + "reth-network-types/test-utils", + "reth-chainspec/test-utils", + "reth-consensus/test-utils", + "reth-discv4/test-utils", + "reth-network/test-utils", + "reth-network-p2p/test-utils", + "reth-primitives/test-utils" +] [[bench]] name = "bench" diff --git a/crates/net/p2p/Cargo.toml b/crates/net/p2p/Cargo.toml index c43f7f5b3..3b6d74c9d 100644 --- a/crates/net/p2p/Cargo.toml +++ b/crates/net/p2p/Cargo.toml @@ -43,5 +43,15 @@ tokio = { workspace = true, features = ["full"] } [features] default = ["std"] -test-utils = ["reth-consensus/test-utils", "parking_lot"] -std = ["reth-consensus/std"] +test-utils = [ + "reth-consensus/test-utils", + "parking_lot", + "reth-network-types/test-utils", + "reth-primitives/test-utils" +] +std = [ + "reth-consensus/std", + "reth-primitives/std", + "alloy-eips/std", + "alloy-primitives/std" +] diff --git a/crates/node/builder/Cargo.toml b/crates/node/builder/Cargo.toml index 53e53cd2b..86f755cb9 100644 --- a/crates/node/builder/Cargo.toml +++ b/crates/node/builder/Cargo.toml @@ -96,4 +96,21 @@ tempfile.workspace = true [features] default = [] -test-utils = ["reth-db/test-utils"] +test-utils = [ + "reth-db/test-utils", + "reth-blockchain-tree/test-utils", + "reth-chain-state/test-utils", + "reth-chainspec/test-utils", + "reth-consensus/test-utils", + "reth-engine-tree/test-utils", + "reth-evm/test-utils", + "reth-downloaders/test-utils", + "reth-network/test-utils", + "reth-network-p2p/test-utils", + "reth-payload-builder/test-utils", + "reth-primitives/test-utils", + "reth-stages/test-utils", + "reth-db-api/test-utils", + "reth-provider/test-utils", + "reth-transaction-pool/test-utils" +] diff --git a/crates/node/core/Cargo.toml b/crates/node/core/Cargo.toml index a6ae1db5e..73c552f4d 100644 --- a/crates/node/core/Cargo.toml +++ b/crates/node/core/Cargo.toml @@ -76,10 +76,18 @@ proptest.workspace = true tokio.workspace = true [features] -optimism = ["reth-primitives/optimism"] +optimism = [ + "reth-primitives/optimism", + "reth-db/optimism" +] # Features for vergen to generate correct env vars -jemalloc = [] -asm-keccak = [] +jemalloc = [ + "reth-cli-util/jemalloc" +] +asm-keccak = [ + "reth-primitives/asm-keccak", + "alloy-primitives/asm-keccak" +] [build-dependencies] vergen = { version = "8.0.0", features = ["build", "cargo", "git", "gitcl"] } diff --git a/crates/optimism/bin/Cargo.toml b/crates/optimism/bin/Cargo.toml index 2de0bb6ee..f60ef36a4 100644 --- a/crates/optimism/bin/Cargo.toml +++ b/crates/optimism/bin/Cargo.toml @@ -37,7 +37,15 @@ tracy-allocator = ["reth-cli-util/tracy-allocator"] asm-keccak = ["reth-optimism-cli/asm-keccak", "reth-optimism-node/asm-keccak"] -optimism = ["reth-optimism-cli/optimism", "reth-optimism-node/optimism"] +optimism = [ + "reth-optimism-cli/optimism", + "reth-optimism-node/optimism", + "reth-optimism-consensus/optimism", + "reth-optimism-evm/optimism", + "reth-optimism-payload-builder/optimism", + "reth-optimism-rpc/optimism", + "reth-provider/optimism" +] min-error-logs = ["tracing/release_max_level_error"] min-warn-logs = ["tracing/release_max_level_warn"] diff --git a/crates/optimism/chainspec/Cargo.toml 
b/crates/optimism/chainspec/Cargo.toml index efc9bf0b0..6b068dabb 100644 --- a/crates/optimism/chainspec/Cargo.toml +++ b/crates/optimism/chainspec/Cargo.toml @@ -45,12 +45,14 @@ op-alloy-rpc-types.workspace = true [features] default = ["std"] std = [ - "alloy-chains/std", - "alloy-genesis/std", - "alloy-primitives/std", - "op-alloy-rpc-types/std", - "reth-chainspec/std", - "reth-ethereum-forks/std", - "reth-primitives-traits/std", - "reth-optimism-forks/std", + "alloy-chains/std", + "alloy-genesis/std", + "alloy-primitives/std", + "op-alloy-rpc-types/std", + "reth-chainspec/std", + "reth-ethereum-forks/std", + "reth-primitives-traits/std", + "reth-optimism-forks/std", + "alloy-consensus/std", + "once_cell/std" ] diff --git a/crates/optimism/cli/Cargo.toml b/crates/optimism/cli/Cargo.toml index d53270cd6..7db41ccbe 100644 --- a/crates/optimism/cli/Cargo.toml +++ b/crates/optimism/cli/Cargo.toml @@ -73,11 +73,14 @@ reth-cli-commands.workspace = true [features] optimism = [ - "reth-primitives/optimism", - "reth-optimism-evm/optimism", - "reth-provider/optimism", - "reth-node-core/optimism", - "reth-optimism-node/optimism", + "reth-primitives/optimism", + "reth-optimism-evm/optimism", + "reth-provider/optimism", + "reth-node-core/optimism", + "reth-optimism-node/optimism", + "reth-execution-types/optimism", + "reth-db/optimism", + "reth-db-api/optimism" ] asm-keccak = [ "alloy-primitives/asm-keccak", diff --git a/crates/optimism/evm/Cargo.toml b/crates/optimism/evm/Cargo.toml index f251347c5..f6b22ad14 100644 --- a/crates/optimism/evm/Cargo.toml +++ b/crates/optimism/evm/Cargo.toml @@ -51,10 +51,21 @@ alloy-consensus.workspace = true [features] default = ["std"] -std = [] +std = [ + "reth-consensus/std", + "reth-primitives/std", + "reth-revm/std", + "alloy-consensus/std", + "alloy-eips/std", + "alloy-genesis/std", + "alloy-primitives/std", + "revm-primitives/std", + "revm/std" +] optimism = [ - "reth-primitives/optimism", - "reth-execution-types/optimism", - "reth-optimism-consensus/optimism", - "revm/optimism", + "reth-primitives/optimism", + "reth-execution-types/optimism", + "reth-optimism-consensus/optimism", + "revm/optimism", + "revm-primitives/optimism" ] diff --git a/crates/optimism/hardforks/Cargo.toml b/crates/optimism/hardforks/Cargo.toml index 815d50c6b..c30566a54 100644 --- a/crates/optimism/hardforks/Cargo.toml +++ b/crates/optimism/hardforks/Cargo.toml @@ -27,5 +27,13 @@ once_cell.workspace = true [features] default = ["std"] -std = [] -serde = ["dep:serde"] \ No newline at end of file +std = [ + "alloy-primitives/std", + "once_cell/std", + "serde?/std" +] +serde = [ + "dep:serde", + "alloy-chains/serde", + "alloy-primitives/serde" +] diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index fbe787d6e..37cf4a328 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -69,15 +69,35 @@ op-alloy-consensus.workspace = true [features] optimism = [ - "reth-primitives/optimism", - "reth-provider/optimism", - "reth-optimism-evm/optimism", - "reth-optimism-payload-builder/optimism", - "reth-beacon-consensus/optimism", - "revm/optimism", - "reth-auto-seal-consensus/optimism", - "reth-optimism-rpc/optimism", - "reth-engine-local/optimism", + "reth-primitives/optimism", + "reth-provider/optimism", + "reth-optimism-evm/optimism", + "reth-optimism-payload-builder/optimism", + "reth-beacon-consensus/optimism", + "revm/optimism", + "reth-auto-seal-consensus/optimism", + "reth-optimism-rpc/optimism", + "reth-engine-local/optimism", + 
"reth-optimism-consensus/optimism", + "reth-db/optimism" +] +asm-keccak = [ + "reth-primitives/asm-keccak", + "reth/asm-keccak", + "alloy-primitives/asm-keccak", + "revm/asm-keccak" +] +test-utils = [ + "reth-node-builder/test-utils", + "reth-chainspec/test-utils", + "reth-consensus/test-utils", + "reth-evm/test-utils", + "reth-network/test-utils", + "reth-payload-builder/test-utils", + "reth-primitives/test-utils", + "reth-revm/test-utils", + "reth-db/test-utils", + "reth-provider/test-utils", + "reth-transaction-pool/test-utils", + "revm/test-utils" ] -asm-keccak = ["reth-primitives/asm-keccak"] -test-utils = ["reth-node-builder/test-utils"] diff --git a/crates/optimism/payload/Cargo.toml b/crates/optimism/payload/Cargo.toml index 46cc82edb..de61def83 100644 --- a/crates/optimism/payload/Cargo.toml +++ b/crates/optimism/payload/Cargo.toml @@ -50,8 +50,11 @@ sha2.workspace = true [features] optimism = [ - "reth-primitives/optimism", - "reth-provider/optimism", - "reth-optimism-evm/optimism", - "revm/optimism", + "reth-primitives/optimism", + "reth-provider/optimism", + "reth-optimism-evm/optimism", + "revm/optimism", + "reth-execution-types/optimism", + "reth-optimism-consensus/optimism", + "revm-primitives/optimism" ] diff --git a/crates/optimism/rpc/Cargo.toml b/crates/optimism/rpc/Cargo.toml index 90984998a..dc0f96c40 100644 --- a/crates/optimism/rpc/Cargo.toml +++ b/crates/optimism/rpc/Cargo.toml @@ -63,8 +63,9 @@ reth-optimism-chainspec.workspace = true [features] optimism = [ - "reth-optimism-evm/optimism", - "reth-primitives/optimism", - "reth-provider/optimism", - "revm/optimism", + "reth-optimism-evm/optimism", + "reth-primitives/optimism", + "reth-provider/optimism", + "revm/optimism", + "reth-optimism-consensus/optimism" ] diff --git a/crates/optimism/storage/Cargo.toml b/crates/optimism/storage/Cargo.toml index 107b64db3..2b18897d9 100644 --- a/crates/optimism/storage/Cargo.toml +++ b/crates/optimism/storage/Cargo.toml @@ -20,4 +20,8 @@ reth-prune-types.workspace = true reth-stages-types.workspace = true [features] -optimism = ["reth-primitives/optimism"] \ No newline at end of file +optimism = [ + "reth-primitives/optimism", + "reth-codecs/optimism", + "reth-db-api/optimism" +] diff --git a/crates/payload/builder/Cargo.toml b/crates/payload/builder/Cargo.toml index 71f63ce34..3b71011e0 100644 --- a/crates/payload/builder/Cargo.toml +++ b/crates/payload/builder/Cargo.toml @@ -40,4 +40,10 @@ tracing.workspace = true revm.workspace = true [features] -test-utils = ["reth-chain-state"] +test-utils = [ + "reth-chain-state", + "reth-chain-state?/test-utils", + "reth-primitives/test-utils", + "reth-provider/test-utils", + "revm/test-utils" +] diff --git a/crates/primitives-traits/Cargo.toml b/crates/primitives-traits/Cargo.toml index 2fec75666..9634da40f 100644 --- a/crates/primitives-traits/Cargo.toml +++ b/crates/primitives-traits/Cargo.toml @@ -54,14 +54,30 @@ test-fuzz.workspace = true [features] default = ["std"] -std = [] -test-utils = ["arbitrary"] +std = [ + "alloy-consensus/std", + "alloy-eips/std", + "alloy-genesis/std", + "alloy-primitives/std", + "revm-primitives/std", + "serde/std" +] +test-utils = [ + "arbitrary", + "reth-codecs/test-utils" +] arbitrary = [ - "std", - "alloy-consensus/arbitrary", - "alloy-primitives/arbitrary", - "dep:arbitrary", - "dep:proptest", - "dep:proptest-arbitrary-interop", + "std", + "alloy-consensus/arbitrary", + "alloy-primitives/arbitrary", + "dep:arbitrary", + "dep:proptest", + "dep:proptest-arbitrary-interop", + "alloy-eips/arbitrary", + 
"revm-primitives/arbitrary" +] +serde-bincode-compat = [ + "serde_with", + "alloy-consensus/serde-bincode-compat", + "alloy-eips/serde-bincode-compat" ] -serde-bincode-compat = ["serde_with", "alloy-consensus/serde-bincode-compat"] diff --git a/crates/primitives/Cargo.toml b/crates/primitives/Cargo.toml index 5661fb8f8..566a114be 100644 --- a/crates/primitives/Cargo.toml +++ b/crates/primitives/Cargo.toml @@ -90,18 +90,41 @@ pprof = { workspace = true, features = [ [features] default = ["c-kzg", "alloy-compat", "std", "reth-codec", "secp256k1"] -std = ["reth-primitives-traits/std"] +std = [ + "reth-primitives-traits/std", + "alloy-consensus/std", + "alloy-eips/std", + "alloy-genesis/std", + "alloy-primitives/std", + "alloy-serde?/std", + "k256/std", + "once_cell/std", + "revm-primitives/std", + "secp256k1?/std", + "serde/std" +] reth-codec = ["dep:reth-codecs", "dep:zstd", "dep:modular-bitfield", "std"] -asm-keccak = ["alloy-primitives/asm-keccak"] +asm-keccak = [ + "alloy-primitives/asm-keccak", + "revm-primitives/asm-keccak" +] arbitrary = [ - "dep:arbitrary", - "alloy-eips/arbitrary", - "rand", - "reth-codec", - "reth-ethereum-forks/arbitrary", - "reth-primitives-traits/arbitrary", - "revm-primitives/arbitrary", - "secp256k1", + "dep:arbitrary", + "alloy-eips/arbitrary", + "rand", + "reth-codec", + "reth-ethereum-forks/arbitrary", + "reth-primitives-traits/arbitrary", + "revm-primitives/arbitrary", + "secp256k1", + "reth-chainspec/arbitrary", + "reth-trie-common/arbitrary", + "alloy-consensus/arbitrary", + "alloy-primitives/arbitrary", + "alloy-rpc-types?/arbitrary", + "alloy-serde?/arbitrary", + "op-alloy-consensus?/arbitrary", + "op-alloy-rpc-types?/arbitrary" ] secp256k1 = ["dep:secp256k1"] c-kzg = [ @@ -121,12 +144,18 @@ alloy-compat = [ "dep:alloy-serde", "dep:op-alloy-rpc-types", ] -test-utils = ["reth-primitives-traits/test-utils"] +test-utils = [ + "reth-primitives-traits/test-utils", + "reth-chainspec/test-utils", + "reth-codecs?/test-utils", + "reth-trie-common/test-utils" +] serde-bincode-compat = [ - "alloy-consensus/serde-bincode-compat", - "op-alloy-consensus?/serde-bincode-compat", - "reth-primitives-traits/serde-bincode-compat", - "serde_with", + "alloy-consensus/serde-bincode-compat", + "op-alloy-consensus?/serde-bincode-compat", + "reth-primitives-traits/serde-bincode-compat", + "serde_with", + "alloy-eips/serde-bincode-compat" ] [[bench]] diff --git a/crates/revm/Cargo.toml b/crates/revm/Cargo.toml index 668abb79e..8f670d364 100644 --- a/crates/revm/Cargo.toml +++ b/crates/revm/Cargo.toml @@ -34,6 +34,21 @@ alloy-primitives.workspace = true [features] default = ["std"] -std = [] -test-utils = ["dep:reth-trie"] -serde = ["revm/serde"] +std = [ + "reth-primitives/std", + "alloy-primitives/std", + "revm/std", + "alloy-eips/std" +] +test-utils = [ + "dep:reth-trie", + "reth-primitives/test-utils", + "reth-trie?/test-utils", + "revm/test-utils" +] +serde = [ + "revm/serde", + "reth-trie?/serde", + "alloy-eips/serde", + "alloy-primitives/serde" +] diff --git a/crates/stages/api/Cargo.toml b/crates/stages/api/Cargo.toml index 352d3e024..cba569a2a 100644 --- a/crates/stages/api/Cargo.toml +++ b/crates/stages/api/Cargo.toml @@ -46,4 +46,9 @@ tokio-stream.workspace = true reth-testing-utils.workspace = true [features] -test-utils = [] +test-utils = [ + "reth-consensus/test-utils", + "reth-network-p2p/test-utils", + "reth-primitives-traits/test-utils", + "reth-provider/test-utils" +] diff --git a/crates/stages/stages/Cargo.toml b/crates/stages/stages/Cargo.toml index 
81f35a4b3..0b26cb6a1 100644 --- a/crates/stages/stages/Cargo.toml +++ b/crates/stages/stages/Cargo.toml @@ -93,13 +93,24 @@ pprof = { workspace = true, features = [ [features] test-utils = [ - "dep:reth-chainspec", - "reth-network-p2p/test-utils", - "reth-db/test-utils", - "reth-provider/test-utils", - "reth-stages-api/test-utils", - "dep:reth-testing-utils", - "dep:tempfile", + "dep:reth-chainspec", + "reth-network-p2p/test-utils", + "reth-db/test-utils", + "reth-provider/test-utils", + "reth-stages-api/test-utils", + "dep:reth-testing-utils", + "dep:tempfile", + "reth-chainspec?/test-utils", + "reth-consensus/test-utils", + "reth-evm/test-utils", + "reth-downloaders/test-utils", + "reth-primitives/test-utils", + "reth-primitives-traits/test-utils", + "reth-revm/test-utils", + "reth-codecs/test-utils", + "reth-db-api/test-utils", + "reth-trie-db/test-utils", + "reth-trie/test-utils" ] [[bench]] diff --git a/crates/storage/codecs/Cargo.toml b/crates/storage/codecs/Cargo.toml index 21a1897f1..2525b4e8d 100644 --- a/crates/storage/codecs/Cargo.toml +++ b/crates/storage/codecs/Cargo.toml @@ -49,7 +49,15 @@ serde.workspace = true [features] default = ["std", "alloy"] -std = ["alloy-primitives/std", "bytes/std"] +std = [ + "alloy-primitives/std", + "bytes/std", + "alloy-consensus?/std", + "alloy-eips?/std", + "alloy-genesis?/std", + "alloy-trie?/std", + "serde/std" +] alloy = [ "dep:alloy-consensus", "dep:alloy-eips", diff --git a/crates/storage/db-api/Cargo.toml b/crates/storage/db-api/Cargo.toml index d674f9d7b..932a94b98 100644 --- a/crates/storage/db-api/Cargo.toml +++ b/crates/storage/db-api/Cargo.toml @@ -56,11 +56,25 @@ proptest.workspace = true proptest-arbitrary-interop.workspace = true [features] -test-utils = ["arbitrary"] +test-utils = [ + "arbitrary", + "reth-primitives/test-utils", + "reth-primitives-traits/test-utils", + "reth-codecs/test-utils", + "reth-db-models/test-utils", + "reth-trie-common/test-utils" +] arbitrary = [ - "reth-primitives/arbitrary", - "reth-db-models/arbitrary", - "dep:arbitrary", - "dep:proptest", + "reth-primitives/arbitrary", + "reth-db-models/arbitrary", + "dep:arbitrary", + "dep:proptest", + "reth-primitives-traits/arbitrary", + "reth-trie-common/arbitrary", + "alloy-primitives/arbitrary", + "parity-scale-codec/arbitrary" +] +optimism = [ + "reth-primitives/optimism", + "reth-codecs/optimism" ] -optimism = ["reth-primitives/optimism"] diff --git a/crates/storage/db-models/Cargo.toml b/crates/storage/db-models/Cargo.toml index 492178775..31741207c 100644 --- a/crates/storage/db-models/Cargo.toml +++ b/crates/storage/db-models/Cargo.toml @@ -39,5 +39,14 @@ proptest-arbitrary-interop.workspace = true test-fuzz.workspace = true [features] -test-utils = ["arbitrary"] -arbitrary = ["reth-primitives/arbitrary", "dep:arbitrary", "dep:proptest"] +test-utils = [ + "arbitrary", + "reth-primitives/test-utils", + "reth-codecs/test-utils" +] +arbitrary = [ + "reth-primitives/arbitrary", + "dep:arbitrary", + "dep:proptest", + "alloy-primitives/arbitrary" +] diff --git a/crates/storage/db/Cargo.toml b/crates/storage/db/Cargo.toml index 356672f25..2f437e631 100644 --- a/crates/storage/db/Cargo.toml +++ b/crates/storage/db/Cargo.toml @@ -89,10 +89,28 @@ mdbx = [ "dep:strum", "dep:rustc-hash", ] -test-utils = ["dep:tempfile", "arbitrary", "parking_lot"] +test-utils = [ + "dep:tempfile", + "arbitrary", + "parking_lot", + "reth-primitives/test-utils", + "reth-primitives-traits/test-utils", + "reth-db-api/test-utils", + "reth-nippy-jar/test-utils", + 
"reth-trie-common/test-utils" +] bench = [] -arbitrary = ["reth-primitives/arbitrary", "reth-db-api/arbitrary"] -optimism = [] +arbitrary = [ + "reth-primitives/arbitrary", + "reth-db-api/arbitrary", + "reth-primitives-traits/arbitrary", + "reth-trie-common/arbitrary", + "alloy-primitives/arbitrary" +] +optimism = [ + "reth-primitives/optimism", + "reth-db-api/optimism" +] disable-lock = [] [[bench]] diff --git a/crates/storage/errors/Cargo.toml b/crates/storage/errors/Cargo.toml index 52c93ae4e..ecefa5f6a 100644 --- a/crates/storage/errors/Cargo.toml +++ b/crates/storage/errors/Cargo.toml @@ -25,4 +25,8 @@ derive_more.workspace = true [features] default = ["std"] -std = [] +std = [ + "reth-primitives/std", + "alloy-eips/std", + "alloy-primitives/std" +] diff --git a/crates/storage/provider/Cargo.toml b/crates/storage/provider/Cargo.toml index 00e1c9f09..b93c22cdf 100644 --- a/crates/storage/provider/Cargo.toml +++ b/crates/storage/provider/Cargo.toml @@ -88,16 +88,41 @@ alloy-consensus.workspace = true [features] optimism = [ - "reth-primitives/optimism", - "reth-execution-types/optimism", - "reth-optimism-primitives", + "reth-primitives/optimism", + "reth-execution-types/optimism", + "reth-optimism-primitives", + "reth-codecs/optimism", + "reth-db/optimism", + "reth-db-api/optimism", + "revm/optimism" +] +serde = [ + "reth-execution-types/serde", + "reth-trie-db/serde", + "reth-trie/serde", + "alloy-consensus?/serde", + "alloy-eips/serde", + "alloy-primitives/serde", + "alloy-rpc-types-engine/serde", + "dashmap/serde", + "notify/serde", + "parking_lot/serde", + "rand/serde", + "revm/serde" ] -serde = ["reth-execution-types/serde"] test-utils = [ - "reth-db/test-utils", - "reth-nippy-jar/test-utils", - "reth-trie/test-utils", - "reth-chain-state/test-utils", - "reth-ethereum-engine-primitives", - "alloy-consensus", + "reth-db/test-utils", + "reth-nippy-jar/test-utils", + "reth-trie/test-utils", + "reth-chain-state/test-utils", + "reth-ethereum-engine-primitives", + "alloy-consensus", + "reth-chainspec/test-utils", + "reth-evm/test-utils", + "reth-network-p2p/test-utils", + "reth-primitives/test-utils", + "reth-codecs/test-utils", + "reth-db-api/test-utils", + "reth-trie-db/test-utils", + "revm/test-utils" ] diff --git a/crates/transaction-pool/Cargo.toml b/crates/transaction-pool/Cargo.toml index cdac6a1aa..1bfb10d86 100644 --- a/crates/transaction-pool/Cargo.toml +++ b/crates/transaction-pool/Cargo.toml @@ -72,12 +72,42 @@ serde_json.workspace = true [features] default = ["serde"] -serde = ["dep:serde"] -test-utils = ["rand", "paste", "serde"] +serde = [ + "dep:serde", + "reth-execution-types/serde", + "reth-eth-wire-types/serde", + "reth-provider/serde", + "alloy-consensus/serde", + "alloy-eips/serde", + "alloy-primitives/serde", + "bitflags/serde", + "parking_lot/serde", + "rand?/serde", + "revm/serde", + "smallvec/serde" +] +test-utils = [ + "rand", + "paste", + "serde", + "reth-chain-state/test-utils", + "reth-chainspec/test-utils", + "reth-primitives/test-utils", + "reth-provider/test-utils", + "revm/test-utils" +] arbitrary = [ - "proptest", - "reth-primitives/arbitrary", - "proptest-arbitrary-interop", + "proptest", + "reth-primitives/arbitrary", + "proptest-arbitrary-interop", + "reth-chainspec/arbitrary", + "reth-eth-wire-types/arbitrary", + "alloy-consensus/arbitrary", + "alloy-eips/arbitrary", + "alloy-primitives/arbitrary", + "bitflags/arbitrary", + "revm/arbitrary", + "smallvec/arbitrary" ] [[bench]] diff --git a/crates/trie/common/Cargo.toml b/crates/trie/common/Cargo.toml 
index 0bd28140f..2c6ccbfe6 100644 --- a/crates/trie/common/Cargo.toml +++ b/crates/trie/common/Cargo.toml @@ -41,8 +41,19 @@ hash-db = "=0.15.2" plain_hasher = "0.2" [features] -test-utils = ["dep:plain_hasher", "dep:hash-db", "arbitrary"] +test-utils = [ + "dep:plain_hasher", + "dep:hash-db", + "arbitrary", + "reth-primitives-traits/test-utils", + "reth-codecs/test-utils" +] arbitrary = [ - "alloy-trie/arbitrary", - "dep:arbitrary", + "alloy-trie/arbitrary", + "dep:arbitrary", + "reth-primitives-traits/arbitrary", + "alloy-consensus/arbitrary", + "alloy-primitives/arbitrary", + "nybbles/arbitrary", + "revm-primitives/arbitrary" ] diff --git a/crates/trie/db/Cargo.toml b/crates/trie/db/Cargo.toml index e75b0456e..55fa9a851 100644 --- a/crates/trie/db/Cargo.toml +++ b/crates/trie/db/Cargo.toml @@ -66,5 +66,23 @@ similar-asserts.workspace = true [features] metrics = ["reth-metrics", "reth-trie/metrics", "dep:metrics"] -serde = ["dep:serde"] -test-utils = ["triehash", "reth-trie-common/test-utils"] +serde = [ + "dep:serde", + "reth-provider/serde", + "reth-trie/serde", + "alloy-consensus/serde", + "alloy-primitives/serde", + "revm/serde", + "similar-asserts/serde" +] +test-utils = [ + "triehash", + "reth-trie-common/test-utils", + "reth-chainspec/test-utils", + "reth-primitives/test-utils", + "reth-db/test-utils", + "reth-db-api/test-utils", + "reth-provider/test-utils", + "reth-trie/test-utils", + "revm/test-utils" +] diff --git a/crates/trie/trie/Cargo.toml b/crates/trie/trie/Cargo.toml index 77fc57397..112e661c0 100644 --- a/crates/trie/trie/Cargo.toml +++ b/crates/trie/trie/Cargo.toml @@ -64,9 +64,23 @@ bincode.workspace = true [features] metrics = ["reth-metrics", "dep:metrics"] -serde = ["dep:serde"] -serde-bincode-compat = ["serde_with"] -test-utils = ["triehash", "reth-trie-common/test-utils"] +serde = [ + "dep:serde", + "alloy-consensus/serde", + "alloy-primitives/serde", + "revm/serde" +] +serde-bincode-compat = [ + "serde_with", + "reth-primitives/serde-bincode-compat", + "alloy-consensus/serde-bincode-compat" +] +test-utils = [ + "triehash", + "reth-trie-common/test-utils", + "reth-primitives/test-utils", + "revm/test-utils" +] [[bench]] name = "prefix_set" diff --git a/testing/ef-tests/Cargo.toml b/testing/ef-tests/Cargo.toml index df68f5154..a56c44ec3 100644 --- a/testing/ef-tests/Cargo.toml +++ b/testing/ef-tests/Cargo.toml @@ -13,7 +13,11 @@ workspace = true [features] ef-tests = [] -asm-keccak = ["reth-primitives/asm-keccak"] +asm-keccak = [ + "reth-primitives/asm-keccak", + "alloy-primitives/asm-keccak", + "revm/asm-keccak" +] [dependencies] reth-chainspec.workspace = true From 422ab1735407c8e9de8ffa24adb416132d41f351 Mon Sep 17 00:00:00 2001 From: caglarkaya Date: Sun, 20 Oct 2024 01:14:52 +0300 Subject: [PATCH 155/425] feat: use next free nonce in eth_sendTransaction (#11873) --- crates/rpc/rpc-eth-api/src/helpers/state.rs | 36 +++++++++++++++++++ .../rpc-eth-api/src/helpers/transaction.rs | 5 ++- 2 files changed, 38 insertions(+), 3 deletions(-) diff --git a/crates/rpc/rpc-eth-api/src/helpers/state.rs b/crates/rpc/rpc-eth-api/src/helpers/state.rs index f2fc13f5d..080d90dc3 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/state.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/state.rs @@ -272,6 +272,42 @@ pub trait LoadState: EthApiTypes { } } + /// Returns the next available nonce without gaps for the given address + /// Next available nonce is either the on chain nonce of the account or the highest consecutive + /// nonce in the pool + 1 + fn next_available_nonce( + &self, + 
address: Address, + ) -> impl Future> + Send + where + Self: SpawnBlocking, + { + self.spawn_blocking_io(move |this| { + // first fetch the on chain nonce of the account + let on_chain_account_nonce = this + .latest_state()? + .account_nonce(address) + .map_err(Self::Error::from_eth_err)? + .unwrap_or_default(); + + let mut next_nonce = on_chain_account_nonce; + // Retrieve the highest consecutive transaction for the sender from the transaction pool + if let Some(highest_tx) = this + .pool() + .get_highest_consecutive_transaction_by_sender(address, on_chain_account_nonce) + { + // Return the nonce of the highest consecutive transaction + 1 + next_nonce = highest_tx.nonce().checked_add(1).ok_or_else(|| { + Self::Error::from(EthApiError::InvalidTransaction( + RpcInvalidTransactionError::NonceMaxValue, + )) + })?; + } + + Ok(next_nonce) + }) + } + /// Returns the number of transactions sent from an address at the given block identifier. /// /// If this is [`BlockNumberOrTag::Pending`](reth_primitives::BlockNumberOrTag) then this will diff --git a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs index 54d60cb7a..d29787d7a 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs @@ -365,9 +365,8 @@ pub trait EthTransactions: LoadTransaction { // set nonce if not already set before if request.nonce.is_none() { - let nonce = self.transaction_count(from, Some(BlockId::pending())).await?; - // note: `.to()` can't panic because the nonce is constructed from a `u64` - request.nonce = Some(nonce.to()); + let nonce = self.next_available_nonce(from).await?; + request.nonce = Some(nonce); } let chain_id = self.chain_id(); From 453ba2d9accc2b85d6495c51306843899755810f Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Sun, 20 Oct 2024 10:46:18 +0200 Subject: [PATCH 156/425] feat: switch to composable executor for Optimism (#11846) --- crates/optimism/evm/src/execute.rs | 497 ++++++++-------------------- crates/optimism/evm/src/lib.rs | 2 - crates/optimism/evm/src/strategy.rs | 494 --------------------------- crates/optimism/node/src/node.rs | 10 +- 4 files changed, 150 insertions(+), 853 deletions(-) delete mode 100644 crates/optimism/evm/src/strategy.rs diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index d15cdee13..77f670668 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -1,146 +1,134 @@ -//! Optimism block executor. +//! Optimism block execution strategy. 
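The nonce selection added in #11873 above can be read as: start from the on-chain account nonce, and if the pool already holds a gapless run of this sender's pending transactions beginning at that nonce, use the nonce right after that run. Below is a minimal standalone sketch of that rule, assuming the pooled nonces are available as a plain list instead of going through `get_highest_consecutive_transaction_by_sender`; it is an illustration of the selection logic, not the pool API itself.

/// Next gapless nonce: the on-chain nonce, or the end of the consecutive
/// run of pooled nonces that starts at the on-chain nonce, plus one.
fn next_available_nonce(on_chain_nonce: u64, mut pooled_nonces: Vec<u64>) -> u64 {
    pooled_nonces.sort_unstable();
    let mut next = on_chain_nonce;
    for nonce in pooled_nonces {
        if nonce == next {
            // already occupied by a pending transaction, move past it
            next += 1;
        } else if nonce > next {
            // the first gap ends the run; anything beyond it is not consecutive
            break;
        }
    }
    next
}

fn main() {
    // on-chain nonce 5, pool holds 5, 6, 7 and a gapped 9 -> next free nonce is 8
    assert_eq!(next_available_nonce(5, vec![6, 5, 7, 9]), 8);
    // nothing pending -> fall back to the on-chain nonce
    assert_eq!(next_available_nonce(5, vec![]), 5);
}

This is the value `eth_sendTransaction` now fills in when the request carries no explicit nonce, replacing the previous pending-block transaction count lookup.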
-use crate::{ - l1::ensure_create2_deployer, OpChainSpec, OptimismBlockExecutionError, OptimismEvmConfig, -}; +use crate::{l1::ensure_create2_deployer, OptimismBlockExecutionError, OptimismEvmConfig}; use alloc::{boxed::Box, sync::Arc, vec::Vec}; use alloy_consensus::Transaction as _; use alloy_eips::eip7685::Requests; -use alloy_primitives::{BlockNumber, U256}; use core::fmt::Display; -use reth_chainspec::{ChainSpec, EthereumHardforks}; +use reth_chainspec::EthereumHardforks; +use reth_consensus::ConsensusError; use reth_evm::{ execute::{ - BatchExecutor, BlockExecutionError, BlockExecutionInput, BlockExecutionOutput, - BlockExecutorProvider, BlockValidationError, Executor, ProviderError, + BasicBlockExecutorProvider, BlockExecutionError, BlockExecutionStrategy, + BlockExecutionStrategyFactory, BlockValidationError, ProviderError, }, state_change::post_block_balance_increments, - system_calls::{NoopHook, OnStateHook, SystemCaller}, - ConfigureEvm, + system_calls::{OnStateHook, SystemCaller}, + ConfigureEvm, ConfigureEvmEnv, }; -use reth_execution_types::ExecutionOutcome; +use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::validate_block_post_execution; use reth_optimism_forks::OptimismHardfork; -use reth_primitives::{BlockWithSenders, Header, Receipt, Receipts, TxType}; -use reth_prune_types::PruneModes; -use reth_revm::{batch::BlockBatchRecord, db::states::bundle_state::BundleRetention, Evm, State}; +use reth_primitives::{BlockWithSenders, Header, Receipt, TxType}; +use reth_revm::{ + db::{states::bundle_state::BundleRetention, BundleState}, + Database, State, +}; use revm_primitives::{ - db::{Database, DatabaseCommit}, - BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, + db::DatabaseCommit, BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, U256, }; use tracing::trace; -/// Provides executors to execute regular optimism blocks +/// Factory for [`OpExecutionStrategy`]. #[derive(Debug, Clone)] -pub struct OpExecutorProvider { +pub struct OpExecutionStrategyFactory { + /// The chainspec chain_spec: Arc, + /// How to create an EVM. evm_config: EvmConfig, } -impl OpExecutorProvider { - /// Creates a new default optimism executor provider. +impl OpExecutionStrategyFactory { + /// Creates a new default optimism executor strategy factory. pub fn optimism(chain_spec: Arc) -> Self { Self::new(chain_spec.clone(), OptimismEvmConfig::new(chain_spec)) } } -impl OpExecutorProvider { - /// Creates a new executor provider. +impl OpExecutionStrategyFactory { + /// Creates a new executor strategy factory. pub const fn new(chain_spec: Arc, evm_config: EvmConfig) -> Self { Self { chain_spec, evm_config } } } -impl OpExecutorProvider -where - EvmConfig: ConfigureEvm
, -{ - fn op_executor(&self, db: DB) -> OpBlockExecutor - where - DB: Database + Display>, - { - OpBlockExecutor::new( - self.chain_spec.clone(), - self.evm_config.clone(), - State::builder().with_database(db).with_bundle_update().without_state_clear().build(), - ) - } -} +impl BlockExecutionStrategyFactory for OpExecutionStrategyFactory { + type Strategy + Display>> = OpExecutionStrategy; -impl BlockExecutorProvider for OpExecutorProvider -where - EvmConfig: ConfigureEvm
, -{ - type Executor + Display>> = - OpBlockExecutor; - - type BatchExecutor + Display>> = - OpBatchExecutor; - fn executor(&self, db: DB) -> Self::Executor + fn create_strategy(&self, db: DB) -> Self::Strategy where DB: Database + Display>, { - self.op_executor(db) - } - - fn batch_executor(&self, db: DB) -> Self::BatchExecutor - where - DB: Database + Display>, - { - let executor = self.op_executor(db); - OpBatchExecutor { executor, batch_record: BlockBatchRecord::default() } + let state = + State::builder().with_database(db).with_bundle_update().without_state_clear().build(); + OpExecutionStrategy::new(state, self.chain_spec.clone(), self.evm_config.clone()) } } -/// Helper container type for EVM with chain spec. -#[derive(Debug, Clone)] -pub struct OpEvmExecutor { +/// Block execution strategy for Optimism. +#[allow(missing_debug_implementations)] +pub struct OpExecutionStrategy { /// The chainspec chain_spec: Arc, /// How to create an EVM. evm_config: EvmConfig, + /// Current state for block execution. + state: State, + /// Utility to call system smart contracts. + system_caller: SystemCaller, } -impl OpEvmExecutor +impl OpExecutionStrategy { + /// Creates a new [`OpExecutionStrategy`] + pub fn new( + state: State, + chain_spec: Arc, + evm_config: OptimismEvmConfig, + ) -> Self { + let system_caller = SystemCaller::new(evm_config.clone(), (*chain_spec).clone()); + Self { state, chain_spec, evm_config, system_caller } + } +} + +impl OpExecutionStrategy { + /// Configures a new evm configuration and block environment for the given block. + /// + /// Caution: this does not initialize the tx environment. + fn evm_env_for_block(&self, header: &Header, total_difficulty: U256) -> EnvWithHandlerCfg { + let mut cfg = CfgEnvWithHandlerCfg::new(Default::default(), Default::default()); + let mut block_env = BlockEnv::default(); + self.evm_config.fill_cfg_and_block_env(&mut cfg, &mut block_env, header, total_difficulty); + + EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, Default::default()) + } +} + +impl BlockExecutionStrategy for OpExecutionStrategy where - EvmConfig: ConfigureEvm
, + DB: Database + Display>, { - /// Executes the transactions in the block and returns the receipts. - /// - /// This applies the pre-execution changes, and executes the transactions. - /// - /// The optional `state_hook` will be executed with the state changes if present. - /// - /// # Note - /// - /// It does __not__ apply post-execution changes. - fn execute_pre_and_transactions( - &self, + type Error = BlockExecutionError; + + fn apply_pre_execution_changes( + &mut self, block: &BlockWithSenders, - mut evm: Evm<'_, Ext, &mut State>, - state_hook: Option, - ) -> Result<(Vec, u64), BlockExecutionError> - where - DB: Database + Display>, - F: OnStateHook + 'static, - { - let mut system_caller = SystemCaller::new(self.evm_config.clone(), &self.chain_spec); - if let Some(hook) = state_hook { - system_caller.with_state_hook(Some(Box::new(hook) as Box)); - } + total_difficulty: U256, + ) -> Result<(), Self::Error> { + // Set state clear flag if the block is after the Spurious Dragon hardfork. + let state_clear_flag = + (*self.chain_spec).is_spurious_dragon_active_at_block(block.header.number); + self.state.set_state_clear_flag(state_clear_flag); + + let env = self.evm_env_for_block(&block.header, total_difficulty); + let mut evm = self.evm_config.evm_with_env(&mut self.state, env); - // apply pre execution changes - system_caller.apply_beacon_root_contract_call( + self.system_caller.apply_beacon_root_contract_call( block.timestamp, block.number, block.parent_beacon_block_root, &mut evm, )?; - // execute transactions - let is_regolith = - self.chain_spec.fork(OptimismHardfork::Regolith).active_at_timestamp(block.timestamp); - // Ensure that the create2deployer is force-deployed at the canyon transition. Optimism // blocks will always have at least a single transaction in them (the L1 info transaction), // so we can safely assume that this will always be triggered upon the transition and that @@ -148,6 +136,20 @@ where ensure_create2_deployer(self.chain_spec.clone(), block.timestamp, evm.db_mut()) .map_err(|_| OptimismBlockExecutionError::ForceCreate2DeployerFail)?; + Ok(()) + } + + fn execute_transactions( + &mut self, + block: &BlockWithSenders, + total_difficulty: U256, + ) -> Result<(Vec, u64), Self::Error> { + let env = self.evm_env_for_block(&block.header, total_difficulty); + let mut evm = self.evm_config.evm_with_env(&mut self.state, env); + + let is_regolith = + self.chain_spec.fork(OptimismHardfork::Regolith).active_at_timestamp(block.timestamp); + let mut cumulative_gas_used = 0; let mut receipts = Vec::with_capacity(block.body.transactions.len()); for (sender, transaction) in block.transactions_with_sender() { @@ -200,7 +202,7 @@ where ?transaction, "Executed transaction" ); - system_caller.on_state(&result_and_state); + self.system_caller.on_state(&result_and_state); let ResultAndState { result, state } = result_and_state; evm.db_mut().commit(state); @@ -225,288 +227,63 @@ where .then_some(1), }); } - drop(evm); Ok((receipts, cumulative_gas_used)) } -} - -/// A basic Optimism block executor. -/// -/// Expected usage: -/// - Create a new instance of the executor. -/// - Execute the block. -#[derive(Debug)] -pub struct OpBlockExecutor { - /// Chain specific evm config that's used to execute a block. - executor: OpEvmExecutor, - /// The state to use for execution - state: State, -} - -impl OpBlockExecutor { - /// Creates a new Optimism block executor. 
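The hooks above are the whole chain-specific surface: the generic executor in `reth_evm` owns the control flow and only calls into the strategy at fixed points. The sketch below illustrates that call order with placeholder types standing in for the real reth ones (no `total_difficulty`, no error handling); it is a simplified picture of the contract, not the actual `BasicBlockExecutor` implementation.

// Placeholder types so the sketch compiles on its own.
struct Block;
struct Receipt;
struct Requests;
struct BundleState;

// Shape of the strategy trait: per-block hooks plus state finalization.
trait BlockExecutionStrategy {
    fn apply_pre_execution_changes(&mut self, block: &Block);
    fn execute_transactions(&mut self, block: &Block) -> (Vec<Receipt>, u64);
    fn apply_post_execution_changes(&mut self, block: &Block, receipts: &[Receipt]) -> Requests;
    fn finish(&mut self) -> BundleState;
}

// What a generic executor does with any strategy; the call order is the contract.
fn execute_block<S: BlockExecutionStrategy>(
    strategy: &mut S,
    block: &Block,
) -> (Vec<Receipt>, u64, Requests, BundleState) {
    // beacon root call, create2 deployer deployment, state clear flag
    strategy.apply_pre_execution_changes(block);
    let (receipts, gas_used) = strategy.execute_transactions(block);
    let requests = strategy.apply_post_execution_changes(block, &receipts);
    // merge transitions with revert retention and take the bundle state
    let bundle = strategy.finish();
    (receipts, gas_used, requests, bundle)
}

`OpExecutionStrategy` plugs the Optimism-specific pieces, the create2 deployer check, deposit nonces and receipt versions, and the rejection of EIP-4844 transactions, into exactly these hooks.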
- pub const fn new( - chain_spec: Arc, - evm_config: EvmConfig, - state: State, - ) -> Self { - Self { executor: OpEvmExecutor { chain_spec, evm_config }, state } - } - - /// Returns the chain spec. - #[inline] - pub fn chain_spec(&self) -> &ChainSpec { - &self.executor.chain_spec - } - - /// Returns mutable reference to the state that wraps the underlying database. - pub fn state_mut(&mut self) -> &mut State { - &mut self.state - } -} - -impl OpBlockExecutor -where - EvmConfig: ConfigureEvm
, - DB: Database + Display>, -{ - /// Configures a new evm configuration and block environment for the given block. - /// - /// Caution: this does not initialize the tx environment. - fn evm_env_for_block(&self, header: &Header, total_difficulty: U256) -> EnvWithHandlerCfg { - let mut cfg = CfgEnvWithHandlerCfg::new(Default::default(), Default::default()); - let mut block_env = BlockEnv::default(); - self.executor.evm_config.fill_cfg_and_block_env( - &mut cfg, - &mut block_env, - header, - total_difficulty, - ); - - EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, Default::default()) - } - /// Convenience method to invoke `execute_without_verification_with_state_hook` setting the - /// state hook as `None`. - fn execute_without_verification( + fn apply_post_execution_changes( &mut self, block: &BlockWithSenders, total_difficulty: U256, - ) -> Result<(Vec, u64), BlockExecutionError> { - self.execute_without_verification_with_state_hook(block, total_difficulty, None::) - } - - /// Execute a single block and apply the state changes to the internal state. - /// - /// Returns the receipts of the transactions in the block and the total gas used. - /// - /// Returns an error if execution fails. - fn execute_without_verification_with_state_hook( - &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - state_hook: Option, - ) -> Result<(Vec, u64), BlockExecutionError> - where - F: OnStateHook + 'static, - { - // 1. prepare state on new block - self.on_new_block(&block.header); - - // 2. configure the evm and execute - let env = self.evm_env_for_block(&block.header, total_difficulty); - - let (receipts, gas_used) = { - let evm = self.executor.evm_config.evm_with_env(&mut self.state, env); - self.executor.execute_pre_and_transactions(block, evm, state_hook) - }?; - - // 3. apply post execution changes - self.post_execution(block, total_difficulty)?; - - Ok((receipts, gas_used)) - } - - /// Apply settings before a new block is executed. - pub(crate) fn on_new_block(&mut self, header: &Header) { - // Set state clear flag if the block is after the Spurious Dragon hardfork. - let state_clear_flag = self.chain_spec().is_spurious_dragon_active_at_block(header.number); - self.state.set_state_clear_flag(state_clear_flag); - } - - /// Apply post execution state changes, including block rewards, withdrawals, and irregular DAO - /// hardfork state change. - pub fn post_execution( - &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - ) -> Result<(), BlockExecutionError> { + _receipts: &[Receipt], + ) -> Result { let balance_increments = - post_block_balance_increments(self.chain_spec(), block, total_difficulty); + post_block_balance_increments(&self.chain_spec.clone(), block, total_difficulty); // increment balances self.state .increment_balances(balance_increments) .map_err(|_| BlockValidationError::IncrementBalanceFailed)?; - Ok(()) + Ok(Requests::default()) } -} - -impl Executor for OpBlockExecutor -where - EvmConfig: ConfigureEvm
, - DB: Database + Display>, -{ - type Input<'a> = BlockExecutionInput<'a, BlockWithSenders>; - type Output = BlockExecutionOutput; - type Error = BlockExecutionError; - - /// Executes the block and commits the state changes. - /// - /// Returns the receipts of the transactions in the block. - /// - /// Returns an error if the block could not be executed or failed verification. - /// - /// State changes are committed to the database. - fn execute(mut self, input: Self::Input<'_>) -> Result { - let BlockExecutionInput { block, total_difficulty } = input; - let (receipts, gas_used) = self.execute_without_verification(block, total_difficulty)?; - // NOTE: we need to merge keep the reverts for the bundle retention - self.state.merge_transitions(BundleRetention::Reverts); - - Ok(BlockExecutionOutput { - state: self.state.take_bundle(), - receipts, - requests: Requests::default(), - gas_used, - }) + fn state_ref(&self) -> &State { + &self.state } - fn execute_with_state_closure( - mut self, - input: Self::Input<'_>, - mut witness: F, - ) -> Result - where - F: FnMut(&State), - { - let BlockExecutionInput { block, total_difficulty } = input; - let (receipts, gas_used) = self.execute_without_verification(block, total_difficulty)?; - - // NOTE: we need to merge keep the reverts for the bundle retention - self.state.merge_transitions(BundleRetention::Reverts); - witness(&self.state); - - Ok(BlockExecutionOutput { - state: self.state.take_bundle(), - receipts, - requests: Requests::default(), - gas_used, - }) + fn state_mut(&mut self) -> &mut State { + &mut self.state } - fn execute_with_state_hook( - mut self, - input: Self::Input<'_>, - state_hook: F, - ) -> Result - where - F: OnStateHook + 'static, - { - let BlockExecutionInput { block, total_difficulty } = input; - let (receipts, gas_used) = self.execute_without_verification_with_state_hook( - block, - total_difficulty, - Some(state_hook), - )?; - - // NOTE: we need to merge keep the reverts for the bundle retention - self.state.merge_transitions(BundleRetention::Reverts); - - Ok(BlockExecutionOutput { - state: self.state.take_bundle(), - receipts, - requests: Requests::default(), - gas_used, - }) + fn with_state_hook(&mut self, hook: Option>) { + self.system_caller.with_state_hook(hook); } -} - -/// An executor for a batch of blocks. -/// -/// State changes are tracked until the executor is finalized. -#[derive(Debug)] -pub struct OpBatchExecutor { - /// The executor used to execute blocks. - executor: OpBlockExecutor, - /// Keeps track of the batch and record receipts based on the configured prune mode - batch_record: BlockBatchRecord, -} -impl OpBatchExecutor { - /// Returns the receipts of the executed blocks. - pub const fn receipts(&self) -> &Receipts { - self.batch_record.receipts() + fn finish(&mut self) -> BundleState { + self.state.merge_transitions(BundleRetention::Reverts); + self.state.take_bundle() } - /// Returns mutable reference to the state that wraps the underlying database. - pub fn state_mut(&mut self) -> &mut State { - self.executor.state_mut() + fn validate_block_post_execution( + &self, + block: &BlockWithSenders, + receipts: &[Receipt], + _requests: &Requests, + ) -> Result<(), ConsensusError> { + validate_block_post_execution(block, &self.chain_spec.clone(), receipts) } } -impl BatchExecutor for OpBatchExecutor -where - EvmConfig: ConfigureEvm
, - DB: Database + Display>, -{ - type Input<'a> = BlockExecutionInput<'a, BlockWithSenders>; - type Output = ExecutionOutcome; - type Error = BlockExecutionError; - - fn execute_and_verify_one(&mut self, input: Self::Input<'_>) -> Result<(), Self::Error> { - let BlockExecutionInput { block, total_difficulty } = input; - - if self.batch_record.first_block().is_none() { - self.batch_record.set_first_block(block.number); - } - - let (receipts, _gas_used) = - self.executor.execute_without_verification(block, total_difficulty)?; - - validate_block_post_execution(block, self.executor.chain_spec(), &receipts)?; - - // prepare the state according to the prune mode - let retention = self.batch_record.bundle_retention(block.number); - self.executor.state.merge_transitions(retention); - - // store receipts in the set - self.batch_record.save_receipts(receipts)?; - - Ok(()) - } - - fn finalize(mut self) -> Self::Output { - ExecutionOutcome::new( - self.executor.state.take_bundle(), - self.batch_record.take_receipts(), - self.batch_record.first_block().unwrap_or_default(), - self.batch_record.take_requests(), - ) - } - - fn set_tip(&mut self, tip: BlockNumber) { - self.batch_record.set_tip(tip); - } - - fn set_prune_modes(&mut self, prune_modes: PruneModes) { - self.batch_record.set_prune_modes(prune_modes); - } +/// Helper type with backwards compatible methods to obtain executor providers. +#[derive(Debug)] +pub struct OpExecutorProvider; - fn size_hint(&self) -> Option { - Some(self.executor.state.bundle_state.size_hint()) +impl OpExecutorProvider { + /// Creates a new default optimism executor strategy factory. + pub fn optimism( + chain_spec: Arc, + ) -> BasicBlockExecutorProvider { + BasicBlockExecutorProvider::new(OpExecutionStrategyFactory::optimism(chain_spec)) } } @@ -517,6 +294,7 @@ mod tests { use alloy_consensus::TxEip1559; use alloy_primitives::{b256, Address, StorageKey, StorageValue}; use reth_chainspec::MIN_TRANSACTION_GAS; + use reth_evm::execute::{BasicBlockExecutorProvider, BatchExecutor, BlockExecutorProvider}; use reth_optimism_chainspec::{optimism_deposit_tx_signature, OpChainSpecBuilder}; use reth_primitives::{Account, Block, BlockBody, Signature, Transaction, TransactionSigned}; use reth_revm::{ @@ -551,8 +329,13 @@ mod tests { db } - fn executor_provider(chain_spec: Arc) -> OpExecutorProvider { - OpExecutorProvider { evm_config: OptimismEvmConfig::new(chain_spec.clone()), chain_spec } + fn executor_provider( + chain_spec: Arc, + ) -> BasicBlockExecutorProvider { + let strategy_factory = + OpExecutionStrategyFactory::new(chain_spec.clone(), OptimismEvmConfig::new(chain_spec)); + + BasicBlockExecutorProvider::new(strategy_factory) } #[test] @@ -600,7 +383,10 @@ mod tests { let provider = executor_provider(chain_spec); let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); - executor.state_mut().load_cache_account(L1_BLOCK_CONTRACT).unwrap(); + // make sure the L1 block contract state is preloaded. 
+ executor.with_state_mut(|state| { + state.load_cache_account(L1_BLOCK_CONTRACT).unwrap(); + }); // Attempt to execute a block with one deposit and one non-deposit transaction executor @@ -622,8 +408,9 @@ mod tests { ) .unwrap(); - let tx_receipt = executor.receipts()[0][0].as_ref().unwrap(); - let deposit_receipt = executor.receipts()[0][1].as_ref().unwrap(); + let receipts = executor.receipts(); + let tx_receipt = receipts[0][0].as_ref().unwrap(); + let deposit_receipt = receipts[0][1].as_ref().unwrap(); // deposit_receipt_version is not present in pre canyon transactions assert!(deposit_receipt.deposit_receipt_version.is_none()); @@ -680,7 +467,10 @@ mod tests { let provider = executor_provider(chain_spec); let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); - executor.state_mut().load_cache_account(L1_BLOCK_CONTRACT).unwrap(); + // make sure the L1 block contract state is preloaded. + executor.with_state_mut(|state| { + state.load_cache_account(L1_BLOCK_CONTRACT).unwrap(); + }); // attempt to execute an empty block with parent beacon block root, this should not fail executor @@ -702,8 +492,9 @@ mod tests { ) .expect("Executing a block while canyon is active should not fail"); - let tx_receipt = executor.receipts()[0][0].as_ref().unwrap(); - let deposit_receipt = executor.receipts()[0][1].as_ref().unwrap(); + let receipts = executor.receipts(); + let tx_receipt = receipts[0][0].as_ref().unwrap(); + let deposit_receipt = receipts[0][1].as_ref().unwrap(); // deposit_receipt_version is set to 1 for post canyon deposit transactions assert_eq!(deposit_receipt.deposit_receipt_version, Some(1)); diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index ffc82fde4..60aa9f7db 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -36,8 +36,6 @@ use revm_primitives::{ BlobExcessGasAndPrice, BlockEnv, Bytes, CfgEnv, Env, HandlerCfg, OptimismFields, SpecId, TxKind, }; -pub mod strategy; - /// Optimism-related EVM configuration. #[derive(Debug, Clone)] pub struct OptimismEvmConfig { diff --git a/crates/optimism/evm/src/strategy.rs b/crates/optimism/evm/src/strategy.rs deleted file mode 100644 index c626bb665..000000000 --- a/crates/optimism/evm/src/strategy.rs +++ /dev/null @@ -1,494 +0,0 @@ -//! Optimism block execution strategy, - -use crate::{l1::ensure_create2_deployer, OptimismBlockExecutionError, OptimismEvmConfig}; -use alloc::{boxed::Box, sync::Arc, vec::Vec}; -use alloy_consensus::Transaction as _; -use alloy_eips::eip7685::Requests; -use core::fmt::Display; -use reth_chainspec::EthereumHardforks; -use reth_consensus::ConsensusError; -use reth_evm::{ - execute::{ - BlockExecutionError, BlockExecutionStrategy, BlockExecutionStrategyFactory, - BlockValidationError, ProviderError, - }, - state_change::post_block_balance_increments, - system_calls::{OnStateHook, SystemCaller}, - ConfigureEvm, ConfigureEvmEnv, -}; -use reth_optimism_chainspec::OpChainSpec; -use reth_optimism_consensus::validate_block_post_execution; -use reth_optimism_forks::OptimismHardfork; -use reth_primitives::{BlockWithSenders, Header, Receipt, TxType}; -use reth_revm::{ - db::{states::bundle_state::BundleRetention, BundleState}, - Database, State, -}; -use revm_primitives::{ - db::DatabaseCommit, BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, U256, -}; -use tracing::trace; - -/// Factory for [`OpExecutionStrategy`]. 
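Construction on the consumer side mirrors what the tests in this module and the node builder's `build_evm` do: wrap an `OpExecutionStrategyFactory` in the generic `BasicBlockExecutorProvider`. A sketch of that wiring using only constructors that appear in this patch (the helper name is just for illustration, and database setup is elided):

use std::sync::Arc;

use reth_evm::execute::BasicBlockExecutorProvider;
use reth_optimism_chainspec::OpChainSpec;
use reth_optimism_evm::{OpExecutionStrategyFactory, OptimismEvmConfig};

/// Builds the Optimism block-executor provider from a chain spec, the same way
/// `executor_provider` in the tests and `build_evm` in the node builder do.
fn op_executor_provider(
    chain_spec: Arc<OpChainSpec>,
) -> BasicBlockExecutorProvider<OpExecutionStrategyFactory> {
    let evm_config = OptimismEvmConfig::new(chain_spec.clone());
    let strategy_factory = OpExecutionStrategyFactory::new(chain_spec, evm_config);
    BasicBlockExecutorProvider::new(strategy_factory)
}

The `OpExecutorProvider::optimism(chain_spec)` helper kept in this file is a shorthand for the same wiring, and direct state access in tests now goes through `with_state_mut` on the batch executor rather than `state_mut`.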
-#[derive(Debug, Clone)] -pub struct OpExecutionStrategyFactory { - /// The chainspec - chain_spec: Arc, - /// How to create an EVM. - evm_config: EvmConfig, -} - -impl OpExecutionStrategyFactory { - /// Creates a new default optimism executor strategy factory. - pub fn optimism(chain_spec: Arc) -> Self { - Self::new(chain_spec.clone(), OptimismEvmConfig::new(chain_spec)) - } -} - -impl OpExecutionStrategyFactory { - /// Creates a new executor strategy factory. - pub const fn new(chain_spec: Arc, evm_config: EvmConfig) -> Self { - Self { chain_spec, evm_config } - } -} - -impl BlockExecutionStrategyFactory for OpExecutionStrategyFactory { - type Strategy + Display>> = OpExecutionStrategy; - - fn create_strategy(&self, db: DB) -> Self::Strategy - where - DB: Database + Display>, - { - let state = - State::builder().with_database(db).with_bundle_update().without_state_clear().build(); - OpExecutionStrategy::new(state, self.chain_spec.clone(), self.evm_config.clone()) - } -} - -/// Block execution strategy for Optimism. -#[allow(missing_debug_implementations)] -pub struct OpExecutionStrategy { - /// The chainspec - chain_spec: Arc, - /// How to create an EVM. - evm_config: EvmConfig, - /// Current state for block execution. - state: State, - /// Utility to call system smart contracts. - system_caller: SystemCaller, -} - -impl OpExecutionStrategy { - /// Creates a new [`OpExecutionStrategy`] - pub fn new( - state: State, - chain_spec: Arc, - evm_config: OptimismEvmConfig, - ) -> Self { - let system_caller = SystemCaller::new(evm_config.clone(), (*chain_spec).clone()); - Self { state, chain_spec, evm_config, system_caller } - } -} - -impl OpExecutionStrategy { - /// Configures a new evm configuration and block environment for the given block. - /// - /// Caution: this does not initialize the tx environment. - fn evm_env_for_block(&self, header: &Header, total_difficulty: U256) -> EnvWithHandlerCfg { - let mut cfg = CfgEnvWithHandlerCfg::new(Default::default(), Default::default()); - let mut block_env = BlockEnv::default(); - self.evm_config.fill_cfg_and_block_env(&mut cfg, &mut block_env, header, total_difficulty); - - EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, Default::default()) - } -} - -impl BlockExecutionStrategy for OpExecutionStrategy -where - DB: Database + Display>, -{ - type Error = BlockExecutionError; - - fn apply_pre_execution_changes( - &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - ) -> Result<(), Self::Error> { - // Set state clear flag if the block is after the Spurious Dragon hardfork. - let state_clear_flag = - (*self.chain_spec).is_spurious_dragon_active_at_block(block.header.number); - self.state.set_state_clear_flag(state_clear_flag); - - let env = self.evm_env_for_block(&block.header, total_difficulty); - let mut evm = self.evm_config.evm_with_env(&mut self.state, env); - - self.system_caller.apply_beacon_root_contract_call( - block.timestamp, - block.number, - block.parent_beacon_block_root, - &mut evm, - )?; - - // Ensure that the create2deployer is force-deployed at the canyon transition. Optimism - // blocks will always have at least a single transaction in them (the L1 info transaction), - // so we can safely assume that this will always be triggered upon the transition and that - // the above check for empty blocks will never be hit on OP chains. 
- ensure_create2_deployer(self.chain_spec.clone(), block.timestamp, evm.db_mut()) - .map_err(|_| OptimismBlockExecutionError::ForceCreate2DeployerFail)?; - - Ok(()) - } - - fn execute_transactions( - &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - ) -> Result<(Vec, u64), Self::Error> { - let env = self.evm_env_for_block(&block.header, total_difficulty); - let mut evm = self.evm_config.evm_with_env(&mut self.state, env); - - let is_regolith = - self.chain_spec.fork(OptimismHardfork::Regolith).active_at_timestamp(block.timestamp); - - let mut cumulative_gas_used = 0; - let mut receipts = Vec::with_capacity(block.body.transactions.len()); - for (sender, transaction) in block.transactions_with_sender() { - // The sum of the transaction’s gas limit, Tg, and the gas utilized in this block prior, - // must be no greater than the block’s gasLimit. - let block_available_gas = block.header.gas_limit - cumulative_gas_used; - if transaction.gas_limit() > block_available_gas && - (is_regolith || !transaction.is_system_transaction()) - { - return Err(BlockValidationError::TransactionGasLimitMoreThanAvailableBlockGas { - transaction_gas_limit: transaction.gas_limit(), - block_available_gas, - } - .into()) - } - - // An optimism block should never contain blob transactions. - if matches!(transaction.tx_type(), TxType::Eip4844) { - return Err(OptimismBlockExecutionError::BlobTransactionRejected.into()) - } - - // Cache the depositor account prior to the state transition for the deposit nonce. - // - // Note that this *only* needs to be done post-regolith hardfork, as deposit nonces - // were not introduced in Bedrock. In addition, regular transactions don't have deposit - // nonces, so we don't need to touch the DB for those. - let depositor = (is_regolith && transaction.is_deposit()) - .then(|| { - evm.db_mut() - .load_cache_account(*sender) - .map(|acc| acc.account_info().unwrap_or_default()) - }) - .transpose() - .map_err(|_| OptimismBlockExecutionError::AccountLoadFailed(*sender))?; - - self.evm_config.fill_tx_env(evm.tx_mut(), transaction, *sender); - - // Execute transaction. - let result_and_state = evm.transact().map_err(move |err| { - let new_err = err.map_db_err(|e| e.into()); - // Ensure hash is calculated for error log, if not already done - BlockValidationError::EVM { - hash: transaction.recalculate_hash(), - error: Box::new(new_err), - } - })?; - - trace!( - target: "evm", - ?transaction, - "Executed transaction" - ); - self.system_caller.on_state(&result_and_state); - let ResultAndState { result, state } = result_and_state; - evm.db_mut().commit(state); - - // append gas used - cumulative_gas_used += result.gas_used(); - - // Push transaction changeset and calculate header bloom filter for receipt. - receipts.push(Receipt { - tx_type: transaction.tx_type(), - // Success flag was added in `EIP-658: Embedding transaction status code in - // receipts`. - success: result.is_success(), - cumulative_gas_used, - logs: result.into_logs(), - deposit_nonce: depositor.map(|account| account.nonce), - // The deposit receipt version was introduced in Canyon to indicate an update to how - // receipt hashes should be computed when set. The state transition process ensures - // this is only set for post-Canyon deposit transactions. 
- deposit_receipt_version: (transaction.is_deposit() && - self.chain_spec - .is_fork_active_at_timestamp(OptimismHardfork::Canyon, block.timestamp)) - .then_some(1), - }); - } - - Ok((receipts, cumulative_gas_used)) - } - - fn apply_post_execution_changes( - &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - _receipts: &[Receipt], - ) -> Result { - let balance_increments = - post_block_balance_increments(&self.chain_spec.clone(), block, total_difficulty); - // increment balances - self.state - .increment_balances(balance_increments) - .map_err(|_| BlockValidationError::IncrementBalanceFailed)?; - - Ok(Requests::default()) - } - - fn state_ref(&self) -> &State { - &self.state - } - - fn state_mut(&mut self) -> &mut State { - &mut self.state - } - - fn with_state_hook(&mut self, hook: Option>) { - self.system_caller.with_state_hook(hook); - } - - fn finish(&mut self) -> BundleState { - self.state.merge_transitions(BundleRetention::Reverts); - self.state.take_bundle() - } - - fn validate_block_post_execution( - &self, - block: &BlockWithSenders, - receipts: &[Receipt], - _requests: &Requests, - ) -> Result<(), ConsensusError> { - validate_block_post_execution(block, &self.chain_spec.clone(), receipts) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::OpChainSpec; - use alloy_consensus::TxEip1559; - use alloy_primitives::{b256, Address, StorageKey, StorageValue}; - use reth_chainspec::MIN_TRANSACTION_GAS; - use reth_evm::execute::{BasicBlockExecutorProvider, BatchExecutor, BlockExecutorProvider}; - use reth_optimism_chainspec::{optimism_deposit_tx_signature, OpChainSpecBuilder}; - use reth_primitives::{Account, Block, BlockBody, Signature, Transaction, TransactionSigned}; - use reth_revm::{ - database::StateProviderDatabase, test_utils::StateProviderTest, L1_BLOCK_CONTRACT, - }; - use std::{collections::HashMap, str::FromStr}; - - fn create_op_state_provider() -> StateProviderTest { - let mut db = StateProviderTest::default(); - - let l1_block_contract_account = - Account { balance: U256::ZERO, bytecode_hash: None, nonce: 1 }; - - let mut l1_block_storage = HashMap::default(); - // base fee - l1_block_storage.insert(StorageKey::with_last_byte(1), StorageValue::from(1000000000)); - // l1 fee overhead - l1_block_storage.insert(StorageKey::with_last_byte(5), StorageValue::from(188)); - // l1 fee scalar - l1_block_storage.insert(StorageKey::with_last_byte(6), StorageValue::from(684000)); - // l1 free scalars post ecotone - l1_block_storage.insert( - StorageKey::with_last_byte(3), - StorageValue::from_str( - "0x0000000000000000000000000000000000001db0000d27300000000000000005", - ) - .unwrap(), - ); - - db.insert_account(L1_BLOCK_CONTRACT, l1_block_contract_account, None, l1_block_storage); - - db - } - - fn executor_provider( - chain_spec: Arc, - ) -> BasicBlockExecutorProvider { - let strategy_factory = - OpExecutionStrategyFactory::new(chain_spec.clone(), OptimismEvmConfig::new(chain_spec)); - - BasicBlockExecutorProvider::new(strategy_factory) - } - - #[test] - fn op_deposit_fields_pre_canyon() { - let header = Header { - timestamp: 1, - number: 1, - gas_limit: 1_000_000, - gas_used: 42_000, - receipts_root: b256!( - "83465d1e7d01578c0d609be33570f91242f013e9e295b0879905346abbd63731" - ), - ..Default::default() - }; - - let mut db = create_op_state_provider(); - - let addr = Address::ZERO; - let account = Account { balance: U256::MAX, ..Account::default() }; - db.insert_account(addr, account, None, HashMap::default()); - - let chain_spec = 
Arc::new(OpChainSpecBuilder::base_mainnet().regolith_activated().build()); - - let tx = TransactionSigned::from_transaction_and_signature( - Transaction::Eip1559(TxEip1559 { - chain_id: chain_spec.chain.id(), - nonce: 0, - gas_limit: MIN_TRANSACTION_GAS, - to: addr.into(), - ..Default::default() - }), - Signature::test_signature(), - ); - - let tx_deposit = TransactionSigned::from_transaction_and_signature( - Transaction::Deposit(op_alloy_consensus::TxDeposit { - from: addr, - to: addr.into(), - gas_limit: MIN_TRANSACTION_GAS, - ..Default::default() - }), - Signature::test_signature(), - ); - - let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); - - // make sure the L1 block contract state is preloaded. - executor.with_state_mut(|state| { - state.load_cache_account(L1_BLOCK_CONTRACT).unwrap(); - }); - - // Attempt to execute a block with one deposit and one non-deposit transaction - executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { - header, - body: BlockBody { - transactions: vec![tx, tx_deposit], - ..Default::default() - }, - }, - senders: vec![addr, addr], - }, - U256::ZERO, - ) - .into(), - ) - .unwrap(); - - let receipts = executor.receipts(); - let tx_receipt = receipts[0][0].as_ref().unwrap(); - let deposit_receipt = receipts[0][1].as_ref().unwrap(); - - // deposit_receipt_version is not present in pre canyon transactions - assert!(deposit_receipt.deposit_receipt_version.is_none()); - assert!(tx_receipt.deposit_receipt_version.is_none()); - - // deposit_nonce is present only in deposit transactions - assert!(deposit_receipt.deposit_nonce.is_some()); - assert!(tx_receipt.deposit_nonce.is_none()); - } - - #[test] - fn op_deposit_fields_post_canyon() { - // ensure_create2_deployer will fail if timestamp is set to less then 2 - let header = Header { - timestamp: 2, - number: 1, - gas_limit: 1_000_000, - gas_used: 42_000, - receipts_root: b256!( - "fffc85c4004fd03c7bfbe5491fae98a7473126c099ac11e8286fd0013f15f908" - ), - ..Default::default() - }; - - let mut db = create_op_state_provider(); - let addr = Address::ZERO; - let account = Account { balance: U256::MAX, ..Account::default() }; - - db.insert_account(addr, account, None, HashMap::default()); - - let chain_spec = Arc::new(OpChainSpecBuilder::base_mainnet().canyon_activated().build()); - - let tx = TransactionSigned::from_transaction_and_signature( - Transaction::Eip1559(TxEip1559 { - chain_id: chain_spec.chain.id(), - nonce: 0, - gas_limit: MIN_TRANSACTION_GAS, - to: addr.into(), - ..Default::default() - }), - Signature::test_signature(), - ); - - let tx_deposit = TransactionSigned::from_transaction_and_signature( - Transaction::Deposit(op_alloy_consensus::TxDeposit { - from: addr, - to: addr.into(), - gas_limit: MIN_TRANSACTION_GAS, - ..Default::default() - }), - optimism_deposit_tx_signature(), - ); - - let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); - - // make sure the L1 block contract state is preloaded. 
- executor.with_state_mut(|state| { - state.load_cache_account(L1_BLOCK_CONTRACT).unwrap(); - }); - - // attempt to execute an empty block with parent beacon block root, this should not fail - executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { - header, - body: BlockBody { - transactions: vec![tx, tx_deposit], - ..Default::default() - }, - }, - senders: vec![addr, addr], - }, - U256::ZERO, - ) - .into(), - ) - .expect("Executing a block while canyon is active should not fail"); - - let receipts = executor.receipts(); - let tx_receipt = receipts[0][0].as_ref().unwrap(); - let deposit_receipt = receipts[0][1].as_ref().unwrap(); - - // deposit_receipt_version is set to 1 for post canyon deposit transactions - assert_eq!(deposit_receipt.deposit_receipt_version, Some(1)); - assert!(tx_receipt.deposit_receipt_version.is_none()); - - // deposit_nonce is present only in deposit transactions - assert!(deposit_receipt.deposit_nonce.is_some()); - assert!(tx_receipt.deposit_nonce.is_none()); - } -} diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 175b2d4bf..22fc1a88f 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -4,7 +4,7 @@ use std::sync::Arc; use reth_basic_payload_builder::{BasicPayloadJobGenerator, BasicPayloadJobGeneratorConfig}; use reth_chainspec::{EthChainSpec, Hardforks}; -use reth_evm::ConfigureEvm; +use reth_evm::{execute::BasicBlockExecutorProvider, ConfigureEvm}; use reth_network::{NetworkConfig, NetworkHandle, NetworkManager}; use reth_node_api::{ AddOnsContext, EngineValidator, FullNodeComponents, NodeAddOns, NodePrimitives, @@ -20,7 +20,7 @@ use reth_node_builder::{ }; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::OptimismBeaconConsensus; -use reth_optimism_evm::{OpExecutorProvider, OptimismEvmConfig}; +use reth_optimism_evm::{OpExecutionStrategyFactory, OptimismEvmConfig}; use reth_optimism_rpc::OpEthApi; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; use reth_primitives::{Block, Header}; @@ -184,14 +184,16 @@ where Node: FullNodeTypes>, { type EVM = OptimismEvmConfig; - type Executor = OpExecutorProvider; + type Executor = BasicBlockExecutorProvider; async fn build_evm( self, ctx: &BuilderContext, ) -> eyre::Result<(Self::EVM, Self::Executor)> { let evm_config = OptimismEvmConfig::new(ctx.chain_spec()); - let executor = OpExecutorProvider::new(ctx.chain_spec(), evm_config.clone()); + let strategy_factory = + OpExecutionStrategyFactory::new(ctx.chain_spec(), evm_config.clone()); + let executor = BasicBlockExecutorProvider::new(strategy_factory); Ok((evm_config, executor)) } From de07436f0e5d534873cd4670f75df8918abf2bec Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 20 Oct 2024 12:43:31 +0000 Subject: [PATCH 157/425] chore(deps): weekly `cargo update` (#11902) Co-authored-by: github-merge-queue <118344674+github-merge-queue@users.noreply.github.com> --- Cargo.lock | 213 ++++++++++++++++++++++++++--------------------------- 1 file changed, 105 insertions(+), 108 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4def3dcab..19287e666 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -97,9 +97,9 @@ checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "alloy-chains" -version = "0.1.38" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"156bfc5dcd52ef9a5f33381701fa03310317e14c65093a9430d3e3557b08dcd3" +checksum = "d4932d790c723181807738cf1ac68198ab581cd699545b155601332541ee47bd" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -394,7 +394,7 @@ checksum = "4d0f2d905ebd295e7effec65e5f6868d153936130ae718352771de3e7d03c75c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -618,7 +618,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -634,7 +634,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", "syn-solidity", "tiny-keccak", ] @@ -650,7 +650,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", "syn-solidity", ] @@ -841,9 +841,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.89" +version = "1.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" +checksum = "37bf3594c4c988a53154954629820791dde498571819ae4ca50ca811e060cc95" [[package]] name = "aquamarine" @@ -856,7 +856,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -1032,9 +1032,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.15" +version = "0.4.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e26a9844c659a2a293d239c7910b752f8487fe122c6c8bd1659bf85a6507c302" +checksum = "103db485efc3e41214fe4fda9f3dbeae2eb9082f48fd236e6095627a9422066e" dependencies = [ "brotli", "flate2", @@ -1079,7 +1079,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -1090,7 +1090,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -1128,7 +1128,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -1234,7 +1234,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -1416,7 +1416,7 @@ checksum = "240f4126219a83519bad05c9a40bfc0303921eeb571fc2d7e44c17ffac99d3f1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", "synstructure", ] @@ -1538,7 +1538,7 @@ checksum = "bcfcc3cd946cb52f0bbfdbbcfa2f4e24f75ebb6c0e1002f7c25904fada18b9ec" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -1626,9 +1626,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.30" +version = "1.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16803a61b81d9eabb7eae2588776c4c1e584b738ede45fdbb4c972cec1e9945" +checksum = "c2e7962b54006dcfcc61cb72735f4d89bb97061dd6a7ed882ec6b8ee53714c6f" dependencies = [ "jobserver", "libc", @@ -1750,7 +1750,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -2038,9 +2038,9 @@ dependencies = [ [[package]] name = "critical-section" -version = "1.1.3" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f64009896348fc5af4222e9cf7d7d82a95a256c634ebcf61c53e4ea461422242" +checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" [[package]] name = 
"crossbeam-channel" @@ -2207,7 +2207,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -2231,7 +2231,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -2242,7 +2242,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -2363,7 +2363,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -2374,7 +2374,7 @@ checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -2395,7 +2395,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", "unicode-xid", ] @@ -2509,7 +2509,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -2657,7 +2657,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -2668,7 +2668,7 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -2725,7 +2725,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -3263,7 +3263,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -3707,9 +3707,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.4.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +checksum = "bbbff0a806a4728c99295b254c8838933b5b082d75e3cb70c8dab21fdfbcfa9a" dependencies = [ "bytes", "futures-channel", @@ -3789,7 +3789,7 @@ dependencies = [ "quote", "serde", "serde_json", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -3939,7 +3939,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -4107,7 +4107,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b23a0c8dfe501baac4adf6ebbfa6eddf8f0c07f56b058cc1288017e32397846c" dependencies = [ "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -4252,9 +4252,9 @@ dependencies = [ [[package]] name = "jsonrpsee" -version = "0.24.6" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02f01f48e04e0d7da72280ab787c9943695699c9b32b99158ece105e8ad0afea" +checksum = "c5c71d8c1a731cc4227c2f698d377e7848ca12c8a48866fc5e6951c43a4db843" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", @@ -4270,9 +4270,9 @@ dependencies = [ [[package]] name = "jsonrpsee-client-transport" -version = "0.24.6" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d80eccbd47a7b9f1e67663fd846928e941cb49c65236e297dd11c9ea3c5e3387" +checksum = "548125b159ba1314104f5bb5f38519e03a41862786aa3925cf349aae9cdd546e" dependencies = [ "base64 0.22.1", "futures-channel", @@ -4295,9 +4295,9 @@ 
dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.24.6" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c2709a32915d816a6e8f625bf72cf74523ebe5d8829f895d6b041b1d3137818" +checksum = "f2882f6f8acb9fdaec7cefc4fd607119a9bd709831df7d7672a1d3b644628280" dependencies = [ "async-trait", "bytes", @@ -4322,9 +4322,9 @@ dependencies = [ [[package]] name = "jsonrpsee-http-client" -version = "0.24.6" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc54db939002b030e794fbfc9d5a925aa2854889c5a2f0352b0bffa54681707e" +checksum = "b3638bc4617f96675973253b3a45006933bde93c2fd8a6170b33c777cc389e5b" dependencies = [ "async-trait", "base64 0.22.1", @@ -4347,22 +4347,22 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.24.6" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a9a4b2eaba8cc928f49c4ccf4fcfa65b690a73997682da99ed08f3393b51f07" +checksum = "c06c01ae0007548e73412c08e2285ffe5d723195bf268bce67b1b77c3bb2a14d" dependencies = [ "heck", "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] name = "jsonrpsee-server" -version = "0.24.6" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e30110d0f2d7866c8cc6c86483bdab2eb9f4d2f0e20db55518b2bca84651ba8e" +checksum = "82ad8ddc14be1d4290cd68046e7d1d37acd408efed6d3ca08aefcc3ad6da069c" dependencies = [ "futures-util", "http", @@ -4387,9 +4387,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.24.6" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ca331cd7b3fe95b33432825c2d4c9f5a43963e207fdc01ae67f9fd80ab0930f" +checksum = "a178c60086f24cc35bb82f57c651d0d25d99c4742b4d335de04e97fa1f08a8a1" dependencies = [ "http", "serde", @@ -4399,9 +4399,9 @@ dependencies = [ [[package]] name = "jsonrpsee-wasm-client" -version = "0.24.6" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c603d97578071dc44d79d3cfaf0775437638fd5adc33c6b622dfe4fa2ec812d" +checksum = "1a01cd500915d24ab28ca17527e23901ef1be6d659a2322451e1045532516c25" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", @@ -4410,9 +4410,9 @@ dependencies = [ [[package]] name = "jsonrpsee-ws-client" -version = "0.24.6" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "755ca3da1c67671f1fae01cd1a47f41dfb2233a8f19a643e587ab0a663942044" +checksum = "0fe322e0896d0955a3ebdd5bf813571c53fea29edd713bc315b76620b327e86d" dependencies = [ "http", "jsonrpsee-client-transport", @@ -4515,9 +4515,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.159" +version = "0.2.161" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" +checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1" [[package]] name = "libloading" @@ -4526,7 +4526,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -4773,7 +4773,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -4920,7 +4920,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.79", 
+ "syn 2.0.80", ] [[package]] @@ -5168,7 +5168,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -5547,7 +5547,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -5576,7 +5576,7 @@ checksum = "a4502d8515ca9f32f1fb543d987f63d95a14934883db45bdb48060b6b69257f8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -5743,12 +5743,12 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.2.22" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479cf940fbbb3426c32c5d5176f62ad57549a0bb84773423ba8be9d089f5faba" +checksum = "904afd36257cdb6ce0bee88b7981847bd7b955e5e216bb32f466b302923ad446" dependencies = [ "proc-macro2", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -5823,14 +5823,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] name = "proc-macro2" -version = "1.0.87" +version = "1.0.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3e4daa0dcf6feba26f985457cdf104d4b4256fc5a09547140f3631bb076b19a" +checksum = "7c3a7fc5db1e57d5a779a352c8cdb57b29aa4c40cc69c3a68a7fedc815fbf2f9" dependencies = [ "unicode-ident", ] @@ -5921,7 +5921,7 @@ checksum = "6ff7ff745a347b87471d859a377a9a404361e7efc2a971d73424a6d183c0fc77" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -6744,7 +6744,7 @@ dependencies = [ "proc-macro2", "quote", "similar-asserts", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -9589,9 +9589,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.14" +version = "0.23.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "415d9944693cb90382053259f89fbb077ea730ad7273047ec63b19bc9b160ba8" +checksum = "5fbb44d7acc4e873d613422379f69f237a1b141928c02f6bc6ccfddddc2d7993" dependencies = [ "log", "once_cell", @@ -9881,14 +9881,14 @@ checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] name = "serde_json" -version = "1.0.128" +version = "1.0.132" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" +checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" dependencies = [ "indexmap 2.6.0", "itoa", @@ -9916,7 +9916,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -9967,7 +9967,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -9990,7 +9990,7 @@ checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -10286,7 +10286,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -10344,9 +10344,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.79" +version = "2.0.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590" +checksum = "e6e185e337f816bc8da115b8afcb3324006ccc82eeaddf35113888d3bd8e44ac" dependencies = [ "proc-macro2", "quote", @@ -10362,7 +10362,7 
@@ dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -10388,7 +10388,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -10465,7 +10465,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -10504,7 +10504,7 @@ checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -10681,7 +10681,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -10882,7 +10882,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -11106,12 +11106,9 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicase" -version = "2.7.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" -dependencies = [ - "version_check", -] +checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" [[package]] name = "unicode-bidi" @@ -11229,9 +11226,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" +checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" dependencies = [ "getrandom 0.2.15", ] @@ -11330,7 +11327,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", "wasm-bindgen-shared", ] @@ -11364,7 +11361,7 @@ checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11520,7 +11517,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -11531,7 +11528,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -11542,7 +11539,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -11553,7 +11550,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -11828,7 +11825,7 @@ checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", "synstructure", ] @@ -11850,7 +11847,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -11870,7 +11867,7 @@ checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", 
"synstructure", ] @@ -11891,7 +11888,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] @@ -11913,7 +11910,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.80", ] [[package]] From e9c09723edf7b75a8dcdc5f3d458307120713356 Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Sun, 20 Oct 2024 22:39:42 +0800 Subject: [PATCH 158/425] docs(blockchain-tree): rm comment (#11903) --- crates/blockchain-tree/src/blockchain_tree.rs | 3 --- crates/blockchain-tree/src/externals.rs | 1 - 2 files changed, 4 deletions(-) diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index 1e2ed2a4a..95c0361f3 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -113,9 +113,6 @@ where /// is crucial for the correct execution of transactions. /// - `tree_config`: Configuration for the blockchain tree, including any parameters that affect /// its structure or performance. - /// - `prune_modes`: Configuration for pruning old blockchain data. This helps in managing the - /// storage space efficiently. It's important to validate this configuration to ensure it does - /// not lead to unintended data loss. pub fn new( externals: TreeExternals, config: BlockchainTreeConfig, diff --git a/crates/blockchain-tree/src/externals.rs b/crates/blockchain-tree/src/externals.rs index 719852c12..4e22fcb78 100644 --- a/crates/blockchain-tree/src/externals.rs +++ b/crates/blockchain-tree/src/externals.rs @@ -21,7 +21,6 @@ use std::{collections::BTreeMap, sync::Arc}; /// - A handle to the database /// - A handle to the consensus engine /// - The executor factory to execute blocks with -/// - The chain spec #[derive(Debug)] pub struct TreeExternals { /// The provider factory, used to commit the canonical chain, or unwind it. From 5fca07ca8752163aecfe1334fa8474565db20b66 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Sun, 20 Oct 2024 21:23:31 +0200 Subject: [PATCH 159/425] fix: impl BlockExecutionStrategy for OpExecutionStrategy generic over EvmConfig (#11910) --- crates/optimism/evm/src/execute.rs | 36 ++++++++++++++++++++---------- 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index 77f670668..10ac5c525 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -14,7 +14,7 @@ use reth_evm::{ }, state_change::post_block_balance_increments, system_calls::{OnStateHook, SystemCaller}, - ConfigureEvm, ConfigureEvmEnv, + ConfigureEvm, }; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::validate_block_post_execution; @@ -52,8 +52,13 @@ impl OpExecutionStrategyFactory { } } -impl BlockExecutionStrategyFactory for OpExecutionStrategyFactory { - type Strategy + Display>> = OpExecutionStrategy; +impl BlockExecutionStrategyFactory for OpExecutionStrategyFactory +where + EvmConfig: + Clone + Unpin + Sync + Send + 'static + ConfigureEvm
, +{ + type Strategy + Display>> = + OpExecutionStrategy; fn create_strategy(&self, db: DB) -> Self::Strategy where @@ -67,7 +72,10 @@ impl BlockExecutionStrategyFactory for OpExecutionStrategyFactory { /// Block execution strategy for Optimism. #[allow(missing_debug_implementations)] -pub struct OpExecutionStrategy { +pub struct OpExecutionStrategy +where + EvmConfig: Clone, +{ /// The chainspec chain_spec: Arc, /// How to create an EVM. @@ -78,19 +86,22 @@ pub struct OpExecutionStrategy { system_caller: SystemCaller, } -impl OpExecutionStrategy { +impl OpExecutionStrategy +where + EvmConfig: Clone, +{ /// Creates a new [`OpExecutionStrategy`] - pub fn new( - state: State, - chain_spec: Arc, - evm_config: OptimismEvmConfig, - ) -> Self { + pub fn new(state: State, chain_spec: Arc, evm_config: EvmConfig) -> Self { let system_caller = SystemCaller::new(evm_config.clone(), (*chain_spec).clone()); Self { state, chain_spec, evm_config, system_caller } } } -impl OpExecutionStrategy { +impl OpExecutionStrategy +where + DB: Database + Display>, + EvmConfig: ConfigureEvm
, +{ /// Configures a new evm configuration and block environment for the given block. /// /// Caution: this does not initialize the tx environment. @@ -103,9 +114,10 @@ impl OpExecutionStrategy { } } -impl BlockExecutionStrategy for OpExecutionStrategy +impl BlockExecutionStrategy for OpExecutionStrategy where DB: Database + Display>, + EvmConfig: ConfigureEvm
, { type Error = BlockExecutionError; From a188597a3c6a9590ab54cfc9e02c18e7e0760be3 Mon Sep 17 00:00:00 2001 From: Oliver Date: Sun, 20 Oct 2024 22:55:14 +0200 Subject: [PATCH 160/425] ci: merge sync jobs (#11909) --- .github/workflows/eth-sync.yml | 53 ---------------------------- .github/workflows/op-sync.yml | 55 ----------------------------- .github/workflows/sync.yml | 63 ++++++++++++++++++++++++++++++++++ 3 files changed, 63 insertions(+), 108 deletions(-) delete mode 100644 .github/workflows/eth-sync.yml delete mode 100644 .github/workflows/op-sync.yml create mode 100644 .github/workflows/sync.yml diff --git a/.github/workflows/eth-sync.yml b/.github/workflows/eth-sync.yml deleted file mode 100644 index f473e29a5..000000000 --- a/.github/workflows/eth-sync.yml +++ /dev/null @@ -1,53 +0,0 @@ -# Runs an ethereum mainnet sync test. - -name: eth-sync-test - -on: - pull_request: - merge_group: - push: - branches: [ main ] - -env: - CARGO_TERM_COLOR: always - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -jobs: - sync: - name: sync / 100k blocks - # Only run sync tests in merge groups - if: github.event_name == 'merge_group' - runs-on: - group: Reth - env: - RUST_LOG: info,sync=error - RUST_BACKTRACE: 1 - timeout-minutes: 60 - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@stable - - uses: Swatinem/rust-cache@v2 - with: - cache-on-failure: true - - name: Build reth - run: | - cargo install --features asm-keccak,jemalloc --path bin/reth - - name: Run sync - run: | - reth node \ - --debug.tip 0x91c90676cab257a59cd956d7cb0bceb9b1a71d79755c23c7277a0697ccfaf8c4 \ - --debug.max-block 100000 \ - --debug.terminate - - name: Verify the target block hash - run: | - reth db get static-file headers 100000 \ - | grep 0x91c90676cab257a59cd956d7cb0bceb9b1a71d79755c23c7277a0697ccfaf8c4 - - name: Run stage unwind for 100 blocks - run: | - reth stage unwind num-blocks 100 - - name: Run stage unwind to block hash - run: | - reth stage unwind to-block 0x52e0509d33a988ef807058e2980099ee3070187f7333aae12b64d4d675f34c5a diff --git a/.github/workflows/op-sync.yml b/.github/workflows/op-sync.yml deleted file mode 100644 index 2a223391d..000000000 --- a/.github/workflows/op-sync.yml +++ /dev/null @@ -1,55 +0,0 @@ -# Runs a base mainnet sync test. 
- -name: op-sync-test - -on: - pull_request: - merge_group: - push: - branches: [ main ] - -env: - CARGO_TERM_COLOR: always - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -jobs: - sync: - name: op sync / 10k blocks - # Only run sync tests in merge groups - if: github.event_name == 'merge_group' - runs-on: - group: Reth - env: - RUST_LOG: info,sync=error - RUST_BACKTRACE: 1 - timeout-minutes: 60 - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@stable - - uses: Swatinem/rust-cache@v2 - with: - cache-on-failure: true - - name: Build op-reth - run: make install-op - - name: Run sync - # https://basescan.org/block/10000 - run: | - op-reth node \ - --chain base \ - --debug.tip 0xbb9b85352c7ebca6ba8efc63bd66cecd038c92ec8ebd02e153a3e0b197e672b7 \ - --debug.max-block 10000 \ - --debug.terminate - - name: Verify the target block hash - run: | - op-reth db --chain base get static-file headers 10000 \ - | grep 0xbb9b85352c7ebca6ba8efc63bd66cecd038c92ec8ebd02e153a3e0b197e672b7 - - name: Run stage unwind for 100 blocks - run: | - op-reth stage --chain base unwind num-blocks 100 - - name: Run stage unwind to block hash - run: | - op-reth stage --chain base unwind to-block 0x118a6e922a8c6cab221fc5adfe5056d2b72d58c6580e9c5629de55299e2cf8de - diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml new file mode 100644 index 000000000..531d04b2e --- /dev/null +++ b/.github/workflows/sync.yml @@ -0,0 +1,63 @@ +# Runs sync tests. + +name: sync test + +on: + merge_group: + +env: + CARGO_TERM_COLOR: always + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + sync: + name: sync (${{ matrix.chain.bin }}) + runs-on: + group: Reth + env: + RUST_LOG: info,sync=error + RUST_BACKTRACE: 1 + timeout-minutes: 60 + strategy: + matrix: + chain: + - build: install + bin: reth + chain: mainnet + tip: "0x91c90676cab257a59cd956d7cb0bceb9b1a71d79755c23c7277a0697ccfaf8c4" + block: 100000 + unwind-target: "0x52e0509d33a988ef807058e2980099ee3070187f7333aae12b64d4d675f34c5a" + - build: install-op + bin: op-reth + chain: base + tip: "0xbb9b85352c7ebca6ba8efc63bd66cecd038c92ec8ebd02e153a3e0b197e672b7" + block: 10000 + unwind-target: "0x118a6e922a8c6cab221fc5adfe5056d2b72d58c6580e9c5629de55299e2cf8de" + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + - uses: Swatinem/rust-cache@v2 + with: + cache-on-failure: true + - name: Build ${{ matrix.chain.bin }} + run: make ${{ matrix.chain.build }} + - name: Run sync + run: | + ${{ matrix.chain.bin }} node \ + --chain ${{ matrix.chain.chain }} \ + --debug.tip ${{ matrix.chain.tip }} \ + --debug.max-block ${{ matrix.chain.block }} \ + --debug.terminate + - name: Verify the target block hash + run: | + ${{ matrix.chain.bin }} db --chain ${{ matrix.chain.chain }} get static-file headers ${{ matrix.chain.block }} \ + | grep ${{ matrix.chain.tip }} + - name: Run stage unwind for 100 blocks + run: | + ${{ matrix.chain.bin }} stage --chain ${{ matrix.chain.chain }} unwind num-blocks 100 + - name: Run stage unwind to block hash + run: | + ${{ matrix.chain.bin }} stage --chain ${{ matrix.chain.chain }} unwind to-block ${{ matrix.chain.unwind-target }} From 0270128d4f7a9f6fad27dff69273095abdfa7452 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Sun, 20 Oct 2024 23:42:45 +0200 Subject: [PATCH 161/425] refactor(txpool): small refactor in 
`DiskFileBlobStoreInner` `get_exact` (#11911) --- crates/transaction-pool/src/blobstore/disk.rs | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/crates/transaction-pool/src/blobstore/disk.rs b/crates/transaction-pool/src/blobstore/disk.rs index 96119a0f8..e168a1c11 100644 --- a/crates/transaction-pool/src/blobstore/disk.rs +++ b/crates/transaction-pool/src/blobstore/disk.rs @@ -409,13 +409,9 @@ impl DiskFileBlobStoreInner { /// Returns an error if there are any missing blobs. #[inline] fn get_exact(&self, txs: Vec) -> Result, BlobStoreError> { - let mut res = Vec::with_capacity(txs.len()); - for tx in txs { - let blob = self.get_one(tx)?.ok_or_else(|| BlobStoreError::MissingSidecar(tx))?; - res.push(blob) - } - - Ok(res) + txs.into_iter() + .map(|tx| self.get_one(tx)?.ok_or(BlobStoreError::MissingSidecar(tx))) + .collect() } } From 2e8a8fe6f6147123ecb2892692ed09ef7e95d0ff Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 21 Oct 2024 01:10:36 +0200 Subject: [PATCH 162/425] doc(storage): add missing documentation for `nippy-jar` (#11913) --- .../storage/nippy-jar/src/compression/mod.rs | 2 + .../storage/nippy-jar/src/compression/zstd.rs | 4 ++ crates/storage/nippy-jar/src/consistency.rs | 5 +++ crates/storage/nippy-jar/src/cursor.rs | 7 +++- crates/storage/nippy-jar/src/error.rs | 39 +++++++++++++++++++ crates/storage/nippy-jar/src/lib.rs | 7 +++- crates/storage/nippy-jar/src/writer.rs | 12 ++++++ 7 files changed, 72 insertions(+), 4 deletions(-) diff --git a/crates/storage/nippy-jar/src/compression/mod.rs b/crates/storage/nippy-jar/src/compression/mod.rs index 28a92fe90..f9bf8110e 100644 --- a/crates/storage/nippy-jar/src/compression/mod.rs +++ b/crates/storage/nippy-jar/src/compression/mod.rs @@ -44,7 +44,9 @@ pub trait Compression: Serialize + for<'a> Deserialize<'a> { #[derive(Debug, Serialize, Deserialize)] #[cfg_attr(test, derive(PartialEq))] pub enum Compressors { + /// Zstandard compression algorithm with custom settings. Zstd(Zstd), + /// LZ4 compression algorithm with custom settings. Lz4(Lz4), } diff --git a/crates/storage/nippy-jar/src/compression/zstd.rs b/crates/storage/nippy-jar/src/compression/zstd.rs index 500247d17..7685941df 100644 --- a/crates/storage/nippy-jar/src/compression/zstd.rs +++ b/crates/storage/nippy-jar/src/compression/zstd.rs @@ -12,10 +12,13 @@ pub use zstd::{bulk::Decompressor, dict::DecoderDictionary}; type RawDictionary = Vec; +/// Represents the state of a Zstandard compression operation. #[derive(Debug, Default, PartialEq, Eq, Serialize, Deserialize)] pub enum ZstdState { + /// The compressor is pending a dictionary. #[default] PendingDictionary, + /// The compressor is ready to perform compression. Ready, } @@ -51,6 +54,7 @@ impl Zstd { } } + /// Sets the compression level for the Zstd compression instance. pub const fn with_level(mut self, level: i32) -> Self { self.level = level; self diff --git a/crates/storage/nippy-jar/src/consistency.rs b/crates/storage/nippy-jar/src/consistency.rs index 1093fb554..952980ef6 100644 --- a/crates/storage/nippy-jar/src/consistency.rs +++ b/crates/storage/nippy-jar/src/consistency.rs @@ -28,6 +28,11 @@ pub struct NippyJarChecker { } impl NippyJarChecker { + /// Creates a new instance of [`NippyJarChecker`] with the provided [`NippyJar`]. + /// + /// This method initializes the checker without any associated file handles for + /// the data or offsets files. 
The [`NippyJar`] passed in contains all necessary + /// configurations for handling data. pub const fn new(jar: NippyJar) -> Self { Self { jar, data_file: None, offsets_file: None } } diff --git a/crates/storage/nippy-jar/src/cursor.rs b/crates/storage/nippy-jar/src/cursor.rs index 267764827..376411ac2 100644 --- a/crates/storage/nippy-jar/src/cursor.rs +++ b/crates/storage/nippy-jar/src/cursor.rs @@ -25,9 +25,10 @@ impl std::fmt::Debug for NippyJarCursor<'_, H> { } impl<'a, H: NippyJarHeader> NippyJarCursor<'a, H> { + /// Creates a new instance of [`NippyJarCursor`] for the given [`NippyJar`]. pub fn new(jar: &'a NippyJar) -> Result { let max_row_size = jar.max_row_size; - Ok(NippyJarCursor { + Ok(Self { jar, reader: Arc::new(jar.open_data_reader()?), // Makes sure that we have enough buffer capacity to decompress any row of data. @@ -36,12 +37,14 @@ impl<'a, H: NippyJarHeader> NippyJarCursor<'a, H> { }) } + /// Creates a new instance of [`NippyJarCursor`] with the specified [`NippyJar`] and data + /// reader. pub fn with_reader( jar: &'a NippyJar, reader: Arc, ) -> Result { let max_row_size = jar.max_row_size; - Ok(NippyJarCursor { + Ok(Self { jar, reader, // Makes sure that we have enough buffer capacity to decompress any row of data. diff --git a/crates/storage/nippy-jar/src/error.rs b/crates/storage/nippy-jar/src/error.rs index fc096cf84..f69bb44a0 100644 --- a/crates/storage/nippy-jar/src/error.rs +++ b/crates/storage/nippy-jar/src/error.rs @@ -4,53 +4,92 @@ use thiserror::Error; /// Errors associated with [`crate::NippyJar`]. #[derive(Error, Debug)] pub enum NippyJarError { + /// An internal error occurred, wrapping any type of error. #[error(transparent)] Internal(#[from] Box), + + /// An error occurred while disconnecting, wrapping a standard I/O error. #[error(transparent)] Disconnect(#[from] std::io::Error), + + /// An error related to the file system occurred, wrapping a file system path error. #[error(transparent)] FileSystem(#[from] reth_fs_util::FsPathError), + + /// A custom error message provided by the user. #[error("{0}")] Custom(String), + + /// An error occurred during serialization/deserialization with Bincode. #[error(transparent)] Bincode(#[from] Box), + + /// An error occurred with the Elias-Fano encoding/decoding process. #[error(transparent)] EliasFano(#[from] anyhow::Error), + + /// Compression was enabled, but the compressor is not ready yet. #[error("compression was enabled, but it's not ready yet")] CompressorNotReady, + + /// Decompression was enabled, but the decompressor is not ready yet. #[error("decompression was enabled, but it's not ready yet")] DecompressorNotReady, + + /// The number of columns does not match the expected length. #[error("number of columns does not match: {0} != {1}")] ColumnLenMismatch(usize, usize), + + /// An unexpected missing value was encountered at a specific row and column. #[error("unexpected missing value: row:col {0}:{1}")] UnexpectedMissingValue(u64, u64), + + /// The size of an offset exceeds the maximum allowed size of 8 bytes. #[error("the size of an offset must be at most 8 bytes, got {offset_size}")] OffsetSizeTooBig { /// The read offset size in number of bytes. offset_size: u8, }, + + /// The size of an offset is less than the minimum allowed size of 1 byte. #[error("the size of an offset must be at least 1 byte, got {offset_size}")] OffsetSizeTooSmall { /// The read offset size in number of bytes. offset_size: u8, }, + + /// An attempt was made to read an offset that is out of bounds. 
#[error("attempted to read an out of bounds offset: {index}")] OffsetOutOfBounds { /// The index of the offset that was being read. index: usize, }, + + /// The output buffer is too small for the compression or decompression operation. #[error("compression or decompression requires a bigger destination output")] OutputTooSmall, + + /// A dictionary is not loaded when it is required for operations. #[error("dictionary is not loaded.")] DictionaryNotLoaded, + + /// It's not possible to generate a compressor after loading a dictionary. #[error("it's not possible to generate a compressor after loading a dictionary.")] CompressorNotAllowed, + + /// The number of offsets is smaller than the requested prune size. #[error("number of offsets ({0}) is smaller than prune request ({1}).")] InvalidPruning(u64, u64), + + /// The jar has been frozen and cannot be modified. #[error("jar has been frozen and cannot be modified.")] FrozenJar, + + /// The file is in an inconsistent state. #[error("File is in an inconsistent state.")] InconsistentState, + + /// A specified file is missing. #[error("Missing file: {0}.")] MissingFile(PathBuf), } diff --git a/crates/storage/nippy-jar/src/lib.rs b/crates/storage/nippy-jar/src/lib.rs index bdc950aa3..b1d174feb 100644 --- a/crates/storage/nippy-jar/src/lib.rs +++ b/crates/storage/nippy-jar/src/lib.rs @@ -10,7 +10,6 @@ issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![allow(missing_docs)] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] use memmap2::Mmap; @@ -28,6 +27,7 @@ use std::os::windows::prelude::OpenOptionsExt; use tracing::*; +/// Compression algorithms supported by `NippyJar`. pub mod compression; #[cfg(test)] use compression::Compression; @@ -55,10 +55,13 @@ pub use writer::NippyJarWriter; mod consistency; pub use consistency::NippyJarChecker; +/// The version number of the Nippy Jar format. const NIPPY_JAR_VERSION: usize = 1; - +/// The file extension used for index files. const INDEX_FILE_EXTENSION: &str = "idx"; +/// The file extension used for offsets files. const OFFSETS_FILE_EXTENSION: &str = "off"; +/// The file extension used for configuration files. pub const CONFIG_FILE_EXTENSION: &str = "conf"; /// A [`RefRow`] is a list of column value slices pointing to either an internal buffer or a diff --git a/crates/storage/nippy-jar/src/writer.rs b/crates/storage/nippy-jar/src/writer.rs index 9bf9bf526..3a1003bee 100644 --- a/crates/storage/nippy-jar/src/writer.rs +++ b/crates/storage/nippy-jar/src/writer.rs @@ -354,6 +354,10 @@ impl NippyJarWriter { Ok(()) } + /// Commits changes to the data file and offsets without synchronizing all data to disk. + /// + /// This function flushes the buffered data to the data file and commits the offsets, + /// but it does not guarantee that all data is synchronized to persistent storage. #[cfg(feature = "test-utils")] pub fn commit_without_sync_all(&mut self) -> Result<(), NippyJarError> { self.data_file.flush()?; @@ -412,41 +416,49 @@ impl NippyJarWriter { Ok(()) } + /// Returns the maximum row size for the associated [`NippyJar`]. #[cfg(test)] pub const fn max_row_size(&self) -> usize { self.jar.max_row_size } + /// Returns the column index of the current checker instance. #[cfg(test)] pub const fn column(&self) -> usize { self.column } + /// Returns a reference to the offsets vector. #[cfg(test)] pub fn offsets(&self) -> &[u64] { &self.offsets } + /// Returns a mutable reference to the offsets vector. 
#[cfg(test)] pub fn offsets_mut(&mut self) -> &mut Vec { &mut self.offsets } + /// Returns the path to the offsets file for the associated [`NippyJar`]. #[cfg(test)] pub fn offsets_path(&self) -> std::path::PathBuf { self.jar.offsets_path() } + /// Returns the path to the data file for the associated [`NippyJar`]. #[cfg(test)] pub fn data_path(&self) -> &Path { self.jar.data_path() } + /// Returns a mutable reference to the buffered writer for the data file. #[cfg(any(test, feature = "test-utils"))] pub fn data_file(&mut self) -> &mut BufWriter { &mut self.data_file } + /// Returns a reference to the associated [`NippyJar`] instance. #[cfg(any(test, feature = "test-utils"))] pub const fn jar(&self) -> &NippyJar { &self.jar From ddc82e2645b584719996f3824596e4721174aeba Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 21 Oct 2024 08:31:29 +0200 Subject: [PATCH 163/425] test(static-file): add unit tests for `HighestStaticFiles` implementation (#11912) --- crates/static-file/types/src/lib.rs | 78 +++++++++++++++++++++++++++++ 1 file changed, 78 insertions(+) diff --git a/crates/static-file/types/src/lib.rs b/crates/static-file/types/src/lib.rs index 380931138..6e954a781 100644 --- a/crates/static-file/types/src/lib.rs +++ b/crates/static-file/types/src/lib.rs @@ -71,3 +71,81 @@ pub const fn find_fixed_range( let start = (block / blocks_per_static_file) * blocks_per_static_file; SegmentRangeInclusive::new(start, start + blocks_per_static_file - 1) } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_highest_static_files_highest() { + let files = + HighestStaticFiles { headers: Some(100), receipts: Some(200), transactions: None }; + + // Test for headers segment + assert_eq!(files.highest(StaticFileSegment::Headers), Some(100)); + + // Test for receipts segment + assert_eq!(files.highest(StaticFileSegment::Receipts), Some(200)); + + // Test for transactions segment + assert_eq!(files.highest(StaticFileSegment::Transactions), None); + } + + #[test] + fn test_highest_static_files_as_mut() { + let mut files = HighestStaticFiles::default(); + + // Modify headers value + *files.as_mut(StaticFileSegment::Headers) = Some(150); + assert_eq!(files.headers, Some(150)); + + // Modify receipts value + *files.as_mut(StaticFileSegment::Receipts) = Some(250); + assert_eq!(files.receipts, Some(250)); + + // Modify transactions value + *files.as_mut(StaticFileSegment::Transactions) = Some(350); + assert_eq!(files.transactions, Some(350)); + } + + #[test] + fn test_highest_static_files_min() { + let files = + HighestStaticFiles { headers: Some(300), receipts: Some(100), transactions: None }; + + // Minimum value among the available segments + assert_eq!(files.min(), Some(100)); + + let empty_files = HighestStaticFiles::default(); + // No values, should return None + assert_eq!(empty_files.min(), None); + } + + #[test] + fn test_highest_static_files_max() { + let files = + HighestStaticFiles { headers: Some(300), receipts: Some(100), transactions: Some(500) }; + + // Maximum value among the available segments + assert_eq!(files.max(), Some(500)); + + let empty_files = HighestStaticFiles::default(); + // No values, should return None + assert_eq!(empty_files.max(), None); + } + + #[test] + fn test_find_fixed_range() { + // Test with default block size + let block: BlockNumber = 600_000; + let range = find_fixed_range(block, DEFAULT_BLOCKS_PER_STATIC_FILE); + assert_eq!(range.start(), 500_000); + assert_eq!(range.end(), 999_999); + + // Test with 
a custom block size + let block: BlockNumber = 1_200_000; + let range = find_fixed_range(block, 1_000_000); + assert_eq!(range.start(), 1_000_000); + assert_eq!(range.end(), 1_999_999); + } +} From fbb27ebdad01780a2068399a01e7337bc0bb514f Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Mon, 21 Oct 2024 10:24:36 +0200 Subject: [PATCH 164/425] chore(ci): update excluded crates in wasm checker (#11915) --- .github/assets/check_wasm.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/assets/check_wasm.sh b/.github/assets/check_wasm.sh index 52d700941..3b2f2d6b7 100755 --- a/.github/assets/check_wasm.sh +++ b/.github/assets/check_wasm.sh @@ -36,7 +36,6 @@ exclude_crates=( reth-ethereum-engine-primitives reth-ethereum-payload-builder reth-etl - reth-evm-ethereum reth-exex reth-exex-test-utils reth-ipc From bdad91b7002913a8bb6ec08982752685b2c5b604 Mon Sep 17 00:00:00 2001 From: greged93 <82421016+greged93@users.noreply.github.com> Date: Mon, 21 Oct 2024 11:42:46 +0200 Subject: [PATCH 165/425] docs: update grafana docs (#11908) --- etc/README.md | 41 +++++++++++++++-------------------------- 1 file changed, 15 insertions(+), 26 deletions(-) diff --git a/etc/README.md b/etc/README.md index 28c71b046..4f4ce7f20 100644 --- a/etc/README.md +++ b/etc/README.md @@ -19,55 +19,41 @@ the [docker docs](/book/installation/docker.md#using-docker-compose). #### Adding a new metric to Grafana -To set up a new metric in Reth and its Grafana dashboard: +To set up a new metric in Reth and its Grafana dashboard (this assumes running Reth and Grafana instances): 1. Add the metric to the codebase following the [metrics section](../docs/design/metrics.md#creating-metrics) documentation. -2. Build the Reth image: - - ```bash - docker build . -t reth:local - ``` - - Modify the [docker-compose](./docker-compose.yml) file to use your locally built image for the Reth service. - -3. Run Docker Compose: - - ```bash - docker compose -f etc/docker-compose.yml -f etc/lighthouse.yml up -d - ``` - -4. Access Grafana: +1. Access Grafana: - Open `http://localhost:3000/` in a browser - Log in with username and password `admin` - Navigate to the `Dashboards` tab -5. Create or modify a dashboard: +1. Create or modify a dashboard: - Select an existing dashboard or create a new one - Click `Add` > `Visualization` to create a new panel -6. Configure your metric panel: +1. Configure your metric panel: - Set a panel title and description - Select metric(s) from the `Metrics browser` or use the `PromQL` terminal - Document your metric(s) by setting units, legends, etc. - When adding multiple metrics, use field overwrites if needed -7. Save and arrange: +1. Save and arrange: - Click `Apply` to save the panel - Drag the panel to desired position on the dashboard -8. Export the dashboard: +1. Export the dashboard: - Click `Share` > `Export` - Toggle `Export for sharing externally` - Click `Save to file` -9. Update dashboard file: +1. Update dashboard file: - Replace the content of the corresponding file in the [dashboards folder](./grafana/dashboards) with the exported JSON @@ -75,15 +61,18 @@ Your new metric is now integrated into the Reth Grafana dashboard. #### Import Grafana dashboards -In order to import new Grafana dashboards or update a dashboard: +If you are running Reth and Grafana outside of docker, and wish to import new Grafana dashboards or update a dashboard: 1. Go to `Home` > `Dashboards` -2. Click `New` > `Import` +1. Click `New` > `Import` -3. Drag the JSON dashboard file to import it +1. 
Drag the JSON dashboard file to import it -4. If updating an existing dashboard, you will need to change the name and UID of the imported dashboard in order to +1. If updating an existing dashboard, you will need to change the name and UID of the imported dashboard in order to avoid conflict -5. Delete the old dashboard +1. Delete the old dashboard + +If you are running Reth and Grafana using docker, after having pulled the updated dashboards from `main`, restart the +Grafana service. This will update all dashboards. \ No newline at end of file From aba4991d0acb514190399c607fbcf56657789718 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 21 Oct 2024 12:31:29 +0200 Subject: [PATCH 166/425] docs: note about type changes (#11925) --- crates/storage/codecs/src/lib.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/crates/storage/codecs/src/lib.rs b/crates/storage/codecs/src/lib.rs index 54ca046cb..58a7db8c1 100644 --- a/crates/storage/codecs/src/lib.rs +++ b/crates/storage/codecs/src/lib.rs @@ -48,6 +48,12 @@ pub mod test_utils; /// Regarding the `specialized_to/from_compact` methods: Mainly used as a workaround for not being /// able to specialize an impl over certain types like `Vec`/`Option` where `T` is a fixed /// size array like `Vec`. +/// +/// ## Caution +/// +/// Due to the bitfields, every type change on the rust type (e.g. `U256` to `u64`) is a breaking +/// change and will lead to a new, incompatible [`Compact`] implementation. Implementers must take +/// special care when changing or rearranging fields. pub trait Compact: Sized { /// Takes a buffer which can be written to. *Ideally*, it returns the length written to. fn to_compact(&self, buf: &mut B) -> usize From f25cceb9f93fcceaae59a030f81f088e29a5f0c1 Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Mon, 21 Oct 2024 19:59:09 +0800 Subject: [PATCH 167/425] perf: use Vec::with_capacity and reserve_exact (#11904) --- crates/evm/execution-types/src/chain.rs | 4 ++-- crates/net/discv4/src/lib.rs | 4 ++-- crates/rpc/rpc-eth-types/src/fee_history.rs | 2 +- crates/rpc/rpc-eth-types/src/simulate.rs | 2 +- crates/rpc/rpc/src/eth/core.rs | 9 +++++---- crates/rpc/rpc/src/eth/helpers/signer.rs | 2 +- crates/stages/api/src/pipeline/builder.rs | 4 +++- crates/stages/stages/src/stages/bodies.rs | 3 ++- crates/storage/codecs/src/lib.rs | 2 +- crates/storage/nippy-jar/src/compression/zstd.rs | 2 +- .../provider/src/providers/static_file/manager.rs | 2 +- crates/transaction-pool/benches/truncate.rs | 2 +- examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs | 1 + 13 files changed, 22 insertions(+), 17 deletions(-) diff --git a/crates/evm/execution-types/src/chain.rs b/crates/evm/execution-types/src/chain.rs index d3ed2913e..65f96ff56 100644 --- a/crates/evm/execution-types/src/chain.rs +++ b/crates/evm/execution-types/src/chain.rs @@ -228,11 +228,11 @@ impl Chain { /// /// Attachment includes block number, block hash, transaction hash and transaction index. 
pub fn receipts_with_attachment(&self) -> Vec { - let mut receipt_attach = Vec::new(); + let mut receipt_attach = Vec::with_capacity(self.blocks().len()); for ((block_num, block), receipts) in self.blocks().iter().zip(self.execution_outcome.receipts().iter()) { - let mut tx_receipts = Vec::new(); + let mut tx_receipts = Vec::with_capacity(receipts.len()); for (tx, receipt) in block.body.transactions().zip(receipts.iter()) { tx_receipts.push(( tx.hash(), diff --git a/crates/net/discv4/src/lib.rs b/crates/net/discv4/src/lib.rs index 7c14eac9b..7963c6e6f 100644 --- a/crates/net/discv4/src/lib.rs +++ b/crates/net/discv4/src/lib.rs @@ -2324,9 +2324,9 @@ mod tests { let original = EnrForkIdEntry { fork_id: ForkId { hash: ForkHash([0xdc, 0xe9, 0x6c, 0x2d]), next: 0 }, }; - let mut encoded = Vec::new(); - original.encode(&mut encoded); let expected: [u8; 8] = [0xc7, 0xc6, 0x84, 0xdc, 0xe9, 0x6c, 0x2d, 0x80]; + let mut encoded = Vec::with_capacity(expected.len()); + original.encode(&mut encoded); assert_eq!(&expected[..], encoded.as_slice()); } diff --git a/crates/rpc/rpc-eth-types/src/fee_history.rs b/crates/rpc/rpc-eth-types/src/fee_history.rs index 57dd276e5..c845d9683 100644 --- a/crates/rpc/rpc-eth-types/src/fee_history.rs +++ b/crates/rpc/rpc-eth-types/src/fee_history.rs @@ -305,7 +305,7 @@ pub fn calculate_reward_percentiles_for_block( // the percentiles are monotonically increasing. let mut tx_index = 0; let mut cumulative_gas_used = transactions.first().map(|tx| tx.gas_used).unwrap_or_default(); - let mut rewards_in_block = Vec::new(); + let mut rewards_in_block = Vec::with_capacity(percentiles.len()); for percentile in percentiles { // Empty blocks should return in a zero row if transactions.is_empty() { diff --git a/crates/rpc/rpc-eth-types/src/simulate.rs b/crates/rpc/rpc-eth-types/src/simulate.rs index a673da967..77db511e6 100644 --- a/crates/rpc/rpc-eth-types/src/simulate.rs +++ b/crates/rpc/rpc-eth-types/src/simulate.rs @@ -183,7 +183,7 @@ pub fn build_block( ) -> Result>, EthApiError> { let mut calls: Vec = Vec::with_capacity(results.len()); let mut senders = Vec::with_capacity(results.len()); - let mut receipts = Vec::new(); + let mut receipts = Vec::with_capacity(results.len()); let mut log_index = 0; for (transaction_index, ((sender, result), tx)) in diff --git a/crates/rpc/rpc/src/eth/core.rs b/crates/rpc/rpc/src/eth/core.rs index 6da468040..5c7fbbd00 100644 --- a/crates/rpc/rpc/src/eth/core.rs +++ b/crates/rpc/rpc/src/eth/core.rs @@ -421,8 +421,8 @@ mod tests { let mut rng = generators::rng(); // Build mock data - let mut gas_used_ratios = Vec::new(); - let mut base_fees_per_gas = Vec::new(); + let mut gas_used_ratios = Vec::with_capacity(block_count as usize); + let mut base_fees_per_gas = Vec::with_capacity(block_count as usize); let mut last_header = None; let mut parent_hash = B256::default(); @@ -444,8 +444,9 @@ mod tests { last_header = Some(header.clone()); parent_hash = hash; - let mut transactions = vec![]; - for _ in 0..100 { + const TOTAL_TRANSACTIONS: usize = 100; + let mut transactions = Vec::with_capacity(TOTAL_TRANSACTIONS); + for _ in 0..TOTAL_TRANSACTIONS { let random_fee: u128 = rng.gen(); if let Some(base_fee_per_gas) = header.base_fee_per_gas { diff --git a/crates/rpc/rpc/src/eth/helpers/signer.rs b/crates/rpc/rpc/src/eth/helpers/signer.rs index a5818aa49..e59be0ac2 100644 --- a/crates/rpc/rpc/src/eth/helpers/signer.rs +++ b/crates/rpc/rpc/src/eth/helpers/signer.rs @@ -40,7 +40,7 @@ impl DevSigner { /// Generates provided number of random dev signers /// 
which satisfy [`EthSigner`] trait pub fn random_signers(num: u32) -> Vec> { - let mut signers = Vec::new(); + let mut signers = Vec::with_capacity(num as usize); for _ in 0..num { let sk = PrivateKeySigner::random_with(&mut rand::thread_rng()); diff --git a/crates/stages/api/src/pipeline/builder.rs b/crates/stages/api/src/pipeline/builder.rs index 68ca887fe..79a4c477e 100644 --- a/crates/stages/api/src/pipeline/builder.rs +++ b/crates/stages/api/src/pipeline/builder.rs @@ -34,7 +34,9 @@ impl PipelineBuilder { /// [`builder`][StageSet::builder] on the set which will convert it to a /// [`StageSetBuilder`][crate::StageSetBuilder]. pub fn add_stages>(mut self, set: Set) -> Self { - for stage in set.builder().build() { + let states = set.builder().build(); + self.stages.reserve_exact(states.len()); + for stage in states { self.stages.push(stage); } self diff --git a/crates/stages/stages/src/stages/bodies.rs b/crates/stages/stages/src/stages/bodies.rs index 93d8a1229..06a525091 100644 --- a/crates/stages/stages/src/stages/bodies.rs +++ b/crates/stages/stages/src/stages/bodies.rs @@ -917,7 +917,8 @@ mod tests { return Poll::Ready(None) } - let mut response = Vec::default(); + let mut response = + Vec::with_capacity(std::cmp::min(this.headers.len(), this.batch_size as usize)); while let Some(header) = this.headers.pop_front() { if header.is_empty() { response.push(BlockResponse::Empty(header)) diff --git a/crates/storage/codecs/src/lib.rs b/crates/storage/codecs/src/lib.rs index 58a7db8c1..c432400a5 100644 --- a/crates/storage/codecs/src/lib.rs +++ b/crates/storage/codecs/src/lib.rs @@ -505,7 +505,7 @@ mod tests { #[test] fn compact_address() { - let mut buf = vec![]; + let mut buf = Vec::with_capacity(21); assert_eq!(Address::ZERO.to_compact(&mut buf), 20); assert_eq!(buf, vec![0; 20]); diff --git a/crates/storage/nippy-jar/src/compression/zstd.rs b/crates/storage/nippy-jar/src/compression/zstd.rs index 7685941df..896a65bd7 100644 --- a/crates/storage/nippy-jar/src/compression/zstd.rs +++ b/crates/storage/nippy-jar/src/compression/zstd.rs @@ -213,7 +213,7 @@ impl Compression for Zstd { return Err(NippyJarError::ColumnLenMismatch(self.columns, columns.len())) } - let mut dictionaries = vec![]; + let mut dictionaries = Vec::with_capacity(columns.len()); for column in columns { // ZSTD requires all training data to be continuous in memory, alongside the size of // each entry diff --git a/crates/storage/provider/src/providers/static_file/manager.rs b/crates/storage/provider/src/providers/static_file/manager.rs index 20d6a1b18..e81dc01f7 100644 --- a/crates/storage/provider/src/providers/static_file/manager.rs +++ b/crates/storage/provider/src/providers/static_file/manager.rs @@ -1364,13 +1364,13 @@ impl TransactionsProviderExt for StaticFileProvider { // chunks are too big, there will be idle threads waiting for work. Choosing an // arbitrary smaller value to make sure it doesn't happen. 
let chunk_size = 100; - let mut channels = Vec::new(); // iterator over the chunks let chunks = tx_range .clone() .step_by(chunk_size) .map(|start| start..std::cmp::min(start + chunk_size as u64, tx_range.end)); + let mut channels = Vec::with_capacity(tx_range_size.div_ceil(chunk_size)); for chunk_range in chunks { let (channel_tx, channel_rx) = mpsc::channel(); diff --git a/crates/transaction-pool/benches/truncate.rs b/crates/transaction-pool/benches/truncate.rs index 22e457630..1ca6f9849 100644 --- a/crates/transaction-pool/benches/truncate.rs +++ b/crates/transaction-pool/benches/truncate.rs @@ -66,7 +66,7 @@ fn generate_many_transactions(senders: usize, max_depth: usize) -> Vec().new_tree(&mut runner).unwrap().current() % max_depth + 1; diff --git a/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs b/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs index cc761aa98..1c53e4f41 100644 --- a/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs +++ b/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs @@ -109,6 +109,7 @@ where match self.pool.get_all_blobs_exact(txs.iter().map(|(tx, _)| tx.hash()).collect()) { Ok(blobs) => { + actions_to_queue.reserve_exact(txs.len()); for ((tx, _), sidecar) in txs.iter().zip(blobs.iter()) { let transaction = BlobTransaction::try_from_signed(tx.clone(), sidecar.clone()) .expect("should not fail to convert blob tx if it is already eip4844"); From 7119bb1fe000dbd7cb58e0fcd5a66c549538e9a2 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Mon, 21 Oct 2024 14:34:23 +0200 Subject: [PATCH 168/425] chore: introduce ExecuteOutput (#11929) --- crates/ethereum/evm/src/execute.rs | 6 +++--- crates/evm/src/execute.rs | 32 +++++++++++++++++++++--------- crates/optimism/evm/src/execute.rs | 6 +++--- 3 files changed, 29 insertions(+), 15 deletions(-) diff --git a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs index 185f351dd..e8b238c6a 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -14,7 +14,7 @@ use reth_ethereum_consensus::validate_block_post_execution; use reth_evm::{ execute::{ BasicBlockExecutorProvider, BlockExecutionError, BlockExecutionStrategy, - BlockExecutionStrategyFactory, BlockValidationError, ProviderError, + BlockExecutionStrategyFactory, BlockValidationError, ExecuteOutput, ProviderError, }, state_change::post_block_balance_increments, system_calls::{OnStateHook, SystemCaller}, @@ -152,7 +152,7 @@ where &mut self, block: &BlockWithSenders, total_difficulty: U256, - ) -> Result<(Vec, u64), Self::Error> { + ) -> Result { let env = self.evm_env_for_block(&block.header, total_difficulty); let mut evm = self.evm_config.evm_with_env(&mut self.state, env); @@ -203,7 +203,7 @@ where }, ); } - Ok((receipts, cumulative_gas_used)) + Ok(ExecuteOutput { receipts, gas_used: cumulative_gas_used }) } fn apply_post_execution_changes( diff --git a/crates/evm/src/execute.rs b/crates/evm/src/execute.rs index d7c8590ee..2b3ce85e9 100644 --- a/crates/evm/src/execute.rs +++ b/crates/evm/src/execute.rs @@ -166,6 +166,15 @@ pub trait BlockExecutorProvider: Send + Sync + Clone + Unpin + 'static { DB: Database + Display>; } +/// Helper type for the output of executing a block. +#[derive(Debug, Clone)] +pub struct ExecuteOutput { + /// Receipts obtained after executing a block. + pub receipts: Vec, + /// Cumulative gas used in the block execution. + pub gas_used: u64, +} + /// Defines the strategy for executing a single block. 
pub trait BlockExecutionStrategy { /// The error type returned by this strategy's methods. @@ -183,7 +192,7 @@ pub trait BlockExecutionStrategy { &mut self, block: &BlockWithSenders, total_difficulty: U256, - ) -> Result<(Vec, u64), Self::Error>; + ) -> Result; /// Applies any necessary changes after executing the block's transactions. fn apply_post_execution_changes( @@ -313,7 +322,8 @@ where let BlockExecutionInput { block, total_difficulty } = input; self.strategy.apply_pre_execution_changes(block, total_difficulty)?; - let (receipts, gas_used) = self.strategy.execute_transactions(block, total_difficulty)?; + let ExecuteOutput { receipts, gas_used } = + self.strategy.execute_transactions(block, total_difficulty)?; let requests = self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; let state = self.strategy.finish(); @@ -332,7 +342,8 @@ where let BlockExecutionInput { block, total_difficulty } = input; self.strategy.apply_pre_execution_changes(block, total_difficulty)?; - let (receipts, gas_used) = self.strategy.execute_transactions(block, total_difficulty)?; + let ExecuteOutput { receipts, gas_used } = + self.strategy.execute_transactions(block, total_difficulty)?; let requests = self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; @@ -356,7 +367,8 @@ where self.strategy.with_state_hook(Some(Box::new(state_hook))); self.strategy.apply_pre_execution_changes(block, total_difficulty)?; - let (receipts, gas_used) = self.strategy.execute_transactions(block, total_difficulty)?; + let ExecuteOutput { receipts, gas_used } = + self.strategy.execute_transactions(block, total_difficulty)?; let requests = self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; @@ -407,7 +419,8 @@ where } self.strategy.apply_pre_execution_changes(block, total_difficulty)?; - let (receipts, _gas_used) = self.strategy.execute_transactions(block, total_difficulty)?; + let ExecuteOutput { receipts, .. 
} = + self.strategy.execute_transactions(block, total_difficulty)?; let requests = self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; @@ -545,14 +558,14 @@ mod tests { _chain_spec: Arc, _evm_config: EvmConfig, state: State, - execute_transactions_result: (Vec, u64), + execute_transactions_result: ExecuteOutput, apply_post_execution_changes_result: Requests, finish_result: BundleState, } #[derive(Clone)] struct TestExecutorStrategyFactory { - execute_transactions_result: (Vec, u64), + execute_transactions_result: ExecuteOutput, apply_post_execution_changes_result: Requests, finish_result: BundleState, } @@ -599,7 +612,7 @@ mod tests { &mut self, _block: &BlockWithSenders, _total_difficulty: U256, - ) -> Result<(Vec, u64), Self::Error> { + ) -> Result { Ok(self.execute_transactions_result.clone()) } @@ -651,7 +664,8 @@ mod tests { fn test_strategy() { let expected_gas_used = 10; let expected_receipts = vec![Receipt::default()]; - let expected_execute_transactions_result = (expected_receipts.clone(), expected_gas_used); + let expected_execute_transactions_result = + ExecuteOutput { receipts: expected_receipts.clone(), gas_used: expected_gas_used }; let expected_apply_post_execution_changes_result = Requests::new(vec![bytes!("deadbeef")]); let expected_finish_result = BundleState::default(); diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index 10ac5c525..20ef64457 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -10,7 +10,7 @@ use reth_consensus::ConsensusError; use reth_evm::{ execute::{ BasicBlockExecutorProvider, BlockExecutionError, BlockExecutionStrategy, - BlockExecutionStrategyFactory, BlockValidationError, ProviderError, + BlockExecutionStrategyFactory, BlockValidationError, ExecuteOutput, ProviderError, }, state_change::post_block_balance_increments, system_calls::{OnStateHook, SystemCaller}, @@ -155,7 +155,7 @@ where &mut self, block: &BlockWithSenders, total_difficulty: U256, - ) -> Result<(Vec, u64), Self::Error> { + ) -> Result { let env = self.evm_env_for_block(&block.header, total_difficulty); let mut evm = self.evm_config.evm_with_env(&mut self.state, env); @@ -240,7 +240,7 @@ where }); } - Ok((receipts, cumulative_gas_used)) + Ok(ExecuteOutput { receipts, gas_used: cumulative_gas_used }) } fn apply_post_execution_changes( From 20dc0c7da0a12e31ab0c896ccf4208585658ad07 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 21 Oct 2024 19:25:33 +0200 Subject: [PATCH 169/425] some fmt (#11933) --- crates/storage/provider/src/providers/blockchain_provider.rs | 2 +- crates/storage/provider/src/providers/mod.rs | 2 +- crates/transaction-pool/src/traits.rs | 3 +-- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs b/crates/storage/provider/src/providers/blockchain_provider.rs index 1866610e3..13215e11a 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -71,7 +71,7 @@ impl BlockchainProvider2 { /// the database to initialize the provider. pub fn new(database: ProviderFactory) -> ProviderResult { let provider = database.provider()?; - let best: ChainInfo = provider.chain_info()?; + let best = provider.chain_info()?; match provider.header_by_number(best.best_number)? 
{ Some(header) => { drop(provider); diff --git a/crates/storage/provider/src/providers/mod.rs b/crates/storage/provider/src/providers/mod.rs index b98bbf5be..a67ebf89b 100644 --- a/crates/storage/provider/src/providers/mod.rs +++ b/crates/storage/provider/src/providers/mod.rs @@ -118,7 +118,7 @@ impl BlockchainProvider { /// the database to initialize the provider. pub fn new(database: ProviderFactory, tree: Arc) -> ProviderResult { let provider = database.provider()?; - let best: ChainInfo = provider.chain_info()?; + let best = provider.chain_info()?; let latest_header = provider .header_by_number(best.best_number)? .ok_or_else(|| ProviderError::HeaderNotFound(best.best_number.into()))?; diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index fcfdae4ed..56da11fe6 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -72,7 +72,6 @@ pub trait TransactionPool: Send + Sync + Clone { /// Imports all _external_ transactions /// - /// /// Consumer: Utility fn add_external_transactions( &self, @@ -83,7 +82,7 @@ pub trait TransactionPool: Send + Sync + Clone { /// Adds an _unvalidated_ transaction into the pool and subscribe to state changes. /// - /// This is the same as [TransactionPool::add_transaction] but returns an event stream for the + /// This is the same as [`TransactionPool::add_transaction`] but returns an event stream for the /// given transaction. /// /// Consumer: Custom From 3f2a41bd3f1d44c9a794a20876ca46a09499bb00 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 21 Oct 2024 20:05:06 +0200 Subject: [PATCH 170/425] chore: rm redundant trait bound (#11940) --- crates/rpc/rpc/src/debug.rs | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index 66465ef47..2d9d6f782 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -21,8 +21,8 @@ use reth_evm::{ }; use reth_primitives::{Block, BlockId, BlockNumberOrTag, TransactionSignedEcRecovered}; use reth_provider::{ - BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, HeaderProvider, StateProofProvider, - StateProviderFactory, TransactionVariant, + BlockReaderIdExt, ChainSpecProvider, HeaderProvider, StateProofProvider, StateProviderFactory, + TransactionVariant, }; use reth_revm::database::StateProviderDatabase; use reth_rpc_api::DebugApiServer; @@ -81,7 +81,6 @@ where + HeaderProvider + ChainSpecProvider + StateProviderFactory - + EvmEnvProvider + 'static, Eth: EthApiTypes + TraceExt + 'static, BlockExecutor: BlockExecutorProvider, @@ -842,7 +841,6 @@ where + HeaderProvider + ChainSpecProvider + StateProviderFactory - + EvmEnvProvider + 'static, Eth: EthApiSpec + EthTransactions + TraceExt + 'static, BlockExecutor: BlockExecutorProvider, @@ -979,15 +977,6 @@ where .map_err(Into::into) } - /// Handler for `debug_executionWitness` - async fn debug_execution_witness( - &self, - block: BlockNumberOrTag, - ) -> RpcResult { - let _permit = self.acquire_trace_permit().await; - Self::debug_execution_witness(self, block).await.map_err(Into::into) - } - /// Handler for `debug_traceCall` async fn debug_trace_call( &self, @@ -1011,6 +1000,15 @@ where Self::debug_trace_call_many(self, bundles, state_context, opts).await.map_err(Into::into) } + /// Handler for `debug_executionWitness` + async fn debug_execution_witness( + &self, + block: BlockNumberOrTag, + ) -> RpcResult { + let _permit = self.acquire_trace_permit().await; + 
Self::debug_execution_witness(self, block).await.map_err(Into::into) + } + async fn debug_backtrace_at(&self, _location: &str) -> RpcResult<()> { Ok(()) } From d9d184d4984aec0099e3a832cf9dc9c6072af0df Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Mon, 21 Oct 2024 20:47:50 +0200 Subject: [PATCH 171/425] feat: default impl for some BlockExecutionStrategy methods (#11941) --- crates/ethereum/evm/src/execute.rs | 7 +------ crates/evm/src/execute.rs | 32 ++++++++++++++++++++++-------- crates/evm/src/test_utils.rs | 2 ++ crates/optimism/evm/src/execute.rs | 10 +--------- 4 files changed, 28 insertions(+), 23 deletions(-) diff --git a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs index e8b238c6a..c62949902 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -21,7 +21,7 @@ use reth_evm::{ ConfigureEvm, }; use reth_primitives::{BlockWithSenders, Receipt}; -use reth_revm::db::{states::bundle_state::BundleRetention, BundleState, State}; +use reth_revm::db::State; use revm_primitives::{ db::{Database, DatabaseCommit}, BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, U256, @@ -264,11 +264,6 @@ where self.system_caller.with_state_hook(hook); } - fn finish(&mut self) -> BundleState { - self.state.merge_transitions(BundleRetention::Reverts); - self.state.take_bundle() - } - fn validate_block_post_execution( &self, block: &BlockWithSenders, diff --git a/crates/evm/src/execute.rs b/crates/evm/src/execute.rs index 2b3ce85e9..677a15dfa 100644 --- a/crates/evm/src/execute.rs +++ b/crates/evm/src/execute.rs @@ -6,6 +6,7 @@ pub use reth_execution_errors::{ }; pub use reth_execution_types::{BlockExecutionInput, BlockExecutionOutput, ExecutionOutcome}; pub use reth_storage_errors::provider::ProviderError; +use revm::db::states::bundle_state::BundleRetention; use crate::system_calls::OnStateHook; use alloc::{boxed::Box, vec::Vec}; @@ -176,7 +177,10 @@ pub struct ExecuteOutput { } /// Defines the strategy for executing a single block. -pub trait BlockExecutionStrategy { +pub trait BlockExecutionStrategy +where + DB: Database, +{ /// The error type returned by this strategy's methods. type Error: From + core::error::Error; @@ -209,18 +213,23 @@ pub trait BlockExecutionStrategy { fn state_mut(&mut self) -> &mut State; /// Sets a hook to be called after each state change during execution. - fn with_state_hook(&mut self, hook: Option>); + fn with_state_hook(&mut self, _hook: Option>) {} /// Returns the final bundle state. - fn finish(&mut self) -> BundleState; + fn finish(&mut self) -> BundleState { + self.state_mut().merge_transitions(BundleRetention::Reverts); + self.state_mut().take_bundle() + } /// Validate a block with regard to execution results. fn validate_block_post_execution( &self, - block: &BlockWithSenders, - receipts: &[Receipt], - requests: &Requests, - ) -> Result<(), ConsensusError>; + _block: &BlockWithSenders, + _receipts: &[Receipt], + _requests: &Requests, + ) -> Result<(), ConsensusError> { + Ok(()) + } } /// A strategy factory that can create block execution strategies. @@ -293,6 +302,7 @@ where pub struct BasicBlockExecutor where S: BlockExecutionStrategy, + DB: Database, { /// Block execution strategy. pub(crate) strategy: S, @@ -302,6 +312,7 @@ where impl BasicBlockExecutor where S: BlockExecutionStrategy, + DB: Database, { /// Creates a new `BasicBlockExecutor` with the given strategy. 
pub const fn new(strategy: S) -> Self { @@ -384,6 +395,7 @@ where pub struct BasicBatchExecutor where S: BlockExecutionStrategy, + DB: Database, { /// Batch execution strategy. pub(crate) strategy: S, @@ -395,6 +407,7 @@ where impl BasicBatchExecutor where S: BlockExecutionStrategy, + DB: Database, { /// Creates a new `BasicBatchExecutor` with the given strategy. pub const fn new(strategy: S, batch_record: BlockBatchRecord) -> Self { @@ -597,7 +610,10 @@ mod tests { } } - impl BlockExecutionStrategy for TestExecutorStrategy { + impl BlockExecutionStrategy for TestExecutorStrategy + where + DB: Database, + { type Error = BlockExecutionError; fn apply_pre_execution_changes( diff --git a/crates/evm/src/test_utils.rs b/crates/evm/src/test_utils.rs index c20f43dca..a4dc90649 100644 --- a/crates/evm/src/test_utils.rs +++ b/crates/evm/src/test_utils.rs @@ -119,6 +119,7 @@ impl BatchExecutor for MockExecutorProvider { impl BasicBlockExecutor where S: BlockExecutionStrategy, + DB: Database, { /// Provides safe read access to the state pub fn with_state(&self, f: F) -> R @@ -140,6 +141,7 @@ where impl BasicBatchExecutor where S: BlockExecutionStrategy, + DB: Database, { /// Provides safe read access to the state pub fn with_state(&self, f: F) -> R diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index 20ef64457..748e57e6b 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -20,10 +20,7 @@ use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::validate_block_post_execution; use reth_optimism_forks::OptimismHardfork; use reth_primitives::{BlockWithSenders, Header, Receipt, TxType}; -use reth_revm::{ - db::{states::bundle_state::BundleRetention, BundleState}, - Database, State, -}; +use reth_revm::{Database, State}; use revm_primitives::{ db::DatabaseCommit, BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, U256, }; @@ -271,11 +268,6 @@ where self.system_caller.with_state_hook(hook); } - fn finish(&mut self) -> BundleState { - self.state.merge_transitions(BundleRetention::Reverts); - self.state.take_bundle() - } - fn validate_block_post_execution( &self, block: &BlockWithSenders, From bddd3202e4603251d76ecceb6bfacf40b26ec46e Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Mon, 21 Oct 2024 20:04:09 +0100 Subject: [PATCH 172/425] test(trie): narrow the range of keys for sparse trie fuzz (#11937) --- crates/trie/sparse/src/trie.rs | 73 ++++++++++++++++++++-------------- 1 file changed, 44 insertions(+), 29 deletions(-) diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index 0b9ffb5c0..6680d7e9b 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -265,15 +265,15 @@ impl RevealedSparseTrie { } /// Remove leaf node from the trie. - pub fn remove_leaf(&mut self, path: Nibbles) -> SparseTrieResult<()> { + pub fn remove_leaf(&mut self, path: &Nibbles) -> SparseTrieResult<()> { self.prefix_set.insert(path.clone()); - let existing = self.values.remove(&path); + let existing = self.values.remove(path); if existing.is_none() { // trie structure unchanged, return immediately return Ok(()) } - let mut removed_nodes = self.take_nodes_for_path(&path)?; + let mut removed_nodes = self.take_nodes_for_path(path)?; debug!(target: "trie::sparse", ?path, ?removed_nodes, "Removed nodes for path"); // Pop the first node from the stack which is the leaf node we want to remove. 
let mut child = removed_nodes.pop().expect("leaf exists"); @@ -282,7 +282,7 @@ impl RevealedSparseTrie { let mut child_path = child.path.clone(); let SparseNode::Leaf { key, .. } = &child.node else { panic!("expected leaf node") }; child_path.extend_from_slice_unchecked(key); - assert_eq!(child_path, path); + assert_eq!(&child_path, path); } // If we don't have any other removed nodes, insert an empty node at the root. @@ -727,6 +727,7 @@ mod tests { use super::*; use alloy_primitives::U256; use itertools::Itertools; + use prop::sample::SizeRange; use proptest::prelude::*; use rand::seq::IteratorRandom; use reth_testing_utils::generators; @@ -998,7 +999,7 @@ mod tests { ]) ); - sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3])).unwrap(); + sparse.remove_leaf(&Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3])).unwrap(); // Extension (Key = 5) // └── Branch (Mask = 1001) @@ -1049,7 +1050,7 @@ mod tests { ]) ); - sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])).unwrap(); + sparse.remove_leaf(&Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])).unwrap(); // Extension (Key = 5) // └── Branch (Mask = 1001) @@ -1085,7 +1086,7 @@ mod tests { ]) ); - sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2])).unwrap(); + sparse.remove_leaf(&Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2])).unwrap(); // Extension (Key = 5) // └── Branch (Mask = 1001) @@ -1118,7 +1119,7 @@ mod tests { ]) ); - sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0])).unwrap(); + sparse.remove_leaf(&Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0])).unwrap(); // Extension (Key = 5) // └── Branch (Mask = 1001) @@ -1140,7 +1141,7 @@ mod tests { ]) ); - sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3])).unwrap(); + sparse.remove_leaf(&Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3])).unwrap(); // Leaf (Key = 53302) pretty_assertions::assert_eq!( @@ -1151,7 +1152,7 @@ mod tests { ),]) ); - sparse.remove_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])).unwrap(); + sparse.remove_leaf(&Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])).unwrap(); // Empty pretty_assertions::assert_eq!( @@ -1162,34 +1163,31 @@ mod tests { #[test] fn sparse_trie_fuzz() { - proptest!(ProptestConfig::with_cases(10), |(updates: Vec>)| { + // Having only the first 3 nibbles set, we narrow down the range of keys + // to 4096 different hashes. It allows us to generate collisions more likely + // to test the sparse trie updates. 
+ const KEY_NIBBLES_LEN: usize = 3; + + fn test(updates: Vec>>) { let mut rng = generators::rng(); let mut state = BTreeMap::default(); - let mut unpacked_state = BTreeMap::default(); let mut sparse = RevealedSparseTrie::default(); for update in updates { let keys_to_delete_len = update.len() / 2; - let unpacked_update = update.iter().map(|(key, value)| ( - Nibbles::unpack(key), - alloy_rlp::encode_fixed_size(value).to_vec() - )); - // Insert state updates into the sparse trie and calculate the root - for (key, value) in unpacked_update.clone() { + for (key, value) in update.clone() { sparse.update_leaf(key, value).unwrap(); } let sparse_root = sparse.root(); // Insert state updates into the hash builder and calculate the root - unpacked_state.extend(unpacked_update); state.extend(update); - let keys = state.keys().map(Nibbles::unpack).collect::>(); let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( - unpacked_state.clone(), - keys, + state.clone(), + state.keys().cloned().collect::>(), ); // Assert that the sparse trie root matches the hash builder root @@ -1204,20 +1202,18 @@ mod tests { .keys() .choose_multiple(&mut rng, keys_to_delete_len) .into_iter() - .copied() + .cloned() .collect::>(); for key in keys_to_delete { state.remove(&key).unwrap(); - unpacked_state.remove(&Nibbles::unpack(key)).unwrap(); - sparse.remove_leaf(Nibbles::unpack(key)).unwrap(); + sparse.remove_leaf(&key).unwrap(); } let sparse_root = sparse.root(); - let keys = state.keys().map(Nibbles::unpack).collect::>(); let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( - unpacked_state.clone(), - keys, + state.clone(), + state.keys().cloned().collect::>(), ); // Assert that the sparse trie root matches the hash builder root @@ -1225,6 +1221,25 @@ mod tests { // Assert that the sparse trie nodes match the hash builder proof nodes assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); } - }); + } + + /// Pad nibbles of length [`KEY_NIBBLES_LEN`] with zeros to the length of a B256 hash. + fn pad_nibbles(nibbles: Nibbles) -> Nibbles { + let mut base = + Nibbles::from_nibbles_unchecked([0; { B256::len_bytes() / 2 - KEY_NIBBLES_LEN }]); + base.extend_from_slice_unchecked(&nibbles); + base + } + + proptest!(ProptestConfig::with_cases(10), |( + updates in proptest::collection::vec( + proptest::collection::hash_map( + any_with::(SizeRange::new(KEY_NIBBLES_LEN..=KEY_NIBBLES_LEN)).prop_map(pad_nibbles), + any::>(), + 1..100, + ), + 1..100, + ) + )| { test(updates) }); } } From 7f47ef0fd3e540232f24f07d31f3b12e8be4e8cd Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 21 Oct 2024 21:18:24 +0200 Subject: [PATCH 173/425] chore: spawn eth_calls as blocking tasks (#11944) --- crates/rpc/rpc-eth-api/src/helpers/call.rs | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/crates/rpc/rpc-eth-api/src/helpers/call.rs b/crates/rpc/rpc-eth-api/src/helpers/call.rs index b43b34305..64017f9f8 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/call.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/call.rs @@ -544,6 +544,16 @@ pub trait Call: LoadState + SpawnBlocking { /// /// This returns the configured [`EnvWithHandlerCfg`] for the given [`TransactionRequest`] at /// the given [`BlockId`] and with configured call settings: `prepare_call_env`. + /// + /// This is primarily used by `eth_call`. 
+ /// + /// # Blocking behaviour + /// + /// This assumes executing the call is relatively more expensive on IO than CPU because it + /// transacts a single transaction on an empty in memory database. Because `eth_call`s are + /// usually allowed to consume a lot of gas, this also allows a lot of memory operations so + /// we assume this is not primarily CPU bound and instead spawn the call on a regular tokio task + /// instead, where blocking IO is less problematic. fn spawn_with_call_at( &self, request: TransactionRequest, @@ -561,7 +571,7 @@ pub trait Call: LoadState + SpawnBlocking { async move { let (cfg, block_env, at) = self.evm_env_at(at).await?; let this = self.clone(); - self.spawn_tracing(move |_| { + self.spawn_blocking_io(move |_| { let state = this.state_at_block_id(at)?; let mut db = CacheDB::new(StateProviderDatabase::new(StateProviderTraitObjWrapper(&state))); From 1f4ca32e35a7cc4bfccf7cc5a974429e3df4a846 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 21 Oct 2024 21:18:30 +0200 Subject: [PATCH 174/425] refactor(primitives-traits): rm useless trait bounds for `Receipt` (#11942) --- crates/primitives-traits/src/receipt.rs | 6 ------ 1 file changed, 6 deletions(-) diff --git a/crates/primitives-traits/src/receipt.rs b/crates/primitives-traits/src/receipt.rs index e2d19e4d4..5c317dc49 100644 --- a/crates/primitives-traits/src/receipt.rs +++ b/crates/primitives-traits/src/receipt.rs @@ -1,7 +1,5 @@ //! Receipt abstraction -use alloc::fmt; - use alloy_consensus::TxReceipt; use reth_codecs::Compact; use serde::{Deserialize, Serialize}; @@ -14,10 +12,6 @@ impl FullReceipt for T where T: Receipt + Compact {} /// Abstraction of a receipt. pub trait Receipt: TxReceipt - + Clone - + fmt::Debug - + PartialEq - + Eq + Default + alloy_rlp::Encodable + alloy_rlp::Decodable From 4d3b35dbd24c3a5c6b1a4f7bd86b1451e8efafcc Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 21 Oct 2024 21:31:40 +0200 Subject: [PATCH 175/425] test(tx-pool): add unit test for `remove_sender_count` (#11939) --- crates/transaction-pool/src/pool/parked.rs | 64 +++++++++++++++++++++- 1 file changed, 62 insertions(+), 2 deletions(-) diff --git a/crates/transaction-pool/src/pool/parked.rs b/crates/transaction-pool/src/pool/parked.rs index b591fdb53..407f04fd5 100644 --- a/crates/transaction-pool/src/pool/parked.rs +++ b/crates/transaction-pool/src/pool/parked.rs @@ -35,8 +35,8 @@ pub struct ParkedPool { best: BTreeSet>, /// Keeps track of last submission id for each sender. /// - /// This are sorted in Reverse order, so the last (highest) submission id is first, and the - /// lowest(oldest) is the last. + /// This are sorted in reverse order, so the last (highest) submission id is first, and the + /// lowest (oldest) is the last. last_sender_submission: BTreeSet, /// Keeps track of the number of transactions in the pool by the sender and the last submission /// id. 
@@ -856,4 +856,64 @@ mod tests { assert_eq!(submission_info2.sender_id, sender2); assert_eq!(submission_info2.submission_id, 2); } + + #[test] + fn test_remove_sender_count() { + // Initialize a mock transaction factory + let mut f = MockTransactionFactory::default(); + // Create an empty transaction pool + let mut pool = ParkedPool::>::default(); + // Generate two validated transactions and add them to the pool + let tx1 = f.validated_arc(MockTransaction::eip1559().inc_price()); + let tx2 = f.validated_arc(MockTransaction::eip1559().inc_price()); + pool.add_transaction(tx1); + pool.add_transaction(tx2); + + // Define two different sender IDs and their corresponding submission IDs + let sender1: SenderId = 11.into(); + let sender2: SenderId = 22.into(); + + // Add the sender counts to the pool + pool.add_sender_count(sender1, 1); + + // We add sender 2 multiple times to test the removal of sender counts + pool.add_sender_count(sender2, 2); + pool.add_sender_count(sender2, 3); + + // Before removing the sender count we should have 4 sender transaction counts + assert_eq!(pool.sender_transaction_count.len(), 4); + assert!(pool.sender_transaction_count.contains_key(&sender1)); + + // We should have 1 sender transaction count for sender 1 before removing the sender count + assert_eq!(pool.sender_transaction_count.get(&sender1).unwrap().count, 1); + + // Remove the sender count for sender 1 + pool.remove_sender_count(sender1); + + // After removing the sender count we should have 3 sender transaction counts remaining + assert_eq!(pool.sender_transaction_count.len(), 3); + assert!(!pool.sender_transaction_count.contains_key(&sender1)); + + // Check the sender transaction count for sender 2 before removing the sender count + assert_eq!( + *pool.sender_transaction_count.get(&sender2).unwrap(), + SenderTransactionCount { count: 2, last_submission_id: 3 } + ); + + // Remove the sender count for sender 2 + pool.remove_sender_count(sender2); + + // After removing the sender count for sender 2, we still have 3 sender transaction counts + // remaining. + // + // This is because we added sender 2 multiple times and we only removed the last submission. + assert_eq!(pool.sender_transaction_count.len(), 3); + assert!(pool.sender_transaction_count.contains_key(&sender2)); + + // Sender transaction count for sender 2 should be updated correctly + assert_eq!( + *pool.sender_transaction_count.get(&sender2).unwrap(), + SenderTransactionCount { count: 1, last_submission_id: 3 } + ); + } } From 387b0f8b361f0c42cee92cc598232fdf344487b9 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 21 Oct 2024 22:14:18 +0200 Subject: [PATCH 176/425] refactor(tx-pool): small refactor for `contains_conflicting_transaction` (#11935) --- crates/transaction-pool/src/pool/txpool.rs | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index a85a9a185..e5857d564 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -1405,12 +1405,9 @@ impl AllTransactions { /// Caution: This assumes that mutually exclusive invariant is always true for the same sender. 
#[inline] fn contains_conflicting_transaction(&self, tx: &ValidPoolTransaction) -> bool { - let mut iter = self.txs_iter(tx.transaction_id.sender); - if let Some((_, existing)) = iter.next() { - return tx.tx_type_conflicts_with(&existing.transaction) - } - // no existing transaction for this sender - false + self.txs_iter(tx.transaction_id.sender) + .next() + .map_or(false, |(_, existing)| tx.tx_type_conflicts_with(&existing.transaction)) } /// Additional checks for a new transaction. From cc895e705249197778c2b467af603b6f8e0f7641 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Tue, 22 Oct 2024 00:14:48 +0400 Subject: [PATCH 177/425] fix: always accept transactions with current nonce (#11931) --- crates/transaction-pool/src/pool/txpool.rs | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index e5857d564..4fbec1105 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -1421,11 +1421,15 @@ impl AllTransactions { fn ensure_valid( &self, transaction: ValidPoolTransaction, + on_chain_nonce: u64, ) -> Result, InsertErr> { if !self.local_transactions_config.is_local(transaction.origin, transaction.sender()) { let current_txs = self.tx_counter.get(&transaction.sender_id()).copied().unwrap_or_default(); - if current_txs >= self.max_account_slots { + + // Reject transactions if sender's capacity is exceeded. + // If transaction's nonce matches on-chain nonce always let it through + if current_txs >= self.max_account_slots && transaction.nonce() > on_chain_nonce { return Err(InsertErr::ExceededSenderTransactionsCapacity { transaction: Arc::new(transaction), }) @@ -1592,7 +1596,7 @@ impl AllTransactions { ) -> InsertResult { assert!(on_chain_nonce <= transaction.nonce(), "Invalid transaction"); - let mut transaction = self.ensure_valid(transaction)?; + let mut transaction = self.ensure_valid(transaction, on_chain_nonce)?; let inserted_tx_id = *transaction.id(); let mut state = TxState::default(); @@ -2631,6 +2635,7 @@ mod tests { let mut pool = AllTransactions::default(); let mut tx = MockTransaction::eip1559(); + let unblocked_tx = tx.clone(); for _ in 0..pool.max_account_slots { tx = tx.next(); pool.insert_tx(f.validated(tx.clone()), on_chain_balance, on_chain_nonce).unwrap(); @@ -2644,6 +2649,10 @@ mod tests { let err = pool.insert_tx(f.validated(tx.next()), on_chain_balance, on_chain_nonce).unwrap_err(); assert!(matches!(err, InsertErr::ExceededSenderTransactionsCapacity { .. })); + + assert!(pool + .insert_tx(f.validated(unblocked_tx), on_chain_balance, on_chain_nonce) + .is_ok()); } #[test] From 88a38de4d74cb8f01de405b67a65b098580434f0 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 21 Oct 2024 22:46:16 +0200 Subject: [PATCH 178/425] chore(rpc): relax some types (#11946) --- crates/rpc/rpc-eth-api/src/helpers/call.rs | 11 ++--- crates/rpc/rpc-eth-api/src/helpers/trace.rs | 2 +- crates/rpc/rpc-eth-types/src/cache/db.rs | 50 ++++++++++----------- 3 files changed, 32 insertions(+), 31 deletions(-) diff --git a/crates/rpc/rpc-eth-api/src/helpers/call.rs b/crates/rpc/rpc-eth-api/src/helpers/call.rs index 64017f9f8..0acf66462 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/call.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/call.rs @@ -655,14 +655,14 @@ pub trait Call: LoadState + SpawnBlocking { /// Returns the index of the target transaction in the given iterator. 
fn replay_transactions_until<'a, DB, I>( &self, - db: &mut CacheDB, + db: &mut DB, cfg: CfgEnvWithHandlerCfg, block_env: BlockEnv, transactions: I, target_tx_hash: B256, ) -> Result where - DB: DatabaseRef, + DB: Database + DatabaseCommit, EthApiError: From, I: IntoIterator, { @@ -929,14 +929,15 @@ pub trait Call: LoadState + SpawnBlocking { /// Executes the requests again after an out of gas error to check if the error is gas related /// or not #[inline] - fn map_out_of_gas_err( + fn map_out_of_gas_err( &self, env_gas_limit: U256, mut env: EnvWithHandlerCfg, - db: &mut CacheDB>, + db: &mut DB, ) -> Self::Error where - S: StateProvider, + DB: Database, + EthApiError: From, { let req_gas_limit = env.tx.gas_limit; env.tx.gas_limit = env_gas_limit.try_into().unwrap_or(u64::MAX); diff --git a/crates/rpc/rpc-eth-api/src/helpers/trace.rs b/crates/rpc/rpc-eth-api/src/helpers/trace.rs index 981de8fa6..64056148c 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/trace.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/trace.rs @@ -117,7 +117,7 @@ pub trait Trace: LoadState { self.spawn_with_state_at_block(at, move |state| { let mut db = CacheDB::new(StateProviderDatabase::new(state)); let mut inspector = TracingInspector::new(config); - let (res, _) = this.inspect(StateCacheDbRefMutWrapper(&mut db), env, &mut inspector)?; + let (res, _) = this.inspect(&mut db, env, &mut inspector)?; f(inspector, res, db) }) } diff --git a/crates/rpc/rpc-eth-types/src/cache/db.rs b/crates/rpc/rpc-eth-types/src/cache/db.rs index 7422dcfb8..627fd2b2d 100644 --- a/crates/rpc/rpc-eth-types/src/cache/db.rs +++ b/crates/rpc/rpc-eth-types/src/cache/db.rs @@ -114,6 +114,13 @@ impl reth_storage_api::BlockHashReader for StateProviderTraitObjWrapper<'_> { self.0.block_hash(block_number) } + fn convert_block_hash( + &self, + hash_or_number: alloy_rpc_types::BlockHashOrNumber, + ) -> reth_errors::ProviderResult> { + self.0.convert_block_hash(hash_or_number) + } + fn canonical_hashes_range( &self, start: alloy_primitives::BlockNumber, @@ -121,21 +128,22 @@ impl reth_storage_api::BlockHashReader for StateProviderTraitObjWrapper<'_> { ) -> reth_errors::ProviderResult> { self.0.canonical_hashes_range(start, end) } +} - fn convert_block_hash( +impl StateProvider for StateProviderTraitObjWrapper<'_> { + fn storage( &self, - hash_or_number: alloy_rpc_types::BlockHashOrNumber, - ) -> reth_errors::ProviderResult> { - self.0.convert_block_hash(hash_or_number) + account: revm_primitives::Address, + storage_key: alloy_primitives::StorageKey, + ) -> reth_errors::ProviderResult> { + self.0.storage(account, storage_key) } -} -impl StateProvider for StateProviderTraitObjWrapper<'_> { - fn account_balance( + fn bytecode_by_hash( &self, - addr: revm_primitives::Address, - ) -> reth_errors::ProviderResult> { - self.0.account_balance(addr) + code_hash: B256, + ) -> reth_errors::ProviderResult> { + self.0.bytecode_by_hash(code_hash) } fn account_code( @@ -145,26 +153,18 @@ impl StateProvider for StateProviderTraitObjWrapper<'_> { self.0.account_code(addr) } - fn account_nonce( + fn account_balance( &self, addr: revm_primitives::Address, - ) -> reth_errors::ProviderResult> { - self.0.account_nonce(addr) - } - - fn bytecode_by_hash( - &self, - code_hash: B256, - ) -> reth_errors::ProviderResult> { - self.0.bytecode_by_hash(code_hash) + ) -> reth_errors::ProviderResult> { + self.0.account_balance(addr) } - fn storage( + fn account_nonce( &self, - account: revm_primitives::Address, - storage_key: alloy_primitives::StorageKey, - ) -> reth_errors::ProviderResult> { 
- self.0.storage(account, storage_key) + addr: revm_primitives::Address, + ) -> reth_errors::ProviderResult> { + self.0.account_nonce(addr) } } From f2ac547666b3b79b20597571530768e7a4a2dc09 Mon Sep 17 00:00:00 2001 From: Lion - dapplion <35266934+dapplion@users.noreply.github.com> Date: Tue, 22 Oct 2024 00:02:44 +0300 Subject: [PATCH 179/425] Add custom beacon withdrawals example (#9497) Co-authored-by: Federico Gimenez --- Cargo.lock | 33 +- Cargo.toml | 1 + examples/custom-beacon-withdrawals/Cargo.toml | 26 ++ .../custom-beacon-withdrawals/src/main.rs | 286 ++++++++++++++++++ 4 files changed, 338 insertions(+), 8 deletions(-) create mode 100644 examples/custom-beacon-withdrawals/Cargo.toml create mode 100644 examples/custom-beacon-withdrawals/src/main.rs diff --git a/Cargo.lock b/Cargo.lock index 19287e666..9082aaeb2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -609,9 +609,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b2395336745358cc47207442127c47c63801a7065ecc0aa928da844f8bb5576" +checksum = "b0900b83f4ee1f45c640ceee596afbc118051921b9438fdb5a3175c1a7e05f8b" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", @@ -623,9 +623,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro-expander" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed5047c9a241df94327879c2b0729155b58b941eae7805a7ada2e19436e6b39" +checksum = "a41b1e78dde06b5e12e6702fa8c1d30621bf07728ba75b801fb801c9c6a0ba10" dependencies = [ "alloy-sol-macro-input", "const-hex", @@ -641,9 +641,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro-input" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dee02a81f529c415082235129f0df8b8e60aa1601b9c9298ffe54d75f57210b" +checksum = "91dc311a561a306664393407b88d3e53ae58581624128afd8a15faa5de3627dc" dependencies = [ "const-hex", "dunce", @@ -2783,6 +2783,23 @@ dependencies = [ "tokio-stream", ] +[[package]] +name = "example-custom-beacon-withdrawals" +version = "0.0.0" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-sol-macro", + "alloy-sol-types", + "eyre", + "reth", + "reth-chainspec", + "reth-evm", + "reth-evm-ethereum", + "reth-node-ethereum", + "reth-primitives", +] + [[package]] name = "example-custom-dev-node" version = "0.0.0" @@ -10355,9 +10372,9 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebfc1bfd06acc78f16d8fd3ef846bc222ee7002468d10a7dce8d703d6eab89a3" +checksum = "9d5e0c2ea8db64b2898b62ea2fbd60204ca95e0b2c6bdf53ff768bbe916fbe4d" dependencies = [ "paste", "proc-macro2", diff --git a/Cargo.toml b/Cargo.toml index 5b6912c33..b32e2d0df 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -145,6 +145,7 @@ members = [ "examples/rpc-db/", "examples/stateful-precompile/", "examples/txpool-tracing/", + "examples/custom-beacon-withdrawals", "testing/ef-tests/", "testing/testing-utils", ] diff --git a/examples/custom-beacon-withdrawals/Cargo.toml b/examples/custom-beacon-withdrawals/Cargo.toml new file mode 100644 index 000000000..c396ca11d --- /dev/null +++ b/examples/custom-beacon-withdrawals/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "example-custom-beacon-withdrawals" +version = "0.0.0" +publish = false +edition.workspace = true +license.workspace = true + 
+[dependencies] +reth.workspace = true +reth-node-ethereum.workspace = true +reth-evm-ethereum.workspace = true +reth-chainspec.workspace = true +reth-evm.workspace = true +reth-primitives.workspace = true + +alloy-sol-macro = "0.8.9" +alloy-sol-types.workspace = true +alloy-eips.workspace = true +alloy-consensus.workspace = true + +eyre.workspace = true + +[features] +optimism = [ + "reth-primitives/optimism" +] \ No newline at end of file diff --git a/examples/custom-beacon-withdrawals/src/main.rs b/examples/custom-beacon-withdrawals/src/main.rs new file mode 100644 index 000000000..09dad2f70 --- /dev/null +++ b/examples/custom-beacon-withdrawals/src/main.rs @@ -0,0 +1,286 @@ +//! Example for how to modify a block post-execution step. It credits beacon withdrawals with a +//! custom mechanism instead of minting native tokens + +#![cfg_attr(not(test), warn(unused_crate_dependencies))] + +use alloy_eips::eip7685::Requests; +use alloy_sol_macro::sol; +use alloy_sol_types::SolCall; +#[cfg(feature = "optimism")] +use reth::revm::primitives::OptimismFields; +use reth::{ + api::{ConfigureEvm, ConfigureEvmEnv, NodeTypesWithEngine}, + builder::{components::ExecutorBuilder, BuilderContext, FullNodeTypes}, + cli::Cli, + providers::ProviderError, + revm::{ + interpreter::Host, + primitives::{Env, TransactTo, TxEnv}, + Database, DatabaseCommit, Evm, State, + }, +}; +use reth_chainspec::{ChainSpec, EthereumHardforks}; +use reth_evm::execute::{ + BlockExecutionError, BlockExecutionStrategy, BlockExecutionStrategyFactory, ExecuteOutput, + InternalBlockExecutionError, +}; +use reth_evm_ethereum::EthEvmConfig; +use reth_node_ethereum::{node::EthereumAddOns, BasicBlockExecutorProvider, EthereumNode}; +use reth_primitives::{ + revm_primitives::{ + address, Address, BlockEnv, Bytes, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, U256, + }, + BlockWithSenders, Receipt, Withdrawal, +}; +use std::{fmt::Display, sync::Arc}; + +pub const SYSTEM_ADDRESS: Address = address!("fffffffffffffffffffffffffffffffffffffffe"); +pub const WITHDRAWALS_ADDRESS: Address = address!("4200000000000000000000000000000000000000"); + +fn main() { + Cli::parse_args() + .run(|builder, _| async move { + let handle = builder + // use the default ethereum node types + .with_types::() + // Configure the components of the node + // use default ethereum components but use our custom pool + .with_components( + EthereumNode::components().executor(CustomExecutorBuilder::default()), + ) + .with_add_ons(EthereumAddOns::default()) + .launch() + .await?; + + handle.wait_for_node_exit().await + }) + .unwrap(); +} + +/// A custom executor builder +#[derive(Debug, Default, Clone, Copy)] +#[non_exhaustive] +pub struct CustomExecutorBuilder; + +impl ExecutorBuilder for CustomExecutorBuilder +where + Types: NodeTypesWithEngine, + Node: FullNodeTypes, +{ + type EVM = EthEvmConfig; + type Executor = BasicBlockExecutorProvider; + + async fn build_evm( + self, + ctx: &BuilderContext, + ) -> eyre::Result<(Self::EVM, Self::Executor)> { + let chain_spec = ctx.chain_spec(); + let evm_config = EthEvmConfig::new(ctx.chain_spec()); + let strategy_factory = + CustomExecutorStrategyFactory { chain_spec, evm_config: evm_config.clone() }; + let executor = BasicBlockExecutorProvider::new(strategy_factory); + + Ok((evm_config, executor)) + } +} + +#[derive(Clone)] +pub struct CustomExecutorStrategyFactory { + /// The chainspec + chain_spec: Arc, + /// How to create an EVM. 
+ evm_config: EthEvmConfig, +} + +impl BlockExecutionStrategyFactory for CustomExecutorStrategyFactory { + type Strategy + Display>> = CustomExecutorStrategy; + + fn create_strategy(&self, db: DB) -> Self::Strategy + where + DB: Database + Display>, + { + let state = + State::builder().with_database(db).with_bundle_update().without_state_clear().build(); + CustomExecutorStrategy { + state, + chain_spec: self.chain_spec.clone(), + evm_config: self.evm_config.clone(), + } + } +} + +pub struct CustomExecutorStrategy +where + DB: Database + Display>, +{ + /// The chainspec + chain_spec: Arc, + /// How to create an EVM. + evm_config: EthEvmConfig, + /// Current state for block execution. + state: State, +} + +impl CustomExecutorStrategy +where + DB: Database + Display>, +{ + /// Configures a new evm configuration and block environment for the given block. + /// + /// # Caution + /// + /// This does not initialize the tx environment. + fn evm_env_for_block( + &self, + header: &alloy_consensus::Header, + total_difficulty: U256, + ) -> EnvWithHandlerCfg { + let mut cfg = CfgEnvWithHandlerCfg::new(Default::default(), Default::default()); + let mut block_env = BlockEnv::default(); + self.evm_config.fill_cfg_and_block_env(&mut cfg, &mut block_env, header, total_difficulty); + + EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, Default::default()) + } +} + +impl BlockExecutionStrategy for CustomExecutorStrategy +where + DB: Database + Display>, +{ + type Error = BlockExecutionError; + + fn apply_pre_execution_changes( + &mut self, + block: &BlockWithSenders, + _total_difficulty: U256, + ) -> Result<(), Self::Error> { + // Set state clear flag if the block is after the Spurious Dragon hardfork. + let state_clear_flag = + (*self.chain_spec).is_spurious_dragon_active_at_block(block.header.number); + self.state.set_state_clear_flag(state_clear_flag); + + Ok(()) + } + + fn execute_transactions( + &mut self, + _block: &BlockWithSenders, + _total_difficulty: U256, + ) -> Result { + Ok(ExecuteOutput { receipts: vec![], gas_used: 0 }) + } + + fn apply_post_execution_changes( + &mut self, + block: &BlockWithSenders, + total_difficulty: U256, + _receipts: &[Receipt], + ) -> Result { + let env = self.evm_env_for_block(&block.header, total_difficulty); + let mut evm = self.evm_config.evm_with_env(&mut self.state, env); + + if let Some(withdrawals) = block.body.withdrawals.as_ref() { + apply_withdrawals_contract_call(withdrawals, &mut evm)?; + } + + Ok(Requests::default()) + } + + fn state_ref(&self) -> &State { + &self.state + } + + fn state_mut(&mut self) -> &mut State { + &mut self.state + } +} + +sol!( + function withdrawals( + uint64[] calldata amounts, + address[] calldata addresses + ); +); + +/// Applies the post-block call to the withdrawal / deposit contract, using the given block, +/// [`ChainSpec`], EVM. 
+pub fn apply_withdrawals_contract_call( + withdrawals: &[Withdrawal], + evm: &mut Evm<'_, EXT, DB>, +) -> Result<(), BlockExecutionError> +where + DB::Error: std::fmt::Display, +{ + // get previous env + let previous_env = Box::new(evm.context.env().clone()); + + // modify env for pre block call + fill_tx_env_with_system_contract_call( + &mut evm.context.evm.env, + SYSTEM_ADDRESS, + WITHDRAWALS_ADDRESS, + withdrawalsCall { + amounts: withdrawals.iter().map(|w| w.amount).collect::>(), + addresses: withdrawals.iter().map(|w| w.address).collect::>(), + } + .abi_encode() + .into(), + ); + + let mut state = match evm.transact() { + Ok(res) => res.state, + Err(e) => { + evm.context.evm.env = previous_env; + return Err(BlockExecutionError::Internal(InternalBlockExecutionError::Other( + format!("withdrawal contract system call revert: {}", e).into(), + ))) + } + }; + + // Clean-up post system tx context + state.remove(&SYSTEM_ADDRESS); + state.remove(&evm.block().coinbase); + evm.context.evm.db.commit(state); + // re-set the previous env + evm.context.evm.env = previous_env; + + Ok(()) +} + +fn fill_tx_env_with_system_contract_call( + env: &mut Env, + caller: Address, + contract: Address, + data: Bytes, +) { + env.tx = TxEnv { + caller, + transact_to: TransactTo::Call(contract), + // Explicitly set nonce to None so revm does not do any nonce checks + nonce: None, + gas_limit: 30_000_000, + value: U256::ZERO, + data, + // Setting the gas price to zero enforces that no value is transferred as part of the call, + // and that the call will not count against the block's gas limit + gas_price: U256::ZERO, + // The chain ID check is not relevant here and is disabled if set to None + chain_id: None, + // Setting the gas priority fee to None ensures the effective gas price is derived from the + // `gas_price` field, which we need to be zero + gas_priority_fee: None, + access_list: Vec::new(), + // blob fields can be None for this tx + blob_hashes: Vec::new(), + max_fee_per_blob_gas: None, + authorization_list: None, + #[cfg(feature = "optimism")] + optimism: OptimismFields::default(), + }; + + // ensure the block gas limit is >= the tx + env.block.gas_limit = U256::from(env.tx.gas_limit); + + // disable the base fee check for this call by setting the base fee to zero + env.block.basefee = U256::ZERO; +} From 51594c9a68e437075f0aa0bfc13491dd82176d7c Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 21 Oct 2024 23:36:44 +0200 Subject: [PATCH 180/425] chore: relax payload traits (#11947) --- crates/e2e-test-utils/src/payload.rs | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/crates/e2e-test-utils/src/payload.rs b/crates/e2e-test-utils/src/payload.rs index 946d9af57..a5e4f56ac 100644 --- a/crates/e2e-test-utils/src/payload.rs +++ b/crates/e2e-test-utils/src/payload.rs @@ -1,20 +1,20 @@ use futures_util::StreamExt; -use reth::api::{BuiltPayload, EngineTypes, PayloadBuilderAttributes}; +use reth::api::{BuiltPayload, PayloadBuilderAttributes}; use reth_payload_builder::{PayloadBuilderHandle, PayloadId}; -use reth_payload_primitives::{Events, PayloadBuilder}; +use reth_payload_primitives::{Events, PayloadBuilder, PayloadTypes}; use tokio_stream::wrappers::BroadcastStream; /// Helper for payload operations #[derive(Debug)] -pub struct PayloadTestContext { - pub payload_event_stream: BroadcastStream>, - payload_builder: PayloadBuilderHandle, +pub struct PayloadTestContext { + pub payload_event_stream: BroadcastStream>, + payload_builder: PayloadBuilderHandle, 
pub timestamp: u64, } -impl PayloadTestContext { +impl PayloadTestContext { /// Creates a new payload helper - pub async fn new(payload_builder: PayloadBuilderHandle) -> eyre::Result { + pub async fn new(payload_builder: PayloadBuilderHandle) -> eyre::Result { let payload_events = payload_builder.subscribe().await?; let payload_event_stream = payload_events.into_stream(); // Cancun timestamp @@ -24,10 +24,10 @@ impl PayloadTestContext { /// Creates a new payload job from static attributes pub async fn new_payload( &mut self, - attributes_generator: impl Fn(u64) -> E::PayloadBuilderAttributes, - ) -> eyre::Result { + attributes_generator: impl Fn(u64) -> T::PayloadBuilderAttributes, + ) -> eyre::Result { self.timestamp += 1; - let attributes: E::PayloadBuilderAttributes = attributes_generator(self.timestamp); + let attributes = attributes_generator(self.timestamp); self.payload_builder.send_new_payload(attributes.clone()).await.unwrap()?; Ok(attributes) } @@ -35,10 +35,10 @@ impl PayloadTestContext { /// Asserts that the next event is a payload attributes event pub async fn expect_attr_event( &mut self, - attrs: E::PayloadBuilderAttributes, + attrs: T::PayloadBuilderAttributes, ) -> eyre::Result<()> { let first_event = self.payload_event_stream.next().await.unwrap()?; - if let reth::payload::Events::Attributes(attr) = first_event { + if let Events::Attributes(attr) = first_event { assert_eq!(attrs.timestamp(), attr.timestamp()); } else { panic!("Expect first event as payload attributes.") @@ -59,9 +59,9 @@ impl PayloadTestContext { } /// Expects the next event to be a built payload event or panics - pub async fn expect_built_payload(&mut self) -> eyre::Result { + pub async fn expect_built_payload(&mut self) -> eyre::Result { let second_event = self.payload_event_stream.next().await.unwrap()?; - if let reth::payload::Events::BuiltPayload(payload) = second_event { + if let Events::BuiltPayload(payload) = second_event { Ok(payload) } else { panic!("Expect a built payload event."); From 2973f0c3e80705d0303a62874ec4fffadf3472aa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 22 Oct 2024 12:33:47 +0200 Subject: [PATCH 181/425] chore(deps): bump dawidd6/action-homebrew-bump-formula from 3 to 4 (#11951) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/release-dist.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release-dist.yml b/.github/workflows/release-dist.yml index 2142360e0..f7df80e81 100644 --- a/.github/workflows/release-dist.yml +++ b/.github/workflows/release-dist.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Update Homebrew formula - uses: dawidd6/action-homebrew-bump-formula@v3 + uses: dawidd6/action-homebrew-bump-formula@v4 with: token: ${{ secrets.HOMEBREW }} no_fork: true From e52f647644cb10fe6d310428d75ac3045140f311 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Tue, 22 Oct 2024 12:34:53 +0200 Subject: [PATCH 182/425] chore(primitives): use alloy eth conversion constants (#11892) --- Cargo.lock | 3 ++ crates/consensus/common/src/calc.rs | 4 +-- crates/ethereum/evm/src/execute.rs | 6 ++-- crates/evm/Cargo.toml | 36 ++++++++++--------- crates/evm/src/state_change.rs | 2 +- crates/exex/exex/src/backfill/test_utils.rs | 5 ++- crates/node/events/Cargo.toml | 1 + crates/node/events/src/node.rs | 3 +- 
crates/primitives-traits/src/constants/mod.rs | 9 ----- crates/revm/Cargo.toml | 27 +++++++------- crates/rpc/rpc-eth-types/src/gas_oracle.rs | 3 +- crates/rpc/rpc-eth-types/src/revm_utils.rs | 2 +- 12 files changed, 50 insertions(+), 51 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9082aaeb2..37135dfd5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7441,6 +7441,7 @@ dependencies = [ name = "reth-evm" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "auto_impl", @@ -8053,6 +8054,7 @@ dependencies = [ name = "reth-node-events" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-primitives", "alloy-rpc-types-engine", "futures", @@ -8590,6 +8592,7 @@ dependencies = [ name = "reth-revm" version = "1.1.0" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "reth-ethereum-forks", diff --git a/crates/consensus/common/src/calc.rs b/crates/consensus/common/src/calc.rs index 3f519332f..e30c5b715 100644 --- a/crates/consensus/common/src/calc.rs +++ b/crates/consensus/common/src/calc.rs @@ -1,6 +1,6 @@ +use alloy_consensus::constants::ETH_TO_WEI; use alloy_primitives::{BlockNumber, U256}; use reth_chainspec::{EthereumHardfork, Hardforks}; -use reth_primitives::constants::ETH_TO_WEI; /// Calculates the base block reward. /// @@ -57,7 +57,7 @@ pub fn base_block_reward_pre_merge(chain_spec: impl Hardforks, block_number: Blo /// ``` /// # use reth_chainspec::MAINNET; /// # use reth_consensus_common::calc::{base_block_reward, block_reward}; -/// # use reth_primitives::constants::ETH_TO_WEI; +/// # use alloy_consensus::constants::ETH_TO_WEI; /// # use alloy_primitives::U256; /// # /// // This is block 126 on mainnet. diff --git a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs index c62949902..f082a3a70 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -296,7 +296,7 @@ impl EthExecutorProvider { #[cfg(test)] mod tests { use super::*; - use alloy_consensus::{Header, TxLegacy}; + use alloy_consensus::{constants::ETH_TO_WEI, Header, TxLegacy}; use alloy_eips::{ eip2935::{HISTORY_STORAGE_ADDRESS, HISTORY_STORAGE_CODE}, eip4788::{BEACON_ROOTS_ADDRESS, BEACON_ROOTS_CODE, SYSTEM_ADDRESS}, @@ -309,9 +309,7 @@ mod tests { BasicBlockExecutorProvider, BatchExecutor, BlockExecutorProvider, Executor, }; use reth_execution_types::BlockExecutionOutput; - use reth_primitives::{ - constants::ETH_TO_WEI, public_key_to_address, Account, Block, BlockBody, Transaction, - }; + use reth_primitives::{public_key_to_address, Account, Block, BlockBody, Transaction}; use reth_revm::{ database::StateProviderDatabase, test_utils::StateProviderTest, TransitionState, }; diff --git a/crates/evm/Cargo.toml b/crates/evm/Cargo.toml index 6a1e1fe0d..d97a57864 100644 --- a/crates/evm/Cargo.toml +++ b/crates/evm/Cargo.toml @@ -39,27 +39,29 @@ parking_lot = { workspace = true, optional = true } [dev-dependencies] parking_lot.workspace = true reth-ethereum-forks.workspace = true +alloy-consensus.workspace = true [features] default = ["std"] std = [ - "dep:metrics", - "dep:reth-metrics", - "reth-consensus/std", - "reth-primitives/std", - "reth-primitives-traits/std", - "reth-revm/std", - "alloy-eips/std", - "alloy-primitives/std", - "revm-primitives/std", - "revm/std" + "dep:metrics", + "dep:reth-metrics", + "reth-consensus/std", + "reth-primitives/std", + "reth-primitives-traits/std", + "reth-revm/std", + "alloy-eips/std", + "alloy-primitives/std", + "alloy-consensus/std", + "revm-primitives/std", + 
"revm/std", ] test-utils = [ - "dep:parking_lot", - "reth-chainspec/test-utils", - "reth-consensus/test-utils", - "reth-primitives/test-utils", - "reth-primitives-traits/test-utils", - "reth-revm/test-utils", - "revm/test-utils" + "dep:parking_lot", + "reth-chainspec/test-utils", + "reth-consensus/test-utils", + "reth-primitives/test-utils", + "reth-primitives-traits/test-utils", + "reth-revm/test-utils", + "revm/test-utils", ] diff --git a/crates/evm/src/state_change.rs b/crates/evm/src/state_change.rs index 2d5209015..2a3d93f94 100644 --- a/crates/evm/src/state_change.rs +++ b/crates/evm/src/state_change.rs @@ -91,9 +91,9 @@ pub fn insert_post_block_withdrawals_balance_increments Date: Tue, 22 Oct 2024 12:35:09 +0200 Subject: [PATCH 183/425] fix: spawn network manager on test exex ctx (#11907) --- crates/exex/test-utils/src/lib.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/exex/test-utils/src/lib.rs b/crates/exex/test-utils/src/lib.rs index 9b86da7c7..1f6ea75ce 100644 --- a/crates/exex/test-utils/src/lib.rs +++ b/crates/exex/test-utils/src/lib.rs @@ -268,15 +268,16 @@ pub async fn test_exex_context_with_chain_spec( let network_manager = NetworkManager::new( NetworkConfigBuilder::new(SecretKey::new(&mut rand::thread_rng())) .with_unused_discovery_port() + .with_unused_listener_port() .build(provider_factory.clone()), ) .await?; let network = network_manager.handle().clone(); - - let (_, payload_builder) = NoopPayloadBuilderService::::new(); - let tasks = TaskManager::current(); let task_executor = tasks.executor(); + tasks.executor().spawn(network_manager); + + let (_, payload_builder) = NoopPayloadBuilderService::::new(); let components = NodeAdapter::, _>, _> { components: Components { From 4a68c5e2d49a4789fcccbce2ed645a40f86e4844 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Tue, 22 Oct 2024 12:35:27 +0200 Subject: [PATCH 184/425] refactor(rpc): small refactor in `block_with_senders` (#11950) --- crates/rpc/rpc-eth-api/src/helpers/block.rs | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/crates/rpc/rpc-eth-api/src/helpers/block.rs b/crates/rpc/rpc-eth-api/src/helpers/block.rs index 9993b477a..9bf35d850 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/block.rs @@ -219,17 +219,17 @@ pub trait LoadBlock: LoadPendingBlock + SpawnBlocking { async move { if block_id.is_pending() { // Pending block can be fetched directly without need for caching - let maybe_pending = LoadPendingBlock::provider(self) + if let Some(pending_block) = LoadPendingBlock::provider(self) .pending_block_with_senders() - .map_err(Self::Error::from_eth_err)?; - return if maybe_pending.is_some() { - Ok(maybe_pending.map(Arc::new)) - } else { - // If no pending block from provider, try to get local pending block - return match self.local_pending_block().await? { - Some((block, _)) => Ok(Some(Arc::new(block))), - None => Ok(None), - }; + .map_err(Self::Error::from_eth_err)? + { + return Ok(Some(Arc::new(pending_block))); + } + + // If no pending block from provider, try to get local pending block + return match self.local_pending_block().await? 
{ + Some((block, _)) => Ok(Some(Arc::new(block))), + None => Ok(None), }; } From 0df7f65f3d0e9ab5af616c8d5913d9d660113775 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 22 Oct 2024 13:09:44 +0200 Subject: [PATCH 185/425] chore: serde 1.0.210 (#11963) --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index b32e2d0df..91e3ac6e5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -503,7 +503,7 @@ rand = "0.8.5" rayon = "1.7" rustc-hash = { version = "2.0", default-features = false } schnellru = "0.2" -serde = { version = "1.0", default-features = false } +serde = { version = "=1.0.210", default-features = false } serde_json = "1.0.94" serde_with = "3.3.0" sha2 = { version = "0.10", default-features = false } From 75b39bc2647b28a62dd583a56090305338264a4c Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 22 Oct 2024 13:28:27 +0200 Subject: [PATCH 186/425] chore: run clippy locked (#11964) --- .github/workflows/lint.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 1921859c2..4723d8a4d 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -18,10 +18,10 @@ jobs: matrix: include: - type: ethereum - args: --bin reth --workspace + args: --bin reth --workspace --locked features: "ethereum asm-keccak jemalloc jemalloc-prof min-error-logs min-warn-logs min-info-logs min-debug-logs min-trace-logs" - type: optimism - args: --bin op-reth --workspace + args: --bin op-reth --workspace --locked features: "optimism asm-keccak jemalloc jemalloc-prof min-error-logs min-warn-logs min-info-logs min-debug-logs min-trace-logs" - type: book args: --manifest-path book/sources/Cargo.toml --workspace --bins From e8205936daea4419191d68f0a8aa82517a025033 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Tue, 22 Oct 2024 13:38:45 +0200 Subject: [PATCH 187/425] primitive-traits: rm unused `SELECTOR_LEN` (#11959) --- crates/primitives-traits/src/constants/mod.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index 12ef300f6..2874c596a 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -10,9 +10,6 @@ pub use gas_units::{GIGAGAS, KILOGAS, MEGAGAS}; /// The client version: `reth/v{major}.{minor}.{patch}` pub const RETH_CLIENT_VERSION: &str = concat!("reth/v", env!("CARGO_PKG_VERSION")); -/// The first four bytes of the call data for a function call specifies the function to be called. -pub const SELECTOR_LEN: usize = 4; - /// An EPOCH is a series of 32 slots. 
pub const EPOCH_SLOTS: u64 = 32; From cab76f2083be680afa4ddcff44420334dd0e71a5 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Tue, 22 Oct 2024 20:39:08 +0900 Subject: [PATCH 188/425] fix(engine): run pruner after saving blocks (#11927) --- crates/engine/tree/src/persistence.rs | 38 +++++++++------------------ 1 file changed, 12 insertions(+), 26 deletions(-) diff --git a/crates/engine/tree/src/persistence.rs b/crates/engine/tree/src/persistence.rs index 25c1f0ed7..f4650a047 100644 --- a/crates/engine/tree/src/persistence.rs +++ b/crates/engine/tree/src/persistence.rs @@ -77,20 +77,22 @@ impl PersistenceService { } PersistenceAction::SaveBlocks(blocks, sender) => { let result = self.on_save_blocks(blocks)?; - if let Some(ref num_hash) = result { + let result_number = result.map(|r| r.number); + + // we ignore the error because the caller may or may not care about the result + let _ = sender.send(result); + + if let Some(block_number) = result_number { // send new sync metrics based on saved blocks let _ = self .sync_metrics_tx - .send(MetricEvent::SyncHeight { height: num_hash.number }); - } - // we ignore the error because the caller may or may not care about the result - let _ = sender.send(result); - } - PersistenceAction::PruneBefore(block_num, sender) => { - let res = self.prune_before(block_num)?; + .send(MetricEvent::SyncHeight { height: block_number }); - // we ignore the error because the caller may or may not care about the result - let _ = sender.send(res); + if self.pruner.is_pruning_needed(block_number) { + // We log `PrunerOutput` inside the `Pruner` + let _ = self.prune_before(block_number)?; + } + } } PersistenceAction::SaveFinalizedBlock(finalized_block) => { let provider = self.provider.database_provider_rw()?; @@ -175,10 +177,6 @@ pub enum PersistenceAction { /// static files. RemoveBlocksAbove(u64, oneshot::Sender>), - /// Prune associated block data before the given block number, according to already-configured - /// prune modes. - PruneBefore(u64, oneshot::Sender), - /// Update the persisted finalized block on disk SaveFinalizedBlock(u64), @@ -279,18 +277,6 @@ impl PersistenceHandle { ) -> Result<(), SendError> { self.send_action(PersistenceAction::RemoveBlocksAbove(block_num, tx)) } - - /// Tells the persistence service to remove block data before the given hash, according to the - /// configured prune config. - /// - /// The resulting [`PrunerOutput`] is returned in the receiver end of the sender argument. - pub fn prune_before( - &self, - block_num: u64, - tx: oneshot::Sender, - ) -> Result<(), SendError> { - self.send_action(PersistenceAction::PruneBefore(block_num, tx)) - } } #[cfg(test)] From 90aaad8285ea549734e978f17fcb7622fb137800 Mon Sep 17 00:00:00 2001 From: James Prestwich Date: Tue, 22 Oct 2024 07:53:39 -0400 Subject: [PATCH 189/425] opt: reduce allocs for `parse_deposits_from_receipts` (#11949) --- crates/ethereum/evm/src/eip6110.rs | 114 ++++++++++++++++++----------- 1 file changed, 73 insertions(+), 41 deletions(-) diff --git a/crates/ethereum/evm/src/eip6110.rs b/crates/ethereum/evm/src/eip6110.rs index 4cf1c6ae9..d57002081 100644 --- a/crates/ethereum/evm/src/eip6110.rs +++ b/crates/ethereum/evm/src/eip6110.rs @@ -1,12 +1,18 @@ //! 
EIP-6110 deposit requests parsing use alloc::{string::ToString, vec::Vec}; use alloy_eips::eip6110::MAINNET_DEPOSIT_CONTRACT_ADDRESS; -use alloy_primitives::{Bytes, Log}; +use alloy_primitives::{Address, Bytes, Log}; use alloy_sol_types::{sol, SolEvent}; -use reth_chainspec::ChainSpec; +use reth_chainspec::{ChainSpec, EthChainSpec}; use reth_evm::execute::BlockValidationError; use reth_primitives::Receipt; +/// The size of a deposit request in bytes. While the event fields emit +/// bytestrings, those bytestrings are fixed size. The fields are: 48-byte +/// pubkey, 32-byte withdrawal credentials, 8-byte amount, 96-byte signature, +/// and 8-byte index. +const DEPOSIT_BYTES_SIZE: usize = 48 + 32 + 8 + 96 + 8; + sol! { #[allow(missing_docs)] event DepositEvent( @@ -18,53 +24,79 @@ sol! { ); } -/// Parse [deposit contract](https://etherscan.io/address/0x00000000219ab540356cbb839cbe05303d7705fa) -/// (address is from the passed [`ChainSpec`]) deposits from receipts, and return them as a -/// [vector](Vec) of (requests)[`alloy_eips::eip7685::Requests`]. -pub fn parse_deposits_from_receipts<'a, I>( - chain_spec: &ChainSpec, - receipts: I, -) -> Result -where - I: IntoIterator, -{ - let mut requests = Vec::new(); - let deposit_contract_address = chain_spec - .deposit_contract - .as_ref() - .map_or(MAINNET_DEPOSIT_CONTRACT_ADDRESS, |contract| contract.address); - let logs: Vec<_> = receipts - .into_iter() - .flat_map(|receipt| &receipt.logs) - // No need to filter for topic because there's only one event and that's the Deposit - // event in the deposit contract. - .filter(|log| log.address == deposit_contract_address) - .collect(); +/// Accumulate a deposit request from a log. containing a [`DepositEvent`]. +pub fn accumulate_deposit_from_log(log: &Log, out: &mut Vec) { + out.reserve(DEPOSIT_BYTES_SIZE); + out.extend_from_slice(log.pubkey.as_ref()); + out.extend_from_slice(log.withdrawal_credentials.as_ref()); + out.extend_from_slice(log.amount.as_ref()); + out.extend_from_slice(log.signature.as_ref()); + out.extend_from_slice(log.index.as_ref()); +} - for log in &logs { +/// Accumulate deposits from an iterator of logs. +pub fn accumulate_deposits_from_logs<'a>( + address: Address, + logs: impl IntoIterator, + out: &mut Vec, +) -> Result<(), BlockValidationError> { + logs.into_iter().filter(|log| log.address == address).try_for_each(|log| { + // We assume that the log is valid because it was emitted by the + // deposit contract. let decoded_log = DepositEvent::decode_log(log, false).map_err(|err: alloy_sol_types::Error| { BlockValidationError::DepositRequestDecode(err.to_string()) })?; - requests.extend(parse_deposit_from_log(&decoded_log).as_ref()) - } + accumulate_deposit_from_log(&decoded_log, out); + Ok(()) + }) +} + +/// Accumulate deposits from a receipt. Iterates over the logs in the receipt +/// and accumulates the deposit request bytestrings. +pub fn accumulate_deposits_from_receipt( + address: Address, + receipt: &Receipt, + out: &mut Vec, +) -> Result<(), BlockValidationError> { + accumulate_deposits_from_logs(address, &receipt.logs, out) +} - Ok(requests.into()) +/// Accumulate deposits from a list of receipts. Iterates over the logs in the +/// receipts and accumulates the deposit request bytestrings. 
+pub fn accumulate_deposits_from_receipts<'a, I>( + address: Address, + receipts: I, + out: &mut Vec, +) -> Result<(), BlockValidationError> +where + I: IntoIterator, +{ + receipts + .into_iter() + .try_for_each(|receipt| accumulate_deposits_from_receipt(address, receipt, out)) } -fn parse_deposit_from_log(log: &Log) -> Bytes { - // SAFETY: These `expect` https://github.com/ethereum/consensus-specs/blob/5f48840f4d768bf0e0a8156a3ed06ec333589007/solidity_deposit_contract/deposit_contract.sol#L107-L110 - // are safe because the `DepositEvent` is the only event in the deposit contract and the length - // checks are done there. - [ - log.pubkey.as_ref(), - log.withdrawal_credentials.as_ref(), - log.amount.as_ref(), - log.signature.as_ref(), - log.index.as_ref(), - ] - .concat() - .into() +/// Find deposit logs in a list of receipts, and return the concatenated +/// deposit request bytestring. +/// +/// The address of the deposit contract is taken from the chain spec, and +/// defaults to [`MAINNET_DEPOSIT_CONTRACT_ADDRESS`] if not specified in +/// the chain spec. +pub fn parse_deposits_from_receipts<'a, I>( + chainspec: &ChainSpec, + receipts: I, +) -> Result +where + I: IntoIterator, +{ + let mut out = Vec::new(); + accumulate_deposits_from_receipts( + chainspec.deposit_contract().map(|c| c.address).unwrap_or(MAINNET_DEPOSIT_CONTRACT_ADDRESS), + receipts, + &mut out, + )?; + Ok(out.into()) } #[cfg(test)] From df57aedba64af2cdc1b006792fb147454529bbe3 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 22 Oct 2024 14:34:03 +0200 Subject: [PATCH 190/425] chore: bump discv5 (#11966) --- Cargo.lock | 153 +++++++++++++++++++++-------------------------------- Cargo.toml | 2 +- 2 files changed, 62 insertions(+), 93 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 37135dfd5..59c582fec 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -343,7 +343,7 @@ dependencies = [ "futures", "futures-utils-wasm", "lru", - "parking_lot 0.12.3", + "parking_lot", "pin-project", "reqwest", "schnellru", @@ -2085,7 +2085,7 @@ dependencies = [ "bitflags 2.6.0", "crossterm_winapi", "libc", - "parking_lot 0.12.3", + "parking_lot", "winapi", ] @@ -2098,7 +2098,7 @@ dependencies = [ "bitflags 2.6.0", "crossterm_winapi", "mio 1.0.2", - "parking_lot 0.12.3", + "parking_lot", "rustix", "signal-hook", "signal-hook-mio", @@ -2255,7 +2255,7 @@ dependencies = [ "hashbrown 0.14.5", "lock_api", "once_cell", - "parking_lot_core 0.9.10", + "parking_lot_core", ] [[package]] @@ -2269,7 +2269,7 @@ dependencies = [ "hashbrown 0.14.5", "lock_api", "once_cell", - "parking_lot_core 0.9.10", + "parking_lot_core", "serde", ] @@ -2316,11 +2316,12 @@ dependencies = [ [[package]] name = "delay_map" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4355c25cbf99edcb6b4a0e906f6bdc6956eda149e84455bea49696429b2f8e8" +checksum = "df941644b671f05f59433e481ba0d31ac10e3667de725236a4c0d587c496fba1" dependencies = [ "futures", + "tokio", "tokio-util", ] @@ -2470,9 +2471,9 @@ dependencies = [ [[package]] name = "discv5" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f569b8c367554666c8652305621e8bae3634a2ff5c6378081d5bd8c399c99f23" +checksum = "23e6b70634e26c909d1edbb3142b3eaf3b89da0e52f284f00ca7c80d9901ad9e" dependencies = [ "aes", "aes-gcm", @@ -2491,13 +2492,13 @@ dependencies = [ "lru", "more-asserts", "multiaddr", - "parking_lot 0.11.2", + "parking_lot", "rand 0.8.5", "smallvec", - "socket2 0.4.10", + "socket2", 
"tokio", "tracing", - "uint", + "uint 0.10.0", "zeroize", ] @@ -3029,7 +3030,7 @@ dependencies = [ "alloy-genesis", "alloy-primitives", "eyre", - "parking_lot 0.12.3", + "parking_lot", "reth", "reth-chainspec", "reth-node-api", @@ -3528,9 +3529,9 @@ dependencies = [ [[package]] name = "hashlink" -version = "0.8.4" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" dependencies = [ "hashbrown 0.14.5", ] @@ -3776,7 +3777,7 @@ dependencies = [ "http-body", "hyper", "pin-project-lite", - "socket2 0.5.7", + "socket2", "tokio", "tower-service", "tracing", @@ -4166,7 +4167,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2 0.5.7", + "socket2", "widestring", "windows-sys 0.48.0", "winreg", @@ -4324,7 +4325,7 @@ dependencies = [ "http-body", "http-body-util", "jsonrpsee-types", - "parking_lot 0.12.3", + "parking_lot", "pin-project", "rand 0.8.5", "rustc-hash 2.0.0", @@ -4590,7 +4591,7 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags 2.6.0", "libc", - "redox_syscall 0.5.7", + "redox_syscall", ] [[package]] @@ -5433,17 +5434,6 @@ version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.6", -] - [[package]] name = "parking_lot" version = "0.12.3" @@ -5451,21 +5441,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", - "parking_lot_core 0.9.10", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if", - "instant", - "libc", - "redox_syscall 0.2.16", - "smallvec", - "winapi", + "parking_lot_core", ] [[package]] @@ -5476,7 +5452,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.7", + "redox_syscall", "smallvec", "windows-targets 0.52.6", ] @@ -5706,7 +5682,7 @@ dependencies = [ "log", "nix", "once_cell", - "parking_lot 0.12.3", + "parking_lot", "smallvec", "symbolic-demangle", "tempfile", @@ -5785,7 +5761,7 @@ checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" dependencies = [ "fixed-hash", "impl-codec", - "uint", + "uint 0.9.5", ] [[package]] @@ -5992,7 +5968,7 @@ dependencies = [ "quinn-udp", "rustc-hash 2.0.0", "rustls", - "socket2 0.5.7", + "socket2", "thiserror", "tokio", "tracing", @@ -6023,7 +5999,7 @@ checksum = "4fe68c2e9e1a1234e218683dbdf9f9dfcb094113c5ac2b938dfcb9bab4c4140b" dependencies = [ "libc", "once_cell", - "socket2 0.5.7", + "socket2", "tracing", "windows-sys 0.59.0", ] @@ -6180,15 +6156,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d3edd4d5d42c92f0a659926464d4cce56b562761267ecf0f469d85b7de384175" -[[package]] -name = "redox_syscall" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "redox_syscall" version = "0.5.7" @@ -6544,7 +6511,7 @@ dependencies = [ "assert_matches", "linked_hash_set", "metrics", - "parking_lot 0.12.3", + "parking_lot", "reth-blockchain-tree-api", "reth-chainspec", "reth-consensus", @@ -6594,7 +6561,7 @@ dependencies = [ "auto_impl", "derive_more 1.0.0", "metrics", - "parking_lot 0.12.3", + "parking_lot", "pin-project", "rand 0.8.5", "reth-chainspec", @@ -6843,7 +6810,7 @@ dependencies = [ "iai-callgrind", "metrics", "page_size", - "parking_lot 0.12.3", + "parking_lot", "paste", "pprof", "proptest", @@ -6951,7 +6918,7 @@ dependencies = [ "discv5", "enr", "generic-array", - "parking_lot 0.12.3", + "parking_lot", "rand 0.8.5", "reth-ethereum-forks", "reth-net-banlist", @@ -7001,7 +6968,7 @@ dependencies = [ "data-encoding", "enr", "linked_hash_set", - "parking_lot 0.12.3", + "parking_lot", "rand 0.8.5", "reth-chainspec", "reth-ethereum-forks", @@ -7447,7 +7414,7 @@ dependencies = [ "auto_impl", "futures-util", "metrics", - "parking_lot 0.12.3", + "parking_lot", "reth-chainspec", "reth-consensus", "reth-consensus-common", @@ -7531,7 +7498,7 @@ dependencies = [ "futures", "itertools 0.13.0", "metrics", - "parking_lot 0.12.3", + "parking_lot", "rand 0.8.5", "reth-blockchain-tree", "reth-chain-state", @@ -7675,7 +7642,7 @@ dependencies = [ "dashmap 6.1.0", "derive_more 1.0.0", "indexmap 2.6.0", - "parking_lot 0.12.3", + "parking_lot", "pprof", "rand 0.8.5", "rand_xorshift", @@ -7745,7 +7712,7 @@ dependencies = [ "futures", "itertools 0.13.0", "metrics", - "parking_lot 0.12.3", + "parking_lot", "pin-project", "pprof", "rand 0.8.5", @@ -7817,7 +7784,7 @@ dependencies = [ "auto_impl", "derive_more 1.0.0", "futures", - "parking_lot 0.12.3", + "parking_lot", "reth-consensus", "reth-eth-wire-types", "reth-network-peers", @@ -8090,7 +8057,7 @@ dependencies = [ "reth-metrics", "reth-provider", "reth-tasks", - "socket2 0.5.7", + "socket2", "tikv-jemalloc-ctl", "tokio", "tower 0.4.13", @@ -8238,7 +8205,7 @@ dependencies = [ "eyre", "op-alloy-consensus", "op-alloy-rpc-types-engine", - "parking_lot 0.12.3", + "parking_lot", "reth", "reth-auto-seal-consensus", "reth-basic-payload-builder", @@ -8327,7 +8294,7 @@ dependencies = [ "op-alloy-consensus", "op-alloy-network", "op-alloy-rpc-types", - "parking_lot 0.12.3", + "parking_lot", "reqwest", "reth-chainspec", "reth-evm", @@ -8505,7 +8472,7 @@ dependencies = [ "itertools 0.13.0", "metrics", "notify", - "parking_lot 0.12.3", + "parking_lot", "rand 0.8.5", "rayon", "reth-blockchain-tree-api", @@ -8635,7 +8602,7 @@ dependencies = [ "jsonrpsee", "jsonrpsee-types", "jsonwebtoken", - "parking_lot 0.12.3", + "parking_lot", "pin-project", "rand 0.8.5", "reth-chainspec", @@ -8822,7 +8789,7 @@ dependencies = [ "futures", "jsonrpsee", "jsonrpsee-types", - "parking_lot 0.12.3", + "parking_lot", "reth-chainspec", "reth-errors", "reth-evm", @@ -9035,7 +9002,7 @@ version = "1.1.0" dependencies = [ "alloy-primitives", "assert_matches", - "parking_lot 0.12.3", + "parking_lot", "rayon", "reth-db", "reth-db-api", @@ -9161,7 +9128,7 @@ dependencies = [ "criterion", "futures-util", "metrics", - "parking_lot 0.12.3", + "parking_lot", "paste", "pprof", "proptest", @@ -9997,7 +9964,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "4b4b487fe2acf240a021cf57c6b2b4903b1e78ca0ecd862a71b71d2a51fed77d" dependencies = [ "once_cell", - "parking_lot 0.12.3", + "parking_lot", "scc", "serial_test_derive", ] @@ -10202,16 +10169,6 @@ version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" -[[package]] -name = "socket2" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "socket2" version = "0.5.7" @@ -10685,10 +10642,10 @@ dependencies = [ "bytes", "libc", "mio 1.0.2", - "parking_lot 0.12.3", + "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.7", + "socket2", "tokio-macros", "windows-sys 0.52.0", ] @@ -11057,7 +11014,7 @@ dependencies = [ "ipconfig", "lru-cache", "once_cell", - "parking_lot 0.12.3", + "parking_lot", "rand 0.8.5", "resolv-conf", "serde", @@ -11118,6 +11075,18 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "uint" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "909988d098b2f738727b161a106cfc7cab00c539c2687a8836f8e565976fb53e" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + [[package]] name = "unarray" version = "0.1.4" @@ -11413,7 +11382,7 @@ checksum = "c7ed9d8b15c7fb594d72bfb4b5a276f3d2029333cd93a932f376f5937f6f80ee" dependencies = [ "futures", "js-sys", - "parking_lot 0.12.3", + "parking_lot", "pin-utils", "slab", "wasm-bindgen", diff --git a/Cargo.toml b/Cargo.toml index 91e3ac6e5..48a6525d5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -548,7 +548,7 @@ tower = "0.4" tower-http = "0.5" # p2p -discv5 = "0.7.0" +discv5 = "0.8.0" if-addrs = "0.13" # rpc From 60337d9614b25d20347599d590f83646b1de3e7f Mon Sep 17 00:00:00 2001 From: Francis Li Date: Tue, 22 Oct 2024 05:43:25 -0700 Subject: [PATCH 191/425] feat(rpc): Add flags to disable read tx timeout (#11856) --- book/cli/reth/db.md | 3 +++ book/cli/reth/db/diff.md | 3 +++ book/cli/reth/debug/build-block.md | 3 +++ book/cli/reth/debug/execution.md | 3 +++ book/cli/reth/debug/in-memory-merkle.md | 3 +++ book/cli/reth/debug/merkle.md | 3 +++ book/cli/reth/debug/replay-engine.md | 3 +++ book/cli/reth/import.md | 3 +++ book/cli/reth/init-state.md | 3 +++ book/cli/reth/init.md | 3 +++ book/cli/reth/node.md | 3 +++ book/cli/reth/p2p.md | 3 +++ book/cli/reth/prune.md | 3 +++ book/cli/reth/recover/storage-tries.md | 3 +++ book/cli/reth/stage/drop.md | 3 +++ book/cli/reth/stage/dump.md | 3 +++ book/cli/reth/stage/run.md | 3 +++ book/cli/reth/stage/unwind.md | 3 +++ crates/node/core/src/args/database.rs | 14 +++++++++++++- 19 files changed, 67 insertions(+), 1 deletion(-) diff --git a/book/cli/reth/db.md b/book/cli/reth/db.md index 9e3b32cc0..f9a8a158a 100644 --- a/book/cli/reth/db.md +++ b/book/cli/reth/db.md @@ -81,6 +81,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + Logging: --log.stdout.format The format to use for logs written to stdout diff --git a/book/cli/reth/db/diff.md b/book/cli/reth/db/diff.md index ea4c29612..f57c6ac36 100644 --- a/book/cli/reth/db/diff.md +++ b/book/cli/reth/db/diff.md @@ -45,6 +45,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in 
seconds, 0 means no timeout + --table The table name to diff. If not specified, all tables are diffed. diff --git a/book/cli/reth/debug/build-block.md b/book/cli/reth/debug/build-block.md index 76ddac306..2e6d637d5 100644 --- a/book/cli/reth/debug/build-block.md +++ b/book/cli/reth/debug/build-block.md @@ -69,6 +69,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + --trusted-setup-file Overrides the KZG trusted setup by reading from the supplied file diff --git a/book/cli/reth/debug/execution.md b/book/cli/reth/debug/execution.md index 202e1452a..9ca74897c 100644 --- a/book/cli/reth/debug/execution.md +++ b/book/cli/reth/debug/execution.md @@ -69,6 +69,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + Networking: -d, --disable-discovery Disable the discovery service diff --git a/book/cli/reth/debug/in-memory-merkle.md b/book/cli/reth/debug/in-memory-merkle.md index 534e6d46c..3e322a691 100644 --- a/book/cli/reth/debug/in-memory-merkle.md +++ b/book/cli/reth/debug/in-memory-merkle.md @@ -69,6 +69,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + Networking: -d, --disable-discovery Disable the discovery service diff --git a/book/cli/reth/debug/merkle.md b/book/cli/reth/debug/merkle.md index 19bc38acc..d701803b8 100644 --- a/book/cli/reth/debug/merkle.md +++ b/book/cli/reth/debug/merkle.md @@ -69,6 +69,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + Networking: -d, --disable-discovery Disable the discovery service diff --git a/book/cli/reth/debug/replay-engine.md b/book/cli/reth/debug/replay-engine.md index 7a14b9cf0..dd587620a 100644 --- a/book/cli/reth/debug/replay-engine.md +++ b/book/cli/reth/debug/replay-engine.md @@ -69,6 +69,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + Networking: -d, --disable-discovery Disable the discovery service diff --git a/book/cli/reth/import.md b/book/cli/reth/import.md index 7bd8a0079..28e085bda 100644 --- a/book/cli/reth/import.md +++ b/book/cli/reth/import.md @@ -69,6 +69,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + --no-state Disables stages that require state. diff --git a/book/cli/reth/init-state.md b/book/cli/reth/init-state.md index cb221634c..ddcd3cece 100644 --- a/book/cli/reth/init-state.md +++ b/book/cli/reth/init-state.md @@ -69,6 +69,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + JSONL file with state dump. 
diff --git a/book/cli/reth/init.md b/book/cli/reth/init.md index cc889e5e3..cd01accc0 100644 --- a/book/cli/reth/init.md +++ b/book/cli/reth/init.md @@ -69,6 +69,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + Logging: --log.stdout.format The format to use for logs written to stdout diff --git a/book/cli/reth/node.md b/book/cli/reth/node.md index 34d32209a..4cd55db1f 100644 --- a/book/cli/reth/node.md +++ b/book/cli/reth/node.md @@ -590,6 +590,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + Dev testnet: --dev Start the node in dev mode diff --git a/book/cli/reth/p2p.md b/book/cli/reth/p2p.md index 01253705b..603b451d9 100644 --- a/book/cli/reth/p2p.md +++ b/book/cli/reth/p2p.md @@ -247,6 +247,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + Logging: --log.stdout.format The format to use for logs written to stdout diff --git a/book/cli/reth/prune.md b/book/cli/reth/prune.md index e0641256f..ed16197a7 100644 --- a/book/cli/reth/prune.md +++ b/book/cli/reth/prune.md @@ -69,6 +69,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + Logging: --log.stdout.format The format to use for logs written to stdout diff --git a/book/cli/reth/recover/storage-tries.md b/book/cli/reth/recover/storage-tries.md index 1f639cb09..ecdaabe77 100644 --- a/book/cli/reth/recover/storage-tries.md +++ b/book/cli/reth/recover/storage-tries.md @@ -69,6 +69,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + Logging: --log.stdout.format The format to use for logs written to stdout diff --git a/book/cli/reth/stage/drop.md b/book/cli/reth/stage/drop.md index ae21a8918..399b3818c 100644 --- a/book/cli/reth/stage/drop.md +++ b/book/cli/reth/stage/drop.md @@ -69,6 +69,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + Possible values: - headers: The headers stage within the pipeline diff --git a/book/cli/reth/stage/dump.md b/book/cli/reth/stage/dump.md index 291d89690..4b3de3fb1 100644 --- a/book/cli/reth/stage/dump.md +++ b/book/cli/reth/stage/dump.md @@ -76,6 +76,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + Logging: --log.stdout.format The format to use for logs written to stdout diff --git a/book/cli/reth/stage/run.md b/book/cli/reth/stage/run.md index bfe5ff9d6..9da3ce0de 100644 --- a/book/cli/reth/stage/run.md +++ b/book/cli/reth/stage/run.md @@ -69,6 +69,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + --metrics Enable Prometheus metrics. 
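The `--db.read-transaction-timeout` flag added throughout these help pages is interpreted as seconds: leaving it unset keeps the built-in default, `0` disables the read-transaction timeout entirely, and any other value becomes the new cap, as the `DatabaseArgs` change further below shows. A brief usage sketch; the invocations and values are illustrative only, not part of this patch:

    # disable the read-transaction timeout, e.g. for long-running RPC queries
    reth node --db.read-transaction-timeout 0

    # or cap read transactions at five minutes
    reth node --db.read-transaction-timeout 300
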
diff --git a/book/cli/reth/stage/unwind.md b/book/cli/reth/stage/unwind.md index d181b3bca..700ab3d7e 100644 --- a/book/cli/reth/stage/unwind.md +++ b/book/cli/reth/stage/unwind.md @@ -74,6 +74,9 @@ Database: [possible values: true, false] + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + Networking: -d, --disable-discovery Disable the discovery service diff --git a/crates/node/core/src/args/database.rs b/crates/node/core/src/args/database.rs index da96deb70..0eec6639a 100644 --- a/crates/node/core/src/args/database.rs +++ b/crates/node/core/src/args/database.rs @@ -1,12 +1,14 @@ //! clap [Args](clap::Args) for database configuration +use std::time::Duration; + use crate::version::default_client_version; use clap::{ builder::{PossibleValue, TypedValueParser}, error::ErrorKind, Arg, Args, Command, Error, }; -use reth_db::ClientVersion; +use reth_db::{mdbx::MaxReadTransactionDuration, ClientVersion}; use reth_storage_errors::db::LogLevel; /// Parameters for database configuration @@ -20,6 +22,9 @@ pub struct DatabaseArgs { /// NFS volume. #[arg(long = "db.exclusive")] pub exclusive: Option, + /// Read transaction timeout in seconds, 0 means no timeout. + #[arg(long = "db.read-transaction-timeout")] + pub read_transaction_timeout: Option, } impl DatabaseArgs { @@ -33,9 +38,16 @@ impl DatabaseArgs { &self, client_version: ClientVersion, ) -> reth_db::mdbx::DatabaseArguments { + let max_read_transaction_duration = match self.read_transaction_timeout { + None => None, // if not specified, use default value + Some(0) => Some(MaxReadTransactionDuration::Unbounded), // if 0, disable timeout + Some(secs) => Some(MaxReadTransactionDuration::Set(Duration::from_secs(secs))), + }; + reth_db::mdbx::DatabaseArguments::new(client_version) .with_log_level(self.log_level) .with_exclusive(self.exclusive) + .with_max_read_transaction_duration(max_read_transaction_duration) } } From b3015c75b1f56fdfbbcce8cdb97a3cde1c642985 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Tue, 22 Oct 2024 14:31:37 +0100 Subject: [PATCH 192/425] fix(trie): removing a blinded leaf should result in an error (#11869) Co-authored-by: Roman Krasiuk --- crates/trie/sparse/src/trie.rs | 68 ++++++++++++++++++++++++++-------- 1 file changed, 52 insertions(+), 16 deletions(-) diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index 6680d7e9b..42f90d34c 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -267,11 +267,11 @@ impl RevealedSparseTrie { /// Remove leaf node from the trie. pub fn remove_leaf(&mut self, path: &Nibbles) -> SparseTrieResult<()> { self.prefix_set.insert(path.clone()); - let existing = self.values.remove(path); - if existing.is_none() { - // trie structure unchanged, return immediately - return Ok(()) - } + self.values.remove(path); + + // If the path wasn't present in `values`, we still need to walk the trie and ensure that + // there is no node at the path. When a leaf node is a blinded `Hash`, it will have an entry + // in `nodes`, but not in the `values`. 
let mut removed_nodes = self.take_nodes_for_path(path)?; debug!(target: "trie::sparse", ?path, ?removed_nodes, "Removed nodes for path"); @@ -726,6 +726,7 @@ mod tests { use super::*; use alloy_primitives::U256; + use assert_matches::assert_matches; use itertools::Itertools; use prop::sample::SizeRange; use proptest::prelude::*; @@ -960,7 +961,7 @@ mod tests { pretty_assertions::assert_eq!( sparse.nodes.clone().into_iter().collect::>(), BTreeMap::from_iter([ - (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), + (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1101.into())), ( Nibbles::from_nibbles([0x5, 0x0]), @@ -972,11 +973,11 @@ mod tests { ), ( Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]), - SparseNode::new_leaf(Nibbles::new()) + SparseNode::new_leaf(Nibbles::default()) ), ( Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]), - SparseNode::new_leaf(Nibbles::new()) + SparseNode::new_leaf(Nibbles::default()) ), ( Nibbles::from_nibbles([0x5, 0x2]), @@ -1015,7 +1016,7 @@ mod tests { pretty_assertions::assert_eq!( sparse.nodes.clone().into_iter().collect::>(), BTreeMap::from_iter([ - (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), + (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into())), ( Nibbles::from_nibbles([0x5, 0x0]), @@ -1027,11 +1028,11 @@ mod tests { ), ( Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]), - SparseNode::new_leaf(Nibbles::new()) + SparseNode::new_leaf(Nibbles::default()) ), ( Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]), - SparseNode::new_leaf(Nibbles::new()) + SparseNode::new_leaf(Nibbles::default()) ), (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into())), ( @@ -1063,7 +1064,7 @@ mod tests { pretty_assertions::assert_eq!( sparse.nodes.clone().into_iter().collect::>(), BTreeMap::from_iter([ - (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), + (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into())), ( Nibbles::from_nibbles([0x5, 0x0]), @@ -1097,7 +1098,7 @@ mod tests { pretty_assertions::assert_eq!( sparse.nodes.clone().into_iter().collect::>(), BTreeMap::from_iter([ - (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), + (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into())), ( Nibbles::from_nibbles([0x5, 0x0]), @@ -1128,7 +1129,7 @@ mod tests { pretty_assertions::assert_eq!( sparse.nodes.clone().into_iter().collect::>(), BTreeMap::from_iter([ - (Nibbles::new(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), + (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))), (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into())), ( Nibbles::from_nibbles([0x5, 0x0]), @@ -1147,7 +1148,7 @@ mod tests { pretty_assertions::assert_eq!( sparse.nodes.clone().into_iter().collect::>(), BTreeMap::from_iter([( - Nibbles::new(), + Nibbles::default(), SparseNode::new_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])) ),]) ); @@ -1157,7 +1158,42 @@ mod tests { // Empty pretty_assertions::assert_eq!( sparse.nodes.clone().into_iter().collect::>(), - BTreeMap::from_iter([(Nibbles::new(), SparseNode::Empty),]) + BTreeMap::from_iter([(Nibbles::default(), SparseNode::Empty),]) + ); + } + + #[test] + fn 
sparse_trie_remove_leaf_blinded() { + let mut sparse = RevealedSparseTrie::default(); + + let leaf = LeafNode::new( + Nibbles::default(), + alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec(), + ); + + // Reveal a branch node and one of its children + // + // Branch (Mask = 11) + // ├── 0 -> Hash (Path = 0) + // └── 1 -> Leaf (Path = 1) + sparse + .reveal_node( + Nibbles::default(), + TrieNode::Branch(BranchNode::new( + vec![ + RlpNode::word_rlp(&B256::repeat_byte(1)), + RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), + ], + TrieMask::new(0b11), + )), + ) + .unwrap(); + sparse.reveal_node(Nibbles::from_nibbles([0x1]), TrieNode::Leaf(leaf)).unwrap(); + + // Removing a blinded leaf should result in an error + assert_matches!( + sparse.remove_leaf(&Nibbles::from_nibbles([0x0])), + Err(SparseTrieError::BlindedNode { path, hash }) if path == Nibbles::from_nibbles([0x0]) && hash == B256::repeat_byte(1) ); } From b20a2715514a320c8633356fb7430d7f37a847e9 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Tue, 22 Oct 2024 16:56:04 +0200 Subject: [PATCH 193/425] chore(tree): improved debug logging for block insertion (#11958) --- crates/engine/tree/src/tree/mod.rs | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index a2abd3f53..021b3149a 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -259,6 +259,7 @@ impl TreeState { } } } + debug!(target: "engine::tree", ?upper_bound, ?last_persisted_hash, "Removed canonical blocks from the tree"); } /// Removes all blocks that are below the finalized block, as well as removing non-canonical @@ -1593,7 +1594,7 @@ where /// Returns an error if we failed to fetch the state from the database. fn state_provider(&self, hash: B256) -> ProviderResult> { if let Some((historical, blocks)) = self.state.tree_state.blocks_by_hash(hash) { - trace!(target: "engine::tree", %hash, "found canonical state for block in memory"); + debug!(target: "engine::tree", %hash, %historical, "found canonical state for block in memory"); // the block leads back to the canonical chain let historical = self.provider.state_by_block_hash(historical)?; return Ok(Some(Box::new(MemoryOverlayStateProvider::new(historical, blocks)))) @@ -1601,13 +1602,13 @@ where // the hash could belong to an unknown block or a persisted block if let Some(header) = self.provider.header(&hash)? 
{ - trace!(target: "engine::tree", %hash, number = %header.number, "found canonical state for block in database"); + debug!(target: "engine::tree", %hash, number = %header.number, "found canonical state for block in database"); // the block is known and persisted let historical = self.provider.state_by_block_hash(hash)?; return Ok(Some(historical)) } - trace!(target: "engine::tree", %hash, "no canonical state found for block"); + debug!(target: "engine::tree", %hash, "no canonical state found for block"); Ok(None) } @@ -2137,7 +2138,8 @@ where &mut self, block: SealedBlockWithSenders, ) -> Result { - debug!(target: "engine::tree", block=?block.num_hash(), "Inserting new block into tree"); + debug!(target: "engine::tree", block=?block.num_hash(), parent = ?block.parent_hash, state_root = ?block.state_root, "Inserting new block into tree"); + if self.block_by_hash(block.hash())?.is_some() { return Ok(InsertPayloadOk2::AlreadySeen(BlockStatus2::Valid)) } @@ -2206,7 +2208,7 @@ where let hashed_state = HashedPostState::from_bundle_state(&output.state.state); - trace!(target: "engine::tree", block=?BlockNumHash::new(block_number, block_hash), "Calculating block state root"); + trace!(target: "engine::tree", block=?sealed_block.num_hash(), "Calculating block state root"); let root_time = Instant::now(); let mut state_root_result = None; @@ -2232,7 +2234,7 @@ where let (state_root, trie_output) = if let Some(result) = state_root_result { result } else { - debug!(target: "engine::tree", persistence_in_progress, "Failed to compute state root in parallel"); + debug!(target: "engine::tree", block=?sealed_block.num_hash(), persistence_in_progress, "Failed to compute state root in parallel"); state_provider.state_root_with_updates(hashed_state.clone())? }; @@ -2252,7 +2254,7 @@ where let root_elapsed = root_time.elapsed(); self.metrics.block_validation.record_state_root(&trie_output, root_elapsed.as_secs_f64()); - debug!(target: "engine::tree", ?root_elapsed, ?block_number, "Calculated state root"); + debug!(target: "engine::tree", ?root_elapsed, block=?sealed_block.num_hash(), "Calculated state root"); let executed = ExecutedBlock { block: sealed_block.clone(), @@ -2301,6 +2303,7 @@ where let mut input = TrieInput::default(); if let Some((historical, blocks)) = self.state.tree_state.blocks_by_hash(parent_hash) { + debug!(target: "engine::tree", %parent_hash, %historical, "Calculating state root in parallel, parent found in memory"); // Retrieve revert state for historical block. let revert_state = consistent_view.revert_state(historical)?; input.append(revert_state); @@ -2311,6 +2314,7 @@ where } } else { // The block attaches to canonical persisted parent. 
+ debug!(target: "engine::tree", %parent_hash, "Calculating state root in parallel, parent found in disk"); let revert_state = consistent_view.revert_state(parent_hash)?; input.append(revert_state); } From 8b1dfcca3fd1edcbf574e969ce30493f997e6d19 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Tue, 22 Oct 2024 15:56:17 +0100 Subject: [PATCH 194/425] test(trie): proptest <-> alloy maps integration (#11962) --- book/sources/Cargo.toml | 1 - crates/trie/sparse/benches/root.rs | 2 ++ crates/trie/sparse/src/trie.rs | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/book/sources/Cargo.toml b/book/sources/Cargo.toml index 1529af952..32fb13990 100644 --- a/book/sources/Cargo.toml +++ b/book/sources/Cargo.toml @@ -8,4 +8,3 @@ members = [ # Explicitly set the resolver to version 2, which is the default for packages with edition >= 2021 # https://doc.rust-lang.org/edition-guide/rust-2021/default-cargo-resolver.html resolver = "2" - diff --git a/crates/trie/sparse/benches/root.rs b/crates/trie/sparse/benches/root.rs index 248e3caee..bc221a8f8 100644 --- a/crates/trie/sparse/benches/root.rs +++ b/crates/trie/sparse/benches/root.rs @@ -198,6 +198,8 @@ fn generate_test_data(size: usize) -> HashMap { .new_tree(&mut runner) .unwrap() .current() + .into_iter() + .collect() } criterion_group!(root, calculate_root_from_leaves, calculate_root_from_leaves_repeated); diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index 42f90d34c..a0f7a0c30 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -1276,6 +1276,6 @@ mod tests { ), 1..100, ) - )| { test(updates) }); + )| { test(updates.into_iter().collect()) }); } } From 468ac0d43b5ff9538d89061bdb15604041948562 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 22 Oct 2024 19:00:10 +0200 Subject: [PATCH 195/425] chore: log enode (#11974) --- crates/ethereum/node/src/node.rs | 4 ++-- crates/optimism/node/src/node.rs | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/ethereum/node/src/node.rs b/crates/ethereum/node/src/node.rs index 3df46b485..dbd6ce0a1 100644 --- a/crates/ethereum/node/src/node.rs +++ b/crates/ethereum/node/src/node.rs @@ -11,7 +11,7 @@ use reth_ethereum_engine_primitives::{ }; use reth_evm::execute::BasicBlockExecutorProvider; use reth_evm_ethereum::execute::EthExecutionStrategyFactory; -use reth_network::NetworkHandle; +use reth_network::{NetworkHandle, PeersInfo}; use reth_node_api::{ AddOnsContext, ConfigureEvm, EngineValidator, FullNodeComponents, NodePrimitives, NodeTypesWithDB, @@ -314,7 +314,7 @@ where ) -> eyre::Result { let network = ctx.network_builder().await?; let handle = ctx.start_network(network, pool); - + info!(target: "reth::cli", enode=%handle.local_node_record(), "P2P networking initialized"); Ok(handle) } } diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 22fc1a88f..9492bb8c4 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -5,7 +5,7 @@ use std::sync::Arc; use reth_basic_payload_builder::{BasicPayloadJobGenerator, BasicPayloadJobGeneratorConfig}; use reth_chainspec::{EthChainSpec, Hardforks}; use reth_evm::{execute::BasicBlockExecutorProvider, ConfigureEvm}; -use reth_network::{NetworkConfig, NetworkHandle, NetworkManager}; +use reth_network::{NetworkConfig, NetworkHandle, NetworkManager, PeersInfo}; use reth_node_api::{ AddOnsContext, EngineValidator, FullNodeComponents, NodeAddOns, NodePrimitives, }; @@ -432,6 +432,7 @@ where let 
network_config = self.network_config(ctx)?; let network = NetworkManager::builder(network_config).await?; let handle = ctx.start_network(network, pool); + info!(target: "reth::cli", enode=%handle.local_node_record(), "P2P networking initialized"); Ok(handle) } From e70b112420b1b06fc0d451ec79d2c01a561dd8af Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Tue, 22 Oct 2024 19:04:58 +0100 Subject: [PATCH 196/425] feat(trie): update sparse trie hashes below level (#11969) --- crates/trie/sparse/src/trie.rs | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index a0f7a0c30..39deb50e7 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -518,34 +518,35 @@ impl RevealedSparseTrie { } } - /// Update node hashes only if their path exceeds the provided level. - pub fn update_rlp_node_level(&mut self, min_len: usize) { - let mut paths = Vec::from([Nibbles::default()]); + /// Update hashes of the nodes that are located at a level deeper than or equal to the provided + /// depth. Root node has a level of 0. + pub fn update_rlp_node_level(&mut self, depth: usize) { + let mut paths = Vec::from([(Nibbles::default(), 0)]); let mut targets = HashSet::::default(); - while let Some(mut path) = paths.pop() { + while let Some((mut path, level)) = paths.pop() { match self.nodes.get(&path).unwrap() { SparseNode::Empty | SparseNode::Hash(_) => {} SparseNode::Leaf { .. } => { targets.insert(path); } SparseNode::Extension { key, .. } => { - if path.len() >= min_len { + if level >= depth { targets.insert(path); } else { path.extend_from_slice_unchecked(key); - paths.push(path); + paths.push((path, level + 1)); } } SparseNode::Branch { state_mask, .. 
} => { - if path.len() >= min_len { + if level >= depth { targets.insert(path); } else { for bit in CHILD_INDEX_RANGE { if state_mask.is_bit_set(bit) { let mut child_path = path.clone(); child_path.push_unchecked(bit); - paths.push(child_path); + paths.push((child_path, level + 1)); } } } From 3174bd5c913a76803403d83ded0a9623ac1d9493 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 22 Oct 2024 20:12:15 +0200 Subject: [PATCH 197/425] chore: bump aquamarine (#11965) --- Cargo.lock | 30 +++--------------------------- Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 28 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 59c582fec..e1cead609 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -847,13 +847,13 @@ checksum = "37bf3594c4c988a53154954629820791dde498571819ae4ca50ca811e060cc95" [[package]] name = "aquamarine" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21cc1548309245035eb18aa7f0967da6bc65587005170c56e6ef2788a4cf3f4e" +checksum = "0f50776554130342de4836ba542aa85a4ddb361690d7e8df13774d7284c3d5c2" dependencies = [ "include_dir", "itertools 0.10.5", - "proc-macro-error", + "proc-macro-error2", "proc-macro2", "quote", "syn 2.0.80", @@ -5773,30 +5773,6 @@ dependencies = [ "toml_edit", ] -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn 1.0.109", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - [[package]] name = "proc-macro-error-attr2" version = "2.0.0" diff --git a/Cargo.toml b/Cargo.toml index 48a6525d5..274af4cd7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -466,7 +466,7 @@ op-alloy-network = "0.5" op-alloy-consensus = "0.5" # misc -aquamarine = "0.5" +aquamarine = "0.6" auto_impl = "1" backon = { version = "1.2", default-features = false, features = [ "std-blocking-sleep", From 28c61c15b42305edca436e980e29c2cf5087a7c5 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 22 Oct 2024 21:46:00 +0200 Subject: [PATCH 198/425] fix: invoke prometheus recorder on op-reth Cli::run (#11982) --- bin/reth/src/cli/mod.rs | 3 +-- crates/optimism/cli/src/lib.rs | 4 ++++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/bin/reth/src/cli/mod.rs b/bin/reth/src/cli/mod.rs index 01f8f73e7..01662eb4d 100644 --- a/bin/reth/src/cli/mod.rs +++ b/bin/reth/src/cli/mod.rs @@ -146,8 +146,7 @@ impl, Ext: clap::Args + fmt::Debug> Cl let _guard = self.init_tracing()?; info!(target: "reth::cli", "Initialized tracing, debug log directory: {}", self.logs.log_file_directory); - // Install the prometheus recorder to be sure to record task - // executor's metrics + // Install the prometheus recorder to be sure to record all metrics let _ = install_prometheus_recorder(); let runner = CliRunner::default(); diff --git a/crates/optimism/cli/src/lib.rs b/crates/optimism/cli/src/lib.rs index e6eed86bf..235b44559 100644 --- a/crates/optimism/cli/src/lib.rs +++ b/crates/optimism/cli/src/lib.rs @@ -54,6 +54,7 @@ use tracing::info; // This allows us to manually enable node metrics features, required for proper jemalloc metric // reporting use reth_node_metrics 
as _; +use reth_node_metrics::recorder::install_prometheus_recorder; /// The main op-reth cli interface. /// @@ -135,6 +136,9 @@ where let _guard = self.init_tracing()?; info!(target: "reth::cli", "Initialized tracing, debug log directory: {}", self.logs.log_file_directory); + // Install the prometheus recorder to be sure to record all metrics + let _ = install_prometheus_recorder(); + let runner = CliRunner::default(); match self.command { Commands::Node(command) => { From 22171d27bf575a45308063072ea7984846dbd8e1 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Tue, 22 Oct 2024 22:24:25 +0200 Subject: [PATCH 199/425] chore: unpin serde (#11977) --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 274af4cd7..f1506366d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -503,7 +503,7 @@ rand = "0.8.5" rayon = "1.7" rustc-hash = { version = "2.0", default-features = false } schnellru = "0.2" -serde = { version = "=1.0.210", default-features = false } +serde = { version = "1.0", default-features = false } serde_json = "1.0.94" serde_with = "3.3.0" sha2 = { version = "0.10", default-features = false } From 8bfbd977952cbcd42a66f46df235ee9694476a52 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 22 Oct 2024 23:19:34 +0200 Subject: [PATCH 200/425] feat(discv4): add soft_remove_node (#11970) --- crates/net/discv4/Cargo.toml | 2 +- crates/net/discv4/src/lib.rs | 26 +++++++++++++++++++------- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/crates/net/discv4/Cargo.toml b/crates/net/discv4/Cargo.toml index f008d03b5..6fb669388 100644 --- a/crates/net/discv4/Cargo.toml +++ b/crates/net/discv4/Cargo.toml @@ -46,7 +46,7 @@ serde = { workspace = true, optional = true } [dev-dependencies] assert_matches.workspace = true rand.workspace = true -tokio = { workspace = true, features = ["macros"] } +tokio = { workspace = true, features = ["macros", "rt-multi-thread"] } reth-tracing.workspace = true [features] diff --git a/crates/net/discv4/src/lib.rs b/crates/net/discv4/src/lib.rs index 7963c6e6f..c9b14d38b 100644 --- a/crates/net/discv4/src/lib.rs +++ b/crates/net/discv4/src/lib.rs @@ -829,6 +829,24 @@ impl Discv4Service { /// table. Returns `true` if the node was in the table and `false` otherwise. 
pub fn remove_node(&mut self, node_id: PeerId) -> bool { let key = kad_key(node_id); + self.remove_key(node_id, key) + } + + /// Removes a `node_id` from the routing table but only if there are enough other nodes in the + /// bucket (bucket must be at least half full) + /// + /// Returns `true` if the node was removed + pub fn soft_remove_node(&mut self, node_id: PeerId) -> bool { + let key = kad_key(node_id); + let Some(bucket) = self.kbuckets.get_bucket(&key) else { return false }; + if bucket.num_entries() < MAX_NODES_PER_BUCKET / 2 { + // skip half empty bucket + return false; + } + self.remove_key(node_id, key) + } + + fn remove_key(&mut self, node_id: PeerId, key: discv5::Key) -> bool { let removed = self.kbuckets.remove(&key); if removed { trace!(target: "discv4", ?node_id, "removed node"); @@ -1491,13 +1509,7 @@ impl Discv4Service { // the table, but only if there are enough other nodes in the bucket (bucket must be at // least half full) if failures > (self.config.max_find_node_failures as usize) { - if let Some(bucket) = self.kbuckets.get_bucket(&key) { - if bucket.num_entries() < MAX_NODES_PER_BUCKET / 2 { - // skip half empty bucket - continue - } - } - self.remove_node(node_id); + self.soft_remove_node(node_id); } } } From 527d344ddab7397ede9f06874b9a00746fa2fa41 Mon Sep 17 00:00:00 2001 From: Moe Mahhouk Date: Tue, 22 Oct 2024 23:52:49 +0200 Subject: [PATCH 201/425] feat: Add reproducible build profile (#10459) Co-authored-by: Roman Krasiuk Co-authored-by: Dan Cline <6798349+Rjected@users.noreply.github.com> --- Cargo.toml | 7 +++++++ Dockerfile.reproducible | 37 +++++++++++++++++++++++++++++++++++++ Makefile | 10 ++++++++++ 3 files changed, 54 insertions(+) create mode 100644 Dockerfile.reproducible diff --git a/Cargo.toml b/Cargo.toml index f1506366d..ea90cd7fb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -288,6 +288,13 @@ codegen-units = 1 inherits = "release" lto = "fat" +[profile.reproducible] +inherits = "release" +debug = false +panic = "abort" +codegen-units = 1 +overflow-checks = true + [workspace.dependencies] # reth op-reth = { path = "crates/optimism/bin" } diff --git a/Dockerfile.reproducible b/Dockerfile.reproducible new file mode 100644 index 000000000..12c12dd7c --- /dev/null +++ b/Dockerfile.reproducible @@ -0,0 +1,37 @@ +# Use the Rust 1.82 image based on Debian Bullseye +FROM rust:1.82-bullseye@sha256:c42c8ca762560c182ba30edda0e0d71a8604040af2672370559d7e854653c66d AS builder + +# Install specific version of libclang-dev +RUN apt-get update && apt-get install -y libclang-dev=1:11.0-51+nmu5 + +# Clone the repository at the specific branch +RUN git clone https://github.com/paradigmxyz/reth /app +WORKDIR /app + +# Checkout the reproducible-build branch +RUN git checkout reproducible-build + +# Get the latest commit timestamp and set SOURCE_DATE_EPOCH +RUN SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && \ + echo "SOURCE_DATE_EPOCH=$SOURCE_DATE_EPOCH" >> /etc/environment + +# Set environment variables for reproducibility +ARG RUSTFLAGS="-C target-feature=+crt-static -C link-arg=-Wl,--build-id=none -Clink-arg=-static-libgcc -C metadata='' --remap-path-prefix $(pwd)=." +ENV SOURCE_DATE_EPOCH=$SOURCE_DATE_EPOCH \ + CARGO_INCREMENTAL=0 \ + LC_ALL=C \ + TZ=UTC \ + RUSTFLAGS="${RUSTFLAGS}" + +# Set the default features if not provided +ARG FEATURES="jemalloc asm-keccak" + +# Build the project with the reproducible settings +RUN . 
/etc/environment && \ + cargo build --bin reth --features "${FEATURES}" --profile "reproducible" --locked --target x86_64-unknown-linux-gnu + +# Create a minimal final image with just the binary +FROM scratch AS binaries + +# Copy the compiled binary from the builder stage +COPY --from=builder /app/target/x86_64-unknown-linux-gnu/reproducible/reth /reth diff --git a/Makefile b/Makefile index 908f1ef24..5ad7abac6 100644 --- a/Makefile +++ b/Makefile @@ -62,6 +62,16 @@ install-op: ## Build and install the op-reth binary under `~/.cargo/bin`. build: ## Build the reth binary into `target` directory. cargo build --bin reth --features "$(FEATURES)" --profile "$(PROFILE)" +SOURCE_DATE_EPOCH := $(shell git log -1 --pretty=%ct) +.PHONY: reproducible +reproducible: ## Build the reth binary into `target` directory with reproducible builds. Only works for x86_64-unknown-linux-gnu currently + SOURCE_DATE_EPOCH=$(SOURCE_DATE_EPOCH) \ + CARGO_INCREMENTAL=0 \ + LC_ALL=C \ + TZ=UTC \ + RUSTFLAGS="-C target-feature=+crt-static -C link-arg=-Wl,--build-id=none -Clink-arg=-static-libgcc -C metadata='' --remap-path-prefix $$(pwd)=." \ + cargo build --bin reth --features "$(FEATURES)" --profile "reproducible" --locked --target x86_64-unknown-linux-gnu + .PHONY: build-debug build-debug: ## Build the reth binary into `target/debug` directory. cargo build --bin reth --features "$(FEATURES)" From d68dca1a26b69e4784b7b4671b05641025e657fc Mon Sep 17 00:00:00 2001 From: alpharush <0xalpharush@protonmail.com> Date: Tue, 22 Oct 2024 17:34:09 -0500 Subject: [PATCH 202/425] chore: fix cargo feature warning (#11900) Co-authored-by: Matthias Seitz Co-authored-by: Dan Cline <6798349+Rjected@users.noreply.github.com> --- bin/reth/Cargo.toml | 2 +- crates/cli/commands/Cargo.toml | 7 ++++++- crates/cli/commands/src/lib.rs | 2 +- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/bin/reth/Cargo.toml b/bin/reth/Cargo.toml index 8380915d4..ffd1998b2 100644 --- a/bin/reth/Cargo.toml +++ b/bin/reth/Cargo.toml @@ -101,7 +101,7 @@ tempfile.workspace = true [features] default = ["jemalloc"] -dev = ["reth-cli-commands/dev"] +dev = ["reth-cli-commands/arbitrary"] asm-keccak = [ "reth-node-core/asm-keccak", diff --git a/crates/cli/commands/Cargo.toml b/crates/cli/commands/Cargo.toml index e307859df..6f4b1008f 100644 --- a/crates/cli/commands/Cargo.toml +++ b/crates/cli/commands/Cargo.toml @@ -88,10 +88,15 @@ reth-discv4.workspace = true [features] default = [] -dev = [ +arbitrary = [ "dep:proptest", "dep:arbitrary", "dep:proptest-arbitrary-interop", "reth-primitives/arbitrary", "reth-db-api/arbitrary", + "reth-eth-wire/arbitrary", + "reth-db/arbitrary", + "reth-chainspec/arbitrary", + "alloy-eips/arbitrary", + "alloy-primitives/arbitrary", ] diff --git a/crates/cli/commands/src/lib.rs b/crates/cli/commands/src/lib.rs index 33a38ddbc..166ea438f 100644 --- a/crates/cli/commands/src/lib.rs +++ b/crates/cli/commands/src/lib.rs @@ -20,7 +20,7 @@ pub mod p2p; pub mod prune; pub mod recover; pub mod stage; -#[cfg(feature = "dev")] +#[cfg(feature = "arbitrary")] pub mod test_vectors; pub use node::NodeCommand; From 74eb37523a8ad74b7748701703ea3f3542e5fec7 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 23 Oct 2024 02:38:44 +0400 Subject: [PATCH 203/425] fix: correctly poll `BasicBlockDownloader` (#11981) --- crates/engine/tree/src/engine.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/crates/engine/tree/src/engine.rs b/crates/engine/tree/src/engine.rs index c1571ed82..914121adc 100644 --- 
a/crates/engine/tree/src/engine.rs +++ b/crates/engine/tree/src/engine.rs @@ -113,9 +113,11 @@ where } // advance the downloader - if let Poll::Ready(DownloadOutcome::Blocks(blocks)) = self.downloader.poll(cx) { - // delegate the downloaded blocks to the handler - self.handler.on_event(FromEngine::DownloadedBlocks(blocks)); + if let Poll::Ready(outcome) = self.downloader.poll(cx) { + if let DownloadOutcome::Blocks(blocks) = outcome { + // delegate the downloaded blocks to the handler + self.handler.on_event(FromEngine::DownloadedBlocks(blocks)); + } continue } From 6e5176221ff8b5cacea12c86dc52feba0de1593a Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 23 Oct 2024 01:20:19 +0200 Subject: [PATCH 204/425] fix: ping oldest more often (#11988) --- crates/net/discv4/src/config.rs | 8 +++----- crates/net/discv4/src/lib.rs | 6 ++++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/net/discv4/src/config.rs b/crates/net/discv4/src/config.rs index 4fae31f58..c934f3361 100644 --- a/crates/net/discv4/src/config.rs +++ b/crates/net/discv4/src/config.rs @@ -8,8 +8,6 @@ use alloy_rlp::Encodable; use reth_net_banlist::BanList; use reth_net_nat::{NatResolver, ResolveNatInterval}; use reth_network_peers::NodeRecord; -#[cfg(feature = "serde")] -use serde::{Deserialize, Serialize}; use std::{ collections::{HashMap, HashSet}, time::Duration, @@ -17,7 +15,7 @@ use std::{ /// Configuration parameters that define the performance of the discovery network. #[derive(Clone, Debug)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Discv4Config { /// Whether to enable the incoming packet filter. Default: false. pub enable_packet_filter: bool, @@ -118,7 +116,7 @@ impl Default for Discv4Config { // Every outgoing request will eventually lead to an incoming response udp_ingress_message_buffer: 1024, max_find_node_failures: 5, - ping_interval: Duration::from_secs(60 * 10), + ping_interval: Duration::from_secs(10), // Unified expiration and timeout durations, mirrors geth's `expiration` duration ping_expiration: Duration::from_secs(20), bond_expiration: Duration::from_secs(60 * 60), @@ -144,7 +142,7 @@ impl Default for Discv4Config { /// Builder type for [`Discv4Config`] #[derive(Clone, Debug, Default)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Discv4ConfigBuilder { config: Discv4Config, } diff --git a/crates/net/discv4/src/lib.rs b/crates/net/discv4/src/lib.rs index c9b14d38b..77f64ff82 100644 --- a/crates/net/discv4/src/lib.rs +++ b/crates/net/discv4/src/lib.rs @@ -870,7 +870,9 @@ impl Discv4Service { false } - /// Update the entry on RE-ping + /// Update the entry on RE-ping. + /// + /// Invoked when we received the Pong to our [`PingReason::RePing`] ping. /// /// On re-ping we check for a changed `enr_seq` if eip868 is enabled and when it changed we sent /// a followup request to retrieve the updated ENR @@ -2259,7 +2261,7 @@ impl NodeEntry { impl NodeEntry { /// Returns true if the node should be re-pinged. 
fn is_expired(&self) -> bool { - self.last_seen.elapsed() > ENDPOINT_PROOF_EXPIRATION + self.last_seen.elapsed() > (ENDPOINT_PROOF_EXPIRATION / 2) } } From ee1260a1de29325c52a756cc84ef4fd49f7deb19 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 23 Oct 2024 11:38:45 +0200 Subject: [PATCH 205/425] refactor(tx-pool): move `is_underpriced` to `ValidPoolTransaction` impl (#11938) --- crates/transaction-pool/src/pool/txpool.rs | 49 +------------------ crates/transaction-pool/src/validate/mod.rs | 53 +++++++++++++++++++++ 2 files changed, 54 insertions(+), 48 deletions(-) diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index 4fbec1105..03e69c390 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -1511,52 +1511,6 @@ impl AllTransactions { Ok(new_blob_tx) } - /// Returns true if the replacement candidate is underpriced and can't replace the existing - /// transaction. - #[inline] - fn is_underpriced( - existing_transaction: &ValidPoolTransaction, - maybe_replacement: &ValidPoolTransaction, - price_bumps: &PriceBumpConfig, - ) -> bool { - let price_bump = price_bumps.price_bump(existing_transaction.tx_type()); - - if maybe_replacement.max_fee_per_gas() <= - existing_transaction.max_fee_per_gas() * (100 + price_bump) / 100 - { - return true - } - - let existing_max_priority_fee_per_gas = - existing_transaction.transaction.max_priority_fee_per_gas().unwrap_or(0); - let replacement_max_priority_fee_per_gas = - maybe_replacement.transaction.max_priority_fee_per_gas().unwrap_or(0); - - if replacement_max_priority_fee_per_gas <= - existing_max_priority_fee_per_gas * (100 + price_bump) / 100 && - existing_max_priority_fee_per_gas != 0 && - replacement_max_priority_fee_per_gas != 0 - { - return true - } - - // check max blob fee per gas - if let Some(existing_max_blob_fee_per_gas) = - existing_transaction.transaction.max_fee_per_blob_gas() - { - // this enforces that blob txs can only be replaced by blob txs - let replacement_max_blob_fee_per_gas = - maybe_replacement.transaction.max_fee_per_blob_gas().unwrap_or(0); - if replacement_max_blob_fee_per_gas <= - existing_max_blob_fee_per_gas * (100 + price_bump) / 100 - { - return true - } - } - - false - } - /// Inserts a new _valid_ transaction into the pool. /// /// If the transaction already exists, it will be replaced if not underpriced. 
@@ -1671,8 +1625,7 @@ impl AllTransactions { let maybe_replacement = transaction.as_ref(); // Ensure the new transaction is not underpriced - if Self::is_underpriced(existing_transaction, maybe_replacement, &self.price_bumps) - { + if existing_transaction.is_underpriced(maybe_replacement, &self.price_bumps) { return Err(InsertErr::Underpriced { transaction: pool_tx.transaction, existing: *entry.get().transaction.hash(), diff --git a/crates/transaction-pool/src/validate/mod.rs b/crates/transaction-pool/src/validate/mod.rs index 4395cc979..4a82a1a14 100644 --- a/crates/transaction-pool/src/validate/mod.rs +++ b/crates/transaction-pool/src/validate/mod.rs @@ -4,6 +4,7 @@ use crate::{ error::InvalidPoolTransactionError, identifier::{SenderId, TransactionId}, traits::{PoolTransaction, TransactionOrigin}, + PriceBumpConfig, }; use alloy_primitives::{Address, TxHash, B256, U256}; use futures_util::future::Either; @@ -372,6 +373,58 @@ impl ValidPoolTransaction { pub(crate) fn tx_type_conflicts_with(&self, other: &Self) -> bool { self.is_eip4844() != other.is_eip4844() } + + /// Determines whether a candidate transaction (`maybe_replacement`) is underpriced compared to + /// an existing transaction in the pool. + /// + /// A transaction is considered underpriced if it doesn't meet the required fee bump threshold. + /// This applies to both standard gas fees and, for blob-carrying transactions (EIP-4844), + /// the blob-specific fees. + #[inline] + pub(crate) fn is_underpriced( + &self, + maybe_replacement: &Self, + price_bumps: &PriceBumpConfig, + ) -> bool { + // Retrieve the required price bump percentage for this type of transaction. + // + // The bump is different for EIP-4844 and other transactions. See `PriceBumpConfig`. + let price_bump = price_bumps.price_bump(self.tx_type()); + + // Check if the max fee per gas is underpriced. 
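+        // For instance, with a price bump of 10 (i.e. a 10% increase), an existing
+        // max_fee_per_gas of 100 requires the replacement to pay strictly more than
+        // 100 * (100 + 10) / 100 = 110; a replacement paying exactly 110 still fails
+        // the `<=` check below and is treated as underpriced.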
+ if maybe_replacement.max_fee_per_gas() <= self.max_fee_per_gas() * (100 + price_bump) / 100 + { + return true + } + + let existing_max_priority_fee_per_gas = + self.transaction.max_priority_fee_per_gas().unwrap_or_default(); + let replacement_max_priority_fee_per_gas = + maybe_replacement.transaction.max_priority_fee_per_gas().unwrap_or_default(); + + // Check max priority fee per gas (relevant for EIP-1559 transactions only) + if existing_max_priority_fee_per_gas != 0 && + replacement_max_priority_fee_per_gas != 0 && + replacement_max_priority_fee_per_gas <= + existing_max_priority_fee_per_gas * (100 + price_bump) / 100 + { + return true + } + + // Check max blob fee per gas + if let Some(existing_max_blob_fee_per_gas) = self.transaction.max_fee_per_blob_gas() { + // This enforces that blob txs can only be replaced by blob txs + let replacement_max_blob_fee_per_gas = + maybe_replacement.transaction.max_fee_per_blob_gas().unwrap_or_default(); + if replacement_max_blob_fee_per_gas <= + existing_max_blob_fee_per_gas * (100 + price_bump) / 100 + { + return true + } + } + + false + } } impl>> ValidPoolTransaction { From 7fd28df2b4a01cc88a85ccb46cebbbff547123c4 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 23 Oct 2024 11:39:12 +0200 Subject: [PATCH 206/425] fix: re-establish bond on ping (#11989) --- crates/net/discv4/src/lib.rs | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/crates/net/discv4/src/lib.rs b/crates/net/discv4/src/lib.rs index 77f64ff82..e955e45df 100644 --- a/crates/net/discv4/src/lib.rs +++ b/crates/net/discv4/src/lib.rs @@ -1048,11 +1048,23 @@ impl Discv4Service { let old_enr = match self.kbuckets.entry(&key) { kbucket::Entry::Present(mut entry, _) => { - is_proven = entry.value().has_endpoint_proof; + if entry.value().is_expired() { + // If no communication with the sender has occurred within the last 12h, a ping + // should be sent in addition to pong in order to receive an endpoint proof. + needs_bond = true; + } else { + is_proven = entry.value().has_endpoint_proof; + } entry.value_mut().update_with_enr(ping.enr_sq) } kbucket::Entry::Pending(mut entry, _) => { - is_proven = entry.value().has_endpoint_proof; + if entry.value().is_expired() { + // If no communication with the sender has occurred within the last 12h, a ping + // should be sent in addition to pong in order to receive an endpoint proof. + needs_bond = true; + } else { + is_proven = entry.value().has_endpoint_proof; + } entry.value().update_with_enr(ping.enr_sq) } kbucket::Entry::Absent(entry) => { @@ -1225,7 +1237,8 @@ impl Discv4Service { self.update_on_pong(node, pong.enr_sq); } PingReason::EstablishBond => { - // nothing to do here + // same as `InitialInsert` which renews the bond if the peer is in the table + self.update_on_pong(node, pong.enr_sq); } PingReason::RePing => { self.update_on_reping(node, pong.enr_sq); @@ -2270,8 +2283,7 @@ impl NodeEntry { enum PingReason { /// Initial ping to a previously unknown peer that was inserted into the table. InitialInsert, - /// Initial ping to a previously unknown peer that didn't fit into the table. But we still want - /// to establish a bond. + /// A ping to a peer to establish a bond (endpoint proof). EstablishBond, /// Re-ping a peer. 
RePing, From ff04de380dc9f107d0aec4a4f0b1bc5027c891c4 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 23 Oct 2024 16:53:29 +0400 Subject: [PATCH 207/425] chore: bump alloy-eip7702 (#11986) Co-authored-by: Oliver Nordbjerg --- Cargo.lock | 133 +++++++++--------- Cargo.toml | 60 ++++---- book/sources/Cargo.toml | 13 +- book/sources/exex/hello-world/Cargo.toml | 8 +- book/sources/exex/tracking-state/Cargo.toml | 10 +- crates/primitives/src/transaction/sidecar.rs | 90 ++---------- .../codecs/src/alloy/authorization_list.rs | 14 +- crates/transaction-pool/src/blobstore/disk.rs | 13 +- 8 files changed, 141 insertions(+), 200 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e1cead609..7e10bdb0a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -112,9 +112,9 @@ dependencies = [ [[package]] name = "alloy-consensus" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42642aed67f938363d9c7543e5ca4163cfb4205d9ec15fe933dc4e865d2932dd" +checksum = "cdf02dfacfc815214f9b54ff50d54900ba527a68fd73e2c5637ced3460005045" dependencies = [ "alloy-eips", "alloy-primitives", @@ -161,13 +161,14 @@ dependencies = [ [[package]] name = "alloy-eip7702" -version = "0.2.0" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eeffd2590ce780ddfaa9d0ae340eb2b4e08627650c4676eef537cef0b4bf535d" +checksum = "64ffc577390ce50234e02d841214b3dc0bea6aaaae8e04bbf3cb82e9a45da9eb" dependencies = [ "alloy-primitives", "alloy-rlp", "arbitrary", + "derive_more 1.0.0", "k256", "rand 0.8.5", "serde", @@ -176,9 +177,9 @@ dependencies = [ [[package]] name = "alloy-eips" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fbc52a30df46f9831ed74557dfad0d94b12420393662a8b9ef90e2d6c8cb4b0" +checksum = "769da342b6bcd945013925ef4c40763cc82f11e002c60702dba8b444bb60e5a7" dependencies = [ "alloy-eip2930", "alloy-eip7702", @@ -197,9 +198,9 @@ dependencies = [ [[package]] name = "alloy-genesis" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0787d1688b9806290313cc335d416cc7ee39b11e3245f3d218544c62572d92ba" +checksum = "c698ce0ada980b17f0323e1a28c7da8a2e9abc6dff5be9ee33d1525b28ac46b6" dependencies = [ "alloy-primitives", "alloy-serde", @@ -220,9 +221,9 @@ dependencies = [ [[package]] name = "alloy-json-rpc" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d55a16a5f9ca498a217c060414bcd1c43e934235dc8058b31b87dcd69ff4f105" +checksum = "c1050e1d65524c030b17442b6546b564da51fdab7f71bd534b001ba65f2ebb16" dependencies = [ "alloy-primitives", "alloy-sol-types", @@ -234,9 +235,9 @@ dependencies = [ [[package]] name = "alloy-network" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d236a8c3e1d5adc09b1b63c81815fc9b757d9a4ba9482cc899f9679b55dd437" +checksum = "da34a18446a27734473af3d77eb21c5ebbdf97ea8eb65c39c0b50916bc659023" dependencies = [ "alloy-consensus", "alloy-eips", @@ -255,9 +256,9 @@ dependencies = [ [[package]] name = "alloy-network-primitives" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd15a0990fa8a56d85a42d6a689719aa4eebf5e2f1a5c5354658c0bfc52cac9a" +checksum = "9a968c063fcfcb937736665c865a71fc2242b68916156f5ffa41fee7b44bb695" dependencies = [ "alloy-consensus", "alloy-eips", @@ -268,9 +269,9 @@ 
dependencies = [ [[package]] name = "alloy-node-bindings" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2249f3c3ce446cf4063fe3d1aa7530823643c2706a1cc63045e0683ebc497a0a" +checksum = "439fc6a933b9f8e8b272a8cac35dbeabaf2b2eaf9590482bebedb5782153118e" dependencies = [ "alloy-genesis", "alloy-primitives", @@ -317,9 +318,9 @@ dependencies = [ [[package]] name = "alloy-provider" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "316f522bb6f9ac3805132112197957013b570e20cfdad058e8339dae6030c849" +checksum = "c45dbc0e3630becef9e988b69d43339f68d67e32a854e3c855bc28bd5031895b" dependencies = [ "alloy-chains", "alloy-consensus", @@ -358,9 +359,9 @@ dependencies = [ [[package]] name = "alloy-pubsub" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "222cd9b17b1c5ad48de51a88ffbdb17f17145170288f22662f80ac88739125e6" +checksum = "1e3961a56e10f44bfd69dd3f4b0854b90b84c612b0c43708e738933e8b47f93a" dependencies = [ "alloy-json-rpc", "alloy-primitives", @@ -399,9 +400,9 @@ dependencies = [ [[package]] name = "alloy-rpc-client" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b2ab59712c594c9624aaa69e38e4d38f180cb569f1fa46cdaf8c21fd50793e5" +checksum = "917e5504e4f8f7e39bdc322ff81589ed54c1e462240adaeb58162c2d986a5a2b" dependencies = [ "alloy-json-rpc", "alloy-primitives", @@ -424,9 +425,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba21284319e12d053baa204d438db6c1577aedd94c1298e4becefdac1f9cec87" +checksum = "07c7eb2dc6db1dd41e5e7bd2b98a38813854efc30e034afd90d1e420e7f3de2b" dependencies = [ "alloy-primitives", "alloy-rpc-types-engine", @@ -437,9 +438,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-admin" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "416cc9f391d0b876c4c8da85f7131e771a88a55b917cc9a35e1724d9409e3b1c" +checksum = "bd468a4e3eddcd9d612cad657852de4b7475ac2080e7af9224fbf1df20ddffe0" dependencies = [ "alloy-genesis", "alloy-primitives", @@ -449,9 +450,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-anvil" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba40bea86c3102b9ed9b3be579e32e0b3e54e766248d873de5fc0437238c8df2" +checksum = "2640928d9b1d43bb1cec7a0d615e10c2b407c5bd8ff1fcbe49e6318a2b62d731" dependencies = [ "alloy-primitives", "alloy-serde", @@ -460,9 +461,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-beacon" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b535781fe224c101c3d957b514cb9f438d165ff0280e5c0b2f87a0d9a2950593" +checksum = "64f731ad2ef8d7dd75a4d28214f4922a5b683feee1e6df35bd7b427315f94366" dependencies = [ "alloy-eips", "alloy-primitives", @@ -474,9 +475,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-debug" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4303deacf4cbf12ed4431a5a1bbc3284f0defb4b8b72d9aa2b888656cc5ae657" +checksum = "06bd0757bfb3eccde06ee3f4e378f5839fe923d40956cff586018d4427a15bb5" dependencies = [ "alloy-primitives", "serde", @@ -484,9 +485,9 @@ dependencies = [ [[package]] 
name = "alloy-rpc-types-engine" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44848fced3b42260b9cb61f22102246636dfe5a2d0132f8d10a617df3cb1a74b" +checksum = "7d3d95c3bf03efbb7bdc1d097e2931f520aac47438b709ccd8f065a7793dd371" dependencies = [ "alloy-consensus", "alloy-eips", @@ -505,9 +506,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-eth" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35894711990019fafff0012b82b9176cbb744516eb2a9bbe6b8e5cae522163ee" +checksum = "e855b0daccf2320ba415753c3fed422abe9d3ad5d77b2d6cafcc9bcf32fe387f" dependencies = [ "alloy-consensus", "alloy-eips", @@ -526,9 +527,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-mev" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cac6250cad380a005ecb5ffc6d2facf03df0e72628d819a63dd8c3ade7a766ff" +checksum = "eca3753b9894235f915437f908644e737d8714c686ce4e8d03afbf585b23f074" dependencies = [ "alloy-eips", "alloy-primitives", @@ -539,9 +540,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-trace" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f568c5624881896d8a25e19acbdcbabadd8df339427ea2f10b2ee447d57c4509" +checksum = "ae58a997afde032cd021547c960a53eef6245f47969dd71886e9f63fb45a6048" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -553,9 +554,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-txpool" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4a37d2e1ed9b7daf20ad0b3e0092613cbae46737e0e988b23caa556c7067ce6" +checksum = "667e45c882fda207d4cc94c4bb35e24a23347955113dcb236a5e4e0eaddef826" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -565,9 +566,9 @@ dependencies = [ [[package]] name = "alloy-serde" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2843c195675f06b29c09a4315cccdc233ab5bdc7c0a3775909f9f0cab5e9ae0f" +checksum = "35c2661ca6785add8fc37aff8005439c806ffad58254c19939c6f59ac0d6596e" dependencies = [ "alloy-primitives", "arbitrary", @@ -577,9 +578,9 @@ dependencies = [ [[package]] name = "alloy-signer" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88b2a00d9803dfef99963303ffe41a7bf2221f3342f0a503d6741a9f4a18e5e5" +checksum = "67eca011160d18a7dc6d8cdc1e8dc13e2e86c908f8e41b02aa76e429d6fe7085" dependencies = [ "alloy-primitives", "async-trait", @@ -591,9 +592,9 @@ dependencies = [ [[package]] name = "alloy-signer-local" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a2505d4f8c98dcae86152d58d549cb4bcf953f8352fca903410e0a0ef535571" +checksum = "1c54b195a6ee5a83f32e7c697b4e6b565966737ed5a2ef9176bbbb39f720d023" dependencies = [ "alloy-consensus", "alloy-network", @@ -679,9 +680,9 @@ dependencies = [ [[package]] name = "alloy-transport" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dc2c8f6b8c227ef0398f702d954c4ab572c2ead3c1ed4a5157aa1cbaf959747" +checksum = "3e4a136e733f55fef0870b81e1f8f1db28e78973d1b1ae5a5df642ba39538a07" dependencies = [ "alloy-json-rpc", "base64 0.22.1", @@ -699,9 +700,9 @@ dependencies = [ [[package]] name = "alloy-transport-http" 
-version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd328e990d57f4c4e63899fb2c26877597d6503f8e0022a3d71b2d753ecbfc0c" +checksum = "1a6b358a89b6d107b92d09b61a61fbc04243942182709752c796f4b29402cead" dependencies = [ "alloy-json-rpc", "alloy-transport", @@ -714,9 +715,9 @@ dependencies = [ [[package]] name = "alloy-transport-ipc" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89aea26aaf1d67904a7ff95ec4a24ddd5e7d419a6945f641b885962d7c2803e2" +checksum = "4a899c43b7f5e3bc83762dfe5128fccd9cfa99f1f03c5f26bbfb2495ae8dcd35" dependencies = [ "alloy-json-rpc", "alloy-pubsub", @@ -733,9 +734,9 @@ dependencies = [ [[package]] name = "alloy-transport-ws" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e222e950ecc4ea12fbfb524b9a2275cac2cd5f57c8ce25bcaf1bd3ff80dd8fc8" +checksum = "d27aac1246e13c9e6fa0c784fbb0c56872c6224f78dbde388bb2213ccdf8af02" dependencies = [ "alloy-pubsub", "alloy-transport", @@ -9264,9 +9265,9 @@ dependencies = [ [[package]] name = "revm" -version = "16.0.0" +version = "17.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34e44692d5736cc44c697a372e507890f8797f06d1541c5f4b9bec594d90fd8a" +checksum = "2eab16cb0a8cd5ac88b11230b20df588b7e8aae7dfab4b3f830e98aebeb4b365" dependencies = [ "auto_impl", "cfg-if", @@ -9279,9 +9280,9 @@ dependencies = [ [[package]] name = "revm-inspectors" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a64e2246ad480167548724eb9c9c66945241b867c7d50894de3ca860c9823a45" +checksum = "1e29c662f7887f3b659d4b0fd234673419a8fcbeaa1ecc29bf7034c0a75cc8ea" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -9298,9 +9299,9 @@ dependencies = [ [[package]] name = "revm-interpreter" -version = "12.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f89940d17d5d077570de1977f52f69049595322e237cb6c754c3d47f668f023" +checksum = "fac2034454f8bc69dc7d3c94cdb1b57559e27f5ef0518771f1787de543d7d6a1" dependencies = [ "revm-primitives", "serde", @@ -9308,9 +9309,9 @@ dependencies = [ [[package]] name = "revm-precompile" -version = "13.0.0" +version = "14.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8f816aaea3245cbdbe7fdd84955df33597f9322c7912c3e3ba7bc855e03211f" +checksum = "7a88c8c7c5f9b988a9e65fc0990c6ce859cdb74114db705bd118a96d22d08027" dependencies = [ "aurora-engine-modexp", "blst", @@ -9328,9 +9329,9 @@ dependencies = [ [[package]] name = "revm-primitives" -version = "12.0.0" +version = "13.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "532411bbde45a46707c1d434dcdc29866cf261c1b748fb01b303ce3b4310b361" +checksum = "0d11fa1e195b0bebaf3fb18596f314a13ba3a4cb1fdd16d3465934d812fd921e" dependencies = [ "alloy-eip2930", "alloy-eip7702", diff --git a/Cargo.toml b/Cargo.toml index ea90cd7fb..e8f10229e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -418,9 +418,9 @@ reth-trie-db = { path = "crates/trie/db" } reth-trie-parallel = { path = "crates/trie/parallel" } # revm -revm = { version = "16.0.0", features = ["std"], default-features = false } -revm-inspectors = "0.9.0" -revm-primitives = { version = "12.0.0", features = [ +revm = { version = "17.0.0", features = ["std"], default-features = false } +revm-inspectors = "0.10.0" +revm-primitives = { version = 
"13.0.0", features = [ "std", ], default-features = false } @@ -432,39 +432,39 @@ alloy-rlp = "0.3.4" alloy-sol-types = "0.8.0" alloy-trie = { version = "0.7", default-features = false } -alloy-consensus = { version = "0.5.2", default-features = false } -alloy-eips = { version = "0.5.2", default-features = false } -alloy-genesis = { version = "0.5.2", default-features = false } -alloy-json-rpc = { version = "0.5.2", default-features = false } -alloy-network = { version = "0.5.2", default-features = false } -alloy-network-primitives = { version = "0.5.2", default-features = false } -alloy-node-bindings = { version = "0.5.2", default-features = false } -alloy-provider = { version = "0.5.2", features = [ +alloy-consensus = { version = "0.5.3", default-features = false } +alloy-eips = { version = "0.5.3", default-features = false } +alloy-genesis = { version = "0.5.3", default-features = false } +alloy-json-rpc = { version = "0.5.3", default-features = false } +alloy-network = { version = "0.5.3", default-features = false } +alloy-network-primitives = { version = "0.5.3", default-features = false } +alloy-node-bindings = { version = "0.5.3", default-features = false } +alloy-provider = { version = "0.5.3", features = [ "reqwest", ], default-features = false } -alloy-pubsub = { version = "0.5.2", default-features = false } -alloy-rpc-client = { version = "0.5.2", default-features = false } -alloy-rpc-types = { version = "0.5.2", features = [ +alloy-pubsub = { version = "0.5.3", default-features = false } +alloy-rpc-client = { version = "0.5.3", default-features = false } +alloy-rpc-types = { version = "0.5.3", features = [ "eth", ], default-features = false } -alloy-rpc-types-admin = { version = "0.5.2", default-features = false } -alloy-rpc-types-anvil = { version = "0.5.2", default-features = false } -alloy-rpc-types-beacon = { version = "0.5.2", default-features = false } -alloy-rpc-types-debug = { version = "0.5.2", default-features = false } -alloy-rpc-types-engine = { version = "0.5.2", default-features = false } -alloy-rpc-types-eth = { version = "0.5.2", default-features = false } -alloy-rpc-types-mev = { version = "0.5.2", default-features = false } -alloy-rpc-types-trace = { version = "0.5.2", default-features = false } -alloy-rpc-types-txpool = { version = "0.5.2", default-features = false } -alloy-serde = { version = "0.5.2", default-features = false } -alloy-signer = { version = "0.5.2", default-features = false } -alloy-signer-local = { version = "0.5.2", default-features = false } -alloy-transport = { version = "0.5.2" } -alloy-transport-http = { version = "0.5.2", features = [ +alloy-rpc-types-admin = { version = "0.5.3", default-features = false } +alloy-rpc-types-anvil = { version = "0.5.3", default-features = false } +alloy-rpc-types-beacon = { version = "0.5.3", default-features = false } +alloy-rpc-types-debug = { version = "0.5.3", default-features = false } +alloy-rpc-types-engine = { version = "0.5.3", default-features = false } +alloy-rpc-types-eth = { version = "0.5.3", default-features = false } +alloy-rpc-types-mev = { version = "0.5.3", default-features = false } +alloy-rpc-types-trace = { version = "0.5.3", default-features = false } +alloy-rpc-types-txpool = { version = "0.5.3", default-features = false } +alloy-serde = { version = "0.5.3", default-features = false } +alloy-signer = { version = "0.5.3", default-features = false } +alloy-signer-local = { version = "0.5.3", default-features = false } +alloy-transport = { version = "0.5.3" } +alloy-transport-http = { 
version = "0.5.3", features = [ "reqwest-rustls-tls", ], default-features = false } -alloy-transport-ipc = { version = "0.5.2", default-features = false } -alloy-transport-ws = { version = "0.5.2", default-features = false } +alloy-transport-ipc = { version = "0.5.3", default-features = false } +alloy-transport-ws = { version = "0.5.3", default-features = false } # op op-alloy-rpc-types = "0.5" diff --git a/book/sources/Cargo.toml b/book/sources/Cargo.toml index 32fb13990..b374ad798 100644 --- a/book/sources/Cargo.toml +++ b/book/sources/Cargo.toml @@ -1,10 +1,13 @@ [workspace] -members = [ - "exex/hello-world", - "exex/remote", - "exex/tracking-state", -] +members = ["exex/hello-world", "exex/remote", "exex/tracking-state"] # Explicitly set the resolver to version 2, which is the default for packages with edition >= 2021 # https://doc.rust-lang.org/edition-guide/rust-2021/default-cargo-resolver.html resolver = "2" + +[patch.'https://github.com/paradigmxyz/reth'] +reth = { path = "../../bin/reth" } +reth-exex = { path = "../../crates/exex/exex" } +reth-node-ethereum = { path = "../../crates/ethereum/node" } +reth-tracing = { path = "../../crates/tracing" } +reth-node-api = { path = "../../crates/node/api" } diff --git a/book/sources/exex/hello-world/Cargo.toml b/book/sources/exex/hello-world/Cargo.toml index e5d32a140..c466018c6 100644 --- a/book/sources/exex/hello-world/Cargo.toml +++ b/book/sources/exex/hello-world/Cargo.toml @@ -4,10 +4,10 @@ version = "0.1.0" edition = "2021" [dependencies] -reth = { git = "https://github.com/paradigmxyz/reth.git" } # Reth -reth-exex = { git = "https://github.com/paradigmxyz/reth.git" } # Execution Extensions +reth = { git = "https://github.com/paradigmxyz/reth.git" } # Reth +reth-exex = { git = "https://github.com/paradigmxyz/reth.git" } # Execution Extensions reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth.git" } # Ethereum Node implementation -reth-tracing = { git = "https://github.com/paradigmxyz/reth.git" } # Logging +reth-tracing = { git = "https://github.com/paradigmxyz/reth.git" } # Logging -eyre = "0.6" # Easy error handling +eyre = "0.6" # Easy error handling futures-util = "0.3" # Stream utilities for consuming notifications diff --git a/book/sources/exex/tracking-state/Cargo.toml b/book/sources/exex/tracking-state/Cargo.toml index 3ce21b0c3..a8e862d0a 100644 --- a/book/sources/exex/tracking-state/Cargo.toml +++ b/book/sources/exex/tracking-state/Cargo.toml @@ -5,10 +5,12 @@ edition = "2021" [dependencies] reth = { git = "https://github.com/paradigmxyz/reth.git" } -reth-exex = { git = "https://github.com/paradigmxyz/reth.git", features = ["serde"] } -reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth.git"} +reth-exex = { git = "https://github.com/paradigmxyz/reth.git", features = [ + "serde", +] } +reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth.git" } reth-tracing = { git = "https://github.com/paradigmxyz/reth.git" } -eyre = "0.6" # Easy error handling -futures-util = "0.3" # Stream utilities for consuming notifications +eyre = "0.6" # Easy error handling +futures-util = "0.3" # Stream utilities for consuming notifications alloy-primitives = "0.8.7" diff --git a/crates/primitives/src/transaction/sidecar.rs b/crates/primitives/src/transaction/sidecar.rs index edc1427d1..e901cbfc0 100644 --- a/crates/primitives/src/transaction/sidecar.rs +++ b/crates/primitives/src/transaction/sidecar.rs @@ -1,11 +1,9 @@ #![cfg_attr(docsrs, doc(cfg(feature = "c-kzg")))] use crate::{Signature, Transaction, 
TransactionSigned}; -use alloy_consensus::{ - constants::EIP4844_TX_TYPE_ID, transaction::TxEip4844, TxEip4844WithSidecar, -}; -use alloy_primitives::{keccak256, TxHash}; -use alloy_rlp::{Decodable, Error as RlpError, Header}; +use alloy_consensus::{constants::EIP4844_TX_TYPE_ID, TxEip4844WithSidecar}; +use alloy_primitives::TxHash; +use alloy_rlp::Header; use serde::{Deserialize, Serialize}; #[doc(inline)] @@ -14,8 +12,6 @@ pub use alloy_eips::eip4844::BlobTransactionSidecar; #[cfg(feature = "c-kzg")] pub use alloy_eips::eip4844::BlobTransactionValidationError; -use alloc::vec::Vec; - /// A response to `GetPooledTransactions` that includes blob data, their commitments, and their /// corresponding proofs. /// @@ -36,7 +32,7 @@ impl BlobTransaction { /// Constructs a new [`BlobTransaction`] from a [`TransactionSigned`] and a /// [`BlobTransactionSidecar`]. /// - /// Returns an error if the signed transaction is not [`TxEip4844`] + /// Returns an error if the signed transaction is not [`Transaction::Eip4844`] pub fn try_from_signed( tx: TransactionSigned, sidecar: BlobTransactionSidecar, @@ -57,7 +53,7 @@ impl BlobTransaction { /// Verifies that the transaction's blob data, commitments, and proofs are all valid. /// - /// See also [`TxEip4844::validate_blob`] + /// See also [`alloy_consensus::TxEip4844::validate_blob`] #[cfg(feature = "c-kzg")] pub fn validate( &self, @@ -163,7 +159,7 @@ impl BlobTransaction { // The payload length is the length of the `tranascation_payload_body` list, plus the // length of the blobs, commitments, and proofs. - let payload_length = tx_length + self.transaction.sidecar.fields_len(); + let payload_length = tx_length + self.transaction.sidecar.rlp_encoded_fields_length(); // We use the calculated payload len to construct the first list header, which encompasses // everything in the tx - the length of the second, inner list header is part of @@ -188,74 +184,17 @@ impl BlobTransaction { /// Note: this should be used only when implementing other RLP decoding methods, and does not /// represent the full RLP decoding of the `PooledTransactionsElement` type. 
pub(crate) fn decode_inner(data: &mut &[u8]) -> alloy_rlp::Result { - // decode the _first_ list header for the rest of the transaction - let outer_header = Header::decode(data)?; - if !outer_header.list { - return Err(RlpError::Custom("PooledTransactions blob tx must be encoded as a list")) - } - - let outer_remaining_len = data.len(); - - // Now we need to decode the inner 4844 transaction and its signature: - // - // `[chain_id, nonce, max_priority_fee_per_gas, ..., y_parity, r, s]` - let inner_header = Header::decode(data)?; - if !inner_header.list { - return Err(RlpError::Custom( - "PooledTransactions inner blob tx must be encoded as a list", - )) - } - - let inner_remaining_len = data.len(); - - // inner transaction - let transaction = TxEip4844::decode_fields(data)?; - - // signature - let signature = Signature::decode_rlp_vrs(data)?; - - // the inner header only decodes the transaction and signature, so we check the length here - let inner_consumed = inner_remaining_len - data.len(); - if inner_consumed != inner_header.payload_length { - return Err(RlpError::UnexpectedLength) - } - - // All that's left are the blobs, commitments, and proofs - let sidecar = BlobTransactionSidecar::decode(data)?; - - // # Calculating the hash - // - // The full encoding of the `PooledTransaction` response is: - // `tx_type (0x03) || rlp([tx_payload_body, blobs, commitments, proofs])` - // - // The transaction hash however, is: - // `keccak256(tx_type (0x03) || rlp(tx_payload_body))` - // - // Note that this is `tx_payload_body`, not `[tx_payload_body]`, which would be - // `[[chain_id, nonce, max_priority_fee_per_gas, ...]]`, i.e. a list within a list. - // - // Because the pooled transaction encoding is different than the hash encoding for - // EIP-4844 transactions, we do not use the original buffer to calculate the hash. - // - // Instead, we use `encode_with_signature`, which RLP encodes the transaction with a - // signature for hashing without a header. We then hash the result. - let mut buf = Vec::new(); - transaction.encode_with_signature(&signature, &mut buf, false); - let hash = keccak256(&buf); - - // the outer header is for the entire transaction, so we check the length here - let outer_consumed = outer_remaining_len - data.len(); - if outer_consumed != outer_header.payload_length { - return Err(RlpError::UnexpectedLength) - } + let (transaction, signature, hash) = + TxEip4844WithSidecar::decode_signed_fields(data)?.into_parts(); - Ok(Self { transaction: TxEip4844WithSidecar { tx: transaction, sidecar }, hash, signature }) + Ok(Self { transaction, hash, signature }) } } /// Generates a [`BlobTransactionSidecar`] structure containing blobs, commitments, and proofs. 
#[cfg(all(feature = "c-kzg", any(test, feature = "arbitrary")))] pub fn generate_blob_sidecar(blobs: Vec) -> BlobTransactionSidecar { + use alloc::vec::Vec; use alloy_eips::eip4844::env_settings::EnvKzgSettings; use c_kzg::{KzgCommitment, KzgProof}; @@ -285,12 +224,12 @@ pub fn generate_blob_sidecar(blobs: Vec) -> BlobTransactionSidecar mod tests { use super::*; use crate::{kzg::Blob, PooledTransactionsElement}; + use alloc::vec::Vec; use alloy_eips::{ eip2718::{Decodable2718, Encodable2718}, eip4844::Bytes48, }; use alloy_primitives::hex; - use alloy_rlp::Encodable; use std::{fs, path::PathBuf, str::FromStr}; #[test] @@ -392,7 +331,7 @@ mod tests { let mut encoded_rlp = Vec::new(); // Encode the inner data of the BlobTransactionSidecar into RLP - sidecar.encode(&mut encoded_rlp); + sidecar.rlp_encode_fields(&mut encoded_rlp); // Assert the equality between the expected RLP from the JSON and the encoded RLP assert_eq!(json_value.get("rlp").unwrap().as_str().unwrap(), hex::encode(&encoded_rlp)); @@ -423,10 +362,11 @@ mod tests { let mut encoded_rlp = Vec::new(); // Encode the inner data of the BlobTransactionSidecar into RLP - sidecar.encode(&mut encoded_rlp); + sidecar.rlp_encode_fields(&mut encoded_rlp); // Decode the RLP-encoded data back into a BlobTransactionSidecar - let decoded_sidecar = BlobTransactionSidecar::decode(&mut encoded_rlp.as_slice()).unwrap(); + let decoded_sidecar = + BlobTransactionSidecar::rlp_decode_fields(&mut encoded_rlp.as_slice()).unwrap(); // Assert the equality between the original BlobTransactionSidecar and the decoded one assert_eq!(sidecar, decoded_sidecar); diff --git a/crates/storage/codecs/src/alloy/authorization_list.rs b/crates/storage/codecs/src/alloy/authorization_list.rs index 2b013c0d3..6dc36956d 100644 --- a/crates/storage/codecs/src/alloy/authorization_list.rs +++ b/crates/storage/codecs/src/alloy/authorization_list.rs @@ -44,11 +44,9 @@ impl Compact for SignedAuthorization { where B: bytes::BufMut + AsMut<[u8]>, { - let signature = self.signature(); - let (v, r, s) = (signature.v(), signature.r(), signature.s()); - buf.put_u8(v.y_parity_byte()); - buf.put_slice(r.as_le_slice()); - buf.put_slice(s.as_le_slice()); + buf.put_u8(self.y_parity()); + buf.put_slice(self.r().as_le_slice()); + buf.put_slice(self.s().as_le_slice()); // to_compact doesn't write the len to buffer. // By placing it as last, we don't need to store it either. 
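         // The resulting layout is: one byte for y_parity, then the 32-byte little-endian
         // r and s values (65 signature bytes in total), followed by the compact encoding
         // of the inner authorization fields, mirroring the read order in `from_compact`.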
@@ -56,17 +54,15 @@ impl Compact for SignedAuthorization { } fn from_compact(mut buf: &[u8], len: usize) -> (Self, &[u8]) { - let y = alloy_primitives::Parity::Parity(buf.get_u8() == 1); + let y_parity = buf.get_u8(); let r = U256::from_le_slice(&buf[0..32]); buf.advance(32); let s = U256::from_le_slice(&buf[0..32]); buf.advance(32); - let signature = alloy_primitives::Signature::from_rs_and_parity(r, s, y) - .expect("invalid authorization signature"); let (auth, buf) = AlloyAuthorization::from_compact(buf, len); - (auth.into_signed(signature), buf) + (Self::new_unchecked(auth, y_parity, r, s), buf) } } diff --git a/crates/transaction-pool/src/blobstore/disk.rs b/crates/transaction-pool/src/blobstore/disk.rs index e168a1c11..787d4985f 100644 --- a/crates/transaction-pool/src/blobstore/disk.rs +++ b/crates/transaction-pool/src/blobstore/disk.rs @@ -3,7 +3,6 @@ use crate::blobstore::{BlobStore, BlobStoreCleanupStat, BlobStoreError, BlobStoreSize}; use alloy_eips::eip4844::BlobAndProofV1; use alloy_primitives::{TxHash, B256}; -use alloy_rlp::{Decodable, Encodable}; use parking_lot::{Mutex, RwLock}; use reth_primitives::BlobTransactionSidecar; use schnellru::{ByLength, LruMap}; @@ -204,8 +203,8 @@ impl DiskFileBlobStoreInner { /// Ensures blob is in the blob cache and written to the disk. fn insert_one(&self, tx: B256, data: BlobTransactionSidecar) -> Result<(), BlobStoreError> { - let mut buf = Vec::with_capacity(data.fields_len()); - data.encode(&mut buf); + let mut buf = Vec::with_capacity(data.rlp_encoded_fields_length()); + data.rlp_encode_fields(&mut buf); self.blob_cache.lock().insert(tx, data); let size = self.write_one_encoded(tx, &buf)?; @@ -219,8 +218,8 @@ impl DiskFileBlobStoreInner { let raw = txs .iter() .map(|(tx, data)| { - let mut buf = Vec::with_capacity(data.fields_len()); - data.encode(&mut buf); + let mut buf = Vec::with_capacity(data.rlp_encoded_fields_length()); + data.rlp_encode_fields(&mut buf); (self.blob_disk_file(*tx), buf) }) .collect::>(); @@ -312,7 +311,7 @@ impl DiskFileBlobStoreInner { } } }; - BlobTransactionSidecar::decode(&mut data.as_slice()) + BlobTransactionSidecar::rlp_decode_fields(&mut data.as_slice()) .map(Some) .map_err(BlobStoreError::DecodeError) } @@ -322,7 +321,7 @@ impl DiskFileBlobStoreInner { self.read_many_raw(txs) .into_iter() .filter_map(|(tx, data)| { - BlobTransactionSidecar::decode(&mut data.as_slice()) + BlobTransactionSidecar::rlp_decode_fields(&mut data.as_slice()) .map(|sidecar| (tx, sidecar)) .ok() }) From cf4e7745427c8e57e576acb2458f10e6a89d89c0 Mon Sep 17 00:00:00 2001 From: Hai | RISE <150876604+hai-rise@users.noreply.github.com> Date: Wed, 23 Oct 2024 20:12:44 +0700 Subject: [PATCH 208/425] chore: `Ethereum` -> `Optimism` in comment of `optimism_payload` (#11998) --- crates/optimism/payload/src/builder.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index b6ab9b879..3ed00c49a 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -146,12 +146,12 @@ where } } -/// Constructs an Ethereum transaction payload from the transactions sent through the +/// Constructs an Optimism transaction payload from the transactions sent through the /// Payload attributes by the sequencer. If the `no_tx_pool` argument is passed in /// the payload attributes, the transaction pool will be ignored and the only transactions /// included in the payload will be those sent through the attributes. 
/// -/// Given build arguments including an Ethereum client, transaction pool, +/// Given build arguments including an Optimism client, transaction pool, /// and configuration, this function creates a transaction payload. Returns /// a result indicating success with the payload or an error in case of failure. #[inline] From 386379efd55e491013d010b0a377771317a21e14 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 23 Oct 2024 14:58:56 +0200 Subject: [PATCH 209/425] test(tokio-util): add unit tests for `EventSender` (#11980) --- crates/tokio-util/src/event_sender.rs | 93 +++++++++++++++++++++++++++ 1 file changed, 93 insertions(+) diff --git a/crates/tokio-util/src/event_sender.rs b/crates/tokio-util/src/event_sender.rs index a4e981538..16208ee19 100644 --- a/crates/tokio-util/src/event_sender.rs +++ b/crates/tokio-util/src/event_sender.rs @@ -40,3 +40,96 @@ impl EventSender { EventStream::new(self.sender.subscribe()) } } + +#[cfg(test)] +mod tests { + use super::*; + use tokio::{ + task, + time::{timeout, Duration}, + }; + use tokio_stream::StreamExt; + + #[tokio::test] + async fn test_event_broadcast_to_listener() { + let sender = EventSender::default(); + + // Create a listener for the events + let mut listener = sender.new_listener(); + + // Broadcast an event + sender.notify("event1"); + + // Check if the listener receives the event + let received_event = listener.next().await; + assert_eq!(received_event, Some("event1")); + } + + #[tokio::test] + async fn test_event_no_listener() { + let sender = EventSender::default(); + + // Broadcast an event with no listeners + sender.notify("event2"); + + // Ensure it doesn't panic or fail when no listeners are present + // (this test passes if it runs without errors). 
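+        // Note: tokio's broadcast channel reports an error from `send` when no receivers
+        // are subscribed, so this test implicitly relies on `notify` discarding that error
+        // instead of propagating or panicking on it.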
+ } + + #[tokio::test] + async fn test_multiple_listeners_receive_event() { + let sender = EventSender::default(); + + // Create two listeners + let mut listener1 = sender.new_listener(); + let mut listener2 = sender.new_listener(); + + // Broadcast an event + sender.notify("event3"); + + // Both listeners should receive the same event + let event1 = listener1.next().await; + let event2 = listener2.next().await; + + assert_eq!(event1, Some("event3")); + assert_eq!(event2, Some("event3")); + } + + #[tokio::test] + async fn test_bounded_channel_size() { + // Create a channel with size 2 + let sender = EventSender::new(2); + + // Create a listener + let mut listener = sender.new_listener(); + + // Broadcast 3 events, which exceeds the channel size + sender.notify("event4"); + sender.notify("event5"); + sender.notify("event6"); + + // Only the last two should be received due to the size limit + let received_event1 = listener.next().await; + let received_event2 = listener.next().await; + + assert_eq!(received_event1, Some("event5")); + assert_eq!(received_event2, Some("event6")); + } + + #[tokio::test] + async fn test_event_listener_timeout() { + let sender = EventSender::default(); + let mut listener = sender.new_listener(); + + // Broadcast an event asynchronously + task::spawn(async move { + tokio::time::sleep(Duration::from_millis(50)).await; + sender.notify("delayed_event"); + }); + + // Use a timeout to ensure that the event is received within a certain time + let result = timeout(Duration::from_millis(100), listener.next()).await; + assert!(result.is_ok()); + assert_eq!(result.unwrap(), Some("delayed_event")); + } +} From 5e0ba4104d85dcf2be9a7df91347cb84996d245f Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 23 Oct 2024 15:11:59 +0200 Subject: [PATCH 210/425] tx-pool: migrate `ensure_max_init_code_size` to `PoolTransaction` trait (#11976) --- crates/transaction-pool/src/traits.rs | 22 ++++++++++++++++- crates/transaction-pool/src/validate/eth.rs | 26 ++++----------------- 2 files changed, 26 insertions(+), 22 deletions(-) diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index 56da11fe6..cedec5606 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -2,7 +2,7 @@ use crate::{ blobstore::BlobStoreError, - error::PoolResult, + error::{InvalidPoolTransactionError, PoolResult}, pool::{state::SubPool, BestTransactionFilter, TransactionEvents}, validate::ValidPoolTransaction, AllTransactionsEvents, @@ -961,6 +961,26 @@ pub trait PoolTransaction: fmt::Debug + Send + Sync + Clone { /// Returns `chain_id` fn chain_id(&self) -> Option; + + /// Ensures that the transaction's code size does not exceed the provided `max_init_code_size`. + /// + /// This is specifically relevant for contract creation transactions ([`TxKind::Create`]), + /// where the input data contains the initialization code. If the input code size exceeds + /// the configured limit, an [`InvalidPoolTransactionError::ExceedsMaxInitCodeSize`] error is + /// returned. 
+ fn ensure_max_init_code_size( + &self, + max_init_code_size: usize, + ) -> Result<(), InvalidPoolTransactionError> { + if self.kind().is_create() && self.input().len() > max_init_code_size { + Err(InvalidPoolTransactionError::ExceedsMaxInitCodeSize( + self.size(), + max_init_code_size, + )) + } else { + Ok(()) + } + } } /// Super trait for transactions that can be converted to and from Eth transactions diff --git a/crates/transaction-pool/src/validate/eth.rs b/crates/transaction-pool/src/validate/eth.rs index 22744c58a..bf7749fb8 100644 --- a/crates/transaction-pool/src/validate/eth.rs +++ b/crates/transaction-pool/src/validate/eth.rs @@ -8,7 +8,7 @@ use crate::{ }, traits::TransactionOrigin, validate::{ValidTransaction, ValidationTask, MAX_INIT_CODE_BYTE_SIZE}, - EthBlobTransactionSidecar, EthPoolTransaction, LocalTransactionConfig, PoolTransaction, + EthBlobTransactionSidecar, EthPoolTransaction, LocalTransactionConfig, TransactionValidationOutcome, TransactionValidationTaskExecutor, TransactionValidator, }; use alloy_consensus::constants::{ @@ -223,7 +223,7 @@ where // Check whether the init code size has been exceeded. if self.fork_tracker.is_shanghai_activated() { - if let Err(err) = ensure_max_init_code_size(&transaction, MAX_INIT_CODE_BYTE_SIZE) { + if let Err(err) = transaction.ensure_max_init_code_size(MAX_INIT_CODE_BYTE_SIZE) { return TransactionValidationOutcome::Invalid(transaction, err) } } @@ -711,7 +711,7 @@ impl EthTransactionValidatorBuilder { EthTransactionValidator { inner: Arc::new(inner) } } - /// Builds a the [`EthTransactionValidator`] and spawns validation tasks via the + /// Builds a [`EthTransactionValidator`] and spawns validation tasks via the /// [`TransactionValidationTaskExecutor`] /// /// The validator will spawn `additional_tasks` additional tasks for validation. @@ -783,22 +783,6 @@ impl ForkTracker { } } -/// Ensure that the code size is not greater than `max_init_code_size`. -/// `max_init_code_size` should be configurable so this will take it as an argument. -pub fn ensure_max_init_code_size( - transaction: &T, - max_init_code_size: usize, -) -> Result<(), InvalidPoolTransactionError> { - if transaction.kind().is_create() && transaction.input().len() > max_init_code_size { - Err(InvalidPoolTransactionError::ExceedsMaxInitCodeSize( - transaction.size(), - max_init_code_size, - )) - } else { - Ok(()) - } -} - /// Ensures that gas limit of the transaction exceeds the intrinsic gas of the transaction. /// /// Caution: This only checks past the Merge hardfork. 
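 /// For reference, a plain value transfer costs 21000 intrinsic gas, and calldata bytes as
 /// well as contract creation add to that floor, so a transaction whose gas limit is below
 /// its intrinsic gas can be rejected before execution is attempted.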
@@ -833,8 +817,8 @@ pub fn ensure_intrinsic_gas( mod tests { use super::*; use crate::{ - blobstore::InMemoryBlobStore, error::PoolErrorKind, CoinbaseTipOrdering, - EthPooledTransaction, Pool, TransactionPool, + blobstore::InMemoryBlobStore, error::PoolErrorKind, traits::PoolTransaction, + CoinbaseTipOrdering, EthPooledTransaction, Pool, TransactionPool, }; use alloy_eips::eip2718::Decodable2718; use alloy_primitives::{hex, U256}; From 252cdf7f3576f76a0ffe6b4086105655b11ba420 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 23 Oct 2024 15:35:24 +0200 Subject: [PATCH 211/425] storage: add unit tests for `StorageRevertsIter` (#11999) --- .../src/bundle_state/state_reverts.rs | 102 ++++++++++++++++++ 1 file changed, 102 insertions(+) diff --git a/crates/storage/provider/src/bundle_state/state_reverts.rs b/crates/storage/provider/src/bundle_state/state_reverts.rs index 09b892562..3e1ba2a4b 100644 --- a/crates/storage/provider/src/bundle_state/state_reverts.rs +++ b/crates/storage/provider/src/bundle_state/state_reverts.rs @@ -76,3 +76,105 @@ where } } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_storage_reverts_iter_empty() { + // Create empty sample data for reverts and wiped entries. + let reverts: Vec<(B256, RevertToSlot)> = vec![]; + let wiped: Vec<(B256, U256)> = vec![]; + + // Create the iterator with the empty data. + let iter = StorageRevertsIter::new(reverts, wiped); + + // Iterate and collect results into a vector for verification. + let results: Vec<_> = iter.collect(); + + // Verify that the results are empty. + assert_eq!(results, vec![]); + } + + #[test] + fn test_storage_reverts_iter_reverts_only() { + // Create sample data for only reverts. + let reverts = vec![ + (B256::from_slice(&[4; 32]), RevertToSlot::Destroyed), + (B256::from_slice(&[5; 32]), RevertToSlot::Some(U256::from(40))), + ]; + + // Create the iterator with only reverts and no wiped entries. + let iter = StorageRevertsIter::new(reverts, vec![]); + + // Iterate and collect results into a vector for verification. + let results: Vec<_> = iter.collect(); + + // Verify the output order and values. + assert_eq!( + results, + vec![ + (B256::from_slice(&[4; 32]), U256::ZERO), // Revert slot previous value + (B256::from_slice(&[5; 32]), U256::from(40)), // Only revert present. + ] + ); + } + + #[test] + fn test_storage_reverts_iter_wiped_only() { + // Create sample data for only wiped entries. + let wiped = vec![ + (B256::from_slice(&[6; 32]), U256::from(50)), + (B256::from_slice(&[7; 32]), U256::from(60)), + ]; + + // Create the iterator with only wiped entries and no reverts. + let iter = StorageRevertsIter::new(vec![], wiped); + + // Iterate and collect results into a vector for verification. + let results: Vec<_> = iter.collect(); + + // Verify the output order and values. + assert_eq!( + results, + vec![ + (B256::from_slice(&[6; 32]), U256::from(50)), // Only wiped present. + (B256::from_slice(&[7; 32]), U256::from(60)), // Only wiped present. + ] + ); + } + + #[test] + fn test_storage_reverts_iter_interleaved() { + // Create sample data for interleaved reverts and wiped entries. 
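+        // Both inputs are sorted by key; the iterator is expected to merge them and, when a
+        // key appears in both (as with key [8; 32] here), yield the revert value while
+        // dropping the wiped value for that key.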
+ let reverts = vec![ + (B256::from_slice(&[8; 32]), RevertToSlot::Some(U256::from(70))), + (B256::from_slice(&[9; 32]), RevertToSlot::Some(U256::from(80))), + // Some higher key than wiped + (B256::from_slice(&[15; 32]), RevertToSlot::Some(U256::from(90))), + ]; + + let wiped = vec![ + (B256::from_slice(&[8; 32]), U256::from(75)), // Same key as revert + (B256::from_slice(&[10; 32]), U256::from(85)), // Wiped with new key + ]; + + // Create the iterator with the sample data. + let iter = StorageRevertsIter::new(reverts, wiped); + + // Iterate and collect results into a vector for verification. + let results: Vec<_> = iter.collect(); + + // Verify the output order and values. + assert_eq!( + results, + vec![ + (B256::from_slice(&[8; 32]), U256::from(70)), // Revert takes priority. + (B256::from_slice(&[9; 32]), U256::from(80)), // Only revert present. + (B256::from_slice(&[10; 32]), U256::from(85)), // Wiped entry. + (B256::from_slice(&[15; 32]), U256::from(90)), // WGreater revert entry + ] + ); + } +} From 55d98bbc6effb0dde79b3210d0eea2a0424ebc64 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 23 Oct 2024 15:37:39 +0200 Subject: [PATCH 212/425] fix: check failed find nodes requests before sending new ones (#11997) --- crates/net/discv4/src/config.rs | 2 +- crates/net/discv4/src/lib.rs | 135 +++++++++++++++++++++++++++----- 2 files changed, 118 insertions(+), 19 deletions(-) diff --git a/crates/net/discv4/src/config.rs b/crates/net/discv4/src/config.rs index c934f3361..38467304d 100644 --- a/crates/net/discv4/src/config.rs +++ b/crates/net/discv4/src/config.rs @@ -23,7 +23,7 @@ pub struct Discv4Config { pub udp_egress_message_buffer: usize, /// Size of the channel buffer for incoming messages. pub udp_ingress_message_buffer: usize, - /// The number of allowed failures for `FindNode` requests. Default: 5. + /// The number of allowed consecutive failures for `FindNode` requests. Default: 5. pub max_find_node_failures: u8, /// The interval to use when checking for expired nodes that need to be re-pinged. Default: /// 10min. diff --git a/crates/net/discv4/src/lib.rs b/crates/net/discv4/src/lib.rs index e955e45df..779c7ee63 100644 --- a/crates/net/discv4/src/lib.rs +++ b/crates/net/discv4/src/lib.rs @@ -743,7 +743,8 @@ impl Discv4Service { trace!(target: "discv4", ?target, "Starting lookup"); let target_key = kad_key(target); - // Start a lookup context with the 16 (MAX_NODES_PER_BUCKET) closest nodes + // Start a lookup context with the 16 (MAX_NODES_PER_BUCKET) closest nodes to which we have + // a valid endpoint proof let ctx = LookupContext::new( target_key.clone(), self.kbuckets @@ -772,7 +773,10 @@ impl Discv4Service { trace!(target: "discv4", ?target, num = closest.len(), "Start lookup closest nodes"); for node in closest { - self.find_node(&node, ctx.clone()); + // here we still want to check against previous request failures and if necessary + // re-establish a new endpoint proof because it can be the case that the other node lost + // our entry and no longer has an endpoint proof on their end + self.find_node_checked(&node, ctx.clone()); } } @@ -788,6 +792,22 @@ impl Discv4Service { self.pending_find_nodes.insert(node.id, FindNodeRequest::new(ctx)); } + /// Sends a new `FindNode` packet to the node with `target` as the lookup target but checks + /// whether we should should send a new ping first to renew the endpoint proof by checking the + /// previously failed findNode requests. It could be that the node is no longer reachable or + /// lost our entry. 
+ fn find_node_checked(&mut self, node: &NodeRecord, ctx: LookupContext) { + let max_failures = self.config.max_find_node_failures; + let needs_ping = self + .on_entry(node.id, |entry| entry.exceeds_find_node_failures(max_failures)) + .unwrap_or(true); + if needs_ping { + self.try_ping(*node, PingReason::Lookup(*node, ctx)) + } else { + self.find_node(node, ctx) + } + } + /// Notifies all listeners. /// /// Removes all listeners that are closed. @@ -860,7 +880,7 @@ impl Discv4Service { self.kbuckets.buckets_iter().fold(0, |count, bucket| count + bucket.num_connected()) } - /// Check if the peer has a bond + /// Check if the peer has an active bond. fn has_bond(&self, remote_id: PeerId, remote_ip: IpAddr) -> bool { if let Some(timestamp) = self.received_pongs.last_pong(remote_id, remote_ip) { if timestamp.elapsed() < self.config.bond_expiration { @@ -870,6 +890,19 @@ impl Discv4Service { false } + /// Applies a closure on the pending or present [`NodeEntry`]. + fn on_entry(&mut self, peer_id: PeerId, f: F) -> Option + where + F: FnOnce(&NodeEntry) -> R, + { + let key = kad_key(peer_id); + match self.kbuckets.entry(&key) { + BucketEntry::Present(entry, _) => Some(f(entry.value())), + BucketEntry::Pending(mut entry, _) => Some(f(entry.value())), + _ => None, + } + } + /// Update the entry on RE-ping. /// /// Invoked when we received the Pong to our [`PingReason::RePing`] ping. @@ -929,7 +962,7 @@ impl Discv4Service { match self.kbuckets.entry(&key) { kbucket::Entry::Present(mut entry, old_status) => { // endpoint is now proven - entry.value_mut().has_endpoint_proof = true; + entry.value_mut().establish_proof(); entry.value_mut().update_with_enr(last_enr_seq); if !old_status.is_connected() { @@ -945,7 +978,7 @@ impl Discv4Service { } kbucket::Entry::Pending(mut entry, mut status) => { // endpoint is now proven - entry.value().has_endpoint_proof = true; + entry.value().establish_proof(); entry.value().update_with_enr(last_enr_seq); if !status.is_connected() { @@ -1129,6 +1162,8 @@ impl Discv4Service { // try to send it ctx.unmark_queried(record.id); } else { + // we just received a ping from that peer so we can send a find node request + // directly self.find_node(&record, ctx); } } @@ -1419,14 +1454,28 @@ impl Discv4Service { BucketEntry::SelfEntry => { // we received our own node entry } - BucketEntry::Present(mut entry, _) => { - if entry.value_mut().has_endpoint_proof { - self.find_node(&closest, ctx.clone()); + BucketEntry::Present(entry, _) => { + if entry.value().has_endpoint_proof { + if entry + .value() + .exceeds_find_node_failures(self.config.max_find_node_failures) + { + self.try_ping(closest, PingReason::Lookup(closest, ctx.clone())) + } else { + self.find_node(&closest, ctx.clone()); + } } } BucketEntry::Pending(mut entry, _) => { if entry.value().has_endpoint_proof { - self.find_node(&closest, ctx.clone()); + if entry + .value() + .exceeds_find_node_failures(self.config.max_find_node_failures) + { + self.try_ping(closest, PingReason::Lookup(closest, ctx.clone())) + } else { + self.find_node(&closest, ctx.clone()); + } } } } @@ -1486,27 +1535,27 @@ impl Discv4Service { self.remove_node(node_id); } - self.evict_failed_neighbours(now); + self.evict_failed_find_nodes(now); } /// Handles failed responses to `FindNode` - fn evict_failed_neighbours(&mut self, now: Instant) { - let mut failed_neighbours = Vec::new(); + fn evict_failed_find_nodes(&mut self, now: Instant) { + let mut failed_find_nodes = Vec::new(); self.pending_find_nodes.retain(|node_id, find_node_request| { if 
now.duration_since(find_node_request.sent_at) > self.config.neighbours_expiration { if !find_node_request.answered { // node actually responded but with fewer entries than expected, but we don't // treat this as an hard error since it responded. - failed_neighbours.push(*node_id); + failed_find_nodes.push(*node_id); } return false } true }); - trace!(target: "discv4", num=%failed_neighbours.len(), "processing failed neighbours"); + trace!(target: "discv4", num=%failed_find_nodes.len(), "processing failed find nodes"); - for node_id in failed_neighbours { + for node_id in failed_find_nodes { let key = kad_key(node_id); let failures = match self.kbuckets.entry(&key) { kbucket::Entry::Present(mut entry, _) => { @@ -1523,7 +1572,7 @@ impl Discv4Service { // if the node failed to respond anything useful multiple times, remove the node from // the table, but only if there are enough other nodes in the bucket (bucket must be at // least half full) - if failures > (self.config.max_find_node_failures as usize) { + if failures > self.config.max_find_node_failures { self.soft_remove_node(node_id); } } @@ -2216,8 +2265,8 @@ struct NodeEntry { last_enr_seq: Option, /// `ForkId` if retrieved via ENR requests. fork_id: Option, - /// Counter for failed findNode requests. - find_node_failures: usize, + /// Counter for failed _consecutive_ findNode requests. + find_node_failures: u8, /// Whether the endpoint of the peer is proven. has_endpoint_proof: bool, } @@ -2244,6 +2293,17 @@ impl NodeEntry { node } + /// Marks the entry with an established proof and resets the consecutive failure counter. + fn establish_proof(&mut self) { + self.has_endpoint_proof = true; + self.find_node_failures = 0; + } + + /// Returns true if the tracked find node failures exceed the max amount + const fn exceeds_find_node_failures(&self, max_failures: u8) -> bool { + self.find_node_failures >= max_failures + } + /// Updates the last timestamp and sets the enr seq fn update_with_enr(&mut self, last_enr_seq: Option) -> Option { self.update_now(|s| std::mem::replace(&mut s.last_enr_seq, last_enr_seq)) @@ -2660,6 +2720,45 @@ mod tests { assert_eq!(ctx.inner.closest_nodes.borrow().len(), 1); } + #[tokio::test] + async fn test_reping_on_find_node_failures() { + reth_tracing::init_test_tracing(); + + let config = Discv4Config::builder().build(); + let (_discv4, mut service) = create_discv4_with_config(config).await; + + let target = PeerId::random(); + + let id = PeerId::random(); + let key = kad_key(id); + let record = NodeRecord::new("0.0.0.0:0".parse().unwrap(), id); + + let mut entry = NodeEntry::new_proven(record); + entry.find_node_failures = u8::MAX; + let _ = service.kbuckets.insert_or_update( + &key, + entry, + NodeStatus { + direction: ConnectionDirection::Incoming, + state: ConnectionState::Connected, + }, + ); + + service.lookup(target); + assert_eq!(service.pending_find_nodes.len(), 0); + assert_eq!(service.pending_pings.len(), 1); + + service.update_on_pong(record, None); + + service + .on_entry(record.id, |entry| { + // reset on pong + assert_eq!(entry.find_node_failures, 0); + assert!(entry.has_endpoint_proof); + }) + .unwrap(); + } + #[tokio::test] async fn test_service_commands() { reth_tracing::init_test_tracing(); From ab407e74447fc02d7d48fc2bbe0f6807787ec0a0 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 23 Oct 2024 15:39:41 +0200 Subject: [PATCH 213/425] chore: bump alloy 054 (#12000) --- Cargo.lock | 104 ++++++++++++++++++++++++++--------------------------- Cargo.toml | 54 ++++++++++++++-------------- 2 
files changed, 79 insertions(+), 79 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7e10bdb0a..f2d134cb8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -112,9 +112,9 @@ dependencies = [ [[package]] name = "alloy-consensus" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdf02dfacfc815214f9b54ff50d54900ba527a68fd73e2c5637ced3460005045" +checksum = "41ed961a48297c732a5d97ee321aa8bb5009ecadbcb077d8bec90cb54e651629" dependencies = [ "alloy-eips", "alloy-primitives", @@ -177,9 +177,9 @@ dependencies = [ [[package]] name = "alloy-eips" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "769da342b6bcd945013925ef4c40763cc82f11e002c60702dba8b444bb60e5a7" +checksum = "b69e06cf9c37be824b9d26d6d101114fdde6af0c87de2828b414c05c4b3daa71" dependencies = [ "alloy-eip2930", "alloy-eip7702", @@ -198,9 +198,9 @@ dependencies = [ [[package]] name = "alloy-genesis" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c698ce0ada980b17f0323e1a28c7da8a2e9abc6dff5be9ee33d1525b28ac46b6" +checksum = "dde15e14944a88bd6a57d325e9a49b75558746fe16aaccc79713ae50a6a9574c" dependencies = [ "alloy-primitives", "alloy-serde", @@ -221,9 +221,9 @@ dependencies = [ [[package]] name = "alloy-json-rpc" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1050e1d65524c030b17442b6546b564da51fdab7f71bd534b001ba65f2ebb16" +checksum = "af5979e0d5a7bf9c7eb79749121e8256e59021af611322aee56e77e20776b4b3" dependencies = [ "alloy-primitives", "alloy-sol-types", @@ -235,9 +235,9 @@ dependencies = [ [[package]] name = "alloy-network" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da34a18446a27734473af3d77eb21c5ebbdf97ea8eb65c39c0b50916bc659023" +checksum = "204237129086ce5dc17a58025e93739b01b45313841f98fa339eb1d780511e57" dependencies = [ "alloy-consensus", "alloy-eips", @@ -269,9 +269,9 @@ dependencies = [ [[package]] name = "alloy-node-bindings" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439fc6a933b9f8e8b272a8cac35dbeabaf2b2eaf9590482bebedb5782153118e" +checksum = "27444ea67d360508753022807cdd0b49a95c878924c9c5f8f32668b7d7768245" dependencies = [ "alloy-genesis", "alloy-primitives", @@ -318,9 +318,9 @@ dependencies = [ [[package]] name = "alloy-provider" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c45dbc0e3630becef9e988b69d43339f68d67e32a854e3c855bc28bd5031895b" +checksum = "4814d141ede360bb6cd1b4b064f1aab9de391e7c4d0d4d50ac89ea4bc1e25fbd" dependencies = [ "alloy-chains", "alloy-consensus", @@ -359,9 +359,9 @@ dependencies = [ [[package]] name = "alloy-pubsub" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e3961a56e10f44bfd69dd3f4b0854b90b84c612b0c43708e738933e8b47f93a" +checksum = "96ba46eb69ddf7a9925b81f15229cb74658e6eebe5dd30a5b74e2cd040380573" dependencies = [ "alloy-json-rpc", "alloy-primitives", @@ -400,9 +400,9 @@ dependencies = [ [[package]] name = "alloy-rpc-client" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "917e5504e4f8f7e39bdc322ff81589ed54c1e462240adaeb58162c2d986a5a2b" +checksum = 
"7fc2bd1e7403463a5f2c61e955bcc9d3072b63aa177442b0f9aa6a6d22a941e3" dependencies = [ "alloy-json-rpc", "alloy-primitives", @@ -425,9 +425,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07c7eb2dc6db1dd41e5e7bd2b98a38813854efc30e034afd90d1e420e7f3de2b" +checksum = "eea9bf1abdd506f985a53533f5ac01296bcd6102c5e139bbc5d40bc468d2c916" dependencies = [ "alloy-primitives", "alloy-rpc-types-engine", @@ -438,9 +438,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-admin" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd468a4e3eddcd9d612cad657852de4b7475ac2080e7af9224fbf1df20ddffe0" +checksum = "ea02c25541fb19eaac4278aa5c41d2d7e0245898887e54a74bfc0f3103e99415" dependencies = [ "alloy-genesis", "alloy-primitives", @@ -450,9 +450,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-anvil" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2640928d9b1d43bb1cec7a0d615e10c2b407c5bd8ff1fcbe49e6318a2b62d731" +checksum = "2382fc63fb0cf3e02818d547b80cb66cc49a31f8803d0c328402b2008bc13650" dependencies = [ "alloy-primitives", "alloy-serde", @@ -461,9 +461,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-beacon" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64f731ad2ef8d7dd75a4d28214f4922a5b683feee1e6df35bd7b427315f94366" +checksum = "45357a642081c8ce235c0ad990c4e9279f5f18a723545076b38cfcc05cc25234" dependencies = [ "alloy-eips", "alloy-primitives", @@ -475,9 +475,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-debug" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06bd0757bfb3eccde06ee3f4e378f5839fe923d40956cff586018d4427a15bb5" +checksum = "a5afe3ab1038f90faf56304aa0adf1e6a8c9844615d8f83967f932f3a70390b1" dependencies = [ "alloy-primitives", "serde", @@ -485,9 +485,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-engine" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d3d95c3bf03efbb7bdc1d097e2931f520aac47438b709ccd8f065a7793dd371" +checksum = "886d22d41992287a235af2f3af4299b5ced2bcafb81eb835572ad35747476946" dependencies = [ "alloy-consensus", "alloy-eips", @@ -506,9 +506,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-eth" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e855b0daccf2320ba415753c3fed422abe9d3ad5d77b2d6cafcc9bcf32fe387f" +checksum = "00b034779a4850b4b03f5be5ea674a1cf7d746b2da762b34d1860ab45e48ca27" dependencies = [ "alloy-consensus", "alloy-eips", @@ -527,9 +527,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-mev" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eca3753b9894235f915437f908644e737d8714c686ce4e8d03afbf585b23f074" +checksum = "3246948dfa5f5060a9abe04233d741ea656ef076b12958f3242416ce9f375058" dependencies = [ "alloy-eips", "alloy-primitives", @@ -540,9 +540,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-trace" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae58a997afde032cd021547c960a53eef6245f47969dd71886e9f63fb45a6048" +checksum = 
"4e5fb6c5c401321f802f69dcdb95b932f30f8158f6798793f914baac5995628e" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -554,9 +554,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-txpool" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "667e45c882fda207d4cc94c4bb35e24a23347955113dcb236a5e4e0eaddef826" +checksum = "9ad066b49c3b1b5f64cdd2399177a19926a6a15db2dbf11e2098de621f9e7480" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -566,9 +566,9 @@ dependencies = [ [[package]] name = "alloy-serde" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35c2661ca6785add8fc37aff8005439c806ffad58254c19939c6f59ac0d6596e" +checksum = "028e72eaa9703e4882344983cfe7636ce06d8cce104a78ea62fd19b46659efc4" dependencies = [ "alloy-primitives", "arbitrary", @@ -578,9 +578,9 @@ dependencies = [ [[package]] name = "alloy-signer" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67eca011160d18a7dc6d8cdc1e8dc13e2e86c908f8e41b02aa76e429d6fe7085" +checksum = "592c185d7100258c041afac51877660c7bf6213447999787197db4842f0e938e" dependencies = [ "alloy-primitives", "async-trait", @@ -592,9 +592,9 @@ dependencies = [ [[package]] name = "alloy-signer-local" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c54b195a6ee5a83f32e7c697b4e6b565966737ed5a2ef9176bbbb39f720d023" +checksum = "6614f02fc1d5b079b2a4a5320018317b506fd0a6d67c1fd5542a71201724986c" dependencies = [ "alloy-consensus", "alloy-network", @@ -680,9 +680,9 @@ dependencies = [ [[package]] name = "alloy-transport" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e4a136e733f55fef0870b81e1f8f1db28e78973d1b1ae5a5df642ba39538a07" +checksum = "be77579633ebbc1266ae6fd7694f75c408beb1aeb6865d0b18f22893c265a061" dependencies = [ "alloy-json-rpc", "base64 0.22.1", @@ -700,9 +700,9 @@ dependencies = [ [[package]] name = "alloy-transport-http" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a6b358a89b6d107b92d09b61a61fbc04243942182709752c796f4b29402cead" +checksum = "91fd1a5d0827939847983b46f2f79510361f901dc82f8e3c38ac7397af142c6e" dependencies = [ "alloy-json-rpc", "alloy-transport", @@ -715,9 +715,9 @@ dependencies = [ [[package]] name = "alloy-transport-ipc" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a899c43b7f5e3bc83762dfe5128fccd9cfa99f1f03c5f26bbfb2495ae8dcd35" +checksum = "8073d1186bfeeb8fbdd1292b6f1a0731f3aed8e21e1463905abfae0b96a887a6" dependencies = [ "alloy-json-rpc", "alloy-pubsub", @@ -734,9 +734,9 @@ dependencies = [ [[package]] name = "alloy-transport-ws" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d27aac1246e13c9e6fa0c784fbb0c56872c6224f78dbde388bb2213ccdf8af02" +checksum = "61f27837bb4a1d6c83a28231c94493e814882f0e9058648a97e908a5f3fc9fcf" dependencies = [ "alloy-pubsub", "alloy-transport", diff --git a/Cargo.toml b/Cargo.toml index e8f10229e..22a78979d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -432,39 +432,39 @@ alloy-rlp = "0.3.4" alloy-sol-types = "0.8.0" alloy-trie = { version = "0.7", default-features = false } -alloy-consensus = { version = "0.5.3", default-features = false } 
-alloy-eips = { version = "0.5.3", default-features = false } -alloy-genesis = { version = "0.5.3", default-features = false } -alloy-json-rpc = { version = "0.5.3", default-features = false } -alloy-network = { version = "0.5.3", default-features = false } -alloy-network-primitives = { version = "0.5.3", default-features = false } -alloy-node-bindings = { version = "0.5.3", default-features = false } -alloy-provider = { version = "0.5.3", features = [ +alloy-consensus = { version = "0.5.4", default-features = false } +alloy-eips = { version = "0.5.4", default-features = false } +alloy-genesis = { version = "0.5.4", default-features = false } +alloy-json-rpc = { version = "0.5.4", default-features = false } +alloy-network = { version = "0.5.4", default-features = false } +alloy-network-primitives = { version = "0.5.4", default-features = false } +alloy-node-bindings = { version = "0.5.4", default-features = false } +alloy-provider = { version = "0.5.4", features = [ "reqwest", ], default-features = false } -alloy-pubsub = { version = "0.5.3", default-features = false } -alloy-rpc-client = { version = "0.5.3", default-features = false } -alloy-rpc-types = { version = "0.5.3", features = [ +alloy-pubsub = { version = "0.5.4", default-features = false } +alloy-rpc-client = { version = "0.5.4", default-features = false } +alloy-rpc-types = { version = "0.5.4", features = [ "eth", ], default-features = false } -alloy-rpc-types-admin = { version = "0.5.3", default-features = false } -alloy-rpc-types-anvil = { version = "0.5.3", default-features = false } -alloy-rpc-types-beacon = { version = "0.5.3", default-features = false } -alloy-rpc-types-debug = { version = "0.5.3", default-features = false } -alloy-rpc-types-engine = { version = "0.5.3", default-features = false } -alloy-rpc-types-eth = { version = "0.5.3", default-features = false } -alloy-rpc-types-mev = { version = "0.5.3", default-features = false } -alloy-rpc-types-trace = { version = "0.5.3", default-features = false } -alloy-rpc-types-txpool = { version = "0.5.3", default-features = false } -alloy-serde = { version = "0.5.3", default-features = false } -alloy-signer = { version = "0.5.3", default-features = false } -alloy-signer-local = { version = "0.5.3", default-features = false } -alloy-transport = { version = "0.5.3" } -alloy-transport-http = { version = "0.5.3", features = [ +alloy-rpc-types-admin = { version = "0.5.4", default-features = false } +alloy-rpc-types-anvil = { version = "0.5.4", default-features = false } +alloy-rpc-types-beacon = { version = "0.5.4", default-features = false } +alloy-rpc-types-debug = { version = "0.5.4", default-features = false } +alloy-rpc-types-engine = { version = "0.5.4", default-features = false } +alloy-rpc-types-eth = { version = "0.5.4", default-features = false } +alloy-rpc-types-mev = { version = "0.5.4", default-features = false } +alloy-rpc-types-trace = { version = "0.5.4", default-features = false } +alloy-rpc-types-txpool = { version = "0.5.4", default-features = false } +alloy-serde = { version = "0.5.4", default-features = false } +alloy-signer = { version = "0.5.4", default-features = false } +alloy-signer-local = { version = "0.5.4", default-features = false } +alloy-transport = { version = "0.5.4" } +alloy-transport-http = { version = "0.5.4", features = [ "reqwest-rustls-tls", ], default-features = false } -alloy-transport-ipc = { version = "0.5.3", default-features = false } -alloy-transport-ws = { version = "0.5.3", default-features = false } +alloy-transport-ipc = { 
version = "0.5.4", default-features = false } +alloy-transport-ws = { version = "0.5.4", default-features = false } # op op-alloy-rpc-types = "0.5" From 8a40d5c6aabd8f232b9aaacce0ca474a480a9b64 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Wed, 23 Oct 2024 22:40:14 +0900 Subject: [PATCH 214/425] feat(providers): add `AtomicBlockchainProvider` (#11705) Co-authored-by: Matthias Seitz --- crates/chain-state/src/in_memory.rs | 69 +- crates/chain-state/src/lib.rs | 2 +- crates/chain-state/src/memory_overlay.rs | 349 +-- crates/engine/service/src/service.rs | 4 +- .../src/providers/blockchain_provider.rs | 1137 +--------- .../provider/src/providers/consistent.rs | 1871 +++++++++++++++++ crates/storage/provider/src/providers/mod.rs | 3 + 7 files changed, 2220 insertions(+), 1215 deletions(-) create mode 100644 crates/storage/provider/src/providers/consistent.rs diff --git a/crates/chain-state/src/in_memory.rs b/crates/chain-state/src/in_memory.rs index be33e1fd7..a850e6652 100644 --- a/crates/chain-state/src/in_memory.rs +++ b/crates/chain-state/src/in_memory.rs @@ -4,7 +4,7 @@ use crate::{ CanonStateNotification, CanonStateNotificationSender, CanonStateNotifications, ChainInfoTracker, MemoryOverlayStateProvider, }; -use alloy_eips::BlockNumHash; +use alloy_eips::{BlockHashOrNumber, BlockNumHash}; use alloy_primitives::{map::HashMap, Address, TxHash, B256}; use parking_lot::RwLock; use reth_chainspec::ChainInfo; @@ -514,7 +514,7 @@ impl CanonicalInMemoryState { historical: StateProviderBox, ) -> MemoryOverlayStateProvider { let in_memory = if let Some(state) = self.state_by_hash(hash) { - state.chain().into_iter().map(|block_state| block_state.block()).collect() + state.chain().map(|block_state| block_state.block()).collect() } else { Vec::new() }; @@ -692,10 +692,8 @@ impl BlockState { /// Returns a vector of `BlockStates` representing the entire in memory chain. /// The block state order in the output vector is newest to oldest (highest to lowest), /// including self as the first element. - pub fn chain(&self) -> Vec<&Self> { - let mut chain = vec![self]; - self.append_parent_chain(&mut chain); - chain + pub fn chain(&self) -> impl Iterator { + std::iter::successors(Some(self), |state| state.parent.as_deref()) } /// Appends the parent chain of this [`BlockState`] to the given vector. @@ -715,10 +713,59 @@ impl BlockState { /// This merges the state of all blocks that are part of the chain that the this block is /// the head of. This includes all blocks that connect back to the canonical block on disk. pub fn state_provider(&self, historical: StateProviderBox) -> MemoryOverlayStateProvider { - let in_memory = self.chain().into_iter().map(|block_state| block_state.block()).collect(); + let in_memory = self.chain().map(|block_state| block_state.block()).collect(); MemoryOverlayStateProvider::new(historical, in_memory) } + + /// Tries to find a block by [`BlockHashOrNumber`] in the chain ending at this block. + pub fn block_on_chain(&self, hash_or_num: BlockHashOrNumber) -> Option<&Self> { + self.chain().find(|block| match hash_or_num { + BlockHashOrNumber::Hash(hash) => block.hash() == hash, + BlockHashOrNumber::Number(number) => block.number() == number, + }) + } + + /// Tries to find a transaction by [`TxHash`] in the chain ending at this block. 
+ pub fn transaction_on_chain(&self, hash: TxHash) -> Option { + self.chain().find_map(|block_state| { + block_state + .block_ref() + .block() + .body + .transactions() + .find(|tx| tx.hash() == hash) + .cloned() + }) + } + + /// Tries to find a transaction with meta by [`TxHash`] in the chain ending at this block. + pub fn transaction_meta_on_chain( + &self, + tx_hash: TxHash, + ) -> Option<(TransactionSigned, TransactionMeta)> { + self.chain().find_map(|block_state| { + block_state + .block_ref() + .block() + .body + .transactions() + .enumerate() + .find(|(_, tx)| tx.hash() == tx_hash) + .map(|(index, tx)| { + let meta = TransactionMeta { + tx_hash, + index: index as u64, + block_hash: block_state.hash(), + block_number: block_state.block_ref().block.number, + base_fee: block_state.block_ref().block.header.base_fee_per_gas, + timestamp: block_state.block_ref().block.timestamp, + excess_blob_gas: block_state.block_ref().block.excess_blob_gas, + }; + (tx.clone(), meta) + }) + }) + } } /// Represents an executed block stored in-memory. @@ -1382,7 +1429,7 @@ mod tests { let parents = single_block.parent_state_chain(); assert_eq!(parents.len(), 0); - let block_state_chain = single_block.chain(); + let block_state_chain = single_block.chain().collect::>(); assert_eq!(block_state_chain.len(), 1); assert_eq!(block_state_chain[0].block().block.number, single_block_number); assert_eq!(block_state_chain[0].block().block.hash(), single_block_hash); @@ -1393,18 +1440,18 @@ mod tests { let mut test_block_builder = TestBlockBuilder::default(); let chain = create_mock_state_chain(&mut test_block_builder, 3); - let block_state_chain = chain[2].chain(); + let block_state_chain = chain[2].chain().collect::>(); assert_eq!(block_state_chain.len(), 3); assert_eq!(block_state_chain[0].block().block.number, 3); assert_eq!(block_state_chain[1].block().block.number, 2); assert_eq!(block_state_chain[2].block().block.number, 1); - let block_state_chain = chain[1].chain(); + let block_state_chain = chain[1].chain().collect::>(); assert_eq!(block_state_chain.len(), 2); assert_eq!(block_state_chain[0].block().block.number, 2); assert_eq!(block_state_chain[1].block().block.number, 1); - let block_state_chain = chain[0].chain(); + let block_state_chain = chain[0].chain().collect::>(); assert_eq!(block_state_chain.len(), 1); assert_eq!(block_state_chain[0].block().block.number, 1); } diff --git a/crates/chain-state/src/lib.rs b/crates/chain-state/src/lib.rs index 50a103111..bd9b43a59 100644 --- a/crates/chain-state/src/lib.rs +++ b/crates/chain-state/src/lib.rs @@ -22,7 +22,7 @@ pub use notifications::{ }; mod memory_overlay; -pub use memory_overlay::MemoryOverlayStateProvider; +pub use memory_overlay::{MemoryOverlayStateProvider, MemoryOverlayStateProviderRef}; #[cfg(any(test, feature = "test-utils"))] /// Common test helpers diff --git a/crates/chain-state/src/memory_overlay.rs b/crates/chain-state/src/memory_overlay.rs index eb125dad1..ada0faee4 100644 --- a/crates/chain-state/src/memory_overlay.rs +++ b/crates/chain-state/src/memory_overlay.rs @@ -7,14 +7,26 @@ use alloy_primitives::{ use reth_errors::ProviderResult; use reth_primitives::{Account, Bytecode}; use reth_storage_api::{ - AccountReader, BlockHashReader, StateProofProvider, StateProvider, StateProviderBox, - StateRootProvider, StorageRootProvider, + AccountReader, BlockHashReader, StateProofProvider, StateProvider, StateRootProvider, + StorageRootProvider, }; use reth_trie::{ updates::TrieUpdates, AccountProof, HashedPostState, HashedStorage, MultiProof, 
TrieInput, }; use std::sync::OnceLock; +/// A state provider that stores references to in-memory blocks along with their state as well as a +/// reference of the historical state provider for fallback lookups. +#[allow(missing_debug_implementations)] +pub struct MemoryOverlayStateProviderRef<'a> { + /// Historical state provider for state lookups that are not found in in-memory blocks. + pub(crate) historical: Box, + /// The collection of executed parent blocks. Expected order is newest to oldest. + pub(crate) in_memory: Vec, + /// Lazy-loaded in-memory trie data. + pub(crate) trie_state: OnceLock, +} + /// A state provider that stores references to in-memory blocks along with their state as well as /// the historical state provider for fallback lookups. #[allow(missing_debug_implementations)] @@ -27,193 +39,200 @@ pub struct MemoryOverlayStateProvider { pub(crate) trie_state: OnceLock, } -impl MemoryOverlayStateProvider { - /// Create new memory overlay state provider. - /// - /// ## Arguments - /// - /// - `in_memory` - the collection of executed ancestor blocks in reverse. - /// - `historical` - a historical state provider for the latest ancestor block stored in the - /// database. - pub fn new(historical: Box, in_memory: Vec) -> Self { - Self { historical, in_memory, trie_state: OnceLock::new() } - } - - /// Turn this state provider into a [`StateProviderBox`] - pub fn boxed(self) -> StateProviderBox { - Box::new(self) - } - - /// Return lazy-loaded trie state aggregated from in-memory blocks. - fn trie_state(&self) -> &MemoryOverlayTrieState { - self.trie_state.get_or_init(|| { - let mut trie_state = MemoryOverlayTrieState::default(); - for block in self.in_memory.iter().rev() { - trie_state.state.extend_ref(block.hashed_state.as_ref()); - trie_state.nodes.extend_ref(block.trie.as_ref()); - } - trie_state - }) - } -} +macro_rules! impl_state_provider { + ([$($tokens:tt)*],$type:ty, $historical_type:ty) => { + impl $($tokens)* $type { + /// Create new memory overlay state provider. + /// + /// ## Arguments + /// + /// - `in_memory` - the collection of executed ancestor blocks in reverse. + /// - `historical` - a historical state provider for the latest ancestor block stored in the + /// database. + pub fn new(historical: $historical_type, in_memory: Vec) -> Self { + Self { historical, in_memory, trie_state: OnceLock::new() } + } + + /// Turn this state provider into a state provider + pub fn boxed(self) -> $historical_type { + Box::new(self) + } -impl BlockHashReader for MemoryOverlayStateProvider { - fn block_hash(&self, number: BlockNumber) -> ProviderResult> { - for block in &self.in_memory { - if block.block.number == number { - return Ok(Some(block.block.hash())) + /// Return lazy-loaded trie state aggregated from in-memory blocks. 
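// Editorial aside: a tiny model of the `OnceLock::get_or_init` pattern used by `trie_state`
// (below). The aggregate is computed once, on first access, by folding the in-memory blocks in
// reverse (mirroring `self.in_memory.iter().rev()`), and every later call returns the cached
// value; the real method extends trie state so newer entries win, while this sketch simply sums.
// `Overlay` and `cached` are hypothetical stand-ins, not the reth types.

use std::sync::OnceLock;

struct Overlay {
    /// Newest-to-oldest, like the `in_memory` block list.
    blocks: Vec<u64>,
    cached: OnceLock<u64>,
}

impl Overlay {
    fn aggregate(&self) -> u64 {
        *self.cached.get_or_init(|| {
            // `rev()` walks oldest-to-newest before the result is memoized.
            self.blocks.iter().rev().sum()
        })
    }
}

fn main() {
    let overlay = Overlay { blocks: vec![3, 2, 1], cached: OnceLock::new() };
    assert_eq!(overlay.aggregate(), 6);
    // Second call returns the memoized value without recomputing.
    assert_eq!(overlay.aggregate(), 6);
}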
+ fn trie_state(&self) -> &MemoryOverlayTrieState { + self.trie_state.get_or_init(|| { + let mut trie_state = MemoryOverlayTrieState::default(); + for block in self.in_memory.iter().rev() { + trie_state.state.extend_ref(block.hashed_state.as_ref()); + trie_state.nodes.extend_ref(block.trie.as_ref()); + } + trie_state + }) } } - self.historical.block_hash(number) - } - - fn canonical_hashes_range( - &self, - start: BlockNumber, - end: BlockNumber, - ) -> ProviderResult> { - let range = start..end; - let mut earliest_block_number = None; - let mut in_memory_hashes = Vec::new(); - for block in &self.in_memory { - if range.contains(&block.block.number) { - in_memory_hashes.insert(0, block.block.hash()); - earliest_block_number = Some(block.block.number); + impl $($tokens)* BlockHashReader for $type { + fn block_hash(&self, number: BlockNumber) -> ProviderResult> { + for block in &self.in_memory { + if block.block.number == number { + return Ok(Some(block.block.hash())) + } + } + + self.historical.block_hash(number) + } + + fn canonical_hashes_range( + &self, + start: BlockNumber, + end: BlockNumber, + ) -> ProviderResult> { + let range = start..end; + let mut earliest_block_number = None; + let mut in_memory_hashes = Vec::new(); + for block in &self.in_memory { + if range.contains(&block.block.number) { + in_memory_hashes.insert(0, block.block.hash()); + earliest_block_number = Some(block.block.number); + } + } + + let mut hashes = + self.historical.canonical_hashes_range(start, earliest_block_number.unwrap_or(end))?; + hashes.append(&mut in_memory_hashes); + Ok(hashes) } } - let mut hashes = - self.historical.canonical_hashes_range(start, earliest_block_number.unwrap_or(end))?; - hashes.append(&mut in_memory_hashes); - Ok(hashes) - } -} + impl $($tokens)* AccountReader for $type { + fn basic_account(&self, address: Address) -> ProviderResult> { + for block in &self.in_memory { + if let Some(account) = block.execution_output.account(&address) { + return Ok(account) + } + } -impl AccountReader for MemoryOverlayStateProvider { - fn basic_account(&self, address: Address) -> ProviderResult> { - for block in &self.in_memory { - if let Some(account) = block.execution_output.account(&address) { - return Ok(account) + self.historical.basic_account(address) } } - self.historical.basic_account(address) - } -} + impl $($tokens)* StateRootProvider for $type { + fn state_root(&self, state: HashedPostState) -> ProviderResult { + self.state_root_from_nodes(TrieInput::from_state(state)) + } -impl StateRootProvider for MemoryOverlayStateProvider { - fn state_root(&self, state: HashedPostState) -> ProviderResult { - self.state_root_from_nodes(TrieInput::from_state(state)) - } - - fn state_root_from_nodes(&self, mut input: TrieInput) -> ProviderResult { - let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); - input.prepend_cached(nodes, state); - self.historical.state_root_from_nodes(input) - } - - fn state_root_with_updates( - &self, - state: HashedPostState, - ) -> ProviderResult<(B256, TrieUpdates)> { - self.state_root_from_nodes_with_updates(TrieInput::from_state(state)) - } - - fn state_root_from_nodes_with_updates( - &self, - mut input: TrieInput, - ) -> ProviderResult<(B256, TrieUpdates)> { - let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); - input.prepend_cached(nodes, state); - self.historical.state_root_from_nodes_with_updates(input) - } -} + fn state_root_from_nodes(&self, mut input: TrieInput) -> ProviderResult { + let MemoryOverlayTrieState { nodes, state 
} = self.trie_state().clone(); + input.prepend_cached(nodes, state); + self.historical.state_root_from_nodes(input) + } -impl StorageRootProvider for MemoryOverlayStateProvider { - // TODO: Currently this does not reuse available in-memory trie nodes. - fn storage_root(&self, address: Address, storage: HashedStorage) -> ProviderResult { - let state = &self.trie_state().state; - let mut hashed_storage = - state.storages.get(&keccak256(address)).cloned().unwrap_or_default(); - hashed_storage.extend(&storage); - self.historical.storage_root(address, hashed_storage) - } - - // TODO: Currently this does not reuse available in-memory trie nodes. - fn storage_proof( - &self, - address: Address, - slot: B256, - storage: HashedStorage, - ) -> ProviderResult { - let state = &self.trie_state().state; - let mut hashed_storage = - state.storages.get(&keccak256(address)).cloned().unwrap_or_default(); - hashed_storage.extend(&storage); - self.historical.storage_proof(address, slot, hashed_storage) - } -} + fn state_root_with_updates( + &self, + state: HashedPostState, + ) -> ProviderResult<(B256, TrieUpdates)> { + self.state_root_from_nodes_with_updates(TrieInput::from_state(state)) + } -impl StateProofProvider for MemoryOverlayStateProvider { - fn proof( - &self, - mut input: TrieInput, - address: Address, - slots: &[B256], - ) -> ProviderResult { - let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); - input.prepend_cached(nodes, state); - self.historical.proof(input, address, slots) - } - - fn multiproof( - &self, - mut input: TrieInput, - targets: HashMap>, - ) -> ProviderResult { - let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); - input.prepend_cached(nodes, state); - self.historical.multiproof(input, targets) - } - - fn witness( - &self, - mut input: TrieInput, - target: HashedPostState, - ) -> ProviderResult> { - let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); - input.prepend_cached(nodes, state); - self.historical.witness(input, target) - } -} + fn state_root_from_nodes_with_updates( + &self, + mut input: TrieInput, + ) -> ProviderResult<(B256, TrieUpdates)> { + let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); + input.prepend_cached(nodes, state); + self.historical.state_root_from_nodes_with_updates(input) + } + } + + impl $($tokens)* StorageRootProvider for $type { + // TODO: Currently this does not reuse available in-memory trie nodes. + fn storage_root(&self, address: Address, storage: HashedStorage) -> ProviderResult { + let state = &self.trie_state().state; + let mut hashed_storage = + state.storages.get(&keccak256(address)).cloned().unwrap_or_default(); + hashed_storage.extend(&storage); + self.historical.storage_root(address, hashed_storage) + } -impl StateProvider for MemoryOverlayStateProvider { - fn storage( - &self, - address: Address, - storage_key: StorageKey, - ) -> ProviderResult> { - for block in &self.in_memory { - if let Some(value) = block.execution_output.storage(&address, storage_key.into()) { - return Ok(Some(value)) + // TODO: Currently this does not reuse available in-memory trie nodes. 
+ fn storage_proof( + &self, + address: Address, + slot: B256, + storage: HashedStorage, + ) -> ProviderResult { + let state = &self.trie_state().state; + let mut hashed_storage = + state.storages.get(&keccak256(address)).cloned().unwrap_or_default(); + hashed_storage.extend(&storage); + self.historical.storage_proof(address, slot, hashed_storage) } } - self.historical.storage(address, storage_key) - } + impl $($tokens)* StateProofProvider for $type { + fn proof( + &self, + mut input: TrieInput, + address: Address, + slots: &[B256], + ) -> ProviderResult { + let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); + input.prepend_cached(nodes, state); + self.historical.proof(input, address, slots) + } + + fn multiproof( + &self, + mut input: TrieInput, + targets: HashMap>, + ) -> ProviderResult { + let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); + input.prepend_cached(nodes, state); + self.historical.multiproof(input, targets) + } - fn bytecode_by_hash(&self, code_hash: B256) -> ProviderResult> { - for block in &self.in_memory { - if let Some(contract) = block.execution_output.bytecode(&code_hash) { - return Ok(Some(contract)) + fn witness( + &self, + mut input: TrieInput, + target: HashedPostState, + ) -> ProviderResult> { + let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); + input.prepend_cached(nodes, state); + self.historical.witness(input, target) } } - self.historical.bytecode_by_hash(code_hash) - } + impl $($tokens)* StateProvider for $type { + fn storage( + &self, + address: Address, + storage_key: StorageKey, + ) -> ProviderResult> { + for block in &self.in_memory { + if let Some(value) = block.execution_output.storage(&address, storage_key.into()) { + return Ok(Some(value)) + } + } + + self.historical.storage(address, storage_key) + } + + fn bytecode_by_hash(&self, code_hash: B256) -> ProviderResult> { + for block in &self.in_memory { + if let Some(contract) = block.execution_output.bytecode(&code_hash) { + return Ok(Some(contract)) + } + } + + self.historical.bytecode_by_hash(code_hash) + } + } + }; } +impl_state_provider!([], MemoryOverlayStateProvider, Box); +impl_state_provider!([<'a>], MemoryOverlayStateProviderRef<'a>, Box); + /// The collection of data necessary for trie-related operations for [`MemoryOverlayStateProvider`]. 
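// Editorial aside: a reduced model of the macro pattern introduced above. The `[$($tokens)*]`
// slot carries the impl's generic parameters (empty for the owned provider, `<'a>` for the
// borrowed `...Ref` variant), so one macro body can emit the same trait impls for both types.
// `Total`, `OwnedData`, and `BorrowedData` are hypothetical stand-ins, not the reth provider
// types.

trait Total {
    fn total(&self) -> u64;
}

struct OwnedData {
    data: Vec<u64>,
}

struct BorrowedData<'a> {
    data: &'a [u64],
}

macro_rules! impl_total {
    ([$($tokens:tt)*], $type:ty) => {
        impl $($tokens)* Total for $type {
            fn total(&self) -> u64 {
                // Identical body for both types: `data` yields u64 items either way.
                self.data.iter().copied().sum()
            }
        }
    };
}

impl_total!([], OwnedData);
impl_total!([<'a>], BorrowedData<'a>);

fn main() {
    let values = vec![1, 2, 3];
    assert_eq!(OwnedData { data: values.clone() }.total(), 6);
    assert_eq!(BorrowedData { data: &values }.total(), 6);
}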
#[derive(Clone, Default, Debug)] pub(crate) struct MemoryOverlayTrieState { diff --git a/crates/engine/service/src/service.rs b/crates/engine/service/src/service.rs index 026476a82..198438d45 100644 --- a/crates/engine/service/src/service.rs +++ b/crates/engine/service/src/service.rs @@ -151,7 +151,9 @@ mod tests { use reth_exex_types::FinishedExExHeight; use reth_network_p2p::test_utils::TestFullBlockClient; use reth_primitives::SealedHeader; - use reth_provider::test_utils::create_test_provider_factory_with_chain_spec; + use reth_provider::{ + providers::BlockchainProvider2, test_utils::create_test_provider_factory_with_chain_spec, + }; use reth_prune::Pruner; use reth_tasks::TokioTaskExecutor; use std::sync::Arc; diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs b/crates/storage/provider/src/providers/blockchain_provider.rs index 13215e11a..64a8a204a 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -1,13 +1,15 @@ +#![allow(unused)] use crate::{ - providers::StaticFileProvider, AccountReader, BlockHashReader, BlockIdReader, BlockNumReader, - BlockReader, BlockReaderIdExt, BlockSource, CanonChainTracker, CanonStateNotifications, - CanonStateSubscriptions, ChainSpecProvider, ChainStateBlockReader, ChangeSetReader, - DatabaseProviderFactory, DatabaseProviderRO, EvmEnvProvider, HeaderProvider, ProviderError, + providers::{ConsistentProvider, StaticFileProvider}, + AccountReader, BlockHashReader, BlockIdReader, BlockNumReader, BlockReader, BlockReaderIdExt, + BlockSource, CanonChainTracker, CanonStateNotifications, CanonStateSubscriptions, + ChainSpecProvider, ChainStateBlockReader, ChangeSetReader, DatabaseProvider, + DatabaseProviderFactory, EvmEnvProvider, FullProvider, HeaderProvider, ProviderError, ProviderFactory, PruneCheckpointReader, ReceiptProvider, ReceiptProviderIdExt, StageCheckpointReader, StateProviderBox, StateProviderFactory, StateReader, StaticFileProviderFactory, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; -use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag, HashOrNumber}; +use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag}; use alloy_primitives::{Address, BlockHash, BlockNumber, Sealable, TxHash, TxNumber, B256, U256}; use alloy_rpc_types_engine::ForkchoiceState; use reth_chain_state::{ @@ -15,10 +17,10 @@ use reth_chain_state::{ MemoryOverlayStateProvider, }; use reth_chainspec::{ChainInfo, EthereumHardforks}; -use reth_db::models::BlockNumberAddress; +use reth_db::{models::BlockNumberAddress, transaction::DbTx, Database}; use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices}; use reth_evm::ConfigureEvmEnv; -use reth_execution_types::{BundleStateInit, ExecutionOutcome, RevertsInit}; +use reth_execution_types::ExecutionOutcome; use reth_node_types::NodeTypesWithDB; use reth_primitives::{ Account, Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders, @@ -27,21 +29,17 @@ use reth_primitives::{ }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; -use reth_storage_api::StorageChangeSetReader; +use reth_storage_api::{DBProvider, StorageChangeSetReader}; use reth_storage_errors::provider::ProviderResult; -use revm::{ - db::states::PlainStorageRevert, - primitives::{BlockEnv, CfgEnvWithHandlerCfg}, -}; +use revm::primitives::{BlockEnv, CfgEnvWithHandlerCfg}; use std::{ - collections::{hash_map, 
HashMap}, - ops::{Add, Bound, RangeBounds, RangeInclusive, Sub}, + ops::{Add, RangeBounds, RangeInclusive, Sub}, sync::Arc, time::Instant, }; use tracing::trace; -use super::ProviderNodeTypes; +use crate::providers::ProviderNodeTypes; /// The main type for interacting with the blockchain. /// @@ -50,11 +48,11 @@ use super::ProviderNodeTypes; /// type that holds an instance of the database and the blockchain tree. #[derive(Debug)] pub struct BlockchainProvider2 { - /// Provider type used to access the database. - database: ProviderFactory, + /// Provider factory used to access the database. + pub(crate) database: ProviderFactory, /// Tracks the chain info wrt forkchoice updates and in memory canonical /// state. - pub(super) canonical_in_memory_state: CanonicalInMemoryState, + pub(crate) canonical_in_memory_state: CanonicalInMemoryState, } impl Clone for BlockchainProvider2 { @@ -67,15 +65,15 @@ impl Clone for BlockchainProvider2 { } impl BlockchainProvider2 { - /// Create a new provider using only the database, fetching the latest header from - /// the database to initialize the provider. - pub fn new(database: ProviderFactory) -> ProviderResult { - let provider = database.provider()?; + /// Create a new [`BlockchainProvider2`] using only the storage, fetching the latest + /// header from the database to initialize the provider. + pub fn new(storage: ProviderFactory) -> ProviderResult { + let provider = storage.provider()?; let best = provider.chain_info()?; match provider.header_by_number(best.best_number)? { Some(header) => { drop(provider); - Ok(Self::with_latest(database, SealedHeader::new(header, best.best_hash))?) + Ok(Self::with_latest(storage, SealedHeader::new(header, best.best_hash))?) } None => Err(ProviderError::HeaderNotFound(best.best_number.into())), } @@ -86,8 +84,8 @@ impl BlockchainProvider2 { /// /// This returns a `ProviderResult` since it tries the retrieve the last finalized header from /// `database`. - pub fn with_latest(database: ProviderFactory, latest: SealedHeader) -> ProviderResult { - let provider = database.provider()?; + pub fn with_latest(storage: ProviderFactory, latest: SealedHeader) -> ProviderResult { + let provider = storage.provider()?; let finalized_header = provider .last_finalized_block_number()? .map(|num| provider.sealed_header(num)) @@ -104,7 +102,7 @@ impl BlockchainProvider2 { .transpose()? .flatten(); Ok(Self { - database, + database: storage, canonical_in_memory_state: CanonicalInMemoryState::with_head( latest, finalized_header, @@ -118,281 +116,12 @@ impl BlockchainProvider2 { self.canonical_in_memory_state.clone() } - // Helper function to convert range bounds - fn convert_range_bounds( - &self, - range: impl RangeBounds, - end_unbounded: impl FnOnce() -> T, - ) -> (T, T) - where - T: Copy + Add + Sub + From, - { - let start = match range.start_bound() { - Bound::Included(&n) => n, - Bound::Excluded(&n) => n + T::from(1u8), - Bound::Unbounded => T::from(0u8), - }; - - let end = match range.end_bound() { - Bound::Included(&n) => n, - Bound::Excluded(&n) => n - T::from(1u8), - Bound::Unbounded => end_unbounded(), - }; - - (start, end) - } - - /// Return the last N blocks of state, recreating the [`ExecutionOutcome`]. - /// - /// If the range is empty, or there are no blocks for the given range, then this returns `None`. 
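// Editorial aside: the helper implementations being removed from this facade (range conversion,
// state reconstruction, the in-memory/storage lookup routines) are replaced by delegation to the
// new `ConsistentProvider`: each public method obtains one such provider per call and performs
// all of its reads against that single view. Below is a minimal model of the pattern, with
// hypothetical stand-in types rather than the reth API.

use std::sync::{Arc, RwLock};

#[derive(Default)]
struct Chain {
    /// Shared, mutable chain data; a stand-in for in-memory state plus database.
    blocks: Arc<RwLock<Vec<u64>>>,
}

/// A snapshot captured once per top-level call, so reads cannot observe a moving target.
struct ConsistentView {
    snapshot: Vec<u64>,
}

impl Chain {
    fn consistent_view(&self) -> ConsistentView {
        ConsistentView { snapshot: self.blocks.read().unwrap().clone() }
    }

    /// Facade method: delegate to a freshly captured view, mirroring the
    /// `self.consistent_provider()?.…` calls in this patch.
    fn best_block(&self) -> Option<u64> {
        self.consistent_view().best_block()
    }
}

impl ConsistentView {
    fn best_block(&self) -> Option<u64> {
        self.snapshot.last().copied()
    }
}

fn main() {
    let chain = Chain::default();
    chain.blocks.write().unwrap().extend([1, 2, 3]);
    assert_eq!(chain.best_block(), Some(3));
}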
- pub fn get_state( - &self, - range: RangeInclusive, - ) -> ProviderResult> { - if range.is_empty() { - return Ok(None) - } - let start_block_number = *range.start(); - let end_block_number = *range.end(); - - // We are not removing block meta as it is used to get block changesets. - let mut block_bodies = Vec::new(); - for block_num in range.clone() { - let block_body = self - .block_body_indices(block_num)? - .ok_or(ProviderError::BlockBodyIndicesNotFound(block_num))?; - block_bodies.push((block_num, block_body)) - } - - // get transaction receipts - let Some(from_transaction_num) = block_bodies.first().map(|body| body.1.first_tx_num()) - else { - return Ok(None) - }; - let Some(to_transaction_num) = block_bodies.last().map(|body| body.1.last_tx_num()) else { - return Ok(None) - }; - - let mut account_changeset = Vec::new(); - for block_num in range.clone() { - let changeset = - self.account_block_changeset(block_num)?.into_iter().map(|elem| (block_num, elem)); - account_changeset.extend(changeset); - } - - let mut storage_changeset = Vec::new(); - for block_num in range { - let changeset = self.storage_changeset(block_num)?; - storage_changeset.extend(changeset); - } - - let (state, reverts) = - self.populate_bundle_state(account_changeset, storage_changeset, end_block_number)?; - - let mut receipt_iter = - self.receipts_by_tx_range(from_transaction_num..=to_transaction_num)?.into_iter(); - - let mut receipts = Vec::with_capacity(block_bodies.len()); - // loop break if we are at the end of the blocks. - for (_, block_body) in block_bodies { - let mut block_receipts = Vec::with_capacity(block_body.tx_count as usize); - for tx_num in block_body.tx_num_range() { - let receipt = receipt_iter - .next() - .ok_or_else(|| ProviderError::ReceiptNotFound(tx_num.into()))?; - block_receipts.push(Some(receipt)); - } - receipts.push(block_receipts); - } - - Ok(Some(ExecutionOutcome::new_init( - state, - reverts, - // We skip new contracts since we never delete them from the database - Vec::new(), - receipts.into(), - start_block_number, - Vec::new(), - ))) - } - - /// Populate a [`BundleStateInit`] and [`RevertsInit`] using cursors over the - /// [`reth_db::PlainAccountState`] and [`reth_db::PlainStorageState`] tables, based on the given - /// storage and account changesets. - fn populate_bundle_state( - &self, - account_changeset: Vec<(u64, AccountBeforeTx)>, - storage_changeset: Vec<(BlockNumberAddress, StorageEntry)>, - block_range_end: BlockNumber, - ) -> ProviderResult<(BundleStateInit, RevertsInit)> { - let mut state: BundleStateInit = HashMap::new(); - let mut reverts: RevertsInit = HashMap::new(); - let state_provider = self.state_by_block_number_or_tag(block_range_end.into())?; - - // add account changeset changes - for (block_number, account_before) in account_changeset.into_iter().rev() { - let AccountBeforeTx { info: old_info, address } = account_before; - match state.entry(address) { - hash_map::Entry::Vacant(entry) => { - let new_info = state_provider.basic_account(address)?; - entry.insert((old_info, new_info, HashMap::new())); - } - hash_map::Entry::Occupied(mut entry) => { - // overwrite old account state. - entry.get_mut().0 = old_info; - } - } - // insert old info into reverts. 
- reverts.entry(block_number).or_default().entry(address).or_default().0 = Some(old_info); - } - - // add storage changeset changes - for (block_and_address, old_storage) in storage_changeset.into_iter().rev() { - let BlockNumberAddress((block_number, address)) = block_and_address; - // get account state or insert from plain state. - let account_state = match state.entry(address) { - hash_map::Entry::Vacant(entry) => { - let present_info = state_provider.basic_account(address)?; - entry.insert((present_info, present_info, HashMap::new())) - } - hash_map::Entry::Occupied(entry) => entry.into_mut(), - }; - - // match storage. - match account_state.2.entry(old_storage.key) { - hash_map::Entry::Vacant(entry) => { - let new_storage_value = - state_provider.storage(address, old_storage.key)?.unwrap_or_default(); - entry.insert((old_storage.value, new_storage_value)); - } - hash_map::Entry::Occupied(mut entry) => { - entry.get_mut().0 = old_storage.value; - } - }; - - reverts - .entry(block_number) - .or_default() - .entry(address) - .or_default() - .1 - .push(old_storage); - } - - Ok((state, reverts)) - } - - /// Fetches a range of data from both in-memory state and persistent storage while a predicate - /// is met. - /// - /// Creates a snapshot of the in-memory chain state and database provider to prevent - /// inconsistencies. Splits the range into in-memory and storage sections, prioritizing - /// recent in-memory blocks in case of overlaps. - /// - /// * `fetch_db_range` function (`F`) provides access to the database provider, allowing the - /// user to retrieve the required items from the database using [`RangeInclusive`]. - /// * `map_block_state_item` function (`G`) provides each block of the range in the in-memory - /// state, allowing for selection or filtering for the desired data. - fn get_in_memory_or_storage_by_block_range_while( - &self, - range: impl RangeBounds, - fetch_db_range: F, - map_block_state_item: G, - mut predicate: P, - ) -> ProviderResult> - where - F: FnOnce( - &DatabaseProviderRO, - RangeInclusive, - &mut P, - ) -> ProviderResult>, - G: Fn(Arc, &mut P) -> Option, - P: FnMut(&T) -> bool, - { - // Each one provides a snapshot at the time of instantiation, but its order matters. - // - // If we acquire first the database provider, it's possible that before the in-memory chain - // snapshot is instantiated, it will flush blocks to disk. This would - // mean that our database provider would not have access to the flushed blocks (since it's - // working under an older view), while the in-memory state may have deleted them - // entirely. Resulting in gaps on the range. - let mut in_memory_chain = - self.canonical_in_memory_state.canonical_chain().collect::>(); - let db_provider = self.database_provider_ro()?; - - let (start, end) = self.convert_range_bounds(range, || { - // the first block is the highest one. - in_memory_chain - .first() - .map(|b| b.number()) - .unwrap_or_else(|| db_provider.last_block_number().unwrap_or_default()) - }); - - if start > end { - return Ok(vec![]) - } - - // Split range into storage_range and in-memory range. If the in-memory range is not - // necessary drop it early. - // - // The last block of `in_memory_chain` is the lowest block number. 
- let (in_memory, storage_range) = match in_memory_chain.last().as_ref().map(|b| b.number()) { - Some(lowest_memory_block) if lowest_memory_block <= end => { - let highest_memory_block = - in_memory_chain.first().as_ref().map(|b| b.number()).expect("qed"); - - // Database will for a time overlap with in-memory-chain blocks. In - // case of a re-org, it can mean that the database blocks are of a forked chain, and - // so, we should prioritize the in-memory overlapped blocks. - let in_memory_range = - lowest_memory_block.max(start)..=end.min(highest_memory_block); - - // If requested range is in the middle of the in-memory range, remove the necessary - // lowest blocks - in_memory_chain.truncate( - in_memory_chain - .len() - .saturating_sub(start.saturating_sub(lowest_memory_block) as usize), - ); - - let storage_range = - (lowest_memory_block > start).then(|| start..=lowest_memory_block - 1); - - (Some((in_memory_chain, in_memory_range)), storage_range) - } - _ => { - // Drop the in-memory chain so we don't hold blocks in memory. - drop(in_memory_chain); - - (None, Some(start..=end)) - } - }; - - let mut items = Vec::with_capacity((end - start + 1) as usize); - - if let Some(storage_range) = storage_range { - let mut db_items = fetch_db_range(&db_provider, storage_range.clone(), &mut predicate)?; - items.append(&mut db_items); - - // The predicate was not met, if the number of items differs from the expected. So, we - // return what we have. - if items.len() as u64 != storage_range.end() - storage_range.start() + 1 { - return Ok(items) - } - } - - if let Some((in_memory_chain, in_memory_range)) = in_memory { - for (num, block) in in_memory_range.zip(in_memory_chain.into_iter().rev()) { - debug_assert!(num == block.number()); - if let Some(item) = map_block_state_item(block, &mut predicate) { - items.push(item); - } else { - break - } - } - } - - Ok(items) + /// Returns a provider with a created `DbTx` inside, which allows fetching data from the + /// database using different types of providers. Example: [`HeaderProvider`] + /// [`BlockHashReader`]. This may fail if the inner read database transaction fails to open. + #[track_caller] + pub fn consistent_provider(&self) -> ProviderResult> { + ConsistentProvider::new(self.database.clone(), self.canonical_in_memory_state()) } /// This uses a given [`BlockState`] to initialize a state provider for that block. @@ -405,222 +134,14 @@ impl BlockchainProvider2 { Ok(state.state_provider(latest_historical)) } - /// Fetches data from either in-memory state or persistent storage for a range of transactions. + /// Return the last N blocks of state, recreating the [`ExecutionOutcome`]. /// - /// * `fetch_from_db`: has a [`DatabaseProviderRO`] and the storage specific range. - /// * `fetch_from_block_state`: has a [`RangeInclusive`] of elements that should be fetched from - /// [`BlockState`]. [`RangeInclusive`] is necessary to handle partial look-ups of a block. - fn get_in_memory_or_storage_by_tx_range( - &self, - range: impl RangeBounds, - fetch_from_db: S, - fetch_from_block_state: M, - ) -> ProviderResult> - where - S: FnOnce( - DatabaseProviderRO, - RangeInclusive, - ) -> ProviderResult>, - M: Fn(RangeInclusive, Arc) -> ProviderResult>, - { - let in_mem_chain = self.canonical_in_memory_state.canonical_chain().collect::>(); - let provider = self.database.provider()?; - - // Get the last block number stored in the storage which does NOT overlap with in-memory - // chain. 
- let last_database_block_number = in_mem_chain - .last() - .map(|b| Ok(b.anchor().number)) - .unwrap_or_else(|| provider.last_block_number())?; - - // Get the next tx number for the last block stored in the storage, which marks the start of - // the in-memory state. - let last_block_body_index = provider - .block_body_indices(last_database_block_number)? - .ok_or(ProviderError::BlockBodyIndicesNotFound(last_database_block_number))?; - let mut in_memory_tx_num = last_block_body_index.next_tx_num(); - - let (start, end) = self.convert_range_bounds(range, || { - in_mem_chain - .iter() - .map(|b| b.block_ref().block().body.transactions.len() as u64) - .sum::() + - last_block_body_index.last_tx_num() - }); - - if start > end { - return Ok(vec![]) - } - - let mut tx_range = start..=end; - - // If the range is entirely before the first in-memory transaction number, fetch from - // storage - if *tx_range.end() < in_memory_tx_num { - return fetch_from_db(provider, tx_range); - } - - let mut items = Vec::with_capacity((tx_range.end() - tx_range.start() + 1) as usize); - - // If the range spans storage and memory, get elements from storage first. - if *tx_range.start() < in_memory_tx_num { - // Determine the range that needs to be fetched from storage. - let db_range = *tx_range.start()..=in_memory_tx_num.saturating_sub(1); - - // Set the remaining transaction range for in-memory - tx_range = in_memory_tx_num..=*tx_range.end(); - - items.extend(fetch_from_db(provider, db_range)?); - } - - // Iterate from the lowest block to the highest in-memory chain - for block_state in in_mem_chain.into_iter().rev() { - let block_tx_count = block_state.block_ref().block().body.transactions.len(); - let remaining = (tx_range.end() - tx_range.start() + 1) as usize; - - // If the transaction range start is equal or higher than the next block first - // transaction, advance - if *tx_range.start() >= in_memory_tx_num + block_tx_count as u64 { - in_memory_tx_num += block_tx_count as u64; - continue - } - - // This should only be more than 0 once, in case of a partial range inside a block. - let skip = (tx_range.start() - in_memory_tx_num) as usize; - - items.extend(fetch_from_block_state( - skip..=skip + (remaining.min(block_tx_count - skip) - 1), - block_state, - )?); - - in_memory_tx_num += block_tx_count as u64; - - // Break if the range has been fully processed - if in_memory_tx_num > *tx_range.end() { - break - } - - // Set updated range - tx_range = in_memory_tx_num..=*tx_range.end(); - } - - Ok(items) - } - - /// Fetches data from either in-memory state or persistent storage by transaction - /// [`HashOrNumber`]. - fn get_in_memory_or_storage_by_tx( - &self, - id: HashOrNumber, - fetch_from_db: S, - fetch_from_block_state: M, - ) -> ProviderResult> - where - S: FnOnce(DatabaseProviderRO) -> ProviderResult>, - M: Fn(usize, TxNumber, Arc) -> ProviderResult>, - { - // Order of instantiation matters. More information on: - // `get_in_memory_or_storage_by_block_range_while`. - let in_mem_chain = self.canonical_in_memory_state.canonical_chain().collect::>(); - let provider = self.database.provider()?; - - // Get the last block number stored in the database which does NOT overlap with in-memory - // chain. 
- let last_database_block_number = in_mem_chain - .last() - .map(|b| Ok(b.anchor().number)) - .unwrap_or_else(|| provider.last_block_number())?; - - // Get the next tx number for the last block stored in the database and consider it the - // first tx number of the in-memory state - let last_block_body_index = provider - .block_body_indices(last_database_block_number)? - .ok_or(ProviderError::BlockBodyIndicesNotFound(last_database_block_number))?; - let mut in_memory_tx_num = last_block_body_index.next_tx_num(); - - // If the transaction number is less than the first in-memory transaction number, make a - // database lookup - if let HashOrNumber::Number(id) = id { - if id < in_memory_tx_num { - return fetch_from_db(provider) - } - } - - // Iterate from the lowest block to the highest - for block_state in in_mem_chain.into_iter().rev() { - let executed_block = block_state.block_ref(); - let block = executed_block.block(); - - for tx_index in 0..block.body.transactions.len() { - match id { - HashOrNumber::Hash(tx_hash) => { - if tx_hash == block.body.transactions[tx_index].hash() { - return fetch_from_block_state(tx_index, in_memory_tx_num, block_state) - } - } - HashOrNumber::Number(id) => { - if id == in_memory_tx_num { - return fetch_from_block_state(tx_index, in_memory_tx_num, block_state) - } - } - } - - in_memory_tx_num += 1; - } - } - - // Not found in-memory, so check database. - if let HashOrNumber::Hash(_) = id { - return fetch_from_db(provider) - } - - Ok(None) - } - - /// Fetches data from either in-memory state or persistent storage by [`BlockHashOrNumber`]. - fn get_in_memory_or_storage_by_block( + /// If the range is empty, or there are no blocks for the given range, then this returns `None`. + pub fn get_state( &self, - id: BlockHashOrNumber, - fetch_from_db: S, - fetch_from_block_state: M, - ) -> ProviderResult - where - S: FnOnce(DatabaseProviderRO) -> ProviderResult, - M: Fn(Arc) -> ProviderResult, - { - let block_state = match id { - BlockHashOrNumber::Hash(block_hash) => { - self.canonical_in_memory_state.state_by_hash(block_hash) - } - BlockHashOrNumber::Number(block_number) => { - self.canonical_in_memory_state.state_by_number(block_number) - } - }; - - if let Some(block_state) = block_state { - return fetch_from_block_state(block_state) - } - fetch_from_db(self.database_provider_ro()?) - } -} - -impl BlockchainProvider2 { - /// Ensures that the given block number is canonical (synced) - /// - /// This is a helper for guarding the `HistoricalStateProvider` against block numbers that are - /// out of range and would lead to invalid results, mainly during initial sync. - /// - /// Verifying the `block_number` would be expensive since we need to lookup sync table - /// Instead, we ensure that the `block_number` is within the range of the - /// [`Self::best_block_number`] which is updated when a block is synced. 
- #[inline] - fn ensure_canonical_block(&self, block_number: BlockNumber) -> ProviderResult<()> { - let latest = self.best_block_number()?; - if block_number > latest { - Err(ProviderError::HeaderNotFound(block_number.into())) - } else { - Ok(()) - } + range: RangeInclusive, + ) -> ProviderResult> { + self.consistent_provider()?.get_state(range) } } @@ -646,78 +167,34 @@ impl StaticFileProviderFactory for BlockchainProvider2 impl HeaderProvider for BlockchainProvider2 { fn header(&self, block_hash: &BlockHash) -> ProviderResult> { - self.get_in_memory_or_storage_by_block( - (*block_hash).into(), - |db_provider| db_provider.header(block_hash), - |block_state| Ok(Some(block_state.block_ref().block().header.header().clone())), - ) + self.consistent_provider()?.header(block_hash) } fn header_by_number(&self, num: BlockNumber) -> ProviderResult> { - self.get_in_memory_or_storage_by_block( - num.into(), - |db_provider| db_provider.header_by_number(num), - |block_state| Ok(Some(block_state.block_ref().block().header.header().clone())), - ) + self.consistent_provider()?.header_by_number(num) } fn header_td(&self, hash: &BlockHash) -> ProviderResult> { - if let Some(num) = self.block_number(*hash)? { - self.header_td_by_number(num) - } else { - Ok(None) - } + self.consistent_provider()?.header_td(hash) } fn header_td_by_number(&self, number: BlockNumber) -> ProviderResult> { - let number = if self.canonical_in_memory_state.hash_by_number(number).is_some() { - // If the block exists in memory, we should return a TD for it. - // - // The canonical in memory state should only store post-merge blocks. Post-merge blocks - // have zero difficulty. This means we can use the total difficulty for the last - // finalized block number if present (so that we are not affected by reorgs), if not the - // last number in the database will be used. - if let Some(last_finalized_num_hash) = - self.canonical_in_memory_state.get_finalized_num_hash() - { - last_finalized_num_hash.number - } else { - self.last_block_number()? 
- } - } else { - // Otherwise, return what we have on disk for the input block - number - }; - self.database.header_td_by_number(number) + self.consistent_provider()?.header_td_by_number(number) } fn headers_range(&self, range: impl RangeBounds) -> ProviderResult> { - self.get_in_memory_or_storage_by_block_range_while( - range, - |db_provider, range, _| db_provider.headers_range(range), - |block_state, _| Some(block_state.block_ref().block().header.header().clone()), - |_| true, - ) + self.consistent_provider()?.headers_range(range) } fn sealed_header(&self, number: BlockNumber) -> ProviderResult> { - self.get_in_memory_or_storage_by_block( - number.into(), - |db_provider| db_provider.sealed_header(number), - |block_state| Ok(Some(block_state.block_ref().block().header.clone())), - ) + self.consistent_provider()?.sealed_header(number) } fn sealed_headers_range( &self, range: impl RangeBounds, ) -> ProviderResult> { - self.get_in_memory_or_storage_by_block_range_while( - range, - |db_provider, range, _| db_provider.sealed_headers_range(range), - |block_state, _| Some(block_state.block_ref().block().header.clone()), - |_| true, - ) + self.consistent_provider()?.sealed_headers_range(range) } fn sealed_headers_while( @@ -725,25 +202,13 @@ impl HeaderProvider for BlockchainProvider2 { range: impl RangeBounds, predicate: impl FnMut(&SealedHeader) -> bool, ) -> ProviderResult> { - self.get_in_memory_or_storage_by_block_range_while( - range, - |db_provider, range, predicate| db_provider.sealed_headers_while(range, predicate), - |block_state, predicate| { - let header = &block_state.block_ref().block().header; - predicate(header).then(|| header.clone()) - }, - predicate, - ) + self.consistent_provider()?.sealed_headers_while(range, predicate) } } impl BlockHashReader for BlockchainProvider2 { fn block_hash(&self, number: u64) -> ProviderResult> { - self.get_in_memory_or_storage_by_block( - number.into(), - |db_provider| db_provider.block_hash(number), - |block_state| Ok(Some(block_state.hash())), - ) + self.consistent_provider()?.block_hash(number) } fn canonical_hashes_range( @@ -751,15 +216,7 @@ impl BlockHashReader for BlockchainProvider2 { start: BlockNumber, end: BlockNumber, ) -> ProviderResult> { - self.get_in_memory_or_storage_by_block_range_while( - start..end, - |db_provider, inclusive_range, _| { - db_provider - .canonical_hashes_range(*inclusive_range.start(), *inclusive_range.end() + 1) - }, - |block_state, _| Some(block_state.hash()), - |_| true, - ) + self.consistent_provider()?.canonical_hashes_range(start, end) } } @@ -777,11 +234,7 @@ impl BlockNumReader for BlockchainProvider2 { } fn block_number(&self, hash: B256) -> ProviderResult> { - self.get_in_memory_or_storage_by_block( - hash.into(), - |db_provider| db_provider.block_number(hash), - |block_state| Ok(Some(block_state.number())), - ) + self.consistent_provider()?.block_number(hash) } } @@ -801,28 +254,11 @@ impl BlockIdReader for BlockchainProvider2 { impl BlockReader for BlockchainProvider2 { fn find_block_by_hash(&self, hash: B256, source: BlockSource) -> ProviderResult> { - match source { - BlockSource::Any | BlockSource::Canonical => { - // Note: it's fine to return the unsealed block because the caller already has - // the hash - self.get_in_memory_or_storage_by_block( - hash.into(), - |db_provider| db_provider.find_block_by_hash(hash, source), - |block_state| Ok(Some(block_state.block_ref().block().clone().unseal())), - ) - } - BlockSource::Pending => { - Ok(self.canonical_in_memory_state.pending_block().map(|block| 
block.unseal())) - } - } + self.consistent_provider()?.find_block_by_hash(hash, source) } fn block(&self, id: BlockHashOrNumber) -> ProviderResult> { - self.get_in_memory_or_storage_by_block( - id, - |db_provider| db_provider.block(id), - |block_state| Ok(Some(block_state.block_ref().block().clone().unseal())), - ) + self.consistent_provider()?.block(id) } fn pending_block(&self) -> ProviderResult> { @@ -838,51 +274,14 @@ impl BlockReader for BlockchainProvider2 { } fn ommers(&self, id: BlockHashOrNumber) -> ProviderResult>> { - self.get_in_memory_or_storage_by_block( - id, - |db_provider| db_provider.ommers(id), - |block_state| { - if self.chain_spec().final_paris_total_difficulty(block_state.number()).is_some() { - return Ok(Some(Vec::new())) - } - - Ok(Some(block_state.block_ref().block().body.ommers.clone())) - }, - ) + self.consistent_provider()?.ommers(id) } fn block_body_indices( &self, number: BlockNumber, ) -> ProviderResult> { - self.get_in_memory_or_storage_by_block( - number.into(), - |db_provider| db_provider.block_body_indices(number), - |block_state| { - // Find the last block indices on database - let last_storage_block_number = block_state.anchor().number; - let mut stored_indices = self - .database - .block_body_indices(last_storage_block_number)? - .ok_or(ProviderError::BlockBodyIndicesNotFound(last_storage_block_number))?; - - // Prepare our block indices - stored_indices.first_tx_num = stored_indices.next_tx_num(); - stored_indices.tx_count = 0; - - // Iterate from the lowest block in memory until our target block - for state in block_state.chain().into_iter().rev() { - let block_tx_count = state.block_ref().block.body.transactions.len() as u64; - if state.block_ref().block().number == number { - stored_indices.tx_count = block_tx_count; - } else { - stored_indices.first_tx_num += block_tx_count; - } - } - - Ok(Some(stored_indices)) - }, - ) + self.consistent_provider()?.block_body_indices(number) } /// Returns the block with senders with matching number or hash from database. 
@@ -896,11 +295,7 @@ impl BlockReader for BlockchainProvider2 { id: BlockHashOrNumber, transaction_kind: TransactionVariant, ) -> ProviderResult> { - self.get_in_memory_or_storage_by_block( - id, - |db_provider| db_provider.block_with_senders(id, transaction_kind), - |block_state| Ok(Some(block_state.block_with_senders())), - ) + self.consistent_provider()?.block_with_senders(id, transaction_kind) } fn sealed_block_with_senders( @@ -908,259 +303,116 @@ impl BlockReader for BlockchainProvider2 { id: BlockHashOrNumber, transaction_kind: TransactionVariant, ) -> ProviderResult> { - self.get_in_memory_or_storage_by_block( - id, - |db_provider| db_provider.sealed_block_with_senders(id, transaction_kind), - |block_state| Ok(Some(block_state.sealed_block_with_senders())), - ) + self.consistent_provider()?.sealed_block_with_senders(id, transaction_kind) } fn block_range(&self, range: RangeInclusive) -> ProviderResult> { - self.get_in_memory_or_storage_by_block_range_while( - range, - |db_provider, range, _| db_provider.block_range(range), - |block_state, _| Some(block_state.block_ref().block().clone().unseal()), - |_| true, - ) + self.consistent_provider()?.block_range(range) } fn block_with_senders_range( &self, range: RangeInclusive, ) -> ProviderResult> { - self.get_in_memory_or_storage_by_block_range_while( - range, - |db_provider, range, _| db_provider.block_with_senders_range(range), - |block_state, _| Some(block_state.block_with_senders()), - |_| true, - ) + self.consistent_provider()?.block_with_senders_range(range) } fn sealed_block_with_senders_range( &self, range: RangeInclusive, ) -> ProviderResult> { - self.get_in_memory_or_storage_by_block_range_while( - range, - |db_provider, range, _| db_provider.sealed_block_with_senders_range(range), - |block_state, _| Some(block_state.sealed_block_with_senders()), - |_| true, - ) + self.consistent_provider()?.sealed_block_with_senders_range(range) } } impl TransactionsProvider for BlockchainProvider2 { fn transaction_id(&self, tx_hash: TxHash) -> ProviderResult> { - self.get_in_memory_or_storage_by_tx( - tx_hash.into(), - |db_provider| db_provider.transaction_id(tx_hash), - |_, tx_number, _| Ok(Some(tx_number)), - ) + self.consistent_provider()?.transaction_id(tx_hash) } fn transaction_by_id(&self, id: TxNumber) -> ProviderResult> { - self.get_in_memory_or_storage_by_tx( - id.into(), - |provider| provider.transaction_by_id(id), - |tx_index, _, block_state| { - Ok(block_state.block_ref().block().body.transactions.get(tx_index).cloned()) - }, - ) + self.consistent_provider()?.transaction_by_id(id) } fn transaction_by_id_no_hash( &self, id: TxNumber, ) -> ProviderResult> { - self.get_in_memory_or_storage_by_tx( - id.into(), - |provider| provider.transaction_by_id_no_hash(id), - |tx_index, _, block_state| { - Ok(block_state - .block_ref() - .block() - .body - .transactions - .get(tx_index) - .cloned() - .map(Into::into)) - }, - ) + self.consistent_provider()?.transaction_by_id_no_hash(id) } fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { - if let Some(tx) = self.canonical_in_memory_state.transaction_by_hash(hash) { - return Ok(Some(tx)) - } - - self.database.transaction_by_hash(hash) + self.consistent_provider()?.transaction_by_hash(hash) } fn transaction_by_hash_with_meta( &self, tx_hash: TxHash, ) -> ProviderResult> { - if let Some((tx, meta)) = - self.canonical_in_memory_state.transaction_by_hash_with_meta(tx_hash) - { - return Ok(Some((tx, meta))) - } - - self.database.transaction_by_hash_with_meta(tx_hash) + 
self.consistent_provider()?.transaction_by_hash_with_meta(tx_hash) } fn transaction_block(&self, id: TxNumber) -> ProviderResult> { - self.get_in_memory_or_storage_by_tx( - id.into(), - |provider| provider.transaction_block(id), - |_, _, block_state| Ok(Some(block_state.block_ref().block().number)), - ) + self.consistent_provider()?.transaction_block(id) } fn transactions_by_block( &self, id: BlockHashOrNumber, ) -> ProviderResult>> { - self.get_in_memory_or_storage_by_block( - id, - |provider| provider.transactions_by_block(id), - |block_state| Ok(Some(block_state.block_ref().block().body.transactions.clone())), - ) + self.consistent_provider()?.transactions_by_block(id) } fn transactions_by_block_range( &self, range: impl RangeBounds, ) -> ProviderResult>> { - self.get_in_memory_or_storage_by_block_range_while( - range, - |db_provider, range, _| db_provider.transactions_by_block_range(range), - |block_state, _| Some(block_state.block_ref().block().body.transactions.clone()), - |_| true, - ) + self.consistent_provider()?.transactions_by_block_range(range) } fn transactions_by_tx_range( &self, range: impl RangeBounds, ) -> ProviderResult> { - self.get_in_memory_or_storage_by_tx_range( - range, - |db_provider, db_range| db_provider.transactions_by_tx_range(db_range), - |index_range, block_state| { - Ok(block_state.block_ref().block().body.transactions[index_range] - .iter() - .cloned() - .map(Into::into) - .collect()) - }, - ) + self.consistent_provider()?.transactions_by_tx_range(range) } fn senders_by_tx_range( &self, range: impl RangeBounds, ) -> ProviderResult> { - self.get_in_memory_or_storage_by_tx_range( - range, - |db_provider, db_range| db_provider.senders_by_tx_range(db_range), - |index_range, block_state| Ok(block_state.block_ref().senders[index_range].to_vec()), - ) + self.consistent_provider()?.senders_by_tx_range(range) } fn transaction_sender(&self, id: TxNumber) -> ProviderResult> { - self.get_in_memory_or_storage_by_tx( - id.into(), - |provider| provider.transaction_sender(id), - |tx_index, _, block_state| Ok(block_state.block_ref().senders.get(tx_index).copied()), - ) + self.consistent_provider()?.transaction_sender(id) } } impl ReceiptProvider for BlockchainProvider2 { fn receipt(&self, id: TxNumber) -> ProviderResult> { - self.get_in_memory_or_storage_by_tx( - id.into(), - |provider| provider.receipt(id), - |tx_index, _, block_state| { - Ok(block_state.executed_block_receipts().get(tx_index).cloned()) - }, - ) + self.consistent_provider()?.receipt(id) } fn receipt_by_hash(&self, hash: TxHash) -> ProviderResult> { - for block_state in self.canonical_in_memory_state.canonical_chain() { - let executed_block = block_state.block_ref(); - let block = executed_block.block(); - let receipts = block_state.executed_block_receipts(); - - // assuming 1:1 correspondence between transactions and receipts - debug_assert_eq!( - block.body.transactions.len(), - receipts.len(), - "Mismatch between transaction and receipt count" - ); - - if let Some(tx_index) = block.body.transactions.iter().position(|tx| tx.hash() == hash) - { - // safe to use tx_index for receipts due to 1:1 correspondence - return Ok(receipts.get(tx_index).cloned()); - } - } - - self.database.receipt_by_hash(hash) + self.consistent_provider()?.receipt_by_hash(hash) } fn receipts_by_block(&self, block: BlockHashOrNumber) -> ProviderResult>> { - self.get_in_memory_or_storage_by_block( - block, - |db_provider| db_provider.receipts_by_block(block), - |block_state| Ok(Some(block_state.executed_block_receipts())), - ) + 
self.consistent_provider()?.receipts_by_block(block) } fn receipts_by_tx_range( &self, range: impl RangeBounds, ) -> ProviderResult> { - self.get_in_memory_or_storage_by_tx_range( - range, - |db_provider, db_range| db_provider.receipts_by_tx_range(db_range), - |index_range, block_state| { - Ok(block_state.executed_block_receipts().drain(index_range).collect()) - }, - ) + self.consistent_provider()?.receipts_by_tx_range(range) } } impl ReceiptProviderIdExt for BlockchainProvider2 { fn receipts_by_block_id(&self, block: BlockId) -> ProviderResult>> { - match block { - BlockId::Hash(rpc_block_hash) => { - let mut receipts = self.receipts_by_block(rpc_block_hash.block_hash.into())?; - if receipts.is_none() && !rpc_block_hash.require_canonical.unwrap_or(false) { - let block_state = self - .canonical_in_memory_state - .state_by_hash(rpc_block_hash.block_hash) - .ok_or(ProviderError::StateForHashNotFound(rpc_block_hash.block_hash))?; - receipts = Some(block_state.executed_block_receipts()); - } - Ok(receipts) - } - BlockId::Number(num_tag) => match num_tag { - BlockNumberOrTag::Pending => Ok(self - .canonical_in_memory_state - .pending_state() - .map(|block_state| block_state.executed_block_receipts())), - _ => { - if let Some(num) = self.convert_block_number(num_tag)? { - self.receipts_by_block(num.into()) - } else { - Ok(None) - } - } - }, - } + self.consistent_provider()?.receipts_by_block_id(block) } } @@ -1170,47 +422,25 @@ impl WithdrawalsProvider for BlockchainProvider2 { id: BlockHashOrNumber, timestamp: u64, ) -> ProviderResult> { - if !self.chain_spec().is_shanghai_active_at_timestamp(timestamp) { - return Ok(None) - } - - self.get_in_memory_or_storage_by_block( - id, - |db_provider| db_provider.withdrawals_by_block(id, timestamp), - |block_state| Ok(block_state.block_ref().block().body.withdrawals.clone()), - ) + self.consistent_provider()?.withdrawals_by_block(id, timestamp) } fn latest_withdrawal(&self) -> ProviderResult> { - let best_block_num = self.best_block_number()?; - - self.get_in_memory_or_storage_by_block( - best_block_num.into(), - |db_provider| db_provider.latest_withdrawal(), - |block_state| { - Ok(block_state - .block_ref() - .block() - .body - .withdrawals - .clone() - .and_then(|mut w| w.pop())) - }, - ) + self.consistent_provider()?.latest_withdrawal() } } impl StageCheckpointReader for BlockchainProvider2 { fn get_stage_checkpoint(&self, id: StageId) -> ProviderResult> { - self.database.provider()?.get_stage_checkpoint(id) + self.consistent_provider()?.get_stage_checkpoint(id) } fn get_stage_checkpoint_progress(&self, id: StageId) -> ProviderResult>> { - self.database.provider()?.get_stage_checkpoint_progress(id) + self.consistent_provider()?.get_stage_checkpoint_progress(id) } fn get_all_checkpoints(&self) -> ProviderResult> { - self.database.provider()?.get_all_checkpoints() + self.consistent_provider()?.get_all_checkpoints() } } @@ -1225,9 +455,7 @@ impl EvmEnvProvider for BlockchainProvider2 { where EvmConfig: ConfigureEvmEnv
, { - let hash = self.convert_number(at)?.ok_or(ProviderError::HeaderNotFound(at))?; - let header = self.header(&hash)?.ok_or(ProviderError::HeaderNotFound(at))?; - self.fill_env_with_header(cfg, block_env, &header, evm_config) + self.consistent_provider()?.fill_env_at(cfg, block_env, at, evm_config) } fn fill_env_with_header( @@ -1240,11 +468,7 @@ impl EvmEnvProvider for BlockchainProvider2 { where EvmConfig: ConfigureEvmEnv
, { - let total_difficulty = self - .header_td_by_number(header.number)? - .ok_or_else(|| ProviderError::HeaderNotFound(header.number.into()))?; - evm_config.fill_cfg_and_block_env(cfg, block_env, header, total_difficulty); - Ok(()) + self.consistent_provider()?.fill_env_with_header(cfg, block_env, header, evm_config) } fn fill_cfg_env_at( @@ -1256,9 +480,7 @@ impl EvmEnvProvider for BlockchainProvider2 { where EvmConfig: ConfigureEvmEnv
, { - let hash = self.convert_number(at)?.ok_or(ProviderError::HeaderNotFound(at))?; - let header = self.header(&hash)?.ok_or(ProviderError::HeaderNotFound(at))?; - self.fill_cfg_env_with_header(cfg, &header, evm_config) + self.consistent_provider()?.fill_cfg_env_at(cfg, at, evm_config) } fn fill_cfg_env_with_header( @@ -1270,11 +492,7 @@ impl EvmEnvProvider for BlockchainProvider2 { where EvmConfig: ConfigureEvmEnv
, { - let total_difficulty = self - .header_td_by_number(header.number)? - .ok_or_else(|| ProviderError::HeaderNotFound(header.number.into()))?; - evm_config.fill_cfg_env(cfg, header, total_difficulty); - Ok(()) + self.consistent_provider()?.fill_cfg_env_with_header(cfg, header, evm_config) } } @@ -1283,11 +501,11 @@ impl PruneCheckpointReader for BlockchainProvider2 { &self, segment: PruneSegment, ) -> ProviderResult> { - self.database.provider()?.get_prune_checkpoint(segment) + self.consistent_provider()?.get_prune_checkpoint(segment) } fn get_prune_checkpoints(&self) -> ProviderResult> { - self.database.provider()?.get_prune_checkpoints() + self.consistent_provider()?.get_prune_checkpoints() } } @@ -1318,8 +536,9 @@ impl StateProviderFactory for BlockchainProvider2 { block_number: BlockNumber, ) -> ProviderResult { trace!(target: "providers::blockchain", ?block_number, "Getting history by block number"); - self.ensure_canonical_block(block_number)?; - let hash = self + let provider = self.consistent_provider()?; + provider.ensure_canonical_block(block_number)?; + let hash = provider .block_hash(block_number)? .ok_or_else(|| ProviderError::HeaderNotFound(block_number.into()))?; self.history_by_block_hash(hash) @@ -1328,14 +547,11 @@ impl StateProviderFactory for BlockchainProvider2 { fn history_by_block_hash(&self, block_hash: BlockHash) -> ProviderResult { trace!(target: "providers::blockchain", ?block_hash, "Getting history by block hash"); - self.get_in_memory_or_storage_by_block( + self.consistent_provider()?.get_in_memory_or_storage_by_block( block_hash.into(), - |_| { - // TODO(joshie): port history_by_block_hash to DatabaseProvider and use db_provider - self.database.history_by_block_hash(block_hash) - }, + |_| self.database.history_by_block_hash(block_hash), |block_state| { - let state_provider = self.block_state_provider(&block_state)?; + let state_provider = self.block_state_provider(block_state)?; Ok(Box::new(state_provider)) }, ) @@ -1444,105 +660,35 @@ where } } -impl BlockReaderIdExt for BlockchainProvider2 +impl BlockReaderIdExt for BlockchainProvider2 where Self: BlockReader + ReceiptProviderIdExt, { fn block_by_id(&self, id: BlockId) -> ProviderResult> { - match id { - BlockId::Number(num) => self.block_by_number_or_tag(num), - BlockId::Hash(hash) => { - // TODO: should we only apply this for the RPCs that are listed in EIP-1898? - // so not at the provider level? 
- // if we decide to do this at a higher level, then we can make this an automatic - // trait impl - if Some(true) == hash.require_canonical { - // check the database, canonical blocks are only stored in the database - self.find_block_by_hash(hash.block_hash, BlockSource::Canonical) - } else { - self.block_by_hash(hash.block_hash) - } - } - } + self.consistent_provider()?.block_by_id(id) } fn header_by_number_or_tag(&self, id: BlockNumberOrTag) -> ProviderResult> { - Ok(match id { - BlockNumberOrTag::Latest => { - Some(self.canonical_in_memory_state.get_canonical_head().unseal()) - } - BlockNumberOrTag::Finalized => { - self.canonical_in_memory_state.get_finalized_header().map(|h| h.unseal()) - } - BlockNumberOrTag::Safe => { - self.canonical_in_memory_state.get_safe_header().map(|h| h.unseal()) - } - BlockNumberOrTag::Earliest => self.header_by_number(0)?, - BlockNumberOrTag::Pending => self.canonical_in_memory_state.pending_header(), - - BlockNumberOrTag::Number(num) => self.header_by_number(num)?, - }) + self.consistent_provider()?.header_by_number_or_tag(id) } fn sealed_header_by_number_or_tag( &self, id: BlockNumberOrTag, ) -> ProviderResult> { - match id { - BlockNumberOrTag::Latest => { - Ok(Some(self.canonical_in_memory_state.get_canonical_head())) - } - BlockNumberOrTag::Finalized => { - Ok(self.canonical_in_memory_state.get_finalized_header()) - } - BlockNumberOrTag::Safe => Ok(self.canonical_in_memory_state.get_safe_header()), - BlockNumberOrTag::Earliest => self.header_by_number(0)?.map_or_else( - || Ok(None), - |h| { - let sealed = h.seal_slow(); - let (header, seal) = sealed.into_parts(); - Ok(Some(SealedHeader::new(header, seal))) - }, - ), - BlockNumberOrTag::Pending => Ok(self.canonical_in_memory_state.pending_sealed_header()), - BlockNumberOrTag::Number(num) => self.header_by_number(num)?.map_or_else( - || Ok(None), - |h| { - let sealed = h.seal_slow(); - let (header, seal) = sealed.into_parts(); - Ok(Some(SealedHeader::new(header, seal))) - }, - ), - } + self.consistent_provider()?.sealed_header_by_number_or_tag(id) } fn sealed_header_by_id(&self, id: BlockId) -> ProviderResult> { - Ok(match id { - BlockId::Number(num) => self.sealed_header_by_number_or_tag(num)?, - BlockId::Hash(hash) => self.header(&hash.block_hash)?.map(|h| { - let sealed = h.seal_slow(); - let (header, seal) = sealed.into_parts(); - SealedHeader::new(header, seal) - }), - }) + self.consistent_provider()?.sealed_header_by_id(id) } fn header_by_id(&self, id: BlockId) -> ProviderResult> { - Ok(match id { - BlockId::Number(num) => self.header_by_number_or_tag(num)?, - BlockId::Hash(hash) => self.header(&hash.block_hash)?, - }) + self.consistent_provider()?.header_by_id(id) } fn ommers_by_id(&self, id: BlockId) -> ProviderResult>> { - match id { - BlockId::Number(num) => self.ommers_by_number_or_tag(num), - BlockId::Hash(hash) => { - // TODO: EIP-1898 question, see above - // here it is not handled - self.ommers(BlockHashOrNumber::Hash(hash.block_hash)) - } - } + self.consistent_provider()?.ommers_by_id(id) } } @@ -1569,49 +715,7 @@ impl StorageChangeSetReader for BlockchainProvider2 { &self, block_number: BlockNumber, ) -> ProviderResult> { - if let Some(state) = self.canonical_in_memory_state.state_by_number(block_number) { - let changesets = state - .block() - .execution_output - .bundle - .reverts - .clone() - .into_plain_state_reverts() - .storage - .into_iter() - .flatten() - .flat_map(|revert: PlainStorageRevert| { - revert.storage_revert.into_iter().map(move |(key, value)| { - ( - 
BlockNumberAddress((block_number, revert.address)), - StorageEntry { key: key.into(), value: value.to_previous_value() }, - ) - }) - }) - .collect(); - Ok(changesets) - } else { - // Perform checks on whether or not changesets exist for the block. - let provider = self.database.provider()?; - - // No prune checkpoint means history should exist and we should `unwrap_or(true)` - let storage_history_exists = provider - .get_prune_checkpoint(PruneSegment::StorageHistory)? - .and_then(|checkpoint| { - // return true if the block number is ahead of the prune checkpoint. - // - // The checkpoint stores the highest pruned block number, so we should make - // sure the block_number is strictly greater. - checkpoint.block_number.map(|checkpoint| block_number > checkpoint) - }) - .unwrap_or(true); - - if !storage_history_exists { - return Err(ProviderError::StateAtBlockPruned(block_number)) - } - - provider.storage_changeset(block_number) - } + self.consistent_provider()?.storage_changeset(block_number) } } @@ -1620,50 +724,14 @@ impl ChangeSetReader for BlockchainProvider2 { &self, block_number: BlockNumber, ) -> ProviderResult> { - if let Some(state) = self.canonical_in_memory_state.state_by_number(block_number) { - let changesets = state - .block_ref() - .execution_output - .bundle - .reverts - .clone() - .into_plain_state_reverts() - .accounts - .into_iter() - .flatten() - .map(|(address, info)| AccountBeforeTx { address, info: info.map(Into::into) }) - .collect(); - Ok(changesets) - } else { - // Perform checks on whether or not changesets exist for the block. - let provider = self.database.provider()?; - // No prune checkpoint means history should exist and we should `unwrap_or(true)` - let account_history_exists = provider - .get_prune_checkpoint(PruneSegment::AccountHistory)? - .and_then(|checkpoint| { - // return true if the block number is ahead of the prune checkpoint. - // - // The checkpoint stores the highest pruned block number, so we should make - // sure the block_number is strictly greater. - checkpoint.block_number.map(|checkpoint| block_number > checkpoint) - }) - .unwrap_or(true); - - if !account_history_exists { - return Err(ProviderError::StateAtBlockPruned(block_number)) - } - - provider.account_block_changeset(block_number) - } + self.consistent_provider()?.account_block_changeset(block_number) } } impl AccountReader for BlockchainProvider2 { /// Get basic account information. fn basic_account(&self, address: Address) -> ProviderResult> { - // use latest state provider - let state_provider = self.latest()?; - state_provider.basic_account(address) + self.consistent_provider()?.basic_account(address) } } @@ -1678,12 +746,7 @@ impl StateReader for BlockchainProvider2 { /// because the tree thread is responsible for modifying the [`CanonicalInMemoryState`] in the /// first place. 
fn get_state(&self, block: BlockNumber) -> ProviderResult> { - if let Some(state) = self.canonical_in_memory_state.state_by_number(block) { - let state = state.block_ref().execution_outcome().clone(); - Ok(Some(state)) - } else { - self.get_state(block..=block) - } + StateReader::get_state(&self.consistent_provider()?, block) } } diff --git a/crates/storage/provider/src/providers/consistent.rs b/crates/storage/provider/src/providers/consistent.rs new file mode 100644 index 000000000..d6847fa1b --- /dev/null +++ b/crates/storage/provider/src/providers/consistent.rs @@ -0,0 +1,1871 @@ +use super::{DatabaseProviderRO, ProviderFactory, ProviderNodeTypes}; +use crate::{ + providers::StaticFileProvider, AccountReader, BlockHashReader, BlockIdReader, BlockNumReader, + BlockReader, BlockReaderIdExt, BlockSource, ChainSpecProvider, ChangeSetReader, EvmEnvProvider, + HeaderProvider, ProviderError, PruneCheckpointReader, ReceiptProvider, ReceiptProviderIdExt, + StageCheckpointReader, StateReader, StaticFileProviderFactory, TransactionVariant, + TransactionsProvider, WithdrawalsProvider, +}; +use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag, HashOrNumber}; +use alloy_primitives::{Address, BlockHash, BlockNumber, Sealable, TxHash, TxNumber, B256, U256}; +use reth_chain_state::{BlockState, CanonicalInMemoryState, MemoryOverlayStateProviderRef}; +use reth_chainspec::{ChainInfo, EthereumHardforks}; +use reth_db::models::BlockNumberAddress; +use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices}; +use reth_evm::ConfigureEvmEnv; +use reth_execution_types::{BundleStateInit, ExecutionOutcome, RevertsInit}; +use reth_primitives::{ + Account, Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders, + SealedHeader, StorageEntry, TransactionMeta, TransactionSigned, TransactionSignedNoHash, + Withdrawal, Withdrawals, +}; +use reth_prune_types::{PruneCheckpoint, PruneSegment}; +use reth_stages_types::{StageCheckpoint, StageId}; +use reth_storage_api::{DatabaseProviderFactory, StateProvider, StorageChangeSetReader}; +use reth_storage_errors::provider::ProviderResult; +use revm::{ + db::states::PlainStorageRevert, + primitives::{BlockEnv, CfgEnvWithHandlerCfg}, +}; +use std::{ + collections::{hash_map, HashMap}, + ops::{Add, Bound, RangeBounds, RangeInclusive, Sub}, + sync::Arc, +}; +use tracing::trace; + +/// Type that interacts with a snapshot view of the blockchain (storage and in-memory) at time of +/// instantiation, EXCEPT for pending, safe and finalized block which might change while holding +/// this provider. +/// +/// CAUTION: Avoid holding this provider for too long or the inner database transaction will +/// time-out. +#[derive(Debug)] +pub struct ConsistentProvider { + /// Storage provider. + storage_provider: as DatabaseProviderFactory>::Provider, + /// Head block at time of [`Self`] creation + head_block: Option>, + /// In-memory canonical state. This is not a snapshot, and can change! Use with caution. + canonical_in_memory_state: CanonicalInMemoryState, +} + +impl ConsistentProvider { + /// Create a new provider using [`ProviderFactory`] and [`CanonicalInMemoryState`], + /// + /// Underneath it will take a snapshot by fetching [`CanonicalInMemoryState::head_state`] and + /// [`ProviderFactory::database_provider_ro`] effectively maintaining one single snapshotted + /// view of memory and database. 
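A minimal usage sketch of the constructor defined just below, assuming a `ProviderFactory` value named `factory` and a `CanonicalInMemoryState` value named `state` are already in scope and that the caller sits inside a function returning `ProviderResult`; the variable names and the queried block range are placeholders, and only the snapshot semantics described above are illustrated:

    // `factory`, `state` and the range 100..=110 are placeholder assumptions.
    // The in-memory head snapshot is captured before the read-only database
    // transaction is opened, so blocks being persisted concurrently cannot
    // open a gap between the two views.
    let provider = ConsistentProvider::new(factory, state)?;
    // Every read below observes the same storage + in-memory snapshot.
    let _headers = provider.headers_range(100..=110)?;
    // Drop the provider promptly; the inner RO transaction can time out if
    // it is held for too long.
    drop(provider);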
+ pub fn new( + storage_provider_factory: ProviderFactory, + state: CanonicalInMemoryState, + ) -> ProviderResult { + // Each one provides a snapshot at the time of instantiation, but its order matters. + // + // If we acquire first the database provider, it's possible that before the in-memory chain + // snapshot is instantiated, it will flush blocks to disk. This would + // mean that our database provider would not have access to the flushed blocks (since it's + // working under an older view), while the in-memory state may have deleted them + // entirely. Resulting in gaps on the range. + let head_block = state.head_state(); + let storage_provider = storage_provider_factory.database_provider_ro()?; + Ok(Self { storage_provider, head_block, canonical_in_memory_state: state }) + } + + // Helper function to convert range bounds + fn convert_range_bounds( + &self, + range: impl RangeBounds, + end_unbounded: impl FnOnce() -> T, + ) -> (T, T) + where + T: Copy + Add + Sub + From, + { + let start = match range.start_bound() { + Bound::Included(&n) => n, + Bound::Excluded(&n) => n + T::from(1u8), + Bound::Unbounded => T::from(0u8), + }; + + let end = match range.end_bound() { + Bound::Included(&n) => n, + Bound::Excluded(&n) => n - T::from(1u8), + Bound::Unbounded => end_unbounded(), + }; + + (start, end) + } + + /// Storage provider for latest block + fn latest_ref<'a>(&'a self) -> ProviderResult> { + trace!(target: "providers::blockchain", "Getting latest block state provider"); + + // use latest state provider if the head state exists + if let Some(state) = &self.head_block { + trace!(target: "providers::blockchain", "Using head state for latest state provider"); + Ok(self.block_state_provider_ref(state)?.boxed()) + } else { + trace!(target: "providers::blockchain", "Using database state for latest state provider"); + self.storage_provider.latest() + } + } + + fn history_by_block_hash_ref<'a>( + &'a self, + block_hash: BlockHash, + ) -> ProviderResult> { + trace!(target: "providers::blockchain", ?block_hash, "Getting history by block hash"); + + self.get_in_memory_or_storage_by_block( + block_hash.into(), + |_| self.storage_provider.history_by_block_hash(block_hash), + |block_state| { + let state_provider = self.block_state_provider_ref(block_state)?; + Ok(Box::new(state_provider)) + }, + ) + } + + /// Returns a state provider indexed by the given block number or tag. + fn state_by_block_number_ref<'a>( + &'a self, + number: BlockNumber, + ) -> ProviderResult> { + let hash = + self.block_hash(number)?.ok_or_else(|| ProviderError::HeaderNotFound(number.into()))?; + self.history_by_block_hash_ref(hash) + } + + /// Return the last N blocks of state, recreating the [`ExecutionOutcome`]. + /// + /// If the range is empty, or there are no blocks for the given range, then this returns `None`. + pub fn get_state( + &self, + range: RangeInclusive, + ) -> ProviderResult> { + if range.is_empty() { + return Ok(None) + } + let start_block_number = *range.start(); + let end_block_number = *range.end(); + + // We are not removing block meta as it is used to get block changesets. + let mut block_bodies = Vec::new(); + for block_num in range.clone() { + let block_body = self + .block_body_indices(block_num)? 
+ .ok_or(ProviderError::BlockBodyIndicesNotFound(block_num))?; + block_bodies.push((block_num, block_body)) + } + + // get transaction receipts + let Some(from_transaction_num) = block_bodies.first().map(|body| body.1.first_tx_num()) + else { + return Ok(None) + }; + let Some(to_transaction_num) = block_bodies.last().map(|body| body.1.last_tx_num()) else { + return Ok(None) + }; + + let mut account_changeset = Vec::new(); + for block_num in range.clone() { + let changeset = + self.account_block_changeset(block_num)?.into_iter().map(|elem| (block_num, elem)); + account_changeset.extend(changeset); + } + + let mut storage_changeset = Vec::new(); + for block_num in range { + let changeset = self.storage_changeset(block_num)?; + storage_changeset.extend(changeset); + } + + let (state, reverts) = + self.populate_bundle_state(account_changeset, storage_changeset, end_block_number)?; + + let mut receipt_iter = + self.receipts_by_tx_range(from_transaction_num..=to_transaction_num)?.into_iter(); + + let mut receipts = Vec::with_capacity(block_bodies.len()); + // loop break if we are at the end of the blocks. + for (_, block_body) in block_bodies { + let mut block_receipts = Vec::with_capacity(block_body.tx_count as usize); + for tx_num in block_body.tx_num_range() { + let receipt = receipt_iter + .next() + .ok_or_else(|| ProviderError::ReceiptNotFound(tx_num.into()))?; + block_receipts.push(Some(receipt)); + } + receipts.push(block_receipts); + } + + Ok(Some(ExecutionOutcome::new_init( + state, + reverts, + // We skip new contracts since we never delete them from the database + Vec::new(), + receipts.into(), + start_block_number, + Vec::new(), + ))) + } + + /// Populate a [`BundleStateInit`] and [`RevertsInit`] using cursors over the + /// [`reth_db::PlainAccountState`] and [`reth_db::PlainStorageState`] tables, based on the given + /// storage and account changesets. + fn populate_bundle_state( + &self, + account_changeset: Vec<(u64, AccountBeforeTx)>, + storage_changeset: Vec<(BlockNumberAddress, StorageEntry)>, + block_range_end: BlockNumber, + ) -> ProviderResult<(BundleStateInit, RevertsInit)> { + let mut state: BundleStateInit = HashMap::new(); + let mut reverts: RevertsInit = HashMap::new(); + let state_provider = self.state_by_block_number_ref(block_range_end)?; + + // add account changeset changes + for (block_number, account_before) in account_changeset.into_iter().rev() { + let AccountBeforeTx { info: old_info, address } = account_before; + match state.entry(address) { + hash_map::Entry::Vacant(entry) => { + let new_info = state_provider.basic_account(address)?; + entry.insert((old_info, new_info, HashMap::new())); + } + hash_map::Entry::Occupied(mut entry) => { + // overwrite old account state. + entry.get_mut().0 = old_info; + } + } + // insert old info into reverts. + reverts.entry(block_number).or_default().entry(address).or_default().0 = Some(old_info); + } + + // add storage changeset changes + for (block_and_address, old_storage) in storage_changeset.into_iter().rev() { + let BlockNumberAddress((block_number, address)) = block_and_address; + // get account state or insert from plain state. + let account_state = match state.entry(address) { + hash_map::Entry::Vacant(entry) => { + let present_info = state_provider.basic_account(address)?; + entry.insert((present_info, present_info, HashMap::new())) + } + hash_map::Entry::Occupied(entry) => entry.into_mut(), + }; + + // match storage. 
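                // Each storage slot maps to a `(old_value, new_value)` pair: `new_value` is
                // read from the plain state at `block_range_end`, while `old_value` keeps
                // being overwritten as the changesets are walked newest-to-oldest, so the
                // value that survives is the pre-image from before the whole range.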
+ match account_state.2.entry(old_storage.key) { + hash_map::Entry::Vacant(entry) => { + let new_storage_value = + state_provider.storage(address, old_storage.key)?.unwrap_or_default(); + entry.insert((old_storage.value, new_storage_value)); + } + hash_map::Entry::Occupied(mut entry) => { + entry.get_mut().0 = old_storage.value; + } + }; + + reverts + .entry(block_number) + .or_default() + .entry(address) + .or_default() + .1 + .push(old_storage); + } + + Ok((state, reverts)) + } + + /// Fetches a range of data from both in-memory state and persistent storage while a predicate + /// is met. + /// + /// Creates a snapshot of the in-memory chain state and database provider to prevent + /// inconsistencies. Splits the range into in-memory and storage sections, prioritizing + /// recent in-memory blocks in case of overlaps. + /// + /// * `fetch_db_range` function (`F`) provides access to the database provider, allowing the + /// user to retrieve the required items from the database using [`RangeInclusive`]. + /// * `map_block_state_item` function (`G`) provides each block of the range in the in-memory + /// state, allowing for selection or filtering for the desired data. + fn get_in_memory_or_storage_by_block_range_while( + &self, + range: impl RangeBounds, + fetch_db_range: F, + map_block_state_item: G, + mut predicate: P, + ) -> ProviderResult> + where + F: FnOnce( + &DatabaseProviderRO, + RangeInclusive, + &mut P, + ) -> ProviderResult>, + G: Fn(&BlockState, &mut P) -> Option, + P: FnMut(&T) -> bool, + { + // Each one provides a snapshot at the time of instantiation, but its order matters. + // + // If we acquire first the database provider, it's possible that before the in-memory chain + // snapshot is instantiated, it will flush blocks to disk. This would + // mean that our database provider would not have access to the flushed blocks (since it's + // working under an older view), while the in-memory state may have deleted them + // entirely. Resulting in gaps on the range. + let mut in_memory_chain = + self.head_block.as_ref().map(|b| b.chain().collect::>()).unwrap_or_default(); + let db_provider = &self.storage_provider; + + let (start, end) = self.convert_range_bounds(range, || { + // the first block is the highest one. + in_memory_chain + .first() + .map(|b| b.number()) + .unwrap_or_else(|| db_provider.last_block_number().unwrap_or_default()) + }); + + if start > end { + return Ok(vec![]) + } + + // Split range into storage_range and in-memory range. If the in-memory range is not + // necessary drop it early. + // + // The last block of `in_memory_chain` is the lowest block number. + let (in_memory, storage_range) = match in_memory_chain.last().as_ref().map(|b| b.number()) { + Some(lowest_memory_block) if lowest_memory_block <= end => { + let highest_memory_block = + in_memory_chain.first().as_ref().map(|b| b.number()).expect("qed"); + + // Database will for a time overlap with in-memory-chain blocks. In + // case of a re-org, it can mean that the database blocks are of a forked chain, and + // so, we should prioritize the in-memory overlapped blocks. 
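                // Worked example with hypothetical numbers: if storage holds blocks up to
                // 95, the in-memory chain covers 90..=100 (`lowest_memory_block` = 90,
                // `highest_memory_block` = 100) and the caller requests 80..=98, then
                // `storage_range` becomes 80..=89 and `in_memory_range` becomes 90..=98,
                // so the overlapping blocks 90..=95 are served from memory rather than
                // from the database.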
+ let in_memory_range = + lowest_memory_block.max(start)..=end.min(highest_memory_block); + + // If requested range is in the middle of the in-memory range, remove the necessary + // lowest blocks + in_memory_chain.truncate( + in_memory_chain + .len() + .saturating_sub(start.saturating_sub(lowest_memory_block) as usize), + ); + + let storage_range = + (lowest_memory_block > start).then(|| start..=lowest_memory_block - 1); + + (Some((in_memory_chain, in_memory_range)), storage_range) + } + _ => { + // Drop the in-memory chain so we don't hold blocks in memory. + drop(in_memory_chain); + + (None, Some(start..=end)) + } + }; + + let mut items = Vec::with_capacity((end - start + 1) as usize); + + if let Some(storage_range) = storage_range { + let mut db_items = fetch_db_range(db_provider, storage_range.clone(), &mut predicate)?; + items.append(&mut db_items); + + // The predicate was not met, if the number of items differs from the expected. So, we + // return what we have. + if items.len() as u64 != storage_range.end() - storage_range.start() + 1 { + return Ok(items) + } + } + + if let Some((in_memory_chain, in_memory_range)) = in_memory { + for (num, block) in in_memory_range.zip(in_memory_chain.into_iter().rev()) { + debug_assert!(num == block.number()); + if let Some(item) = map_block_state_item(block, &mut predicate) { + items.push(item); + } else { + break + } + } + } + + Ok(items) + } + + /// This uses a given [`BlockState`] to initialize a state provider for that block. + fn block_state_provider_ref( + &self, + state: &BlockState, + ) -> ProviderResult> { + let anchor_hash = state.anchor().hash; + let latest_historical = self.history_by_block_hash_ref(anchor_hash)?; + let in_memory = state.chain().map(|block_state| block_state.block()).collect(); + Ok(MemoryOverlayStateProviderRef::new(latest_historical, in_memory)) + } + + /// Fetches data from either in-memory state or persistent storage for a range of transactions. + /// + /// * `fetch_from_db`: has a `DatabaseProviderRO` and the storage specific range. + /// * `fetch_from_block_state`: has a [`RangeInclusive`] of elements that should be fetched from + /// [`BlockState`]. [`RangeInclusive`] is necessary to handle partial look-ups of a block. + fn get_in_memory_or_storage_by_tx_range( + &self, + range: impl RangeBounds, + fetch_from_db: S, + fetch_from_block_state: M, + ) -> ProviderResult> + where + S: FnOnce( + &DatabaseProviderRO, + RangeInclusive, + ) -> ProviderResult>, + M: Fn(RangeInclusive, &BlockState) -> ProviderResult>, + { + let in_mem_chain = self.head_block.iter().flat_map(|b| b.chain()).collect::>(); + let provider = &self.storage_provider; + + // Get the last block number stored in the storage which does NOT overlap with in-memory + // chain. + let last_database_block_number = in_mem_chain + .last() + .map(|b| Ok(b.anchor().number)) + .unwrap_or_else(|| provider.last_block_number())?; + + // Get the next tx number for the last block stored in the storage, which marks the start of + // the in-memory state. + let last_block_body_index = provider + .block_body_indices(last_database_block_number)? 
+ .ok_or(ProviderError::BlockBodyIndicesNotFound(last_database_block_number))?; + let mut in_memory_tx_num = last_block_body_index.next_tx_num(); + + let (start, end) = self.convert_range_bounds(range, || { + in_mem_chain + .iter() + .map(|b| b.block_ref().block().body.transactions.len() as u64) + .sum::() + + last_block_body_index.last_tx_num() + }); + + if start > end { + return Ok(vec![]) + } + + let mut tx_range = start..=end; + + // If the range is entirely before the first in-memory transaction number, fetch from + // storage + if *tx_range.end() < in_memory_tx_num { + return fetch_from_db(provider, tx_range); + } + + let mut items = Vec::with_capacity((tx_range.end() - tx_range.start() + 1) as usize); + + // If the range spans storage and memory, get elements from storage first. + if *tx_range.start() < in_memory_tx_num { + // Determine the range that needs to be fetched from storage. + let db_range = *tx_range.start()..=in_memory_tx_num.saturating_sub(1); + + // Set the remaining transaction range for in-memory + tx_range = in_memory_tx_num..=*tx_range.end(); + + items.extend(fetch_from_db(provider, db_range)?); + } + + // Iterate from the lowest block to the highest in-memory chain + for block_state in in_mem_chain.iter().rev() { + let block_tx_count = block_state.block_ref().block().body.transactions.len(); + let remaining = (tx_range.end() - tx_range.start() + 1) as usize; + + // If the transaction range start is equal or higher than the next block first + // transaction, advance + if *tx_range.start() >= in_memory_tx_num + block_tx_count as u64 { + in_memory_tx_num += block_tx_count as u64; + continue + } + + // This should only be more than 0 once, in case of a partial range inside a block. + let skip = (tx_range.start() - in_memory_tx_num) as usize; + + items.extend(fetch_from_block_state( + skip..=skip + (remaining.min(block_tx_count - skip) - 1), + block_state, + )?); + + in_memory_tx_num += block_tx_count as u64; + + // Break if the range has been fully processed + if in_memory_tx_num > *tx_range.end() { + break + } + + // Set updated range + tx_range = in_memory_tx_num..=*tx_range.end(); + } + + Ok(items) + } + + /// Fetches data from either in-memory state or persistent storage by transaction + /// [`HashOrNumber`]. + fn get_in_memory_or_storage_by_tx( + &self, + id: HashOrNumber, + fetch_from_db: S, + fetch_from_block_state: M, + ) -> ProviderResult> + where + S: FnOnce(&DatabaseProviderRO) -> ProviderResult>, + M: Fn(usize, TxNumber, &BlockState) -> ProviderResult>, + { + let in_mem_chain = self.head_block.iter().flat_map(|b| b.chain()).collect::>(); + let provider = &self.storage_provider; + + // Get the last block number stored in the database which does NOT overlap with in-memory + // chain. + let last_database_block_number = in_mem_chain + .last() + .map(|b| Ok(b.anchor().number)) + .unwrap_or_else(|| provider.last_block_number())?; + + // Get the next tx number for the last block stored in the database and consider it the + // first tx number of the in-memory state + let last_block_body_index = provider + .block_body_indices(last_database_block_number)? 
+ .ok_or(ProviderError::BlockBodyIndicesNotFound(last_database_block_number))?; + let mut in_memory_tx_num = last_block_body_index.next_tx_num(); + + // If the transaction number is less than the first in-memory transaction number, make a + // database lookup + if let HashOrNumber::Number(id) = id { + if id < in_memory_tx_num { + return fetch_from_db(provider) + } + } + + // Iterate from the lowest block to the highest + for block_state in in_mem_chain.iter().rev() { + let executed_block = block_state.block_ref(); + let block = executed_block.block(); + + for tx_index in 0..block.body.transactions.len() { + match id { + HashOrNumber::Hash(tx_hash) => { + if tx_hash == block.body.transactions[tx_index].hash() { + return fetch_from_block_state(tx_index, in_memory_tx_num, block_state) + } + } + HashOrNumber::Number(id) => { + if id == in_memory_tx_num { + return fetch_from_block_state(tx_index, in_memory_tx_num, block_state) + } + } + } + + in_memory_tx_num += 1; + } + } + + // Not found in-memory, so check database. + if let HashOrNumber::Hash(_) = id { + return fetch_from_db(provider) + } + + Ok(None) + } + + /// Fetches data from either in-memory state or persistent storage by [`BlockHashOrNumber`]. + pub(crate) fn get_in_memory_or_storage_by_block( + &self, + id: BlockHashOrNumber, + fetch_from_db: S, + fetch_from_block_state: M, + ) -> ProviderResult + where + S: FnOnce(&DatabaseProviderRO) -> ProviderResult, + M: Fn(&BlockState) -> ProviderResult, + { + if let Some(Some(block_state)) = self.head_block.as_ref().map(|b| b.block_on_chain(id)) { + return fetch_from_block_state(block_state) + } + fetch_from_db(&self.storage_provider) + } +} + +impl ConsistentProvider { + /// Ensures that the given block number is canonical (synced) + /// + /// This is a helper for guarding the `HistoricalStateProvider` against block numbers that are + /// out of range and would lead to invalid results, mainly during initial sync. + /// + /// Verifying the `block_number` would be expensive since we need to lookup sync table + /// Instead, we ensure that the `block_number` is within the range of the + /// [`Self::best_block_number`] which is updated when a block is synced. + #[inline] + pub(crate) fn ensure_canonical_block(&self, block_number: BlockNumber) -> ProviderResult<()> { + let latest = self.best_block_number()?; + if block_number > latest { + Err(ProviderError::HeaderNotFound(block_number.into())) + } else { + Ok(()) + } + } +} + +impl StaticFileProviderFactory for ConsistentProvider { + fn static_file_provider(&self) -> StaticFileProvider { + self.storage_provider.static_file_provider() + } +} + +impl HeaderProvider for ConsistentProvider { + fn header(&self, block_hash: &BlockHash) -> ProviderResult> { + self.get_in_memory_or_storage_by_block( + (*block_hash).into(), + |db_provider| db_provider.header(block_hash), + |block_state| Ok(Some(block_state.block_ref().block().header.header().clone())), + ) + } + + fn header_by_number(&self, num: BlockNumber) -> ProviderResult> { + self.get_in_memory_or_storage_by_block( + num.into(), + |db_provider| db_provider.header_by_number(num), + |block_state| Ok(Some(block_state.block_ref().block().header.header().clone())), + ) + } + + fn header_td(&self, hash: &BlockHash) -> ProviderResult> { + if let Some(num) = self.block_number(*hash)? 
{ + self.header_td_by_number(num) + } else { + Ok(None) + } + } + + fn header_td_by_number(&self, number: BlockNumber) -> ProviderResult> { + let number = if self.head_block.as_ref().map(|b| b.block_on_chain(number.into())).is_some() + { + // If the block exists in memory, we should return a TD for it. + // + // The canonical in memory state should only store post-merge blocks. Post-merge blocks + // have zero difficulty. This means we can use the total difficulty for the last + // finalized block number if present (so that we are not affected by reorgs), if not the + // last number in the database will be used. + if let Some(last_finalized_num_hash) = + self.canonical_in_memory_state.get_finalized_num_hash() + { + last_finalized_num_hash.number + } else { + self.last_block_number()? + } + } else { + // Otherwise, return what we have on disk for the input block + number + }; + self.storage_provider.header_td_by_number(number) + } + + fn headers_range(&self, range: impl RangeBounds) -> ProviderResult> { + self.get_in_memory_or_storage_by_block_range_while( + range, + |db_provider, range, _| db_provider.headers_range(range), + |block_state, _| Some(block_state.block_ref().block().header.header().clone()), + |_| true, + ) + } + + fn sealed_header(&self, number: BlockNumber) -> ProviderResult> { + self.get_in_memory_or_storage_by_block( + number.into(), + |db_provider| db_provider.sealed_header(number), + |block_state| Ok(Some(block_state.block_ref().block().header.clone())), + ) + } + + fn sealed_headers_range( + &self, + range: impl RangeBounds, + ) -> ProviderResult> { + self.get_in_memory_or_storage_by_block_range_while( + range, + |db_provider, range, _| db_provider.sealed_headers_range(range), + |block_state, _| Some(block_state.block_ref().block().header.clone()), + |_| true, + ) + } + + fn sealed_headers_while( + &self, + range: impl RangeBounds, + predicate: impl FnMut(&SealedHeader) -> bool, + ) -> ProviderResult> { + self.get_in_memory_or_storage_by_block_range_while( + range, + |db_provider, range, predicate| db_provider.sealed_headers_while(range, predicate), + |block_state, predicate| { + let header = &block_state.block_ref().block().header; + predicate(header).then(|| header.clone()) + }, + predicate, + ) + } +} + +impl BlockHashReader for ConsistentProvider { + fn block_hash(&self, number: u64) -> ProviderResult> { + self.get_in_memory_or_storage_by_block( + number.into(), + |db_provider| db_provider.block_hash(number), + |block_state| Ok(Some(block_state.hash())), + ) + } + + fn canonical_hashes_range( + &self, + start: BlockNumber, + end: BlockNumber, + ) -> ProviderResult> { + self.get_in_memory_or_storage_by_block_range_while( + start..end, + |db_provider, inclusive_range, _| { + db_provider + .canonical_hashes_range(*inclusive_range.start(), *inclusive_range.end() + 1) + }, + |block_state, _| Some(block_state.hash()), + |_| true, + ) + } +} + +impl BlockNumReader for ConsistentProvider { + fn chain_info(&self) -> ProviderResult { + let best_number = self.best_block_number()?; + Ok(ChainInfo { best_hash: self.block_hash(best_number)?.unwrap_or_default(), best_number }) + } + + fn best_block_number(&self) -> ProviderResult { + self.head_block.as_ref().map(|b| Ok(b.number())).unwrap_or_else(|| self.last_block_number()) + } + + fn last_block_number(&self) -> ProviderResult { + self.storage_provider.last_block_number() + } + + fn block_number(&self, hash: B256) -> ProviderResult> { + self.get_in_memory_or_storage_by_block( + hash.into(), + |db_provider| 
db_provider.block_number(hash), + |block_state| Ok(Some(block_state.number())), + ) + } +} + +impl BlockIdReader for ConsistentProvider { + fn pending_block_num_hash(&self) -> ProviderResult> { + Ok(self.canonical_in_memory_state.pending_block_num_hash()) + } + + fn safe_block_num_hash(&self) -> ProviderResult> { + Ok(self.canonical_in_memory_state.get_safe_num_hash()) + } + + fn finalized_block_num_hash(&self) -> ProviderResult> { + Ok(self.canonical_in_memory_state.get_finalized_num_hash()) + } +} + +impl BlockReader for ConsistentProvider { + fn find_block_by_hash(&self, hash: B256, source: BlockSource) -> ProviderResult> { + match source { + BlockSource::Any | BlockSource::Canonical => { + // Note: it's fine to return the unsealed block because the caller already has + // the hash + self.get_in_memory_or_storage_by_block( + hash.into(), + |db_provider| db_provider.find_block_by_hash(hash, source), + |block_state| Ok(Some(block_state.block_ref().block().clone().unseal())), + ) + } + BlockSource::Pending => { + Ok(self.canonical_in_memory_state.pending_block().map(|block| block.unseal())) + } + } + } + + fn block(&self, id: BlockHashOrNumber) -> ProviderResult> { + self.get_in_memory_or_storage_by_block( + id, + |db_provider| db_provider.block(id), + |block_state| Ok(Some(block_state.block_ref().block().clone().unseal())), + ) + } + + fn pending_block(&self) -> ProviderResult> { + Ok(self.canonical_in_memory_state.pending_block()) + } + + fn pending_block_with_senders(&self) -> ProviderResult> { + Ok(self.canonical_in_memory_state.pending_block_with_senders()) + } + + fn pending_block_and_receipts(&self) -> ProviderResult)>> { + Ok(self.canonical_in_memory_state.pending_block_and_receipts()) + } + + fn ommers(&self, id: BlockHashOrNumber) -> ProviderResult>> { + self.get_in_memory_or_storage_by_block( + id, + |db_provider| db_provider.ommers(id), + |block_state| { + if self.chain_spec().final_paris_total_difficulty(block_state.number()).is_some() { + return Ok(Some(Vec::new())) + } + + Ok(Some(block_state.block_ref().block().body.ommers.clone())) + }, + ) + } + + fn block_body_indices( + &self, + number: BlockNumber, + ) -> ProviderResult> { + self.get_in_memory_or_storage_by_block( + number.into(), + |db_provider| db_provider.block_body_indices(number), + |block_state| { + // Find the last block indices on database + let last_storage_block_number = block_state.anchor().number; + let mut stored_indices = self + .storage_provider + .block_body_indices(last_storage_block_number)? + .ok_or(ProviderError::BlockBodyIndicesNotFound(last_storage_block_number))?; + + // Prepare our block indices + stored_indices.first_tx_num = stored_indices.next_tx_num(); + stored_indices.tx_count = 0; + + // Iterate from the lowest block in memory until our target block + for state in block_state.chain().collect::>().into_iter().rev() { + let block_tx_count = state.block_ref().block.body.transactions.len() as u64; + if state.block_ref().block().number == number { + stored_indices.tx_count = block_tx_count; + } else { + stored_indices.first_tx_num += block_tx_count; + } + } + + Ok(Some(stored_indices)) + }, + ) + } + + /// Returns the block with senders with matching number or hash from database. + /// + /// **NOTE: If [`TransactionVariant::NoHash`] is provided then the transactions have invalid + /// hashes, since they would need to be calculated on the spot, and we want fast querying.** + /// + /// Returns `None` if block is not found. 
+ fn block_with_senders( + &self, + id: BlockHashOrNumber, + transaction_kind: TransactionVariant, + ) -> ProviderResult> { + self.get_in_memory_or_storage_by_block( + id, + |db_provider| db_provider.block_with_senders(id, transaction_kind), + |block_state| Ok(Some(block_state.block_with_senders())), + ) + } + + fn sealed_block_with_senders( + &self, + id: BlockHashOrNumber, + transaction_kind: TransactionVariant, + ) -> ProviderResult> { + self.get_in_memory_or_storage_by_block( + id, + |db_provider| db_provider.sealed_block_with_senders(id, transaction_kind), + |block_state| Ok(Some(block_state.sealed_block_with_senders())), + ) + } + + fn block_range(&self, range: RangeInclusive) -> ProviderResult> { + self.get_in_memory_or_storage_by_block_range_while( + range, + |db_provider, range, _| db_provider.block_range(range), + |block_state, _| Some(block_state.block_ref().block().clone().unseal()), + |_| true, + ) + } + + fn block_with_senders_range( + &self, + range: RangeInclusive, + ) -> ProviderResult> { + self.get_in_memory_or_storage_by_block_range_while( + range, + |db_provider, range, _| db_provider.block_with_senders_range(range), + |block_state, _| Some(block_state.block_with_senders()), + |_| true, + ) + } + + fn sealed_block_with_senders_range( + &self, + range: RangeInclusive, + ) -> ProviderResult> { + self.get_in_memory_or_storage_by_block_range_while( + range, + |db_provider, range, _| db_provider.sealed_block_with_senders_range(range), + |block_state, _| Some(block_state.sealed_block_with_senders()), + |_| true, + ) + } +} + +impl TransactionsProvider for ConsistentProvider { + fn transaction_id(&self, tx_hash: TxHash) -> ProviderResult> { + self.get_in_memory_or_storage_by_tx( + tx_hash.into(), + |db_provider| db_provider.transaction_id(tx_hash), + |_, tx_number, _| Ok(Some(tx_number)), + ) + } + + fn transaction_by_id(&self, id: TxNumber) -> ProviderResult> { + self.get_in_memory_or_storage_by_tx( + id.into(), + |provider| provider.transaction_by_id(id), + |tx_index, _, block_state| { + Ok(block_state.block_ref().block().body.transactions.get(tx_index).cloned()) + }, + ) + } + + fn transaction_by_id_no_hash( + &self, + id: TxNumber, + ) -> ProviderResult> { + self.get_in_memory_or_storage_by_tx( + id.into(), + |provider| provider.transaction_by_id_no_hash(id), + |tx_index, _, block_state| { + Ok(block_state + .block_ref() + .block() + .body + .transactions + .get(tx_index) + .cloned() + .map(Into::into)) + }, + ) + } + + fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { + if let Some(tx) = self.head_block.as_ref().and_then(|b| b.transaction_on_chain(hash)) { + return Ok(Some(tx)) + } + + self.storage_provider.transaction_by_hash(hash) + } + + fn transaction_by_hash_with_meta( + &self, + tx_hash: TxHash, + ) -> ProviderResult> { + if let Some((tx, meta)) = + self.head_block.as_ref().and_then(|b| b.transaction_meta_on_chain(tx_hash)) + { + return Ok(Some((tx, meta))) + } + + self.storage_provider.transaction_by_hash_with_meta(tx_hash) + } + + fn transaction_block(&self, id: TxNumber) -> ProviderResult> { + self.get_in_memory_or_storage_by_tx( + id.into(), + |provider| provider.transaction_block(id), + |_, _, block_state| Ok(Some(block_state.block_ref().block().number)), + ) + } + + fn transactions_by_block( + &self, + id: BlockHashOrNumber, + ) -> ProviderResult>> { + self.get_in_memory_or_storage_by_block( + id, + |provider| provider.transactions_by_block(id), + |block_state| Ok(Some(block_state.block_ref().block().body.transactions.clone())), + ) + } + + fn 
transactions_by_block_range( + &self, + range: impl RangeBounds, + ) -> ProviderResult>> { + self.get_in_memory_or_storage_by_block_range_while( + range, + |db_provider, range, _| db_provider.transactions_by_block_range(range), + |block_state, _| Some(block_state.block_ref().block().body.transactions.clone()), + |_| true, + ) + } + + fn transactions_by_tx_range( + &self, + range: impl RangeBounds, + ) -> ProviderResult> { + self.get_in_memory_or_storage_by_tx_range( + range, + |db_provider, db_range| db_provider.transactions_by_tx_range(db_range), + |index_range, block_state| { + Ok(block_state.block_ref().block().body.transactions[index_range] + .iter() + .cloned() + .map(Into::into) + .collect()) + }, + ) + } + + fn senders_by_tx_range( + &self, + range: impl RangeBounds, + ) -> ProviderResult> { + self.get_in_memory_or_storage_by_tx_range( + range, + |db_provider, db_range| db_provider.senders_by_tx_range(db_range), + |index_range, block_state| Ok(block_state.block_ref().senders[index_range].to_vec()), + ) + } + + fn transaction_sender(&self, id: TxNumber) -> ProviderResult> { + self.get_in_memory_or_storage_by_tx( + id.into(), + |provider| provider.transaction_sender(id), + |tx_index, _, block_state| Ok(block_state.block_ref().senders.get(tx_index).copied()), + ) + } +} + +impl ReceiptProvider for ConsistentProvider { + fn receipt(&self, id: TxNumber) -> ProviderResult> { + self.get_in_memory_or_storage_by_tx( + id.into(), + |provider| provider.receipt(id), + |tx_index, _, block_state| { + Ok(block_state.executed_block_receipts().get(tx_index).cloned()) + }, + ) + } + + fn receipt_by_hash(&self, hash: TxHash) -> ProviderResult> { + for block_state in self.head_block.iter().flat_map(|b| b.chain()) { + let executed_block = block_state.block_ref(); + let block = executed_block.block(); + let receipts = block_state.executed_block_receipts(); + + // assuming 1:1 correspondence between transactions and receipts + debug_assert_eq!( + block.body.transactions.len(), + receipts.len(), + "Mismatch between transaction and receipt count" + ); + + if let Some(tx_index) = block.body.transactions.iter().position(|tx| tx.hash() == hash) + { + // safe to use tx_index for receipts due to 1:1 correspondence + return Ok(receipts.get(tx_index).cloned()); + } + } + + self.storage_provider.receipt_by_hash(hash) + } + + fn receipts_by_block(&self, block: BlockHashOrNumber) -> ProviderResult>> { + self.get_in_memory_or_storage_by_block( + block, + |db_provider| db_provider.receipts_by_block(block), + |block_state| Ok(Some(block_state.executed_block_receipts())), + ) + } + + fn receipts_by_tx_range( + &self, + range: impl RangeBounds, + ) -> ProviderResult> { + self.get_in_memory_or_storage_by_tx_range( + range, + |db_provider, db_range| db_provider.receipts_by_tx_range(db_range), + |index_range, block_state| { + Ok(block_state.executed_block_receipts().drain(index_range).collect()) + }, + ) + } +} + +impl ReceiptProviderIdExt for ConsistentProvider { + fn receipts_by_block_id(&self, block: BlockId) -> ProviderResult>> { + match block { + BlockId::Hash(rpc_block_hash) => { + let mut receipts = self.receipts_by_block(rpc_block_hash.block_hash.into())?; + if receipts.is_none() && !rpc_block_hash.require_canonical.unwrap_or(false) { + if let Some(state) = self + .head_block + .as_ref() + .and_then(|b| b.block_on_chain(rpc_block_hash.block_hash.into())) + { + receipts = Some(state.executed_block_receipts()); + } + } + Ok(receipts) + } + BlockId::Number(num_tag) => match num_tag { + BlockNumberOrTag::Pending => 
Ok(self + .canonical_in_memory_state + .pending_state() + .map(|block_state| block_state.executed_block_receipts())), + _ => { + if let Some(num) = self.convert_block_number(num_tag)? { + self.receipts_by_block(num.into()) + } else { + Ok(None) + } + } + }, + } + } +} + +impl WithdrawalsProvider for ConsistentProvider { + fn withdrawals_by_block( + &self, + id: BlockHashOrNumber, + timestamp: u64, + ) -> ProviderResult> { + if !self.chain_spec().is_shanghai_active_at_timestamp(timestamp) { + return Ok(None) + } + + self.get_in_memory_or_storage_by_block( + id, + |db_provider| db_provider.withdrawals_by_block(id, timestamp), + |block_state| Ok(block_state.block_ref().block().body.withdrawals.clone()), + ) + } + + fn latest_withdrawal(&self) -> ProviderResult> { + let best_block_num = self.best_block_number()?; + + self.get_in_memory_or_storage_by_block( + best_block_num.into(), + |db_provider| db_provider.latest_withdrawal(), + |block_state| { + Ok(block_state + .block_ref() + .block() + .body + .withdrawals + .clone() + .and_then(|mut w| w.pop())) + }, + ) + } +} + +impl StageCheckpointReader for ConsistentProvider { + fn get_stage_checkpoint(&self, id: StageId) -> ProviderResult> { + self.storage_provider.get_stage_checkpoint(id) + } + + fn get_stage_checkpoint_progress(&self, id: StageId) -> ProviderResult>> { + self.storage_provider.get_stage_checkpoint_progress(id) + } + + fn get_all_checkpoints(&self) -> ProviderResult> { + self.storage_provider.get_all_checkpoints() + } +} + +impl EvmEnvProvider for ConsistentProvider { + fn fill_env_at( + &self, + cfg: &mut CfgEnvWithHandlerCfg, + block_env: &mut BlockEnv, + at: BlockHashOrNumber, + evm_config: EvmConfig, + ) -> ProviderResult<()> + where + EvmConfig: ConfigureEvmEnv
, + { + let hash = self.convert_number(at)?.ok_or(ProviderError::HeaderNotFound(at))?; + let header = self.header(&hash)?.ok_or(ProviderError::HeaderNotFound(at))?; + self.fill_env_with_header(cfg, block_env, &header, evm_config) + } + + fn fill_env_with_header( + &self, + cfg: &mut CfgEnvWithHandlerCfg, + block_env: &mut BlockEnv, + header: &Header, + evm_config: EvmConfig, + ) -> ProviderResult<()> + where + EvmConfig: ConfigureEvmEnv
, + { + let total_difficulty = self + .header_td_by_number(header.number)? + .ok_or_else(|| ProviderError::HeaderNotFound(header.number.into()))?; + evm_config.fill_cfg_and_block_env(cfg, block_env, header, total_difficulty); + Ok(()) + } + + fn fill_cfg_env_at( + &self, + cfg: &mut CfgEnvWithHandlerCfg, + at: BlockHashOrNumber, + evm_config: EvmConfig, + ) -> ProviderResult<()> + where + EvmConfig: ConfigureEvmEnv
, + { + let hash = self.convert_number(at)?.ok_or(ProviderError::HeaderNotFound(at))?; + let header = self.header(&hash)?.ok_or(ProviderError::HeaderNotFound(at))?; + self.fill_cfg_env_with_header(cfg, &header, evm_config) + } + + fn fill_cfg_env_with_header( + &self, + cfg: &mut CfgEnvWithHandlerCfg, + header: &Header, + evm_config: EvmConfig, + ) -> ProviderResult<()> + where + EvmConfig: ConfigureEvmEnv
, + { + let total_difficulty = self + .header_td_by_number(header.number)? + .ok_or_else(|| ProviderError::HeaderNotFound(header.number.into()))?; + evm_config.fill_cfg_env(cfg, header, total_difficulty); + Ok(()) + } +} + +impl PruneCheckpointReader for ConsistentProvider { + fn get_prune_checkpoint( + &self, + segment: PruneSegment, + ) -> ProviderResult> { + self.storage_provider.get_prune_checkpoint(segment) + } + + fn get_prune_checkpoints(&self) -> ProviderResult> { + self.storage_provider.get_prune_checkpoints() + } +} + +impl ChainSpecProvider for ConsistentProvider { + type ChainSpec = N::ChainSpec; + + fn chain_spec(&self) -> Arc { + ChainSpecProvider::chain_spec(&self.storage_provider) + } +} + +impl BlockReaderIdExt for ConsistentProvider { + fn block_by_id(&self, id: BlockId) -> ProviderResult> { + match id { + BlockId::Number(num) => self.block_by_number_or_tag(num), + BlockId::Hash(hash) => { + // TODO: should we only apply this for the RPCs that are listed in EIP-1898? + // so not at the provider level? + // if we decide to do this at a higher level, then we can make this an automatic + // trait impl + if Some(true) == hash.require_canonical { + // check the database, canonical blocks are only stored in the database + self.find_block_by_hash(hash.block_hash, BlockSource::Canonical) + } else { + self.block_by_hash(hash.block_hash) + } + } + } + } + + fn header_by_number_or_tag(&self, id: BlockNumberOrTag) -> ProviderResult> { + Ok(match id { + BlockNumberOrTag::Latest => { + Some(self.canonical_in_memory_state.get_canonical_head().unseal()) + } + BlockNumberOrTag::Finalized => { + self.canonical_in_memory_state.get_finalized_header().map(|h| h.unseal()) + } + BlockNumberOrTag::Safe => { + self.canonical_in_memory_state.get_safe_header().map(|h| h.unseal()) + } + BlockNumberOrTag::Earliest => self.header_by_number(0)?, + BlockNumberOrTag::Pending => self.canonical_in_memory_state.pending_header(), + + BlockNumberOrTag::Number(num) => self.header_by_number(num)?, + }) + } + + fn sealed_header_by_number_or_tag( + &self, + id: BlockNumberOrTag, + ) -> ProviderResult> { + match id { + BlockNumberOrTag::Latest => { + Ok(Some(self.canonical_in_memory_state.get_canonical_head())) + } + BlockNumberOrTag::Finalized => { + Ok(self.canonical_in_memory_state.get_finalized_header()) + } + BlockNumberOrTag::Safe => Ok(self.canonical_in_memory_state.get_safe_header()), + BlockNumberOrTag::Earliest => self.header_by_number(0)?.map_or_else( + || Ok(None), + |h| { + let sealed = h.seal_slow(); + let (header, seal) = sealed.into_parts(); + Ok(Some(SealedHeader::new(header, seal))) + }, + ), + BlockNumberOrTag::Pending => Ok(self.canonical_in_memory_state.pending_sealed_header()), + BlockNumberOrTag::Number(num) => self.header_by_number(num)?.map_or_else( + || Ok(None), + |h| { + let sealed = h.seal_slow(); + let (header, seal) = sealed.into_parts(); + Ok(Some(SealedHeader::new(header, seal))) + }, + ), + } + } + + fn sealed_header_by_id(&self, id: BlockId) -> ProviderResult> { + Ok(match id { + BlockId::Number(num) => self.sealed_header_by_number_or_tag(num)?, + BlockId::Hash(hash) => self.header(&hash.block_hash)?.map(|h| { + let sealed = h.seal_slow(); + let (header, seal) = sealed.into_parts(); + SealedHeader::new(header, seal) + }), + }) + } + + fn header_by_id(&self, id: BlockId) -> ProviderResult> { + Ok(match id { + BlockId::Number(num) => self.header_by_number_or_tag(num)?, + BlockId::Hash(hash) => self.header(&hash.block_hash)?, + }) + } + + fn ommers_by_id(&self, id: BlockId) -> 
ProviderResult>> { + match id { + BlockId::Number(num) => self.ommers_by_number_or_tag(num), + BlockId::Hash(hash) => { + // TODO: EIP-1898 question, see above + // here it is not handled + self.ommers(BlockHashOrNumber::Hash(hash.block_hash)) + } + } + } +} + +impl StorageChangeSetReader for ConsistentProvider { + fn storage_changeset( + &self, + block_number: BlockNumber, + ) -> ProviderResult> { + if let Some(state) = + self.head_block.as_ref().and_then(|b| b.block_on_chain(block_number.into())) + { + let changesets = state + .block() + .execution_output + .bundle + .reverts + .clone() + .into_plain_state_reverts() + .storage + .into_iter() + .flatten() + .flat_map(|revert: PlainStorageRevert| { + revert.storage_revert.into_iter().map(move |(key, value)| { + ( + BlockNumberAddress((block_number, revert.address)), + StorageEntry { key: key.into(), value: value.to_previous_value() }, + ) + }) + }) + .collect(); + Ok(changesets) + } else { + // Perform checks on whether or not changesets exist for the block. + + // No prune checkpoint means history should exist and we should `unwrap_or(true)` + let storage_history_exists = self + .storage_provider + .get_prune_checkpoint(PruneSegment::StorageHistory)? + .and_then(|checkpoint| { + // return true if the block number is ahead of the prune checkpoint. + // + // The checkpoint stores the highest pruned block number, so we should make + // sure the block_number is strictly greater. + checkpoint.block_number.map(|checkpoint| block_number > checkpoint) + }) + .unwrap_or(true); + + if !storage_history_exists { + return Err(ProviderError::StateAtBlockPruned(block_number)) + } + + self.storage_provider.storage_changeset(block_number) + } + } +} + +impl ChangeSetReader for ConsistentProvider { + fn account_block_changeset( + &self, + block_number: BlockNumber, + ) -> ProviderResult> { + if let Some(state) = + self.head_block.as_ref().and_then(|b| b.block_on_chain(block_number.into())) + { + let changesets = state + .block_ref() + .execution_output + .bundle + .reverts + .clone() + .into_plain_state_reverts() + .accounts + .into_iter() + .flatten() + .map(|(address, info)| AccountBeforeTx { address, info: info.map(Into::into) }) + .collect(); + Ok(changesets) + } else { + // Perform checks on whether or not changesets exist for the block. + + // No prune checkpoint means history should exist and we should `unwrap_or(true)` + let account_history_exists = self + .storage_provider + .get_prune_checkpoint(PruneSegment::AccountHistory)? + .and_then(|checkpoint| { + // return true if the block number is ahead of the prune checkpoint. + // + // The checkpoint stores the highest pruned block number, so we should make + // sure the block_number is strictly greater. + checkpoint.block_number.map(|checkpoint| block_number > checkpoint) + }) + .unwrap_or(true); + + if !account_history_exists { + return Err(ProviderError::StateAtBlockPruned(block_number)) + } + + self.storage_provider.account_block_changeset(block_number) + } + } +} + +impl AccountReader for ConsistentProvider { + /// Get basic account information. + fn basic_account(&self, address: Address) -> ProviderResult> { + // use latest state provider + let state_provider = self.latest_ref()?; + state_provider.basic_account(address) + } +} + +impl StateReader for ConsistentProvider { + /// Re-constructs the [`ExecutionOutcome`] from in-memory and database state, if necessary. + /// + /// If data for the block does not exist, this will return [`None`]. 
+ /// + /// NOTE: This cannot be called safely in a loop outside of the blockchain tree thread. This is + /// because the [`CanonicalInMemoryState`] could change during a reorg, causing results to be + /// inconsistent. Currently this can safely be called within the blockchain tree thread, + /// because the tree thread is responsible for modifying the [`CanonicalInMemoryState`] in the + /// first place. + fn get_state(&self, block: BlockNumber) -> ProviderResult> { + if let Some(state) = self.head_block.as_ref().and_then(|b| b.block_on_chain(block.into())) { + let state = state.block_ref().execution_outcome().clone(); + Ok(Some(state)) + } else { + Self::get_state(self, block..=block) + } + } +} + +#[cfg(test)] +mod tests { + use crate::{ + providers::blockchain_provider::BlockchainProvider2, + test_utils::create_test_provider_factory, BlockWriter, + }; + use alloy_eips::BlockHashOrNumber; + use alloy_primitives::B256; + use itertools::Itertools; + use rand::Rng; + use reth_chain_state::{ExecutedBlock, NewCanonicalChain}; + use reth_db::models::AccountBeforeTx; + use reth_execution_types::ExecutionOutcome; + use reth_primitives::SealedBlock; + use reth_storage_api::{BlockReader, BlockSource, ChangeSetReader}; + use reth_testing_utils::generators::{ + self, random_block_range, random_changeset_range, random_eoa_accounts, BlockRangeParams, + }; + use revm::db::BundleState; + use std::{ + ops::{Bound, Range, RangeBounds}, + sync::Arc, + }; + + const TEST_BLOCKS_COUNT: usize = 5; + + fn random_blocks( + rng: &mut impl Rng, + database_blocks: usize, + in_memory_blocks: usize, + requests_count: Option>, + withdrawals_count: Option>, + tx_count: impl RangeBounds, + ) -> (Vec, Vec) { + let block_range = (database_blocks + in_memory_blocks - 1) as u64; + + let tx_start = match tx_count.start_bound() { + Bound::Included(&n) | Bound::Excluded(&n) => n, + Bound::Unbounded => u8::MIN, + }; + let tx_end = match tx_count.end_bound() { + Bound::Included(&n) | Bound::Excluded(&n) => n + 1, + Bound::Unbounded => u8::MAX, + }; + + let blocks = random_block_range( + rng, + 0..=block_range, + BlockRangeParams { + parent: Some(B256::ZERO), + tx_count: tx_start..tx_end, + requests_count, + withdrawals_count, + }, + ); + let (database_blocks, in_memory_blocks) = blocks.split_at(database_blocks); + (database_blocks.to_vec(), in_memory_blocks.to_vec()) + } + + #[test] + fn test_block_reader_find_block_by_hash() -> eyre::Result<()> { + // Initialize random number generator and provider factory + let mut rng = generators::rng(); + let factory = create_test_provider_factory(); + + // Generate 10 random blocks and split into database and in-memory blocks + let blocks = random_block_range( + &mut rng, + 0..=10, + BlockRangeParams { parent: Some(B256::ZERO), tx_count: 0..1, ..Default::default() }, + ); + let (database_blocks, in_memory_blocks) = blocks.split_at(5); + + // Insert first 5 blocks into the database + let provider_rw = factory.provider_rw()?; + for block in database_blocks { + provider_rw.insert_historical_block( + block.clone().seal_with_senders().expect("failed to seal block with senders"), + )?; + } + provider_rw.commit()?; + + // Create a new provider + let provider = BlockchainProvider2::new(factory)?; + let consistent_provider = provider.consistent_provider()?; + + // Useful blocks + let first_db_block = database_blocks.first().unwrap(); + let first_in_mem_block = in_memory_blocks.first().unwrap(); + let last_in_mem_block = in_memory_blocks.last().unwrap(); + + // No block in memory before setting in 
memory state + assert_eq!( + consistent_provider.find_block_by_hash(first_in_mem_block.hash(), BlockSource::Any)?, + None + ); + assert_eq!( + consistent_provider + .find_block_by_hash(first_in_mem_block.hash(), BlockSource::Canonical)?, + None + ); + // No pending block in memory + assert_eq!( + consistent_provider + .find_block_by_hash(first_in_mem_block.hash(), BlockSource::Pending)?, + None + ); + + // Insert first block into the in-memory state + let in_memory_block_senders = + first_in_mem_block.senders().expect("failed to recover senders"); + let chain = NewCanonicalChain::Commit { + new: vec![ExecutedBlock::new( + Arc::new(first_in_mem_block.clone()), + Arc::new(in_memory_block_senders), + Default::default(), + Default::default(), + Default::default(), + )], + }; + consistent_provider.canonical_in_memory_state.update_chain(chain); + let consistent_provider = provider.consistent_provider()?; + + // Now the block should be found in memory + assert_eq!( + consistent_provider.find_block_by_hash(first_in_mem_block.hash(), BlockSource::Any)?, + Some(first_in_mem_block.clone().into()) + ); + assert_eq!( + consistent_provider + .find_block_by_hash(first_in_mem_block.hash(), BlockSource::Canonical)?, + Some(first_in_mem_block.clone().into()) + ); + + // Find the first block in database by hash + assert_eq!( + consistent_provider.find_block_by_hash(first_db_block.hash(), BlockSource::Any)?, + Some(first_db_block.clone().into()) + ); + assert_eq!( + consistent_provider + .find_block_by_hash(first_db_block.hash(), BlockSource::Canonical)?, + Some(first_db_block.clone().into()) + ); + + // No pending block in database + assert_eq!( + consistent_provider.find_block_by_hash(first_db_block.hash(), BlockSource::Pending)?, + None + ); + + // Insert the last block into the pending state + provider.canonical_in_memory_state.set_pending_block(ExecutedBlock { + block: Arc::new(last_in_mem_block.clone()), + senders: Default::default(), + execution_output: Default::default(), + hashed_state: Default::default(), + trie: Default::default(), + }); + + // Now the last block should be found in memory + assert_eq!( + consistent_provider + .find_block_by_hash(last_in_mem_block.hash(), BlockSource::Pending)?, + Some(last_in_mem_block.clone().into()) + ); + + Ok(()) + } + + #[test] + fn test_block_reader_block() -> eyre::Result<()> { + // Initialize random number generator and provider factory + let mut rng = generators::rng(); + let factory = create_test_provider_factory(); + + // Generate 10 random blocks and split into database and in-memory blocks + let blocks = random_block_range( + &mut rng, + 0..=10, + BlockRangeParams { parent: Some(B256::ZERO), tx_count: 0..1, ..Default::default() }, + ); + let (database_blocks, in_memory_blocks) = blocks.split_at(5); + + // Insert first 5 blocks into the database + let provider_rw = factory.provider_rw()?; + for block in database_blocks { + provider_rw.insert_historical_block( + block.clone().seal_with_senders().expect("failed to seal block with senders"), + )?; + } + provider_rw.commit()?; + + // Create a new provider + let provider = BlockchainProvider2::new(factory)?; + let consistent_provider = provider.consistent_provider()?; + + // First in memory block + let first_in_mem_block = in_memory_blocks.first().unwrap(); + // First database block + let first_db_block = database_blocks.first().unwrap(); + + // First in memory block should not be found yet as not integrated to the in-memory state + assert_eq!( + 
consistent_provider.block(BlockHashOrNumber::Hash(first_in_mem_block.hash()))?, + None + ); + assert_eq!( + consistent_provider.block(BlockHashOrNumber::Number(first_in_mem_block.number))?, + None + ); + + // Insert first block into the in-memory state + let in_memory_block_senders = + first_in_mem_block.senders().expect("failed to recover senders"); + let chain = NewCanonicalChain::Commit { + new: vec![ExecutedBlock::new( + Arc::new(first_in_mem_block.clone()), + Arc::new(in_memory_block_senders), + Default::default(), + Default::default(), + Default::default(), + )], + }; + consistent_provider.canonical_in_memory_state.update_chain(chain); + + let consistent_provider = provider.consistent_provider()?; + + // First in memory block should be found + assert_eq!( + consistent_provider.block(BlockHashOrNumber::Hash(first_in_mem_block.hash()))?, + Some(first_in_mem_block.clone().into()) + ); + assert_eq!( + consistent_provider.block(BlockHashOrNumber::Number(first_in_mem_block.number))?, + Some(first_in_mem_block.clone().into()) + ); + + // First database block should be found + assert_eq!( + consistent_provider.block(BlockHashOrNumber::Hash(first_db_block.hash()))?, + Some(first_db_block.clone().into()) + ); + assert_eq!( + consistent_provider.block(BlockHashOrNumber::Number(first_db_block.number))?, + Some(first_db_block.clone().into()) + ); + + Ok(()) + } + + #[test] + fn test_changeset_reader() -> eyre::Result<()> { + let mut rng = generators::rng(); + + let (database_blocks, in_memory_blocks) = + random_blocks(&mut rng, TEST_BLOCKS_COUNT, 1, None, None, 0..1); + + let first_database_block = database_blocks.first().map(|block| block.number).unwrap(); + let last_database_block = database_blocks.last().map(|block| block.number).unwrap(); + let first_in_memory_block = in_memory_blocks.first().map(|block| block.number).unwrap(); + + let accounts = random_eoa_accounts(&mut rng, 2); + + let (database_changesets, database_state) = random_changeset_range( + &mut rng, + &database_blocks, + accounts.into_iter().map(|(address, account)| (address, (account, Vec::new()))), + 0..0, + 0..0, + ); + let (in_memory_changesets, in_memory_state) = random_changeset_range( + &mut rng, + &in_memory_blocks, + database_state + .iter() + .map(|(address, (account, storage))| (*address, (*account, storage.clone()))), + 0..0, + 0..0, + ); + + let factory = create_test_provider_factory(); + + let provider_rw = factory.provider_rw()?; + provider_rw.append_blocks_with_state( + database_blocks + .into_iter() + .map(|b| b.seal_with_senders().expect("failed to seal block with senders")) + .collect(), + ExecutionOutcome { + bundle: BundleState::new( + database_state.into_iter().map(|(address, (account, _))| { + (address, None, Some(account.into()), Default::default()) + }), + database_changesets + .iter() + .map(|block_changesets| { + block_changesets.iter().map(|(address, account, _)| { + (*address, Some(Some((*account).into())), []) + }) + }) + .collect::>(), + Vec::new(), + ), + first_block: first_database_block, + ..Default::default() + }, + Default::default(), + Default::default(), + )?; + provider_rw.commit()?; + + let provider = BlockchainProvider2::new(factory)?; + + let in_memory_changesets = in_memory_changesets.into_iter().next().unwrap(); + let chain = NewCanonicalChain::Commit { + new: vec![in_memory_blocks + .first() + .map(|block| { + let senders = block.senders().expect("failed to recover senders"); + ExecutedBlock::new( + Arc::new(block.clone()), + Arc::new(senders), + Arc::new(ExecutionOutcome { + bundle: 
BundleState::new( + in_memory_state.into_iter().map(|(address, (account, _))| { + (address, None, Some(account.into()), Default::default()) + }), + [in_memory_changesets.iter().map(|(address, account, _)| { + (*address, Some(Some((*account).into())), Vec::new()) + })], + [], + ), + first_block: first_in_memory_block, + ..Default::default() + }), + Default::default(), + Default::default(), + ) + }) + .unwrap()], + }; + provider.canonical_in_memory_state.update_chain(chain); + + let consistent_provider = provider.consistent_provider()?; + + assert_eq!( + consistent_provider.account_block_changeset(last_database_block).unwrap(), + database_changesets + .into_iter() + .last() + .unwrap() + .into_iter() + .sorted_by_key(|(address, _, _)| *address) + .map(|(address, account, _)| AccountBeforeTx { address, info: Some(account) }) + .collect::>() + ); + assert_eq!( + consistent_provider.account_block_changeset(first_in_memory_block).unwrap(), + in_memory_changesets + .into_iter() + .sorted_by_key(|(address, _, _)| *address) + .map(|(address, account, _)| AccountBeforeTx { address, info: Some(account) }) + .collect::>() + ); + + Ok(()) + } +} diff --git a/crates/storage/provider/src/providers/mod.rs b/crates/storage/provider/src/providers/mod.rs index a67ebf89b..c81ef05d2 100644 --- a/crates/storage/provider/src/providers/mod.rs +++ b/crates/storage/provider/src/providers/mod.rs @@ -61,6 +61,9 @@ pub use consistent_view::{ConsistentDbView, ConsistentViewError}; mod blockchain_provider; pub use blockchain_provider::BlockchainProvider2; +mod consistent; +pub use consistent::ConsistentProvider; + /// Helper trait keeping common requirements of providers for [`NodeTypesWithDB`]. pub trait ProviderNodeTypes: NodeTypesWithDB {} From e98a050dc7ce868d079587d8f417b51d80a36cc8 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Wed, 23 Oct 2024 15:27:52 +0100 Subject: [PATCH 215/425] fix(trie): account for existing nodes when revealing a node (#11836) --- crates/trie/sparse/src/errors.rs | 10 + crates/trie/sparse/src/trie.rs | 301 +++++++++++++++++++++++++++---- 2 files changed, 280 insertions(+), 31 deletions(-) diff --git a/crates/trie/sparse/src/errors.rs b/crates/trie/sparse/src/errors.rs index f60d1736c..506b206fd 100644 --- a/crates/trie/sparse/src/errors.rs +++ b/crates/trie/sparse/src/errors.rs @@ -4,6 +4,8 @@ use alloy_primitives::{Bytes, B256}; use reth_trie::Nibbles; use thiserror::Error; +use crate::SparseNode; + /// Result type with [`SparseStateTrieError`] as error. pub type SparseStateTrieResult = Result; @@ -43,6 +45,14 @@ pub enum SparseTrieError { /// Node hash hash: B256, }, + /// Encountered unexpected node at path when revealing. + #[error("encountered an invalid node at path {path:?} when revealing: {node:?}")] + Reveal { + /// Path to the node. + path: Nibbles, + /// Node that was at the path when revealing. + node: Box, + }, /// RLP error. 
     #[error(transparent)]
     Rlp(#[from] alloy_rlp::Error),
diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs
index 39deb50e7..fae1141ec 100644
--- a/crates/trie/sparse/src/trie.rs
+++ b/crates/trie/sparse/src/trie.rs
@@ -142,21 +142,55 @@ impl RevealedSparseTrie {
                         stack_ptr += 1;
                     }
                 }
-                self.nodes
-                    .insert(path, SparseNode::Branch { state_mask: branch.state_mask, hash: None });
-            }
-            TrieNode::Extension(ext) => {
-                let mut child_path = path.clone();
-                child_path.extend_from_slice_unchecked(&ext.key);
-                self.reveal_node_or_hash(child_path, &ext.child)?;
-                self.nodes.insert(path, SparseNode::Extension { key: ext.key, hash: None });
-            }
-            TrieNode::Leaf(leaf) => {
-                let mut full = path.clone();
-                full.extend_from_slice_unchecked(&leaf.key);
-                self.values.insert(full, leaf.value);
-                self.nodes.insert(path, SparseNode::new_leaf(leaf.key));
+
+                match self.nodes.get(&path) {
+                    // Blinded and non-existent nodes can be replaced.
+                    Some(SparseNode::Hash(_)) | None => {
+                        self.nodes.insert(
+                            path,
+                            SparseNode::Branch { state_mask: branch.state_mask, hash: None },
+                        );
+                    }
+                    // Branch node already exists, or an extension node was placed where a
+                    // branch node was before.
+                    Some(SparseNode::Branch { .. } | SparseNode::Extension { .. }) => {}
+                    // All other node types can't be handled.
+                    Some(node @ (SparseNode::Empty | SparseNode::Leaf { .. })) => {
+                        return Err(SparseTrieError::Reveal { path, node: Box::new(node.clone()) })
+                    }
+                }
             }
+            TrieNode::Extension(ext) => match self.nodes.get(&path) {
+                Some(SparseNode::Hash(_)) | None => {
+                    let mut child_path = path.clone();
+                    child_path.extend_from_slice_unchecked(&ext.key);
+                    self.reveal_node_or_hash(child_path, &ext.child)?;
+                    self.nodes.insert(path, SparseNode::Extension { key: ext.key, hash: None });
+                }
+                // Extension node already exists, or an extension node was placed where a branch
+                // node was before.
+                Some(SparseNode::Extension { .. } | SparseNode::Branch { .. }) => {}
+                // All other node types can't be handled.
+                Some(node @ (SparseNode::Empty | SparseNode::Leaf { .. })) => {
+                    return Err(SparseTrieError::Reveal { path, node: Box::new(node.clone()) })
+                }
+            },
+            TrieNode::Leaf(leaf) => match self.nodes.get(&path) {
+                Some(SparseNode::Hash(_)) | None => {
+                    let mut full = path.clone();
+                    full.extend_from_slice_unchecked(&leaf.key);
+                    self.values.insert(full, leaf.value);
+                    self.nodes.insert(path, SparseNode::new_leaf(leaf.key));
+                }
+                // Leaf node already exists.
+                Some(SparseNode::Leaf { .. }) => {}
+                // All other node types can't be handled.
+                Some(
+                    node @ (SparseNode::Empty |
+                    SparseNode::Extension { .. } |
+                    SparseNode::Branch { .. }),
+                ) => return Err(SparseTrieError::Reveal { path, node: Box::new(node.clone()) }),
+            },
         }
 
         Ok(())
@@ -164,8 +198,18 @@ impl RevealedSparseTrie {
     fn reveal_node_or_hash(&mut self, path: Nibbles, child: &[u8]) -> SparseTrieResult<()> {
         if child.len() == B256::len_bytes() + 1 {
-            // TODO: revise insert to not overwrite existing entries
-            self.nodes.insert(path, SparseNode::Hash(B256::from_slice(&child[1..])));
+            let hash = B256::from_slice(&child[1..]);
+            match self.nodes.get(&path) {
+                // Hash node with a different hash can't be handled.
+                Some(node @ SparseNode::Hash(previous_hash)) if previous_hash != &hash => {
+                    return Err(SparseTrieError::Reveal { path, node: Box::new(node.clone()) })
+                }
+                None => {
+                    self.nodes.insert(path, SparseNode::Hash(hash));
+                }
+                // All other node types mean that it has already been revealed.
+ Some(_) => {} + } return Ok(()) } @@ -273,6 +317,10 @@ impl RevealedSparseTrie { // there is no node at the path. When a leaf node is a blinded `Hash`, it will have an entry // in `nodes`, but not in the `values`. + // If the path wasn't present in `values`, we still need to walk the trie and ensure that + // there is no node at the path. When a leaf node is a blinded `Hash`, it will have an entry + // in `nodes`, but not in the `values`. + let mut removed_nodes = self.take_nodes_for_path(path)?; debug!(target: "trie::sparse", ?path, ?removed_nodes, "Removed nodes for path"); // Pop the first node from the stack which is the leaf node we want to remove. @@ -1159,36 +1207,32 @@ mod tests { // Empty pretty_assertions::assert_eq!( sparse.nodes.clone().into_iter().collect::>(), - BTreeMap::from_iter([(Nibbles::default(), SparseNode::Empty),]) + BTreeMap::from_iter([(Nibbles::default(), SparseNode::Empty)]) ); } #[test] fn sparse_trie_remove_leaf_blinded() { - let mut sparse = RevealedSparseTrie::default(); - let leaf = LeafNode::new( Nibbles::default(), alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec(), ); + let branch = TrieNode::Branch(BranchNode::new( + vec![ + RlpNode::word_rlp(&B256::repeat_byte(1)), + RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), + ], + TrieMask::new(0b11), + )); + + let mut sparse = RevealedSparseTrie::from_root(branch.clone()).unwrap(); // Reveal a branch node and one of its children // // Branch (Mask = 11) // ├── 0 -> Hash (Path = 0) // └── 1 -> Leaf (Path = 1) - sparse - .reveal_node( - Nibbles::default(), - TrieNode::Branch(BranchNode::new( - vec![ - RlpNode::word_rlp(&B256::repeat_byte(1)), - RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), - ], - TrieMask::new(0b11), - )), - ) - .unwrap(); + sparse.reveal_node(Nibbles::default(), branch).unwrap(); sparse.reveal_node(Nibbles::from_nibbles([0x1]), TrieNode::Leaf(leaf)).unwrap(); // Removing a blinded leaf should result in an error @@ -1279,4 +1323,199 @@ mod tests { ) )| { test(updates.into_iter().collect()) }); } + + /// We have three leaves that share the same prefix: 0x00, 0x01 and 0x02. Hash builder trie has + /// only nodes 0x00 and 0x01, and we have proofs for them. Node B is new and inserted in the + /// sparse trie first. + /// + /// 1. Reveal the hash builder proof to leaf 0x00 in the sparse trie. + /// 2. Insert leaf 0x01 into the sparse trie. + /// 3. Reveal the hash builder proof to leaf 0x02 in the sparse trie. + /// + /// The hash builder proof to the leaf 0x02 didn't have the leaf 0x01 at the corresponding + /// nibble of the branch node, so we need to adjust the branch node instead of fully + /// replacing it. 
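The scenario above is exercised by the test that follows. As a reading aid for the new `reveal_node` arms introduced earlier in this patch, a hypothetical helper (not part of the change) expressing the same compatibility rules might look like the sketch below; it deliberately ignores `TrieNode::EmptyRoot` and the hash-mismatch check that `reveal_node_or_hash` performs on child hashes:

    /// Sketch only: `true` means the reveal is accepted (applied or treated as a no-op),
    /// `false` means `reveal_node` reports `SparseTrieError::Reveal`.
    fn reveal_is_accepted(existing: Option<&SparseNode>, incoming: &TrieNode) -> bool {
        match (existing, incoming) {
            // Blinded (`Hash`) and missing entries can always be replaced by the revealed node.
            (None | Some(SparseNode::Hash(_)), _) => true,
            // A revealed branch or extension landing on an existing branch or extension keeps
            // the existing node and becomes a no-op.
            (
                Some(SparseNode::Branch { .. } | SparseNode::Extension { .. }),
                TrieNode::Branch(_) | TrieNode::Extension(_),
            ) => true,
            // A revealed leaf is only a no-op on top of an existing leaf.
            (Some(SparseNode::Leaf { .. }), TrieNode::Leaf(_)) => true,
            // Everything else (e.g. a branch revealed where a leaf already lives) is rejected.
            _ => false,
        }
    }

In the patch itself these rules live inline in the `reveal_node` match; the helper is only a cross-reference for the three tests below.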
+ #[test] + fn sparse_trie_reveal_node_1() { + let key1 = || Nibbles::from_nibbles_unchecked([0x00]); + let key2 = || Nibbles::from_nibbles_unchecked([0x01]); + let key3 = || Nibbles::from_nibbles_unchecked([0x02]); + let value = || alloy_rlp::encode_fixed_size(&B256::repeat_byte(1)); + + // Generate the proof for the root node and initialize the sparse trie with it + let (_, proof_nodes) = hash_builder_root_with_proofs( + [(key1(), value()), (key3(), value())], + [Nibbles::default()], + ); + let mut sparse = RevealedSparseTrie::from_root( + TrieNode::decode(&mut &proof_nodes.nodes_sorted()[0].1[..]).unwrap(), + ) + .unwrap(); + + // Generate the proof for the first key and reveal it in the sparse trie + let (_, proof_nodes) = + hash_builder_root_with_proofs([(key1(), value()), (key3(), value())], [key1()]); + for (path, node) in proof_nodes.nodes_sorted() { + sparse.reveal_node(path, TrieNode::decode(&mut &node[..]).unwrap()).unwrap(); + } + + // Check that the branch node exists with only two nibbles set + assert_eq!( + sparse.nodes.get(&Nibbles::default()), + Some(&SparseNode::new_branch(0b101.into())) + ); + + // Insert the leaf for the second key + sparse.update_leaf(key2(), value().to_vec()).unwrap(); + + // Check that the branch node was updated and another nibble was set + assert_eq!( + sparse.nodes.get(&Nibbles::default()), + Some(&SparseNode::new_branch(0b111.into())) + ); + + // Generate the proof for the third key and reveal it in the sparse trie + let (_, proof_nodes_3) = + hash_builder_root_with_proofs([(key1(), value()), (key3(), value())], [key3()]); + for (path, node) in proof_nodes_3.nodes_sorted() { + sparse.reveal_node(path, TrieNode::decode(&mut &node[..]).unwrap()).unwrap(); + } + + // Check that nothing changed in the branch node + assert_eq!( + sparse.nodes.get(&Nibbles::default()), + Some(&SparseNode::new_branch(0b111.into())) + ); + + // Generate the nodes for the full trie with all three key using the hash builder, and + // compare them to the sparse trie + let (_, proof_nodes) = hash_builder_root_with_proofs( + [(key1(), value()), (key2(), value()), (key3(), value())], + [key1(), key2(), key3()], + ); + + assert_eq_sparse_trie_proof_nodes(&sparse, proof_nodes); + } + + /// We have three leaves: 0x0000, 0x0101, and 0x0102. Hash builder trie has all nodes, and we + /// have proofs for them. + /// + /// 1. Reveal the hash builder proof to leaf 0x00 in the sparse trie. + /// 2. Remove leaf 0x00 from the sparse trie (that will remove the branch node and create an + /// extension node with the key 0x0000). + /// 3. Reveal the hash builder proof to leaf 0x0101 in the sparse trie. + /// + /// The hash builder proof to the leaf 0x0101 had a branch node in the path, but we turned it + /// into an extension node, so it should ignore this node. 
+    #[test]
+    fn sparse_trie_reveal_node_2() {
+        let key1 = || Nibbles::from_nibbles_unchecked([0x00, 0x00]);
+        let key2 = || Nibbles::from_nibbles_unchecked([0x01, 0x01]);
+        let key3 = || Nibbles::from_nibbles_unchecked([0x01, 0x02]);
+        let value = || alloy_rlp::encode_fixed_size(&B256::repeat_byte(1));
+
+        // Generate the proof for the root node and initialize the sparse trie with it
+        let (_, proof_nodes) = hash_builder_root_with_proofs(
+            [(key1(), value()), (key2(), value()), (key3(), value())],
+            [Nibbles::default()],
+        );
+        let mut sparse = RevealedSparseTrie::from_root(
+            TrieNode::decode(&mut &proof_nodes.nodes_sorted()[0].1[..]).unwrap(),
+        )
+        .unwrap();
+
+        // Generate the proof for the children of the root branch node and reveal it in the sparse
+        // trie
+        let (_, proof_nodes) = hash_builder_root_with_proofs(
+            [(key1(), value()), (key2(), value()), (key3(), value())],
+            [key1(), Nibbles::from_nibbles_unchecked([0x01])],
+        );
+        for (path, node) in proof_nodes.nodes_sorted() {
+            sparse.reveal_node(path, TrieNode::decode(&mut &node[..]).unwrap()).unwrap();
+        }
+
+        // Check that the branch node exists
+        assert_eq!(
+            sparse.nodes.get(&Nibbles::default()),
+            Some(&SparseNode::new_branch(0b11.into()))
+        );
+
+        // Remove the leaf for the first key
+        sparse.remove_leaf(&key1()).unwrap();
+
+        // Check that the branch node was turned into an extension node
+        assert_eq!(
+            sparse.nodes.get(&Nibbles::default()),
+            Some(&SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x01])))
+        );
+
+        // Generate the proof for the second key and reveal it in the sparse trie
+        let (_, proof_nodes) = hash_builder_root_with_proofs(
+            [(key1(), value()), (key2(), value()), (key3(), value())],
+            [key2()],
+        );
+        for (path, node) in proof_nodes.nodes_sorted() {
+            sparse.reveal_node(path, TrieNode::decode(&mut &node[..]).unwrap()).unwrap();
+        }
+
+        // Check that nothing changed in the extension node
+        assert_eq!(
+            sparse.nodes.get(&Nibbles::default()),
+            Some(&SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x01])))
+        );
+    }
+
+    /// We have two leaves that share the same prefix: 0x0001 and 0x0002, and a leaf with a
+    /// different prefix: 0x0100. Hash builder trie has only the first two leaves, and we have
+    /// proofs for them.
+    ///
+    /// 1. Insert the leaf 0x0100 into the sparse trie, and check that the root extension node was
+    ///    turned into a branch node.
+    /// 2. Reveal the leaf 0x0001 in the sparse trie, and check that the root branch node wasn't
+    ///    overwritten with the extension node from the proof.
+ #[test] + fn sparse_trie_reveal_node_3() { + let key1 = || Nibbles::from_nibbles_unchecked([0x00, 0x01]); + let key2 = || Nibbles::from_nibbles_unchecked([0x00, 0x02]); + let key3 = || Nibbles::from_nibbles_unchecked([0x01, 0x00]); + let value = || alloy_rlp::encode_fixed_size(&B256::repeat_byte(1)); + + // Generate the proof for the root node and initialize the sparse trie with it + let (_, proof_nodes) = hash_builder_root_with_proofs( + [(key1(), value()), (key2(), value())], + [Nibbles::default()], + ); + let mut sparse = RevealedSparseTrie::from_root( + TrieNode::decode(&mut &proof_nodes.nodes_sorted()[0].1[..]).unwrap(), + ) + .unwrap(); + + // Check that the root extension node exists + assert_matches!( + sparse.nodes.get(&Nibbles::default()), + Some(SparseNode::Extension { key, hash: None }) if *key == Nibbles::from_nibbles([0x00]) + ); + + // Insert the leaf with a different prefix + sparse.update_leaf(key3(), value().to_vec()).unwrap(); + + // Check that the extension node was turned into a branch node + assert_matches!( + sparse.nodes.get(&Nibbles::default()), + Some(SparseNode::Branch { state_mask, hash: None }) if *state_mask == TrieMask::new(0b11) + ); + + // Generate the proof for the first key and reveal it in the sparse trie + let (_, proof_nodes) = + hash_builder_root_with_proofs([(key1(), value()), (key2(), value())], [key1()]); + for (path, node) in proof_nodes.nodes_sorted() { + sparse.reveal_node(path, TrieNode::decode(&mut &node[..]).unwrap()).unwrap(); + } + + // Check that the branch node wasn't overwritten by the extension node in the proof + assert_matches!( + sparse.nodes.get(&Nibbles::default()), + Some(SparseNode::Branch { state_mask, hash: None }) if *state_mask == TrieMask::new(0b11) + ); + } } From fa30a4f758e5f166e73c99bc25fbff34962ea3be Mon Sep 17 00:00:00 2001 From: Oliver Date: Wed, 23 Oct 2024 16:29:32 +0200 Subject: [PATCH 216/425] feat: add osaka hardfork (#11984) --- crates/chainspec/src/spec.rs | 8 ++++++++ crates/ethereum-forks/src/hardfork/ethereum.rs | 2 ++ crates/ethereum-forks/src/hardforks/ethereum.rs | 5 +++++ crates/ethereum/cli/src/chainspec.rs | 4 +++- crates/ethereum/evm/src/config.rs | 4 +++- 5 files changed, 21 insertions(+), 2 deletions(-) diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index a7f45727d..0e38d866b 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -617,6 +617,7 @@ impl From for ChainSpec { (EthereumHardfork::Shanghai.boxed(), genesis.config.shanghai_time), (EthereumHardfork::Cancun.boxed(), genesis.config.cancun_time), (EthereumHardfork::Prague.boxed(), genesis.config.prague_time), + (EthereumHardfork::Osaka.boxed(), genesis.config.osaka_time), ]; let mut time_hardforks = time_hardfork_opts @@ -864,6 +865,13 @@ impl ChainSpecBuilder { self } + /// Enable Osaka at genesis. + pub fn osaka_activated(mut self) -> Self { + self = self.prague_activated(); + self.hardforks.insert(EthereumHardfork::Osaka, ForkCondition::Timestamp(0)); + self + } + /// Build the resulting [`ChainSpec`]. 
/// /// # Panics diff --git a/crates/ethereum-forks/src/hardfork/ethereum.rs b/crates/ethereum-forks/src/hardfork/ethereum.rs index 3d85b54a9..4e13b0017 100644 --- a/crates/ethereum-forks/src/hardfork/ethereum.rs +++ b/crates/ethereum-forks/src/hardfork/ethereum.rs @@ -49,6 +49,8 @@ hardfork!( Cancun, /// Prague: Prague, + /// Osaka: + Osaka, } ); diff --git a/crates/ethereum-forks/src/hardforks/ethereum.rs b/crates/ethereum-forks/src/hardforks/ethereum.rs index 306936715..086d2d3b4 100644 --- a/crates/ethereum-forks/src/hardforks/ethereum.rs +++ b/crates/ethereum-forks/src/hardforks/ethereum.rs @@ -21,6 +21,11 @@ pub trait EthereumHardforks: Hardforks { self.is_fork_active_at_timestamp(EthereumHardfork::Prague, timestamp) } + /// Convenience method to check if [`EthereumHardfork::Osaka`] is active at a given timestamp. + fn is_osaka_active_at_timestamp(&self, timestamp: u64) -> bool { + self.is_fork_active_at_timestamp(EthereumHardfork::Osaka, timestamp) + } + /// Convenience method to check if [`EthereumHardfork::Byzantium`] is active at a given block /// number. fn is_byzantium_active_at_block(&self, block_number: u64) -> bool { diff --git a/crates/ethereum/cli/src/chainspec.rs b/crates/ethereum/cli/src/chainspec.rs index cbcce9f69..a60d70179 100644 --- a/crates/ethereum/cli/src/chainspec.rs +++ b/crates/ethereum/cli/src/chainspec.rs @@ -89,7 +89,8 @@ mod tests { "terminalTotalDifficulty": 0, "shanghaiTime": 0, "cancunTime": 0, - "pragueTime": 0 + "pragueTime": 0, + "osakaTime": 0 } }"#; @@ -97,5 +98,6 @@ mod tests { assert!(spec.is_shanghai_active_at_timestamp(0)); assert!(spec.is_cancun_active_at_timestamp(0)); assert!(spec.is_prague_active_at_timestamp(0)); + assert!(spec.is_osaka_active_at_timestamp(0)); } } diff --git a/crates/ethereum/evm/src/config.rs b/crates/ethereum/evm/src/config.rs index e5253307b..9d6b6d879 100644 --- a/crates/ethereum/evm/src/config.rs +++ b/crates/ethereum/evm/src/config.rs @@ -11,7 +11,9 @@ pub fn revm_spec_by_timestamp_after_merge( chain_spec: &ChainSpec, timestamp: u64, ) -> revm_primitives::SpecId { - if chain_spec.is_prague_active_at_timestamp(timestamp) { + if chain_spec.is_osaka_active_at_timestamp(timestamp) { + revm_primitives::OSAKA + } else if chain_spec.is_prague_active_at_timestamp(timestamp) { revm_primitives::PRAGUE } else if chain_spec.is_cancun_active_at_timestamp(timestamp) { revm_primitives::CANCUN From b7167a9ddc8377a235f6d1d50e4581127f2a8bf0 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 23 Oct 2024 16:30:57 +0200 Subject: [PATCH 217/425] test(tx-pool): add unit test for `GetPooledTransactionLimit` (#11975) --- crates/transaction-pool/src/traits.rs | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index cedec5606..00cda8e1c 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -1594,4 +1594,27 @@ mod tests { assert_eq!(pooled_tx.blob_sidecar, EthBlobTransactionSidecar::None); assert_eq!(pooled_tx.cost, U256::from(100) + U256::from(10 * 1000)); } + + #[test] + fn test_pooled_transaction_limit() { + // No limit should never exceed + let limit_none = GetPooledTransactionLimit::None; + // Any size should return false + assert!(!limit_none.exceeds(1000)); + + // Size limit of 2MB (2 * 1024 * 1024 bytes) + let size_limit_2mb = GetPooledTransactionLimit::ResponseSizeSoftLimit(2 * 1024 * 1024); + + // Test with size below the limit + // 1MB 
is below 2MB, should return false + assert!(!size_limit_2mb.exceeds(1024 * 1024)); + + // Test with size exactly at the limit + // 2MB equals the limit, should return false + assert!(!size_limit_2mb.exceeds(2 * 1024 * 1024)); + + // Test with size exceeding the limit + // 3MB is above the 2MB limit, should return true + assert!(size_limit_2mb.exceeds(3 * 1024 * 1024)); + } } From 889a7e0b981e4a36394f6b4e554483daaabfeb25 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 23 Oct 2024 16:31:22 +0200 Subject: [PATCH 218/425] primitive-traits: use alloy `_DURATION` constants (#11960) --- Cargo.lock | 2 ++ crates/node/core/Cargo.toml | 15 ++++----------- crates/node/core/src/args/payload_builder.rs | 3 ++- crates/payload/basic/Cargo.toml | 1 + crates/payload/basic/src/lib.rs | 4 ++-- crates/primitives-traits/src/constants/mod.rs | 10 ---------- 6 files changed, 11 insertions(+), 24 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f2d134cb8..92a9d6887 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6370,6 +6370,7 @@ name = "reth-basic-payload-builder" version = "1.1.0" dependencies = [ "alloy-consensus", + "alloy-eips", "alloy-primitives", "alloy-rlp", "futures-core", @@ -7911,6 +7912,7 @@ name = "reth-node-core" version = "1.1.0" dependencies = [ "alloy-consensus", + "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", "clap", diff --git a/crates/node/core/Cargo.toml b/crates/node/core/Cargo.toml index 73c552f4d..0c9672d17 100644 --- a/crates/node/core/Cargo.toml +++ b/crates/node/core/Cargo.toml @@ -40,6 +40,7 @@ reth-stages-types.workspace = true alloy-primitives.workspace = true alloy-rpc-types-engine = { workspace = true, features = ["jwt"] } alloy-consensus.workspace = true +alloy-eips.workspace = true # misc eyre.workspace = true @@ -76,18 +77,10 @@ proptest.workspace = true tokio.workspace = true [features] -optimism = [ - "reth-primitives/optimism", - "reth-db/optimism" -] +optimism = ["reth-primitives/optimism", "reth-db/optimism"] # Features for vergen to generate correct env vars -jemalloc = [ - "reth-cli-util/jemalloc" -] -asm-keccak = [ - "reth-primitives/asm-keccak", - "alloy-primitives/asm-keccak" -] +jemalloc = ["reth-cli-util/jemalloc"] +asm-keccak = ["reth-primitives/asm-keccak", "alloy-primitives/asm-keccak"] [build-dependencies] vergen = { version = "8.0.0", features = ["build", "cargo", "git", "gitcl"] } diff --git a/crates/node/core/src/args/payload_builder.rs b/crates/node/core/src/args/payload_builder.rs index 4a18fd5b0..dceb10726 100644 --- a/crates/node/core/src/args/payload_builder.rs +++ b/crates/node/core/src/args/payload_builder.rs @@ -1,11 +1,12 @@ use crate::{cli::config::PayloadBuilderConfig, version::default_extradata}; use alloy_consensus::constants::MAXIMUM_EXTRA_DATA_SIZE; +use alloy_eips::merge::SLOT_DURATION; use clap::{ builder::{RangedU64ValueParser, TypedValueParser}, Arg, Args, Command, }; use reth_cli_util::{parse_duration_from_secs, parse_duration_from_secs_or_ms}; -use reth_primitives::constants::{ETHEREUM_BLOCK_GAS_LIMIT, SLOT_DURATION}; +use reth_primitives::constants::ETHEREUM_BLOCK_GAS_LIMIT; use std::{borrow::Cow, ffi::OsStr, time::Duration}; /// Parameters for configuring the Payload Builder diff --git a/crates/payload/basic/Cargo.toml b/crates/payload/basic/Cargo.toml index 904776889..88ab99272 100644 --- a/crates/payload/basic/Cargo.toml +++ b/crates/payload/basic/Cargo.toml @@ -27,6 +27,7 @@ alloy-rlp.workspace = true alloy-primitives.workspace = true revm.workspace = true 
alloy-consensus.workspace = true +alloy-eips.workspace = true # async tokio = { workspace = true, features = ["sync", "time"] } diff --git a/crates/payload/basic/src/lib.rs b/crates/payload/basic/src/lib.rs index fcc8be9a8..4274d451e 100644 --- a/crates/payload/basic/src/lib.rs +++ b/crates/payload/basic/src/lib.rs @@ -10,6 +10,7 @@ use crate::metrics::PayloadBuilderMetrics; use alloy_consensus::constants::EMPTY_WITHDRAWALS; +use alloy_eips::merge::SLOT_DURATION; use alloy_primitives::{Bytes, B256, U256}; use futures_core::ready; use futures_util::FutureExt; @@ -22,8 +23,7 @@ use reth_payload_primitives::{ BuiltPayload, PayloadBuilderAttributes, PayloadBuilderError, PayloadKind, }; use reth_primitives::{ - constants::{RETH_CLIENT_VERSION, SLOT_DURATION}, - proofs, BlockNumberOrTag, SealedBlock, Withdrawals, + constants::RETH_CLIENT_VERSION, proofs, BlockNumberOrTag, SealedBlock, Withdrawals, }; use reth_provider::{ BlockReaderIdExt, BlockSource, CanonStateNotification, ProviderError, StateProviderFactory, diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index 2874c596a..a4091a4a9 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -1,7 +1,6 @@ //! Ethereum protocol-related constants use alloy_primitives::{address, b256, Address, B256, U256}; -use core::time::Duration; /// Gas units, for example [`GIGAGAS`]. pub mod gas_units; @@ -13,15 +12,6 @@ pub const RETH_CLIENT_VERSION: &str = concat!("reth/v", env!("CARGO_PKG_VERSION" /// An EPOCH is a series of 32 slots. pub const EPOCH_SLOTS: u64 = 32; -/// The duration of a slot in seconds. -/// -/// This is the time period of 12 seconds in which a randomly chosen validator has time to propose a -/// block. -pub const SLOT_DURATION: Duration = Duration::from_secs(12); - -/// An EPOCH is a series of 32 slots (~6.4min). 
-pub const EPOCH_DURATION: Duration = Duration::from_secs(12 * EPOCH_SLOTS); - /// The default block nonce in the beacon consensus pub const BEACON_NONCE: u64 = 0u64; From 89eb73f3d23f6d6771e472e6588633e8cb284943 Mon Sep 17 00:00:00 2001 From: Oliver Date: Wed, 23 Oct 2024 16:44:37 +0200 Subject: [PATCH 219/425] refactor: replace extra fields with `ExecutionPayloadSidecar` in engine (#11901) --- Cargo.lock | 2 - .../src/commands/debug_cmd/replay_engine.rs | 6 +- crates/consensus/beacon/Cargo.toml | 13 ++- crates/consensus/beacon/src/engine/handle.rs | 15 +--- crates/consensus/beacon/src/engine/message.rs | 12 +-- crates/consensus/beacon/src/engine/mod.rs | 50 +++++------ .../consensus/beacon/src/engine/test_utils.rs | 10 +-- crates/engine/local/src/miner.rs | 9 +- crates/engine/tree/src/tree/mod.rs | 34 +++----- crates/engine/util/src/engine_store.rs | 17 ++-- crates/engine/util/src/reorg.rs | 85 ++++++++----------- crates/engine/util/src/skip_new_payload.rs | 16 +--- crates/payload/validator/Cargo.toml | 1 - crates/payload/validator/src/lib.rs | 21 ++--- crates/rpc/rpc-engine-api/src/engine_api.rs | 44 +++++++--- crates/rpc/rpc-engine-api/tests/it/payload.rs | 19 +++-- .../rpc-types-compat/src/engine/payload.rs | 40 ++++----- 17 files changed, 179 insertions(+), 215 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 92a9d6887..350e6a77a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6394,7 +6394,6 @@ dependencies = [ name = "reth-beacon-consensus" version = "1.1.0" dependencies = [ - "alloy-eips", "alloy-genesis", "alloy-primitives", "alloy-rpc-types-engine", @@ -8356,7 +8355,6 @@ dependencies = [ name = "reth-payload-validator" version = "1.1.0" dependencies = [ - "alloy-eips", "alloy-rpc-types", "reth-chainspec", "reth-primitives", diff --git a/bin/reth/src/commands/debug_cmd/replay_engine.rs b/bin/reth/src/commands/debug_cmd/replay_engine.rs index de497cbe0..e7b3de6b6 100644 --- a/bin/reth/src/commands/debug_cmd/replay_engine.rs +++ b/bin/reth/src/commands/debug_cmd/replay_engine.rs @@ -170,10 +170,8 @@ impl> Command { beacon_engine_handle.fork_choice_updated(state, payload_attrs).await?; debug!(target: "reth::cli", ?response, "Received for forkchoice updated"); } - StoredEngineApiMessage::NewPayload { payload, cancun_fields } => { - // todo: prague (last arg) - let response = - beacon_engine_handle.new_payload(payload, cancun_fields, None).await?; + StoredEngineApiMessage::NewPayload { payload, sidecar } => { + let response = beacon_engine_handle.new_payload(payload, sidecar).await?; debug!(target: "reth::cli", ?response, "Received for new payload"); } }; diff --git a/crates/consensus/beacon/Cargo.toml b/crates/consensus/beacon/Cargo.toml index dd1e33931..b141bd34e 100644 --- a/crates/consensus/beacon/Cargo.toml +++ b/crates/consensus/beacon/Cargo.toml @@ -31,7 +31,6 @@ reth-node-types.workspace = true reth-chainspec = { workspace = true, optional = true } # ethereum -alloy-eips.workspace = true alloy-primitives.workspace = true alloy-rpc-types-engine.workspace = true @@ -78,10 +77,10 @@ assert_matches.workspace = true [features] optimism = [ - "reth-chainspec", - "reth-primitives/optimism", - "reth-provider/optimism", - "reth-blockchain-tree/optimism", - "reth-db/optimism", - "reth-db-api/optimism" + "reth-chainspec", + "reth-primitives/optimism", + "reth-provider/optimism", + "reth-blockchain-tree/optimism", + "reth-db/optimism", + "reth-db-api/optimism", ] diff --git a/crates/consensus/beacon/src/engine/handle.rs b/crates/consensus/beacon/src/engine/handle.rs index 
bb5c4dee1..4aafc6e07 100644 --- a/crates/consensus/beacon/src/engine/handle.rs +++ b/crates/consensus/beacon/src/engine/handle.rs @@ -4,9 +4,8 @@ use crate::{ engine::message::OnForkChoiceUpdated, BeaconConsensusEngineEvent, BeaconEngineMessage, BeaconForkChoiceUpdateError, BeaconOnNewPayloadError, }; -use alloy_eips::eip7685::Requests; use alloy_rpc_types_engine::{ - CancunPayloadFields, ExecutionPayload, ForkchoiceState, ForkchoiceUpdated, PayloadStatus, + ExecutionPayload, ExecutionPayloadSidecar, ForkchoiceState, ForkchoiceUpdated, PayloadStatus, }; use futures::TryFutureExt; use reth_engine_primitives::EngineTypes; @@ -47,18 +46,10 @@ where pub async fn new_payload( &self, payload: ExecutionPayload, - cancun_fields: Option, - execution_requests: Option, + sidecar: ExecutionPayloadSidecar, ) -> Result { let (tx, rx) = oneshot::channel(); - // HACK(onbjerg): We should have a pectra payload fields struct, this is just a temporary - // workaround. - let _ = self.to_engine.send(BeaconEngineMessage::NewPayload { - payload, - cancun_fields, - execution_requests, - tx, - }); + let _ = self.to_engine.send(BeaconEngineMessage::NewPayload { payload, sidecar, tx }); rx.await.map_err(|_| BeaconOnNewPayloadError::EngineUnavailable)? } diff --git a/crates/consensus/beacon/src/engine/message.rs b/crates/consensus/beacon/src/engine/message.rs index 56328f03d..e33decbd8 100644 --- a/crates/consensus/beacon/src/engine/message.rs +++ b/crates/consensus/beacon/src/engine/message.rs @@ -1,7 +1,6 @@ use crate::engine::{error::BeaconOnNewPayloadError, forkchoice::ForkchoiceStatus}; -use alloy_eips::eip7685::Requests; use alloy_rpc_types_engine::{ - CancunPayloadFields, ExecutionPayload, ForkChoiceUpdateResult, ForkchoiceState, + ExecutionPayload, ExecutionPayloadSidecar, ForkChoiceUpdateResult, ForkchoiceState, ForkchoiceUpdateError, ForkchoiceUpdated, PayloadId, PayloadStatus, PayloadStatusEnum, }; use futures::{future::Either, FutureExt}; @@ -145,12 +144,9 @@ pub enum BeaconEngineMessage { NewPayload { /// The execution payload received by Engine API. payload: ExecutionPayload, - /// The cancun-related newPayload fields, if any. - cancun_fields: Option, - // HACK(onbjerg): We should have a pectra payload fields struct, this is just a temporary - // workaround. - /// The pectra EIP-7685 execution requests. - execution_requests: Option, + /// The execution payload sidecar with additional version-specific fields received by + /// engine API. + sidecar: ExecutionPayloadSidecar, /// The sender for returning payload status result. tx: oneshot::Sender>, }, diff --git a/crates/consensus/beacon/src/engine/mod.rs b/crates/consensus/beacon/src/engine/mod.rs index cff648b28..5af1e26ac 100644 --- a/crates/consensus/beacon/src/engine/mod.rs +++ b/crates/consensus/beacon/src/engine/mod.rs @@ -1,7 +1,6 @@ -use alloy_eips::eip7685::Requests; use alloy_primitives::{BlockNumber, B256}; use alloy_rpc_types_engine::{ - CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, PayloadStatusEnum, + ExecutionPayload, ExecutionPayloadSidecar, ForkchoiceState, PayloadStatus, PayloadStatusEnum, PayloadValidationError, }; use futures::{stream::BoxStream, Future, StreamExt}; @@ -1081,14 +1080,11 @@ where /// /// This returns a [`PayloadStatus`] that represents the outcome of a processed new payload and /// returns an error if an internal error occurred. 
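As a rough caller-side sketch of the reworked entry point (illustrative only: `handle` is a `BeaconConsensusEngineHandle`, and `payload`, `cancun_fields: Option<CancunPayloadFields>` and `execution_requests: Option<Requests>` are assumed bindings mirroring the old calling convention), the previously separate optional arguments now collapse into a single sidecar:

    use alloy_rpc_types_engine::{CancunPayloadFields, ExecutionPayloadSidecar};

    // Pick the sidecar variant matching the payload version being submitted.
    let sidecar = match (cancun_fields, execution_requests) {
        // Prague (V4): Cancun fields plus the EIP-7685 execution requests.
        (Some(cancun), Some(requests)) => ExecutionPayloadSidecar::v4(cancun, requests),
        // Cancun (V3): parent beacon block root and blob versioned hashes.
        (Some(cancun), None) => ExecutionPayloadSidecar::v3(cancun),
        // Pre-Cancun (V1/V2): nothing extra rides along with the payload.
        _ => ExecutionPayloadSidecar::none(),
    };

    // Regardless of the variant there is now a single engine entry point:
    let status = handle.new_payload(payload, sidecar).await?;

The `ExecutionPayloadSidecar::none`/`v3`/`v4` constructors used in this sketch are the same ones exercised throughout the diff below.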
- #[instrument(level = "trace", skip(self, payload, cancun_fields), fields(block_hash = ?payload.block_hash(), block_number = %payload.block_number(), is_pipeline_idle = %self.sync.is_pipeline_idle()), target = "consensus::engine")] + #[instrument(level = "trace", skip(self, payload, sidecar), fields(block_hash = ?payload.block_hash(), block_number = %payload.block_number(), is_pipeline_idle = %self.sync.is_pipeline_idle()), target = "consensus::engine")] fn on_new_payload( &mut self, payload: ExecutionPayload, - cancun_fields: Option, - // HACK(onbjerg): We should have a pectra payload fields struct, this is just a temporary - // workaround. - execution_requests: Option, + sidecar: ExecutionPayloadSidecar, ) -> Result, BeaconOnNewPayloadError> { self.metrics.new_payload_messages.increment(1); @@ -1118,11 +1114,7 @@ where // // This validation **MUST** be instantly run in all cases even during active sync process. let parent_hash = payload.parent_hash(); - let block = match self.payload_validator.ensure_well_formed_payload( - payload, - cancun_fields.into(), - execution_requests, - ) { + let block = match self.payload_validator.ensure_well_formed_payload(payload, sidecar) { Ok(block) => block, Err(error) => { error!(target: "consensus::engine", %error, "Invalid payload"); @@ -1867,13 +1859,8 @@ where BeaconEngineMessage::ForkchoiceUpdated { state, payload_attrs, tx } => { this.on_forkchoice_updated(state, payload_attrs, tx); } - BeaconEngineMessage::NewPayload { - payload, - cancun_fields, - execution_requests, - tx, - } => { - match this.on_new_payload(payload, cancun_fields, execution_requests) { + BeaconEngineMessage::NewPayload { payload, sidecar, tx } => { + match this.on_new_payload(payload, sidecar) { Ok(Either::Right(block)) => { this.set_blockchain_tree_action( BlockchainTreeAction::InsertNewPayload { block, tx }, @@ -2061,7 +2048,12 @@ mod tests { assert_matches!(rx.try_recv(), Err(TryRecvError::Empty)); // consensus engine is still idle because no FCUs were received - let _ = env.send_new_payload(block_to_payload_v1(SealedBlock::default()), None).await; + let _ = env + .send_new_payload( + block_to_payload_v1(SealedBlock::default()), + ExecutionPayloadSidecar::none(), + ) + .await; assert_matches!(rx.try_recv(), Err(TryRecvError::Empty)); @@ -2626,7 +2618,7 @@ mod tests { 0, BlockParams { ommers_count: Some(0), ..Default::default() }, )), - None, + ExecutionPayloadSidecar::none(), ) .await; @@ -2641,7 +2633,7 @@ mod tests { 1, BlockParams { ommers_count: Some(0), ..Default::default() }, )), - None, + ExecutionPayloadSidecar::none(), ) .await; @@ -2719,7 +2711,10 @@ mod tests { // Send new payload let result = env - .send_new_payload_retry_on_syncing(block_to_payload_v1(block2.clone()), None) + .send_new_payload_retry_on_syncing( + block_to_payload_v1(block2.clone()), + ExecutionPayloadSidecar::none(), + ) .await .unwrap(); @@ -2854,7 +2849,9 @@ mod tests { 2, BlockParams { parent: Some(parent), ommers_count: Some(0), ..Default::default() }, ); - let res = env.send_new_payload(block_to_payload_v1(block), None).await; + let res = env + .send_new_payload(block_to_payload_v1(block), ExecutionPayloadSidecar::none()) + .await; let expected_result = PayloadStatus::from_status(PayloadStatusEnum::Syncing); assert_matches!(res, Ok(result) => assert_eq!(result, expected_result)); @@ -2924,7 +2921,10 @@ mod tests { // Send new payload let result = env - .send_new_payload_retry_on_syncing(block_to_payload_v1(block2.clone()), None) + .send_new_payload_retry_on_syncing( + 
block_to_payload_v1(block2.clone()), + ExecutionPayloadSidecar::none(), + ) .await .unwrap(); diff --git a/crates/consensus/beacon/src/engine/test_utils.rs b/crates/consensus/beacon/src/engine/test_utils.rs index 7e9e1ec6b..912f0a871 100644 --- a/crates/consensus/beacon/src/engine/test_utils.rs +++ b/crates/consensus/beacon/src/engine/test_utils.rs @@ -6,7 +6,7 @@ use crate::{ }; use alloy_primitives::{BlockNumber, Sealable, B256}; use alloy_rpc_types_engine::{ - CancunPayloadFields, ExecutionPayload, ForkchoiceState, ForkchoiceUpdated, PayloadStatus, + ExecutionPayload, ExecutionPayloadSidecar, ForkchoiceState, ForkchoiceUpdated, PayloadStatus, }; use reth_blockchain_tree::{ config::BlockchainTreeConfig, externals::TreeExternals, BlockchainTree, ShareableBlockchainTree, @@ -68,9 +68,9 @@ impl TestEnv { pub async fn send_new_payload>( &self, payload: T, - cancun_fields: Option, + sidecar: ExecutionPayloadSidecar, ) -> Result { - self.engine_handle.new_payload(payload.into(), cancun_fields, None).await + self.engine_handle.new_payload(payload.into(), sidecar).await } /// Sends the `ExecutionPayload` message to the consensus engine and retries if the engine @@ -78,11 +78,11 @@ impl TestEnv { pub async fn send_new_payload_retry_on_syncing>( &self, payload: T, - cancun_fields: Option, + sidecar: ExecutionPayloadSidecar, ) -> Result { let payload: ExecutionPayload = payload.into(); loop { - let result = self.send_new_payload(payload.clone(), cancun_fields.clone()).await?; + let result = self.send_new_payload(payload.clone(), sidecar.clone()).await?; if !result.is_syncing() { return Ok(result) } diff --git a/crates/engine/local/src/miner.rs b/crates/engine/local/src/miner.rs index 552cbd047..706ddc43d 100644 --- a/crates/engine/local/src/miner.rs +++ b/crates/engine/local/src/miner.rs @@ -1,7 +1,7 @@ //! Contains the implementation of the mining mode for the local engine. 
use alloy_primitives::{TxHash, B256}; -use alloy_rpc_types_engine::{CancunPayloadFields, ForkchoiceState}; +use alloy_rpc_types_engine::{CancunPayloadFields, ExecutionPayloadSidecar, ForkchoiceState}; use eyre::OptionExt; use futures_util::{stream::Fuse, StreamExt}; use reth_beacon_consensus::BeaconEngineMessage; @@ -221,9 +221,10 @@ where let (tx, rx) = oneshot::channel(); self.to_engine.send(BeaconEngineMessage::NewPayload { payload: block_to_payload(payload.block().clone()), - cancun_fields, - // todo: prague - execution_requests: None, + // todo: prague support + sidecar: cancun_fields + .map(ExecutionPayloadSidecar::v3) + .unwrap_or_else(ExecutionPayloadSidecar::none), tx, })?; diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index 021b3149a..555cf8916 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -10,7 +10,7 @@ use alloy_primitives::{ BlockNumber, B256, U256, }; use alloy_rpc_types_engine::{ - CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, PayloadStatusEnum, + ExecutionPayload, ExecutionPayloadSidecar, ForkchoiceState, PayloadStatus, PayloadStatusEnum, PayloadValidationError, }; use reth_beacon_consensus::{ @@ -70,7 +70,6 @@ use crate::{ engine::{EngineApiKind, EngineApiRequest}, tree::metrics::EngineApiMetrics, }; -use alloy_eips::eip7685::Requests; pub use config::TreeConfig; pub use invalid_block_hook::{InvalidBlockHooks, NoopInvalidBlockHook}; pub use persistence_state::PersistenceState; @@ -722,8 +721,7 @@ where fn on_new_payload( &mut self, payload: ExecutionPayload, - cancun_fields: Option, - execution_requests: Option, + sidecar: ExecutionPayloadSidecar, ) -> Result, InsertBlockFatalError> { trace!(target: "engine::tree", "invoked new payload"); self.metrics.engine.new_payload_messages.increment(1); @@ -754,11 +752,7 @@ where // // This validation **MUST** be instantly run in all cases even during active sync process. 
let parent_hash = payload.parent_hash(); - let block = match self.payload_validator.ensure_well_formed_payload( - payload, - cancun_fields.into(), - execution_requests, - ) { + let block = match self.payload_validator.ensure_well_formed_payload(payload, sidecar) { Ok(block) => block, Err(error) => { error!(target: "engine::tree", %error, "Invalid payload"); @@ -1241,14 +1235,8 @@ where error!(target: "engine::tree", "Failed to send event: {err:?}"); } } - BeaconEngineMessage::NewPayload { - payload, - cancun_fields, - execution_requests, - tx, - } => { - let output = - self.on_new_payload(payload, cancun_fields, execution_requests); + BeaconEngineMessage::NewPayload { payload, sidecar, tx } => { + let output = self.on_new_payload(payload, sidecar); if let Err(err) = tx.send(output.map(|o| o.outcome).map_err(|e| { reth_beacon_consensus::BeaconOnNewPayloadError::Internal( Box::new(e), @@ -2585,6 +2573,7 @@ mod tests { use crate::persistence::PersistenceAction; use alloy_primitives::{Bytes, Sealable}; use alloy_rlp::Decodable; + use alloy_rpc_types_engine::{CancunPayloadFields, ExecutionPayloadSidecar}; use assert_matches::assert_matches; use reth_beacon_consensus::{EthBeaconConsensus, ForkchoiceStatus}; use reth_chain_state::{test_utils::TestBlockBuilder, BlockState}; @@ -2862,11 +2851,10 @@ mod tests { self.tree .on_new_payload( payload.into(), - Some(CancunPayloadFields { + ExecutionPayloadSidecar::v3(CancunPayloadFields { parent_beacon_block_root: block.parent_beacon_block_root.unwrap(), versioned_hashes: vec![], }), - None, ) .unwrap(); } @@ -3129,7 +3117,10 @@ mod tests { let mut test_harness = TestHarness::new(HOLESKY.clone()); - let outcome = test_harness.tree.on_new_payload(payload.into(), None, None).unwrap(); + let outcome = test_harness + .tree + .on_new_payload(payload.into(), ExecutionPayloadSidecar::none()) + .unwrap(); assert!(outcome.outcome.is_syncing()); // ensure block is buffered @@ -3173,8 +3164,7 @@ mod tests { .on_engine_message(FromEngine::Request( BeaconEngineMessage::NewPayload { payload: payload.clone().into(), - cancun_fields: None, - execution_requests: None, + sidecar: ExecutionPayloadSidecar::none(), tx, } .into(), diff --git a/crates/engine/util/src/engine_store.rs b/crates/engine/util/src/engine_store.rs index de193bf3b..85c5e126f 100644 --- a/crates/engine/util/src/engine_store.rs +++ b/crates/engine/util/src/engine_store.rs @@ -1,6 +1,6 @@ //! Stores engine API messages to disk for later inspection and replay. -use alloy_rpc_types_engine::{CancunPayloadFields, ExecutionPayload, ForkchoiceState}; +use alloy_rpc_types_engine::{ExecutionPayload, ExecutionPayloadSidecar, ForkchoiceState}; use futures::{Stream, StreamExt}; use reth_beacon_consensus::BeaconEngineMessage; use reth_engine_primitives::EngineTypes; @@ -30,8 +30,9 @@ pub enum StoredEngineApiMessage { NewPayload { /// The [`ExecutionPayload`] sent in the persisted call. payload: ExecutionPayload, - /// The Cancun-specific fields sent in the persisted call, if any. - cancun_fields: Option, + /// The execution payload sidecar with additional version-specific fields received by + /// engine API. 
+ sidecar: ExecutionPayloadSidecar, }, } @@ -73,20 +74,14 @@ impl EngineMessageStore { })?, )?; } - // todo(onbjerg): execution requests - BeaconEngineMessage::NewPayload { - payload, - cancun_fields, - execution_requests: _, - tx: _tx, - } => { + BeaconEngineMessage::NewPayload { payload, sidecar, tx: _tx } => { let filename = format!("{}-new_payload-{}.json", timestamp, payload.block_hash()); fs::write( self.path.join(filename), serde_json::to_vec( &StoredEngineApiMessage::::NewPayload { payload: payload.clone(), - cancun_fields: cancun_fields.clone(), + sidecar: sidecar.clone(), }, )?, )?; diff --git a/crates/engine/util/src/reorg.rs b/crates/engine/util/src/reorg.rs index 85216e32f..d109fb9e9 100644 --- a/crates/engine/util/src/reorg.rs +++ b/crates/engine/util/src/reorg.rs @@ -1,10 +1,9 @@ //! Stream wrapper that simulates reorgs. use alloy_consensus::Transaction; -use alloy_eips::eip7685::Requests; use alloy_primitives::U256; use alloy_rpc_types_engine::{ - CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, + CancunPayloadFields, ExecutionPayload, ExecutionPayloadSidecar, ForkchoiceState, PayloadStatus, }; use futures::{stream::FuturesUnordered, Stream, StreamExt, TryFutureExt}; use itertools::Either; @@ -150,12 +149,7 @@ where let next = ready!(this.stream.poll_next_unpin(cx)); let item = match (next, &this.last_forkchoice_state) { ( - Some(BeaconEngineMessage::NewPayload { - payload, - cancun_fields, - execution_requests, - tx, - }), + Some(BeaconEngineMessage::NewPayload { payload, sidecar, tx }), Some(last_forkchoice_state), ) if this.forkchoice_states_forwarded > this.frequency && // Only enter reorg state if new payload attaches to current head. @@ -170,29 +164,26 @@ where // forkchoice state. We will rely on CL to reorg us back to canonical chain. // TODO: This is an expensive blocking operation, ideally it's spawned as a task // so that the stream could yield the control back. 
- let (reorg_payload, reorg_cancun_fields, reorg_execution_requests) = - match create_reorg_head( - this.provider, - this.evm_config, - this.payload_validator, - *this.depth, - payload.clone(), - cancun_fields.clone(), - execution_requests.clone(), - ) { - Ok(result) => result, - Err(error) => { - error!(target: "engine::stream::reorg", %error, "Error attempting to create reorg head"); - // Forward the payload and attempt to create reorg on top of - // the next one - return Poll::Ready(Some(BeaconEngineMessage::NewPayload { - payload, - cancun_fields, - execution_requests, - tx, - })) - } - }; + let (reorg_payload, reorg_sidecar) = match create_reorg_head( + this.provider, + this.evm_config, + this.payload_validator, + *this.depth, + payload.clone(), + sidecar.clone(), + ) { + Ok(result) => result, + Err(error) => { + error!(target: "engine::stream::reorg", %error, "Error attempting to create reorg head"); + // Forward the payload and attempt to create reorg on top of + // the next one + return Poll::Ready(Some(BeaconEngineMessage::NewPayload { + payload, + sidecar, + tx, + })) + } + }; let reorg_forkchoice_state = ForkchoiceState { finalized_block_hash: last_forkchoice_state.finalized_block_hash, safe_block_hash: last_forkchoice_state.safe_block_hash, @@ -208,17 +199,11 @@ where let queue = VecDeque::from([ // Current payload - BeaconEngineMessage::NewPayload { - payload, - cancun_fields, - execution_requests, - tx, - }, + BeaconEngineMessage::NewPayload { payload, sidecar, tx }, // Reorg payload BeaconEngineMessage::NewPayload { payload: reorg_payload, - cancun_fields: reorg_cancun_fields, - execution_requests: reorg_execution_requests, + sidecar: reorg_sidecar, tx: reorg_payload_tx, }, // Reorg forkchoice state @@ -252,9 +237,8 @@ fn create_reorg_head( payload_validator: &ExecutionPayloadValidator, mut depth: usize, next_payload: ExecutionPayload, - next_cancun_fields: Option, - next_execution_requests: Option, -) -> RethResult<(ExecutionPayload, Option, Option)> + next_sidecar: ExecutionPayloadSidecar, +) -> RethResult<(ExecutionPayload, ExecutionPayloadSidecar)> where Provider: BlockReader + StateProviderFactory, Evm: ConfigureEvm
, @@ -264,11 +248,7 @@ where // Ensure next payload is valid. let next_block = payload_validator - .ensure_well_formed_payload( - next_payload, - next_cancun_fields.into(), - next_execution_requests, - ) + .ensure_well_formed_payload(next_payload, next_sidecar) .map_err(RethError::msg)?; // Fetch reorg target block depending on its depth and its parent. @@ -439,11 +419,16 @@ where Ok(( block_to_payload(reorg_block), + // todo(onbjerg): how do we support execution requests? reorg_target .header .parent_beacon_block_root - .map(|root| CancunPayloadFields { parent_beacon_block_root: root, versioned_hashes }), - // todo(prague) - None, + .map(|root| { + ExecutionPayloadSidecar::v3(CancunPayloadFields { + parent_beacon_block_root: root, + versioned_hashes, + }) + }) + .unwrap_or_else(ExecutionPayloadSidecar::none), )) } diff --git a/crates/engine/util/src/skip_new_payload.rs b/crates/engine/util/src/skip_new_payload.rs index 47c48282e..16f2e9819 100644 --- a/crates/engine/util/src/skip_new_payload.rs +++ b/crates/engine/util/src/skip_new_payload.rs @@ -41,19 +41,14 @@ where loop { let next = ready!(this.stream.poll_next_unpin(cx)); let item = match next { - Some(BeaconEngineMessage::NewPayload { - payload, - cancun_fields, - execution_requests, - tx, - }) => { + Some(BeaconEngineMessage::NewPayload { payload, sidecar, tx }) => { if this.skipped < this.threshold { *this.skipped += 1; tracing::warn!( target: "engine::stream::skip_new_payload", block_number = payload.block_number(), block_hash = %payload.block_hash(), - ?cancun_fields, + ?sidecar, threshold=this.threshold, skipped=this.skipped, "Skipping new payload" ); @@ -61,12 +56,7 @@ where continue } *this.skipped = 0; - Some(BeaconEngineMessage::NewPayload { - payload, - cancun_fields, - execution_requests, - tx, - }) + Some(BeaconEngineMessage::NewPayload { payload, sidecar, tx }) } next => next, }; diff --git a/crates/payload/validator/Cargo.toml b/crates/payload/validator/Cargo.toml index 619b99f28..2662b987f 100644 --- a/crates/payload/validator/Cargo.toml +++ b/crates/payload/validator/Cargo.toml @@ -18,5 +18,4 @@ reth-primitives.workspace = true reth-rpc-types-compat.workspace = true # alloy -alloy-eips.workspace = true alloy-rpc-types = { workspace = true, features = ["engine"] } diff --git a/crates/payload/validator/src/lib.rs b/crates/payload/validator/src/lib.rs index 3ec7b206a..9952815fd 100644 --- a/crates/payload/validator/src/lib.rs +++ b/crates/payload/validator/src/lib.rs @@ -8,8 +8,9 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -use alloy_eips::eip7685::Requests; -use alloy_rpc_types::engine::{ExecutionPayload, MaybeCancunPayloadFields, PayloadError}; +use alloy_rpc_types::engine::{ + ExecutionPayload, ExecutionPayloadSidecar, MaybeCancunPayloadFields, PayloadError, +}; use reth_chainspec::EthereumHardforks; use reth_primitives::SealedBlock; use reth_rpc_types_compat::engine::payload::try_into_block; @@ -112,15 +113,12 @@ impl ExecutionPayloadValidator { pub fn ensure_well_formed_payload( &self, payload: ExecutionPayload, - cancun_fields: MaybeCancunPayloadFields, - execution_requests: Option, + sidecar: ExecutionPayloadSidecar, ) -> Result { let expected_hash = payload.block_hash(); // First parse the block - let sealed_block = - try_into_block(payload, cancun_fields.parent_beacon_block_root(), execution_requests)? 
- .seal_slow(); + let sealed_block = try_into_block(payload, &sidecar)?.seal_slow(); // Ensure the hash included in the payload matches the block hash if expected_hash != sealed_block.hash() { @@ -139,7 +137,7 @@ impl ExecutionPayloadValidator { // cancun active but excess blob gas not present return Err(PayloadError::PostCancunBlockWithoutExcessBlobGas) } - if cancun_fields.as_ref().is_none() { + if sidecar.cancun().is_none() { // cancun active but cancun fields not present return Err(PayloadError::PostCancunWithoutCancunFields) } @@ -156,7 +154,7 @@ impl ExecutionPayloadValidator { // cancun not active but excess blob gas present return Err(PayloadError::PreCancunBlockWithExcessBlobGas) } - if cancun_fields.as_ref().is_some() { + if sidecar.cancun().is_some() { // cancun not active but cancun fields present return Err(PayloadError::PreCancunWithCancunFields) } @@ -175,7 +173,10 @@ impl ExecutionPayloadValidator { } // EIP-4844 checks - self.ensure_matching_blob_versioned_hashes(&sealed_block, &cancun_fields)?; + self.ensure_matching_blob_versioned_hashes( + &sealed_block, + &sidecar.cancun().cloned().into(), + )?; Ok(sealed_block) } diff --git a/crates/rpc/rpc-engine-api/src/engine_api.rs b/crates/rpc/rpc-engine-api/src/engine_api.rs index ca055a77e..eb280408e 100644 --- a/crates/rpc/rpc-engine-api/src/engine_api.rs +++ b/crates/rpc/rpc-engine-api/src/engine_api.rs @@ -5,8 +5,8 @@ use alloy_eips::{eip4844::BlobAndProofV1, eip7685::Requests}; use alloy_primitives::{BlockHash, BlockNumber, B256, U64}; use alloy_rpc_types_engine::{ CancunPayloadFields, ClientVersionV1, ExecutionPayload, ExecutionPayloadBodiesV1, - ExecutionPayloadInputV2, ExecutionPayloadV1, ExecutionPayloadV3, ForkchoiceState, - ForkchoiceUpdated, PayloadId, PayloadStatus, TransitionConfiguration, + ExecutionPayloadInputV2, ExecutionPayloadSidecar, ExecutionPayloadV1, ExecutionPayloadV3, + ForkchoiceState, ForkchoiceUpdated, PayloadId, PayloadStatus, TransitionConfiguration, }; use async_trait::async_trait; use jsonrpsee_core::RpcResult; @@ -140,7 +140,11 @@ where self.inner .validator .validate_version_specific_fields(EngineApiMessageVersion::V1, payload_or_attrs)?; - Ok(self.inner.beacon_consensus.new_payload(payload, None, None).await?) + Ok(self + .inner + .beacon_consensus + .new_payload(payload, ExecutionPayloadSidecar::none()) + .await?) } /// See also @@ -156,7 +160,11 @@ where self.inner .validator .validate_version_specific_fields(EngineApiMessageVersion::V2, payload_or_attrs)?; - Ok(self.inner.beacon_consensus.new_payload(payload, None, None).await?) + Ok(self + .inner + .beacon_consensus + .new_payload(payload, ExecutionPayloadSidecar::none()) + .await?) } /// See also @@ -176,9 +184,17 @@ where .validator .validate_version_specific_fields(EngineApiMessageVersion::V3, payload_or_attrs)?; - let cancun_fields = CancunPayloadFields { versioned_hashes, parent_beacon_block_root }; - - Ok(self.inner.beacon_consensus.new_payload(payload, Some(cancun_fields), None).await?) + Ok(self + .inner + .beacon_consensus + .new_payload( + payload, + ExecutionPayloadSidecar::v3(CancunPayloadFields { + versioned_hashes, + parent_beacon_block_root, + }), + ) + .await?) } /// See also @@ -187,8 +203,6 @@ where payload: ExecutionPayloadV3, versioned_hashes: Vec, parent_beacon_block_root: B256, - // TODO(onbjerg): Figure out why we even get these here, since we'll check the requests - // from execution against the requests root in the header. 
execution_requests: Requests, ) -> EngineApiResult { let payload = ExecutionPayload::from(payload); @@ -201,14 +215,16 @@ where .validator .validate_version_specific_fields(EngineApiMessageVersion::V4, payload_or_attrs)?; - let cancun_fields = CancunPayloadFields { versioned_hashes, parent_beacon_block_root }; - - // HACK(onbjerg): We should have a pectra payload fields struct, this is just a temporary - // workaround. Ok(self .inner .beacon_consensus - .new_payload(payload, Some(cancun_fields), Some(execution_requests)) + .new_payload( + payload, + ExecutionPayloadSidecar::v4( + CancunPayloadFields { versioned_hashes, parent_beacon_block_root }, + execution_requests, + ), + ) .await?) } diff --git a/crates/rpc/rpc-engine-api/tests/it/payload.rs b/crates/rpc/rpc-engine-api/tests/it/payload.rs index 007a62db0..febbc291e 100644 --- a/crates/rpc/rpc-engine-api/tests/it/payload.rs +++ b/crates/rpc/rpc-engine-api/tests/it/payload.rs @@ -3,7 +3,8 @@ use alloy_primitives::{Bytes, Sealable, U256}; use alloy_rlp::{Decodable, Error as RlpError}; use alloy_rpc_types_engine::{ - ExecutionPayload, ExecutionPayloadBodyV1, ExecutionPayloadV1, PayloadError, + ExecutionPayload, ExecutionPayloadBodyV1, ExecutionPayloadSidecar, ExecutionPayloadV1, + PayloadError, }; use assert_matches::assert_matches; use reth_primitives::{proofs, Block, SealedBlock, SealedHeader, TransactionSigned, Withdrawals}; @@ -75,7 +76,10 @@ fn payload_validation() { b }); - assert_matches!(try_into_sealed_block(block_with_valid_extra_data, None, None), Ok(_)); + assert_matches!( + try_into_sealed_block(block_with_valid_extra_data, &ExecutionPayloadSidecar::none()), + Ok(_) + ); // Invalid extra data let block_with_invalid_extra_data = Bytes::from_static(&[0; 33]); @@ -84,7 +88,7 @@ fn payload_validation() { b }); assert_matches!( - try_into_sealed_block(invalid_extra_data_block, None, None), + try_into_sealed_block(invalid_extra_data_block, &ExecutionPayloadSidecar::none()), Err(PayloadError::ExtraData(data)) if data == block_with_invalid_extra_data ); @@ -94,7 +98,7 @@ fn payload_validation() { b }); assert_matches!( - try_into_sealed_block(block_with_zero_base_fee, None, None), + try_into_sealed_block(block_with_zero_base_fee, &ExecutionPayloadSidecar::none()), Err(PayloadError::BaseFee(val)) if val.is_zero() ); @@ -113,7 +117,7 @@ fn payload_validation() { b }); assert_matches!( - try_into_sealed_block(block_with_ommers.clone(), None, None), + try_into_sealed_block(block_with_ommers.clone(), &ExecutionPayloadSidecar::none()), Err(PayloadError::BlockHash { consensus, .. }) if consensus == block_with_ommers.block_hash() ); @@ -124,7 +128,7 @@ fn payload_validation() { b }); assert_matches!( - try_into_sealed_block(block_with_difficulty.clone(), None, None), + try_into_sealed_block(block_with_difficulty.clone(), &ExecutionPayloadSidecar::none()), Err(PayloadError::BlockHash { consensus, .. }) if consensus == block_with_difficulty.block_hash() ); @@ -134,9 +138,8 @@ fn payload_validation() { b }); assert_matches!( - try_into_sealed_block(block_with_nonce.clone(), None, None), + try_into_sealed_block(block_with_nonce.clone(), &ExecutionPayloadSidecar::none()), Err(PayloadError::BlockHash { consensus, .. 
}) if consensus == block_with_nonce.block_hash() - ); // Valid block diff --git a/crates/rpc/rpc-types-compat/src/engine/payload.rs b/crates/rpc/rpc-types-compat/src/engine/payload.rs index b63b7453a..b4c45a617 100644 --- a/crates/rpc/rpc-types-compat/src/engine/payload.rs +++ b/crates/rpc/rpc-types-compat/src/engine/payload.rs @@ -9,7 +9,8 @@ use alloy_eips::{ use alloy_primitives::{B256, U256}; use alloy_rpc_types_engine::{ payload::{ExecutionPayloadBodyV1, ExecutionPayloadFieldV2, ExecutionPayloadInputV2}, - ExecutionPayload, ExecutionPayloadV1, ExecutionPayloadV2, ExecutionPayloadV3, PayloadError, + ExecutionPayload, ExecutionPayloadSidecar, ExecutionPayloadV1, ExecutionPayloadV2, + ExecutionPayloadV3, PayloadError, }; use reth_primitives::{ proofs::{self}, @@ -248,17 +249,18 @@ pub fn convert_block_to_payload_input_v2(value: SealedBlock) -> ExecutionPayload } } -/// Tries to create a new block (without a block hash) from the given payload and optional parent -/// beacon block root. +/// Tries to create a new unsealed block from the given payload and payload sidecar. +/// /// Performs additional validation of `extra_data` and `base_fee_per_gas` fields. /// -/// NOTE: The log bloom is assumed to be validated during serialization. +/// # Note +/// +/// The log bloom is assumed to be validated during serialization. /// /// See pub fn try_into_block( value: ExecutionPayload, - parent_beacon_block_root: Option, - execution_requests: Option, + sidecar: &ExecutionPayloadSidecar, ) -> Result { let mut base_payload = match value { ExecutionPayload::V1(payload) => try_payload_v1_to_block(payload)?, @@ -266,29 +268,30 @@ pub fn try_into_block( ExecutionPayload::V3(payload) => try_payload_v3_to_block(payload)?, }; - base_payload.header.parent_beacon_block_root = parent_beacon_block_root; - base_payload.header.requests_hash = execution_requests.map(|reqs| reqs.requests_hash()); + base_payload.header.parent_beacon_block_root = sidecar.parent_beacon_block_root(); + base_payload.header.requests_hash = sidecar.requests().map(Requests::requests_hash); Ok(base_payload) } -/// Tries to create a new block from the given payload and optional parent beacon block root. -/// -/// NOTE: Empty ommers, nonce and difficulty values are validated upon computing block hash and -/// comparing the value with `payload.block_hash`. +/// Tries to create a sealed new block from the given payload and payload sidecar. /// /// Uses [`try_into_block`] to convert from the [`ExecutionPayload`] to [`Block`] and seals the /// block with its hash. /// /// Uses [`validate_block_hash`] to validate the payload block hash and ultimately return the /// [`SealedBlock`]. +/// +/// # Note +/// +/// Empty ommers, nonce, difficulty, and execution request values are validated upon computing block +/// hash and comparing the value with `payload.block_hash`. 
pub fn try_into_sealed_block( payload: ExecutionPayload, - parent_beacon_block_root: Option, - execution_requests: Option, + sidecar: &ExecutionPayloadSidecar, ) -> Result { let block_hash = payload.block_hash(); - let base_payload = try_into_block(payload, parent_beacon_block_root, execution_requests)?; + let base_payload = try_into_block(payload, sidecar)?; // validate block hash and return validate_block_hash(block_hash, base_payload) @@ -356,8 +359,8 @@ mod tests { }; use alloy_primitives::{b256, hex, Bytes, U256}; use alloy_rpc_types_engine::{ - CancunPayloadFields, ExecutionPayload, ExecutionPayloadV1, ExecutionPayloadV2, - ExecutionPayloadV3, + CancunPayloadFields, ExecutionPayload, ExecutionPayloadSidecar, ExecutionPayloadV1, + ExecutionPayloadV2, ExecutionPayloadV3, }; #[test] @@ -575,8 +578,7 @@ mod tests { let cancun_fields = CancunPayloadFields { parent_beacon_block_root, versioned_hashes }; // convert into block - let block = - try_into_block(payload, Some(cancun_fields.parent_beacon_block_root), None).unwrap(); + let block = try_into_block(payload, &ExecutionPayloadSidecar::v3(cancun_fields)).unwrap(); // Ensure the actual hash is calculated if we set the fields to what they should be validate_block_hash(block_hash_with_blob_fee_fields, block).unwrap(); From bf612bee50cada67624791baa7707f260acf1be4 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Wed, 23 Oct 2024 17:03:25 +0200 Subject: [PATCH 220/425] chore(hive): update expected failures (#12006) --- .github/assets/hive/expected_failures.yaml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/assets/hive/expected_failures.yaml b/.github/assets/hive/expected_failures.yaml index d4b3d2bcb..ec7bd0549 100644 --- a/.github/assets/hive/expected_failures.yaml +++ b/.github/assets/hive/expected_failures.yaml @@ -41,8 +41,6 @@ engine-withdrawals: - Withdrawals Fork on Canonical Block 8 / Side Block 9 - 10 Block Re-Org (Paris) (reth) - Withdrawals Fork on Canonical Block 8 / Side Block 9 - 10 Block Re-Org Sync (Paris) (reth) -# https://github.com/paradigmxyz/reth/issues/8305 -# https://github.com/paradigmxyz/reth/issues/6217 engine-api: [] # https://github.com/paradigmxyz/reth/issues/8305 @@ -58,6 +56,4 @@ engine-cancun: - Invalid NewPayload, Incomplete VersionedHashes, Syncing=False, EmptyTxs=False, DynFeeTxs=False (Cancun) (reth) - Invalid NewPayload, Extra VersionedHashes, Syncing=False, EmptyTxs=False, DynFeeTxs=False (Cancun) (reth) -# https://github.com/paradigmxyz/reth/issues/8579 -sync: - - sync reth -> reth +sync: [] From f3853e71b36314a370ace6bf3ac699531e635be7 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Wed, 23 Oct 2024 16:22:59 +0100 Subject: [PATCH 221/425] test(trie): get sparse trie nodes at depth (#12007) --- crates/trie/sparse/src/trie.rs | 86 ++++++++++++++++++++++++++++++++-- 1 file changed, 82 insertions(+), 4 deletions(-) diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index fae1141ec..4d195cbf3 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -569,6 +569,17 @@ impl RevealedSparseTrie { /// Update hashes of the nodes that are located at a level deeper than or equal to the provided /// depth. Root node has a level of 0. 
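To make the depth semantics concrete, an illustrative call (not part of the diff) against the trie shape built in the `sparse_trie_get_nodes_at_depth` test added further down:

    // Depth 2 selects the subtrees hanging off the top-level branch (paths 50, 52
    // and 53 in the test's trie), so this re-hashes those subtrees and everything
    // beneath them, while nodes above depth 2 are left for a later root update.
    sparse.update_rlp_node_level(2);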
pub fn update_rlp_node_level(&mut self, depth: usize) { + let targets = self.get_nodes_at_depth(depth); + let mut prefix_set = self.prefix_set.clone().freeze(); + for target in targets { + self.rlp_node(target, &mut prefix_set); + } + } + + /// Returns a list of paths to the nodes that are located at the provided depth when counting + /// from the root node. If there's a leaf at a depth less than the provided depth, it will be + /// included in the result. + fn get_nodes_at_depth(&self, depth: usize) -> HashSet { let mut paths = Vec::from([(Nibbles::default(), 0)]); let mut targets = HashSet::::default(); @@ -602,10 +613,7 @@ impl RevealedSparseTrie { } } - let mut prefix_set = self.prefix_set.clone().freeze(); - for target in targets { - self.rlp_node(target, &mut prefix_set); - } + targets } fn rlp_node(&mut self, path: Nibbles, prefix_set: &mut PrefixSet) -> RlpNode { @@ -1518,4 +1526,74 @@ mod tests { Some(SparseNode::Branch { state_mask, hash: None }) if *state_mask == TrieMask::new(0b11) ); } + + #[test] + fn sparse_trie_get_nodes_at_depth() { + let mut sparse = RevealedSparseTrie::default(); + + let value = alloy_rlp::encode_fixed_size(&U256::ZERO).to_vec(); + + // Extension (Key = 5) – Level 0 + // └── Branch (Mask = 1011) – Level 1 + // ├── 0 -> Extension (Key = 23) – Level 2 + // │ └── Branch (Mask = 0101) – Level 3 + // │ ├── 1 -> Leaf (Key = 1, Path = 50231) – Level 4 + // │ └── 3 -> Leaf (Key = 3, Path = 50233) – Level 4 + // ├── 2 -> Leaf (Key = 013, Path = 52013) – Level 2 + // └── 3 -> Branch (Mask = 0101) – Level 2 + // ├── 1 -> Leaf (Key = 3102, Path = 53102) – Level 3 + // └── 3 -> Branch (Mask = 1010) – Level 3 + // ├── 0 -> Leaf (Key = 3302, Path = 53302) – Level 4 + // └── 2 -> Leaf (Key = 3320, Path = 53320) – Level 4 + sparse + .update_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]), value.clone()) + .unwrap(); + sparse + .update_leaf(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]), value.clone()) + .unwrap(); + sparse + .update_leaf(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3]), value.clone()) + .unwrap(); + sparse + .update_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2]), value.clone()) + .unwrap(); + sparse + .update_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2]), value.clone()) + .unwrap(); + sparse.update_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0]), value).unwrap(); + + assert_eq!(sparse.get_nodes_at_depth(0), HashSet::from([Nibbles::default()])); + assert_eq!( + sparse.get_nodes_at_depth(1), + HashSet::from([Nibbles::from_nibbles_unchecked([0x5])]) + ); + assert_eq!( + sparse.get_nodes_at_depth(2), + HashSet::from([ + Nibbles::from_nibbles_unchecked([0x5, 0x0]), + Nibbles::from_nibbles_unchecked([0x5, 0x2]), + Nibbles::from_nibbles_unchecked([0x5, 0x3]) + ]) + ); + assert_eq!( + sparse.get_nodes_at_depth(3), + HashSet::from([ + Nibbles::from_nibbles_unchecked([0x5, 0x0, 0x2, 0x3]), + Nibbles::from_nibbles_unchecked([0x5, 0x2]), + Nibbles::from_nibbles_unchecked([0x5, 0x3, 0x1]), + Nibbles::from_nibbles_unchecked([0x5, 0x3, 0x3]) + ]) + ); + assert_eq!( + sparse.get_nodes_at_depth(4), + HashSet::from([ + Nibbles::from_nibbles_unchecked([0x5, 0x0, 0x2, 0x3, 0x1]), + Nibbles::from_nibbles_unchecked([0x5, 0x0, 0x2, 0x3, 0x3]), + Nibbles::from_nibbles_unchecked([0x5, 0x2]), + Nibbles::from_nibbles_unchecked([0x5, 0x3, 0x1]), + Nibbles::from_nibbles_unchecked([0x5, 0x3, 0x3, 0x0]), + Nibbles::from_nibbles_unchecked([0x5, 0x3, 0x3, 0x2]) + ]) + ); + } } From b73261936ee0995818030d696ad1c8778ecd449a Mon Sep 17 00:00:00 2001 From: 
Alexey Shekhirin Date: Wed, 23 Oct 2024 16:59:22 +0100 Subject: [PATCH 222/425] chore(trie): prefix set doc comment clarification (#12010) --- crates/trie/trie/src/prefix_set.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/trie/trie/src/prefix_set.rs b/crates/trie/trie/src/prefix_set.rs index da912fbbd..0cf16f939 100644 --- a/crates/trie/trie/src/prefix_set.rs +++ b/crates/trie/trie/src/prefix_set.rs @@ -168,8 +168,7 @@ pub struct PrefixSet { } impl PrefixSet { - /// Returns `true` if any of the keys in the set has the given prefix or - /// if the given prefix is a prefix of any key in the set. + /// Returns `true` if any of the keys in the set has the given prefix #[inline] pub fn contains(&mut self, prefix: &[u8]) -> bool { if self.all { From 57a21fcb9efc274675ea00a644ebbfa627940c6d Mon Sep 17 00:00:00 2001 From: Julian Meyer Date: Wed, 23 Oct 2024 09:22:51 -0700 Subject: [PATCH 223/425] chore: increase max proof window (#12001) --- crates/rpc/rpc-server-types/src/constants.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/rpc/rpc-server-types/src/constants.rs b/crates/rpc/rpc-server-types/src/constants.rs index 0bc441819..126bc722f 100644 --- a/crates/rpc/rpc-server-types/src/constants.rs +++ b/crates/rpc/rpc-server-types/src/constants.rs @@ -51,9 +51,9 @@ pub const DEFAULT_MAX_SIMULATE_BLOCKS: u64 = 256; /// The default eth historical proof window. pub const DEFAULT_ETH_PROOF_WINDOW: u64 = 0; -/// Maximum eth historical proof window. Equivalent to roughly one and a half months of data on a 12 -/// second block time, and a week on a 2 second block time. -pub const MAX_ETH_PROOF_WINDOW: u64 = 7 * 24 * 60 * 60 / 2; +/// Maximum eth historical proof window. Equivalent to roughly 6 months of data on a 12 +/// second block time, and a month on a 2 second block time. 
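For reference, the new bound works out as follows (plain arithmetic, not part of the diff):

    28 * 24 * 60 * 60 / 2 = 1_209_600 blocks
    1_209_600 blocks * 12 s/block = 14_515_200 s ≈ 168 days (~5.6 months)
    1_209_600 blocks *  2 s/block =  2_419_200 s  = 28 days  (one month)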
+pub const MAX_ETH_PROOF_WINDOW: u64 = 28 * 24 * 60 * 60 / 2; /// GPO specific constants pub mod gas_oracle { From d6f5a89a277a9490137d634b21c4cca4e728015f Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 23 Oct 2024 20:40:57 +0200 Subject: [PATCH 224/425] test: tests for empty block bodies (#12013) --- crates/net/eth-wire-types/src/blocks.rs | 9 +++++++++ crates/net/eth-wire-types/src/message.rs | 16 +++++++++++++++- crates/primitives/src/block.rs | 9 +++++++++ 3 files changed, 33 insertions(+), 1 deletion(-) diff --git a/crates/net/eth-wire-types/src/blocks.rs b/crates/net/eth-wire-types/src/blocks.rs index d60c63fc1..878b4573f 100644 --- a/crates/net/eth-wire-types/src/blocks.rs +++ b/crates/net/eth-wire-types/src/blocks.rs @@ -497,4 +497,13 @@ mod tests { let result = RequestPair::decode(&mut &data[..]).unwrap(); assert_eq!(result, expected); } + + #[test] + fn empty_block_bodies_rlp() { + let body = BlockBodies::default(); + let mut buf = Vec::new(); + body.encode(&mut buf); + let decoded = BlockBodies::decode(&mut buf.as_slice()).unwrap(); + assert_eq!(body, decoded); + } } diff --git a/crates/net/eth-wire-types/src/message.rs b/crates/net/eth-wire-types/src/message.rs index 9ef8e6c71..4afcb34e1 100644 --- a/crates/net/eth-wire-types/src/message.rs +++ b/crates/net/eth-wire-types/src/message.rs @@ -494,7 +494,8 @@ where mod tests { use super::MessageError; use crate::{ - message::RequestPair, EthMessage, EthMessageID, GetNodeData, NodeData, ProtocolMessage, + message::RequestPair, EthMessage, EthMessageID, EthVersion, GetNodeData, NodeData, + ProtocolMessage, }; use alloy_primitives::hex; use alloy_rlp::{Decodable, Encodable, Error}; @@ -566,4 +567,17 @@ mod tests { let result = RequestPair::>::decode(&mut &*raw_pair); assert!(matches!(result, Err(Error::UnexpectedLength))); } + + #[test] + fn empty_block_bodies_protocol() { + let empty_block_bodies = ProtocolMessage::from(EthMessage::BlockBodies(RequestPair { + request_id: 0, + message: Default::default(), + })); + let mut buf = Vec::new(); + empty_block_bodies.encode(&mut buf); + let decoded = + ProtocolMessage::decode_message(EthVersion::Eth68, &mut buf.as_slice()).unwrap(); + assert_eq!(empty_block_bodies, decoded); + } } diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index 717b0446b..a06979300 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -1090,4 +1090,13 @@ mod tests { let block = block.seal_slow(); assert_eq!(sealed, block.hash()); } + + #[test] + fn empty_block_rlp() { + let body = BlockBody::default(); + let mut buf = Vec::new(); + body.encode(&mut buf); + let decoded = BlockBody::decode(&mut buf.as_slice()).unwrap(); + assert_eq!(body, decoded); + } } From 2fb63b04911affa850ada5a336953942f4f11b0f Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 23 Oct 2024 21:40:29 +0200 Subject: [PATCH 225/425] chore: dont log if nothing to evict (#12015) --- crates/net/discv4/src/lib.rs | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/crates/net/discv4/src/lib.rs b/crates/net/discv4/src/lib.rs index 779c7ee63..a99906bdf 100644 --- a/crates/net/discv4/src/lib.rs +++ b/crates/net/discv4/src/lib.rs @@ -1513,11 +1513,12 @@ impl Discv4Service { true }); - trace!(target: "discv4", num=%failed_pings.len(), "evicting nodes due to failed pong"); - - // remove nodes that failed to pong - for node_id in failed_pings { - self.remove_node(node_id); + if !failed_pings.is_empty() { + // remove nodes that failed to pong + 
trace!(target: "discv4", num=%failed_pings.len(), "evicting nodes due to failed pong"); + for node_id in failed_pings { + self.remove_node(node_id); + } } let mut failed_lookups = Vec::new(); @@ -1528,11 +1529,13 @@ impl Discv4Service { } true }); - trace!(target: "discv4", num=%failed_lookups.len(), "evicting nodes due to failed lookup"); - // remove nodes that failed the e2e lookup process, so we can restart it - for node_id in failed_lookups { - self.remove_node(node_id); + if !failed_lookups.is_empty() { + // remove nodes that failed the e2e lookup process, so we can restart it + trace!(target: "discv4", num=%failed_lookups.len(), "evicting nodes due to failed lookup"); + for node_id in failed_lookups { + self.remove_node(node_id); + } } self.evict_failed_find_nodes(now); @@ -1553,6 +1556,10 @@ impl Discv4Service { true }); + if failed_find_nodes.is_empty() { + return + } + trace!(target: "discv4", num=%failed_find_nodes.len(), "processing failed find nodes"); for node_id in failed_find_nodes { From 565e4b400d6e3a4125efdcd50bb5a2c2d60f65ca Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 24 Oct 2024 01:15:15 +0200 Subject: [PATCH 226/425] refactor(primitive-traits): use alloy `ETHEREUM_BLOCK_GAS_LIMIT` constant (#12019) --- Cargo.lock | 1 + crates/chainspec/src/spec.rs | 6 ++--- crates/node/core/src/args/payload_builder.rs | 3 +-- crates/node/core/src/args/txpool.rs | 3 ++- crates/optimism/chainspec/Cargo.toml | 22 ++++++++++--------- crates/optimism/chainspec/src/op.rs | 2 +- crates/optimism/chainspec/src/op_sepolia.rs | 2 +- crates/primitives-traits/src/constants/mod.rs | 3 --- crates/rpc/rpc-server-types/src/constants.rs | 5 ++--- crates/rpc/rpc/src/eth/helpers/state.rs | 2 +- crates/rpc/rpc/src/eth/helpers/transaction.rs | 2 +- crates/transaction-pool/src/config.rs | 3 ++- crates/transaction-pool/src/noop.rs | 4 ++-- crates/transaction-pool/src/pool/txpool.rs | 5 ++--- crates/transaction-pool/tests/it/evict.rs | 3 ++- 15 files changed, 32 insertions(+), 34 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 350e6a77a..b66da6181 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8060,6 +8060,7 @@ version = "1.1.0" dependencies = [ "alloy-chains", "alloy-consensus", + "alloy-eips", "alloy-genesis", "alloy-primitives", "derive_more 1.0.0", diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index 0e38d866b..bebf7ca26 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -9,6 +9,7 @@ use alloy_primitives::{address, b256, Address, BlockNumber, B256, U256}; use derive_more::From; use alloy_consensus::constants::{DEV_GENESIS_HASH, MAINNET_GENESIS_HASH}; +use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; use reth_ethereum_forks::{ ChainHardforks, DisplayHardforks, EthereumHardfork, EthereumHardforks, ForkCondition, ForkFilter, ForkFilterKey, ForkHash, ForkId, Hardfork, Hardforks, Head, DEV_HARDFORKS, @@ -18,10 +19,7 @@ use reth_network_peers::{ sepolia_nodes, NodeRecord, }; use reth_primitives_traits::{ - constants::{ - EIP1559_INITIAL_BASE_FEE, ETHEREUM_BLOCK_GAS_LIMIT, HOLESKY_GENESIS_HASH, - SEPOLIA_GENESIS_HASH, - }, + constants::{EIP1559_INITIAL_BASE_FEE, HOLESKY_GENESIS_HASH, SEPOLIA_GENESIS_HASH}, Header, SealedHeader, }; use reth_trie_common::root::state_root_ref_unhashed; diff --git a/crates/node/core/src/args/payload_builder.rs b/crates/node/core/src/args/payload_builder.rs index dceb10726..524a93195 100644 --- a/crates/node/core/src/args/payload_builder.rs +++ 
b/crates/node/core/src/args/payload_builder.rs @@ -1,12 +1,11 @@ use crate::{cli::config::PayloadBuilderConfig, version::default_extradata}; use alloy_consensus::constants::MAXIMUM_EXTRA_DATA_SIZE; -use alloy_eips::merge::SLOT_DURATION; +use alloy_eips::{eip1559::ETHEREUM_BLOCK_GAS_LIMIT, merge::SLOT_DURATION}; use clap::{ builder::{RangedU64ValueParser, TypedValueParser}, Arg, Args, Command, }; use reth_cli_util::{parse_duration_from_secs, parse_duration_from_secs_or_ms}; -use reth_primitives::constants::ETHEREUM_BLOCK_GAS_LIMIT; use std::{borrow::Cow, ffi::OsStr, time::Duration}; /// Parameters for configuring the Payload Builder diff --git a/crates/node/core/src/args/txpool.rs b/crates/node/core/src/args/txpool.rs index 63f6c566c..282313555 100644 --- a/crates/node/core/src/args/txpool.rs +++ b/crates/node/core/src/args/txpool.rs @@ -1,9 +1,10 @@ //! Transaction pool arguments use crate::cli::config::RethTransactionPoolConfig; +use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; use alloy_primitives::Address; use clap::Args; -use reth_primitives::constants::{ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE}; +use reth_primitives::constants::MIN_PROTOCOL_BASE_FEE; use reth_transaction_pool::{ blobstore::disk::DEFAULT_MAX_CACHED_BLOBS, pool::{NEW_TX_LISTENER_BUFFER_SIZE, PENDING_TX_LISTENER_BUFFER_SIZE}, diff --git a/crates/optimism/chainspec/Cargo.toml b/crates/optimism/chainspec/Cargo.toml index 6b068dabb..4e573ce29 100644 --- a/crates/optimism/chainspec/Cargo.toml +++ b/crates/optimism/chainspec/Cargo.toml @@ -26,6 +26,7 @@ alloy-chains.workspace = true alloy-genesis.workspace = true alloy-primitives.workspace = true alloy-consensus.workspace = true +alloy-eips.workspace = true # op op-alloy-rpc-types.workspace = true @@ -45,14 +46,15 @@ op-alloy-rpc-types.workspace = true [features] default = ["std"] std = [ - "alloy-chains/std", - "alloy-genesis/std", - "alloy-primitives/std", - "op-alloy-rpc-types/std", - "reth-chainspec/std", - "reth-ethereum-forks/std", - "reth-primitives-traits/std", - "reth-optimism-forks/std", - "alloy-consensus/std", - "once_cell/std" + "alloy-chains/std", + "alloy-genesis/std", + "alloy-primitives/std", + "alloy-eips/std", + "op-alloy-rpc-types/std", + "reth-chainspec/std", + "reth-ethereum-forks/std", + "reth-primitives-traits/std", + "reth-optimism-forks/std", + "alloy-consensus/std", + "once_cell/std", ] diff --git a/crates/optimism/chainspec/src/op.rs b/crates/optimism/chainspec/src/op.rs index 8c0da5320..5afb236cd 100644 --- a/crates/optimism/chainspec/src/op.rs +++ b/crates/optimism/chainspec/src/op.rs @@ -3,11 +3,11 @@ use alloc::{sync::Arc, vec}; use alloy_chains::Chain; +use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; use alloy_primitives::{b256, U256}; use reth_chainspec::{once_cell_set, BaseFeeParams, BaseFeeParamsKind, ChainSpec}; use reth_ethereum_forks::EthereumHardfork; use reth_optimism_forks::OptimismHardfork; -use reth_primitives_traits::constants::ETHEREUM_BLOCK_GAS_LIMIT; use crate::{LazyLock, OpChainSpec}; diff --git a/crates/optimism/chainspec/src/op_sepolia.rs b/crates/optimism/chainspec/src/op_sepolia.rs index d3243ebd5..31c9eda6b 100644 --- a/crates/optimism/chainspec/src/op_sepolia.rs +++ b/crates/optimism/chainspec/src/op_sepolia.rs @@ -3,11 +3,11 @@ use alloc::{sync::Arc, vec}; use alloy_chains::{Chain, NamedChain}; +use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; use alloy_primitives::{b256, U256}; use reth_chainspec::{once_cell_set, BaseFeeParams, BaseFeeParamsKind, ChainSpec}; use reth_ethereum_forks::EthereumHardfork; use 
reth_optimism_forks::OptimismHardfork; -use reth_primitives_traits::constants::ETHEREUM_BLOCK_GAS_LIMIT; use crate::{LazyLock, OpChainSpec}; diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index a4091a4a9..377f66cf0 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -15,9 +15,6 @@ pub const EPOCH_SLOTS: u64 = 32; /// The default block nonce in the beacon consensus pub const BEACON_NONCE: u64 = 0u64; -/// The default Ethereum block gas limit. -pub const ETHEREUM_BLOCK_GAS_LIMIT: u64 = 30_000_000; - /// The minimum tx fee below which the txpool will reject the transaction. /// /// Configured to `7` WEI which is the lowest possible value of base fee under mainnet EIP-1559 diff --git a/crates/rpc/rpc-server-types/src/constants.rs b/crates/rpc/rpc-server-types/src/constants.rs index 126bc722f..48019745a 100644 --- a/crates/rpc/rpc-server-types/src/constants.rs +++ b/crates/rpc/rpc-server-types/src/constants.rs @@ -80,9 +80,8 @@ pub mod gas_oracle { /// The default gas limit for `eth_call` and adjacent calls. /// - /// This is different from the default to regular 30M block gas limit - /// [`ETHEREUM_BLOCK_GAS_LIMIT`](reth_primitives::constants::ETHEREUM_BLOCK_GAS_LIMIT) to allow - /// for more complex calls. + /// This is different from the default to regular 30M block gas limit `ETHEREUM_BLOCK_GAS_LIMIT` + /// to allow for more complex calls. pub const RPC_DEFAULT_GAS_CAP: u64 = 50_000_000; /// Allowed error ratio for gas estimation diff --git a/crates/rpc/rpc/src/eth/helpers/state.rs b/crates/rpc/rpc/src/eth/helpers/state.rs index 8a3584279..429a10333 100644 --- a/crates/rpc/rpc/src/eth/helpers/state.rs +++ b/crates/rpc/rpc/src/eth/helpers/state.rs @@ -45,11 +45,11 @@ where #[cfg(test)] mod tests { use super::*; + use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; use alloy_primitives::{Address, StorageKey, StorageValue, U256}; use reth_chainspec::MAINNET; use reth_evm_ethereum::EthEvmConfig; use reth_network_api::noop::NoopNetwork; - use reth_primitives::constants::ETHEREUM_BLOCK_GAS_LIMIT; use reth_provider::test_utils::{ExtendedAccount, MockEthProvider, NoopProvider}; use reth_rpc_eth_api::helpers::EthState; use reth_rpc_eth_types::{ diff --git a/crates/rpc/rpc/src/eth/helpers/transaction.rs b/crates/rpc/rpc/src/eth/helpers/transaction.rs index 8bd9997f6..24a13cb80 100644 --- a/crates/rpc/rpc/src/eth/helpers/transaction.rs +++ b/crates/rpc/rpc/src/eth/helpers/transaction.rs @@ -55,11 +55,11 @@ where #[cfg(test)] mod tests { + use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; use alloy_primitives::{hex_literal::hex, Bytes}; use reth_chainspec::ChainSpecProvider; use reth_evm_ethereum::EthEvmConfig; use reth_network_api::noop::NoopNetwork; - use reth_primitives::constants::ETHEREUM_BLOCK_GAS_LIMIT; use reth_provider::test_utils::NoopProvider; use reth_rpc_eth_api::helpers::EthTransactions; use reth_rpc_eth_types::{ diff --git a/crates/transaction-pool/src/config.rs b/crates/transaction-pool/src/config.rs index 30703f888..8fe49f476 100644 --- a/crates/transaction-pool/src/config.rs +++ b/crates/transaction-pool/src/config.rs @@ -3,8 +3,9 @@ use crate::{ PoolSize, TransactionOrigin, }; use alloy_consensus::constants::EIP4844_TX_TYPE_ID; +use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; use alloy_primitives::Address; -use reth_primitives::constants::{ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE}; +use reth_primitives::constants::MIN_PROTOCOL_BASE_FEE; use 
std::{collections::HashSet, ops::Mul}; /// Guarantees max transactions for one sender, compatible with geth/erigon diff --git a/crates/transaction-pool/src/noop.rs b/crates/transaction-pool/src/noop.rs index 4464ae1fc..11c5e7eea 100644 --- a/crates/transaction-pool/src/noop.rs +++ b/crates/transaction-pool/src/noop.rs @@ -16,10 +16,10 @@ use crate::{ PooledTransactionsElement, PropagatedTransactions, TransactionEvents, TransactionOrigin, TransactionPool, TransactionValidationOutcome, TransactionValidator, ValidPoolTransaction, }; -use alloy_eips::eip4844::BlobAndProofV1; +use alloy_eips::{eip1559::ETHEREUM_BLOCK_GAS_LIMIT, eip4844::BlobAndProofV1}; use alloy_primitives::{Address, TxHash, B256, U256}; use reth_eth_wire_types::HandleMempoolData; -use reth_primitives::{constants::ETHEREUM_BLOCK_GAS_LIMIT, BlobTransactionSidecar}; +use reth_primitives::BlobTransactionSidecar; use std::{collections::HashSet, marker::PhantomData, sync::Arc}; use tokio::sync::{mpsc, mpsc::Receiver}; diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index 03e69c390..a3f192992 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -22,10 +22,9 @@ use alloy_consensus::constants::{ EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, LEGACY_TX_TYPE_ID, }; +use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; use alloy_primitives::{Address, TxHash, B256}; -use reth_primitives::constants::{ - eip4844::BLOB_TX_MIN_BLOB_GASPRICE, ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE, -}; +use reth_primitives::constants::{eip4844::BLOB_TX_MIN_BLOB_GASPRICE, MIN_PROTOCOL_BASE_FEE}; use rustc_hash::FxHashMap; use smallvec::SmallVec; use std::{ diff --git a/crates/transaction-pool/tests/it/evict.rs b/crates/transaction-pool/tests/it/evict.rs index c7438c996..c1d0bbaa6 100644 --- a/crates/transaction-pool/tests/it/evict.rs +++ b/crates/transaction-pool/tests/it/evict.rs @@ -1,8 +1,9 @@ //! Transaction pool eviction tests. 
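Summarising this patch and its neighbours, the re-homed constants are now imported straight from alloy-eips (paths exactly as they appear in these diffs; shown only as a recap, not an extra change):

    use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; // default 30M block gas limit
    use alloy_eips::merge::{EPOCH_SLOTS, SLOT_DURATION}; // 32 slots, 12 s per slot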
+use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; use alloy_primitives::{Address, B256}; use rand::distributions::Uniform; -use reth_primitives::constants::{ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE}; +use reth_primitives::constants::MIN_PROTOCOL_BASE_FEE; use reth_transaction_pool::{ error::PoolErrorKind, test_utils::{ From 044e2d6aea9fbc214563a45ff432c2ef01b42206 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 24 Oct 2024 01:15:45 +0200 Subject: [PATCH 227/425] refactor(primitive-traits): use alloy `EPOCH_SLOTS` constant (#12018) --- Cargo.lock | 1 + crates/consensus/beacon/Cargo.toml | 1 + crates/consensus/beacon/src/engine/mod.rs | 5 ++--- crates/primitives-traits/src/constants/mod.rs | 3 --- crates/storage/provider/src/providers/state/historical.rs | 3 ++- 5 files changed, 6 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b66da6181..155cb3417 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6394,6 +6394,7 @@ dependencies = [ name = "reth-beacon-consensus" version = "1.1.0" dependencies = [ + "alloy-eips", "alloy-genesis", "alloy-primitives", "alloy-rpc-types-engine", diff --git a/crates/consensus/beacon/Cargo.toml b/crates/consensus/beacon/Cargo.toml index b141bd34e..1abc09b2a 100644 --- a/crates/consensus/beacon/Cargo.toml +++ b/crates/consensus/beacon/Cargo.toml @@ -33,6 +33,7 @@ reth-chainspec = { workspace = true, optional = true } # ethereum alloy-primitives.workspace = true alloy-rpc-types-engine.workspace = true +alloy-eips.workspace = true # async tokio = { workspace = true, features = ["sync"] } diff --git a/crates/consensus/beacon/src/engine/mod.rs b/crates/consensus/beacon/src/engine/mod.rs index 5af1e26ac..2363b9078 100644 --- a/crates/consensus/beacon/src/engine/mod.rs +++ b/crates/consensus/beacon/src/engine/mod.rs @@ -1,3 +1,4 @@ +use alloy_eips::merge::EPOCH_SLOTS; use alloy_primitives::{BlockNumber, B256}; use alloy_rpc_types_engine::{ ExecutionPayload, ExecutionPayloadSidecar, ForkchoiceState, PayloadStatus, PayloadStatusEnum, @@ -19,9 +20,7 @@ use reth_node_types::NodeTypesWithEngine; use reth_payload_builder::PayloadBuilderHandle; use reth_payload_primitives::{PayloadAttributes, PayloadBuilder, PayloadBuilderAttributes}; use reth_payload_validator::ExecutionPayloadValidator; -use reth_primitives::{ - constants::EPOCH_SLOTS, BlockNumHash, Head, Header, SealedBlock, SealedHeader, -}; +use reth_primitives::{BlockNumHash, Head, Header, SealedBlock, SealedHeader}; use reth_provider::{ providers::ProviderNodeTypes, BlockIdReader, BlockReader, BlockSource, CanonChainTracker, ChainSpecProvider, ProviderError, StageCheckpointReader, diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index 377f66cf0..86bd93495 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -9,9 +9,6 @@ pub use gas_units::{GIGAGAS, KILOGAS, MEGAGAS}; /// The client version: `reth/v{major}.{minor}.{patch}` pub const RETH_CLIENT_VERSION: &str = concat!("reth/v", env!("CARGO_PKG_VERSION")); -/// An EPOCH is a series of 32 slots. 
-pub const EPOCH_SLOTS: u64 = 32; - /// The default block nonce in the beacon consensus pub const BEACON_NONCE: u64 = 0u64; diff --git a/crates/storage/provider/src/providers/state/historical.rs b/crates/storage/provider/src/providers/state/historical.rs index 640041e08..56a1d057e 100644 --- a/crates/storage/provider/src/providers/state/historical.rs +++ b/crates/storage/provider/src/providers/state/historical.rs @@ -2,6 +2,7 @@ use crate::{ providers::{state::macros::delegate_provider_impls, StaticFileProvider}, AccountReader, BlockHashReader, ProviderError, StateProvider, StateRootProvider, }; +use alloy_eips::merge::EPOCH_SLOTS; use alloy_primitives::{ map::{HashMap, HashSet}, Address, BlockNumber, Bytes, StorageKey, StorageValue, B256, @@ -13,7 +14,7 @@ use reth_db_api::{ table::Table, transaction::DbTx, }; -use reth_primitives::{constants::EPOCH_SLOTS, Account, Bytecode, StaticFileSegment}; +use reth_primitives::{Account, Bytecode, StaticFileSegment}; use reth_storage_api::{StateProofProvider, StorageRootProvider}; use reth_storage_errors::provider::ProviderResult; use reth_trie::{ From f2195026ccbc1d74298362901f4edede0d363023 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 24 Oct 2024 02:10:00 +0200 Subject: [PATCH 228/425] test: more unit tests for `TreeState` (#11687) --- crates/blockchain-tree/Cargo.toml | 1 + crates/blockchain-tree/src/state.rs | 300 ++++++++++++++++++++++++++++ 2 files changed, 301 insertions(+) diff --git a/crates/blockchain-tree/Cargo.toml b/crates/blockchain-tree/Cargo.toml index aa8fab16f..3fa6de2b4 100644 --- a/crates/blockchain-tree/Cargo.toml +++ b/crates/blockchain-tree/Cargo.toml @@ -57,6 +57,7 @@ reth-consensus = { workspace = true, features = ["test-utils"] } reth-testing-utils.workspace = true reth-revm.workspace = true reth-evm-ethereum.workspace = true +reth-execution-types.workspace = true parking_lot.workspace = true assert_matches.workspace = true alloy-genesis.workspace = true diff --git a/crates/blockchain-tree/src/state.rs b/crates/blockchain-tree/src/state.rs index b76db9e6a..ca8af6f9b 100644 --- a/crates/blockchain-tree/src/state.rs +++ b/crates/blockchain-tree/src/state.rs @@ -61,6 +61,7 @@ impl TreeState { pub(crate) fn block_by_hash(&self, block_hash: BlockHash) -> Option<&SealedBlock> { self.block_with_senders_by_hash(block_hash).map(|block| &block.block) } + /// Returns the block with matching hash from any side-chain. /// /// Caution: This will not return blocks from the canonical chain. 
@@ -128,3 +129,302 @@ impl From for SidechainId { Self(value) } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::canonical_chain::CanonicalChain; + use alloy_primitives::B256; + use reth_execution_types::Chain; + use reth_provider::ExecutionOutcome; + + #[test] + fn test_tree_state_initialization() { + // Set up some dummy data for initialization + let last_finalized_block_number = 10u64; + let last_canonical_hashes = vec![(9u64, B256::random()), (10u64, B256::random())]; + let buffer_limit = 5; + + // Initialize the tree state + let tree_state = TreeState::new( + last_finalized_block_number, + last_canonical_hashes.clone(), + buffer_limit, + ); + + // Verify the tree state after initialization + assert_eq!(tree_state.block_chain_id_generator, 0); + assert_eq!(tree_state.block_indices().last_finalized_block(), last_finalized_block_number); + assert_eq!( + *tree_state.block_indices.canonical_chain().inner(), + *CanonicalChain::new(last_canonical_hashes.into_iter().collect()).inner() + ); + assert!(tree_state.chains.is_empty()); + assert!(tree_state.buffered_blocks.lru.is_empty()); + } + + #[test] + fn test_tree_state_next_id() { + // Initialize the tree state + let mut tree_state = TreeState::new(0, vec![], 5); + + // Generate a few sidechain IDs + let first_id = tree_state.next_id(); + let second_id = tree_state.next_id(); + + // Verify the generated sidechain IDs and the updated generator state + assert_eq!(first_id, SidechainId(0)); + assert_eq!(second_id, SidechainId(1)); + assert_eq!(tree_state.block_chain_id_generator, 2); + } + + #[test] + fn test_tree_state_insert_chain() { + // Initialize tree state + let mut tree_state = TreeState::new(0, vec![], 5); + + // Create a chain with two blocks + let block = SealedBlockWithSenders::default(); + let block1_hash = B256::random(); + let block2_hash = B256::random(); + + let mut block1 = block.clone(); + let mut block2 = block; + + block1.block.header.set_hash(block1_hash); + block1.block.header.set_block_number(9); + block2.block.header.set_hash(block2_hash); + block2.block.header.set_block_number(10); + + let chain = AppendableChain::new(Chain::new( + [block1, block2], + Default::default(), + Default::default(), + )); + + // Insert the chain into the TreeState + let chain_id = tree_state.insert_chain(chain).unwrap(); + + // Verify the chain ID and that it was added to the chains collection + assert_eq!(chain_id, SidechainId(0)); + assert!(tree_state.chains.contains_key(&chain_id)); + + // Ensure that the block indices are updated + assert_eq!( + tree_state.block_indices.get_side_chain_id(&block1_hash).unwrap(), + SidechainId(0) + ); + assert_eq!( + tree_state.block_indices.get_side_chain_id(&block2_hash).unwrap(), + SidechainId(0) + ); + + // Ensure that the block chain ID generator was updated + assert_eq!(tree_state.block_chain_id_generator, 1); + + // Create an empty chain + let chain_empty = AppendableChain::new(Chain::default()); + + // Insert the empty chain into the tree state + let chain_id = tree_state.insert_chain(chain_empty); + + // Ensure that the empty chain was not inserted + assert!(chain_id.is_none()); + + // Nothing should have changed and no new chain should have been added + assert!(tree_state.chains.contains_key(&SidechainId(0))); + assert!(!tree_state.chains.contains_key(&SidechainId(1))); + assert_eq!( + tree_state.block_indices.get_side_chain_id(&block1_hash).unwrap(), + SidechainId(0) + ); + assert_eq!( + tree_state.block_indices.get_side_chain_id(&block2_hash).unwrap(), + SidechainId(0) + ); + 
assert_eq!(tree_state.block_chain_id_generator, 1); + } + + #[test] + fn test_block_by_hash_side_chain() { + // Initialize a tree state with some dummy data + let mut tree_state = TreeState::new(0, vec![], 5); + + // Create two side-chain blocks with random hashes + let block1_hash = B256::random(); + let block2_hash = B256::random(); + + let mut block1 = SealedBlockWithSenders::default(); + let mut block2 = SealedBlockWithSenders::default(); + + block1.block.header.set_hash(block1_hash); + block1.block.header.set_block_number(9); + block2.block.header.set_hash(block2_hash); + block2.block.header.set_block_number(10); + + // Create an chain with these blocks + let chain = AppendableChain::new(Chain::new( + vec![block1.clone(), block2.clone()], + Default::default(), + Default::default(), + )); + + // Insert the side chain into the TreeState + tree_state.insert_chain(chain).unwrap(); + + // Retrieve the blocks by their hashes + let retrieved_block1 = tree_state.block_by_hash(block1_hash); + assert_eq!(*retrieved_block1.unwrap(), block1.block); + + let retrieved_block2 = tree_state.block_by_hash(block2_hash); + assert_eq!(*retrieved_block2.unwrap(), block2.block); + + // Test block_by_hash with a random hash that doesn't exist + let non_existent_hash = B256::random(); + let result = tree_state.block_by_hash(non_existent_hash); + + // Ensure that no block is found + assert!(result.is_none()); + } + + #[test] + fn test_block_with_senders_by_hash() { + // Initialize a tree state with some dummy data + let mut tree_state = TreeState::new(0, vec![], 5); + + // Create two side-chain blocks with random hashes + let block1_hash = B256::random(); + let block2_hash = B256::random(); + + let mut block1 = SealedBlockWithSenders::default(); + let mut block2 = SealedBlockWithSenders::default(); + + block1.block.header.set_hash(block1_hash); + block1.block.header.set_block_number(9); + block2.block.header.set_hash(block2_hash); + block2.block.header.set_block_number(10); + + // Create a chain with these blocks + let chain = AppendableChain::new(Chain::new( + vec![block1.clone(), block2.clone()], + Default::default(), + Default::default(), + )); + + // Insert the side chain into the TreeState + tree_state.insert_chain(chain).unwrap(); + + // Test to retrieve the blocks with senders by their hashes + let retrieved_block1 = tree_state.block_with_senders_by_hash(block1_hash); + assert_eq!(*retrieved_block1.unwrap(), block1); + + let retrieved_block2 = tree_state.block_with_senders_by_hash(block2_hash); + assert_eq!(*retrieved_block2.unwrap(), block2); + + // Test block_with_senders_by_hash with a random hash that doesn't exist + let non_existent_hash = B256::random(); + let result = tree_state.block_with_senders_by_hash(non_existent_hash); + + // Ensure that no block is found + assert!(result.is_none()); + } + + #[test] + fn test_get_buffered_block() { + // Initialize a tree state with some dummy data + let mut tree_state = TreeState::new(0, vec![], 5); + + // Create a block with a random hash and add it to the buffer + let block_hash = B256::random(); + let mut block = SealedBlockWithSenders::default(); + block.block.header.set_hash(block_hash); + + // Add the block to the buffered blocks in the TreeState + tree_state.buffered_blocks.insert_block(block.clone()); + + // Test get_buffered_block to retrieve the block by its hash + let retrieved_block = tree_state.get_buffered_block(&block_hash); + assert_eq!(*retrieved_block.unwrap(), block); + + // Test get_buffered_block with a non-existent hash + let 
non_existent_hash = B256::random(); + let result = tree_state.get_buffered_block(&non_existent_hash); + + // Ensure that no block is found + assert!(result.is_none()); + } + + #[test] + fn test_lowest_buffered_ancestor() { + // Initialize a tree state with some dummy data + let mut tree_state = TreeState::new(0, vec![], 5); + + // Create blocks with random hashes and set up parent-child relationships + let ancestor_hash = B256::random(); + let descendant_hash = B256::random(); + + let mut ancestor_block = SealedBlockWithSenders::default(); + let mut descendant_block = SealedBlockWithSenders::default(); + + ancestor_block.block.header.set_hash(ancestor_hash); + descendant_block.block.header.set_hash(descendant_hash); + descendant_block.block.header.set_parent_hash(ancestor_hash); + + // Insert the blocks into the buffer + tree_state.buffered_blocks.insert_block(ancestor_block.clone()); + tree_state.buffered_blocks.insert_block(descendant_block.clone()); + + // Test lowest_buffered_ancestor for the descendant block + let lowest_ancestor = tree_state.lowest_buffered_ancestor(&descendant_hash); + assert!(lowest_ancestor.is_some()); + assert_eq!(lowest_ancestor.unwrap().block.header.hash(), ancestor_hash); + + // Test lowest_buffered_ancestor with a non-existent hash + let non_existent_hash = B256::random(); + let result = tree_state.lowest_buffered_ancestor(&non_existent_hash); + + // Ensure that no ancestor is found + assert!(result.is_none()); + } + + #[test] + fn test_receipts_by_block_hash() { + // Initialize a tree state with some dummy data + let mut tree_state = TreeState::new(0, vec![], 5); + + // Create a block with a random hash and receipts + let block_hash = B256::random(); + let receipt1 = Receipt::default(); + let receipt2 = Receipt::default(); + + let mut block = SealedBlockWithSenders::default(); + block.block.header.set_hash(block_hash); + + let receipts = vec![receipt1, receipt2]; + + // Create a chain with the block and its receipts + let chain = AppendableChain::new(Chain::new( + vec![block.clone()], + ExecutionOutcome { receipts: receipts.clone().into(), ..Default::default() }, + Default::default(), + )); + + // Insert the chain into the TreeState + tree_state.insert_chain(chain).unwrap(); + + // Test receipts_by_block_hash for the inserted block + let retrieved_receipts = tree_state.receipts_by_block_hash(block_hash); + assert!(retrieved_receipts.is_some()); + + // Check if the correct receipts are returned + let receipts_ref: Vec<&Receipt> = receipts.iter().collect(); + assert_eq!(retrieved_receipts.unwrap(), receipts_ref); + + // Test receipts_by_block_hash with a non-existent block hash + let non_existent_hash = B256::random(); + let result = tree_state.receipts_by_block_hash(non_existent_hash); + + // Ensure that no receipts are found + assert!(result.is_none()); + } +} From 40935321e3127d02c11552a253e93fcb2a02bc37 Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Thu, 24 Oct 2024 02:28:16 +0200 Subject: [PATCH 229/425] chore(cli): engine cli options conflict with legacy (#11993) --- bin/reth/src/main.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bin/reth/src/main.rs b/bin/reth/src/main.rs index f424163a2..e146912c0 100644 --- a/bin/reth/src/main.rs +++ b/bin/reth/src/main.rs @@ -33,11 +33,11 @@ pub struct EngineArgs { pub legacy: bool, /// Configure persistence threshold for engine experimental. 
- #[arg(long = "engine.persistence-threshold", requires = "experimental", default_value_t = DEFAULT_PERSISTENCE_THRESHOLD)] + #[arg(long = "engine.persistence-threshold", conflicts_with = "legacy", default_value_t = DEFAULT_PERSISTENCE_THRESHOLD)] pub persistence_threshold: u64, /// Configure the target number of blocks to keep in memory. - #[arg(long = "engine.memory-block-buffer-target", requires = "experimental", default_value_t = DEFAULT_MEMORY_BLOCK_BUFFER_TARGET)] + #[arg(long = "engine.memory-block-buffer-target", conflicts_with = "legacy", default_value_t = DEFAULT_MEMORY_BLOCK_BUFFER_TARGET)] pub memory_block_buffer_target: u64, } From 082f2cd2356262d4ffd34ea9565bd4c1aba1a5f4 Mon Sep 17 00:00:00 2001 From: caglarkaya Date: Thu, 24 Oct 2024 04:19:39 +0300 Subject: [PATCH 230/425] refactor: use op-alloy deposit signature (#12016) --- Cargo.lock | 1 - crates/optimism/chainspec/src/lib.rs | 8 +-- crates/optimism/evm/src/execute.rs | 5 +- crates/primitives/Cargo.toml | 52 ++++++++----------- crates/primitives/src/transaction/mod.rs | 9 ++-- .../primitives/src/transaction/signature.rs | 5 +- 6 files changed, 31 insertions(+), 49 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 155cb3417..63f730b1b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8394,7 +8394,6 @@ dependencies = [ "reth-chainspec", "reth-codecs", "reth-ethereum-forks", - "reth-optimism-chainspec", "reth-primitives-traits", "reth-static-file-types", "reth-testing-utils", diff --git a/crates/optimism/chainspec/src/lib.rs b/crates/optimism/chainspec/src/lib.rs index 98c6589d1..83c499de5 100644 --- a/crates/optimism/chainspec/src/lib.rs +++ b/crates/optimism/chainspec/src/lib.rs @@ -20,7 +20,7 @@ mod op_sepolia; use alloc::{vec, vec::Vec}; use alloy_chains::Chain; use alloy_genesis::Genesis; -use alloy_primitives::{Parity, Signature, B256, U256}; +use alloy_primitives::{B256, U256}; pub use base::BASE_MAINNET; pub use base_sepolia::BASE_SEPOLIA; use core::fmt::Display; @@ -178,12 +178,6 @@ pub struct OpChainSpec { pub inner: ChainSpec, } -/// Returns the signature for the optimism deposit transactions, which don't include a -/// signature. 
-pub fn optimism_deposit_tx_signature() -> Signature { - Signature::new(U256::ZERO, U256::ZERO, Parity::Parity(false)) -} - impl EthChainSpec for OpChainSpec { fn chain(&self) -> alloy_chains::Chain { self.inner.chain() diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index 748e57e6b..1cd924098 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -297,9 +297,10 @@ mod tests { use crate::OpChainSpec; use alloy_consensus::TxEip1559; use alloy_primitives::{b256, Address, StorageKey, StorageValue}; + use op_alloy_consensus::TxDeposit; use reth_chainspec::MIN_TRANSACTION_GAS; use reth_evm::execute::{BasicBlockExecutorProvider, BatchExecutor, BlockExecutorProvider}; - use reth_optimism_chainspec::{optimism_deposit_tx_signature, OpChainSpecBuilder}; + use reth_optimism_chainspec::OpChainSpecBuilder; use reth_primitives::{Account, Block, BlockBody, Signature, Transaction, TransactionSigned}; use reth_revm::{ database::StateProviderDatabase, test_utils::StateProviderTest, L1_BLOCK_CONTRACT, @@ -465,7 +466,7 @@ mod tests { gas_limit: MIN_TRANSACTION_GAS, ..Default::default() }), - optimism_deposit_tx_signature(), + TxDeposit::signature(), ); let provider = executor_provider(chain_spec); diff --git a/crates/primitives/Cargo.toml b/crates/primitives/Cargo.toml index 566a114be..107c218c7 100644 --- a/crates/primitives/Cargo.toml +++ b/crates/primitives/Cargo.toml @@ -20,9 +20,6 @@ reth-trie-common.workspace = true revm-primitives = { workspace = true, features = ["serde"] } reth-codecs = { workspace = true, optional = true } -# op-reth -reth-optimism-chainspec = { workspace = true, optional = true } - # ethereum alloy-consensus.workspace = true alloy-primitives = { workspace = true, features = ["rand", "rlp"] } @@ -34,14 +31,15 @@ alloy-eips = { workspace = true, features = ["serde"] } # optimism op-alloy-rpc-types = { workspace = true, optional = true } op-alloy-consensus = { workspace = true, features = [ - "arbitrary", + "arbitrary", + "serde", ], optional = true } # crypto secp256k1 = { workspace = true, features = [ - "global-context", - "recovery", - "rand", + "global-context", + "recovery", + "rand", ], optional = true } k256.workspace = true # for eip-4844 @@ -83,9 +81,9 @@ test-fuzz.workspace = true criterion.workspace = true pprof = { workspace = true, features = [ - "flamegraph", - "frame-pointer", - "criterion", + "flamegraph", + "frame-pointer", + "criterion", ] } [features] @@ -101,13 +99,10 @@ std = [ "once_cell/std", "revm-primitives/std", "secp256k1?/std", - "serde/std" + "serde/std", ] reth-codec = ["dep:reth-codecs", "dep:zstd", "dep:modular-bitfield", "std"] -asm-keccak = [ - "alloy-primitives/asm-keccak", - "revm-primitives/asm-keccak" -] +asm-keccak = ["alloy-primitives/asm-keccak", "revm-primitives/asm-keccak"] arbitrary = [ "dep:arbitrary", "alloy-eips/arbitrary", @@ -124,38 +119,37 @@ arbitrary = [ "alloy-rpc-types?/arbitrary", "alloy-serde?/arbitrary", "op-alloy-consensus?/arbitrary", - "op-alloy-rpc-types?/arbitrary" + "op-alloy-rpc-types?/arbitrary", ] secp256k1 = ["dep:secp256k1"] c-kzg = [ - "dep:c-kzg", - "alloy-consensus/kzg", - "alloy-eips/kzg", - "revm-primitives/c-kzg", + "dep:c-kzg", + "alloy-consensus/kzg", + "alloy-eips/kzg", + "revm-primitives/c-kzg", ] optimism = [ - "dep:op-alloy-consensus", - "dep:reth-optimism-chainspec", - "reth-codecs?/optimism", - "revm-primitives/optimism", + "dep:op-alloy-consensus", + "reth-codecs?/optimism", + "revm-primitives/optimism", ] alloy-compat = [ - 
"dep:alloy-rpc-types", - "dep:alloy-serde", - "dep:op-alloy-rpc-types", + "dep:alloy-rpc-types", + "dep:alloy-serde", + "dep:op-alloy-rpc-types", ] test-utils = [ "reth-primitives-traits/test-utils", "reth-chainspec/test-utils", "reth-codecs?/test-utils", - "reth-trie-common/test-utils" + "reth-trie-common/test-utils", ] serde-bincode-compat = [ "alloy-consensus/serde-bincode-compat", "op-alloy-consensus?/serde-bincode-compat", "reth-primitives-traits/serde-bincode-compat", "serde_with", - "alloy-eips/serde-bincode-compat" + "alloy-eips/serde-bincode-compat", ] [[bench]] diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 1d410da1e..7798433d0 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -56,8 +56,6 @@ mod variant; #[cfg(feature = "optimism")] use op_alloy_consensus::TxDeposit; #[cfg(feature = "optimism")] -use reth_optimism_chainspec::optimism_deposit_tx_signature; -#[cfg(feature = "optimism")] pub use tx_type::DEPOSIT_TX_TYPE_ID; #[cfg(any(test, feature = "reth-codec"))] use tx_type::{ @@ -955,7 +953,7 @@ impl TransactionSignedNoHash { // transactions with an empty signature // // NOTE: this is very hacky and only relevant for op-mainnet pre bedrock - if self.is_legacy() && self.signature == optimism_deposit_tx_signature() { + if self.is_legacy() && self.signature == TxDeposit::signature() { return Some(Address::ZERO) } } @@ -1530,7 +1528,7 @@ impl Decodable2718 for TransactionSigned { #[cfg(feature = "optimism")] TxType::Deposit => Ok(Self::from_transaction_and_signature( Transaction::Deposit(TxDeposit::decode(buf)?), - optimism_deposit_tx_signature(), + TxDeposit::signature(), )), } } @@ -1575,8 +1573,7 @@ impl<'a> arbitrary::Arbitrary<'a> for TransactionSigned { } #[cfg(feature = "optimism")] - let signature = - if transaction.is_deposit() { optimism_deposit_tx_signature() } else { signature }; + let signature = if transaction.is_deposit() { TxDeposit::signature() } else { signature }; Ok(Self::from_transaction_and_signature(transaction, signature)) } diff --git a/crates/primitives/src/transaction/signature.rs b/crates/primitives/src/transaction/signature.rs index 39c0f92fd..5bfdab8e6 100644 --- a/crates/primitives/src/transaction/signature.rs +++ b/crates/primitives/src/transaction/signature.rs @@ -4,9 +4,6 @@ use alloy_rlp::{Decodable, Error as RlpError}; pub use alloy_primitives::Signature; -#[cfg(feature = "optimism")] -use reth_optimism_chainspec::optimism_deposit_tx_signature; - /// The order of the secp256k1 curve, divided by two. Signatures that should be checked according /// to EIP-2 should have an S value less than or equal to this. 
/// @@ -82,7 +79,7 @@ pub fn legacy_parity(signature: &Signature, chain_id: Option) -> Parity { // transactions with an empty signature // // NOTE: this is very hacky and only relevant for op-mainnet pre bedrock - if *signature == optimism_deposit_tx_signature() { + if *signature == op_alloy_consensus::TxDeposit::signature() { return Parity::Parity(false) } Parity::NonEip155(signature.v().y_parity()) From 7a06298cf70fec4338cb5014f12a6a561d7962f2 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Thu, 24 Oct 2024 06:46:07 +0200 Subject: [PATCH 231/425] chore(rpc): make `TransactionCompat::fill` stateful (#11732) --- Cargo.lock | 1 + crates/optimism/rpc/Cargo.toml | 2 +- crates/optimism/rpc/src/eth/mod.rs | 19 +++++++++-- crates/optimism/rpc/src/eth/transaction.rs | 30 ++++++++++++----- crates/optimism/rpc/src/lib.rs | 2 +- crates/rpc/rpc-builder/src/eth.rs | 4 ++- crates/rpc/rpc-builder/src/lib.rs | 9 +++-- crates/rpc/rpc-eth-api/src/core.rs | 2 +- crates/rpc/rpc-eth-api/src/helpers/block.rs | 3 +- crates/rpc/rpc-eth-api/src/helpers/call.rs | 3 +- .../rpc-eth-api/src/helpers/transaction.rs | 8 +++-- crates/rpc/rpc-eth-api/src/types.rs | 7 ++++ crates/rpc/rpc-eth-types/src/simulate.rs | 4 ++- crates/rpc/rpc-eth-types/src/transaction.rs | 6 ++-- crates/rpc/rpc-types-compat/Cargo.toml | 3 ++ crates/rpc/rpc-types-compat/src/block.rs | 8 +++-- .../rpc-types-compat/src/transaction/mod.rs | 32 ++++++++++++++---- crates/rpc/rpc/src/eth/core.rs | 13 ++++++-- crates/rpc/rpc/src/eth/filter.rs | 21 +++++++----- crates/rpc/rpc/src/eth/helpers/types.rs | 6 +++- crates/rpc/rpc/src/eth/pubsub.rs | 33 ++++++++++++------- crates/rpc/rpc/src/txpool.rs | 28 ++++++++-------- 22 files changed, 168 insertions(+), 76 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 63f730b1b..f749672fe 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8877,6 +8877,7 @@ dependencies = [ "alloy-serde", "reth-primitives", "reth-trie-common", + "serde", "serde_json", ] diff --git a/crates/optimism/rpc/Cargo.toml b/crates/optimism/rpc/Cargo.toml index dc0f96c40..17ebec7ff 100644 --- a/crates/optimism/rpc/Cargo.toml +++ b/crates/optimism/rpc/Cargo.toml @@ -56,7 +56,7 @@ serde_json.workspace = true # misc thiserror.workspace = true tracing.workspace = true -derive_more.workspace = true +derive_more = { workspace = true, features = ["constructor", "deref"] } [dev-dependencies] reth-optimism-chainspec.workspace = true diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index d65dd8edd..04774a465 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -39,7 +39,7 @@ use reth_tasks::{ }; use reth_transaction_pool::TransactionPool; -use crate::{OpEthApiError, OpTxBuilder, SequencerClient}; +use crate::{OpEthApiError, SequencerClient}; /// Adapter for [`EthApiInner`], which holds all the data required to serve core `eth_` API. pub type EthApiNodeBackend = EthApiInner< @@ -59,7 +59,7 @@ pub type EthApiNodeBackend = EthApiInner< /// /// This type implements the [`FullEthApi`](reth_rpc_eth_api::helpers::FullEthApi) by implemented /// all the `Eth` helper traits and prerequisite traits. -#[derive(Clone, Deref)] +#[derive(Deref)] pub struct OpEthApi { /// Gateway to node's core components. 
#[deref] @@ -102,7 +102,11 @@ where { type Error = OpEthApiError; type NetworkTypes = Optimism; - type TransactionCompat = OpTxBuilder; + type TransactionCompat = Self; + + fn tx_resp_builder(&self) -> &Self::TransactionCompat { + self + } } impl EthApiSpec for OpEthApi @@ -249,3 +253,12 @@ impl fmt::Debug for OpEthApi { f.debug_struct("OpEthApi").finish_non_exhaustive() } } + +impl Clone for OpEthApi +where + N: FullNodeComponents, +{ + fn clone(&self) -> Self { + Self { inner: self.inner.clone(), sequencer_client: self.sequencer_client.clone() } + } +} diff --git a/crates/optimism/rpc/src/eth/transaction.rs b/crates/optimism/rpc/src/eth/transaction.rs index b7575c244..4ac2d7e6b 100644 --- a/crates/optimism/rpc/src/eth/transaction.rs +++ b/crates/optimism/rpc/src/eth/transaction.rs @@ -6,7 +6,7 @@ use alloy_rpc_types::TransactionInfo; use op_alloy_rpc_types::Transaction; use reth_node_api::FullNodeComponents; use reth_primitives::TransactionSignedEcRecovered; -use reth_provider::{BlockReaderIdExt, TransactionsProvider}; +use reth_provider::{BlockReaderIdExt, ReceiptProvider, TransactionsProvider}; use reth_rpc::eth::EthTxBuilder; use reth_rpc_eth_api::{ helpers::{EthSigner, EthTransactions, LoadTransaction, SpawnBlocking}, @@ -88,22 +88,34 @@ where } } -/// Builds OP transaction response type. -#[derive(Clone, Debug, Copy)] -pub struct OpTxBuilder; - -impl TransactionCompat for OpTxBuilder { +impl TransactionCompat for OpEthApi +where + N: FullNodeComponents, +{ type Transaction = Transaction; - fn fill(tx: TransactionSignedEcRecovered, tx_info: TransactionInfo) -> Self::Transaction { + fn fill( + &self, + tx: TransactionSignedEcRecovered, + tx_info: TransactionInfo, + ) -> Self::Transaction { let signed_tx = tx.clone().into_signed(); + let hash = tx.hash; - let mut inner = EthTxBuilder::fill(tx, tx_info).inner; + let mut inner = EthTxBuilder.fill(tx, tx_info).inner; if signed_tx.is_deposit() { inner.gas_price = Some(signed_tx.max_fee_per_gas()) } + let deposit_receipt_version = self + .inner + .provider() + .receipt_by_hash(hash) + .ok() // todo: change sig to return result + .flatten() + .and_then(|receipt| receipt.deposit_receipt_version); + Transaction { inner, source_hash: signed_tx.source_hash(), @@ -111,7 +123,7 @@ impl TransactionCompat for OpTxBuilder { // only include is_system_tx if true: is_system_tx: (signed_tx.is_deposit() && signed_tx.is_system_transaction()) .then_some(true), - deposit_receipt_version: None, // todo: how to fill this field? 
+ deposit_receipt_version, } } diff --git a/crates/optimism/rpc/src/lib.rs b/crates/optimism/rpc/src/lib.rs index e3fef7adb..0ff1451d0 100644 --- a/crates/optimism/rpc/src/lib.rs +++ b/crates/optimism/rpc/src/lib.rs @@ -15,5 +15,5 @@ pub mod eth; pub mod sequencer; pub use error::{OpEthApiError, OptimismInvalidTransactionError, SequencerClientError}; -pub use eth::{transaction::OpTxBuilder, OpEthApi, OpReceiptBuilder}; +pub use eth::{OpEthApi, OpReceiptBuilder}; pub use sequencer::SequencerClient; diff --git a/crates/rpc/rpc-builder/src/eth.rs b/crates/rpc/rpc-builder/src/eth.rs index 613652678..40acecfed 100644 --- a/crates/rpc/rpc-builder/src/eth.rs +++ b/crates/rpc/rpc-builder/src/eth.rs @@ -22,7 +22,7 @@ pub struct EthHandlers { /// Polling based filter handler available on all transports pub filter: EthFilter, /// Handler for subscriptions only available for transports that support it (ws, ipc) - pub pubsub: EthPubSub, + pub pubsub: EthPubSub, } impl EthHandlers @@ -94,6 +94,7 @@ where ctx.cache.clone(), ctx.config.filter_config(), Box::new(ctx.executor.clone()), + api.tx_resp_builder().clone(), ); let pubsub = EthPubSub::with_spawner( @@ -102,6 +103,7 @@ where ctx.events.clone(), ctx.network.clone(), Box::new(ctx.executor.clone()), + api.tx_resp_builder().clone(), ); Self { api, cache: ctx.cache, filter, pubsub } diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index cd93aeb62..72b53efe6 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -1199,9 +1199,12 @@ where .into_rpc() .into(), RethRpcModule::Web3 => Web3Api::new(self.network.clone()).into_rpc().into(), - RethRpcModule::Txpool => { - TxPoolApi::<_, EthApi>::new(self.pool.clone()).into_rpc().into() - } + RethRpcModule::Txpool => TxPoolApi::new( + self.pool.clone(), + self.eth.api.tx_resp_builder().clone(), + ) + .into_rpc() + .into(), RethRpcModule::Rpc => RPCApi::new( namespaces .iter() diff --git a/crates/rpc/rpc-eth-api/src/core.rs b/crates/rpc/rpc-eth-api/src/core.rs index 20edf96d8..66bc5a44d 100644 --- a/crates/rpc/rpc-eth-api/src/core.rs +++ b/crates/rpc/rpc-eth-api/src/core.rs @@ -502,7 +502,7 @@ where trace!(target: "rpc::eth", ?hash, "Serving eth_getTransactionByHash"); Ok(EthTransactions::transaction_by_hash(self, hash) .await? 
- .map(|tx| tx.into_transaction::())) + .map(|tx| tx.into_transaction(self.tx_resp_builder()))) } /// Handler for: `eth_getRawTransactionByBlockHashAndIndex` diff --git a/crates/rpc/rpc-eth-api/src/helpers/block.rs b/crates/rpc/rpc-eth-api/src/helpers/block.rs index 9bf35d850..da5f275ef 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/block.rs @@ -63,11 +63,12 @@ pub trait EthBlocks: LoadBlock { .map_err(Self::Error::from_eth_err)?; } - let block = from_block::( + let block = from_block( (*block).clone().unseal(), total_difficulty.unwrap_or_default(), full.into(), Some(block_hash), + self.tx_resp_builder(), ) .map_err(Self::Error::from_eth_err)?; Ok(Some(block)) diff --git a/crates/rpc/rpc-eth-api/src/helpers/call.rs b/crates/rpc/rpc-eth-api/src/helpers/call.rs index 0acf66462..1510233c5 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/call.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/call.rs @@ -191,7 +191,7 @@ pub trait EthCall: Call + LoadPendingBlock { results.push((env.tx.caller, res.result)); } - let block = simulate::build_block::( + let block = simulate::build_block( results, transactions, &block_env, @@ -199,6 +199,7 @@ pub trait EthCall: Call + LoadPendingBlock { total_difficulty, return_full_transactions, &db, + this.tx_resp_builder(), )?; parent_hash = block.inner.header.hash; diff --git a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs index d29787d7a..0d16a5c91 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs @@ -209,9 +209,10 @@ pub trait EthTransactions: LoadTransaction { index: Some(index as u64), }; - return Ok(Some(from_recovered_with_block_context::( + return Ok(Some(from_recovered_with_block_context( tx.clone().with_signer(*signer), tx_info, + self.tx_resp_builder(), ))) } } @@ -237,7 +238,7 @@ pub trait EthTransactions: LoadTransaction { LoadState::pool(self).get_transaction_by_sender_and_nonce(sender, nonce) { let transaction = tx.transaction.clone().into_consensus(); - return Ok(Some(from_recovered::(transaction.into()))); + return Ok(Some(from_recovered(transaction.into(), self.tx_resp_builder()))); } } @@ -288,9 +289,10 @@ pub trait EthTransactions: LoadTransaction { base_fee: base_fee_per_gas.map(u128::from), index: Some(index as u64), }; - from_recovered_with_block_context::( + from_recovered_with_block_context( tx.clone().with_signer(*signer), tx_info, + self.tx_resp_builder(), ) }) }) diff --git a/crates/rpc/rpc-eth-api/src/types.rs b/crates/rpc/rpc-eth-api/src/types.rs index 9ddc23ea3..653730ed3 100644 --- a/crates/rpc/rpc-eth-api/src/types.rs +++ b/crates/rpc/rpc-eth-api/src/types.rs @@ -23,12 +23,19 @@ pub trait EthApiTypes: Send + Sync + Clone { type NetworkTypes: Network; /// Conversion methods for transaction RPC type. type TransactionCompat: Send + Sync + Clone + fmt::Debug; + + /// Returns reference to transaction response builder. + fn tx_resp_builder(&self) -> &Self::TransactionCompat; } impl EthApiTypes for () { type Error = EthApiError; type NetworkTypes = AnyNetwork; type TransactionCompat = (); + + fn tx_resp_builder(&self) -> &Self::TransactionCompat { + self + } } /// Adapter for network specific transaction type. 
diff --git a/crates/rpc/rpc-eth-types/src/simulate.rs b/crates/rpc/rpc-eth-types/src/simulate.rs index 77db511e6..1d443861d 100644 --- a/crates/rpc/rpc-eth-types/src/simulate.rs +++ b/crates/rpc/rpc-eth-types/src/simulate.rs @@ -172,6 +172,7 @@ where } /// Handles outputs of the calls execution and builds a [`SimulatedBlock`]. +#[expect(clippy::too_many_arguments)] pub fn build_block( results: Vec<(Address, ExecutionResult)>, transactions: Vec, @@ -180,6 +181,7 @@ pub fn build_block( total_difficulty: U256, full_transactions: bool, db: &CacheDB>>, + tx_resp_builder: &T, ) -> Result>, EthApiError> { let mut calls: Vec = Vec::with_capacity(results.len()); let mut senders = Vec::with_capacity(results.len()); @@ -304,6 +306,6 @@ pub fn build_block( let txs_kind = if full_transactions { BlockTransactionsKind::Full } else { BlockTransactionsKind::Hashes }; - let block = from_block::(block, total_difficulty, txs_kind, None)?; + let block = from_block(block, total_difficulty, txs_kind, None, tx_resp_builder)?; Ok(SimulatedBlock { inner: block, calls }) } diff --git a/crates/rpc/rpc-eth-types/src/transaction.rs b/crates/rpc/rpc-eth-types/src/transaction.rs index c3ca1b503..7d2237a1b 100644 --- a/crates/rpc/rpc-eth-types/src/transaction.rs +++ b/crates/rpc/rpc-eth-types/src/transaction.rs @@ -41,9 +41,9 @@ impl TransactionSource { } /// Conversion into network specific transaction type. - pub fn into_transaction(self) -> T::Transaction { + pub fn into_transaction(self, resp_builder: &T) -> T::Transaction { match self { - Self::Pool(tx) => from_recovered::(tx), + Self::Pool(tx) => from_recovered(tx, resp_builder), Self::Block { transaction, index, block_hash, block_number, base_fee } => { let tx_info = TransactionInfo { hash: Some(transaction.hash()), @@ -53,7 +53,7 @@ impl TransactionSource { base_fee: base_fee.map(u128::from), }; - from_recovered_with_block_context::(transaction, tx_info) + from_recovered_with_block_context(transaction, tx_info, resp_builder) } } } diff --git a/crates/rpc/rpc-types-compat/Cargo.toml b/crates/rpc/rpc-types-compat/Cargo.toml index 8e436f0d3..7d5eac9db 100644 --- a/crates/rpc/rpc-types-compat/Cargo.toml +++ b/crates/rpc/rpc-types-compat/Cargo.toml @@ -26,5 +26,8 @@ alloy-serde.workspace = true alloy-rpc-types-engine.workspace = true alloy-consensus.workspace = true +# io +serde.workspace = true + [dev-dependencies] serde_json.workspace = true \ No newline at end of file diff --git a/crates/rpc/rpc-types-compat/src/block.rs b/crates/rpc/rpc-types-compat/src/block.rs index a650a69c1..8cddc8c44 100644 --- a/crates/rpc/rpc-types-compat/src/block.rs +++ b/crates/rpc/rpc-types-compat/src/block.rs @@ -20,12 +20,15 @@ pub fn from_block( total_difficulty: U256, kind: BlockTransactionsKind, block_hash: Option, + tx_resp_builder: &T, ) -> Result, BlockError> { match kind { BlockTransactionsKind::Hashes => { Ok(from_block_with_tx_hashes::(block, total_difficulty, block_hash)) } - BlockTransactionsKind::Full => from_block_full::(block, total_difficulty, block_hash), + BlockTransactionsKind::Full => { + from_block_full::(block, total_difficulty, block_hash, tx_resp_builder) + } } } @@ -60,6 +63,7 @@ pub fn from_block_full( mut block: BlockWithSenders, total_difficulty: U256, block_hash: Option, + tx_resp_builder: &T, ) -> Result, BlockError> { let block_hash = block_hash.unwrap_or_else(|| block.block.header.hash_slow()); let block_number = block.block.number; @@ -83,7 +87,7 @@ pub fn from_block_full( index: Some(idx as u64), }; - 
from_recovered_with_block_context::(signed_tx_ec_recovered, tx_info) + from_recovered_with_block_context::(signed_tx_ec_recovered, tx_info, tx_resp_builder) }) .collect::>(); diff --git a/crates/rpc/rpc-types-compat/src/transaction/mod.rs b/crates/rpc/rpc-types-compat/src/transaction/mod.rs index 7ffd48cb1..f8b46454d 100644 --- a/crates/rpc/rpc-types-compat/src/transaction/mod.rs +++ b/crates/rpc/rpc-types-compat/src/transaction/mod.rs @@ -2,6 +2,7 @@ mod signature; pub use signature::*; + use std::fmt; use alloy_consensus::Transaction as _; @@ -11,6 +12,7 @@ use alloy_rpc_types::{ }; use alloy_serde::WithOtherFields; use reth_primitives::{TransactionSigned, TransactionSignedEcRecovered, TxType}; +use serde::{Deserialize, Serialize}; /// Create a new rpc transaction result for a mined transaction, using the given block hash, /// number, and tx index fields to populate the corresponding fields in the rpc result. @@ -20,21 +22,33 @@ use reth_primitives::{TransactionSigned, TransactionSignedEcRecovered, TxType}; pub fn from_recovered_with_block_context( tx: TransactionSignedEcRecovered, tx_info: TransactionInfo, + resp_builder: &T, ) -> T::Transaction { - T::fill(tx, tx_info) + resp_builder.fill(tx, tx_info) } /// Create a new rpc transaction result for a _pending_ signed transaction, setting block /// environment related fields to `None`. -pub fn from_recovered(tx: TransactionSignedEcRecovered) -> T::Transaction { - T::fill(tx, TransactionInfo::default()) +pub fn from_recovered( + tx: TransactionSignedEcRecovered, + resp_builder: &T, +) -> T::Transaction { + resp_builder.fill(tx, TransactionInfo::default()) } /// Builds RPC transaction w.r.t. network. pub trait TransactionCompat: Send + Sync + Unpin + Clone + fmt::Debug { /// RPC transaction response type. - type Transaction: Send + Clone + Default + fmt::Debug; - + type Transaction: Serialize + + for<'de> Deserialize<'de> + + Send + + Sync + + Unpin + + Clone + + Default + + fmt::Debug; + + /// /// Formats gas price and max fee per gas for RPC transaction response w.r.t. network specific /// transaction type. fn gas_price(signed_tx: &TransactionSigned, base_fee: Option) -> GasPrice { @@ -63,7 +77,7 @@ pub trait TransactionCompat: Send + Sync + Unpin + Clone + fmt::Debug { /// Create a new rpc transaction result for a _pending_ signed transaction, setting block /// environment related fields to `None`. - fn fill(tx: TransactionSignedEcRecovered, tx_inf: TransactionInfo) -> Self::Transaction; + fn fill(&self, tx: TransactionSignedEcRecovered, tx_inf: TransactionInfo) -> Self::Transaction; /// Truncates the input of a transaction to only the first 4 bytes. // todo: remove in favour of using constructor on `TransactionResponse` or similar @@ -80,7 +94,11 @@ impl TransactionCompat for () { // `alloy_network::AnyNetwork` type Transaction = WithOtherFields; - fn fill(_tx: TransactionSignedEcRecovered, _tx_info: TransactionInfo) -> Self::Transaction { + fn fill( + &self, + _tx: TransactionSignedEcRecovered, + _tx_info: TransactionInfo, + ) -> Self::Transaction { WithOtherFields::default() } diff --git a/crates/rpc/rpc/src/eth/core.rs b/crates/rpc/rpc/src/eth/core.rs index 5c7fbbd00..21787873e 100644 --- a/crates/rpc/rpc/src/eth/core.rs +++ b/crates/rpc/rpc/src/eth/core.rs @@ -36,12 +36,15 @@ use crate::eth::EthTxBuilder; #[derive(Deref)] pub struct EthApi { /// All nested fields bundled together. + #[deref] pub(super) inner: Arc>, + /// Transaction RPC response builder. 
+ pub tx_resp_builder: EthTxBuilder, } impl Clone for EthApi { fn clone(&self) -> Self { - Self { inner: self.inner.clone() } + Self { inner: self.inner.clone(), tx_resp_builder: EthTxBuilder } } } @@ -81,7 +84,7 @@ where proof_permits, ); - Self { inner: Arc::new(inner) } + Self { inner: Arc::new(inner), tx_resp_builder: EthTxBuilder } } } @@ -119,7 +122,7 @@ where ctx.config.proof_permits, ); - Self { inner: Arc::new(inner) } + Self { inner: Arc::new(inner), tx_resp_builder: EthTxBuilder } } } @@ -131,6 +134,10 @@ where // todo: replace with alloy_network::Ethereum type NetworkTypes = AnyNetwork; type TransactionCompat = EthTxBuilder; + + fn tx_resp_builder(&self) -> &Self::TransactionCompat { + &self.tx_resp_builder + } } impl std::fmt::Debug diff --git a/crates/rpc/rpc/src/eth/filter.rs b/crates/rpc/rpc/src/eth/filter.rs index b136861c7..24058da17 100644 --- a/crates/rpc/rpc/src/eth/filter.rs +++ b/crates/rpc/rpc/src/eth/filter.rs @@ -4,7 +4,6 @@ use std::{ collections::HashMap, fmt, iter::StepBy, - marker::PhantomData, ops::RangeInclusive, sync::Arc, time::{Duration, Instant}, @@ -44,7 +43,7 @@ pub struct EthFilter { /// All nested fields bundled together inner: Arc>>, /// Assembles response data w.r.t. network. - _tx_resp_builder: PhantomData, + tx_resp_builder: Eth::TransactionCompat, } impl Clone for EthFilter @@ -52,7 +51,7 @@ where Eth: EthApiTypes, { fn clone(&self) -> Self { - Self { inner: self.inner.clone(), _tx_resp_builder: PhantomData } + Self { inner: self.inner.clone(), tx_resp_builder: self.tx_resp_builder.clone() } } } @@ -76,6 +75,7 @@ where eth_cache: EthStateCache, config: EthFilterConfig, task_spawner: Box, + tx_resp_builder: Eth::TransactionCompat, ) -> Self { let EthFilterConfig { max_blocks_per_filter, max_logs_per_response, stale_filter_ttl } = config; @@ -93,7 +93,7 @@ where max_logs_per_response: max_logs_per_response.unwrap_or(usize::MAX), }; - let eth_filter = Self { inner: Arc::new(inner), _tx_resp_builder: PhantomData }; + let eth_filter = Self { inner: Arc::new(inner), tx_resp_builder }; let this = eth_filter.clone(); eth_filter.inner.task_spawner.spawn_critical( @@ -278,7 +278,7 @@ where PendingTransactionFilterKind::Full => { let stream = self.inner.pool.new_pending_pool_transactions_listener(); let full_txs_receiver = - FullTransactionsReceiver::<_, Eth::TransactionCompat>::new(stream); + FullTransactionsReceiver::new(stream, self.tx_resp_builder.clone()); FilterKind::PendingTransaction(PendingTransactionKind::FullTransaction(Arc::new( full_txs_receiver, ))) @@ -603,7 +603,7 @@ impl PendingTransactionsReceiver { #[derive(Debug, Clone)] struct FullTransactionsReceiver { txs_stream: Arc>>, - _tx_resp_builder: PhantomData, + tx_resp_builder: TxCompat, } impl FullTransactionsReceiver @@ -612,8 +612,8 @@ where TxCompat: TransactionCompat, { /// Creates a new `FullTransactionsReceiver` encapsulating the provided transaction stream. - fn new(stream: NewSubpoolTransactionStream) -> Self { - Self { txs_stream: Arc::new(Mutex::new(stream)), _tx_resp_builder: PhantomData } + fn new(stream: NewSubpoolTransactionStream, tx_resp_builder: TxCompat) -> Self { + Self { txs_stream: Arc::new(Mutex::new(stream)), tx_resp_builder } } /// Returns all new pending transactions received since the last poll. 
@@ -625,7 +625,10 @@ where let mut prepared_stream = self.txs_stream.lock().await; while let Ok(tx) = prepared_stream.try_recv() { - pending_txs.push(from_recovered::(tx.transaction.to_recovered_transaction())) + pending_txs.push(from_recovered( + tx.transaction.to_recovered_transaction(), + &self.tx_resp_builder, + )) } FilterChanges::Transactions(pending_txs) } diff --git a/crates/rpc/rpc/src/eth/helpers/types.rs b/crates/rpc/rpc/src/eth/helpers/types.rs index ab7b1a268..848bcdc36 100644 --- a/crates/rpc/rpc/src/eth/helpers/types.rs +++ b/crates/rpc/rpc/src/eth/helpers/types.rs @@ -21,7 +21,11 @@ where { type Transaction = ::TransactionResponse; - fn fill(tx: TransactionSignedEcRecovered, tx_info: TransactionInfo) -> Self::Transaction { + fn fill( + &self, + tx: TransactionSignedEcRecovered, + tx_info: TransactionInfo, + ) -> Self::Transaction { let signer = tx.signer(); let signed_tx = tx.into_signed(); diff --git a/crates/rpc/rpc/src/eth/pubsub.rs b/crates/rpc/rpc/src/eth/pubsub.rs index 7bd1fd03d..ac962610e 100644 --- a/crates/rpc/rpc/src/eth/pubsub.rs +++ b/crates/rpc/rpc/src/eth/pubsub.rs @@ -1,6 +1,6 @@ //! `eth_` `PubSub` RPC handler implementation -use std::{marker::PhantomData, sync::Arc}; +use std::sync::Arc; use alloy_primitives::TxHash; use alloy_rpc_types::{ @@ -17,7 +17,7 @@ use jsonrpsee::{ }; use reth_network_api::NetworkInfo; use reth_provider::{BlockReader, CanonStateSubscriptions, EvmEnvProvider}; -use reth_rpc_eth_api::{pubsub::EthPubSubApiServer, FullEthApiTypes, RpcTransaction}; +use reth_rpc_eth_api::{pubsub::EthPubSubApiServer, TransactionCompat}; use reth_rpc_eth_types::logs_utils; use reth_rpc_server_types::result::{internal_rpc_err, invalid_params_rpc_err}; use reth_rpc_types_compat::transaction::from_recovered; @@ -38,7 +38,7 @@ pub struct EthPubSub { inner: Arc>, /// The type that's used to spawn subscription tasks. 
subscription_task_spawner: Box, - _tx_resp_builder: PhantomData, + tx_resp_builder: Eth, } // === impl EthPubSub === @@ -47,13 +47,20 @@ impl EthPubSub Self { + pub fn new( + provider: Provider, + pool: Pool, + chain_events: Events, + network: Network, + tx_resp_builder: Eth, + ) -> Self { Self::with_spawner( provider, pool, chain_events, network, Box::::default(), + tx_resp_builder, ) } @@ -64,21 +71,22 @@ impl EthPubSub, + tx_resp_builder: Eth, ) -> Self { let inner = EthPubSubInner { provider, pool, chain_events, network }; - Self { inner: Arc::new(inner), subscription_task_spawner, _tx_resp_builder: PhantomData } + Self { inner: Arc::new(inner), subscription_task_spawner, tx_resp_builder } } } #[async_trait::async_trait] -impl EthPubSubApiServer> +impl EthPubSubApiServer for EthPubSub where Provider: BlockReader + EvmEnvProvider + Clone + 'static, Pool: TransactionPool + 'static, Events: CanonStateSubscriptions + Clone + 'static, Network: NetworkInfo + Clone + 'static, - Eth: FullEthApiTypes + 'static, + Eth: TransactionCompat + 'static, { /// Handler for `eth_subscribe` async fn subscribe( @@ -89,8 +97,9 @@ where ) -> jsonrpsee::core::SubscriptionResult { let sink = pending.accept().await?; let pubsub = self.inner.clone(); + let resp_builder = self.tx_resp_builder.clone(); self.subscription_task_spawner.spawn(Box::pin(async move { - let _ = handle_accepted::<_, _, _, _, Eth>(pubsub, sink, kind, params).await; + let _ = handle_accepted(pubsub, sink, kind, params, resp_builder).await; })); Ok(()) @@ -103,13 +112,14 @@ async fn handle_accepted( accepted_sink: SubscriptionSink, kind: SubscriptionKind, params: Option, + tx_resp_builder: Eth, ) -> Result<(), ErrorObject<'static>> where Provider: BlockReader + EvmEnvProvider + Clone + 'static, Pool: TransactionPool + 'static, Events: CanonStateSubscriptions + Clone + 'static, Network: NetworkInfo + Clone + 'static, - Eth: FullEthApiTypes, + Eth: TransactionCompat, { match kind { SubscriptionKind::NewHeads => { @@ -140,10 +150,9 @@ where Params::Bool(true) => { // full transaction objects requested let stream = pubsub.full_pending_transaction_stream().map(|tx| { - EthSubscriptionResult::FullTransaction(Box::new(from_recovered::< - Eth::TransactionCompat, - >( + EthSubscriptionResult::FullTransaction(Box::new(from_recovered( tx.transaction.to_recovered_transaction(), + &tx_resp_builder, ))) }); return pipe_from_stream(accepted_sink, stream).await diff --git a/crates/rpc/rpc/src/txpool.rs b/crates/rpc/rpc/src/txpool.rs index 47aaac0bb..d03e10ca7 100644 --- a/crates/rpc/rpc/src/txpool.rs +++ b/crates/rpc/rpc/src/txpool.rs @@ -1,4 +1,4 @@ -use std::{collections::BTreeMap, marker::PhantomData}; +use std::collections::BTreeMap; use alloy_consensus::Transaction; use alloy_primitives::Address; @@ -9,7 +9,6 @@ use async_trait::async_trait; use jsonrpsee::core::RpcResult as Result; use reth_primitives::TransactionSignedEcRecovered; use reth_rpc_api::TxPoolApiServer; -use reth_rpc_eth_api::{FullEthApiTypes, RpcTransaction}; use reth_rpc_types_compat::{transaction::from_recovered, TransactionCompat}; use reth_transaction_pool::{AllPoolTransactions, PoolTransaction, TransactionPool}; use tracing::trace; @@ -21,33 +20,34 @@ use tracing::trace; pub struct TxPoolApi { /// An interface to interact with the pool pool: Pool, - _tx_resp_builder: PhantomData, + tx_resp_builder: Eth, } impl TxPoolApi { /// Creates a new instance of `TxpoolApi`. 
- pub const fn new(pool: Pool) -> Self { - Self { pool, _tx_resp_builder: PhantomData } + pub const fn new(pool: Pool, tx_resp_builder: Eth) -> Self { + Self { pool, tx_resp_builder } } } impl TxPoolApi where Pool: TransactionPool + 'static, - Eth: FullEthApiTypes, + Eth: TransactionCompat, { - fn content(&self) -> TxpoolContent> { + fn content(&self) -> TxpoolContent { #[inline] fn insert( tx: &Tx, content: &mut BTreeMap>, + resp_builder: &RpcTxB, ) where Tx: PoolTransaction>, RpcTxB: TransactionCompat, { content.entry(tx.sender()).or_default().insert( tx.nonce().to_string(), - from_recovered::(tx.clone().into_consensus().into()), + from_recovered(tx.clone().into_consensus().into(), resp_builder), ); } @@ -55,10 +55,10 @@ where let mut content = TxpoolContent { pending: BTreeMap::new(), queued: BTreeMap::new() }; for pending in pending { - insert::<_, Eth::TransactionCompat>(&pending.transaction, &mut content.pending); + insert::<_, Eth>(&pending.transaction, &mut content.pending, &self.tx_resp_builder); } for queued in queued { - insert::<_, Eth::TransactionCompat>(&queued.transaction, &mut content.queued); + insert::<_, Eth>(&queued.transaction, &mut content.queued, &self.tx_resp_builder); } content @@ -66,10 +66,10 @@ where } #[async_trait] -impl TxPoolApiServer> for TxPoolApi +impl TxPoolApiServer for TxPoolApi where Pool: TransactionPool + 'static, - Eth: FullEthApiTypes + 'static, + Eth: TransactionCompat + 'static, { /// Returns the number of transactions currently pending for inclusion in the next block(s), as /// well as the ones that are being scheduled for future execution only. @@ -131,7 +131,7 @@ where async fn txpool_content_from( &self, from: Address, - ) -> Result>> { + ) -> Result> { trace!(target: "rpc::eth", ?from, "Serving txpool_contentFrom"); Ok(self.content().remove_from(&from)) } @@ -141,7 +141,7 @@ where /// /// See [here](https://geth.ethereum.org/docs/rpc/ns-txpool#txpool_content) for more details /// Handler for `txpool_content` - async fn txpool_content(&self) -> Result>> { + async fn txpool_content(&self) -> Result> { trace!(target: "rpc::eth", "Serving txpool_content"); Ok(self.content()) } From d7f08cd8762dbe81c7c83655f7f94d5ce628daa3 Mon Sep 17 00:00:00 2001 From: Hai | RISE <150876604+hai-rise@users.noreply.github.com> Date: Thu, 24 Oct 2024 12:09:14 +0700 Subject: [PATCH 232/425] chore: remove some clones (#12008) --- crates/evm/src/system_calls/mod.rs | 4 ++-- crates/optimism/payload/src/builder.rs | 14 ++++---------- 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/crates/evm/src/system_calls/mod.rs b/crates/evm/src/system_calls/mod.rs index d71dcfeda..daaf1d141 100644 --- a/crates/evm/src/system_calls/mod.rs +++ b/crates/evm/src/system_calls/mod.rs @@ -171,7 +171,7 @@ where DB::Error: Display, { let result_and_state = eip2935::transact_blockhashes_contract_call( - &self.evm_config.clone(), + &self.evm_config, &self.chain_spec, timestamp, block_number, @@ -226,7 +226,7 @@ where DB::Error: Display, { let result_and_state = eip4788::transact_beacon_root_contract_call( - &self.evm_config.clone(), + &self.evm_config, &self.chain_spec, timestamp, block_number, diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 3ed00c49a..a05690490 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -105,13 +105,7 @@ where args: BuildArguments, ) -> Result, PayloadBuilderError> { let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, 
&args.config.parent_block); - optimism_payload( - self.evm_config.clone(), - args, - cfg_env, - block_env, - self.compute_pending_block, - ) + optimism_payload(&self.evm_config, args, cfg_env, block_env, self.compute_pending_block) } fn on_missing_payload( @@ -140,7 +134,7 @@ where best_payload: None, }; let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_block); - optimism_payload(self.evm_config.clone(), args, cfg_env, block_env, false)? + optimism_payload(&self.evm_config, args, cfg_env, block_env, false)? .into_payload() .ok_or_else(|| PayloadBuilderError::MissingPayload) } @@ -156,7 +150,7 @@ where /// a result indicating success with the payload or an error in case of failure. #[inline] pub(crate) fn optimism_payload( - evm_config: EvmConfig, + evm_config: &EvmConfig, args: BuildArguments, initialized_cfg: CfgEnvWithHandlerCfg, initialized_block_env: BlockEnv, @@ -430,7 +424,7 @@ where &mut db, &chain_spec, attributes.payload_attributes.timestamp, - attributes.clone().payload_attributes.withdrawals, + attributes.payload_attributes.withdrawals.clone(), )?; // merge all transitions into bundle state, this would apply the withdrawal balance changes From fcca8b1523fa7c1917754389fd08c9b21eb57987 Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Thu, 24 Oct 2024 13:26:25 +0800 Subject: [PATCH 233/425] refactor: BlockchainTestCase::run rm repetitive convert ForkSpec to ChainSpec (#11896) Co-authored-by: Matthias Seitz --- testing/ef-tests/src/cases/blockchain_test.rs | 14 ++++++-------- testing/ef-tests/src/models.rs | 2 +- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/testing/ef-tests/src/cases/blockchain_test.rs b/testing/ef-tests/src/cases/blockchain_test.rs index d29aafa82..7d80ec6c4 100644 --- a/testing/ef-tests/src/cases/blockchain_test.rs +++ b/testing/ef-tests/src/cases/blockchain_test.rs @@ -6,6 +6,7 @@ use crate::{ }; use alloy_rlp::Decodable; use rayon::iter::{ParallelBridge, ParallelIterator}; +use reth_chainspec::ChainSpec; use reth_primitives::{BlockBody, SealedBlock, StaticFileSegment}; use reth_provider::{ providers::StaticFileWriter, test_utils::create_test_provider_factory_with_chain_spec, @@ -83,11 +84,10 @@ impl Case for BlockchainTestCase { .par_bridge() .try_for_each(|case| { // Create a new test database and initialize a provider for the test case. - let provider = create_test_provider_factory_with_chain_spec(Arc::new( - case.network.clone().into(), - )) - .database_provider_rw() - .unwrap(); + let chain_spec: Arc = Arc::new(case.network.into()); + let provider = create_test_provider_factory_with_chain_spec(chain_spec.clone()) + .database_provider_rw() + .unwrap(); // Insert initial test state into the provider. provider.insert_historical_block( @@ -127,9 +127,7 @@ impl Case for BlockchainTestCase { // Execute the execution stage using the EVM processor factory for the test case // network. let _ = ExecutionStage::new_with_executor( - reth_evm_ethereum::execute::EthExecutorProvider::ethereum(Arc::new( - case.network.clone().into(), - )), + reth_evm_ethereum::execute::EthExecutorProvider::ethereum(chain_spec), ) .execute( &provider, diff --git a/testing/ef-tests/src/models.rs b/testing/ef-tests/src/models.rs index 30e5e5bb2..b5dc073c1 100644 --- a/testing/ef-tests/src/models.rs +++ b/testing/ef-tests/src/models.rs @@ -257,7 +257,7 @@ impl Account { } /// Fork specification. 
-#[derive(Debug, PartialEq, Eq, PartialOrd, Hash, Ord, Clone, Deserialize)] +#[derive(Debug, PartialEq, Eq, PartialOrd, Hash, Ord, Clone, Copy, Deserialize)] pub enum ForkSpec { /// Frontier Frontier, From e04d1b4b4a3a6ab6e4f784ad129a88054bab460e Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Thu, 24 Oct 2024 08:04:57 +0200 Subject: [PATCH 234/425] perf(net): P2P sink, revert pull/11658 (#11712) --- crates/net/eth-wire/src/p2pstream.rs | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/crates/net/eth-wire/src/p2pstream.rs b/crates/net/eth-wire/src/p2pstream.rs index 9882e3978..76075838b 100644 --- a/crates/net/eth-wire/src/p2pstream.rs +++ b/crates/net/eth-wire/src/p2pstream.rs @@ -614,25 +614,24 @@ where /// Returns `Poll::Ready(Ok(()))` when no buffered items remain. fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { let mut this = self.project(); - loop { - match ready!(this.inner.as_mut().poll_flush(cx)) { - Err(err) => { - trace!(target: "net::p2p", - %err, - "error flushing p2p stream" - ); - return Poll::Ready(Err(err.into())) - } - Ok(()) => { + let poll_res = loop { + match this.inner.as_mut().poll_ready(cx) { + Poll::Pending => break Poll::Pending, + Poll::Ready(Err(err)) => break Poll::Ready(Err(err.into())), + Poll::Ready(Ok(())) => { let Some(message) = this.outgoing_messages.pop_front() else { - return Poll::Ready(Ok(())) + break Poll::Ready(Ok(())) }; if let Err(err) = this.inner.as_mut().start_send(message) { - return Poll::Ready(Err(err.into())) + break Poll::Ready(Err(err.into())) } } } - } + }; + + ready!(this.inner.as_mut().poll_flush(cx))?; + + poll_res } fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { From 0df3148357cbfb9b811fd04da6a69fca24e59be2 Mon Sep 17 00:00:00 2001 From: Hoa Nguyen Date: Thu, 24 Oct 2024 13:29:07 +0700 Subject: [PATCH 235/425] feat(payload): introduce payload freezing for predetermined blocks (#11790) --- crates/payload/basic/src/lib.rs | 74 ++++++++++++++++++++++++--------- 1 file changed, 55 insertions(+), 19 deletions(-) diff --git a/crates/payload/basic/src/lib.rs b/crates/payload/basic/src/lib.rs index 4274d451e..b8eab3c0f 100644 --- a/crates/payload/basic/src/lib.rs +++ b/crates/payload/basic/src/lib.rs @@ -178,7 +178,7 @@ where deadline, // ticks immediately interval: tokio::time::interval(self.config.interval), - best_payload: None, + best_payload: PayloadState::Missing, pending_block: None, cached_reads, payload_task_guard: self.payload_task_guard.clone(), @@ -324,8 +324,8 @@ where deadline: Pin>, /// The interval at which the job should build a new payload after the last. interval: Interval, - /// The best payload so far. - best_payload: Option, + /// The best payload so far and its state. + best_payload: PayloadState, /// Receiver for the block that is currently being built. pending_block: Option>, /// Restricts how many generator tasks can be executed at once. 
@@ -362,7 +362,7 @@ where let _cancel = cancel.clone(); let guard = self.payload_task_guard.clone(); let payload_config = self.config.clone(); - let best_payload = self.best_payload.clone(); + let best_payload = self.best_payload.payload().cloned(); self.metrics.inc_initiated_payload_builds(); let cached_reads = self.cached_reads.take().unwrap_or_default(); let builder = self.builder.clone(); @@ -407,8 +407,9 @@ where // check if the interval is reached while this.interval.poll_tick(cx).is_ready() { - // start a new job if there is no pending block and we haven't reached the deadline - if this.pending_block.is_none() { + // start a new job if there is no pending block, we haven't reached the deadline, + // and the payload isn't frozen + if this.pending_block.is_none() && !this.best_payload.is_frozen() { this.spawn_build_job(); } } @@ -420,7 +421,11 @@ where BuildOutcome::Better { payload, cached_reads } => { this.cached_reads = Some(cached_reads); debug!(target: "payload_builder", value = %payload.fees(), "built better payload"); - this.best_payload = Some(payload); + this.best_payload = PayloadState::Best(payload); + } + BuildOutcome::Freeze(payload) => { + debug!(target: "payload_builder", "payload frozen, no further building will occur"); + this.best_payload = PayloadState::Frozen(payload); } BuildOutcome::Aborted { fees, cached_reads } => { this.cached_reads = Some(cached_reads); @@ -459,17 +464,18 @@ where type BuiltPayload = Builder::BuiltPayload; fn best_payload(&self) -> Result { - if let Some(ref payload) = self.best_payload { - return Ok(payload.clone()) + if let Some(payload) = self.best_payload.payload() { + Ok(payload.clone()) + } else { + // No payload has been built yet, but we need to return something that the CL then + // can deliver, so we need to return an empty payload. + // + // Note: it is assumed that this is unlikely to happen, as the payload job is + // started right away and the first full block should have been + // built by the time CL is requesting the payload. + self.metrics.inc_requested_empty_payload(); + self.builder.build_empty_payload(&self.client, self.config.clone()) } - // No payload has been built yet, but we need to return something that the CL then can - // deliver, so we need to return an empty payload. - // - // Note: it is assumed that this is unlikely to happen, as the payload job is started right - // away and the first full block should have been built by the time CL is requesting the - // payload. - self.metrics.inc_requested_empty_payload(); - self.builder.build_empty_payload(&self.client, self.config.clone()) } fn payload_attributes(&self) -> Result { @@ -480,8 +486,7 @@ where &mut self, kind: PayloadKind, ) -> (Self::ResolvePayloadFuture, KeepPayloadJobAlive) { - let best_payload = self.best_payload.take(); - + let best_payload = self.best_payload.payload().cloned(); if best_payload.is_none() && self.pending_block.is_none() { // ensure we have a job scheduled if we don't have a best payload yet and none is active self.spawn_build_job(); @@ -545,6 +550,34 @@ where } } +/// Represents the current state of a payload being built. +#[derive(Debug, Clone)] +pub enum PayloadState

<P> { + /// No payload has been built yet. + Missing, + /// The best payload built so far, which may still be improved upon. + Best(P), + /// The payload is frozen and no further building should occur. + /// + /// Contains the final payload `P` that should be used. + Frozen(P), +} + +impl

<P> PayloadState<P>

{ + /// Checks if the payload is frozen. + pub const fn is_frozen(&self) -> bool { + matches!(self, Self::Frozen(_)) + } + + /// Returns the payload if it exists (either Best or Frozen). + pub const fn payload(&self) -> Option<&P> { + match self { + Self::Missing => None, + Self::Best(p) | Self::Frozen(p) => Some(p), + } + } +} + /// The future that returns the best payload to be served to the consensus layer. /// /// This returns the payload that's supposed to be sent to the CL. @@ -725,6 +758,9 @@ pub enum BuildOutcome { }, /// Build job was cancelled Cancelled, + + /// The payload is final and no further building should occur + Freeze(Payload), } impl BuildOutcome { From 84a30b0404e73d11d16208cc5898afff1b9f4f03 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 24 Oct 2024 11:53:15 +0200 Subject: [PATCH 236/425] primitive-traits: use alloy `INITIAL_BASE_FEE` constant (#12022) --- Cargo.lock | 1 + crates/blockchain-tree/src/blockchain_tree.rs | 8 ++++---- crates/chain-state/src/test_utils.rs | 9 ++++----- crates/chainspec/src/spec.rs | 6 +++--- crates/consensus/common/Cargo.toml | 1 + crates/consensus/common/src/validation.rs | 2 +- crates/ethereum/evm/src/lib.rs | 4 ++-- crates/primitives-traits/src/constants/mod.rs | 3 --- 8 files changed, 16 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f749672fe..e261297ae 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6741,6 +6741,7 @@ name = "reth-consensus-common" version = "1.1.0" dependencies = [ "alloy-consensus", + "alloy-eips", "alloy-primitives", "mockall", "rand 0.8.5", diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index 95c0361f3..4468d8205 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -1375,6 +1375,7 @@ where mod tests { use super::*; use alloy_consensus::{TxEip1559, EMPTY_ROOT_HASH}; + use alloy_eips::eip1559::INITIAL_BASE_FEE; use alloy_genesis::{Genesis, GenesisAccount}; use alloy_primitives::{keccak256, Address, Sealable, B256}; use assert_matches::assert_matches; @@ -1386,7 +1387,6 @@ mod tests { use reth_evm::test_utils::MockExecutorProvider; use reth_evm_ethereum::execute::EthExecutorProvider; use reth_primitives::{ - constants::EIP1559_INITIAL_BASE_FEE, proofs::{calculate_receipt_root, calculate_transaction_root}, revm_primitives::AccountInfo, Account, BlockBody, Header, Signature, Transaction, TransactionSigned, @@ -1560,7 +1560,7 @@ mod tests { provider_rw.commit().unwrap(); } - let single_tx_cost = U256::from(EIP1559_INITIAL_BASE_FEE * MIN_TRANSACTION_GAS); + let single_tx_cost = U256::from(INITIAL_BASE_FEE * MIN_TRANSACTION_GAS); let mock_tx = |nonce: u64| -> TransactionSignedEcRecovered { TransactionSigned::from_transaction_and_signature( Transaction::Eip1559(TxEip1559 { @@ -1568,7 +1568,7 @@ mod tests { nonce, gas_limit: MIN_TRANSACTION_GAS, to: Address::ZERO.into(), - max_fee_per_gas: EIP1559_INITIAL_BASE_FEE as u128, + max_fee_per_gas: INITIAL_BASE_FEE as u128, ..Default::default() }), Signature::test_signature(), @@ -1605,7 +1605,7 @@ mod tests { gas_used: body.len() as u64 * MIN_TRANSACTION_GAS, gas_limit: chain_spec.max_gas_limit, mix_hash: B256::random(), - base_fee_per_gas: Some(EIP1559_INITIAL_BASE_FEE), + base_fee_per_gas: Some(INITIAL_BASE_FEE), transactions_root, receipts_root, state_root: state_root_unhashed(HashMap::from([( diff --git a/crates/chain-state/src/test_utils.rs 
b/crates/chain-state/src/test_utils.rs index f1648ab6b..564df9fe3 100644 --- a/crates/chain-state/src/test_utils.rs +++ b/crates/chain-state/src/test_utils.rs @@ -3,7 +3,7 @@ use crate::{ CanonStateSubscriptions, }; use alloy_consensus::{Transaction as _, TxEip1559, EMPTY_ROOT_HASH}; -use alloy_eips::eip7685::Requests; +use alloy_eips::{eip1559::INITIAL_BASE_FEE, eip7685::Requests}; use alloy_primitives::{Address, BlockNumber, Sealable, B256, U256}; use alloy_signer::SignerSync; use alloy_signer_local::PrivateKeySigner; @@ -11,7 +11,6 @@ use rand::{thread_rng, Rng}; use reth_chainspec::{ChainSpec, EthereumHardfork, MIN_TRANSACTION_GAS}; use reth_execution_types::{Chain, ExecutionOutcome}; use reth_primitives::{ - constants::EIP1559_INITIAL_BASE_FEE, proofs::{calculate_receipt_root, calculate_transaction_root, calculate_withdrawals_root}, BlockBody, Header, Receipt, Receipts, SealedBlock, SealedBlockWithSenders, SealedHeader, Transaction, TransactionSigned, TransactionSignedEcRecovered, @@ -75,7 +74,7 @@ impl TestBlockBuilder { /// Gas cost of a single transaction generated by the block builder. pub fn single_tx_cost() -> U256 { - U256::from(EIP1559_INITIAL_BASE_FEE * MIN_TRANSACTION_GAS) + U256::from(INITIAL_BASE_FEE * MIN_TRANSACTION_GAS) } /// Generates a random [`SealedBlockWithSenders`]. @@ -92,7 +91,7 @@ impl TestBlockBuilder { nonce, gas_limit: MIN_TRANSACTION_GAS, to: Address::random().into(), - max_fee_per_gas: EIP1559_INITIAL_BASE_FEE as u128, + max_fee_per_gas: INITIAL_BASE_FEE as u128, max_priority_fee_per_gas: 1, ..Default::default() }); @@ -136,7 +135,7 @@ impl TestBlockBuilder { gas_used: transactions.len() as u64 * MIN_TRANSACTION_GAS, gas_limit: self.chain_spec.max_gas_limit, mix_hash: B256::random(), - base_fee_per_gas: Some(EIP1559_INITIAL_BASE_FEE), + base_fee_per_gas: Some(INITIAL_BASE_FEE), transactions_root: calculate_transaction_root(&transactions), receipts_root: calculate_receipt_root(&receipts), beneficiary: Address::random(), diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index bebf7ca26..b1a23f1fa 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -3,7 +3,7 @@ pub use alloy_eips::eip1559::BaseFeeParams; use alloc::{boxed::Box, sync::Arc, vec::Vec}; use alloy_chains::{Chain, NamedChain}; use alloy_consensus::constants::EMPTY_WITHDRAWALS; -use alloy_eips::eip7685::EMPTY_REQUESTS_HASH; +use alloy_eips::{eip1559::INITIAL_BASE_FEE, eip7685::EMPTY_REQUESTS_HASH}; use alloy_genesis::Genesis; use alloy_primitives::{address, b256, Address, BlockNumber, B256, U256}; use derive_more::From; @@ -19,7 +19,7 @@ use reth_network_peers::{ sepolia_nodes, NodeRecord, }; use reth_primitives_traits::{ - constants::{EIP1559_INITIAL_BASE_FEE, HOLESKY_GENESIS_HASH, SEPOLIA_GENESIS_HASH}, + constants::{HOLESKY_GENESIS_HASH, SEPOLIA_GENESIS_HASH}, Header, SealedHeader, }; use reth_trie_common::root::state_root_ref_unhashed; @@ -314,7 +314,7 @@ impl ChainSpec { pub fn initial_base_fee(&self) -> Option { // If the base fee is set in the genesis block, we use that instead of the default. let genesis_base_fee = - self.genesis.base_fee_per_gas.map(|fee| fee as u64).unwrap_or(EIP1559_INITIAL_BASE_FEE); + self.genesis.base_fee_per_gas.map(|fee| fee as u64).unwrap_or(INITIAL_BASE_FEE); // If London is activated at genesis, we set the initial base fee as per EIP-1559. 
self.hardforks.fork(EthereumHardfork::London).active_at_block(0).then_some(genesis_base_fee) diff --git a/crates/consensus/common/Cargo.toml b/crates/consensus/common/Cargo.toml index eaae1301b..c83312577 100644 --- a/crates/consensus/common/Cargo.toml +++ b/crates/consensus/common/Cargo.toml @@ -20,6 +20,7 @@ reth-consensus.workspace = true alloy-primitives.workspace = true revm-primitives.workspace = true alloy-consensus.workspace = true +alloy-eips.workspace = true [dev-dependencies] reth-storage-api.workspace = true diff --git a/crates/consensus/common/src/validation.rs b/crates/consensus/common/src/validation.rs index dabb8c3c3..1070bbdbc 100644 --- a/crates/consensus/common/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -204,7 +204,7 @@ pub fn validate_against_parent_eip1559_base_fee Date: Thu, 24 Oct 2024 11:56:05 +0200 Subject: [PATCH 237/425] primitive-traits: rm `ALLOWED_FUTURE_BLOCK_TIME_SECONDS` constant (#12028) --- crates/primitives-traits/src/constants/mod.rs | 9 --------- 1 file changed, 9 deletions(-) diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index 19decfb2d..6bd21eb82 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -87,15 +87,6 @@ pub const OP_SYSTEM_TX_TO_ADDR: Address = address!("4200000000000000000000000000 /// the database. pub const BEACON_CONSENSUS_REORG_UNWIND_DEPTH: u64 = 3; -/// Max seconds from current time allowed for blocks, before they're considered future blocks. -/// -/// This is only used when checking whether or not the timestamp for pre-merge blocks is in the -/// future. -/// -/// See: -/// -pub const ALLOWED_FUTURE_BLOCK_TIME_SECONDS: u64 = 15; - #[cfg(test)] mod tests { use super::*; From 8bfb7f9ce9b527dbbfc94327519abbafb666a1d8 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 24 Oct 2024 11:58:50 +0200 Subject: [PATCH 238/425] primitive-traits: use alloy `BEACON_NONCE` constant (#12029) --- crates/ethereum/payload/src/lib.rs | 4 ++-- crates/optimism/payload/src/builder.rs | 2 +- crates/primitives-traits/src/constants/mod.rs | 3 --- crates/rpc/rpc-eth-api/src/helpers/pending_block.rs | 4 ++-- 4 files changed, 5 insertions(+), 8 deletions(-) diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 7f94acf72..951a909b9 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -10,7 +10,7 @@ #![allow(clippy::useless_let_if_seq)] use alloy_consensus::EMPTY_OMMER_ROOT_HASH; -use alloy_eips::eip7685::Requests; +use alloy_eips::{eip7685::Requests, merge::BEACON_NONCE}; use alloy_primitives::U256; use reth_basic_payload_builder::{ commit_withdrawals, is_better_payload, BuildArguments, BuildOutcome, PayloadBuilder, @@ -25,7 +25,7 @@ use reth_execution_types::ExecutionOutcome; use reth_payload_builder::{EthBuiltPayload, EthPayloadBuilderAttributes}; use reth_payload_primitives::{PayloadBuilderAttributes, PayloadBuilderError}; use reth_primitives::{ - constants::{eip4844::MAX_DATA_GAS_PER_BLOCK, BEACON_NONCE}, + constants::eip4844::MAX_DATA_GAS_PER_BLOCK, proofs::{self}, revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg}, Block, BlockBody, EthereumHardforks, Header, Receipt, diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index a05690490..c85abfad7 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ 
-3,6 +3,7 @@ use std::sync::Arc; use alloy_consensus::EMPTY_OMMER_ROOT_HASH; +use alloy_eips::merge::BEACON_NONCE; use alloy_primitives::U256; use reth_basic_payload_builder::*; use reth_chain_state::ExecutedBlock; @@ -14,7 +15,6 @@ use reth_optimism_consensus::calculate_receipt_root_no_memo_optimism; use reth_optimism_forks::OptimismHardfork; use reth_payload_primitives::{PayloadBuilderAttributes, PayloadBuilderError}; use reth_primitives::{ - constants::BEACON_NONCE, proofs, revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg}, Block, BlockBody, Header, Receipt, TxType, diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index 6bd21eb82..c11a62963 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -9,9 +9,6 @@ pub use gas_units::{GIGAGAS, KILOGAS, MEGAGAS}; /// The client version: `reth/v{major}.{minor}.{patch}` pub const RETH_CLIENT_VERSION: &str = concat!("reth/v", env!("CARGO_PKG_VERSION")); -/// The default block nonce in the beacon consensus -pub const BEACON_NONCE: u64 = 0u64; - /// The minimum tx fee below which the txpool will reject the transaction. /// /// Configured to `7` WEI which is the lowest possible value of base fee under mainnet EIP-1559 diff --git a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs index 407ddf187..872f17ee9 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs @@ -6,7 +6,7 @@ use std::time::{Duration, Instant}; use crate::{EthApiTypes, FromEthApiError, FromEvmError}; use alloy_consensus::EMPTY_OMMER_ROOT_HASH; -use alloy_eips::eip7685::EMPTY_REQUESTS_HASH; +use alloy_eips::{eip7685::EMPTY_REQUESTS_HASH, merge::BEACON_NONCE}; use alloy_primitives::{BlockNumber, B256, U256}; use alloy_rpc_types::BlockNumberOrTag; use futures::Future; @@ -17,7 +17,7 @@ use reth_evm::{ }; use reth_execution_types::ExecutionOutcome; use reth_primitives::{ - constants::{eip4844::MAX_DATA_GAS_PER_BLOCK, BEACON_NONCE}, + constants::eip4844::MAX_DATA_GAS_PER_BLOCK, proofs::calculate_transaction_root, revm_primitives::{ BlockEnv, CfgEnv, CfgEnvWithHandlerCfg, EVMError, Env, ExecutionResult, InvalidTransaction, From ebd1ed9fb822377d816c798585c0911be45a2b32 Mon Sep 17 00:00:00 2001 From: Deil Urba Date: Thu, 24 Oct 2024 11:49:14 +0100 Subject: [PATCH 239/425] feat(exex): notifications trait (#11972) --- crates/exex/exex/src/context.rs | 2 +- crates/exex/exex/src/notifications.rs | 64 ++++++++++++++++++--------- 2 files changed, 45 insertions(+), 21 deletions(-) diff --git a/crates/exex/exex/src/context.rs b/crates/exex/exex/src/context.rs index 9af12e260..c4b4f351b 100644 --- a/crates/exex/exex/src/context.rs +++ b/crates/exex/exex/src/context.rs @@ -7,7 +7,7 @@ use reth_primitives::Head; use reth_tasks::TaskExecutor; use tokio::sync::mpsc::UnboundedSender; -use crate::{ExExEvent, ExExNotifications}; +use crate::{ExExEvent, ExExNotifications, ExExNotificationsStream}; /// Captures the context that an `ExEx` has access to. pub struct ExExContext { diff --git a/crates/exex/exex/src/notifications.rs b/crates/exex/exex/src/notifications.rs index d0c94d34f..90a0ee230 100644 --- a/crates/exex/exex/src/notifications.rs +++ b/crates/exex/exex/src/notifications.rs @@ -21,6 +21,40 @@ pub struct ExExNotifications { inner: ExExNotificationsInner, } +/// A trait, that represents a stream of [`ExExNotification`]s. 
The stream will emit notifications +/// for all blocks. If the stream is configured with a head via [`ExExNotifications::set_with_head`] +/// or [`ExExNotifications::with_head`], it will run backfill jobs to catch up to the node head. +pub trait ExExNotificationsStream: Stream> + Unpin { + /// Sets [`ExExNotificationsStream`] to a stream of [`ExExNotification`]s without a head. + /// + /// It's a no-op if the stream has already been configured without a head. + /// + /// See the documentation of [`ExExNotificationsWithoutHead`] for more details. + fn set_without_head(&mut self); + + /// Sets [`ExExNotificationsStream`] to a stream of [`ExExNotification`]s with the provided + /// head. + /// + /// It's a no-op if the stream has already been configured with a head. + /// + /// See the documentation of [`ExExNotificationsWithHead`] for more details. + fn set_with_head(&mut self, exex_head: ExExHead); + + /// Returns a new [`ExExNotificationsStream`] without a head. + /// + /// See the documentation of [`ExExNotificationsWithoutHead`] for more details. + fn without_head(self) -> Self + where + Self: Sized; + + /// Returns a new [`ExExNotificationsStream`] with the provided head. + /// + /// See the documentation of [`ExExNotificationsWithHead`] for more details. + fn with_head(self, exex_head: ExExHead) -> Self + where + Self: Sized; +} + #[derive(Debug)] enum ExExNotificationsInner { /// A stream of [`ExExNotification`]s. The stream will emit notifications for all blocks. @@ -52,13 +86,14 @@ impl ExExNotifications { )), } } +} - /// Sets [`ExExNotifications`] to a stream of [`ExExNotification`]s without a head. - /// - /// It's a no-op if the stream has already been configured without a head. - /// - /// See the documentation of [`ExExNotificationsWithoutHead`] for more details. - pub fn set_without_head(&mut self) { +impl ExExNotificationsStream for ExExNotifications +where + P: BlockReader + HeaderProvider + StateProviderFactory + Clone + Unpin + 'static, + E: BlockExecutorProvider + Clone + Unpin + 'static, +{ + fn set_without_head(&mut self) { let current = std::mem::replace(&mut self.inner, ExExNotificationsInner::Invalid); self.inner = ExExNotificationsInner::WithoutHead(match current { ExExNotificationsInner::WithoutHead(notifications) => notifications, @@ -73,20 +108,12 @@ impl ExExNotifications { }); } - /// Returns a new [`ExExNotifications`] without a head. - /// - /// See the documentation of [`ExExNotificationsWithoutHead`] for more details. - pub fn without_head(mut self) -> Self { + fn without_head(mut self) -> Self { self.set_without_head(); self } - /// Sets [`ExExNotifications`] to a stream of [`ExExNotification`]s with the provided head. - /// - /// It's a no-op if the stream has already been configured with a head. - /// - /// See the documentation of [`ExExNotificationsWithHead`] for more details. - pub fn set_with_head(&mut self, exex_head: ExExHead) { + fn set_with_head(&mut self, exex_head: ExExHead) { let current = std::mem::replace(&mut self.inner, ExExNotificationsInner::Invalid); self.inner = ExExNotificationsInner::WithHead(match current { ExExNotificationsInner::WithoutHead(notifications) => { @@ -104,10 +131,7 @@ impl ExExNotifications { }); } - /// Returns a new [`ExExNotifications`] with the provided head. - /// - /// See the documentation of [`ExExNotificationsWithHead`] for more details. 
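As a rough usage sketch, not part of the patch: an ExEx task can opt into the head-aware stream through the new trait; the `ExExNotificationsStream` import added to `context.rs` above is what brings `set_with_head` into scope. Field and method names follow the diff, but the head construction, imports, and trait bounds below are assumptions:

    use futures::TryStreamExt;

    // assumes ExExContext, ExExHead, BlockNumHash and FullNodeComponents are imported
    async fn my_exex<Node: FullNodeComponents>(mut ctx: ExExContext<Node>) -> eyre::Result<()> {
        // assumed: ExExHead wraps the num/hash of the block to resume notifications from
        let head = ExExHead { block: BlockNumHash::new(ctx.head.number, ctx.head.hash) };
        ctx.notifications.set_with_head(head);
        while let Some(notification) = ctx.notifications.try_next().await? {
            // react to committed / reverted chains here
        }
        Ok(())
    }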
- pub fn with_head(mut self, exex_head: ExExHead) -> Self { + fn with_head(mut self, exex_head: ExExHead) -> Self { self.set_with_head(exex_head); self } From d50da7fcd65da3c37ad77b7b5f645f12ffc055b6 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 24 Oct 2024 12:50:43 +0200 Subject: [PATCH 240/425] primitives-traits: use alloy `SEPOLIA_GENESIS_HASH` constant (#12024) --- crates/chainspec/src/spec.rs | 7 ++----- crates/primitives-traits/src/constants/mod.rs | 4 ---- crates/primitives/src/lib.rs | 2 +- crates/storage/db-common/src/init.rs | 4 ++-- 4 files changed, 5 insertions(+), 12 deletions(-) diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index b1a23f1fa..02f4b5ca9 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -8,7 +8,7 @@ use alloy_genesis::Genesis; use alloy_primitives::{address, b256, Address, BlockNumber, B256, U256}; use derive_more::From; -use alloy_consensus::constants::{DEV_GENESIS_HASH, MAINNET_GENESIS_HASH}; +use alloy_consensus::constants::{DEV_GENESIS_HASH, MAINNET_GENESIS_HASH, SEPOLIA_GENESIS_HASH}; use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; use reth_ethereum_forks::{ ChainHardforks, DisplayHardforks, EthereumHardfork, EthereumHardforks, ForkCondition, @@ -18,10 +18,7 @@ use reth_network_peers::{ base_nodes, base_testnet_nodes, holesky_nodes, mainnet_nodes, op_nodes, op_testnet_nodes, sepolia_nodes, NodeRecord, }; -use reth_primitives_traits::{ - constants::{HOLESKY_GENESIS_HASH, SEPOLIA_GENESIS_HASH}, - Header, SealedHeader, -}; +use reth_primitives_traits::{constants::HOLESKY_GENESIS_HASH, Header, SealedHeader}; use reth_trie_common::root::state_root_ref_unhashed; use crate::{constants::MAINNET_DEPOSIT_CONTRACT, once_cell_set, EthChainSpec, LazyLock, OnceLock}; diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index c11a62963..c5ee6bc3e 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -60,10 +60,6 @@ pub const OP_SEPOLIA_EIP1559_DEFAULT_ELASTICITY_MULTIPLIER: u128 = 6; /// [transaction costs](https://community.optimism.io/docs/developers/build/differences/#transaction-costs) doc. 
pub const BASE_SEPOLIA_EIP1559_DEFAULT_ELASTICITY_MULTIPLIER: u128 = 10; -/// Sepolia genesis hash: `0x25a5cc106eea7138acab33231d7160d69cb777ee0c2c553fcddf5138993e6dd9` -pub const SEPOLIA_GENESIS_HASH: B256 = - b256!("25a5cc106eea7138acab33231d7160d69cb777ee0c2c553fcddf5138993e6dd9"); - /// Holesky genesis hash: `0xb5f7f912443c940f21fd611f12828d75b534364ed9e95ca4e307729a4661bde4` pub const HOLESKY_GENESIS_HASH: B256 = b256!("b5f7f912443c940f21fd611f12828d75b534364ed9e95ca4e307729a4661bde4"); diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 7a9a6bf45..4e3f1d3bd 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -39,7 +39,7 @@ pub use block::{ }; #[cfg(feature = "reth-codec")] pub use compression::*; -pub use constants::{HOLESKY_GENESIS_HASH, SEPOLIA_GENESIS_HASH}; +pub use constants::HOLESKY_GENESIS_HASH; pub use receipt::{ gas_spent_by_transactions, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, Receipts, }; diff --git a/crates/storage/db-common/src/init.rs b/crates/storage/db-common/src/init.rs index f0695421e..014751733 100644 --- a/crates/storage/db-common/src/init.rs +++ b/crates/storage/db-common/src/init.rs @@ -581,7 +581,7 @@ struct GenesisAccountWithAddress { #[cfg(test)] mod tests { use super::*; - use alloy_consensus::constants::MAINNET_GENESIS_HASH; + use alloy_consensus::constants::{MAINNET_GENESIS_HASH, SEPOLIA_GENESIS_HASH}; use alloy_genesis::Genesis; use reth_chainspec::{Chain, ChainSpec, HOLESKY, MAINNET, SEPOLIA}; use reth_db::DatabaseEnv; @@ -592,7 +592,7 @@ mod tests { transaction::DbTx, Database, }; - use reth_primitives::{HOLESKY_GENESIS_HASH, SEPOLIA_GENESIS_HASH}; + use reth_primitives::HOLESKY_GENESIS_HASH; use reth_primitives_traits::IntegerList; use reth_provider::{ test_utils::{create_test_provider_factory_with_chain_spec, MockNodeTypesWithDB}, From 2d83f2048937a1ddd9f9646bd513a3e52364af78 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 24 Oct 2024 13:12:13 +0200 Subject: [PATCH 241/425] chore: reuse alloy-primitives logs bloom (#12031) --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- crates/primitives-traits/src/lib.rs | 2 +- crates/primitives-traits/src/log.rs | 15 --------------- crates/primitives/src/receipt.rs | 4 ++-- crates/rpc/rpc-eth-types/src/simulate.rs | 5 +++-- crates/trie/sparse/src/trie.rs | 14 ++++++++++---- 7 files changed, 19 insertions(+), 27 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e261297ae..ab5218437 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -286,9 +286,9 @@ dependencies = [ [[package]] name = "alloy-primitives" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38f35429a652765189c1c5092870d8360ee7b7769b09b06d89ebaefd34676446" +checksum = "c71738eb20c42c5fb149571e76536a0f309d142f3957c28791662b96baf77a3d" dependencies = [ "alloy-rlp", "arbitrary", diff --git a/Cargo.toml b/Cargo.toml index 22a78979d..e23efdeb3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -427,7 +427,7 @@ revm-primitives = { version = "13.0.0", features = [ # eth alloy-chains = "0.1.32" alloy-dyn-abi = "0.8.0" -alloy-primitives = { version = "0.8.7", default-features = false } +alloy-primitives = { version = "0.8.9", default-features = false } alloy-rlp = "0.3.4" alloy-sol-types = "0.8.0" alloy-trie = { version = "0.7", default-features = false } diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index a77669ec3..57d1119b0 100644 --- a/crates/primitives-traits/src/lib.rs +++ 
b/crates/primitives-traits/src/lib.rs @@ -39,7 +39,7 @@ mod error; pub use error::{GotExpected, GotExpectedBoxed}; mod log; -pub use log::{logs_bloom, Log, LogData}; +pub use alloy_primitives::{logs_bloom, Log, LogData}; mod storage; pub use storage::StorageEntry; diff --git a/crates/primitives-traits/src/log.rs b/crates/primitives-traits/src/log.rs index 6e6b47335..0b445aeeb 100644 --- a/crates/primitives-traits/src/log.rs +++ b/crates/primitives-traits/src/log.rs @@ -1,18 +1,3 @@ -use alloy_primitives::Bloom; -pub use alloy_primitives::{Log, LogData}; - -/// Calculate receipt logs bloom. -pub fn logs_bloom<'a>(logs: impl IntoIterator) -> Bloom { - let mut bloom = Bloom::ZERO; - for log in logs { - bloom.m3_2048(log.address.as_slice()); - for topic in log.topics() { - bloom.m3_2048(topic.as_slice()); - } - } - bloom -} - #[cfg(test)] mod tests { use alloy_primitives::{Address, Bytes, Log as AlloyLog, B256}; diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index b117f8d96..bb6c0841b 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -1,6 +1,6 @@ #[cfg(feature = "reth-codec")] use crate::compression::{RECEIPT_COMPRESSOR, RECEIPT_DECOMPRESSOR}; -use crate::{logs_bloom, TxType}; +use crate::TxType; use alloc::{vec, vec::Vec}; use alloy_consensus::constants::{ EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, @@ -49,7 +49,7 @@ impl Receipt { /// Calculates [`Log`]'s bloom filter. this is slow operation and [`ReceiptWithBloom`] can /// be used to cache this value. pub fn bloom_slow(&self) -> Bloom { - logs_bloom(self.logs.iter()) + alloy_primitives::logs_bloom(self.logs.iter()) } /// Calculates the bloom filter for the receipt and returns the [`ReceiptWithBloom`] container diff --git a/crates/rpc/rpc-eth-types/src/simulate.rs b/crates/rpc/rpc-eth-types/src/simulate.rs index 1d443861d..4249c78fe 100644 --- a/crates/rpc/rpc-eth-types/src/simulate.rs +++ b/crates/rpc/rpc-eth-types/src/simulate.rs @@ -9,7 +9,6 @@ use alloy_rpc_types::{ use alloy_rpc_types_eth::transaction::TransactionRequest; use jsonrpsee_types::ErrorObject; use reth_primitives::{ - logs_bloom, proofs::{calculate_receipt_root, calculate_transaction_root}, BlockBody, BlockWithSenders, Receipt, Signature, Transaction, TransactionSigned, TransactionSignedNoHash, @@ -290,7 +289,9 @@ pub fn build_block( receipts_root: calculate_receipt_root(&receipts), transactions_root: calculate_transaction_root(&transactions), state_root, - logs_bloom: logs_bloom(receipts.iter().flat_map(|r| r.receipt.logs.iter())), + logs_bloom: alloy_primitives::logs_bloom( + receipts.iter().flat_map(|r| r.receipt.logs.iter()), + ), mix_hash: block_env.prevrandao.unwrap_or_default(), ..Default::default() }; diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index 4d195cbf3..8d65378f6 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -1257,19 +1257,24 @@ mod tests { // to test the sparse trie updates. 
const KEY_NIBBLES_LEN: usize = 3; - fn test(updates: Vec>>) { + fn test(updates: I) + where + I: IntoIterator, + T: IntoIterator)> + Clone, + { let mut rng = generators::rng(); let mut state = BTreeMap::default(); let mut sparse = RevealedSparseTrie::default(); for update in updates { - let keys_to_delete_len = update.len() / 2; - + let mut count = 0; // Insert state updates into the sparse trie and calculate the root for (key, value) in update.clone() { sparse.update_leaf(key, value).unwrap(); + count += 1; } + let keys_to_delete_len = count / 2; let sparse_root = sparse.root(); // Insert state updates into the hash builder and calculate the root @@ -1329,7 +1334,8 @@ mod tests { ), 1..100, ) - )| { test(updates.into_iter().collect()) }); + )| { + test(updates) }); } /// We have three leaves that share the same prefix: 0x00, 0x01 and 0x02. Hash builder trie has From 5e4da59b3a83c869647ad38b6f679f7594c11938 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 24 Oct 2024 15:12:34 +0400 Subject: [PATCH 242/425] feat: Add more complex E2E test (#12005) --- Cargo.lock | 5 + crates/e2e-test-utils/Cargo.toml | 2 + crates/e2e-test-utils/src/engine_api.rs | 51 +++++++--- crates/e2e-test-utils/src/lib.rs | 107 +++++++++++++++++--- crates/e2e-test-utils/src/node.rs | 18 +++- crates/e2e-test-utils/src/traits.rs | 15 ++- crates/ethereum/node/Cargo.toml | 3 + crates/ethereum/node/tests/e2e/p2p.rs | 126 +++++++++++++++++++++++- 8 files changed, 298 insertions(+), 29 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ab5218437..3449e381f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7020,6 +7020,7 @@ dependencies = [ "reth", "reth-chainspec", "reth-db", + "reth-engine-local", "reth-network-peers", "reth-node-builder", "reth-payload-builder", @@ -7033,6 +7034,7 @@ dependencies = [ "tokio", "tokio-stream", "tracing", + "url", ] [[package]] @@ -7966,8 +7968,11 @@ dependencies = [ "alloy-consensus", "alloy-genesis", "alloy-primitives", + "alloy-provider", + "alloy-signer", "eyre", "futures", + "rand 0.8.5", "reth", "reth-auto-seal-consensus", "reth-basic-payload-builder", diff --git a/crates/e2e-test-utils/Cargo.toml b/crates/e2e-test-utils/Cargo.toml index 9fa3e2b60..04f031daa 100644 --- a/crates/e2e-test-utils/Cargo.toml +++ b/crates/e2e-test-utils/Cargo.toml @@ -23,9 +23,11 @@ reth-node-builder = { workspace = true, features = ["test-utils"] } reth-tokio-util.workspace = true reth-stages-types.workspace = true reth-network-peers.workspace = true +reth-engine-local.workspace = true # rpc jsonrpsee.workspace = true +url.workspace = true # ethereum alloy-primitives.workspace = true diff --git a/crates/e2e-test-utils/src/engine_api.rs b/crates/e2e-test-utils/src/engine_api.rs index f4aa8fdf5..5027b2620 100644 --- a/crates/e2e-test-utils/src/engine_api.rs +++ b/crates/e2e-test-utils/src/engine_api.rs @@ -12,19 +12,22 @@ use reth::{ types::engine::{ForkchoiceState, PayloadStatusEnum}, }, }; +use reth_chainspec::EthereumHardforks; +use reth_node_builder::BuiltPayload; use reth_payload_builder::PayloadId; use reth_rpc_layer::AuthClientService; -use std::marker::PhantomData; +use std::{marker::PhantomData, sync::Arc}; /// Helper for engine api operations #[derive(Debug)] -pub struct EngineApiTestContext { +pub struct EngineApiTestContext { + pub chain_spec: Arc, pub canonical_stream: CanonStateNotificationStream, pub engine_api_client: HttpClient>, pub _marker: PhantomData, } -impl EngineApiTestContext { +impl EngineApiTestContext { /// Retrieves a v3 payload from the engine api pub async fn 
get_payload_v3( &self, @@ -51,18 +54,40 @@ impl EngineApiTestContext { ) -> eyre::Result where E::ExecutionPayloadEnvelopeV3: From + PayloadEnvelopeExt, + E::ExecutionPayloadEnvelopeV4: From + PayloadEnvelopeExt, { - // setup payload for submission - let envelope_v3: ::ExecutionPayloadEnvelopeV3 = payload.into(); - // submit payload to engine api - let submission = EngineApiClient::::new_payload_v3( - &self.engine_api_client, - envelope_v3.execution_payload(), - versioned_hashes, - payload_builder_attributes.parent_beacon_block_root().unwrap(), - ) - .await?; + let submission = if self + .chain_spec + .is_prague_active_at_timestamp(payload_builder_attributes.timestamp()) + { + let requests = payload + .executed_block() + .unwrap() + .execution_outcome() + .requests + .first() + .unwrap() + .clone(); + let envelope: ::ExecutionPayloadEnvelopeV4 = payload.into(); + EngineApiClient::::new_payload_v4( + &self.engine_api_client, + envelope.execution_payload(), + versioned_hashes, + payload_builder_attributes.parent_beacon_block_root().unwrap(), + requests, + ) + .await? + } else { + let envelope: ::ExecutionPayloadEnvelopeV3 = payload.into(); + EngineApiClient::::new_payload_v3( + &self.engine_api_client, + envelope.execution_payload(), + versioned_hashes, + payload_builder_attributes.parent_beacon_block_root().unwrap(), + ) + .await? + }; assert_eq!(submission.status, expected_status); diff --git a/crates/e2e-test-utils/src/lib.rs b/crates/e2e-test-utils/src/lib.rs index 48e56910e..f5ee1e5e6 100644 --- a/crates/e2e-test-utils/src/lib.rs +++ b/crates/e2e-test-utils/src/lib.rs @@ -11,11 +11,13 @@ use reth::{ }; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_db::{test_utils::TempDatabase, DatabaseEnv}; +use reth_engine_local::LocalPayloadAttributesBuilder; use reth_node_builder::{ - components::NodeComponentsBuilder, rpc::RethRpcAddOns, FullNodeTypesAdapter, Node, NodeAdapter, - NodeComponents, NodeTypesWithDBAdapter, NodeTypesWithEngine, RethFullAdapter, + components::NodeComponentsBuilder, rpc::RethRpcAddOns, EngineNodeLauncher, + FullNodeTypesAdapter, Node, NodeAdapter, NodeComponents, NodeTypesWithDBAdapter, + NodeTypesWithEngine, PayloadAttributesBuilder, PayloadTypes, }; -use reth_provider::providers::BlockchainProvider; +use reth_provider::providers::{BlockchainProvider, BlockchainProvider2}; use tracing::{span, Level}; use wallet::Wallet; @@ -102,21 +104,102 @@ where Ok((nodes, tasks, Wallet::default().with_chain_id(chain_spec.chain().into()))) } +/// Creates the initial setup with `num_nodes` started and interconnected. 
+pub async fn setup_engine( + num_nodes: usize, + chain_spec: Arc, + is_dev: bool, +) -> eyre::Result<( + Vec>>>, + TaskManager, + Wallet, +)> +where + N: Default + + Node>>> + + NodeTypesWithEngine, + N::ComponentsBuilder: NodeComponentsBuilder< + TmpNodeAdapter>>, + Components: NodeComponents< + TmpNodeAdapter>>, + Network: PeersHandleProvider, + >, + >, + N::AddOns: RethRpcAddOns>>>, + LocalPayloadAttributesBuilder: PayloadAttributesBuilder< + <::Engine as PayloadTypes>::PayloadAttributes, + >, +{ + let tasks = TaskManager::current(); + let exec = tasks.executor(); + + let network_config = NetworkArgs { + discovery: DiscoveryArgs { disable_discovery: true, ..DiscoveryArgs::default() }, + ..NetworkArgs::default() + }; + + // Create nodes and peer them + let mut nodes: Vec> = Vec::with_capacity(num_nodes); + + for idx in 0..num_nodes { + let node_config = NodeConfig::new(chain_spec.clone()) + .with_network(network_config.clone()) + .with_unused_ports() + .with_rpc(RpcServerArgs::default().with_unused_ports().with_http()) + .set_dev(is_dev); + + let span = span!(Level::INFO, "node", idx); + let _enter = span.enter(); + let node = N::default(); + let NodeHandle { node, node_exit_future: _ } = NodeBuilder::new(node_config.clone()) + .testing_node(exec.clone()) + .with_types_and_provider::>() + .with_components(node.components_builder()) + .with_add_ons(node.add_ons()) + .launch_with_fn(|builder| { + let launcher = EngineNodeLauncher::new( + builder.task_executor().clone(), + builder.config().datadir(), + Default::default(), + ); + builder.launch_with(launcher) + }) + .await?; + + let mut node = NodeTestContext::new(node).await?; + + // Connect each node in a chain. + if let Some(previous_node) = nodes.last_mut() { + previous_node.connect(&mut node).await; + } + + // Connect last node with the first if there are more than two + if idx + 1 == num_nodes && num_nodes > 2 { + if let Some(first_node) = nodes.first_mut() { + node.connect(first_node).await; + } + } + + nodes.push(node); + } + + Ok((nodes, tasks, Wallet::default().with_chain_id(chain_spec.chain().into()))) +} + // Type aliases type TmpDB = Arc>; -type TmpNodeAdapter = FullNodeTypesAdapter< - NodeTypesWithDBAdapter, - BlockchainProvider>, ->; +type TmpNodeAdapter>> = + FullNodeTypesAdapter, Provider>; /// Type alias for a `NodeAdapter` -pub type Adapter = NodeAdapter< - RethFullAdapter, - <>>::ComponentsBuilder as NodeComponentsBuilder< - RethFullAdapter, +pub type Adapter>> = NodeAdapter< + TmpNodeAdapter, + <>>::ComponentsBuilder as NodeComponentsBuilder< + TmpNodeAdapter, >>::Components, >; /// Type alias for a type of `NodeHelper` -pub type NodeHelperType = NodeTestContext, AO>; +pub type NodeHelperType>> = + NodeTestContext, AO>; diff --git a/crates/e2e-test-utils/src/node.rs b/crates/e2e-test-utils/src/node.rs index 776a437a5..f40072018 100644 --- a/crates/e2e-test-utils/src/node.rs +++ b/crates/e2e-test-utils/src/node.rs @@ -18,9 +18,10 @@ use reth::{ }, }; use reth_chainspec::EthereumHardforks; -use reth_node_builder::{rpc::RethRpcAddOns, NodeTypesWithEngine}; +use reth_node_builder::{rpc::RethRpcAddOns, NodeTypes, NodeTypesWithEngine}; use reth_stages_types::StageId; use tokio_stream::StreamExt; +use url::Url; use crate::{ engine_api::EngineApiTestContext, network::NetworkTestContext, payload::PayloadTestContext, @@ -41,7 +42,10 @@ where /// Context for testing network functionalities. pub network: NetworkTestContext, /// Context for testing the Engine API. 
- pub engine_api: EngineApiTestContext<::Engine>, + pub engine_api: EngineApiTestContext< + ::Engine, + ::ChainSpec, + >, /// Context for testing RPC features. pub rpc: RpcTestContext, } @@ -63,6 +67,7 @@ where payload: PayloadTestContext::new(builder).await?, network: NetworkTestContext::new(node.network.clone()), engine_api: EngineApiTestContext { + chain_spec: node.chain_spec(), engine_api_client: node.auth_server_handle().http_client(), canonical_stream: node.provider.canonical_state_stream(), _marker: PhantomData::, @@ -89,6 +94,7 @@ where ) -> eyre::Result> where Engine::ExecutionPayloadEnvelopeV3: From + PayloadEnvelopeExt, + Engine::ExecutionPayloadEnvelopeV4: From + PayloadEnvelopeExt, AddOns::EthApi: EthApiSpec + EthTransactions + TraceExt + FullEthApiTypes, { let mut chain = Vec::with_capacity(length as usize); @@ -137,6 +143,8 @@ where where ::ExecutionPayloadEnvelopeV3: From + PayloadEnvelopeExt, + ::ExecutionPayloadEnvelopeV4: + From + PayloadEnvelopeExt, { let (payload, eth_attr) = self.new_payload(attributes_generator).await?; @@ -236,4 +244,10 @@ where } Ok(()) } + + /// Returns the RPC URL. + pub fn rpc_url(&self) -> Url { + let addr = self.inner.rpc_server_handle().http_local_addr().unwrap(); + format!("http://{}", addr).parse().unwrap() + } } diff --git a/crates/e2e-test-utils/src/traits.rs b/crates/e2e-test-utils/src/traits.rs index 678649214..a70bbf7af 100644 --- a/crates/e2e-test-utils/src/traits.rs +++ b/crates/e2e-test-utils/src/traits.rs @@ -1,4 +1,5 @@ -use op_alloy_rpc_types_engine::OpExecutionPayloadEnvelopeV3; +use alloy_rpc_types::engine::ExecutionPayloadEnvelopeV4; +use op_alloy_rpc_types_engine::{OpExecutionPayloadEnvelopeV3, OpExecutionPayloadEnvelopeV4}; use reth::rpc::types::engine::{ExecutionPayloadEnvelopeV3, ExecutionPayloadV3}; /// The execution payload envelope type. 
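For orientation, a condensed call-site sketch of the new `setup_engine` helper, mirroring the e2e test added later in this patch; the concrete node type and chain spec are placeholders:

    // inside an async test body, assuming an Ethereum `chain_spec` is in scope
    let (mut nodes, _tasks, _wallet) =
        setup_engine::<EthereumNode>(2, chain_spec.clone(), false).await?;
    let mut node = nodes.pop().unwrap();
    let provider = ProviderBuilder::new().with_recommended_fillers().on_http(node.rpc_url());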
@@ -13,8 +14,20 @@ impl PayloadEnvelopeExt for OpExecutionPayloadEnvelopeV3 { } } +impl PayloadEnvelopeExt for OpExecutionPayloadEnvelopeV4 { + fn execution_payload(&self) -> ExecutionPayloadV3 { + self.execution_payload.clone() + } +} + impl PayloadEnvelopeExt for ExecutionPayloadEnvelopeV3 { fn execution_payload(&self) -> ExecutionPayloadV3 { self.execution_payload.clone() } } + +impl PayloadEnvelopeExt for ExecutionPayloadEnvelopeV4 { + fn execution_payload(&self) -> ExecutionPayloadV3 { + self.execution_payload.clone() + } +} diff --git a/crates/ethereum/node/Cargo.toml b/crates/ethereum/node/Cargo.toml index 11555cdc4..e7784637a 100644 --- a/crates/ethereum/node/Cargo.toml +++ b/crates/ethereum/node/Cargo.toml @@ -52,6 +52,9 @@ alloy-genesis.workspace = true tokio.workspace = true serde_json.workspace = true alloy-consensus.workspace = true +alloy-provider.workspace = true +rand.workspace = true +alloy-signer.workspace = true [features] default = [] diff --git a/crates/ethereum/node/tests/e2e/p2p.rs b/crates/ethereum/node/tests/e2e/p2p.rs index a40c1b3f4..0fae23a08 100644 --- a/crates/ethereum/node/tests/e2e/p2p.rs +++ b/crates/ethereum/node/tests/e2e/p2p.rs @@ -1,7 +1,19 @@ use crate::utils::eth_payload_attributes; +use alloy_consensus::TxType; +use alloy_primitives::bytes; +use alloy_provider::{ + network::{ + Ethereum, EthereumWallet, NetworkWallet, TransactionBuilder, TransactionBuilder7702, + }, + Provider, ProviderBuilder, SendableTx, +}; +use alloy_signer::SignerSync; +use rand::{rngs::StdRng, seq::SliceRandom, Rng, SeedableRng}; +use reth::rpc::types::TransactionRequest; use reth_chainspec::{ChainSpecBuilder, MAINNET}; -use reth_e2e_test_utils::{setup, transaction::TransactionTestContext}; +use reth_e2e_test_utils::{setup, setup_engine, transaction::TransactionTestContext}; use reth_node_ethereum::EthereumNode; +use revm::primitives::{AccessListItem, Authorization}; use std::sync::Arc; #[tokio::test] @@ -45,3 +57,115 @@ async fn can_sync() -> eyre::Result<()> { Ok(()) } + +#[tokio::test] +async fn e2e_test_send_transactions() -> eyre::Result<()> { + reth_tracing::init_test_tracing(); + + let seed: [u8; 32] = rand::thread_rng().gen(); + let mut rng = StdRng::from_seed(seed); + println!("Seed: {:?}", seed); + + let chain_spec = Arc::new( + ChainSpecBuilder::default() + .chain(MAINNET.chain) + .genesis(serde_json::from_str(include_str!("../assets/genesis.json")).unwrap()) + .cancun_activated() + .prague_activated() + .build(), + ); + + let (mut nodes, _tasks, wallet) = + setup_engine::(2, chain_spec.clone(), false).await?; + let mut node = nodes.pop().unwrap(); + let signers = wallet.gen(); + let provider = ProviderBuilder::new().with_recommended_fillers().on_http(node.rpc_url()); + + // simple contract which writes to storage on any call + let dummy_bytecode = bytes!("6080604052348015600f57600080fd5b50602880601d6000396000f3fe4360a09081523360c0526040608081905260e08152902080805500fea164736f6c6343000810000a"); + let mut call_destinations = signers.iter().map(|s| s.address()).collect::>(); + + // Produce 100 random blocks with random transactions + for _ in 0..100 { + let tx_count = rng.gen_range(1..20); + + let mut pending = vec![]; + for _ in 0..tx_count { + let signer = signers.choose(&mut rng).unwrap(); + let tx_type = TxType::try_from(rng.gen_range(0..=4)).unwrap(); + + let mut tx = TransactionRequest::default().with_from(signer.address()); + + let should_create = + rng.gen::() && tx_type != TxType::Eip4844 && tx_type != TxType::Eip7702; + if should_create { + tx = 
tx.into_create().with_input(dummy_bytecode.clone()); + } else { + tx = tx.with_to(*call_destinations.choose(&mut rng).unwrap()).with_input( + (0..rng.gen_range(0..10000)).map(|_| rng.gen()).collect::>(), + ); + } + + if matches!(tx_type, TxType::Legacy | TxType::Eip2930) { + tx = tx.with_gas_price(provider.get_gas_price().await?); + } + + if rng.gen::() || tx_type == TxType::Eip2930 { + tx = tx.with_access_list( + vec![AccessListItem { + address: *call_destinations.choose(&mut rng).unwrap(), + storage_keys: (0..rng.gen_range(0..100)).map(|_| rng.gen()).collect(), + }] + .into(), + ); + } + + if tx_type == TxType::Eip7702 { + let signer = signers.choose(&mut rng).unwrap(); + let auth = Authorization { + chain_id: provider.get_chain_id().await?, + address: *call_destinations.choose(&mut rng).unwrap(), + nonce: provider.get_transaction_count(signer.address()).await?, + }; + let sig = signer.sign_hash_sync(&auth.signature_hash())?; + tx = tx.with_authorization_list(vec![auth.into_signed(sig)]) + } + + let SendableTx::Builder(tx) = provider.fill(tx).await? else { unreachable!() }; + let tx = + NetworkWallet::::sign_request(&EthereumWallet::new(signer.clone()), tx) + .await?; + + pending.push(provider.send_tx_envelope(tx).await?); + } + + let (payload, _) = node.advance_block(vec![], eth_payload_attributes).await?; + assert!(payload.block().raw_transactions().len() == tx_count); + + for pending in pending { + let receipt = pending.get_receipt().await?; + if let Some(address) = receipt.contract_address { + call_destinations.push(address); + } + } + } + + let second_node = nodes.pop().unwrap(); + let second_provider = + ProviderBuilder::new().with_recommended_fillers().on_http(second_node.rpc_url()); + + assert_eq!(second_provider.get_block_number().await?, 0); + + let head = provider.get_block_by_number(Default::default(), false).await?.unwrap().header.hash; + second_node.engine_api.update_forkchoice(head, head).await?; + + let start = std::time::Instant::now(); + + while provider.get_block_number().await? != second_provider.get_block_number().await? { + tokio::time::sleep(std::time::Duration::from_millis(100)).await; + + assert!(start.elapsed() <= std::time::Duration::from_secs(10), "timed out"); + } + + Ok(()) +} From 8b317f206f89e5618e0a6ce77a4e999c34597b51 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 24 Oct 2024 14:36:52 +0200 Subject: [PATCH 243/425] fix: exclude withdrawals from uncle block (#12038) --- crates/rpc/rpc-types-compat/src/block.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/rpc/rpc-types-compat/src/block.rs b/crates/rpc/rpc-types-compat/src/block.rs index 8cddc8c44..a954e05e4 100644 --- a/crates/rpc/rpc-types-compat/src/block.rs +++ b/crates/rpc/rpc-types-compat/src/block.rs @@ -183,14 +183,14 @@ fn from_block_with_transactions( /// an Uncle from its header. 
pub fn uncle_block_from_header(header: PrimitiveHeader) -> Block { let hash = header.hash_slow(); - let rpc_header = from_primitive_with_hash(SealedHeader::new(header.clone(), hash)); let uncle_block = PrimitiveBlock { header, ..Default::default() }; let size = Some(U256::from(uncle_block.length())); + let rpc_header = from_primitive_with_hash(SealedHeader::new(uncle_block.header, hash)); Block { uncles: vec![], header: rpc_header, transactions: BlockTransactions::Uncle, - withdrawals: Some(vec![]), + withdrawals: None, size, } } From bea6e278dfdb052ce7bbcec5d42349221aa04b84 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 24 Oct 2024 14:29:24 +0200 Subject: [PATCH 244/425] primitive-traits: use alloy `MIN_PROTOCOL_BASE_FEE` constant (#12037) --- crates/node/core/src/args/txpool.rs | 3 +-- crates/primitives-traits/src/constants/mod.rs | 26 +------------------ crates/transaction-pool/src/config.rs | 3 +-- crates/transaction-pool/src/pool/txpool.rs | 4 +-- crates/transaction-pool/src/test_utils/gen.rs | 6 ++--- .../transaction-pool/src/test_utils/mock.rs | 5 ++-- crates/transaction-pool/tests/it/evict.rs | 3 +-- 7 files changed, 10 insertions(+), 40 deletions(-) diff --git a/crates/node/core/src/args/txpool.rs b/crates/node/core/src/args/txpool.rs index 282313555..538315101 100644 --- a/crates/node/core/src/args/txpool.rs +++ b/crates/node/core/src/args/txpool.rs @@ -1,10 +1,9 @@ //! Transaction pool arguments use crate::cli::config::RethTransactionPoolConfig; -use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; +use alloy_eips::eip1559::{ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE}; use alloy_primitives::Address; use clap::Args; -use reth_primitives::constants::MIN_PROTOCOL_BASE_FEE; use reth_transaction_pool::{ blobstore::disk::DEFAULT_MAX_CACHED_BLOBS, pool::{NEW_TX_LISTENER_BUFFER_SIZE, PENDING_TX_LISTENER_BUFFER_SIZE}, diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index c5ee6bc3e..6ed88ce59 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -1,6 +1,6 @@ //! Ethereum protocol-related constants -use alloy_primitives::{address, b256, Address, B256, U256}; +use alloy_primitives::{address, b256, Address, B256}; /// Gas units, for example [`GIGAGAS`]. pub mod gas_units; @@ -9,20 +9,6 @@ pub use gas_units::{GIGAGAS, KILOGAS, MEGAGAS}; /// The client version: `reth/v{major}.{minor}.{patch}` pub const RETH_CLIENT_VERSION: &str = concat!("reth/v", env!("CARGO_PKG_VERSION")); -/// The minimum tx fee below which the txpool will reject the transaction. -/// -/// Configured to `7` WEI which is the lowest possible value of base fee under mainnet EIP-1559 -/// parameters. `BASE_FEE_MAX_CHANGE_DENOMINATOR` -/// is `8`, or 12.5%. Once the base fee has dropped to `7` WEI it cannot decrease further because -/// 12.5% of 7 is less than 1. -/// -/// Note that min base fee under different 1559 parameterizations may differ, but there's no -/// significant harm in leaving this setting as is. -pub const MIN_PROTOCOL_BASE_FEE: u64 = 7; - -/// Same as [`MIN_PROTOCOL_BASE_FEE`] but as a U256. 
-pub const MIN_PROTOCOL_BASE_FEE_U256: U256 = U256::from_limbs([7u64, 0, 0, 0]); - /// Base fee max change denominator as defined in [EIP-1559](https://eips.ethereum.org/EIPS/eip-1559) pub const EIP1559_DEFAULT_BASE_FEE_MAX_CHANGE_DENOMINATOR: u64 = 8; @@ -79,13 +65,3 @@ pub const OP_SYSTEM_TX_TO_ADDR: Address = address!("4200000000000000000000000000 /// Unwind depth of `3` blocks significantly reduces the chance that the reorged block is kept in /// the database. pub const BEACON_CONSENSUS_REORG_UNWIND_DEPTH: u64 = 3; - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn min_protocol_sanity() { - assert_eq!(MIN_PROTOCOL_BASE_FEE_U256.to::(), MIN_PROTOCOL_BASE_FEE); - } -} diff --git a/crates/transaction-pool/src/config.rs b/crates/transaction-pool/src/config.rs index 8fe49f476..d45188462 100644 --- a/crates/transaction-pool/src/config.rs +++ b/crates/transaction-pool/src/config.rs @@ -3,9 +3,8 @@ use crate::{ PoolSize, TransactionOrigin, }; use alloy_consensus::constants::EIP4844_TX_TYPE_ID; -use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; +use alloy_eips::eip1559::{ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE}; use alloy_primitives::Address; -use reth_primitives::constants::MIN_PROTOCOL_BASE_FEE; use std::{collections::HashSet, ops::Mul}; /// Guarantees max transactions for one sender, compatible with geth/erigon diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index a3f192992..42de860db 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -22,9 +22,9 @@ use alloy_consensus::constants::{ EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, LEGACY_TX_TYPE_ID, }; -use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; +use alloy_eips::eip1559::{ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE}; use alloy_primitives::{Address, TxHash, B256}; -use reth_primitives::constants::{eip4844::BLOB_TX_MIN_BLOB_GASPRICE, MIN_PROTOCOL_BASE_FEE}; +use reth_primitives::constants::eip4844::BLOB_TX_MIN_BLOB_GASPRICE; use rustc_hash::FxHashMap; use smallvec::SmallVec; use std::{ diff --git a/crates/transaction-pool/src/test_utils/gen.rs b/crates/transaction-pool/src/test_utils/gen.rs index d51bf8027..858098ec9 100644 --- a/crates/transaction-pool/src/test_utils/gen.rs +++ b/crates/transaction-pool/src/test_utils/gen.rs @@ -1,12 +1,10 @@ use crate::EthPooledTransaction; use alloy_consensus::{TxEip1559, TxEip4844, TxLegacy}; -use alloy_eips::{eip2718::Encodable2718, eip2930::AccessList}; +use alloy_eips::{eip1559::MIN_PROTOCOL_BASE_FEE, eip2718::Encodable2718, eip2930::AccessList}; use alloy_primitives::{Address, Bytes, TxKind, B256, U256}; use rand::Rng; use reth_chainspec::MAINNET; -use reth_primitives::{ - constants::MIN_PROTOCOL_BASE_FEE, sign_message, Transaction, TransactionSigned, -}; +use reth_primitives::{sign_message, Transaction, TransactionSigned}; /// A generator for transactions for testing purposes. 
#[derive(Debug)] diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index 474cf5cc8..99b0caaf4 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -11,7 +11,7 @@ use alloy_consensus::{ constants::{EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, LEGACY_TX_TYPE_ID}, TxEip1559, TxEip2930, TxEip4844, TxLegacy, }; -use alloy_eips::eip2930::AccessList; +use alloy_eips::{eip1559::MIN_PROTOCOL_BASE_FEE, eip2930::AccessList}; use alloy_primitives::{Address, Bytes, ChainId, TxHash, TxKind, B256, U256}; use paste::paste; use rand::{ @@ -19,8 +19,7 @@ use rand::{ prelude::Distribution, }; use reth_primitives::{ - constants::{eip4844::DATA_GAS_PER_BLOB, MIN_PROTOCOL_BASE_FEE}, - transaction::TryFromRecoveredTransactionError, + constants::eip4844::DATA_GAS_PER_BLOB, transaction::TryFromRecoveredTransactionError, BlobTransactionSidecar, BlobTransactionValidationError, PooledTransactionsElementEcRecovered, Signature, Transaction, TransactionSigned, TransactionSignedEcRecovered, TxType, }; diff --git a/crates/transaction-pool/tests/it/evict.rs b/crates/transaction-pool/tests/it/evict.rs index c1d0bbaa6..fea50962f 100644 --- a/crates/transaction-pool/tests/it/evict.rs +++ b/crates/transaction-pool/tests/it/evict.rs @@ -1,9 +1,8 @@ //! Transaction pool eviction tests. -use alloy_eips::eip1559::ETHEREUM_BLOCK_GAS_LIMIT; +use alloy_eips::eip1559::{ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE}; use alloy_primitives::{Address, B256}; use rand::distributions::Uniform; -use reth_primitives::constants::MIN_PROTOCOL_BASE_FEE; use reth_transaction_pool::{ error::PoolErrorKind, test_utils::{ From 11149d295e195fcdb480fe33e72c8c5c72cf900c Mon Sep 17 00:00:00 2001 From: greged93 <82421016+greged93@users.noreply.github.com> Date: Thu, 24 Oct 2024 15:13:05 +0200 Subject: [PATCH 245/425] feat(payload builder): transaction pool filter (#10542) Co-authored-by: Matthias Seitz --- Cargo.lock | 2 +- crates/transaction-pool/src/pool/best.rs | 2 +- crates/transaction-pool/src/traits.rs | 30 ++++++++++++++++++++++++ 3 files changed, 32 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3449e381f..c3003c991 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11913,4 +11913,4 @@ checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa" dependencies = [ "cc", "pkg-config", -] +] \ No newline at end of file diff --git a/crates/transaction-pool/src/pool/best.rs b/crates/transaction-pool/src/pool/best.rs index 52f25a9db..77cd35d8a 100644 --- a/crates/transaction-pool/src/pool/best.rs +++ b/crates/transaction-pool/src/pool/best.rs @@ -208,7 +208,7 @@ pub struct BestTransactionFilter { impl BestTransactionFilter { /// Create a new [`BestTransactionFilter`] with the given predicate. - pub(crate) const fn new(best: I, predicate: P) -> Self { + pub const fn new(best: I, predicate: P) -> Self { Self { best, predicate } } } diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index 00cda8e1c..eb9d35ad5 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -814,6 +814,36 @@ impl BestTransactions for std::iter::Empty { fn set_skip_blobs(&mut self, _skip_blobs: bool) {} } +/// A filter that allows to check if a transaction satisfies a set of conditions +pub trait TransactionFilter { + /// The type of the transaction to check. 
+ type Transaction; + + /// Returns true if the transaction satisfies the conditions. + fn is_valid(&self, transaction: &Self::Transaction) -> bool; +} + +/// A no-op implementation of [`TransactionFilter`] which +/// marks all transactions as valid. +#[derive(Debug, Clone)] +pub struct NoopTransactionFilter(std::marker::PhantomData); + +// We can't derive Default because this forces T to be +// Default as well, which isn't necessary. +impl Default for NoopTransactionFilter { + fn default() -> Self { + Self(std::marker::PhantomData) + } +} + +impl TransactionFilter for NoopTransactionFilter { + type Transaction = T; + + fn is_valid(&self, _transaction: &Self::Transaction) -> bool { + true + } +} + /// A Helper type that bundles the best transactions attributes together. #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct BestTransactionsAttributes { From 53c547663796edc1891bf4c6c5a8b4c0f65597cb Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 24 Oct 2024 15:20:11 +0200 Subject: [PATCH 246/425] chore(primitive-traits): rm `EIP1559_` constants (#12036) --- crates/primitives-traits/src/constants/mod.rs | 34 ------------------- 1 file changed, 34 deletions(-) diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index 6ed88ce59..f3dd28e19 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -9,43 +9,9 @@ pub use gas_units::{GIGAGAS, KILOGAS, MEGAGAS}; /// The client version: `reth/v{major}.{minor}.{patch}` pub const RETH_CLIENT_VERSION: &str = concat!("reth/v", env!("CARGO_PKG_VERSION")); -/// Base fee max change denominator as defined in [EIP-1559](https://eips.ethereum.org/EIPS/eip-1559) -pub const EIP1559_DEFAULT_BASE_FEE_MAX_CHANGE_DENOMINATOR: u64 = 8; - -/// Elasticity multiplier as defined in [EIP-1559](https://eips.ethereum.org/EIPS/eip-1559) -pub const EIP1559_DEFAULT_ELASTICITY_MULTIPLIER: u64 = 2; - /// Minimum gas limit allowed for transactions. pub const MINIMUM_GAS_LIMIT: u64 = 5000; -/// Base fee max change denominator for Optimism Mainnet as defined in the Optimism -/// [transaction costs](https://community.optimism.io/docs/developers/build/differences/#transaction-costs) doc. -pub const OP_MAINNET_EIP1559_DEFAULT_BASE_FEE_MAX_CHANGE_DENOMINATOR: u128 = 50; - -/// Base fee max change denominator for Optimism Mainnet as defined in the Optimism Canyon -/// hardfork. -pub const OP_MAINNET_EIP1559_BASE_FEE_MAX_CHANGE_DENOMINATOR_CANYON: u128 = 250; - -/// Base fee max change denominator for Optimism Mainnet as defined in the Optimism -/// [transaction costs](https://community.optimism.io/docs/developers/build/differences/#transaction-costs) doc. -pub const OP_MAINNET_EIP1559_DEFAULT_ELASTICITY_MULTIPLIER: u128 = 6; - -/// Base fee max change denominator for Optimism Sepolia as defined in the Optimism -/// [transaction costs](https://community.optimism.io/docs/developers/build/differences/#transaction-costs) doc. -pub const OP_SEPOLIA_EIP1559_DEFAULT_BASE_FEE_MAX_CHANGE_DENOMINATOR: u128 = 50; - -/// Base fee max change denominator for Optimism Sepolia as defined in the Optimism Canyon -/// hardfork. -pub const OP_SEPOLIA_EIP1559_BASE_FEE_MAX_CHANGE_DENOMINATOR_CANYON: u128 = 250; - -/// Base fee max change denominator for Optimism Sepolia as defined in the Optimism -/// [transaction costs](https://community.optimism.io/docs/developers/build/differences/#transaction-costs) doc. 
-pub const OP_SEPOLIA_EIP1559_DEFAULT_ELASTICITY_MULTIPLIER: u128 = 6; - -/// Base fee max change denominator for Base Sepolia as defined in the Optimism -/// [transaction costs](https://community.optimism.io/docs/developers/build/differences/#transaction-costs) doc. -pub const BASE_SEPOLIA_EIP1559_DEFAULT_ELASTICITY_MULTIPLIER: u128 = 10; - /// Holesky genesis hash: `0xb5f7f912443c940f21fd611f12828d75b534364ed9e95ca4e307729a4661bde4` pub const HOLESKY_GENESIS_HASH: B256 = b256!("b5f7f912443c940f21fd611f12828d75b534364ed9e95ca4e307729a4661bde4"); From e74f185d95df7a18734899826890dbd12811cb36 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 24 Oct 2024 15:28:25 +0200 Subject: [PATCH 247/425] feat: add BestTransactions::filter_transactions (#12041) --- crates/transaction-pool/src/traits.rs | 13 +++++++++++++ crates/transaction-pool/tests/it/best.rs | 10 ++++++++++ crates/transaction-pool/tests/it/main.rs | 2 ++ 3 files changed, 25 insertions(+) create mode 100644 crates/transaction-pool/tests/it/best.rs diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index eb9d35ad5..fa5e22ec0 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -761,6 +761,19 @@ pub trait BestTransactions: Iterator + Send { /// /// If set to true, no blob transactions will be returned. fn set_skip_blobs(&mut self, skip_blobs: bool); + + /// Creates an iterator which uses a closure to determine whether a transaction should be + /// returned by the iterator. + /// + /// All items the closure returns false for are marked as invalid via [`Self::mark_invalid`] and + /// descendant transactions will be skipped. + fn filter_transactions
<P>
(self, predicate: P) -> BestTransactionFilter + where + P: FnMut(&Self::Item) -> bool, + Self: Sized, + { + BestTransactionFilter::new(self, predicate) + } } impl BestTransactions for Box diff --git a/crates/transaction-pool/tests/it/best.rs b/crates/transaction-pool/tests/it/best.rs new file mode 100644 index 000000000..cd7a93eae --- /dev/null +++ b/crates/transaction-pool/tests/it/best.rs @@ -0,0 +1,10 @@ +//! Best transaction and filter testing + +use reth_transaction_pool::{noop::NoopTransactionPool, BestTransactions, TransactionPool}; + +#[test] +fn test_best_transactions() { + let noop = NoopTransactionPool::default(); + let mut best = noop.best_transactions().filter_transactions(|_| true); + assert!(best.next().is_none()); +} diff --git a/crates/transaction-pool/tests/it/main.rs b/crates/transaction-pool/tests/it/main.rs index ead33a328..7db2b14c9 100644 --- a/crates/transaction-pool/tests/it/main.rs +++ b/crates/transaction-pool/tests/it/main.rs @@ -9,4 +9,6 @@ mod listeners; #[cfg(feature = "test-utils")] mod pending; +mod best; + const fn main() {} From 2fba3c027b7396db868b162e16635197cd584822 Mon Sep 17 00:00:00 2001 From: Deil Urba Date: Thu, 24 Oct 2024 14:32:44 +0100 Subject: [PATCH 248/425] feat: `ExExContext`'s dynamic config (#11983) --- crates/chainspec/src/api.rs | 7 ++- crates/exex/exex/src/context.rs | 21 ++++++- crates/exex/exex/src/dyn_context.rs | 86 +++++++++++++++++++++++++++ crates/exex/exex/src/lib.rs | 3 + crates/exex/exex/src/notifications.rs | 8 ++- crates/node/types/Cargo.toml | 2 +- crates/optimism/chainspec/src/lib.rs | 7 +-- 7 files changed, 121 insertions(+), 13 deletions(-) create mode 100644 crates/exex/exex/src/dyn_context.rs diff --git a/crates/chainspec/src/api.rs b/crates/chainspec/src/api.rs index f7061ff18..3751789ca 100644 --- a/crates/chainspec/src/api.rs +++ b/crates/chainspec/src/api.rs @@ -4,7 +4,8 @@ use alloy_chains::Chain; use alloy_eips::eip1559::BaseFeeParams; use alloy_genesis::Genesis; use alloy_primitives::B256; -use core::fmt::{Debug, Display}; +use core::fmt::Debug; +use reth_ethereum_forks::DisplayHardforks; use reth_network_peers::NodeRecord; use reth_primitives_traits::Header; @@ -38,7 +39,7 @@ pub trait EthChainSpec: Send + Sync + Unpin + Debug { fn prune_delete_limit(&self) -> usize; /// Returns a string representation of the hardforks. - fn display_hardforks(&self) -> impl Display; + fn display_hardforks(&self) -> DisplayHardforks; /// The genesis header. fn genesis_header(&self) -> &Header; @@ -83,7 +84,7 @@ impl EthChainSpec for ChainSpec { self.prune_delete_limit } - fn display_hardforks(&self) -> impl Display { + fn display_hardforks(&self) -> DisplayHardforks { self.display_hardforks() } diff --git a/crates/exex/exex/src/context.rs b/crates/exex/exex/src/context.rs index c4b4f351b..70972f9be 100644 --- a/crates/exex/exex/src/context.rs +++ b/crates/exex/exex/src/context.rs @@ -7,7 +7,7 @@ use reth_primitives::Head; use reth_tasks::TaskExecutor; use tokio::sync::mpsc::UnboundedSender; -use crate::{ExExEvent, ExExNotifications, ExExNotificationsStream}; +use crate::{ExExContextDyn, ExExEvent, ExExNotifications, ExExNotificationsStream}; /// Captures the context that an `ExEx` has access to. 
pub struct ExExContext { @@ -55,7 +55,24 @@ where } } -impl ExExContext { +impl ExExContext +where + Node: FullNodeComponents, + Node::Provider: Debug, + Node::Executor: Debug, +{ + /// Returns dynamic version of the context + pub fn into_dyn(self) -> ExExContextDyn { + ExExContextDyn::from(self) + } +} + +impl ExExContext +where + Node: FullNodeComponents, + Node::Provider: Debug, + Node::Executor: Debug, +{ /// Returns the transaction pool of the node. pub fn pool(&self) -> &Node::Pool { self.components.pool() diff --git a/crates/exex/exex/src/dyn_context.rs b/crates/exex/exex/src/dyn_context.rs new file mode 100644 index 000000000..19d5a0e50 --- /dev/null +++ b/crates/exex/exex/src/dyn_context.rs @@ -0,0 +1,86 @@ +//! Mirrored version of [`ExExContext`](`crate::ExExContext`) +//! without generic abstraction over [Node](`reth_node_api::FullNodeComponents`) + +use std::{fmt::Debug, sync::Arc}; + +use reth_chainspec::{EthChainSpec, Head}; +use reth_node_api::FullNodeComponents; +use reth_node_core::node_config::NodeConfig; +use tokio::sync::mpsc; + +use crate::{ExExContext, ExExEvent, ExExNotificationsStream}; + +// TODO(0xurb) - add `node` after abstractions +/// Captures the context that an `ExEx` has access to. +pub struct ExExContextDyn { + /// The current head of the blockchain at launch. + pub head: Head, + /// The config of the node + pub config: NodeConfig>, + /// The loaded node config + pub reth_config: reth_config::Config, + /// Channel used to send [`ExExEvent`]s to the rest of the node. + /// + /// # Important + /// + /// The exex should emit a `FinishedHeight` whenever a processed block is safe to prune. + /// Additionally, the exex can pre-emptively emit a `FinishedHeight` event to specify what + /// blocks to receive notifications for. + pub events: mpsc::UnboundedSender, + /// Channel to receive [`ExExNotification`](crate::ExExNotification)s. + /// + /// # Important + /// + /// Once an [`ExExNotification`](crate::ExExNotification) is sent over the channel, it is + /// considered delivered by the node. 
+ pub notifications: Box, +} + +impl Debug for ExExContextDyn { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ExExContext") + .field("head", &self.head) + .field("config", &self.config) + .field("reth_config", &self.reth_config) + .field("events", &self.events) + .field("notifications", &self.notifications) + .finish() + } +} + +impl From> for ExExContextDyn +where + Node: FullNodeComponents, + Node::Provider: Debug, + Node::Executor: Debug, +{ + fn from(ctx: ExExContext) -> Self { + // convert `NodeConfig` with generic over chainspec into `NodeConfig` + let chain: Arc> = + Arc::new(Box::new(ctx.config.chain) as Box); + let config = NodeConfig { + chain, + datadir: ctx.config.datadir, + config: ctx.config.config, + metrics: ctx.config.metrics, + instance: ctx.config.instance, + network: ctx.config.network, + rpc: ctx.config.rpc, + txpool: ctx.config.txpool, + builder: ctx.config.builder, + debug: ctx.config.debug, + db: ctx.config.db, + dev: ctx.config.dev, + pruning: ctx.config.pruning, + }; + let notifications = Box::new(ctx.notifications) as Box; + + Self { + head: ctx.head, + config, + reth_config: ctx.reth_config, + events: ctx.events, + notifications, + } + } +} diff --git a/crates/exex/exex/src/lib.rs b/crates/exex/exex/src/lib.rs index edc9e40d4..ce6641ff6 100644 --- a/crates/exex/exex/src/lib.rs +++ b/crates/exex/exex/src/lib.rs @@ -40,6 +40,9 @@ pub use backfill::*; mod context; pub use context::*; +mod dyn_context; +pub use dyn_context::*; + mod event; pub use event::*; diff --git a/crates/exex/exex/src/notifications.rs b/crates/exex/exex/src/notifications.rs index 90a0ee230..ea112219a 100644 --- a/crates/exex/exex/src/notifications.rs +++ b/crates/exex/exex/src/notifications.rs @@ -24,7 +24,9 @@ pub struct ExExNotifications { /// A trait, that represents a stream of [`ExExNotification`]s. The stream will emit notifications /// for all blocks. If the stream is configured with a head via [`ExExNotifications::set_with_head`] /// or [`ExExNotifications::with_head`], it will run backfill jobs to catch up to the node head. -pub trait ExExNotificationsStream: Stream> + Unpin { +pub trait ExExNotificationsStream: + Debug + Stream> + Unpin +{ /// Sets [`ExExNotificationsStream`] to a stream of [`ExExNotification`]s without a head. /// /// It's a no-op if the stream has already been configured without a head. 
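// Illustrative sketch, not part of the patch series: what the new `ExExContextDyn` buys an
// ExEx author. Once the generic context is converted via `ExExContext::into_dyn`, the ExEx
// body no longer needs the `FullNodeComponents` type parameter. The function name `my_exex`
// and the use of `futures_util::TryStreamExt::try_next` to drain the boxed notification
// stream are assumptions for this example, not part of the change set.
async fn my_exex(mut ctx: reth_exex::ExExContextDyn) -> eyre::Result<()> {
    use futures_util::TryStreamExt;
    // `ctx.notifications` is a `Box<dyn ExExNotificationsStream>`, i.e. an unpinned stream of
    // `eyre::Result<ExExNotification>` items.
    while let Some(_notification) = ctx.notifications.try_next().await? {
        // handle chain commits / reorgs / reverts here
    }
    Ok(())
}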
@@ -90,8 +92,8 @@ impl ExExNotifications { impl ExExNotificationsStream for ExExNotifications where - P: BlockReader + HeaderProvider + StateProviderFactory + Clone + Unpin + 'static, - E: BlockExecutorProvider + Clone + Unpin + 'static, + P: BlockReader + HeaderProvider + StateProviderFactory + Clone + Debug + Unpin + 'static, + E: BlockExecutorProvider + Clone + Debug + Unpin + 'static, { fn set_without_head(&mut self) { let current = std::mem::replace(&mut self.inner, ExExNotificationsInner::Invalid); diff --git a/crates/node/types/Cargo.toml b/crates/node/types/Cargo.toml index b28dcfba5..5747abe9c 100644 --- a/crates/node/types/Cargo.toml +++ b/crates/node/types/Cargo.toml @@ -16,4 +16,4 @@ reth-chainspec.workspace = true reth-db-api.workspace = true reth-engine-primitives.workspace = true reth-primitives.workspace = true -reth-primitives-traits.workspace = true \ No newline at end of file +reth-primitives-traits.workspace = true diff --git a/crates/optimism/chainspec/src/lib.rs b/crates/optimism/chainspec/src/lib.rs index 83c499de5..03ce75aec 100644 --- a/crates/optimism/chainspec/src/lib.rs +++ b/crates/optimism/chainspec/src/lib.rs @@ -23,7 +23,6 @@ use alloy_genesis::Genesis; use alloy_primitives::{B256, U256}; pub use base::BASE_MAINNET; pub use base_sepolia::BASE_SEPOLIA; -use core::fmt::Display; use derive_more::{Constructor, Deref, From, Into}; pub use dev::OP_DEV; #[cfg(not(feature = "std"))] @@ -31,8 +30,8 @@ pub(crate) use once_cell::sync::Lazy as LazyLock; pub use op::OP_MAINNET; pub use op_sepolia::OP_SEPOLIA; use reth_chainspec::{ - BaseFeeParams, BaseFeeParamsKind, ChainSpec, ChainSpecBuilder, DepositContract, EthChainSpec, - EthereumHardforks, ForkFilter, ForkId, Hardforks, Head, + BaseFeeParams, BaseFeeParamsKind, ChainSpec, ChainSpecBuilder, DepositContract, + DisplayHardforks, EthChainSpec, EthereumHardforks, ForkFilter, ForkId, Hardforks, Head, }; use reth_ethereum_forks::{ChainHardforks, EthereumHardfork, ForkCondition, Hardfork}; use reth_network_peers::NodeRecord; @@ -203,7 +202,7 @@ impl EthChainSpec for OpChainSpec { self.inner.prune_delete_limit() } - fn display_hardforks(&self) -> impl Display { + fn display_hardforks(&self) -> DisplayHardforks { self.inner.display_hardforks() } From ba78e439385469925c6fc9df36444f1ef5a6a1bc Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Thu, 24 Oct 2024 17:52:13 +0100 Subject: [PATCH 249/425] fix(exex): relax `ExExContext` trait bounds (#12055) --- crates/exex/exex/src/context.rs | 33 +++++++++++++++++++++++++-- crates/exex/exex/src/dyn_context.rs | 2 +- crates/exex/exex/src/notifications.rs | 8 +++---- 3 files changed, 35 insertions(+), 8 deletions(-) diff --git a/crates/exex/exex/src/context.rs b/crates/exex/exex/src/context.rs index 70972f9be..23d772b73 100644 --- a/crates/exex/exex/src/context.rs +++ b/crates/exex/exex/src/context.rs @@ -70,8 +70,6 @@ where impl ExExContext where Node: FullNodeComponents, - Node::Provider: Debug, - Node::Executor: Debug, { /// Returns the transaction pool of the node. 
pub fn pool(&self) -> &Node::Pool { @@ -123,3 +121,34 @@ where self.notifications.set_with_head(head); } } + +#[cfg(test)] +mod tests { + use reth_exex_types::ExExHead; + use reth_node_api::FullNodeComponents; + + use crate::ExExContext; + + /// + #[test] + const fn issue_12054() { + #[allow(dead_code)] + struct ExEx { + ctx: ExExContext, + } + + impl ExEx { + async fn _test_bounds(mut self) -> eyre::Result<()> { + self.ctx.pool(); + self.ctx.block_executor(); + self.ctx.provider(); + self.ctx.network(); + self.ctx.payload_builder(); + self.ctx.task_executor(); + self.ctx.set_notifications_without_head(); + self.ctx.set_notifications_with_head(ExExHead { block: Default::default() }); + Ok(()) + } + } + } +} diff --git a/crates/exex/exex/src/dyn_context.rs b/crates/exex/exex/src/dyn_context.rs index 19d5a0e50..226f3a3fe 100644 --- a/crates/exex/exex/src/dyn_context.rs +++ b/crates/exex/exex/src/dyn_context.rs @@ -43,7 +43,7 @@ impl Debug for ExExContextDyn { .field("config", &self.config) .field("reth_config", &self.reth_config) .field("events", &self.events) - .field("notifications", &self.notifications) + .field("notifications", &"...") .finish() } } diff --git a/crates/exex/exex/src/notifications.rs b/crates/exex/exex/src/notifications.rs index ea112219a..90a0ee230 100644 --- a/crates/exex/exex/src/notifications.rs +++ b/crates/exex/exex/src/notifications.rs @@ -24,9 +24,7 @@ pub struct ExExNotifications { /// A trait, that represents a stream of [`ExExNotification`]s. The stream will emit notifications /// for all blocks. If the stream is configured with a head via [`ExExNotifications::set_with_head`] /// or [`ExExNotifications::with_head`], it will run backfill jobs to catch up to the node head. -pub trait ExExNotificationsStream: - Debug + Stream> + Unpin -{ +pub trait ExExNotificationsStream: Stream> + Unpin { /// Sets [`ExExNotificationsStream`] to a stream of [`ExExNotification`]s without a head. /// /// It's a no-op if the stream has already been configured without a head. 
@@ -92,8 +90,8 @@ impl ExExNotifications { impl ExExNotificationsStream for ExExNotifications where - P: BlockReader + HeaderProvider + StateProviderFactory + Clone + Debug + Unpin + 'static, - E: BlockExecutorProvider + Clone + Debug + Unpin + 'static, + P: BlockReader + HeaderProvider + StateProviderFactory + Clone + Unpin + 'static, + E: BlockExecutorProvider + Clone + Unpin + 'static, { fn set_without_head(&mut self) { let current = std::mem::replace(&mut self.inner, ExExNotificationsInner::Invalid); From 777417ad8a1b68d3c1b27e0790cb5be3918ffad1 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Fri, 25 Oct 2024 03:34:12 +0900 Subject: [PATCH 250/425] feat: add `reth test-vectors compact --write|--read` (#11954) --- .github/workflows/compact.yml | 42 +++ Cargo.lock | 18 ++ crates/cli/commands/Cargo.toml | 32 ++- .../cli/commands/src/test_vectors/compact.rs | 257 ++++++++++++++++++ crates/cli/commands/src/test_vectors/mod.rs | 26 +- .../cli/commands/src/test_vectors/tables.rs | 6 +- crates/engine/tree/Cargo.toml | 3 +- crates/evm/Cargo.toml | 15 +- crates/net/eth-wire/Cargo.toml | 6 +- crates/optimism/bin/Cargo.toml | 4 + crates/optimism/cli/Cargo.toml | 13 + crates/optimism/cli/src/commands/mod.rs | 7 + .../optimism/cli/src/commands/test_vectors.rs | 72 +++++ crates/optimism/cli/src/lib.rs | 2 + crates/primitives-traits/Cargo.toml | 3 +- crates/primitives/Cargo.toml | 3 +- crates/primitives/src/receipt.rs | 12 + crates/primitives/src/transaction/mod.rs | 6 +- crates/prune/types/Cargo.toml | 11 + crates/prune/types/src/checkpoint.rs | 2 +- crates/prune/types/src/mode.rs | 16 +- crates/revm/Cargo.toml | 9 +- crates/stages/api/Cargo.toml | 3 +- crates/stages/stages/Cargo.toml | 3 +- crates/stages/types/Cargo.toml | 13 + crates/stages/types/src/checkpoints.rs | 18 +- crates/storage/codecs/Cargo.toml | 27 +- .../storage/codecs/src/alloy/access_list.rs | 2 + .../codecs/src/alloy/authorization_list.rs | 9 +- .../codecs/src/alloy/genesis_account.rs | 20 +- crates/storage/codecs/src/alloy/header.rs | 14 +- crates/storage/codecs/src/alloy/mod.rs | 36 ++- crates/storage/codecs/src/alloy/signature.rs | 3 +- .../codecs/src/alloy/transaction/eip1559.rs | 10 +- .../codecs/src/alloy/transaction/eip2930.rs | 8 +- .../codecs/src/alloy/transaction/eip4844.rs | 60 +++- .../codecs/src/alloy/transaction/eip7702.rs | 8 +- .../codecs/src/alloy/transaction/legacy.rs | 10 +- .../codecs/src/alloy/transaction/mod.rs | 82 +++++- .../codecs/src/alloy/transaction/optimism.rs | 8 +- crates/storage/codecs/src/alloy/withdrawal.rs | 8 +- crates/storage/codecs/src/lib.rs | 5 + crates/storage/codecs/src/test_utils.rs | 9 + crates/storage/db-api/Cargo.toml | 9 +- crates/storage/db-models/Cargo.toml | 3 +- crates/storage/db-models/src/accounts.rs | 2 +- crates/storage/db/Cargo.toml | 8 +- crates/storage/provider/Cargo.toml | 7 +- crates/trie/common/Cargo.toml | 3 +- crates/trie/common/src/nibbles.rs | 2 + crates/trie/trie/Cargo.toml | 3 +- 51 files changed, 857 insertions(+), 101 deletions(-) create mode 100644 .github/workflows/compact.yml create mode 100644 crates/cli/commands/src/test_vectors/compact.rs create mode 100644 crates/optimism/cli/src/commands/test_vectors.rs diff --git a/.github/workflows/compact.yml b/.github/workflows/compact.yml new file mode 100644 index 000000000..c7435220c --- /dev/null +++ b/.github/workflows/compact.yml @@ -0,0 +1,42 @@ +# Ensures that `Compact` codec changes are backwards compatible. 
+# +# 1) checkout `main` +# 2) randomly generate and serialize to disk many different type vectors with `Compact` (eg. Header, Transaction, etc) +# 3) checkout `pr` +# 4) deserialize previously generated test vectors + +on: + workflow_dispatch: + +env: + CARGO_TERM_COLOR: always + +name: compact-codec +jobs: + compact-codec: + runs-on: + group: Reth + strategy: + matrix: + bin: + - cargo run --bin reth --features "dev" + - cargo run --bin op-reth --features "optimism dev" --manifest-path crates/optimism/bin/Cargo.toml + steps: + - uses: dtolnay/rust-toolchain@stable + - uses: Swatinem/rust-cache@v2 + with: + cache-on-failure: true + - name: Checkout base + uses: actions/checkout@v4 + with: + ref: ${{ github.base_ref || 'main' }} + # On `main` branch, generates test vectors and serializes them to disk using `Compact`. + - name: Generate compact vectors + run: ${{ matrix.bin }} -- test-vectors compact --write + - name: Checkout PR + uses: actions/checkout@v4 + with: + clean: false + # On incoming merge try to read and decode previously generated vectors with `Compact` + - name: Read vectors + run: ${{ matrix.bin }} -- test-vectors compact --read diff --git a/Cargo.lock b/Cargo.lock index c3003c991..a73a42315 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6615,6 +6615,7 @@ dependencies = [ "reth-cli", "reth-cli-runner", "reth-cli-util", + "reth-codecs", "reth-config", "reth-consensus", "reth-db", @@ -6638,10 +6639,13 @@ dependencies = [ "reth-primitives", "reth-provider", "reth-prune", + "reth-prune-types", "reth-stages", + "reth-stages-types", "reth-static-file", "reth-static-file-types", "reth-trie", + "reth-trie-common", "reth-trie-db", "secp256k1", "serde", @@ -6696,6 +6700,7 @@ dependencies = [ "serde", "serde_json", "test-fuzz", + "visibility", ] [[package]] @@ -8090,6 +8095,8 @@ dependencies = [ "clap", "eyre", "futures-util", + "op-alloy-consensus", + "proptest", "reth-chainspec", "reth-cli", "reth-cli-commands", @@ -11233,6 +11240,17 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "visibility" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.80", +] + [[package]] name = "wait-timeout" version = "0.2.0" diff --git a/crates/cli/commands/Cargo.toml b/crates/cli/commands/Cargo.toml index 6f4b1008f..ef66a9941 100644 --- a/crates/cli/commands/Cargo.toml +++ b/crates/cli/commands/Cargo.toml @@ -17,6 +17,7 @@ reth-cli.workspace = true reth-ethereum-cli.workspace = true reth-cli-runner.workspace = true reth-cli-util.workspace = true +reth-codecs = { workspace = true, optional = true } reth-config.workspace = true reth-consensus.workspace = true reth-db = { workspace = true, features = ["mdbx"] } @@ -38,11 +39,14 @@ reth-node-metrics.workspace = true reth-primitives.workspace = true reth-provider.workspace = true reth-prune.workspace = true +reth-prune-types = { workspace = true, optional = true } reth-stages.workspace = true +reth-stages-types = { workspace = true, optional = true } reth-static-file-types = { workspace = true, features = ["clap"] } reth-static-file.workspace = true reth-trie = { workspace = true, features = ["metrics"] } reth-trie-db = { workspace = true, features = ["metrics"] } +reth-trie-common = { workspace = true, optional = true } # ethereum alloy-eips.workspace = true 
@@ -89,14 +93,22 @@ reth-discv4.workspace = true [features] default = [] arbitrary = [ - "dep:proptest", - "dep:arbitrary", - "dep:proptest-arbitrary-interop", - "reth-primitives/arbitrary", - "reth-db-api/arbitrary", - "reth-eth-wire/arbitrary", - "reth-db/arbitrary", - "reth-chainspec/arbitrary", - "alloy-eips/arbitrary", - "alloy-primitives/arbitrary", + "dep:proptest", + "dep:arbitrary", + "dep:proptest-arbitrary-interop", + "reth-primitives/arbitrary", + "reth-db-api/arbitrary", + "reth-eth-wire/arbitrary", + "reth-db/arbitrary", + "reth-chainspec/arbitrary", + "alloy-eips/arbitrary", + "alloy-primitives/arbitrary", + "reth-codecs/test-utils", + "reth-prune-types/test-utils", + "reth-stages-types/test-utils", + "reth-trie-common/test-utils", + "reth-codecs?/arbitrary", + "reth-prune-types?/arbitrary", + "reth-stages-types?/arbitrary", + "reth-trie-common?/arbitrary" ] diff --git a/crates/cli/commands/src/test_vectors/compact.rs b/crates/cli/commands/src/test_vectors/compact.rs new file mode 100644 index 000000000..cda7d5bd5 --- /dev/null +++ b/crates/cli/commands/src/test_vectors/compact.rs @@ -0,0 +1,257 @@ +use alloy_primitives::{hex, private::getrandom::getrandom, TxKind}; +use arbitrary::Arbitrary; +use eyre::{Context, Result}; +use proptest::{ + prelude::{ProptestConfig, RngCore}, + test_runner::{TestRng, TestRunner}, +}; +use reth_codecs::alloy::{ + authorization_list::Authorization, + genesis_account::GenesisAccount, + header::{Header, HeaderExt}, + transaction::{ + eip1559::TxEip1559, eip2930::TxEip2930, eip4844::TxEip4844, eip7702::TxEip7702, + legacy::TxLegacy, + }, + withdrawal::Withdrawal, +}; +use reth_db::{ + models::{AccountBeforeTx, StoredBlockBodyIndices, StoredBlockOmmers, StoredBlockWithdrawals}, + ClientVersion, +}; +use reth_fs_util as fs; +use reth_primitives::{ + Account, Log, LogData, Receipt, ReceiptWithBloom, StorageEntry, Transaction, + TransactionSignedNoHash, TxType, Withdrawals, +}; +use reth_prune_types::{PruneCheckpoint, PruneMode}; +use reth_stages_types::{ + AccountHashingCheckpoint, CheckpointBlockRange, EntitiesCheckpoint, ExecutionCheckpoint, + HeadersCheckpoint, IndexHistoryCheckpoint, StageCheckpoint, StageUnitCheckpoint, + StorageHashingCheckpoint, +}; +use reth_trie::{hash_builder::HashBuilderValue, TrieMask}; +use reth_trie_common::{hash_builder::HashBuilderState, StoredNibbles, StoredNibblesSubKey}; +use std::{fs::File, io::BufReader}; + +pub const VECTORS_FOLDER: &str = "testdata/micro/compact"; +pub const VECTOR_SIZE: usize = 100; + +#[macro_export] +macro_rules! compact_types { + (regular: [$($regular_ty:ident),*], identifier: [$($id_ty:ident),*]) => { + pub const GENERATE_VECTORS: &[fn(&mut TestRunner) -> eyre::Result<()>] = &[ + $( + generate_vector::<$regular_ty> as fn(&mut TestRunner) -> eyre::Result<()>, + )* + $( + generate_vector::<$id_ty> as fn(&mut TestRunner) -> eyre::Result<()>, + )* + ]; + + pub const READ_VECTORS: &[fn() -> eyre::Result<()>] = &[ + $( + read_vector::<$regular_ty> as fn() -> eyre::Result<()>, + )* + $( + read_vector::<$id_ty> as fn() -> eyre::Result<()>, + )* + ]; + + pub static IDENTIFIER_TYPE: std::sync::LazyLock> = std::sync::LazyLock::new(|| { + let mut map = std::collections::HashSet::new(); + $( + map.insert(type_name::<$id_ty>()); + )* + map + }); + }; +} + +// The type that **actually** implements `Compact` should go here. If it's an alloy type, import the +// auxiliary type from reth_codecs::alloy instead. 
+compact_types!( + regular: [ + // reth-primitives + Account, + Receipt, + Withdrawals, + ReceiptWithBloom, + // reth_codecs::alloy + Authorization, + GenesisAccount, + Header, + HeaderExt, + Withdrawal, + TxEip2930, + TxEip1559, + TxEip4844, + TxEip7702, + TxLegacy, + HashBuilderValue, + LogData, + Log, + // BranchNodeCompact, // todo requires arbitrary + TrieMask, + // TxDeposit, TODO(joshie): optimism + // reth_prune_types + PruneCheckpoint, + PruneMode, + // reth_stages_types + AccountHashingCheckpoint, + StorageHashingCheckpoint, + ExecutionCheckpoint, + HeadersCheckpoint, + IndexHistoryCheckpoint, + EntitiesCheckpoint, + CheckpointBlockRange, + StageCheckpoint, + StageUnitCheckpoint, + // reth_db_api + StoredBlockOmmers, + StoredBlockBodyIndices, + StoredBlockWithdrawals, + // Manual implementations + TransactionSignedNoHash, + // Bytecode, // todo revm arbitrary + StorageEntry, + // MerkleCheckpoint, // todo storedsubnode -> branchnodecompact arbitrary + AccountBeforeTx, + ClientVersion, + StoredNibbles, + StoredNibblesSubKey, + // StorageTrieEntry, // todo branchnodecompact arbitrary + // StoredSubNode, // todo branchnodecompact arbitrary + HashBuilderState + ], + // These types require an extra identifier which is usually stored elsewhere (eg. parent type). + identifier: [ + // Signature todo we for v we only store parity(true || false), while v can take more values + Transaction, + TxType, + TxKind + ] +); + +/// Generates a vector of type `T` to a file. +pub fn generate_vectors() -> Result<()> { + generate_vectors_with(GENERATE_VECTORS) +} + +pub fn read_vectors() -> Result<()> { + read_vectors_with(READ_VECTORS) +} + +/// Generates a vector of type `T` to a file. +pub fn generate_vectors_with(gen: &[fn(&mut TestRunner) -> eyre::Result<()>]) -> Result<()> { + // Prepare random seed for test (same method as used by proptest) + let mut seed = [0u8; 32]; + getrandom(&mut seed)?; + println!("Seed for compact test vectors: {:?}", hex::encode_prefixed(seed)); + + // Start the runner with the seed + let config = ProptestConfig::default(); + let rng = TestRng::from_seed(config.rng_algorithm, &seed); + let mut runner = TestRunner::new_with_rng(config, rng); + + fs::create_dir_all(VECTORS_FOLDER)?; + + for generate_fn in gen { + generate_fn(&mut runner)?; + } + + Ok(()) +} + +/// Reads multiple vectors of different types ensuring their correctness by decoding and +/// re-encoding. +pub fn read_vectors_with(read: &[fn() -> eyre::Result<()>]) -> Result<()> { + fs::create_dir_all(VECTORS_FOLDER)?; + + for read_fn in read { + read_fn()?; + } + + Ok(()) +} + +/// Generates test vectors for a specific type `T`. 
+pub fn generate_vector(runner: &mut TestRunner) -> Result<()> +where + T: for<'a> Arbitrary<'a> + reth_codecs::Compact, +{ + let type_name = type_name::(); + print!("{}", &type_name); + + let mut bytes = std::iter::repeat(0u8).take(256).collect::>(); + let mut compact_buffer = vec![]; + + let mut values = Vec::with_capacity(VECTOR_SIZE); + for _ in 0..VECTOR_SIZE { + runner.rng().fill_bytes(&mut bytes); + compact_buffer.clear(); + + let obj = T::arbitrary(&mut arbitrary::Unstructured::new(&bytes))?; + let res = obj.to_compact(&mut compact_buffer); + + if IDENTIFIER_TYPE.contains(&type_name) { + compact_buffer.push(res as u8); + } + + values.push(hex::encode(&compact_buffer)); + } + + serde_json::to_writer( + std::io::BufWriter::new( + std::fs::File::create(format!("{VECTORS_FOLDER}/{}.json", &type_name)).unwrap(), + ), + &values, + )?; + + println!(" ✅"); + + Ok(()) +} + +/// Reads a vector of type `T` from a file and compares each item with its reconstructed version +/// using `T::from_compact`. +pub fn read_vector() -> Result<()> +where + T: reth_codecs::Compact, +{ + let type_name = type_name::(); + print!("{}", &type_name); + + // Read the file where the vectors are stored + let file_path = format!("{VECTORS_FOLDER}/{}.json", &type_name); + let file = File::open(&file_path).wrap_err_with(|| { + "Failed to open vector. Make sure to run `reth test-vectors compact --write` first." + })?; + let reader = BufReader::new(file); + + let stored_values: Vec = serde_json::from_reader(reader)?; + let mut buffer = vec![]; + + for hex_str in stored_values { + let mut compact_bytes = hex::decode(hex_str)?; + let mut identifier = None; + buffer.clear(); + + if IDENTIFIER_TYPE.contains(&type_name) { + identifier = compact_bytes.pop().map(|b| b as usize); + } + let len_or_identifier = identifier.unwrap_or(compact_bytes.len()); + + let (reconstructed, _) = T::from_compact(&compact_bytes, len_or_identifier); + reconstructed.to_compact(&mut buffer); + assert_eq!(buffer, compact_bytes); + } + + println!(" ✅"); + + Ok(()) +} + +pub fn type_name() -> String { + std::any::type_name::().replace("::", "__") +} diff --git a/crates/cli/commands/src/test_vectors/mod.rs b/crates/cli/commands/src/test_vectors/mod.rs index 999c0bc91..001d0c2e8 100644 --- a/crates/cli/commands/src/test_vectors/mod.rs +++ b/crates/cli/commands/src/test_vectors/mod.rs @@ -2,7 +2,8 @@ use clap::{Parser, Subcommand}; -mod tables; +pub mod compact; +pub mod tables; /// Generate test-vectors for different data types. #[derive(Debug, Parser)] @@ -19,6 +20,22 @@ pub enum Subcommands { /// List of table names. Case-sensitive. names: Vec, }, + /// Randomly generate test vectors for each `Compact` type using the `--write` flag. + /// + /// The generated vectors are serialized in both `json` and `Compact` formats and saved to a + /// file. + /// + /// Use the `--read` flag to read and validate the previously generated vectors from file. + #[group(multiple = false, required = true)] + Compact { + /// Write test vectors to a file. + #[arg(long)] + write: bool, + + /// Read test vectors from a file. + #[arg(long)] + read: bool, + }, } impl Command { @@ -28,6 +45,13 @@ impl Command { Subcommands::Tables { names } => { tables::generate_vectors(names)?; } + Subcommands::Compact { write, .. 
} => { + if write { + compact::generate_vectors()?; + } else { + compact::read_vectors()?; + } + } } Ok(()) } diff --git a/crates/cli/commands/src/test_vectors/tables.rs b/crates/cli/commands/src/test_vectors/tables.rs index 112685251..29ba50c8d 100644 --- a/crates/cli/commands/src/test_vectors/tables.rs +++ b/crates/cli/commands/src/test_vectors/tables.rs @@ -1,4 +1,4 @@ -use alloy_primitives::private::getrandom::getrandom; +use alloy_primitives::{hex, private::getrandom::getrandom}; use arbitrary::Arbitrary; use eyre::Result; use proptest::{ @@ -17,11 +17,11 @@ const VECTORS_FOLDER: &str = "testdata/micro/db"; const PER_TABLE: usize = 1000; /// Generates test vectors for specified `tables`. If list is empty, then generate for all tables. -pub(crate) fn generate_vectors(mut tables: Vec) -> Result<()> { +pub fn generate_vectors(mut tables: Vec) -> Result<()> { // Prepare random seed for test (same method as used by proptest) let mut seed = [0u8; 32]; getrandom(&mut seed)?; - println!("Seed for test vectors: {:?}", seed); + println!("Seed for table test vectors: {:?}", hex::encode_prefixed(seed)); // Start the runner with the seed let config = ProptestConfig::default(); diff --git a/crates/engine/tree/Cargo.toml b/crates/engine/tree/Cargo.toml index 6fe741db8..dee0bcaf7 100644 --- a/crates/engine/tree/Cargo.toml +++ b/crates/engine/tree/Cargo.toml @@ -95,5 +95,6 @@ test-utils = [ "reth-revm/test-utils", "reth-stages-api/test-utils", "reth-provider/test-utils", - "reth-trie/test-utils" + "reth-trie/test-utils", + "reth-prune-types?/test-utils" ] diff --git a/crates/evm/Cargo.toml b/crates/evm/Cargo.toml index d97a57864..90fd53282 100644 --- a/crates/evm/Cargo.toml +++ b/crates/evm/Cargo.toml @@ -57,11 +57,12 @@ std = [ "revm/std", ] test-utils = [ - "dep:parking_lot", - "reth-chainspec/test-utils", - "reth-consensus/test-utils", - "reth-primitives/test-utils", - "reth-primitives-traits/test-utils", - "reth-revm/test-utils", - "revm/test-utils", + "dep:parking_lot", + "reth-chainspec/test-utils", + "reth-consensus/test-utils", + "reth-primitives/test-utils", + "reth-primitives-traits/test-utils", + "reth-revm/test-utils", + "revm/test-utils", + "reth-prune-types/test-utils" ] diff --git a/crates/net/eth-wire/Cargo.toml b/crates/net/eth-wire/Cargo.toml index b0e256fdf..83a3e163e 100644 --- a/crates/net/eth-wire/Cargo.toml +++ b/crates/net/eth-wire/Cargo.toml @@ -71,7 +71,8 @@ arbitrary = [ "dep:arbitrary", "reth-chainspec/arbitrary", "alloy-eips/arbitrary", - "alloy-primitives/arbitrary" + "alloy-primitives/arbitrary", + "reth-codecs/arbitrary" ] serde = [ "dep:serde", @@ -80,7 +81,8 @@ serde = [ "alloy-primitives/serde", "bytes/serde", "rand/serde", - "secp256k1/serde" + "secp256k1/serde", + "reth-codecs/serde" ] [[test]] diff --git a/crates/optimism/bin/Cargo.toml b/crates/optimism/bin/Cargo.toml index f60ef36a4..771667631 100644 --- a/crates/optimism/bin/Cargo.toml +++ b/crates/optimism/bin/Cargo.toml @@ -47,6 +47,10 @@ optimism = [ "reth-provider/optimism" ] +dev = [ + "reth-optimism-cli/dev" +] + min-error-logs = ["tracing/release_max_level_error"] min-warn-logs = ["tracing/release_max_level_warn"] min-info-logs = ["tracing/release_max_level_info"] diff --git a/crates/optimism/cli/Cargo.toml b/crates/optimism/cli/Cargo.toml index 7db41ccbe..a2ba71214 100644 --- a/crates/optimism/cli/Cargo.toml +++ b/crates/optimism/cli/Cargo.toml @@ -65,6 +65,13 @@ tokio-util = { workspace = true, features = ["codec"] } tracing.workspace = true eyre.workspace = true +# reth test-vectors +proptest = { 
workspace = true, optional = true } +op-alloy-consensus = { workspace = true, features = [ + "arbitrary", +], optional = true } + + [dev-dependencies] tempfile.workspace = true reth-stages = { workspace = true, features = ["test-utils"] } @@ -94,3 +101,9 @@ jemalloc = [ "reth-node-core/jemalloc", "reth-node-metrics/jemalloc" ] + +dev = [ + "dep:proptest", + "reth-cli-commands/arbitrary", + "op-alloy-consensus" +] diff --git a/crates/optimism/cli/src/commands/mod.rs b/crates/optimism/cli/src/commands/mod.rs index a7674ec2c..d51f89932 100644 --- a/crates/optimism/cli/src/commands/mod.rs +++ b/crates/optimism/cli/src/commands/mod.rs @@ -16,6 +16,9 @@ pub mod import; pub mod import_receipts; pub mod init_state; +#[cfg(feature = "dev")] +pub mod test_vectors; + /// Commands to be executed #[derive(Debug, Subcommand)] pub enum Commands @@ -55,4 +58,8 @@ pub enum Commands), + /// Generate Test Vectors + #[cfg(feature = "dev")] + #[command(name = "test-vectors")] + TestVectors(test_vectors::Command), } diff --git a/crates/optimism/cli/src/commands/test_vectors.rs b/crates/optimism/cli/src/commands/test_vectors.rs new file mode 100644 index 000000000..093d63148 --- /dev/null +++ b/crates/optimism/cli/src/commands/test_vectors.rs @@ -0,0 +1,72 @@ +//! Command for generating test vectors. + +use clap::{Parser, Subcommand}; +use op_alloy_consensus::TxDeposit; +use proptest::test_runner::TestRunner; +use reth_cli_commands::{ + compact_types, + test_vectors::{ + compact, + compact::{ + generate_vector, read_vector, GENERATE_VECTORS as ETH_GENERATE_VECTORS, + READ_VECTORS as ETH_READ_VECTORS, + }, + tables, + }, +}; + +/// Generate test-vectors for different data types. +#[derive(Debug, Parser)] +pub struct Command { + #[command(subcommand)] + command: Subcommands, +} + +#[derive(Subcommand, Debug)] +/// `reth test-vectors` subcommands +pub enum Subcommands { + /// Generates test vectors for specified tables. If no table is specified, generate for all. + Tables { + /// List of table names. Case-sensitive. + names: Vec, + }, + /// Generates test vectors for `Compact` types with `--write`. Reads and checks generated + /// vectors with `--read`. + #[group(multiple = false, required = true)] + Compact { + /// Write test vectors to a file. + #[arg(long)] + write: bool, + + /// Read test vectors from a file. + #[arg(long)] + read: bool, + }, +} + +impl Command { + /// Execute the command + pub async fn execute(self) -> eyre::Result<()> { + match self.command { + Subcommands::Tables { names } => { + tables::generate_vectors(names)?; + } + Subcommands::Compact { write, .. 
} => { + compact_types!( + regular: [ + TxDeposit + ], identifier: [] + ); + + if write { + compact::generate_vectors_with(ETH_GENERATE_VECTORS)?; + compact::generate_vectors_with(GENERATE_VECTORS)?; + } else { + compact::read_vectors_with(ETH_READ_VECTORS)?; + compact::read_vectors_with(READ_VECTORS)?; + } + } + } + Ok(()) + } +} diff --git a/crates/optimism/cli/src/lib.rs b/crates/optimism/cli/src/lib.rs index 235b44559..43d126164 100644 --- a/crates/optimism/cli/src/lib.rs +++ b/crates/optimism/cli/src/lib.rs @@ -169,6 +169,8 @@ where runner.run_command_until_exit(|ctx| command.execute::(ctx)) } Commands::Prune(command) => runner.run_until_ctrl_c(command.execute::()), + #[cfg(feature = "dev")] + Commands::TestVectors(command) => runner.run_until_ctrl_c(command.execute()), } } diff --git a/crates/primitives-traits/Cargo.toml b/crates/primitives-traits/Cargo.toml index 9634da40f..4319232f8 100644 --- a/crates/primitives-traits/Cargo.toml +++ b/crates/primitives-traits/Cargo.toml @@ -74,7 +74,8 @@ arbitrary = [ "dep:proptest", "dep:proptest-arbitrary-interop", "alloy-eips/arbitrary", - "revm-primitives/arbitrary" + "revm-primitives/arbitrary", + "reth-codecs/arbitrary" ] serde-bincode-compat = [ "serde_with", diff --git a/crates/primitives/Cargo.toml b/crates/primitives/Cargo.toml index 107c218c7..5e761f41f 100644 --- a/crates/primitives/Cargo.toml +++ b/crates/primitives/Cargo.toml @@ -62,7 +62,7 @@ arbitrary = { workspace = true, features = ["derive"], optional = true } [dev-dependencies] # eth reth-chainspec.workspace = true -reth-codecs.workspace = true +reth-codecs = { workspace = true, features = ["test-utils"] } reth-primitives-traits = { workspace = true, features = ["arbitrary"] } reth-testing-utils.workspace = true revm-primitives = { workspace = true, features = ["arbitrary"] } @@ -120,6 +120,7 @@ arbitrary = [ "alloy-serde?/arbitrary", "op-alloy-consensus?/arbitrary", "op-alloy-rpc-types?/arbitrary", + "reth-codecs?/arbitrary" ] secp256k1 = ["dep:secp256k1"] c-kzg = [ diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index bb6c0841b..940b491e3 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -503,6 +503,18 @@ mod tests { use super::*; use alloy_primitives::{address, b256, bytes, hex_literal::hex}; + #[test] + fn test_decode_receipt() { + #[cfg(not(feature = "optimism"))] + reth_codecs::test_utils::test_decode::(&hex!( + "c428b52ffd23fc42696156b10200f034792b6a94c3850215c2fef7aea361a0c31b79d9a32652eefc0d4e2e730036061cff7344b6fc6132b50cda0ed810a991ae58ef013150c12b2522533cb3b3a8b19b7786a8b5ff1d3cdc84225e22b02def168c8858df" + )); + #[cfg(feature = "optimism")] + reth_codecs::test_utils::test_decode::(&hex!( + "c30328b52ffd23fc426961a00105007eb0042307705a97e503562eacf2b95060cce9de6de68386b6c155b73a9650021a49e2f8baad17f30faff5899d785c4c0873e45bc268bcf07560106424570d11f9a59e8f3db1efa4ceec680123712275f10d92c3411e1caaa11c7c5d591bc11487168e09934a9986848136da1b583babf3a7188e3aed007a1520f1cf4c1ca7d3482c6c28d37c298613c70a76940008816c4c95644579fd08471dc34732fd0f24" + )); + } + // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 #[test] fn encode_legacy_receipt() { diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 7798433d0..b09fff9e2 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -649,10 +649,12 @@ impl reth_codecs::Compact for Transaction { let (tx, buf) = TxDeposit::from_compact(buf, buf.len()); 
(Self::Deposit(tx), buf) } - _ => unreachable!("Junk data in database: unknown Transaction variant"), + _ => unreachable!( + "Junk data in database: unknown Transaction variant: {identifier}" + ), } } - _ => unreachable!("Junk data in database: unknown Transaction variant"), + _ => unreachable!("Junk data in database: unknown Transaction variant: {identifier}"), } } } diff --git a/crates/prune/types/Cargo.toml b/crates/prune/types/Cargo.toml index 13def8eaa..5446d6f76 100644 --- a/crates/prune/types/Cargo.toml +++ b/crates/prune/types/Cargo.toml @@ -20,6 +20,7 @@ derive_more.workspace = true modular-bitfield.workspace = true serde.workspace = true thiserror.workspace = true +arbitrary = { workspace = true, features = ["derive"], optional = true } [dev-dependencies] arbitrary = { workspace = true, features = ["derive"] } @@ -29,3 +30,13 @@ proptest-arbitrary-interop.workspace = true serde_json.workspace = true test-fuzz.workspace = true toml.workspace = true + +[features] +test-utils = [ + "dep:arbitrary", + "reth-codecs/test-utils" +] +arbitrary = [ + "alloy-primitives/arbitrary", + "reth-codecs/arbitrary" +] diff --git a/crates/prune/types/src/checkpoint.rs b/crates/prune/types/src/checkpoint.rs index f654fba7d..e0397c5af 100644 --- a/crates/prune/types/src/checkpoint.rs +++ b/crates/prune/types/src/checkpoint.rs @@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize}; /// Saves the pruning progress of a stage. #[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(Default, arbitrary::Arbitrary))] +#[cfg_attr(any(test, feature = "test-utils"), derive(Default, arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] pub struct PruneCheckpoint { /// Highest pruned block number. If it's [None], the pruning for block `0` is not finished yet. diff --git a/crates/prune/types/src/mode.rs b/crates/prune/types/src/mode.rs index 346588299..9a8e55bb3 100644 --- a/crates/prune/types/src/mode.rs +++ b/crates/prune/types/src/mode.rs @@ -6,7 +6,7 @@ use serde::{Deserialize, Serialize}; /// Prune mode. #[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize, Compact)] #[serde(rename_all = "lowercase")] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[cfg_attr(any(test, feature = "test-utils"), derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] pub enum PruneMode { /// Prune all blocks. @@ -17,6 +17,13 @@ pub enum PruneMode { Before(BlockNumber), } +#[cfg(any(test, feature = "test-utils"))] +impl Default for PruneMode { + fn default() -> Self { + Self::Full + } +} + impl PruneMode { /// Prune blocks up to the specified block number. The specified block number is also pruned. 
/// @@ -69,13 +76,6 @@ impl PruneMode { } } -#[cfg(test)] -impl Default for PruneMode { - fn default() -> Self { - Self::Full - } -} - #[cfg(test)] mod tests { use crate::{ diff --git a/crates/revm/Cargo.toml b/crates/revm/Cargo.toml index 3f2a39a0b..3ee680101 100644 --- a/crates/revm/Cargo.toml +++ b/crates/revm/Cargo.toml @@ -43,10 +43,11 @@ std = [ "alloy-consensus/std", ] test-utils = [ - "dep:reth-trie", - "reth-primitives/test-utils", - "reth-trie?/test-utils", - "revm/test-utils", + "dep:reth-trie", + "reth-primitives/test-utils", + "reth-trie?/test-utils", + "revm/test-utils", + "reth-prune-types/test-utils" ] serde = [ "revm/serde", diff --git a/crates/stages/api/Cargo.toml b/crates/stages/api/Cargo.toml index cba569a2a..88a8e3b96 100644 --- a/crates/stages/api/Cargo.toml +++ b/crates/stages/api/Cargo.toml @@ -50,5 +50,6 @@ test-utils = [ "reth-consensus/test-utils", "reth-network-p2p/test-utils", "reth-primitives-traits/test-utils", - "reth-provider/test-utils" + "reth-provider/test-utils", + "reth-stages-types/test-utils" ] diff --git a/crates/stages/stages/Cargo.toml b/crates/stages/stages/Cargo.toml index 0b26cb6a1..65bb2637b 100644 --- a/crates/stages/stages/Cargo.toml +++ b/crates/stages/stages/Cargo.toml @@ -110,7 +110,8 @@ test-utils = [ "reth-codecs/test-utils", "reth-db-api/test-utils", "reth-trie-db/test-utils", - "reth-trie/test-utils" + "reth-trie/test-utils", + "reth-prune-types/test-utils" ] [[bench]] diff --git a/crates/stages/types/Cargo.toml b/crates/stages/types/Cargo.toml index 54b14b335..a466b21b6 100644 --- a/crates/stages/types/Cargo.toml +++ b/crates/stages/types/Cargo.toml @@ -19,6 +19,7 @@ alloy-primitives.workspace = true modular-bitfield.workspace = true bytes.workspace = true serde.workspace = true +arbitrary = { workspace = true, features = ["derive"], optional = true } [dev-dependencies] arbitrary = { workspace = true, features = ["derive"] } @@ -26,3 +27,15 @@ proptest.workspace = true proptest-arbitrary-interop.workspace = true test-fuzz.workspace = true rand.workspace = true + +[features] +test-utils = [ + "dep:arbitrary", + "reth-codecs/test-utils", + "reth-trie-common/test-utils" +] +arbitrary = [ + "alloy-primitives/arbitrary", + "reth-codecs/arbitrary", + "reth-trie-common/arbitrary" +] diff --git a/crates/stages/types/src/checkpoints.rs b/crates/stages/types/src/checkpoints.rs index 79e896bf4..87225f1ee 100644 --- a/crates/stages/types/src/checkpoints.rs +++ b/crates/stages/types/src/checkpoints.rs @@ -76,7 +76,7 @@ impl Compact for MerkleCheckpoint { /// Saves the progress of AccountHashing stage. #[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[cfg_attr(any(test, feature = "test-utils"), derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] pub struct AccountHashingCheckpoint { /// The next account to start hashing from. @@ -89,7 +89,7 @@ pub struct AccountHashingCheckpoint { /// Saves the progress of StorageHashing stage. #[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[cfg_attr(any(test, feature = "test-utils"), derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] pub struct StorageHashingCheckpoint { /// The next account to start hashing from. @@ -104,7 +104,7 @@ pub struct StorageHashingCheckpoint { /// Saves the progress of Execution stage. 
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[cfg_attr(any(test, feature = "test-utils"), derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] pub struct ExecutionCheckpoint { /// Block range which this checkpoint is valid for. @@ -115,7 +115,7 @@ pub struct ExecutionCheckpoint { /// Saves the progress of Headers stage. #[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[cfg_attr(any(test, feature = "test-utils"), derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] pub struct HeadersCheckpoint { /// Block range which this checkpoint is valid for. @@ -126,7 +126,7 @@ pub struct HeadersCheckpoint { /// Saves the progress of Index History stages. #[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[cfg_attr(any(test, feature = "test-utils"), derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] pub struct IndexHistoryCheckpoint { /// Block range which this checkpoint is valid for. @@ -137,7 +137,7 @@ pub struct IndexHistoryCheckpoint { /// Saves the progress of abstract stage iterating over or downloading entities. #[derive(Debug, Default, PartialEq, Eq, Clone, Copy, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[cfg_attr(any(test, feature = "test-utils"), derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] pub struct EntitiesCheckpoint { /// Number of entities already processed. @@ -166,7 +166,7 @@ impl EntitiesCheckpoint { /// Saves the block range. Usually, it's used to check the validity of some stage checkpoint across /// multiple executions. #[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[cfg_attr(any(test, feature = "test-utils"), derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] pub struct CheckpointBlockRange { /// The first block of the range, inclusive. @@ -189,7 +189,7 @@ impl From<&RangeInclusive> for CheckpointBlockRange { /// Saves the progress of a stage. #[derive(Debug, Default, PartialEq, Eq, Clone, Copy, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[cfg_attr(any(test, feature = "test-utils"), derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] pub struct StageCheckpoint { /// The maximum block processed by the stage. @@ -256,7 +256,7 @@ impl StageCheckpoint { // is not a Copy type. /// Stage-specific checkpoint metrics. #[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[cfg_attr(any(test, feature = "test-utils"), derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] pub enum StageUnitCheckpoint { /// Saves the progress of AccountHashing stage. 
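// Illustrative sketch, not part of the patch series: the round-trip check that
// `reth test-vectors compact --read` performs on every stored vector, condensed from
// `read_vector` in `crates/cli/commands/src/test_vectors/compact.rs`. It decodes the bytes
// generated on `main`, re-encodes them with the code under review, and requires bit-for-bit
// equality. The helper name `assert_compact_roundtrip` and the `Vec<u8>` buffer are chosen
// for this example only; identifier types (`Transaction`, `TxType`, `TxKind`) additionally
// carry a trailing identifier byte that `read_vector` pops off first, which this minimal
// version does not handle.
fn assert_compact_roundtrip<T: reth_codecs::Compact>(stored: &[u8]) {
    // Decode the previously written `Compact` bytes.
    let (decoded, _) = T::from_compact(stored, stored.len());
    // Re-encode with the current code.
    let mut reencoded = Vec::new();
    decoded.to_compact(&mut reencoded);
    // Any difference means the `Compact` format is no longer backwards compatible.
    assert_eq!(reencoded, stored);
}
// e.g. assert_compact_roundtrip::<reth_primitives::Account>(&stored_bytes);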
diff --git a/crates/storage/codecs/Cargo.toml b/crates/storage/codecs/Cargo.toml index 2525b4e8d..20a0673df 100644 --- a/crates/storage/codecs/Cargo.toml +++ b/crates/storage/codecs/Cargo.toml @@ -27,6 +27,9 @@ op-alloy-consensus = { workspace = true, optional = true } # misc bytes.workspace = true modular-bitfield = { workspace = true, optional = true } +visibility = { version = "0.1.1", optional = true} +serde.workspace = true +arbitrary = { workspace = true, features = ["derive"], optional = true } [dev-dependencies] alloy-eips = { workspace = true, default-features = false, features = [ @@ -45,7 +48,6 @@ serde_json.workspace = true arbitrary = { workspace = true, features = ["derive"] } proptest.workspace = true proptest-arbitrary-interop.workspace = true -serde.workspace = true [features] default = ["std", "alloy"] @@ -66,4 +68,25 @@ alloy = [ "dep:alloy-trie", ] optimism = ["alloy", "dep:op-alloy-consensus"] -test-utils = [] +test-utils = [ + "std", + "alloy", + "arbitrary", + "dep:visibility", + "dep:arbitrary" +] +serde = [ + "alloy-consensus?/serde", + "alloy-eips?/serde", + "alloy-primitives/serde", + "alloy-trie?/serde", + "bytes/serde", + "op-alloy-consensus?/serde" +] +arbitrary = [ + "alloy-consensus?/arbitrary", + "alloy-eips?/arbitrary", + "alloy-primitives/arbitrary", + "alloy-trie?/arbitrary", + "op-alloy-consensus?/arbitrary" +] diff --git a/crates/storage/codecs/src/alloy/access_list.rs b/crates/storage/codecs/src/alloy/access_list.rs index 306b64d7e..304b6bd38 100644 --- a/crates/storage/codecs/src/alloy/access_list.rs +++ b/crates/storage/codecs/src/alloy/access_list.rs @@ -1,3 +1,5 @@ +//! Compact implementation for [`AccessList`] + use crate::Compact; use alloc::vec::Vec; use alloy_eips::eip2930::{AccessList, AccessListItem}; diff --git a/crates/storage/codecs/src/alloy/authorization_list.rs b/crates/storage/codecs/src/alloy/authorization_list.rs index 6dc36956d..3fc9518a6 100644 --- a/crates/storage/codecs/src/alloy/authorization_list.rs +++ b/crates/storage/codecs/src/alloy/authorization_list.rs @@ -1,16 +1,21 @@ -use core::ops::Deref; +//! Compact implementation for [`AlloyAuthorization`] use crate::Compact; use alloy_eips::eip7702::{Authorization as AlloyAuthorization, SignedAuthorization}; use alloy_primitives::{Address, U256}; use bytes::Buf; +use core::ops::Deref; use reth_codecs_derive::add_arbitrary_tests; /// Authorization acts as bridge which simplifies Compact implementation for AlloyAuthorization. /// /// Notice: Make sure this struct is 1:1 with `alloy_eips::eip7702::Authorization` #[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] +#[cfg_attr( + any(test, feature = "test-utils"), + derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) +)] +#[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] #[add_arbitrary_tests(compact)] pub(crate) struct Authorization { chain_id: u64, diff --git a/crates/storage/codecs/src/alloy/genesis_account.rs b/crates/storage/codecs/src/alloy/genesis_account.rs index 938ad1375..b29fe526d 100644 --- a/crates/storage/codecs/src/alloy/genesis_account.rs +++ b/crates/storage/codecs/src/alloy/genesis_account.rs @@ -1,3 +1,5 @@ +//! 
Compact implementation for [`AlloyGenesisAccount`] + use crate::Compact; use alloc::vec::Vec; use alloy_genesis::GenesisAccount as AlloyGenesisAccount; @@ -22,8 +24,14 @@ pub(crate) struct GenesisAccountRef<'a> { private_key: Option<&'a B256>, } +/// Acts as bridge which simplifies Compact implementation for +/// `AlloyGenesisAccount`. #[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] +#[cfg_attr( + any(test, feature = "test-utils"), + derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) +)] +#[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] #[add_arbitrary_tests(compact)] pub(crate) struct GenesisAccount { /// The nonce of the account at genesis. @@ -39,14 +47,20 @@ pub(crate) struct GenesisAccount { } #[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] +#[cfg_attr( + any(test, feature = "test-utils"), + derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) +)] #[add_arbitrary_tests(compact)] pub(crate) struct StorageEntries { entries: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] +#[cfg_attr( + any(test, feature = "test-utils"), + derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) +)] #[add_arbitrary_tests(compact)] pub(crate) struct StorageEntry { key: B256, diff --git a/crates/storage/codecs/src/alloy/header.rs b/crates/storage/codecs/src/alloy/header.rs index 90e67b1e3..78f2029c3 100644 --- a/crates/storage/codecs/src/alloy/header.rs +++ b/crates/storage/codecs/src/alloy/header.rs @@ -1,3 +1,5 @@ +//! Compact implementation for [`AlloyHeader`] + use crate::Compact; use alloy_consensus::Header as AlloyHeader; use alloy_primitives::{Address, BlockNumber, Bloom, Bytes, B256, U256}; @@ -10,7 +12,11 @@ use alloy_primitives::{Address, BlockNumber, Bloom, Bytes, B256, U256}; /// will automatically apply to this type. /// /// Notice: Make sure this struct is 1:1 with [`alloy_consensus::Header`] -#[cfg_attr(test, derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr( + any(test, feature = "test-utils"), + derive(serde::Serialize, serde::Deserialize, arbitrary::Arbitrary) +)] +#[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] pub(crate) struct Header { parent_hash: B256, @@ -42,7 +48,11 @@ pub(crate) struct Header { /// used as a field of [`Header`] for backwards compatibility. /// /// More information: & [`reth_codecs_derive::Compact`]. -#[cfg_attr(test, derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr( + any(test, feature = "test-utils"), + derive(serde::Serialize, serde::Deserialize, arbitrary::Arbitrary) +)] +#[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] pub(crate) struct HeaderExt { requests_hash: Option, diff --git a/crates/storage/codecs/src/alloy/mod.rs b/crates/storage/codecs/src/alloy/mod.rs index ed77876c5..f1bf6a00e 100644 --- a/crates/storage/codecs/src/alloy/mod.rs +++ b/crates/storage/codecs/src/alloy/mod.rs @@ -1,13 +1,29 @@ -mod access_list; -mod authorization_list; -mod genesis_account; -mod header; -mod log; -mod signature; -mod transaction; -mod trie; -mod txkind; -mod withdrawal; +//! 
Implements Compact for alloy types. + +/// Will make it a pub mod if test-utils is enabled +macro_rules! cond_mod { + ($($mod_name:ident),*) => { + $( + #[cfg(feature = "test-utils")] + pub mod $mod_name; + #[cfg(not(feature = "test-utils"))] + mod $mod_name; + )* + }; +} + +cond_mod!( + access_list, + authorization_list, + genesis_account, + header, + log, + signature, + transaction, + trie, + txkind, + withdrawal +); #[cfg(test)] mod tests { diff --git a/crates/storage/codecs/src/alloy/signature.rs b/crates/storage/codecs/src/alloy/signature.rs index 70290ea96..0cc4774d0 100644 --- a/crates/storage/codecs/src/alloy/signature.rs +++ b/crates/storage/codecs/src/alloy/signature.rs @@ -1,6 +1,7 @@ -use alloy_primitives::{Parity, Signature, U256}; +//! Compact implementation for [`Signature`] use crate::Compact; +use alloy_primitives::{Parity, Signature, U256}; impl Compact for Signature { fn to_compact(&self, buf: &mut B) -> usize diff --git a/crates/storage/codecs/src/alloy/transaction/eip1559.rs b/crates/storage/codecs/src/alloy/transaction/eip1559.rs index 8e7594951..0e7f44cde 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip1559.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip1559.rs @@ -1,3 +1,5 @@ +//! Compact implementation for [`AlloyTxEip1559`] + use crate::Compact; use alloy_consensus::TxEip1559 as AlloyTxEip1559; use alloy_eips::eip2930::AccessList; @@ -11,8 +13,12 @@ use alloy_primitives::{Bytes, ChainId, TxKind, U256}; /// /// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip1559`] #[derive(Debug, Clone, PartialEq, Eq, Hash, Compact, Default)] -#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] -#[cfg_attr(test, crate::add_arbitrary_tests(compact))] +#[cfg_attr( + any(test, feature = "test-utils"), + derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) +)] +#[cfg_attr(any(test, feature = "test-utils"), crate::add_arbitrary_tests(compact))] +#[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] pub(crate) struct TxEip1559 { chain_id: ChainId, nonce: u64, diff --git a/crates/storage/codecs/src/alloy/transaction/eip2930.rs b/crates/storage/codecs/src/alloy/transaction/eip2930.rs index e0c78a3e4..75cab9e8a 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip2930.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip2930.rs @@ -1,3 +1,5 @@ +//! Compact implementation for [`AlloyTxEip2930`] + use crate::Compact; use alloy_consensus::TxEip2930 as AlloyTxEip2930; use alloy_eips::eip2930::AccessList; @@ -13,7 +15,11 @@ use reth_codecs_derive::add_arbitrary_tests; /// /// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip2930`] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] +#[cfg_attr( + any(test, feature = "test-utils"), + derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) +)] +#[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] #[add_arbitrary_tests(compact)] pub(crate) struct TxEip2930 { chain_id: ChainId, diff --git a/crates/storage/codecs/src/alloy/transaction/eip4844.rs b/crates/storage/codecs/src/alloy/transaction/eip4844.rs index 27c6b9240..5ec36e06b 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip4844.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip4844.rs @@ -1,3 +1,5 @@ +//! 
Compact implementation for [`AlloyTxEip4844`] + use crate::{Compact, CompactPlaceholder}; use alloc::vec::Vec; use alloy_consensus::TxEip4844 as AlloyTxEip4844; @@ -14,7 +16,8 @@ use reth_codecs_derive::add_arbitrary_tests; /// /// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip4844`] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] +#[cfg_attr(any(test, feature = "test-utils"), derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] #[add_arbitrary_tests(compact)] pub(crate) struct TxEip4844 { chain_id: ChainId, @@ -25,6 +28,13 @@ pub(crate) struct TxEip4844 { /// TODO(debt): this should be removed if we break the DB. /// Makes sure that the Compact bitflag struct has one bit after the above field: /// + #[cfg_attr( + feature = "test-utils", + serde( + serialize_with = "serialize_placeholder", + deserialize_with = "deserialize_placeholder" + ) + )] placeholder: Option, to: Address, value: U256, @@ -75,6 +85,54 @@ impl Compact for AlloyTxEip4844 { } } +#[cfg(any(test, feature = "test-utils"))] +impl<'a> arbitrary::Arbitrary<'a> for TxEip4844 { + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { + Ok(Self { + chain_id: ChainId::arbitrary(u)?, + nonce: u64::arbitrary(u)?, + gas_limit: u64::arbitrary(u)?, + max_fee_per_gas: u128::arbitrary(u)?, + max_priority_fee_per_gas: u128::arbitrary(u)?, + // Should always be Some for TxEip4844 + placeholder: Some(()), + to: Address::arbitrary(u)?, + value: U256::arbitrary(u)?, + access_list: AccessList::arbitrary(u)?, + blob_versioned_hashes: Vec::::arbitrary(u)?, + max_fee_per_blob_gas: u128::arbitrary(u)?, + input: Bytes::arbitrary(u)?, + }) + } +} + +#[cfg(any(test, feature = "test-utils"))] +fn serialize_placeholder(value: &Option<()>, serializer: S) -> Result +where + S: serde::Serializer, +{ + // Required otherwise `serde_json` will serialize it as null and would be `None` when decoding + // it again. + match value { + Some(()) => serializer.serialize_str("placeholder"), // Custom serialization + None => serializer.serialize_none(), + } +} + +#[cfg(any(test, feature = "test-utils"))] +fn deserialize_placeholder<'de, D>(deserializer: D) -> Result, D::Error> +where + D: serde::Deserializer<'de>, +{ + use serde::de::Deserialize; + let s: Option = Option::deserialize(deserializer)?; + match s.as_deref() { + Some("placeholder") => Ok(Some(())), + None => Ok(None), + _ => Err(serde::de::Error::custom("unexpected value")), + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/storage/codecs/src/alloy/transaction/eip7702.rs b/crates/storage/codecs/src/alloy/transaction/eip7702.rs index e714be1c3..8acf59425 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip7702.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip7702.rs @@ -1,3 +1,5 @@ +//! 
Compact implementation for [`AlloyTxEip7702`] + use crate::Compact; use alloc::vec::Vec; use alloy_consensus::TxEip7702 as AlloyTxEip7702; @@ -14,7 +16,11 @@ use reth_codecs_derive::add_arbitrary_tests; /// /// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip7702`] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] +#[cfg_attr( + any(test, feature = "test-utils"), + derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) +)] +#[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] #[add_arbitrary_tests(compact)] pub(crate) struct TxEip7702 { chain_id: ChainId, diff --git a/crates/storage/codecs/src/alloy/transaction/legacy.rs b/crates/storage/codecs/src/alloy/transaction/legacy.rs index 27e799a79..c83626aa4 100644 --- a/crates/storage/codecs/src/alloy/transaction/legacy.rs +++ b/crates/storage/codecs/src/alloy/transaction/legacy.rs @@ -1,11 +1,17 @@ +//! Compact implementation for [`AlloyTxLegacy`] + use crate::Compact; use alloy_consensus::TxLegacy as AlloyTxLegacy; use alloy_primitives::{Bytes, ChainId, TxKind, U256}; /// Legacy transaction. #[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] -#[cfg_attr(test, crate::add_arbitrary_tests(compact))] +#[cfg_attr( + any(test, feature = "test-utils"), + derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize), + crate::add_arbitrary_tests(compact) +)] +#[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] pub(crate) struct TxLegacy { /// Added as EIP-155: Simple replay attack protection chain_id: Option, diff --git a/crates/storage/codecs/src/alloy/transaction/mod.rs b/crates/storage/codecs/src/alloy/transaction/mod.rs index 5b1d173a5..dc27eacfa 100644 --- a/crates/storage/codecs/src/alloy/transaction/mod.rs +++ b/crates/storage/codecs/src/alloy/transaction/mod.rs @@ -1,10 +1,18 @@ -pub(crate) mod eip1559; -pub(crate) mod eip2930; -pub(crate) mod eip4844; -pub(crate) mod eip7702; -pub(crate) mod legacy; -#[cfg(feature = "optimism")] -pub(crate) mod optimism; +//! Compact implementation for transaction types + +cond_mod!( + eip1559, + eip2930, + eip4844, + eip7702, + legacy +); + + +#[cfg(all(feature = "test-utils", feature = "optimism"))] +pub mod optimism; +#[cfg(all(not(feature = "test-utils"), feature = "optimism"))] +mod optimism; #[cfg(test)] mod tests { @@ -15,9 +23,13 @@ mod tests { // this check is to ensure we do not inadvertently add too many fields to a struct which would // expand the flags field and break backwards compatibility - use crate::alloy::transaction::{ - eip1559::TxEip1559, eip2930::TxEip2930, eip4844::TxEip4844, eip7702::TxEip7702, - legacy::TxLegacy, + use alloy_primitives::hex; + use crate::{ + alloy::{header::Header, transaction::{ + eip1559::TxEip1559, eip2930::TxEip2930, eip4844::TxEip4844, eip7702::TxEip7702, + legacy::TxLegacy, + }}, + test_utils::test_decode, }; #[test] @@ -34,4 +46,54 @@ mod tests { fn test_ensure_backwards_compatibility_optimism() { assert_eq!(crate::alloy::transaction::optimism::TxDeposit::bitflag_encoded_bytes(), 2); } + + #[test] + fn test_decode_header() { + test_decode::
<Header>
(&hex!( + "01000000fbbb564baeafd064b979c2ac032df5cd987098066a8c6969514dfb8ecfbf043e667fa19efcc00d1dd197c309a3cc42dec820cd627af8f7f38f3274f842406891b22624431d0ea858422db8415b1181f8d19befbd21287debaf98a94e84b3ec20be846f35abfbf743ee3eda4fdda6a6f9124d295da97e26eaa1cedd09936f0a3c560b6bc10316dba5e82abd21afcf519a985feb09a6ce7fba2e8163b10f06c99828b8049c29b993d88d1d112dca60a03ebd8ebc6d69a7e1f301ca6d67c21fe0949d67bca251edf36c96a2cf7c84d98fc60a53988ac95820f434eb35280d98c8ba4d7484e7ee8fefd63591ad4c937ccaaea23871d05c77bac754c5759b34cf9b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + )); + } + + #[test] + fn test_decode_eip1559() { + test_decode::(&hex!( + "88086110b81b05bc5bb59ec3e4cd44e895a9dcb2656d5003e2f64ecb2e15443898cc1cc19af19ca96fc2b4eafc4abc26e4bbd70a3ddb10b7530b65eea128f4095c97164f712c04239902c1b08acf3949d4687123cdd72d5c73df113d2dc6ed7e519f410ace5553ca805975240a208b57013532de78c5cb407423ea11921ab11b13e93ef35d4d01c9a23166c4d627987545fe4675528d0ab111b0a1dc83fba0a4e1cd5c826a94db3f" + )); + } + + #[test] + fn test_decode_eip2930() { + test_decode::(&hex!( + "7810833fce14e3e2921e94fd3727eb71e91551d2c1e029697a654bfab510f3963aa57074015e152065d1c807f8830079fb0aeadc251d248eaec7147e78580ed638c4e667827775e24270edd5aad475776533ece65373afa71722bfeba3c900" + )); + } + + #[test] + fn test_decode_eip4844() { + test_decode::(&hex!( + "88086110025c359180ea680b5007c856f9e1ad4d1be7a5019feb42133f4fc4bdf74da1b457ab787462385a28a1bf8edb401adabf3ff21ac18f695e30180348ea67246fc4dc25e88add12b7c317651a0ce08946d98dbbe5b38883aa758a0f247e23b0fe3ac1bcc43d7212c984d6ccc770d70135890c9a07d715cacb9032c90d539d0b3d209a8d600178bcfb416fd489e5d5dd56d9cfc6addae810ae70bdaee65672b871dc2b3f35ec00dbaa0d872f78cb58b3199984c608c8ba" + )); + } + + #[test] + fn test_decode_eip7702() { + test_decode::(&hex!( + "8808210881415c034feba383d7a6efd3f2601309b33a6d682ad47168cac0f7a5c5136a33370e5e7ca7f570d5530d7a0d18bf5eac33583fdc27b6580f61e8cbd34d6de596f925c1f353188feb2c1e9e20de82a80b57f0be425d8c5896280d4f5f66cdcfba256d0c9ac8abd833859a62ec019501b4585fa176f048de4f88b93bdefecfcaf4d8f0dd04767bc683a4569c893632e44ba9d53f90d758125c9b24c0192a649166520cd5eecbc110b53eda400cf184b8ef9932c81d0deb2ea27dfa863392a87bfd53af3ec67379f20992501e76e387cbe3933861beead1b49649383cf8b2a2d5c6d04b7edc376981ed9b12cf7199fe7fabf5198659e001bed40922969b82a6cd000000000000" + )); + } + + #[test] + fn test_decode_legacy() { + test_decode::(&hex!( + "112210080a8ba06a8d108540bb3140e9f71a0812c46226f9ea77ae880d98d19fe27e5911801175c3b32620b2e887af0296af343526e439b775ee3b1c06750058e9e5fc4cd5965c3010f86184" + )); + } + + #[cfg(feature = "optimism")] + #[test] + fn test_decode_deposit() { + test_decode::(&hex!( + "8108ac8f15983d59b6ae4911a00ff7bfcd2e53d2950926f8c82c12afad02861c46fcb293e776204052725e1c08ff2e9ff602ca916357601fa972a14094891fe3598b718758f22c46f163c18bcaa6296ce87e5267ef3fd932112842fbbf79011548cdf067d93ce6098dfc0aaf5a94531e439f30d6dfd0c6" + )); + } } diff --git a/crates/storage/codecs/src/alloy/transaction/optimism.rs b/crates/storage/codecs/src/alloy/transaction/optimism.rs index f4fdcf5ee..22f508fd4 100644 --- 
a/crates/storage/codecs/src/alloy/transaction/optimism.rs +++ b/crates/storage/codecs/src/alloy/transaction/optimism.rs @@ -1,3 +1,5 @@ +//! Compact implementation for [`AlloyTxDeposit`] + use crate::Compact; use alloy_primitives::{Address, Bytes, TxKind, B256, U256}; use op_alloy_consensus::TxDeposit as AlloyTxDeposit; @@ -12,7 +14,11 @@ use reth_codecs_derive::add_arbitrary_tests; /// /// Notice: Make sure this struct is 1:1 with [`op_alloy_consensus::TxDeposit`] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] +#[cfg_attr( + any(test, feature = "test-utils"), + derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) +)] +#[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] #[add_arbitrary_tests(compact)] pub(crate) struct TxDeposit { source_hash: B256, diff --git a/crates/storage/codecs/src/alloy/withdrawal.rs b/crates/storage/codecs/src/alloy/withdrawal.rs index 16324c280..0f3347cec 100644 --- a/crates/storage/codecs/src/alloy/withdrawal.rs +++ b/crates/storage/codecs/src/alloy/withdrawal.rs @@ -1,3 +1,5 @@ +//! Compact implementation for [`AlloyWithdrawal`] + use crate::Compact; use alloy_eips::eip4895::Withdrawal as AlloyWithdrawal; use alloy_primitives::Address; @@ -7,7 +9,11 @@ use reth_codecs_derive::add_arbitrary_tests; /// /// Notice: Make sure this struct is 1:1 with `alloy_eips::eip4895::Withdrawal` #[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] +#[cfg_attr( + any(test, feature = "test-utils"), + derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) +)] +#[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] #[add_arbitrary_tests(compact)] pub(crate) struct Withdrawal { /// Monotonically increasing identifier issued by consensus layer. diff --git a/crates/storage/codecs/src/lib.rs b/crates/storage/codecs/src/lib.rs index c432400a5..598f2131b 100644 --- a/crates/storage/codecs/src/lib.rs +++ b/crates/storage/codecs/src/lib.rs @@ -18,6 +18,7 @@ #![cfg_attr(not(feature = "std"), no_std)] pub use reth_codecs_derive::*; +use serde as _; use alloy_primitives::{Address, Bloom, Bytes, FixedBytes, U256}; use bytes::{Buf, BufMut}; @@ -25,6 +26,10 @@ use bytes::{Buf, BufMut}; extern crate alloc; use alloc::vec::Vec; +#[cfg(feature = "test-utils")] +pub mod alloy; + +#[cfg(not(feature = "test-utils"))] #[cfg(any(test, feature = "alloy"))] mod alloy; diff --git a/crates/storage/codecs/src/test_utils.rs b/crates/storage/codecs/src/test_utils.rs index bb377c691..b845645cb 100644 --- a/crates/storage/codecs/src/test_utils.rs +++ b/crates/storage/codecs/src/test_utils.rs @@ -79,3 +79,12 @@ impl UnusedBits { matches!(self, Self::NotZero) } } + +/// Tests decoding and re-encoding to ensure correctness. 
+pub fn test_decode(buf: &[u8]) { + let (decoded, _) = T::from_compact(buf, buf.len()); + let mut encoded = Vec::with_capacity(buf.len()); + + decoded.to_compact(&mut encoded); + assert_eq!(buf, &encoded[..]); +} diff --git a/crates/storage/db-api/Cargo.toml b/crates/storage/db-api/Cargo.toml index 932a94b98..f827e48c8 100644 --- a/crates/storage/db-api/Cargo.toml +++ b/crates/storage/db-api/Cargo.toml @@ -62,7 +62,9 @@ test-utils = [ "reth-primitives-traits/test-utils", "reth-codecs/test-utils", "reth-db-models/test-utils", - "reth-trie-common/test-utils" + "reth-trie-common/test-utils", + "reth-prune-types/test-utils", + "reth-stages-types/test-utils" ] arbitrary = [ "reth-primitives/arbitrary", @@ -72,7 +74,10 @@ arbitrary = [ "reth-primitives-traits/arbitrary", "reth-trie-common/arbitrary", "alloy-primitives/arbitrary", - "parity-scale-codec/arbitrary" + "parity-scale-codec/arbitrary", + "reth-codecs/arbitrary", + "reth-prune-types/arbitrary", + "reth-stages-types/arbitrary" ] optimism = [ "reth-primitives/optimism", diff --git a/crates/storage/db-models/Cargo.toml b/crates/storage/db-models/Cargo.toml index 31741207c..d5f773347 100644 --- a/crates/storage/db-models/Cargo.toml +++ b/crates/storage/db-models/Cargo.toml @@ -48,5 +48,6 @@ arbitrary = [ "reth-primitives/arbitrary", "dep:arbitrary", "dep:proptest", - "alloy-primitives/arbitrary" + "alloy-primitives/arbitrary", + "reth-codecs/arbitrary" ] diff --git a/crates/storage/db-models/src/accounts.rs b/crates/storage/db-models/src/accounts.rs index b0099d22d..acfd45fe3 100644 --- a/crates/storage/db-models/src/accounts.rs +++ b/crates/storage/db-models/src/accounts.rs @@ -8,7 +8,7 @@ use reth_primitives::Account; /// /// [`Address`] is the subkey. #[derive(Debug, Default, Clone, Eq, PartialEq, Serialize)] -#[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))] +#[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary, serde::Deserialize))] #[add_arbitrary_tests(compact)] pub struct AccountBeforeTx { /// Address for the account. Acts as `DupSort::SubKey`. 
diff --git a/crates/storage/db/Cargo.toml b/crates/storage/db/Cargo.toml index 2f437e631..324411613 100644 --- a/crates/storage/db/Cargo.toml +++ b/crates/storage/db/Cargo.toml @@ -97,7 +97,9 @@ test-utils = [ "reth-primitives-traits/test-utils", "reth-db-api/test-utils", "reth-nippy-jar/test-utils", - "reth-trie-common/test-utils" + "reth-trie-common/test-utils", + "reth-prune-types/test-utils", + "reth-stages-types/test-utils" ] bench = [] arbitrary = [ @@ -105,7 +107,9 @@ arbitrary = [ "reth-db-api/arbitrary", "reth-primitives-traits/arbitrary", "reth-trie-common/arbitrary", - "alloy-primitives/arbitrary" + "alloy-primitives/arbitrary", + "reth-prune-types/arbitrary", + "reth-stages-types/arbitrary" ] optimism = [ "reth-primitives/optimism", diff --git a/crates/storage/provider/Cargo.toml b/crates/storage/provider/Cargo.toml index b93c22cdf..04a0bf429 100644 --- a/crates/storage/provider/Cargo.toml +++ b/crates/storage/provider/Cargo.toml @@ -108,7 +108,8 @@ serde = [ "notify/serde", "parking_lot/serde", "rand/serde", - "revm/serde" + "revm/serde", + "reth-codecs/serde" ] test-utils = [ "reth-db/test-utils", @@ -124,5 +125,7 @@ test-utils = [ "reth-codecs/test-utils", "reth-db-api/test-utils", "reth-trie-db/test-utils", - "revm/test-utils" + "revm/test-utils", + "reth-prune-types/test-utils", + "reth-stages-types/test-utils" ] diff --git a/crates/trie/common/Cargo.toml b/crates/trie/common/Cargo.toml index 2c6ccbfe6..0616e2597 100644 --- a/crates/trie/common/Cargo.toml +++ b/crates/trie/common/Cargo.toml @@ -55,5 +55,6 @@ arbitrary = [ "alloy-consensus/arbitrary", "alloy-primitives/arbitrary", "nybbles/arbitrary", - "revm-primitives/arbitrary" + "revm-primitives/arbitrary", + "reth-codecs/arbitrary" ] diff --git a/crates/trie/common/src/nibbles.rs b/crates/trie/common/src/nibbles.rs index 991fb68f3..cf94f135f 100644 --- a/crates/trie/common/src/nibbles.rs +++ b/crates/trie/common/src/nibbles.rs @@ -19,6 +19,7 @@ pub use nybbles::Nibbles; Deserialize, derive_more::Index, )] +#[cfg_attr(feature = "test-utils", derive(arbitrary::Arbitrary))] pub struct StoredNibbles(pub Nibbles); impl From for StoredNibbles { @@ -74,6 +75,7 @@ impl Compact for StoredNibbles { /// The representation of nibbles of the merkle trie stored in the database. 
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, PartialOrd, Ord, Hash, Deref)] +#[cfg_attr(feature = "test-utils", derive(arbitrary::Arbitrary))] pub struct StoredNibblesSubKey(pub Nibbles); impl From for StoredNibblesSubKey { diff --git a/crates/trie/trie/Cargo.toml b/crates/trie/trie/Cargo.toml index 112e661c0..134a3055c 100644 --- a/crates/trie/trie/Cargo.toml +++ b/crates/trie/trie/Cargo.toml @@ -79,7 +79,8 @@ test-utils = [ "triehash", "reth-trie-common/test-utils", "reth-primitives/test-utils", - "revm/test-utils" + "revm/test-utils", + "reth-stages-types/test-utils" ] [[bench]] From ea4fb26063221f2b173412fa64ee1660741181e1 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Fri, 25 Oct 2024 09:23:51 +0900 Subject: [PATCH 251/425] ci: enable `compact-codec` job (#12056) --- .github/workflows/compact.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/compact.yml b/.github/workflows/compact.yml index c7435220c..63f6f282f 100644 --- a/.github/workflows/compact.yml +++ b/.github/workflows/compact.yml @@ -6,7 +6,11 @@ # 4) deserialize previously generated test vectors on: - workflow_dispatch: + + pull_request: + merge_group: + push: + branches: [main] env: CARGO_TERM_COLOR: always From 965dabacad948030059c937ebab0711c9fa56748 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 25 Oct 2024 05:57:09 +0200 Subject: [PATCH 252/425] chore: add missing helpers to BestTransactions (#12044) --- crates/transaction-pool/src/traits.rs | 18 ++++++++++++++++++ crates/transaction-pool/tests/it/best.rs | 3 ++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index fa5e22ec0..e35912726 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -747,6 +747,15 @@ pub trait BestTransactions: Iterator + Send { /// listen to pool updates. fn no_updates(&mut self); + /// Convenience function for [`Self::no_updates`] that returns the iterator again. + fn without_updates(mut self) -> Self + where + Self: Sized, + { + self.no_updates(); + self + } + /// Skip all blob transactions. /// /// There's only limited blob space available in a block, once exhausted, EIP-4844 transactions @@ -762,6 +771,15 @@ pub trait BestTransactions: Iterator + Send { /// If set to true, no blob transactions will be returned. fn set_skip_blobs(&mut self, skip_blobs: bool); + /// Convenience function for [`Self::skip_blobs`] that returns the iterator again. + fn without_blobs(mut self) -> Self + where + Self: Sized, + { + self.skip_blobs(); + self + } + /// Creates an iterator which uses a closure to determine whether a transaction should be /// returned by the iterator. 
/// diff --git a/crates/transaction-pool/tests/it/best.rs b/crates/transaction-pool/tests/it/best.rs index cd7a93eae..20e833676 100644 --- a/crates/transaction-pool/tests/it/best.rs +++ b/crates/transaction-pool/tests/it/best.rs @@ -5,6 +5,7 @@ use reth_transaction_pool::{noop::NoopTransactionPool, BestTransactions, Transac #[test] fn test_best_transactions() { let noop = NoopTransactionPool::default(); - let mut best = noop.best_transactions().filter_transactions(|_| true); + let mut best = + noop.best_transactions().filter_transactions(|_| true).without_blobs().without_updates(); assert!(best.next().is_none()); } From 674616cab976d3d297566373127c6b10e2fbd3ca Mon Sep 17 00:00:00 2001 From: Panagiotis Ganelis <50522617+PanGan21@users.noreply.github.com> Date: Fri, 25 Oct 2024 05:56:21 +0200 Subject: [PATCH 253/425] chore: change `net::discv5` log target to `discv5` (#12045) --- crates/net/discv5/src/lib.rs | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/crates/net/discv5/src/lib.rs b/crates/net/discv5/src/lib.rs index d4e8e928f..a8154b7bd 100644 --- a/crates/net/discv5/src/lib.rs +++ b/crates/net/discv5/src/lib.rs @@ -167,7 +167,7 @@ impl Discv5 { // let (enr, bc_enr, fork_key, rlpx_ip_mode) = build_local_enr(sk, &discv5_config); - trace!(target: "net::discv5", + trace!(target: "discv5", ?enr, "local ENR" ); @@ -271,7 +271,7 @@ impl Discv5 { // to them over RLPx, to be compatible with EL discv5 implementations that don't // enforce this security measure. - trace!(target: "net::discv5", + trace!(target: "discv5", ?enr, %socket, "discovered unverifiable enr, source socket doesn't match socket advertised in ENR" @@ -296,7 +296,7 @@ impl Discv5 { let node_record = match self.try_into_reachable(enr, socket) { Ok(enr_bc) => enr_bc, Err(err) => { - trace!(target: "net::discv5", + trace!(target: "discv5", %err, ?enr, "discovered peer is unreachable" @@ -308,7 +308,7 @@ impl Discv5 { } }; if let FilterOutcome::Ignore { reason } = self.filter_discovered_peer(enr) { - trace!(target: "net::discv5", + trace!(target: "discv5", ?enr, reason, "filtered out discovered peer" @@ -324,7 +324,7 @@ impl Discv5 { .then(|| self.get_fork_id(enr).ok()) .flatten(); - trace!(target: "net::discv5", + trace!(target: "discv5", ?fork_id, ?enr, "discovered peer" @@ -491,7 +491,7 @@ pub async fn bootstrap( bootstrap_nodes: HashSet, discv5: &Arc, ) -> Result<(), Error> { - trace!(target: "net::discv5", + trace!(target: "discv5", ?bootstrap_nodes, "adding bootstrap nodes .." 
); @@ -508,7 +508,7 @@ pub async fn bootstrap( let discv5 = discv5.clone(); enr_requests.push(async move { if let Err(err) = discv5.request_enr(enode.to_string()).await { - debug!(target: "net::discv5", + debug!(target: "discv5", ?enode, %err, "failed adding boot node" @@ -545,7 +545,7 @@ pub fn spawn_populate_kbuckets_bg( for i in (0..bootstrap_lookup_countdown).rev() { let target = discv5::enr::NodeId::random(); - trace!(target: "net::discv5", + trace!(target: "discv5", %target, bootstrap_boost_runs_countdown=i, lookup_interval=format!("{:#?}", pulse_lookup_interval), @@ -563,7 +563,7 @@ pub fn spawn_populate_kbuckets_bg( // selection (ref kademlia) let target = get_lookup_target(kbucket_index, local_node_id); - trace!(target: "net::discv5", + trace!(target: "discv5", %target, lookup_interval=format!("{:#?}", lookup_interval), "starting periodic lookup query" @@ -628,11 +628,11 @@ pub async fn lookup( ); match discv5.find_node(target).await { - Err(err) => trace!(target: "net::discv5", + Err(err) => trace!(target: "discv5", %err, "lookup query failed" ), - Ok(peers) => trace!(target: "net::discv5", + Ok(peers) => trace!(target: "discv5", target=format!("{:#?}", target), peers_count=peers.len(), peers=format!("[{:#}]", peers.iter() @@ -645,7 +645,7 @@ pub async fn lookup( // `Discv5::connected_peers` can be subset of sessions, not all peers make it // into kbuckets, e.g. incoming sessions from peers with // unreachable enrs - debug!(target: "net::discv5", + debug!(target: "discv5", connected_peers=discv5.connected_peers(), "connected peers in routing table" ); From c3182f2a644c2f363f75bdeeee4c2bc69a7b9faf Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Fri, 25 Oct 2024 06:37:20 +0200 Subject: [PATCH 254/425] primitives-traits: small refac for `IntegerList` and more doc (#12049) --- crates/primitives-traits/src/integer_list.rs | 31 +++++++++++--------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/crates/primitives-traits/src/integer_list.rs b/crates/primitives-traits/src/integer_list.rs index 570c96c9f..682fa0cf8 100644 --- a/crates/primitives-traits/src/integer_list.rs +++ b/crates/primitives-traits/src/integer_list.rs @@ -9,8 +9,16 @@ use serde::{ Deserialize, Deserializer, Serialize, Serializer, }; -/// Uses Roaring Bitmaps to hold a list of integers. It provides really good compression with the -/// capability to access its elements without decoding it. +/// A data structure that uses Roaring Bitmaps to efficiently store a list of integers. +/// +/// This structure provides excellent compression while allowing direct access to individual +/// elements without the need for full decompression. +/// +/// Key features: +/// - Efficient compression: the underlying Roaring Bitmaps significantly reduce memory usage. +/// - Direct access: elements can be accessed or queried without needing to decode the entire list. +/// - [`RoaringTreemap`] backing: internally backed by [`RoaringTreemap`], which supports 64-bit +/// integers. #[derive(Clone, PartialEq, Default, Deref)] pub struct IntegerList(pub RoaringTreemap); @@ -22,12 +30,12 @@ impl fmt::Debug for IntegerList { } impl IntegerList { - /// Creates a new empty `IntegerList`. + /// Creates a new empty [`IntegerList`]. pub fn empty() -> Self { Self(RoaringTreemap::new()) } - /// Creates an `IntegerList` from a list of integers. + /// Creates an [`IntegerList`] from a list of integers. /// /// Returns an error if the list is not pre-sorted. 
pub fn new(list: impl IntoIterator) -> Result { @@ -36,7 +44,7 @@ impl IntegerList { .map_err(|_| IntegerListError::UnsortedInput) } - // Creates an IntegerList from a pre-sorted list of integers. + /// Creates an [`IntegerList`] from a pre-sorted list of integers. /// /// # Panics /// @@ -54,11 +62,7 @@ impl IntegerList { /// Pushes a new integer to the list. pub fn push(&mut self, value: u64) -> Result<(), IntegerListError> { - if self.0.push(value) { - Ok(()) - } else { - Err(IntegerListError::UnsortedInput) - } + self.0.push(value).then_some(()).ok_or(IntegerListError::UnsortedInput) } /// Clears the list. @@ -80,10 +84,9 @@ impl IntegerList { /// Deserializes a sequence of bytes into a proper [`IntegerList`]. pub fn from_bytes(data: &[u8]) -> Result { - Ok(Self( - RoaringTreemap::deserialize_from(data) - .map_err(|_| IntegerListError::FailedToDeserialize)?, - )) + RoaringTreemap::deserialize_from(data) + .map(Self) + .map_err(|_| IntegerListError::FailedToDeserialize) } } From d9889787a777738f0a62d6f933587b64ebf2b2d6 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 25 Oct 2024 09:01:44 +0400 Subject: [PATCH 255/425] feat: improve e2e tests API + feeHistory test (#12058) --- Cargo.lock | 24 ++++++ Cargo.toml | 1 + crates/e2e-test-utils/Cargo.toml | 1 + crates/e2e-test-utils/src/engine_api.rs | 3 +- crates/e2e-test-utils/src/lib.rs | 6 +- crates/e2e-test-utils/src/node.rs | 24 ++---- crates/e2e-test-utils/src/payload.rs | 23 +++-- crates/ethereum/node/Cargo.toml | 3 + crates/ethereum/node/tests/e2e/blobs.rs | 16 ++-- crates/ethereum/node/tests/e2e/dev.rs | 4 +- crates/ethereum/node/tests/e2e/eth.rs | 9 +- crates/ethereum/node/tests/e2e/main.rs | 1 + crates/ethereum/node/tests/e2e/p2p.rs | 7 +- crates/ethereum/node/tests/e2e/rpc.rs | 109 ++++++++++++++++++++++++ crates/optimism/node/tests/e2e/p2p.rs | 2 - crates/optimism/node/tests/e2e/utils.rs | 31 +++---- 16 files changed, 202 insertions(+), 62 deletions(-) create mode 100644 crates/ethereum/node/tests/e2e/rpc.rs diff --git a/Cargo.lock b/Cargo.lock index a73a42315..d3aa0b295 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -128,6 +128,26 @@ dependencies = [ "serde_with", ] +[[package]] +name = "alloy-contract" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460ab80ce4bda1c80bcf96fe7460520476f2c7b734581c6567fac2708e2a60ef" +dependencies = [ + "alloy-dyn-abi", + "alloy-json-abi", + "alloy-network", + "alloy-network-primitives", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-types-eth", + "alloy-sol-types", + "alloy-transport", + "futures", + "futures-util", + "thiserror", +] + [[package]] name = "alloy-dyn-abi" version = "0.8.8" @@ -7018,6 +7038,7 @@ dependencies = [ "alloy-rpc-types", "alloy-signer", "alloy-signer-local", + "derive_more 1.0.0", "eyre", "futures-util", "jsonrpsee", @@ -7971,10 +7992,13 @@ name = "reth-node-ethereum" version = "1.1.0" dependencies = [ "alloy-consensus", + "alloy-contract", + "alloy-eips", "alloy-genesis", "alloy-primitives", "alloy-provider", "alloy-signer", + "alloy-sol-types", "eyre", "futures", "rand 0.8.5", diff --git a/Cargo.toml b/Cargo.toml index e23efdeb3..d01ee01ce 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -433,6 +433,7 @@ alloy-sol-types = "0.8.0" alloy-trie = { version = "0.7", default-features = false } alloy-consensus = { version = "0.5.4", default-features = false } +alloy-contract = { version = "0.5.4", default-features = false } alloy-eips = { version = "0.5.4", default-features = false } alloy-genesis = { version = "0.5.4", 
default-features = false } alloy-json-rpc = { version = "0.5.4", default-features = false } diff --git a/crates/e2e-test-utils/Cargo.toml b/crates/e2e-test-utils/Cargo.toml index 04f031daa..67bb74555 100644 --- a/crates/e2e-test-utils/Cargo.toml +++ b/crates/e2e-test-utils/Cargo.toml @@ -45,3 +45,4 @@ alloy-rpc-types.workspace = true alloy-network.workspace = true alloy-consensus = { workspace = true, features = ["kzg"] } tracing.workspace = true +derive_more.workspace = true diff --git a/crates/e2e-test-utils/src/engine_api.rs b/crates/e2e-test-utils/src/engine_api.rs index 5027b2620..729205211 100644 --- a/crates/e2e-test-utils/src/engine_api.rs +++ b/crates/e2e-test-utils/src/engine_api.rs @@ -50,12 +50,13 @@ impl EngineApiTestContext, ) -> eyre::Result where E::ExecutionPayloadEnvelopeV3: From + PayloadEnvelopeExt, E::ExecutionPayloadEnvelopeV4: From + PayloadEnvelopeExt, { + let versioned_hashes = + payload.block().blob_versioned_hashes_iter().copied().collect::>(); // submit payload to engine api let submission = if self .chain_spec diff --git a/crates/e2e-test-utils/src/lib.rs b/crates/e2e-test-utils/src/lib.rs index f5ee1e5e6..1e9717d08 100644 --- a/crates/e2e-test-utils/src/lib.rs +++ b/crates/e2e-test-utils/src/lib.rs @@ -49,6 +49,7 @@ pub async fn setup( num_nodes: usize, chain_spec: Arc, is_dev: bool, + attributes_generator: impl Fn(u64) -> <::Engine as PayloadTypes>::PayloadBuilderAttributes + Copy + 'static, ) -> eyre::Result<(Vec>, TaskManager, Wallet)> where N: Default + Node> + NodeTypesWithEngine, @@ -84,7 +85,7 @@ where .launch() .await?; - let mut node = NodeTestContext::new(node).await?; + let mut node = NodeTestContext::new(node, attributes_generator).await?; // Connect each node in a chain. if let Some(previous_node) = nodes.last_mut() { @@ -109,6 +110,7 @@ pub async fn setup_engine( num_nodes: usize, chain_spec: Arc, is_dev: bool, + attributes_generator: impl Fn(u64) -> <::Engine as PayloadTypes>::PayloadBuilderAttributes + Copy + 'static, ) -> eyre::Result<( Vec>>>, TaskManager, @@ -166,7 +168,7 @@ where }) .await?; - let mut node = NodeTestContext::new(node).await?; + let mut node = NodeTestContext::new(node, attributes_generator).await?; // Connect each node in a chain. 
if let Some(previous_node) = nodes.last_mut() { diff --git a/crates/e2e-test-utils/src/node.rs b/crates/e2e-test-utils/src/node.rs index f40072018..07df36a33 100644 --- a/crates/e2e-test-utils/src/node.rs +++ b/crates/e2e-test-utils/src/node.rs @@ -59,12 +59,15 @@ where AddOns: RethRpcAddOns, { /// Creates a new test node - pub async fn new(node: FullNode) -> eyre::Result { + pub async fn new( + node: FullNode, + attributes_generator: impl Fn(u64) -> Engine::PayloadBuilderAttributes + 'static, + ) -> eyre::Result { let builder = node.payload_builder.clone(); Ok(Self { inner: node.clone(), - payload: PayloadTestContext::new(builder).await?, + payload: PayloadTestContext::new(builder, attributes_generator).await?, network: NetworkTestContext::new(node.network.clone()), engine_api: EngineApiTestContext { chain_spec: node.chain_spec(), @@ -90,7 +93,6 @@ where &mut self, length: u64, tx_generator: impl Fn(u64) -> Pin>>, - attributes_generator: impl Fn(u64) -> Engine::PayloadBuilderAttributes + Copy, ) -> eyre::Result> where Engine::ExecutionPayloadEnvelopeV3: From + PayloadEnvelopeExt, @@ -101,7 +103,7 @@ where for i in 0..length { let raw_tx = tx_generator(i).await; let tx_hash = self.rpc.inject_tx(raw_tx).await?; - let (payload, eth_attr) = self.advance_block(vec![], attributes_generator).await?; + let (payload, eth_attr) = self.advance_block().await?; let block_hash = payload.block().hash(); let block_number = payload.block().number; self.assert_new_block(tx_hash, block_hash, block_number).await?; @@ -116,14 +118,13 @@ where /// It triggers the resolve payload via engine api and expects the built payload event. pub async fn new_payload( &mut self, - attributes_generator: impl Fn(u64) -> Engine::PayloadBuilderAttributes, ) -> eyre::Result<(Engine::BuiltPayload, Engine::PayloadBuilderAttributes)> where ::ExecutionPayloadEnvelopeV3: From + PayloadEnvelopeExt, { // trigger new payload building draining the pool - let eth_attr = self.payload.new_payload(attributes_generator).await.unwrap(); + let eth_attr = self.payload.new_payload().await.unwrap(); // first event is the payload attributes self.payload.expect_attr_event(eth_attr.clone()).await?; // wait for the payload builder to have finished building @@ -137,8 +138,6 @@ where /// Advances the node forward one block pub async fn advance_block( &mut self, - versioned_hashes: Vec, - attributes_generator: impl Fn(u64) -> Engine::PayloadBuilderAttributes, ) -> eyre::Result<(Engine::BuiltPayload, Engine::PayloadBuilderAttributes)> where ::ExecutionPayloadEnvelopeV3: @@ -146,16 +145,11 @@ where ::ExecutionPayloadEnvelopeV4: From + PayloadEnvelopeExt, { - let (payload, eth_attr) = self.new_payload(attributes_generator).await?; + let (payload, eth_attr) = self.new_payload().await?; let block_hash = self .engine_api - .submit_payload( - payload.clone(), - eth_attr.clone(), - PayloadStatusEnum::Valid, - versioned_hashes, - ) + .submit_payload(payload.clone(), eth_attr.clone(), PayloadStatusEnum::Valid) .await?; // trigger forkchoice update via engine api to commit the block to the blockchain diff --git a/crates/e2e-test-utils/src/payload.rs b/crates/e2e-test-utils/src/payload.rs index a5e4f56ac..29aa11895 100644 --- a/crates/e2e-test-utils/src/payload.rs +++ b/crates/e2e-test-utils/src/payload.rs @@ -5,29 +5,36 @@ use reth_payload_primitives::{Events, PayloadBuilder, PayloadTypes}; use tokio_stream::wrappers::BroadcastStream; /// Helper for payload operations -#[derive(Debug)] +#[derive(derive_more::Debug)] pub struct PayloadTestContext { pub 
payload_event_stream: BroadcastStream>, payload_builder: PayloadBuilderHandle, pub timestamp: u64, + #[debug(skip)] + attributes_generator: Box T::PayloadBuilderAttributes>, } impl PayloadTestContext { /// Creates a new payload helper - pub async fn new(payload_builder: PayloadBuilderHandle) -> eyre::Result { + pub async fn new( + payload_builder: PayloadBuilderHandle, + attributes_generator: impl Fn(u64) -> T::PayloadBuilderAttributes + 'static, + ) -> eyre::Result { let payload_events = payload_builder.subscribe().await?; let payload_event_stream = payload_events.into_stream(); // Cancun timestamp - Ok(Self { payload_event_stream, payload_builder, timestamp: 1710338135 }) + Ok(Self { + payload_event_stream, + payload_builder, + timestamp: 1710338135, + attributes_generator: Box::new(attributes_generator), + }) } /// Creates a new payload job from static attributes - pub async fn new_payload( - &mut self, - attributes_generator: impl Fn(u64) -> T::PayloadBuilderAttributes, - ) -> eyre::Result { + pub async fn new_payload(&mut self) -> eyre::Result { self.timestamp += 1; - let attributes = attributes_generator(self.timestamp); + let attributes = (self.attributes_generator)(self.timestamp); self.payload_builder.send_new_payload(attributes.clone()).await.unwrap()?; Ok(attributes) } diff --git a/crates/ethereum/node/Cargo.toml b/crates/ethereum/node/Cargo.toml index e7784637a..98224b99e 100644 --- a/crates/ethereum/node/Cargo.toml +++ b/crates/ethereum/node/Cargo.toml @@ -55,6 +55,9 @@ alloy-consensus.workspace = true alloy-provider.workspace = true rand.workspace = true alloy-signer.workspace = true +alloy-eips.workspace = true +alloy-sol-types.workspace = true +alloy-contract.workspace = true [features] default = [] diff --git a/crates/ethereum/node/tests/e2e/blobs.rs b/crates/ethereum/node/tests/e2e/blobs.rs index b4d9a532a..976727bc8 100644 --- a/crates/ethereum/node/tests/e2e/blobs.rs +++ b/crates/ethereum/node/tests/e2e/blobs.rs @@ -41,7 +41,7 @@ async fn can_handle_blobs() -> eyre::Result<()> { .launch() .await?; - let mut node = NodeTestContext::new(node).await?; + let mut node = NodeTestContext::new(node, eth_payload_attributes).await?; let wallets = Wallet::new(2).gen(); let blob_wallet = wallets.first().unwrap(); @@ -51,7 +51,7 @@ async fn can_handle_blobs() -> eyre::Result<()> { let raw_tx = TransactionTestContext::transfer_tx_bytes(1, second_wallet.clone()).await; let tx_hash = node.rpc.inject_tx(raw_tx).await?; // build payload with normal tx - let (payload, attributes) = node.new_payload(eth_payload_attributes).await?; + let (payload, attributes) = node.new_payload().await?; // clean the pool node.inner.pool.remove_transactions(vec![tx_hash]); @@ -64,16 +64,14 @@ async fn can_handle_blobs() -> eyre::Result<()> { // fetch it from rpc let envelope = node.rpc.envelope_by_hash(blob_tx_hash).await?; // validate sidecar - let versioned_hashes = TransactionTestContext::validate_sidecar(envelope); + TransactionTestContext::validate_sidecar(envelope); // build a payload - let (blob_payload, blob_attr) = node.new_payload(eth_payload_attributes).await?; + let (blob_payload, blob_attr) = node.new_payload().await?; // submit the blob payload - let blob_block_hash = node - .engine_api - .submit_payload(blob_payload, blob_attr, PayloadStatusEnum::Valid, versioned_hashes.clone()) - .await?; + let blob_block_hash = + node.engine_api.submit_payload(blob_payload, blob_attr, PayloadStatusEnum::Valid).await?; let (_, _) = tokio::join!( // send fcu with blob hash @@ -83,7 +81,7 @@ async fn 
can_handle_blobs() -> eyre::Result<()> { ); // submit normal payload - node.engine_api.submit_payload(payload, attributes, PayloadStatusEnum::Valid, vec![]).await?; + node.engine_api.submit_payload(payload, attributes, PayloadStatusEnum::Valid).await?; tokio::time::sleep(std::time::Duration::from_secs(3)).await; diff --git a/crates/ethereum/node/tests/e2e/dev.rs b/crates/ethereum/node/tests/e2e/dev.rs index cad2fb34e..ead438b5a 100644 --- a/crates/ethereum/node/tests/e2e/dev.rs +++ b/crates/ethereum/node/tests/e2e/dev.rs @@ -1,5 +1,6 @@ use std::sync::Arc; +use crate::utils::eth_payload_attributes; use alloy_genesis::Genesis; use alloy_primitives::{b256, hex}; use futures::StreamExt; @@ -17,7 +18,8 @@ use reth_tasks::TaskManager; #[tokio::test] async fn can_run_dev_node() -> eyre::Result<()> { reth_tracing::init_test_tracing(); - let (mut nodes, _tasks, _) = setup::(1, custom_chain(), true).await?; + let (mut nodes, _tasks, _) = + setup::(1, custom_chain(), true, eth_payload_attributes).await?; assert_chain_advances(nodes.pop().unwrap().inner).await; Ok(()) diff --git a/crates/ethereum/node/tests/e2e/eth.rs b/crates/ethereum/node/tests/e2e/eth.rs index 14bfb92d4..cb7517c0c 100644 --- a/crates/ethereum/node/tests/e2e/eth.rs +++ b/crates/ethereum/node/tests/e2e/eth.rs @@ -26,6 +26,7 @@ async fn can_run_eth_node() -> eyre::Result<()> { .build(), ), false, + eth_payload_attributes, ) .await?; @@ -36,7 +37,7 @@ async fn can_run_eth_node() -> eyre::Result<()> { let tx_hash = node.rpc.inject_tx(raw_tx).await?; // make the node advance - let (payload, _) = node.advance_block(vec![], eth_payload_attributes).await?; + let (payload, _) = node.advance_block().await?; let block_hash = payload.block().hash(); let block_number = payload.block().number; @@ -74,7 +75,7 @@ async fn can_run_eth_node_with_auth_engine_api_over_ipc() -> eyre::Result<()> { .node(EthereumNode::default()) .launch() .await?; - let mut node = NodeTestContext::new(node).await?; + let mut node = NodeTestContext::new(node, eth_payload_attributes).await?; // Configure wallet from test mnemonic and create dummy transfer tx let wallet = Wallet::default(); @@ -84,7 +85,7 @@ async fn can_run_eth_node_with_auth_engine_api_over_ipc() -> eyre::Result<()> { let tx_hash = node.rpc.inject_tx(raw_tx).await?; // make the node advance - let (payload, _) = node.advance_block(vec![], eth_payload_attributes).await?; + let (payload, _) = node.advance_block().await?; let block_hash = payload.block().hash(); let block_number = payload.block().number; @@ -120,7 +121,7 @@ async fn test_failed_run_eth_node_with_no_auth_engine_api_over_ipc_opts() -> eyr .launch() .await?; - let node = NodeTestContext::new(node).await?; + let node = NodeTestContext::new(node, eth_payload_attributes).await?; // Ensure that the engine api client is not available let client = node.inner.engine_ipc_client().await; diff --git a/crates/ethereum/node/tests/e2e/main.rs b/crates/ethereum/node/tests/e2e/main.rs index 5dff7be17..4ed8ac5fc 100644 --- a/crates/ethereum/node/tests/e2e/main.rs +++ b/crates/ethereum/node/tests/e2e/main.rs @@ -4,6 +4,7 @@ mod blobs; mod dev; mod eth; mod p2p; +mod rpc; mod utils; const fn main() {} diff --git a/crates/ethereum/node/tests/e2e/p2p.rs b/crates/ethereum/node/tests/e2e/p2p.rs index 0fae23a08..180b88bbd 100644 --- a/crates/ethereum/node/tests/e2e/p2p.rs +++ b/crates/ethereum/node/tests/e2e/p2p.rs @@ -30,6 +30,7 @@ async fn can_sync() -> eyre::Result<()> { .build(), ), false, + eth_payload_attributes, ) .await?; @@ -41,7 +42,7 @@ async fn can_sync() 
-> eyre::Result<()> { let tx_hash = first_node.rpc.inject_tx(raw_tx).await?; // make the node advance - let (payload, _) = first_node.advance_block(vec![], eth_payload_attributes).await?; + let (payload, _) = first_node.advance_block().await?; let block_hash = payload.block().hash(); let block_number = payload.block().number; @@ -76,7 +77,7 @@ async fn e2e_test_send_transactions() -> eyre::Result<()> { ); let (mut nodes, _tasks, wallet) = - setup_engine::(2, chain_spec.clone(), false).await?; + setup_engine::(2, chain_spec.clone(), false, eth_payload_attributes).await?; let mut node = nodes.pop().unwrap(); let signers = wallet.gen(); let provider = ProviderBuilder::new().with_recommended_fillers().on_http(node.rpc_url()); @@ -139,7 +140,7 @@ async fn e2e_test_send_transactions() -> eyre::Result<()> { pending.push(provider.send_tx_envelope(tx).await?); } - let (payload, _) = node.advance_block(vec![], eth_payload_attributes).await?; + let (payload, _) = node.advance_block().await?; assert!(payload.block().raw_transactions().len() == tx_count); for pending in pending { diff --git a/crates/ethereum/node/tests/e2e/rpc.rs b/crates/ethereum/node/tests/e2e/rpc.rs new file mode 100644 index 000000000..ddf3d5cba --- /dev/null +++ b/crates/ethereum/node/tests/e2e/rpc.rs @@ -0,0 +1,109 @@ +use crate::utils::eth_payload_attributes; +use alloy_eips::calc_next_block_base_fee; +use alloy_primitives::U256; +use alloy_provider::{network::EthereumWallet, Provider, ProviderBuilder}; +use rand::{rngs::StdRng, Rng, SeedableRng}; +use reth_chainspec::{ChainSpecBuilder, MAINNET}; +use reth_e2e_test_utils::setup_engine; +use reth_node_ethereum::EthereumNode; +use std::sync::Arc; + +alloy_sol_types::sol! { + #[sol(rpc, bytecode = "6080604052348015600f57600080fd5b5060405160db38038060db833981016040819052602a91607a565b60005b818110156074576040805143602082015290810182905260009060600160408051601f19818403018152919052805160209091012080555080606d816092565b915050602d565b505060b8565b600060208284031215608b57600080fd5b5051919050565b60006001820160b157634e487b7160e01b600052601160045260246000fd5b5060010190565b60168060c56000396000f3fe6080604052600080fdfea164736f6c6343000810000a")] + contract GasWaster { + constructor(uint256 iterations) { + for (uint256 i = 0; i < iterations; i++) { + bytes32 slot = keccak256(abi.encode(block.number, i)); + assembly { + sstore(slot, slot) + } + } + } + } +} + +#[tokio::test] +async fn test_fee_history() -> eyre::Result<()> { + reth_tracing::init_test_tracing(); + + let seed: [u8; 32] = rand::thread_rng().gen(); + let mut rng = StdRng::from_seed(seed); + println!("Seed: {:?}", seed); + + let chain_spec = Arc::new( + ChainSpecBuilder::default() + .chain(MAINNET.chain) + .genesis(serde_json::from_str(include_str!("../assets/genesis.json")).unwrap()) + .cancun_activated() + .build(), + ); + + let (mut nodes, _tasks, wallet) = + setup_engine::(1, chain_spec.clone(), false, eth_payload_attributes).await?; + let mut node = nodes.pop().unwrap(); + let provider = ProviderBuilder::new() + .with_recommended_fillers() + .wallet(EthereumWallet::new(wallet.gen().swap_remove(0))) + .on_http(node.rpc_url()); + + let fee_history = provider.get_fee_history(10, 0_u64.into(), &[]).await?; + + let genesis_base_fee = chain_spec.initial_base_fee().unwrap() as u128; + let expected_first_base_fee = genesis_base_fee - + genesis_base_fee / chain_spec.base_fee_params_at_block(0).max_change_denominator; + assert_eq!(fee_history.base_fee_per_gas[0], genesis_base_fee); + assert_eq!(fee_history.base_fee_per_gas[1], 
expected_first_base_fee,); + + // Spend some gas + let builder = GasWaster::deploy_builder(&provider, U256::from(500)).send().await?; + node.advance_block().await?; + let receipt = builder.get_receipt().await?; + assert!(receipt.status()); + + let block = provider.get_block_by_number(1.into(), false).await?.unwrap(); + assert_eq!(block.header.gas_used as u128, receipt.gas_used,); + assert_eq!(block.header.base_fee_per_gas.unwrap(), expected_first_base_fee as u64); + + for _ in 0..100 { + let _ = + GasWaster::deploy_builder(&provider, U256::from(rng.gen_range(0..1000))).send().await?; + + node.advance_block().await?; + } + + let latest_block = provider.get_block_number().await?; + + for _ in 0..100 { + let latest_block = rng.gen_range(0..=latest_block); + let block_count = rng.gen_range(1..=(latest_block + 1)); + + let fee_history = provider.get_fee_history(block_count, latest_block.into(), &[]).await?; + + let mut prev_header = provider + .get_block_by_number((latest_block + 1 - block_count).into(), false) + .await? + .unwrap() + .header; + for block in (latest_block + 2 - block_count)..=latest_block { + let expected_base_fee = calc_next_block_base_fee( + prev_header.gas_used, + prev_header.gas_limit, + prev_header.base_fee_per_gas.unwrap(), + chain_spec.base_fee_params_at_block(block), + ); + + let header = provider.get_block_by_number(block.into(), false).await?.unwrap().header; + + assert_eq!(header.base_fee_per_gas.unwrap(), expected_base_fee as u64); + assert_eq!( + header.base_fee_per_gas.unwrap(), + fee_history.base_fee_per_gas[(block + block_count - 1 - latest_block) as usize] + as u64 + ); + + prev_header = header; + } + } + + Ok(()) +} diff --git a/crates/optimism/node/tests/e2e/p2p.rs b/crates/optimism/node/tests/e2e/p2p.rs index ebd35cc8a..30affa9ba 100644 --- a/crates/optimism/node/tests/e2e/p2p.rs +++ b/crates/optimism/node/tests/e2e/p2p.rs @@ -51,7 +51,6 @@ async fn can_sync() -> eyre::Result<()> { side_payload_chain[0].0.clone(), side_payload_chain[0].1.clone(), PayloadStatusEnum::Valid, - Default::default(), ) .await; @@ -81,7 +80,6 @@ async fn can_sync() -> eyre::Result<()> { } .to_string(), }, - Default::default(), ) .await; diff --git a/crates/optimism/node/tests/e2e/utils.rs b/crates/optimism/node/tests/e2e/utils.rs index 8ea8df380..48175e5b2 100644 --- a/crates/optimism/node/tests/e2e/utils.rs +++ b/crates/optimism/node/tests/e2e/utils.rs @@ -21,6 +21,7 @@ pub(crate) async fn setup(num_nodes: usize) -> eyre::Result<(Vec, TaskMa num_nodes, Arc::new(OpChainSpecBuilder::base_mainnet().genesis(genesis).ecotone_activated().build()), false, + optimism_payload_attributes, ) .await } @@ -31,23 +32,19 @@ pub(crate) async fn advance_chain( node: &mut OpNode, wallet: Arc>, ) -> eyre::Result> { - node.advance( - length as u64, - |_| { - let wallet = wallet.clone(); - Box::pin(async move { - let mut wallet = wallet.lock().await; - let tx_fut = TransactionTestContext::optimism_l1_block_info_tx( - wallet.chain_id, - wallet.inner.clone(), - wallet.inner_nonce, - ); - wallet.inner_nonce += 1; - tx_fut.await - }) - }, - optimism_payload_attributes, - ) + node.advance(length as u64, |_| { + let wallet = wallet.clone(); + Box::pin(async move { + let mut wallet = wallet.lock().await; + let tx_fut = TransactionTestContext::optimism_l1_block_info_tx( + wallet.chain_id, + wallet.inner.clone(), + wallet.inner_nonce, + ); + wallet.inner_nonce += 1; + tx_fut.await + }) + }) .await } From 5a5ec73c37d2f84e27db7bc86d5c7893f641ca87 Mon Sep 17 00:00:00 2001 From: tedison 
<76473430+edisontim@users.noreply.github.com> Date: Fri, 25 Oct 2024 01:02:43 -0400 Subject: [PATCH 256/425] Change return type of ReceiptBuilder (#11987) --- crates/rpc/rpc-eth-types/src/receipt.rs | 17 ++++------------- crates/rpc/rpc/src/eth/helpers/block.rs | 3 ++- crates/rpc/rpc/src/eth/helpers/receipt.rs | 3 ++- 3 files changed, 8 insertions(+), 15 deletions(-) diff --git a/crates/rpc/rpc-eth-types/src/receipt.rs b/crates/rpc/rpc-eth-types/src/receipt.rs index c3232f238..198ca79aa 100644 --- a/crates/rpc/rpc-eth-types/src/receipt.rs +++ b/crates/rpc/rpc-eth-types/src/receipt.rs @@ -2,10 +2,8 @@ use alloy_consensus::Transaction; use alloy_primitives::{Address, TxKind}; -use alloy_rpc_types::{ - AnyReceiptEnvelope, AnyTransactionReceipt, Log, ReceiptWithBloom, TransactionReceipt, -}; -use alloy_serde::{OtherFields, WithOtherFields}; +use alloy_rpc_types::{AnyReceiptEnvelope, Log, ReceiptWithBloom, TransactionReceipt}; +use alloy_serde::OtherFields; use reth_primitives::{Receipt, TransactionMeta, TransactionSigned}; use revm_primitives::calc_blob_gasprice; @@ -111,15 +109,8 @@ impl ReceiptBuilder { Ok(Self { base, other: Default::default() }) } - /// Adds fields to response body. - pub fn add_other_fields(mut self, mut fields: OtherFields) -> Self { - self.other.append(&mut fields); - self - } - /// Builds a receipt response from the base response body, and any set additional fields. - pub fn build(self) -> AnyTransactionReceipt { - let Self { base, other } = self; - WithOtherFields { inner: base, other } + pub fn build(self) -> TransactionReceipt> { + self.base } } diff --git a/crates/rpc/rpc/src/eth/helpers/block.rs b/crates/rpc/rpc/src/eth/helpers/block.rs index b2ff30b88..b29a24c38 100644 --- a/crates/rpc/rpc/src/eth/helpers/block.rs +++ b/crates/rpc/rpc/src/eth/helpers/block.rs @@ -1,6 +1,7 @@ //! Contains RPC handler implementations specific to blocks. use alloy_rpc_types::{AnyTransactionReceipt, BlockId}; +use alloy_serde::WithOtherFields; use reth_primitives::TransactionMeta; use reth_provider::{BlockReaderIdExt, HeaderProvider}; use reth_rpc_eth_api::{ @@ -55,9 +56,9 @@ where excess_blob_gas, timestamp, }; - ReceiptBuilder::new(&tx, meta, receipt, &receipts) .map(|builder| builder.build()) + .map(WithOtherFields::new) }) .collect::, Self::Error>>() .map(Some) diff --git a/crates/rpc/rpc/src/eth/helpers/receipt.rs b/crates/rpc/rpc/src/eth/helpers/receipt.rs index 2ac360944..570ec4fa3 100644 --- a/crates/rpc/rpc/src/eth/helpers/receipt.rs +++ b/crates/rpc/rpc/src/eth/helpers/receipt.rs @@ -1,5 +1,6 @@ //! Builds an RPC receipt response w.r.t. data layout of network. +use alloy_serde::WithOtherFields; use reth_primitives::{Receipt, TransactionMeta, TransactionSigned}; use reth_rpc_eth_api::{helpers::LoadReceipt, FromEthApiError, RpcReceipt}; use reth_rpc_eth_types::{EthApiError, EthStateCache, ReceiptBuilder}; @@ -30,6 +31,6 @@ where .map_err(Self::Error::from_eth_err)? 
.ok_or(EthApiError::HeaderNotFound(hash.into()))?; - Ok(ReceiptBuilder::new(&tx, meta, &receipt, &all_receipts)?.build()) + Ok(WithOtherFields::new(ReceiptBuilder::new(&tx, meta, &receipt, &all_receipts)?.build())) } } From 269d705c706479cebe303bd527d84a900b068070 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 25 Oct 2024 10:38:07 +0200 Subject: [PATCH 257/425] test: ensure we acquire file lock in tests (#12064) --- crates/storage/db/src/lockfile.rs | 51 ++++++++++++++++++------------- 1 file changed, 29 insertions(+), 22 deletions(-) diff --git a/crates/storage/db/src/lockfile.rs b/crates/storage/db/src/lockfile.rs index 6dc063a16..a87ab7393 100644 --- a/crates/storage/db/src/lockfile.rs +++ b/crates/storage/db/src/lockfile.rs @@ -30,31 +30,35 @@ impl StorageLock { /// Note: In-process exclusivity is not on scope. If called from the same process (or another /// with the same PID), it will succeed. pub fn try_acquire(path: &Path) -> Result { - let file_path = path.join(LOCKFILE_NAME); - #[cfg(feature = "disable-lock")] { + let file_path = path.join(LOCKFILE_NAME); // Too expensive for ef-tests to write/read lock to/from disk. Ok(Self(Arc::new(StorageLockInner { file_path }))) } #[cfg(not(feature = "disable-lock"))] - { - if let Some(process_lock) = ProcessUID::parse(&file_path)? { - if process_lock.pid != (process::id() as usize) && process_lock.is_active() { - error!( - target: "reth::db::lockfile", - path = ?file_path, - pid = process_lock.pid, - start_time = process_lock.start_time, - "Storage lock already taken." - ); - return Err(StorageLockError::Taken(process_lock.pid)) - } - } + Self::try_acquire_file_lock(path) + } - Ok(Self(Arc::new(StorageLockInner::new(file_path)?))) + /// Acquire a file write lock. + #[cfg(any(test, not(feature = "disable-lock")))] + fn try_acquire_file_lock(path: &Path) -> Result { + let file_path = path.join(LOCKFILE_NAME); + if let Some(process_lock) = ProcessUID::parse(&file_path)? { + if process_lock.pid != (process::id() as usize) && process_lock.is_active() { + error!( + target: "reth::db::lockfile", + path = ?file_path, + pid = process_lock.pid, + start_time = process_lock.start_time, + "Storage lock already taken." + ); + return Err(StorageLockError::Taken(process_lock.pid)) + } } + + Ok(Self(Arc::new(StorageLockInner::new(file_path)?))) } } @@ -164,10 +168,10 @@ mod tests { let temp_dir = tempfile::tempdir().unwrap(); - let lock = StorageLock::try_acquire(temp_dir.path()).unwrap(); + let lock = StorageLock::try_acquire_file_lock(temp_dir.path()).unwrap(); // Same process can re-acquire the lock - assert_eq!(Ok(lock.clone()), StorageLock::try_acquire(temp_dir.path())); + assert_eq!(Ok(lock.clone()), StorageLock::try_acquire_file_lock(temp_dir.path())); // A lock of a non existent PID can be acquired. let lock_file = temp_dir.path().join(LOCKFILE_NAME); @@ -177,18 +181,21 @@ mod tests { fake_pid += 1; } ProcessUID { pid: fake_pid, start_time: u64::MAX }.write(&lock_file).unwrap(); - assert_eq!(Ok(lock.clone()), StorageLock::try_acquire(temp_dir.path())); + assert_eq!(Ok(lock.clone()), StorageLock::try_acquire_file_lock(temp_dir.path())); let mut pid_1 = ProcessUID::new(1).unwrap(); // If a parsed `ProcessUID` exists, the lock can NOT be acquired. 
pid_1.write(&lock_file).unwrap(); - assert_eq!(Err(StorageLockError::Taken(1)), StorageLock::try_acquire(temp_dir.path())); + assert_eq!( + Err(StorageLockError::Taken(1)), + StorageLock::try_acquire_file_lock(temp_dir.path()) + ); // A lock of a different but existing PID can be acquired ONLY IF the start_time differs. pid_1.start_time += 1; pid_1.write(&lock_file).unwrap(); - assert_eq!(Ok(lock), StorageLock::try_acquire(temp_dir.path())); + assert_eq!(Ok(lock), StorageLock::try_acquire_file_lock(temp_dir.path())); } #[test] @@ -198,7 +205,7 @@ mod tests { let temp_dir = tempfile::tempdir().unwrap(); let lock_file = temp_dir.path().join(LOCKFILE_NAME); - let lock = StorageLock::try_acquire(temp_dir.path()).unwrap(); + let lock = StorageLock::try_acquire_file_lock(temp_dir.path()).unwrap(); assert!(lock_file.exists()); drop(lock); From 2ae7ee51e0beb1920d4ab29e3736756321ccdee0 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Fri, 25 Oct 2024 19:13:46 +0900 Subject: [PATCH 258/425] fix: increase `arbitrary::Unstructured` buffer size if `NotEnoughData` is thrown (#12069) --- crates/cli/commands/src/test_vectors/compact.rs | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/crates/cli/commands/src/test_vectors/compact.rs b/crates/cli/commands/src/test_vectors/compact.rs index cda7d5bd5..162ee1cea 100644 --- a/crates/cli/commands/src/test_vectors/compact.rs +++ b/crates/cli/commands/src/test_vectors/compact.rs @@ -191,7 +191,21 @@ where runner.rng().fill_bytes(&mut bytes); compact_buffer.clear(); - let obj = T::arbitrary(&mut arbitrary::Unstructured::new(&bytes))?; + // Sometimes type T, might require extra arbitrary data, so we retry it a few times. + let mut tries = 0; + let obj = loop { + match T::arbitrary(&mut arbitrary::Unstructured::new(&bytes)) { + Ok(obj) => break obj, + Err(err) => { + if tries < 5 && matches!(err, arbitrary::Error::NotEnoughData) { + tries += 1; + bytes.extend(std::iter::repeat(0u8).take(256)); + } else { + return Err(err)? 
+ } + } + } + }; let res = obj.to_compact(&mut compact_buffer); if IDENTIFIER_TYPE.contains(&type_name) { From a87d654c55eacc8f4878affd671e08df7fa536a6 Mon Sep 17 00:00:00 2001 From: tedison <76473430+edisontim@users.noreply.github.com> Date: Fri, 25 Oct 2024 07:48:58 -0400 Subject: [PATCH 259/425] feat: introduce iterator for default_ethereum_payload function (#11978) --- crates/ethereum/payload/src/lib.rs | 46 ++++++++++++++++++++---------- 1 file changed, 31 insertions(+), 15 deletions(-) diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 951a909b9..bb611441f 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -33,7 +33,8 @@ use reth_primitives::{ use reth_provider::{ChainSpecProvider, StateProviderFactory}; use reth_revm::database::StateProviderDatabase; use reth_transaction_pool::{ - noop::NoopTransactionPool, BestTransactionsAttributes, TransactionPool, + noop::NoopTransactionPool, BestTransactions, BestTransactionsAttributes, TransactionPool, + ValidPoolTransaction, }; use reth_trie::HashedPostState; use revm::{ @@ -45,6 +46,10 @@ use revm_primitives::calc_excess_blob_gas; use std::sync::Arc; use tracing::{debug, trace, warn}; +type BestTransactionsIter = Box< + dyn BestTransactions::Transaction>>>, +>; + /// Ethereum payload builder #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct EthereumPayloadBuilder { @@ -94,7 +99,11 @@ where args: BuildArguments, ) -> Result, PayloadBuilderError> { let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_block); - default_ethereum_payload(self.evm_config.clone(), args, cfg_env, block_env) + + let pool = args.pool.clone(); + default_ethereum_payload(self.evm_config.clone(), args, cfg_env, block_env, |attributes| { + pool.best_transactions_with_attributes(attributes) + }) } fn build_empty_payload( @@ -102,19 +111,25 @@ where client: &Client, config: PayloadConfig, ) -> Result { - let args = BuildArguments { + let args = BuildArguments::new( client, - config, // we use defaults here because for the empty payload we don't need to execute anything - pool: NoopTransactionPool::default(), - cached_reads: Default::default(), - cancel: Default::default(), - best_payload: None, - }; + NoopTransactionPool::default(), + Default::default(), + config, + Default::default(), + None, + ); + let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_block); - default_ethereum_payload(self.evm_config.clone(), args, cfg_env, block_env)? - .into_payload() - .ok_or_else(|| PayloadBuilderError::MissingPayload) + + let pool = args.pool.clone(); + + default_ethereum_payload(self.evm_config.clone(), args, cfg_env, block_env, |attributes| { + pool.best_transactions_with_attributes(attributes) + })? + .into_payload() + .ok_or_else(|| PayloadBuilderError::MissingPayload) } } @@ -124,16 +139,18 @@ where /// and configuration, this function creates a transaction payload. Returns /// a result indicating success with the payload or an error in case of failure. #[inline] -pub fn default_ethereum_payload( +pub fn default_ethereum_payload( evm_config: EvmConfig, args: BuildArguments, initialized_cfg: CfgEnvWithHandlerCfg, initialized_block_env: BlockEnv, + best_txs: F, ) -> Result, PayloadBuilderError> where EvmConfig: ConfigureEvm
, Client: StateProviderFactory + ChainSpecProvider, Pool: TransactionPool, + F: FnOnce(BestTransactionsAttributes) -> BestTransactionsIter, { let BuildArguments { client, pool, mut cached_reads, config, cancel, best_payload } = args; @@ -153,11 +170,10 @@ where let mut executed_txs = Vec::new(); let mut executed_senders = Vec::new(); - let mut best_txs = pool.best_transactions_with_attributes(BestTransactionsAttributes::new( + let mut best_txs = best_txs(BestTransactionsAttributes::new( base_fee, initialized_block_env.get_blob_gasprice().map(|gasprice| gasprice as u64), )); - let mut total_fees = U256::ZERO; let block_number = initialized_block_env.number.to::(); From 58441c158b27986b904efccf6cbe5953d090e083 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 25 Oct 2024 16:10:20 +0400 Subject: [PATCH 260/425] fix: fail on unwind during `reth import` (#12062) --- crates/cli/commands/src/import.rs | 1 + .../cli/src/commands/build_pipeline.rs | 1 + crates/stages/api/src/error.rs | 3 +++ crates/stages/api/src/pipeline/builder.rs | 19 +++++++++++++++++-- crates/stages/api/src/pipeline/mod.rs | 8 ++++++++ 5 files changed, 30 insertions(+), 2 deletions(-) diff --git a/crates/cli/commands/src/import.rs b/crates/cli/commands/src/import.rs index 6b750d32a..a7c81e530 100644 --- a/crates/cli/commands/src/import.rs +++ b/crates/cli/commands/src/import.rs @@ -207,6 +207,7 @@ where .with_tip_sender(tip_tx) // we want to sync all blocks the file client provides or 0 if empty .with_max_block(max_block) + .with_fail_on_unwind(true) .add_stages( DefaultStages::new( provider_factory.clone(), diff --git a/crates/optimism/cli/src/commands/build_pipeline.rs b/crates/optimism/cli/src/commands/build_pipeline.rs index f23cb9a7c..a197f93a8 100644 --- a/crates/optimism/cli/src/commands/build_pipeline.rs +++ b/crates/optimism/cli/src/commands/build_pipeline.rs @@ -75,6 +75,7 @@ where .with_tip_sender(tip_tx) // we want to sync all blocks the file client provides or 0 if empty .with_max_block(max_block) + .with_fail_on_unwind(true) .add_stages( DefaultStages::new( provider_factory.clone(), diff --git a/crates/stages/api/src/error.rs b/crates/stages/api/src/error.rs index 68e1d00fd..8562b10b6 100644 --- a/crates/stages/api/src/error.rs +++ b/crates/stages/api/src/error.rs @@ -188,4 +188,7 @@ pub enum PipelineError { /// Internal error #[error(transparent)] Internal(#[from] RethError), + /// The pipeline encountered an unwind when `fail_on_unwind` was set to `true`. + #[error("unexpected unwind")] + UnexpectedUnwind, } diff --git a/crates/stages/api/src/pipeline/builder.rs b/crates/stages/api/src/pipeline/builder.rs index 79a4c477e..45bdc2d89 100644 --- a/crates/stages/api/src/pipeline/builder.rs +++ b/crates/stages/api/src/pipeline/builder.rs @@ -14,6 +14,7 @@ pub struct PipelineBuilder { /// A receiver for the current chain tip to sync to. tip_tx: Option>, metrics_tx: Option, + fail_on_unwind: bool, } impl PipelineBuilder { @@ -62,6 +63,12 @@ impl PipelineBuilder { self } + /// Set whether pipeline should fail on unwind. + pub const fn with_fail_on_unwind(mut self, yes: bool) -> Self { + self.fail_on_unwind = yes; + self + } + /// Builds the final [`Pipeline`] using the given database. 
pub fn build( self, @@ -72,7 +79,7 @@ impl PipelineBuilder { N: ProviderNodeTypes, ProviderFactory: DatabaseProviderFactory, { - let Self { stages, max_block, tip_tx, metrics_tx } = self; + let Self { stages, max_block, tip_tx, metrics_tx, fail_on_unwind } = self; Pipeline { provider_factory, stages, @@ -82,13 +89,20 @@ impl PipelineBuilder { event_sender: Default::default(), progress: Default::default(), metrics_tx, + fail_on_unwind, } } } impl Default for PipelineBuilder { fn default() -> Self { - Self { stages: Vec::new(), max_block: None, tip_tx: None, metrics_tx: None } + Self { + stages: Vec::new(), + max_block: None, + tip_tx: None, + metrics_tx: None, + fail_on_unwind: false, + } } } @@ -97,6 +111,7 @@ impl std::fmt::Debug for PipelineBuilder { f.debug_struct("PipelineBuilder") .field("stages", &self.stages.iter().map(|stage| stage.id()).collect::>()) .field("max_block", &self.max_block) + .field("fail_on_unwind", &self.fail_on_unwind) .finish() } } diff --git a/crates/stages/api/src/pipeline/mod.rs b/crates/stages/api/src/pipeline/mod.rs index 14225a595..399a3ffb4 100644 --- a/crates/stages/api/src/pipeline/mod.rs +++ b/crates/stages/api/src/pipeline/mod.rs @@ -78,6 +78,9 @@ pub struct Pipeline { /// A receiver for the current chain tip to sync to. tip_tx: Option>, metrics_tx: Option, + /// Whether an unwind should fail the syncing process. Should only be set when downloading + /// blocks from trusted sources and expecting them to be valid. + fail_on_unwind: bool, } impl Pipeline { @@ -164,6 +167,10 @@ impl Pipeline { loop { let next_action = self.run_loop().await?; + if next_action.is_unwind() && self.fail_on_unwind { + return Err(PipelineError::UnexpectedUnwind) + } + // Terminate the loop early if it's reached the maximum user // configured block. 
if next_action.should_continue() && @@ -586,6 +593,7 @@ impl std::fmt::Debug for Pipeline { .field("stages", &self.stages.iter().map(|stage| stage.id()).collect::>()) .field("max_block", &self.max_block) .field("event_sender", &self.event_sender) + .field("fail_on_unwind", &self.fail_on_unwind) .finish() } } From 26d1b1524bea366e886ecf27a0e76e53d07aa0a5 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 25 Oct 2024 14:17:07 +0200 Subject: [PATCH 261/425] fix: ignore discovered peers with tcp port 0 (#12065) --- crates/net/network/src/discovery.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/net/network/src/discovery.rs b/crates/net/network/src/discovery.rs index d366027d6..5b2bb788f 100644 --- a/crates/net/network/src/discovery.rs +++ b/crates/net/network/src/discovery.rs @@ -214,6 +214,10 @@ impl Discovery { fn on_node_record_update(&mut self, record: NodeRecord, fork_id: Option) { let peer_id = record.id; let tcp_addr = record.tcp_addr(); + if tcp_addr.port() == 0 { + // useless peer for p2p + return + } let udp_addr = record.udp_addr(); let addr = PeerAddr::new(tcp_addr, Some(udp_addr)); _ = From 07bda5d453749a513fdd5afc5ea48dd90a3ba5a0 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 25 Oct 2024 14:17:32 +0200 Subject: [PATCH 262/425] chore: EthBuiltPayload touchups (#12067) --- .../ethereum/engine-primitives/src/payload.rs | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/crates/ethereum/engine-primitives/src/payload.rs b/crates/ethereum/engine-primitives/src/payload.rs index 1ad9c5450..50c485254 100644 --- a/crates/ethereum/engine-primitives/src/payload.rs +++ b/crates/ethereum/engine-primitives/src/payload.rs @@ -38,7 +38,9 @@ pub struct EthBuiltPayload { // === impl BuiltPayload === impl EthBuiltPayload { - /// Initializes the payload with the given initial block. + /// Initializes the payload with the given initial block + /// + /// Caution: This does not set any [`BlobTransactionSidecar`]. pub const fn new( id: PayloadId, block: SealedBlock, @@ -69,9 +71,18 @@ impl EthBuiltPayload { } /// Adds sidecars to the payload. - pub fn extend_sidecars(&mut self, sidecars: Vec) { + pub fn extend_sidecars(&mut self, sidecars: impl IntoIterator) { self.sidecars.extend(sidecars) } + + /// Same as [`Self::extend_sidecars`] but returns the type again. + pub fn with_sidecars( + mut self, + sidecars: impl IntoIterator, + ) -> Self { + self.extend_sidecars(sidecars); + self + } } impl BuiltPayload for EthBuiltPayload { @@ -134,7 +145,7 @@ impl From for ExecutionPayloadEnvelopeV3 { // Spec: // should_override_builder: false, - blobs_bundle: sidecars.into_iter().map(Into::into).collect::>().into(), + blobs_bundle: sidecars.into(), } } } From 09506aa130b4c0dae3fb7029f27845b4f2c58321 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 25 Oct 2024 14:25:52 +0200 Subject: [PATCH 263/425] chore: rm TransactionFilter (#12066) --- crates/transaction-pool/src/traits.rs | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index e35912726..c21a7a4ea 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -815,25 +815,6 @@ where } } -/// A subtrait on the [`BestTransactions`] trait that allows to filter transactions. -pub trait BestTransactionsFilter: BestTransactions { - /// Creates an iterator which uses a closure to determine if a transaction should be yielded. 
- /// - /// Given an element the closure must return true or false. The returned iterator will yield - /// only the elements for which the closure returns true. - /// - /// Descendant transactions will be skipped. - fn filter<P>
(self, predicate: P) -> BestTransactionFilter - where - P: FnMut(&Self::Item) -> bool, - Self: Sized, - { - BestTransactionFilter::new(self, predicate) - } -} - -impl BestTransactionsFilter for T where T: BestTransactions {} - /// A no-op implementation that yields no transactions. impl BestTransactions for std::iter::Empty { fn mark_invalid(&mut self, _tx: &T) {} From e93e373853ae2735fc6ca86ef0273bcacfd15c31 Mon Sep 17 00:00:00 2001 From: Debjit Bhowal Date: Fri, 25 Oct 2024 19:51:17 +0530 Subject: [PATCH 264/425] making `command` public (#12074) --- bin/reth/src/cli/mod.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/bin/reth/src/cli/mod.rs b/bin/reth/src/cli/mod.rs index 01662eb4d..192ab6700 100644 --- a/bin/reth/src/cli/mod.rs +++ b/bin/reth/src/cli/mod.rs @@ -39,7 +39,7 @@ pub struct Cli, + pub command: Commands, /// The chain this node is running. /// @@ -52,7 +52,7 @@ pub struct Cli, + pub chain: Arc, /// Add a new instance of a node. /// @@ -68,10 +68,11 @@ pub struct Cli Date: Fri, 25 Oct 2024 18:39:52 +0200 Subject: [PATCH 265/425] primitive-traits: add unit tests for `Account` (#12048) --- crates/primitives-traits/src/account.rs | 46 +++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/crates/primitives-traits/src/account.rs b/crates/primitives-traits/src/account.rs index 063504b2a..ae58973ed 100644 --- a/crates/primitives-traits/src/account.rs +++ b/crates/primitives-traits/src/account.rs @@ -256,4 +256,50 @@ mod tests { assert_eq!(decoded, bytecode); assert!(remainder.is_empty()); } + + #[test] + fn test_account_has_bytecode() { + // Account with no bytecode (None) + let acc_no_bytecode = Account { nonce: 1, balance: U256::from(1000), bytecode_hash: None }; + assert!(!acc_no_bytecode.has_bytecode(), "Account should not have bytecode"); + + // Account with bytecode hash set to KECCAK_EMPTY (should have bytecode) + let acc_empty_bytecode = + Account { nonce: 1, balance: U256::from(1000), bytecode_hash: Some(KECCAK_EMPTY) }; + assert!(acc_empty_bytecode.has_bytecode(), "Account should have bytecode"); + + // Account with a non-empty bytecode hash + let acc_with_bytecode = Account { + nonce: 1, + balance: U256::from(1000), + bytecode_hash: Some(B256::from_slice(&[0x11u8; 32])), + }; + assert!(acc_with_bytecode.has_bytecode(), "Account should have bytecode"); + } + + #[test] + fn test_account_get_bytecode_hash() { + // Account with no bytecode (should return KECCAK_EMPTY) + let acc_no_bytecode = Account { nonce: 0, balance: U256::ZERO, bytecode_hash: None }; + assert_eq!(acc_no_bytecode.get_bytecode_hash(), KECCAK_EMPTY, "Should return KECCAK_EMPTY"); + + // Account with bytecode hash set to KECCAK_EMPTY + let acc_empty_bytecode = + Account { nonce: 1, balance: U256::from(1000), bytecode_hash: Some(KECCAK_EMPTY) }; + assert_eq!( + acc_empty_bytecode.get_bytecode_hash(), + KECCAK_EMPTY, + "Should return KECCAK_EMPTY" + ); + + // Account with a valid bytecode hash + let bytecode_hash = B256::from_slice(&[0x11u8; 32]); + let acc_with_bytecode = + Account { nonce: 1, balance: U256::from(1000), bytecode_hash: Some(bytecode_hash) }; + assert_eq!( + acc_with_bytecode.get_bytecode_hash(), + bytecode_hash, + "Should return the bytecode hash" + ); + } } From e676d71d0b5c4f372435277031e41afde349a073 Mon Sep 17 00:00:00 2001 From: tedison <76473430+edisontim@users.noreply.github.com> Date: Fri, 25 Oct 2024 13:08:01 -0400 Subject: [PATCH 266/425] feat: Freeze payload if final (#12078) Co-authored-by: Matthias Seitz --- 
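The diff below makes the OP builder return `BuildOutcome::Freeze` when `no_tx_pool` is set, since such a payload is fully determined by the payload attributes and cannot improve on a rebuild. A minimal sketch of how a build loop might treat the frozen outcome; the variant shapes are taken from the diff, but the crate paths and the surrounding job state are assumptions, not part of this patch:

use reth_basic_payload_builder::BuildOutcome;
use reth_optimism_payload_builder::OptimismBuiltPayload;

/// Records the outcome of one build iteration and returns `true` when the job
/// can stop re-running the builder because the payload is frozen.
fn on_outcome(
    outcome: BuildOutcome<OptimismBuiltPayload>,
    best_payload: &mut Option<OptimismBuiltPayload>,
) -> bool {
    match outcome {
        // Deterministic payload: keep it and stop rebuilding.
        BuildOutcome::Freeze(payload) => {
            *best_payload = Some(payload);
            true
        }
        // Improved payload; later iterations may still do better.
        BuildOutcome::Better { payload, .. } => {
            *best_payload = Some(payload);
            false
        }
        // Cancelled or aborted builds leave the previous best payload untouched.
        _ => false,
    }
}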
crates/optimism/payload/src/builder.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index c85abfad7..85f687aa8 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -522,6 +522,8 @@ where trie: Arc::new(trie_output), }; + let no_tx_pool = attributes.no_tx_pool; + let mut payload = OptimismBuiltPayload::new( attributes.payload_attributes.id, sealed_block, @@ -534,5 +536,12 @@ where // extend the payload with the blob sidecars from the executed txs payload.extend_sidecars(blob_sidecars); - Ok(BuildOutcome::Better { payload, cached_reads }) + if no_tx_pool { + // if `no_tx_pool` is set only transactions from the payload attributes will be included in + // the payload. In other words, the payload is deterministic and we can freeze it once we've + // successfully built it. + Ok(BuildOutcome::Freeze(payload)) + } else { + Ok(BuildOutcome::Better { payload, cached_reads }) + } } From d91cacd14ac56495100a7785352e1a1ed8c8a42d Mon Sep 17 00:00:00 2001 From: Jeff Date: Fri, 25 Oct 2024 13:11:24 -0400 Subject: [PATCH 267/425] feat(rpc): rpc rate limiter impl (#11952) Co-authored-by: Matthias Seitz --- Cargo.lock | 1 + crates/rpc/rpc-builder/Cargo.toml | 2 + crates/rpc/rpc-builder/src/lib.rs | 3 + crates/rpc/rpc-builder/src/rate_limiter.rs | 116 +++++++++++++++++++++ 4 files changed, 122 insertions(+) create mode 100644 crates/rpc/rpc-builder/src/rate_limiter.rs diff --git a/Cargo.lock b/Cargo.lock index d3aa0b295..a99803e1b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8744,6 +8744,7 @@ dependencies = [ "serde_json", "thiserror", "tokio", + "tokio-util", "tower 0.4.13", "tower-http", "tracing", diff --git a/crates/rpc/rpc-builder/Cargo.toml b/crates/rpc/rpc-builder/Cargo.toml index cc72c2ebf..b9b511a07 100644 --- a/crates/rpc/rpc-builder/Cargo.toml +++ b/crates/rpc/rpc-builder/Cargo.toml @@ -50,6 +50,8 @@ metrics.workspace = true serde = { workspace = true, features = ["derive"] } thiserror.workspace = true tracing.workspace = true +tokio-util = { workspace = true } +tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } [dev-dependencies] reth-chainspec.workspace = true diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index 72b53efe6..ceafe2065 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -226,6 +226,9 @@ pub use eth::EthHandlers; mod metrics; pub use metrics::{MeteredRequestFuture, RpcRequestMetricsService}; +// Rpc rate limiter +pub mod rate_limiter; + /// Convenience function for starting a server in one step. #[allow(clippy::too_many_arguments)] pub async fn launch( diff --git a/crates/rpc/rpc-builder/src/rate_limiter.rs b/crates/rpc/rpc-builder/src/rate_limiter.rs new file mode 100644 index 000000000..85df0eee6 --- /dev/null +++ b/crates/rpc/rpc-builder/src/rate_limiter.rs @@ -0,0 +1,116 @@ +//! [`jsonrpsee`] helper layer for rate limiting certain methods. + +use jsonrpsee::{server::middleware::rpc::RpcServiceT, types::Request, MethodResponse}; +use std::{ + future::Future, + pin::Pin, + sync::Arc, + task::{ready, Context, Poll}, +}; +use tokio::sync::{OwnedSemaphorePermit, Semaphore}; +use tokio_util::sync::PollSemaphore; +use tower::Layer; + +/// Rate limiter for the RPC server. +/// +/// Rate limits expensive calls such as debug_ and trace_. 
+#[derive(Debug, Clone)] +pub struct RpcRequestRateLimiter { + inner: Arc, +} + +impl RpcRequestRateLimiter { + /// Create a new rate limit layer with the given number of permits. + pub fn new(rate_limit: usize) -> Self { + Self { + inner: Arc::new(RpcRequestRateLimiterInner { + call_guard: PollSemaphore::new(Arc::new(Semaphore::new(rate_limit))), + }), + } + } +} + +impl Layer for RpcRequestRateLimiter { + type Service = RpcRequestRateLimitingService; + + fn layer(&self, inner: S) -> Self::Service { + RpcRequestRateLimitingService::new(inner, self.clone()) + } +} + +/// Rate Limiter for the RPC server +#[derive(Debug, Clone)] +struct RpcRequestRateLimiterInner { + /// Semaphore to rate limit calls + call_guard: PollSemaphore, +} + +/// A [`RpcServiceT`] middleware that rate limits RPC calls to the server. +#[derive(Debug, Clone)] +pub struct RpcRequestRateLimitingService { + /// The rate limiter for RPC requests + rate_limiter: RpcRequestRateLimiter, + /// The inner service being wrapped + inner: S, +} + +impl RpcRequestRateLimitingService { + /// Create a new rate limited service. + pub const fn new(service: S, rate_limiter: RpcRequestRateLimiter) -> Self { + Self { inner: service, rate_limiter } + } +} + +impl<'a, S> RpcServiceT<'a> for RpcRequestRateLimitingService +where + S: RpcServiceT<'a> + Send + Sync + Clone + 'static, +{ + type Future = RateLimitingRequestFuture; + + fn call(&self, req: Request<'a>) -> Self::Future { + let method_name = req.method_name(); + if method_name.starts_with("trace_") || method_name.starts_with("debug_") { + RateLimitingRequestFuture { + fut: self.inner.call(req), + guard: Some(self.rate_limiter.inner.call_guard.clone()), + permit: None, + } + } else { + // if we don't need to rate limit, then there + // is no need to get a semaphore permit + RateLimitingRequestFuture { fut: self.inner.call(req), guard: None, permit: None } + } + } +} + +/// Response future. 
+#[pin_project::pin_project] +pub struct RateLimitingRequestFuture { + #[pin] + fut: F, + guard: Option, + permit: Option, +} + +impl std::fmt::Debug for RateLimitingRequestFuture { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("RateLimitingRequestFuture") + } +} + +impl> Future for RateLimitingRequestFuture { + type Output = F::Output; + + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let this = self.project(); + if let Some(guard) = this.guard.as_mut() { + *this.permit = ready!(guard.poll_acquire(cx)); + *this.guard = None; + } + let res = this.fut.poll(cx); + if res.is_ready() { + *this.permit = None; + } + res + } +} From 16b64d8284a87888bae296992a7e4789594e8d16 Mon Sep 17 00:00:00 2001 From: Kien Trinh <51135161+kien6034@users.noreply.github.com> Date: Sat, 26 Oct 2024 03:22:02 +0700 Subject: [PATCH 268/425] feat(make): add docs lint (#12082) --- Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/Makefile b/Makefile index 5ad7abac6..ac4ad1038 100644 --- a/Makefile +++ b/Makefile @@ -497,6 +497,7 @@ test: pr: make lint && \ make update-book-cli && \ + cargo docs --document-private-items && \ make test check-features: From a349919b5c1b3f4b28959acebfa55f6786845fe9 Mon Sep 17 00:00:00 2001 From: AJStonewee Date: Fri, 25 Oct 2024 18:44:00 -0400 Subject: [PATCH 269/425] docs: remove deleted op-sync workflow from docs (#12086) Co-authored-by: Oliver --- docs/repo/ci.md | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/docs/repo/ci.md b/docs/repo/ci.md index 5ed2cec00..863a18f9c 100644 --- a/docs/repo/ci.md +++ b/docs/repo/ci.md @@ -7,8 +7,7 @@ The CI runs a couple of workflows: - **[unit]**: Runs unit tests (tests in `src/`) and doc tests - **[integration]**: Runs integration tests (tests in `tests/` and sync tests) - **[bench]**: Runs benchmarks -- **[eth-sync]**: Runs Ethereum mainnet sync tests -- **[op-sync]**: Runs base mainnet sync tests for Optimism +- **[sync]**: Runs sync tests - **[stage]**: Runs all `stage run` commands ### Docs @@ -38,8 +37,7 @@ The CI runs a couple of workflows: [unit]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/unit.yml [integration]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/integration.yml [bench]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/bench.yml -[eth-sync]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/eth-sync.yml -[op-sync]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/op-sync.yml +[sync]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/sync.yml [stage]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/stage.yml [book]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/book.yml [deny]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/deny.yml From fa59bd512e877e4677890b5c154b80816c07480e Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Sat, 26 Oct 2024 03:18:34 +0400 Subject: [PATCH 270/425] fix: correctly detect first sync on headers stage (#12085) --- crates/stages/stages/src/stages/headers.rs | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/crates/stages/stages/src/stages/headers.rs b/crates/stages/stages/src/stages/headers.rs index 199e015c2..49e687a96 100644 --- a/crates/stages/stages/src/stages/headers.rs +++ b/crates/stages/stages/src/stages/headers.rs @@ -2,7 +2,7 @@ use alloy_primitives::{BlockHash, BlockNumber, Bytes, B256}; use futures_util::StreamExt; use 
reth_config::config::EtlConfig; use reth_consensus::Consensus; -use reth_db::{tables, RawKey, RawTable, RawValue}; +use reth_db::{tables, transaction::DbTx, RawKey, RawTable, RawValue}; use reth_db_api::{ cursor::{DbCursorRO, DbCursorRW}, transaction::DbTxMut, @@ -155,11 +155,13 @@ where // If we only have the genesis block hash, then we are at first sync, and we can remove it, // add it to the collector and use tx.append on all hashes. - if let Some((hash, block_number)) = cursor_header_numbers.last()? { - if block_number.value()? == 0 { - self.hash_collector.insert(hash.key()?, 0)?; - cursor_header_numbers.delete_current()?; - first_sync = true; + if provider.tx_ref().entries::>()? == 1 { + if let Some((hash, block_number)) = cursor_header_numbers.last()? { + if block_number.value()? == 0 { + self.hash_collector.insert(hash.key()?, 0)?; + cursor_header_numbers.delete_current()?; + first_sync = true; + } } } From e0ad59834de2e77c0283f1a5c1fc2fd7718a9875 Mon Sep 17 00:00:00 2001 From: greged93 <82421016+greged93@users.noreply.github.com> Date: Sat, 26 Oct 2024 08:10:15 +0200 Subject: [PATCH 271/425] dev: add `requests` to `EthBuiltPayload` (#12072) --- Cargo.lock | 3 ++- crates/e2e-test-utils/src/engine_api.rs | 9 +------ .../ethereum/engine-primitives/src/payload.rs | 26 +++++++++++-------- crates/ethereum/payload/src/lib.rs | 5 ++-- crates/optimism/payload/src/payload.rs | 10 ++++++- crates/payload/builder/src/lib.rs | 2 +- crates/payload/builder/src/test_utils.rs | 1 + crates/payload/primitives/Cargo.toml | 1 + crates/payload/primitives/src/traits.rs | 4 +++ 9 files changed, 37 insertions(+), 24 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a99803e1b..bf73d7eef 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8373,6 +8373,7 @@ dependencies = [ name = "reth-payload-primitives" version = "1.1.0" dependencies = [ + "alloy-eips", "alloy-primitives", "alloy-rpc-types", "async-trait", @@ -11956,4 +11957,4 @@ checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa" dependencies = [ "cc", "pkg-config", -] \ No newline at end of file +] diff --git a/crates/e2e-test-utils/src/engine_api.rs b/crates/e2e-test-utils/src/engine_api.rs index 729205211..cfa245e1d 100644 --- a/crates/e2e-test-utils/src/engine_api.rs +++ b/crates/e2e-test-utils/src/engine_api.rs @@ -62,14 +62,7 @@ impl EngineApiTestContext::ExecutionPayloadEnvelopeV4 = payload.into(); EngineApiClient::::new_payload_v4( &self.engine_api_client, diff --git a/crates/ethereum/engine-primitives/src/payload.rs b/crates/ethereum/engine-primitives/src/payload.rs index 50c485254..420352cf2 100644 --- a/crates/ethereum/engine-primitives/src/payload.rs +++ b/crates/ethereum/engine-primitives/src/payload.rs @@ -33,6 +33,8 @@ pub struct EthBuiltPayload { /// The blobs, proofs, and commitments in the block. If the block is pre-cancun, this will be /// empty. pub(crate) sidecars: Vec, + /// The requests of the payload + pub(crate) requests: Option, } // === impl BuiltPayload === @@ -46,8 +48,9 @@ impl EthBuiltPayload { block: SealedBlock, fees: U256, executed_block: Option, + requests: Option, ) -> Self { - Self { id, block, executed_block, fees, sidecars: Vec::new() } + Self { id, block, executed_block, fees, sidecars: Vec::new(), requests } } /// Returns the identifier of the payload. 
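With the extra `requests` argument introduced above, every caller of `EthBuiltPayload::new` now passes the EIP-7685 requests explicitly, and the value is exposed again through the new `requests` accessor on `BuiltPayload`. A short hedged sketch of the widened call; the crate paths are assumed and the fee and executed-block values are placeholders:

use alloy_eips::eip7685::Requests;
use alloy_primitives::U256;
use alloy_rpc_types_engine::PayloadId;
use reth_ethereum_engine_primitives::EthBuiltPayload;
use reth_primitives::SealedBlock;

/// Assembles a built payload; only the trailing `requests` argument is what
/// this patch adds to the constructor.
fn assemble(id: PayloadId, block: SealedBlock, requests: Option<Requests>) -> EthBuiltPayload {
    EthBuiltPayload::new(id, block, U256::ZERO, None, requests)
}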
@@ -97,6 +100,10 @@ impl BuiltPayload for EthBuiltPayload { fn executed_block(&self) -> Option { self.executed_block.clone() } + + fn requests(&self) -> Option { + self.requests.clone() + } } impl BuiltPayload for &EthBuiltPayload { @@ -111,6 +118,10 @@ impl BuiltPayload for &EthBuiltPayload { fn executed_block(&self) -> Option { self.executed_block.clone() } + + fn requests(&self) -> Option { + self.requests.clone() + } } // V1 engine_getPayloadV1 response @@ -152,15 +163,8 @@ impl From for ExecutionPayloadEnvelopeV3 { impl From for ExecutionPayloadEnvelopeV4 { fn from(value: EthBuiltPayload) -> Self { - let EthBuiltPayload { block, fees, sidecars, executed_block, .. } = value; - - // if we have an executed block, we pop off the first set of requests from the execution - // outcome. the assumption here is that there will always only be one block in the execution - // outcome. - let execution_requests = executed_block - .and_then(|block| block.execution_outcome().requests.first().cloned()) - .map(Requests::take) - .unwrap_or_default(); + let EthBuiltPayload { block, fees, sidecars, requests, .. } = value; + Self { execution_payload: block_to_payload_v3(block), block_value: fees, @@ -174,7 +178,7 @@ impl From for ExecutionPayloadEnvelopeV4 { // should_override_builder: false, blobs_bundle: sidecars.into_iter().map(Into::into).collect::>().into(), - execution_requests, + execution_requests: requests.unwrap_or_default().take(), } } } diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index bb611441f..f14c14588 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -360,7 +360,7 @@ where db.take_bundle(), vec![receipts].into(), block_number, - vec![requests.unwrap_or_default()], + vec![requests.clone().unwrap_or_default()], ); let receipts_root = execution_outcome.receipts_root_slow(block_number).expect("Number is in range"); @@ -449,7 +449,8 @@ where trie: Arc::new(trie_output), }; - let mut payload = EthBuiltPayload::new(attributes.id, sealed_block, total_fees, Some(executed)); + let mut payload = + EthBuiltPayload::new(attributes.id, sealed_block, total_fees, Some(executed), requests); // extend the payload with the blob sidecars from the executed txs payload.extend_sidecars(blob_sidecars); diff --git a/crates/optimism/payload/src/payload.rs b/crates/optimism/payload/src/payload.rs index d5d1620e5..98b0e41b0 100644 --- a/crates/optimism/payload/src/payload.rs +++ b/crates/optimism/payload/src/payload.rs @@ -2,7 +2,7 @@ //! Optimism builder support -use alloy_eips::eip2718::Decodable2718; +use alloy_eips::{eip2718::Decodable2718, eip7685::Requests}; use alloy_primitives::{Address, B256, U256}; use alloy_rlp::Encodable; use alloy_rpc_types_engine::{ExecutionPayloadEnvelopeV2, ExecutionPayloadV1, PayloadId}; @@ -178,6 +178,10 @@ impl BuiltPayload for OptimismBuiltPayload { fn executed_block(&self) -> Option { self.executed_block.clone() } + + fn requests(&self) -> Option { + None + } } impl BuiltPayload for &OptimismBuiltPayload { @@ -192,6 +196,10 @@ impl BuiltPayload for &OptimismBuiltPayload { fn executed_block(&self) -> Option { self.executed_block.clone() } + + fn requests(&self) -> Option { + None + } } // V1 engine_getPayloadV1 response diff --git a/crates/payload/builder/src/lib.rs b/crates/payload/builder/src/lib.rs index 0df15f5b0..7af61ac4c 100644 --- a/crates/payload/builder/src/lib.rs +++ b/crates/payload/builder/src/lib.rs @@ -65,7 +65,7 @@ //! }, //! ..Default::default() //! }; -//! 
let payload = EthBuiltPayload::new(self.attributes.id, payload.seal_slow(), U256::ZERO, None); +//! let payload = EthBuiltPayload::new(self.attributes.id, payload.seal_slow(), U256::ZERO, None, None); //! Ok(payload) //! } //! diff --git a/crates/payload/builder/src/test_utils.rs b/crates/payload/builder/src/test_utils.rs index 6990dc9b1..676e60d91 100644 --- a/crates/payload/builder/src/test_utils.rs +++ b/crates/payload/builder/src/test_utils.rs @@ -89,6 +89,7 @@ impl PayloadJob for TestPayloadJob { Block::default().seal_slow(), U256::ZERO, Some(ExecutedBlock::default()), + Some(Default::default()), )) } diff --git a/crates/payload/primitives/Cargo.toml b/crates/payload/primitives/Cargo.toml index 27418ccd8..ad8ce63a7 100644 --- a/crates/payload/primitives/Cargo.toml +++ b/crates/payload/primitives/Cargo.toml @@ -20,6 +20,7 @@ reth-transaction-pool.workspace = true reth-chain-state.workspace = true # alloy +alloy-eips.workspace = true alloy-primitives.workspace = true alloy-rpc-types = { workspace = true, features = ["engine"] } op-alloy-rpc-types-engine.workspace = true diff --git a/crates/payload/primitives/src/traits.rs b/crates/payload/primitives/src/traits.rs index ce98fcad3..df7614902 100644 --- a/crates/payload/primitives/src/traits.rs +++ b/crates/payload/primitives/src/traits.rs @@ -1,4 +1,5 @@ use crate::{PayloadEvents, PayloadKind, PayloadTypes}; +use alloy_eips::eip7685::Requests; use alloy_primitives::{Address, B256, U256}; use alloy_rpc_types::{ engine::{PayloadAttributes as EthPayloadAttributes, PayloadId}, @@ -65,6 +66,9 @@ pub trait BuiltPayload: Send + Sync + std::fmt::Debug { fn executed_block(&self) -> Option { None } + + /// Returns the EIP-7865 requests for the payload if any. + fn requests(&self) -> Option; } /// This can be implemented by types that describe a currently running payload job. From cecdf611e948257a79e7c262501bdadbd174e44d Mon Sep 17 00:00:00 2001 From: greged93 <82421016+greged93@users.noreply.github.com> Date: Sat, 26 Oct 2024 08:11:27 +0200 Subject: [PATCH 272/425] feat: `map_chainspec` for `NodeConfig` (#12068) --- crates/exex/exex/src/dyn_context.rs | 22 +++------------------- crates/node/core/src/node_config.rs | 23 +++++++++++++++++++++++ 2 files changed, 26 insertions(+), 19 deletions(-) diff --git a/crates/exex/exex/src/dyn_context.rs b/crates/exex/exex/src/dyn_context.rs index 226f3a3fe..b48a6ebc9 100644 --- a/crates/exex/exex/src/dyn_context.rs +++ b/crates/exex/exex/src/dyn_context.rs @@ -1,7 +1,7 @@ //! Mirrored version of [`ExExContext`](`crate::ExExContext`) //! 
without generic abstraction over [Node](`reth_node_api::FullNodeComponents`) -use std::{fmt::Debug, sync::Arc}; +use std::fmt::Debug; use reth_chainspec::{EthChainSpec, Head}; use reth_node_api::FullNodeComponents; @@ -55,24 +55,8 @@ where Node::Executor: Debug, { fn from(ctx: ExExContext) -> Self { - // convert `NodeConfig` with generic over chainspec into `NodeConfig` - let chain: Arc> = - Arc::new(Box::new(ctx.config.chain) as Box); - let config = NodeConfig { - chain, - datadir: ctx.config.datadir, - config: ctx.config.config, - metrics: ctx.config.metrics, - instance: ctx.config.instance, - network: ctx.config.network, - rpc: ctx.config.rpc, - txpool: ctx.config.txpool, - builder: ctx.config.builder, - debug: ctx.config.debug, - db: ctx.config.db, - dev: ctx.config.dev, - pruning: ctx.config.pruning, - }; + let config = + ctx.config.map_chainspec(|chainspec| Box::new(chainspec) as Box); let notifications = Box::new(ctx.notifications) as Box; Self { diff --git a/crates/node/core/src/node_config.rs b/crates/node/core/src/node_config.rs index a8799d80d..80fb5152e 100644 --- a/crates/node/core/src/node_config.rs +++ b/crates/node/core/src/node_config.rs @@ -422,6 +422,29 @@ impl NodeConfig { Err(e) => Err(eyre!("Failed to load configuration: {e}")), } } + + /// Modifies the [`ChainSpec`] generic of the config using the provided closure. + pub fn map_chainspec(self, f: F) -> NodeConfig + where + F: FnOnce(Arc) -> C, + { + let chain = Arc::new(f(self.chain)); + NodeConfig { + chain, + datadir: self.datadir, + config: self.config, + metrics: self.metrics, + instance: self.instance, + network: self.network, + rpc: self.rpc, + txpool: self.txpool, + builder: self.builder, + debug: self.debug, + db: self.db, + dev: self.dev, + pruning: self.pruning, + } + } } impl Default for NodeConfig { From ac329bfce1d6ebf90e06fc977ff43e40f20cf084 Mon Sep 17 00:00:00 2001 From: Darshan Kathiriya <8559992+lakshya-sky@users.noreply.github.com> Date: Sat, 26 Oct 2024 03:44:47 -0400 Subject: [PATCH 273/425] perf: improve debug_traceBlock performance (#11979) Co-authored-by: Matthias Seitz --- crates/rpc/rpc/src/debug.rs | 78 +++++++++++++++++++++++++------------ 1 file changed, 53 insertions(+), 25 deletions(-) diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index 2d9d6f782..5a20bee97 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -112,6 +112,7 @@ where let mut results = Vec::with_capacity(transactions.len()); let mut db = CacheDB::new(StateProviderDatabase::new(state)); let mut transactions = transactions.into_iter().enumerate().peekable(); + let mut inspector = None; while let Some((index, tx)) = transactions.next() { let tx_hash = tx.hash; @@ -124,7 +125,7 @@ where handler_cfg: cfg.handler_cfg, }; let (result, state_changes) = this.trace_transaction( - opts.clone(), + &opts, env, &mut db, Some(TransactionContext { @@ -132,8 +133,11 @@ where tx_hash: Some(tx_hash), tx_index: Some(index), }), + &mut inspector, )?; + inspector = inspector.map(|insp| insp.fused()); + results.push(TraceResult::Success { result, tx_hash: Some(tx_hash) }); if transactions.peek().is_some() { // need to apply the state changes of this transaction before executing the @@ -295,7 +299,7 @@ where }; this.trace_transaction( - opts, + &opts, env, &mut db, Some(TransactionContext { @@ -303,6 +307,7 @@ where tx_index: Some(index), tx_hash: Some(tx.hash), }), + &mut None, ) .map(|(trace, _)| trace) }) @@ -573,6 +578,7 @@ where let Bundle { transactions, block_override } = bundle; let 
block_overrides = block_override.map(Box::new); + let mut inspector = None; let mut transactions = transactions.into_iter().peekable(); while let Some(tx) = transactions.next() { @@ -588,8 +594,15 @@ where overrides, )?; - let (trace, state) = - this.trace_transaction(tracing_options.clone(), env, &mut db, None)?; + let (trace, state) = this.trace_transaction( + &tracing_options, + env, + &mut db, + None, + &mut inspector, + )?; + + inspector = inspector.map(|insp| insp.fused()); // If there is more transactions, commit the database // If there is no transactions, but more bundles, commit to the database too @@ -692,6 +705,13 @@ where /// Executes the configured transaction with the environment on the given database. /// + /// It optionally takes fused inspector ([`TracingInspector::fused`]) to avoid re-creating the + /// inspector for each transaction. This is useful when tracing multiple transactions in a + /// block. This is only useful for block tracing which uses the same tracer for all transactions + /// in the block. + /// + /// Caution: If the inspector is provided then `opts.tracer_config` is ignored. + /// /// Returns the trace frame and the state that got updated after executing the transaction. /// /// Note: this does not apply any state overrides if they're configured in the `opts`. @@ -699,10 +719,11 @@ where /// Caution: this is blocking and should be performed on a blocking task. fn trace_transaction( &self, - opts: GethDebugTracingOptions, + opts: &GethDebugTracingOptions, env: EnvWithHandlerCfg, db: &mut StateCacheDb<'_>, transaction_context: Option, + fused_inspector: &mut Option, ) -> Result<(GethTrace, revm_primitives::EvmState), Eth::Error> { let GethDebugTracingOptions { config, tracer, tracer_config, .. } = opts; @@ -716,35 +737,42 @@ where } GethDebugBuiltInTracerType::CallTracer => { let call_config = tracer_config + .clone() .into_call_config() .map_err(|_| EthApiError::InvalidTracerConfig)?; - let mut inspector = TracingInspector::new( - TracingInspectorConfig::from_geth_call_config(&call_config), - ); + let mut inspector = fused_inspector.get_or_insert_with(|| { + TracingInspector::new(TracingInspectorConfig::from_geth_call_config( + &call_config, + )) + }); let (res, env) = self.eth_api().inspect(db, env, &mut inspector)?; + inspector.set_transaction_gas_limit(env.tx.gas_limit); + let frame = inspector - .with_transaction_gas_limit(env.tx.gas_limit) - .into_geth_builder() + .geth_builder() .geth_call_traces(call_config, res.result.gas_used()); return Ok((frame.into(), res.state)) } GethDebugBuiltInTracerType::PreStateTracer => { let prestate_config = tracer_config + .clone() .into_pre_state_config() .map_err(|_| EthApiError::InvalidTracerConfig)?; - let mut inspector = TracingInspector::new( - TracingInspectorConfig::from_geth_prestate_config(&prestate_config), - ); + let mut inspector = fused_inspector.get_or_insert_with(|| { + TracingInspector::new( + TracingInspectorConfig::from_geth_prestate_config(&prestate_config), + ) + }); let (res, env) = self.eth_api().inspect(&mut *db, env, &mut inspector)?; + inspector.set_transaction_gas_limit(env.tx.gas_limit); let frame = inspector - .with_transaction_gas_limit(env.tx.gas_limit) - .into_geth_builder() + .geth_builder() .geth_prestate_traces(&res, &prestate_config, db) .map_err(Eth::Error::from_eth_err)?; @@ -755,6 +783,7 @@ where } GethDebugBuiltInTracerType::MuxTracer => { let mux_config = tracer_config + .clone() .into_mux_config() .map_err(|_| EthApiError::InvalidTracerConfig)?; @@ -769,6 +798,7 @@ where } 
GethDebugBuiltInTracerType::FlatCallTracer => { let flat_call_config = tracer_config + .clone() .into_flat_call_config() .map_err(|_| EthApiError::InvalidTracerConfig)?; @@ -799,10 +829,10 @@ where } #[cfg(feature = "js-tracer")] GethDebugTracerType::JsTracer(code) => { - let config = tracer_config.into_json(); + let config = tracer_config.clone().into_json(); let mut inspector = revm_inspectors::tracing::js::JsInspector::with_transaction_context( - code, + code.clone(), config, transaction_context.unwrap_or_default(), ) @@ -818,17 +848,15 @@ where } // default structlog tracer - let inspector_config = TracingInspectorConfig::from_geth_config(&config); - - let mut inspector = TracingInspector::new(inspector_config); - + let mut inspector = fused_inspector.get_or_insert_with(|| { + let inspector_config = TracingInspectorConfig::from_geth_config(config); + TracingInspector::new(inspector_config) + }); let (res, env) = self.eth_api().inspect(db, env, &mut inspector)?; let gas_used = res.result.gas_used(); let return_value = res.result.into_output().unwrap_or_default(); - let frame = inspector - .with_transaction_gas_limit(env.tx.gas_limit) - .into_geth_builder() - .geth_traces(gas_used, return_value, config); + inspector.set_transaction_gas_limit(env.tx.gas_limit); + let frame = inspector.geth_builder().geth_traces(gas_used, return_value, *config); Ok((frame.into(), res.state)) } From 44e4c47803f26585872ddf0f25848092a4f29c73 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Sat, 26 Oct 2024 18:55:26 +0800 Subject: [PATCH 274/425] chore(sdk): add helper trait to node API to simplify type definition (#10616) --- Cargo.lock | 5 +- crates/consensus/debug-client/Cargo.toml | 2 +- crates/e2e-test-utils/src/rpc.rs | 7 ++- crates/ethereum/node/tests/e2e/dev.rs | 2 +- crates/node/api/Cargo.toml | 1 - crates/node/api/src/lib.rs | 2 - crates/node/builder/src/launch/mod.rs | 17 ++++--- crates/node/builder/src/node.rs | 2 +- crates/node/builder/src/rpc.rs | 6 ++- crates/node/core/Cargo.toml | 4 +- crates/node/core/src/lib.rs | 9 ---- crates/optimism/rpc/src/eth/mod.rs | 40 ++++++++-------- crates/rpc/rpc-eth-api/Cargo.toml | 1 + crates/rpc/rpc-eth-api/src/lib.rs | 2 + crates/rpc/rpc-eth-api/src/node.rs | 58 ++++++++++++++++++++++++ crates/rpc/rpc/Cargo.toml | 1 - crates/rpc/rpc/src/eth/filter.rs | 5 +- crates/rpc/rpc/src/eth/mod.rs | 2 +- 18 files changed, 105 insertions(+), 61 deletions(-) create mode 100644 crates/rpc/rpc-eth-api/src/node.rs diff --git a/Cargo.lock b/Cargo.lock index bf73d7eef..f4dc7dae4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7866,7 +7866,6 @@ dependencies = [ "reth-payload-primitives", "reth-primitives", "reth-provider", - "reth-rpc-eth-api", "reth-tasks", "reth-transaction-pool", ] @@ -7966,8 +7965,6 @@ dependencies = [ "reth-network-peers", "reth-primitives", "reth-prune-types", - "reth-rpc-api", - "reth-rpc-eth-api", "reth-rpc-eth-types", "reth-rpc-server-types", "reth-rpc-types-compat", @@ -8629,7 +8626,6 @@ dependencies = [ "reth-network-api", "reth-network-peers", "reth-network-types", - "reth-node-api", "reth-primitives", "reth-provider", "reth-revm", @@ -8812,6 +8808,7 @@ dependencies = [ "reth-evm", "reth-execution-types", "reth-network-api", + "reth-node-api", "reth-primitives", "reth-provider", "reth-revm", diff --git a/crates/consensus/debug-client/Cargo.toml b/crates/consensus/debug-client/Cargo.toml index c37beef10..e73125a80 100644 --- a/crates/consensus/debug-client/Cargo.toml +++ b/crates/consensus/debug-client/Cargo.toml @@ -13,7 +13,7 @@ workspace = true 
[dependencies] # reth reth-node-api.workspace = true -reth-rpc-api.workspace = true +reth-rpc-api = { workspace = true, features = ["client"] } reth-rpc-builder.workspace = true reth-tracing.workspace = true diff --git a/crates/e2e-test-utils/src/rpc.rs b/crates/e2e-test-utils/src/rpc.rs index b8cbe4d77..7b7dabdf2 100644 --- a/crates/e2e-test-utils/src/rpc.rs +++ b/crates/e2e-test-utils/src/rpc.rs @@ -4,12 +4,15 @@ use alloy_primitives::{Bytes, B256}; use reth::{ builder::{rpc::RpcRegistry, FullNodeComponents}, rpc::api::{ - eth::helpers::{EthApiSpec, EthTransactions, TraceExt}, + eth::{ + helpers::{EthApiSpec, EthTransactions, TraceExt}, + EthApiTypes, + }, DebugApiServer, }, }; use reth_chainspec::EthereumHardforks; -use reth_node_builder::{EthApiTypes, NodeTypes}; +use reth_node_builder::NodeTypes; #[allow(missing_debug_implementations)] pub struct RpcTestContext { diff --git a/crates/ethereum/node/tests/e2e/dev.rs b/crates/ethereum/node/tests/e2e/dev.rs index ead438b5a..f0fcaf645 100644 --- a/crates/ethereum/node/tests/e2e/dev.rs +++ b/crates/ethereum/node/tests/e2e/dev.rs @@ -4,7 +4,7 @@ use crate::utils::eth_payload_attributes; use alloy_genesis::Genesis; use alloy_primitives::{b256, hex}; use futures::StreamExt; -use reth::{args::DevArgs, core::rpc::eth::helpers::EthTransactions}; +use reth::{args::DevArgs, rpc::api::eth::helpers::EthTransactions}; use reth_chainspec::ChainSpec; use reth_e2e_test_utils::setup; use reth_node_api::FullNodeComponents; diff --git a/crates/node/api/Cargo.toml b/crates/node/api/Cargo.toml index c2c3eb463..6b263d6c5 100644 --- a/crates/node/api/Cargo.toml +++ b/crates/node/api/Cargo.toml @@ -21,7 +21,6 @@ reth-transaction-pool.workspace = true reth-payload-builder.workspace = true reth-payload-primitives.workspace = true reth-tasks.workspace = true -reth-rpc-eth-api.workspace = true reth-network-api.workspace = true reth-node-types.workspace = true reth-primitives.workspace = true diff --git a/crates/node/api/src/lib.rs b/crates/node/api/src/lib.rs index 7692ed6f2..099cf82b5 100644 --- a/crates/node/api/src/lib.rs +++ b/crates/node/api/src/lib.rs @@ -25,5 +25,3 @@ pub use node::*; // re-export for convenience pub use reth_node_types::*; pub use reth_provider::FullProvider; - -pub use reth_rpc_eth_api::EthApiTypes; diff --git a/crates/node/builder/src/launch/mod.rs b/crates/node/builder/src/launch/mod.rs index 36aa55541..50438e79d 100644 --- a/crates/node/builder/src/launch/mod.rs +++ b/crates/node/builder/src/launch/mod.rs @@ -23,15 +23,14 @@ use reth_consensus_debug_client::{DebugConsensusClient, EtherscanBlockProvider, use reth_engine_util::EngineMessageStreamExt; use reth_exex::ExExManagerHandle; use reth_network::{BlockDownloaderProvider, NetworkEventListenerProvider}; -use reth_node_api::{ - AddOnsContext, FullNodeComponents, FullNodeTypes, NodeTypesWithDB, NodeTypesWithEngine, -}; +use reth_node_api::{AddOnsContext, FullNodeTypes, NodeTypesWithDB, NodeTypesWithEngine}; use reth_node_core::{ dirs::{ChainPath, DataDirPath}, exit::NodeExitFuture, }; use reth_node_events::{cl::ConsensusLayerHealthEvents, node}; use reth_provider::providers::BlockchainProvider; +use reth_rpc::eth::RpcNodeCore; use reth_tasks::TaskExecutor; use reth_tracing::tracing::{debug, info}; use reth_transaction_pool::TransactionPool; @@ -47,14 +46,14 @@ use crate::{ AddOns, NodeBuilderWithComponents, NodeHandle, }; -/// Alias for [`reth_rpc_eth_types::EthApiBuilderCtx`], adapter for [`FullNodeComponents`]. 
+/// Alias for [`reth_rpc_eth_types::EthApiBuilderCtx`], adapter for [`RpcNodeCore`]. pub type EthApiBuilderCtx = reth_rpc_eth_types::EthApiBuilderCtx< - ::Provider, - ::Pool, - ::Evm, - ::Network, + ::Provider, + ::Pool, + ::Evm, + ::Network, TaskExecutor, - ::Provider, + ::Provider, >; /// A general purpose trait that launches a new node of any kind. diff --git a/crates/node/builder/src/node.rs b/crates/node/builder/src/node.rs index 3e3d5b696..3b2f467d6 100644 --- a/crates/node/builder/src/node.rs +++ b/crates/node/builder/src/node.rs @@ -11,10 +11,10 @@ use reth_node_api::{EngineTypes, FullNodeComponents}; use reth_node_core::{ dirs::{ChainPath, DataDirPath}, node_config::NodeConfig, - rpc::api::EngineApiClient, }; use reth_payload_builder::PayloadBuilderHandle; use reth_provider::ChainSpecProvider; +use reth_rpc_api::EngineApiClient; use reth_rpc_builder::{auth::AuthServerHandle, RpcServerHandle}; use reth_tasks::TaskExecutor; diff --git a/crates/node/builder/src/rpc.rs b/crates/node/builder/src/rpc.rs index 18293118d..4c1ea32d0 100644 --- a/crates/node/builder/src/rpc.rs +++ b/crates/node/builder/src/rpc.rs @@ -14,12 +14,14 @@ use reth_node_api::{ }; use reth_node_core::{ node_config::NodeConfig, - rpc::eth::{EthApiTypes, FullEthApiServer}, version::{CARGO_PKG_VERSION, CLIENT_CODE, NAME_CLIENT, VERGEN_GIT_SHA}, }; use reth_payload_builder::PayloadBuilderHandle; use reth_provider::providers::ProviderNodeTypes; -use reth_rpc::EthApi; +use reth_rpc::{ + eth::{EthApiTypes, FullEthApiServer}, + EthApi, +}; use reth_rpc_api::eth::helpers::AddDevSigners; use reth_rpc_builder::{ auth::{AuthRpcModule, AuthServerHandle}, diff --git a/crates/node/core/Cargo.toml b/crates/node/core/Cargo.toml index 0c9672d17..1c6c9d98c 100644 --- a/crates/node/core/Cargo.toml +++ b/crates/node/core/Cargo.toml @@ -23,8 +23,6 @@ reth-network-p2p.workspace = true reth-rpc-eth-types.workspace = true reth-rpc-server-types.workspace = true reth-rpc-types-compat.workspace = true -reth-rpc-api = { workspace = true, features = ["client"] } -reth-rpc-eth-api = { workspace = true, features = ["client"] } reth-transaction-pool.workspace = true reth-tracing.workspace = true reth-config.workspace = true @@ -38,7 +36,7 @@ reth-stages-types.workspace = true # ethereum alloy-primitives.workspace = true -alloy-rpc-types-engine = { workspace = true, features = ["jwt"] } +alloy-rpc-types-engine = { workspace = true, features = ["std", "jwt"] } alloy-consensus.workspace = true alloy-eips.workspace = true diff --git a/crates/node/core/src/lib.rs b/crates/node/core/src/lib.rs index 6af822e22..a69a255a3 100644 --- a/crates/node/core/src/lib.rs +++ b/crates/node/core/src/lib.rs @@ -22,15 +22,6 @@ pub mod primitives { /// Re-export of `reth_rpc_*` crates. pub mod rpc { - /// Re-exported from `reth_rpc_api`. - pub mod api { - pub use reth_rpc_api::*; - } - /// Re-exported from `reth_rpc::eth`. - pub mod eth { - pub use reth_rpc_eth_api::*; - } - /// Re-exported from `reth_rpc::rpc`. 
pub mod result { pub use reth_rpc_server_types::result::*; diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index 04774a465..a1a9f6e8f 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -17,12 +17,12 @@ use op_alloy_network::Optimism; use reth_chainspec::EthereumHardforks; use reth_evm::ConfigureEvm; use reth_network_api::NetworkInfo; -use reth_node_api::{FullNodeComponents, FullNodeTypes, NodeTypes}; +use reth_node_api::{FullNodeComponents, NodeTypes}; use reth_node_builder::EthApiBuilderCtx; use reth_primitives::Header; use reth_provider::{ - BlockIdReader, BlockNumReader, BlockReaderIdExt, ChainSpecProvider, HeaderProvider, - StageCheckpointReader, StateProviderFactory, + BlockIdReader, BlockNumReader, BlockReaderIdExt, CanonStateSubscriptions, ChainSpecProvider, + HeaderProvider, StageCheckpointReader, StateProviderFactory, }; use reth_rpc::eth::{core::EthApiInner, DevSigner}; use reth_rpc_eth_api::{ @@ -30,7 +30,7 @@ use reth_rpc_eth_api::{ AddDevSigners, EthApiSpec, EthFees, EthSigner, EthState, LoadBlock, LoadFee, LoadState, SpawnBlocking, Trace, }, - EthApiTypes, + EthApiTypes, RpcNodeCore, }; use reth_rpc_eth_types::{EthStateCache, FeeHistoryCache, GasPriceOracle}; use reth_tasks::{ @@ -43,10 +43,10 @@ use crate::{OpEthApiError, SequencerClient}; /// Adapter for [`EthApiInner`], which holds all the data required to serve core `eth_` API. pub type EthApiNodeBackend = EthApiInner< - ::Provider, - ::Pool, - ::Network, - ::Evm, + ::Provider, + ::Pool, + ::Network, + ::Evm, >; /// OP-Reth `Eth` API implementation. @@ -59,8 +59,8 @@ pub type EthApiNodeBackend = EthApiInner< /// /// This type implements the [`FullEthApi`](reth_rpc_eth_api::helpers::FullEthApi) by implemented /// all the `Eth` helper traits and prerequisite traits. -#[derive(Deref)] -pub struct OpEthApi { +#[derive(Deref, Clone)] +pub struct OpEthApi { /// Gateway to node's core components. #[deref] inner: Arc>, @@ -69,7 +69,12 @@ pub struct OpEthApi { sequencer_client: Option, } -impl OpEthApi { +impl OpEthApi +where + N: RpcNodeCore< + Provider: BlockReaderIdExt + ChainSpecProvider + CanonStateSubscriptions + Clone + 'static, + >, +{ /// Creates a new instance for given context. 
pub fn new(ctx: &EthApiBuilderCtx, sequencer_http: Option) -> Self { let blocking_task_pool = @@ -98,7 +103,7 @@ impl OpEthApi { impl EthApiTypes for OpEthApi where Self: Send + Sync, - N: FullNodeComponents, + N: RpcNodeCore, { type Error = OpEthApiError; type NetworkTypes = Optimism; @@ -248,17 +253,8 @@ where } } -impl fmt::Debug for OpEthApi { +impl fmt::Debug for OpEthApi { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("OpEthApi").finish_non_exhaustive() } } - -impl Clone for OpEthApi -where - N: FullNodeComponents, -{ - fn clone(&self) -> Self { - Self { inner: self.inner.clone(), sequencer_client: self.sequencer_client.clone() } - } -} diff --git a/crates/rpc/rpc-eth-api/Cargo.toml b/crates/rpc/rpc-eth-api/Cargo.toml index 9d0f6cfd8..edfd57b20 100644 --- a/crates/rpc/rpc-eth-api/Cargo.toml +++ b/crates/rpc/rpc-eth-api/Cargo.toml @@ -30,6 +30,7 @@ reth-rpc-eth-types.workspace = true reth-rpc-server-types.workspace = true reth-network-api.workspace = true reth-trie.workspace = true +reth-node-api.workspace = true # ethereum alloy-eips.workspace = true diff --git a/crates/rpc/rpc-eth-api/src/lib.rs b/crates/rpc/rpc-eth-api/src/lib.rs index 849c8e2e4..bc46d526c 100644 --- a/crates/rpc/rpc-eth-api/src/lib.rs +++ b/crates/rpc/rpc-eth-api/src/lib.rs @@ -16,6 +16,7 @@ pub mod bundle; pub mod core; pub mod filter; pub mod helpers; +pub mod node; pub mod pubsub; pub mod types; @@ -25,6 +26,7 @@ pub use bundle::{EthBundleApiServer, EthCallBundleApiServer}; pub use core::{EthApiServer, FullEthApiServer}; pub use filter::EthFilterApiServer; pub use helpers::error::{AsEthApiError, FromEthApiError, FromEvmError, IntoEthApiError}; +pub use node::RpcNodeCore; pub use pubsub::EthPubSubApiServer; pub use types::{EthApiTypes, FullEthApiTypes, RpcBlock, RpcReceipt, RpcTransaction}; diff --git a/crates/rpc/rpc-eth-api/src/node.rs b/crates/rpc/rpc-eth-api/src/node.rs new file mode 100644 index 000000000..8488677e3 --- /dev/null +++ b/crates/rpc/rpc-eth-api/src/node.rs @@ -0,0 +1,58 @@ +//! Helper trait for interfacing with [`FullNodeComponents`]. + +use reth_node_api::FullNodeComponents; + +/// Helper trait to relax trait bounds on [`FullNodeComponents`]. +/// +/// Helpful when defining types that would otherwise have a generic `N: FullNodeComponents`. Using +/// `N: RpcNodeCore` instead, allows access to all the associated types on [`FullNodeComponents`] +/// that are used in RPC, but with more flexibility since they have no trait bounds (asides auto +/// traits). +pub trait RpcNodeCore: Clone { + /// The provider type used to interact with the node. + type Provider: Send + Sync + Clone + Unpin; + /// The transaction pool of the node. + type Pool: Send + Sync + Clone + Unpin; + /// The node's EVM configuration, defining settings for the Ethereum Virtual Machine. + type Evm: Send + Sync + Clone + Unpin; + /// Network API. + type Network: Send + Sync + Clone; + + /// Returns the transaction pool of the node. + fn pool(&self) -> &Self::Pool; + + /// Returns the node's evm config. + fn evm_config(&self) -> &Self::Evm; + + /// Returns the handle to the network + fn network(&self) -> &Self::Network; + + /// Returns the provider of the node. 
+ fn provider(&self) -> &Self::Provider; +} + +impl RpcNodeCore for T +where + T: FullNodeComponents, +{ + type Provider = T::Provider; + type Pool = T::Pool; + type Network = ::Network; + type Evm = ::Evm; + + fn pool(&self) -> &Self::Pool { + FullNodeComponents::pool(self) + } + + fn evm_config(&self) -> &Self::Evm { + FullNodeComponents::evm_config(self) + } + + fn network(&self) -> &Self::Network { + FullNodeComponents::network(self) + } + + fn provider(&self) -> &Self::Provider { + FullNodeComponents::provider(self) + } +} diff --git a/crates/rpc/rpc/Cargo.toml b/crates/rpc/rpc/Cargo.toml index fe150e36e..dab86ac25 100644 --- a/crates/rpc/rpc/Cargo.toml +++ b/crates/rpc/rpc/Cargo.toml @@ -31,7 +31,6 @@ reth-network-peers = { workspace = true, features = ["secp256k1"] } reth-evm.workspace = true reth-rpc-eth-types.workspace = true reth-rpc-server-types.workspace = true -reth-node-api.workspace = true reth-network-types.workspace = true reth-trie.workspace = true diff --git a/crates/rpc/rpc/src/eth/filter.rs b/crates/rpc/rpc/src/eth/filter.rs index 24058da17..5ef224609 100644 --- a/crates/rpc/rpc/src/eth/filter.rs +++ b/crates/rpc/rpc/src/eth/filter.rs @@ -17,10 +17,11 @@ use alloy_rpc_types::{ use async_trait::async_trait; use jsonrpsee::{core::RpcResult, server::IdProvider}; use reth_chainspec::ChainInfo; -use reth_node_api::EthApiTypes; use reth_primitives::{Receipt, SealedBlockWithSenders, TransactionSignedEcRecovered}; use reth_provider::{BlockIdReader, BlockReader, EvmEnvProvider, ProviderError}; -use reth_rpc_eth_api::{EthFilterApiServer, FullEthApiTypes, RpcTransaction, TransactionCompat}; +use reth_rpc_eth_api::{ + EthApiTypes, EthFilterApiServer, FullEthApiTypes, RpcTransaction, TransactionCompat, +}; use reth_rpc_eth_types::{ logs_utils::{self, append_matching_block_logs, ProviderOrBlock}, EthApiError, EthFilterConfig, EthStateCache, EthSubscriptionIdProvider, diff --git a/crates/rpc/rpc/src/eth/mod.rs b/crates/rpc/rpc/src/eth/mod.rs index 99919110d..4d1833add 100644 --- a/crates/rpc/rpc/src/eth/mod.rs +++ b/crates/rpc/rpc/src/eth/mod.rs @@ -15,4 +15,4 @@ pub use pubsub::EthPubSub; pub use helpers::{signer::DevSigner, types::EthTxBuilder}; -pub use reth_rpc_eth_api::EthApiServer; +pub use reth_rpc_eth_api::{EthApiServer, EthApiTypes, FullEthApiServer, RpcNodeCore}; From a06c3af8320caaa95adeffbca38b9ed945019557 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Sat, 26 Oct 2024 21:03:22 +0800 Subject: [PATCH 275/425] chore(rpc): Remove provider and network trait methods from `EthApiSpec` (#12050) --- crates/optimism/rpc/src/eth/mod.rs | 41 ++++++++++++++++------ crates/rpc/rpc-builder/src/lib.rs | 5 +-- crates/rpc/rpc-eth-api/src/helpers/spec.rs | 20 +++++------ crates/rpc/rpc/src/eth/core.rs | 31 +++++++++++++++- crates/rpc/rpc/src/eth/helpers/spec.rs | 14 ++------ 5 files changed, 75 insertions(+), 36 deletions(-) diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index a1a9f6e8f..ccff47789 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -114,24 +114,43 @@ where } } -impl EthApiSpec for OpEthApi +impl RpcNodeCore for OpEthApi where - Self: Send + Sync, - N: FullNodeComponents>, + Self: Clone, + N: RpcNodeCore, { - #[inline] - fn provider( - &self, - ) -> impl ChainSpecProvider + BlockNumReader + StageCheckpointReader - { - self.inner.provider() + type Provider = N::Provider; + type Pool = N::Pool; + type Network = ::Network; + type Evm = ::Evm; + + fn pool(&self) -> &Self::Pool { + self.inner.pool() 
} - #[inline] - fn network(&self) -> impl NetworkInfo { + fn evm_config(&self) -> &Self::Evm { + self.inner.evm_config() + } + + fn network(&self) -> &Self::Network { self.inner.network() } + fn provider(&self) -> &Self::Provider { + self.inner.provider() + } +} + +impl EthApiSpec for OpEthApi +where + Self: Send + Sync, + N: RpcNodeCore< + Provider: ChainSpecProvider + + BlockNumReader + + StageCheckpointReader, + Network: NetworkInfo, + >, +{ #[inline] fn starting_block(&self) -> U256 { self.inner.starting_block() diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index ceafe2065..fc98cd6ff 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -36,7 +36,7 @@ //! block_executor: BlockExecutor, //! ) where //! Provider: FullRpcProvider + AccountReader + ChangeSetReader, -//! Pool: TransactionPool + 'static, +//! Pool: TransactionPool + Unpin + 'static, //! Network: NetworkInfo + Peers + Clone + 'static, //! Events: CanonStateSubscriptions + Clone + 'static, //! EvmConfig: ConfigureEvm
, @@ -85,6 +85,7 @@ //! use reth_tasks::TokioTaskExecutor; //! use reth_transaction_pool::TransactionPool; //! use tokio::try_join; +//! //! pub async fn launch< //! Provider, //! Pool, @@ -104,7 +105,7 @@ //! block_executor: BlockExecutor, //! ) where //! Provider: FullRpcProvider + AccountReader + ChangeSetReader, -//! Pool: TransactionPool + 'static, +//! Pool: TransactionPool + Unpin + 'static, //! Network: NetworkInfo + Peers + Clone + 'static, //! Events: CanonStateSubscriptions + Clone + 'static, //! EngineApi: EngineApiServer, diff --git a/crates/rpc/rpc-eth-api/src/helpers/spec.rs b/crates/rpc/rpc-eth-api/src/helpers/spec.rs index 5976cf29c..5aa0509e8 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/spec.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/spec.rs @@ -8,21 +8,21 @@ use reth_errors::{RethError, RethResult}; use reth_network_api::NetworkInfo; use reth_provider::{BlockNumReader, ChainSpecProvider, StageCheckpointReader}; -use super::EthSigner; +use crate::{helpers::EthSigner, RpcNodeCore}; /// `Eth` API trait. /// /// Defines core functionality of the `eth` API implementation. #[auto_impl::auto_impl(&, Arc)] -pub trait EthApiSpec: Send + Sync { - /// Returns a handle for reading data from disk. - fn provider( - &self, - ) -> impl ChainSpecProvider + BlockNumReader + StageCheckpointReader; - - /// Returns a handle for reading network data summary. - fn network(&self) -> impl NetworkInfo; - +pub trait EthApiSpec: + RpcNodeCore< + Provider: ChainSpecProvider + + BlockNumReader + + StageCheckpointReader, + Network: NetworkInfo, + > + Send + + Sync +{ /// Returns the block node is started on. fn starting_block(&self) -> U256; diff --git a/crates/rpc/rpc/src/eth/core.rs b/crates/rpc/rpc/src/eth/core.rs index 21787873e..3fca76e8b 100644 --- a/crates/rpc/rpc/src/eth/core.rs +++ b/crates/rpc/rpc/src/eth/core.rs @@ -10,7 +10,7 @@ use reth_primitives::BlockNumberOrTag; use reth_provider::{BlockReaderIdExt, CanonStateSubscriptions, ChainSpecProvider}; use reth_rpc_eth_api::{ helpers::{EthSigner, SpawnBlocking}, - EthApiTypes, + EthApiTypes, RpcNodeCore, }; use reth_rpc_eth_types::{ EthApiBuilderCtx, EthApiError, EthStateCache, FeeHistoryCache, GasCap, GasPriceOracle, @@ -140,6 +140,35 @@ where } } +impl RpcNodeCore for EthApi +where + Provider: Send + Sync + Clone + Unpin, + Pool: Send + Sync + Clone + Unpin, + Network: Send + Sync + Clone, + EvmConfig: Send + Sync + Clone + Unpin, +{ + type Provider = Provider; + type Pool = Pool; + type Network = Network; + type Evm = EvmConfig; + + fn pool(&self) -> &Self::Pool { + self.inner.pool() + } + + fn evm_config(&self) -> &Self::Evm { + self.inner.evm_config() + } + + fn network(&self) -> &Self::Network { + self.inner.network() + } + + fn provider(&self) -> &Self::Provider { + self.inner.provider() + } +} + impl std::fmt::Debug for EthApi { diff --git a/crates/rpc/rpc/src/eth/helpers/spec.rs b/crates/rpc/rpc/src/eth/helpers/spec.rs index 92445bf5e..c5c8d54c6 100644 --- a/crates/rpc/rpc/src/eth/helpers/spec.rs +++ b/crates/rpc/rpc/src/eth/helpers/spec.rs @@ -2,13 +2,14 @@ use alloy_primitives::U256; use reth_chainspec::EthereumHardforks; use reth_network_api::NetworkInfo; use reth_provider::{BlockNumReader, ChainSpecProvider, StageCheckpointReader}; -use reth_rpc_eth_api::helpers::EthApiSpec; +use reth_rpc_eth_api::{helpers::EthApiSpec, RpcNodeCore}; use reth_transaction_pool::TransactionPool; use crate::EthApi; impl EthApiSpec for EthApi where + Self: RpcNodeCore, Pool: TransactionPool + 'static, Provider: ChainSpecProvider + BlockNumReader @@ 
-17,17 +18,6 @@ where Network: NetworkInfo + 'static, EvmConfig: Send + Sync, { - fn provider( - &self, - ) -> impl ChainSpecProvider + BlockNumReader + StageCheckpointReader - { - self.inner.provider() - } - - fn network(&self) -> impl NetworkInfo { - self.inner.network() - } - fn starting_block(&self) -> U256 { self.inner.starting_block() } From 019f347385741d75638effee1fb0b640ae4a363b Mon Sep 17 00:00:00 2001 From: Yu Zeng Date: Sat, 26 Oct 2024 23:04:17 +0800 Subject: [PATCH 276/425] chore: move optimism execution types test to optimism crate (#12026) --- .../execution-types/src/execution_outcome.rs | 56 +-- crates/optimism/evm/src/lib.rs | 414 +++++++++++++++++- 2 files changed, 423 insertions(+), 47 deletions(-) diff --git a/crates/evm/execution-types/src/execution_outcome.rs b/crates/evm/execution-types/src/execution_outcome.rs index 0fde01547..026e6b37c 100644 --- a/crates/evm/execution-types/src/execution_outcome.rs +++ b/crates/evm/execution-types/src/execution_outcome.rs @@ -168,7 +168,7 @@ impl ExecutionOutcome { } /// Transform block number to the index of block. - fn block_number_to_index(&self, block_number: BlockNumber) -> Option { + pub fn block_number_to_index(&self, block_number: BlockNumber) -> Option { if self.first_block > block_number { return None } @@ -366,12 +366,15 @@ impl From<(BlockExecutionOutput, BlockNumber)> for ExecutionOutcome { #[cfg(test)] mod tests { use super::*; - use alloy_eips::eip7685::Requests; - use alloy_primitives::{bytes, Address, LogData, B256}; - use reth_primitives::{Receipts, TxType}; - use std::collections::HashMap; + #[cfg(not(feature = "optimism"))] + use alloy_primitives::bytes; + use alloy_primitives::{Address, B256}; + use reth_primitives::Receipts; + #[cfg(not(feature = "optimism"))] + use reth_primitives::{LogData, TxType}; #[test] + #[cfg(not(feature = "optimism"))] fn test_initialisation() { // Create a new BundleState object with initial data let bundle = BundleState::new( @@ -387,10 +390,6 @@ mod tests { cumulative_gas_used: 46913, logs: vec![], success: true, - #[cfg(feature = "optimism")] - deposit_nonce: Some(18), - #[cfg(feature = "optimism")] - deposit_receipt_version: Some(34), })]], }; @@ -444,6 +443,7 @@ mod tests { } #[test] + #[cfg(not(feature = "optimism"))] fn test_block_number_to_index() { // Create a Receipts object with a vector of receipt vectors let receipts = Receipts { @@ -452,10 +452,6 @@ mod tests { cumulative_gas_used: 46913, logs: vec![], success: true, - #[cfg(feature = "optimism")] - deposit_nonce: Some(18), - #[cfg(feature = "optimism")] - deposit_receipt_version: Some(34), })]], }; @@ -482,6 +478,7 @@ mod tests { } #[test] + #[cfg(not(feature = "optimism"))] fn test_get_logs() { // Create a Receipts object with a vector of receipt vectors let receipts = Receipts { @@ -490,10 +487,6 @@ mod tests { cumulative_gas_used: 46913, logs: vec![Log::::default()], success: true, - #[cfg(feature = "optimism")] - deposit_nonce: Some(18), - #[cfg(feature = "optimism")] - deposit_receipt_version: Some(34), })]], }; @@ -517,6 +510,7 @@ mod tests { } #[test] + #[cfg(not(feature = "optimism"))] fn test_receipts_by_block() { // Create a Receipts object with a vector of receipt vectors let receipts = Receipts { @@ -525,10 +519,6 @@ mod tests { cumulative_gas_used: 46913, logs: vec![Log::::default()], success: true, - #[cfg(feature = "optimism")] - deposit_nonce: Some(18), - #[cfg(feature = "optimism")] - deposit_receipt_version: Some(34), })]], }; @@ -555,15 +545,12 @@ mod tests { cumulative_gas_used: 46913, logs: 
vec![Log::::default()], success: true, - #[cfg(feature = "optimism")] - deposit_nonce: Some(18), - #[cfg(feature = "optimism")] - deposit_receipt_version: Some(34), })] ); } #[test] + #[cfg(not(feature = "optimism"))] fn test_receipts_len() { // Create a Receipts object with a vector of receipt vectors let receipts = Receipts { @@ -572,10 +559,6 @@ mod tests { cumulative_gas_used: 46913, logs: vec![Log::::default()], success: true, - #[cfg(feature = "optimism")] - deposit_nonce: Some(18), - #[cfg(feature = "optimism")] - deposit_receipt_version: Some(34), })]], }; @@ -616,6 +599,7 @@ mod tests { } #[test] + #[cfg(not(feature = "optimism"))] fn test_revert_to() { // Create a random receipt object let receipt = Receipt { @@ -623,10 +607,6 @@ mod tests { cumulative_gas_used: 46913, logs: vec![], success: true, - #[cfg(feature = "optimism")] - deposit_nonce: Some(18), - #[cfg(feature = "optimism")] - deposit_receipt_version: Some(34), }; // Create a Receipts object with a vector of receipt vectors @@ -668,6 +648,7 @@ mod tests { } #[test] + #[cfg(not(feature = "optimism"))] fn test_extend_execution_outcome() { // Create a Receipt object with specific attributes. let receipt = Receipt { @@ -675,10 +656,6 @@ mod tests { cumulative_gas_used: 46913, logs: vec![], success: true, - #[cfg(feature = "optimism")] - deposit_nonce: Some(18), - #[cfg(feature = "optimism")] - deposit_receipt_version: Some(34), }; // Create a Receipts object containing the receipt. @@ -715,6 +692,7 @@ mod tests { } #[test] + #[cfg(not(feature = "optimism"))] fn test_split_at_execution_outcome() { // Create a random receipt object let receipt = Receipt { @@ -722,10 +700,6 @@ mod tests { cumulative_gas_used: 46913, logs: vec![], success: true, - #[cfg(feature = "optimism")] - deposit_nonce: Some(18), - #[cfg(feature = "optimism")] - deposit_receipt_version: Some(34), }; // Create a Receipts object with a vector of receipt vectors diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index 60aa9f7db..bc46f3ea9 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -206,23 +206,30 @@ impl ConfigureEvm for OptimismEvmConfig { mod tests { use super::*; use alloy_consensus::constants::KECCAK_EMPTY; + use alloy_eips::eip7685::Requests; use alloy_genesis::Genesis; - use alloy_primitives::{B256, U256}; + use alloy_primitives::{bytes, Address, LogData, B256, U256}; use reth_chainspec::ChainSpec; use reth_evm::execute::ProviderError; - use reth_execution_types::{Chain, ExecutionOutcome}; + use reth_execution_types::{ + AccountRevertInit, BundleStateInit, Chain, ExecutionOutcome, RevertsInit, + }; use reth_optimism_chainspec::BASE_MAINNET; use reth_primitives::{ - revm_primitives::{BlockEnv, CfgEnv, SpecId}, - Header, Receipt, Receipts, SealedBlockWithSenders, TxType, + revm_primitives::{AccountInfo, BlockEnv, CfgEnv, SpecId}, + Account, Header, Log, Receipt, Receipts, SealedBlockWithSenders, TxType, }; + use reth_revm::{ - db::{CacheDB, EmptyDBTyped}, + db::{BundleState, CacheDB, EmptyDBTyped}, inspectors::NoOpInspector, JournaledState, }; use revm_primitives::{CfgEnvWithHandlerCfg, EnvWithHandlerCfg, HandlerCfg}; - use std::{collections::HashSet, sync::Arc}; + use std::{ + collections::{HashMap, HashSet}, + sync::Arc, + }; fn test_evm_config() -> OptimismEvmConfig { OptimismEvmConfig::new(BASE_MAINNET.clone()) @@ -620,4 +627,399 @@ mod tests { // Assert that the execution outcome at the tip block contains the whole execution outcome assert_eq!(chain.execution_outcome_at_block(11), 
Some(execution_outcome)); } + + #[test] + fn test_initialisation() { + // Create a new BundleState object with initial data + let bundle = BundleState::new( + vec![(Address::new([2; 20]), None, Some(AccountInfo::default()), HashMap::default())], + vec![vec![(Address::new([2; 20]), None, vec![])]], + vec![], + ); + + // Create a Receipts object with a vector of receipt vectors + let receipts = Receipts { + receipt_vec: vec![vec![Some(Receipt { + tx_type: TxType::Legacy, + cumulative_gas_used: 46913, + logs: vec![], + success: true, + deposit_nonce: Some(18), + deposit_receipt_version: Some(34), + })]], + }; + + // Create a Requests object with a vector of requests + let requests = vec![Requests::new(vec![bytes!("dead"), bytes!("beef"), bytes!("beebee")])]; + + // Define the first block number + let first_block = 123; + + // Create a ExecutionOutcome object with the created bundle, receipts, requests, and + // first_block + let exec_res = ExecutionOutcome { + bundle: bundle.clone(), + receipts: receipts.clone(), + requests: requests.clone(), + first_block, + }; + + // Assert that creating a new ExecutionOutcome using the constructor matches exec_res + assert_eq!( + ExecutionOutcome::new(bundle, receipts.clone(), first_block, requests.clone()), + exec_res + ); + + // Create a BundleStateInit object and insert initial data + let mut state_init: BundleStateInit = HashMap::default(); + state_init + .insert(Address::new([2; 20]), (None, Some(Account::default()), HashMap::default())); + + // Create a HashMap for account reverts and insert initial data + let mut revert_inner: HashMap = HashMap::default(); + revert_inner.insert(Address::new([2; 20]), (None, vec![])); + + // Create a RevertsInit object and insert the revert_inner data + let mut revert_init: RevertsInit = HashMap::default(); + revert_init.insert(123, revert_inner); + + // Assert that creating a new ExecutionOutcome using the new_init method matches + // exec_res + assert_eq!( + ExecutionOutcome::new_init( + state_init, + revert_init, + vec![], + receipts, + first_block, + requests, + ), + exec_res + ); + } + + #[test] + fn test_block_number_to_index() { + // Create a Receipts object with a vector of receipt vectors + let receipts = Receipts { + receipt_vec: vec![vec![Some(Receipt { + tx_type: TxType::Legacy, + cumulative_gas_used: 46913, + logs: vec![], + success: true, + deposit_nonce: Some(18), + deposit_receipt_version: Some(34), + })]], + }; + + // Define the first block number + let first_block = 123; + + // Create a ExecutionOutcome object with the created bundle, receipts, requests, and + // first_block + let exec_res = ExecutionOutcome { + bundle: Default::default(), + receipts, + requests: vec![], + first_block, + }; + + // Test before the first block + assert_eq!(exec_res.block_number_to_index(12), None); + + // Test after after the first block but index larger than receipts length + assert_eq!(exec_res.block_number_to_index(133), None); + + // Test after the first block + assert_eq!(exec_res.block_number_to_index(123), Some(0)); + } + + #[test] + fn test_get_logs() { + // Create a Receipts object with a vector of receipt vectors + let receipts = Receipts { + receipt_vec: vec![vec![Some(Receipt { + tx_type: TxType::Legacy, + cumulative_gas_used: 46913, + logs: vec![Log::::default()], + success: true, + deposit_nonce: Some(18), + deposit_receipt_version: Some(34), + })]], + }; + + // Define the first block number + let first_block = 123; + + // Create a ExecutionOutcome object with the created bundle, receipts, requests, and 
+ // first_block + let exec_res = ExecutionOutcome { + bundle: Default::default(), + receipts, + requests: vec![], + first_block, + }; + + // Get logs for block number 123 + let logs: Vec<&Log> = exec_res.logs(123).unwrap().collect(); + + // Assert that the logs match the expected logs + assert_eq!(logs, vec![&Log::::default()]); + } + + #[test] + fn test_receipts_by_block() { + // Create a Receipts object with a vector of receipt vectors + let receipts = Receipts { + receipt_vec: vec![vec![Some(Receipt { + tx_type: TxType::Legacy, + cumulative_gas_used: 46913, + logs: vec![Log::::default()], + success: true, + deposit_nonce: Some(18), + deposit_receipt_version: Some(34), + })]], + }; + + // Define the first block number + let first_block = 123; + + // Create a ExecutionOutcome object with the created bundle, receipts, requests, and + // first_block + let exec_res = ExecutionOutcome { + bundle: Default::default(), // Default value for bundle + receipts, // Include the created receipts + requests: vec![], // Empty vector for requests + first_block, // Set the first block number + }; + + // Get receipts for block number 123 and convert the result into a vector + let receipts_by_block: Vec<_> = exec_res.receipts_by_block(123).iter().collect(); + + // Assert that the receipts for block number 123 match the expected receipts + assert_eq!( + receipts_by_block, + vec![&Some(Receipt { + tx_type: TxType::Legacy, + cumulative_gas_used: 46913, + logs: vec![Log::::default()], + success: true, + deposit_nonce: Some(18), + deposit_receipt_version: Some(34), + })] + ); + } + + #[test] + fn test_receipts_len() { + // Create a Receipts object with a vector of receipt vectors + let receipts = Receipts { + receipt_vec: vec![vec![Some(Receipt { + tx_type: TxType::Legacy, + cumulative_gas_used: 46913, + logs: vec![Log::::default()], + success: true, + deposit_nonce: Some(18), + deposit_receipt_version: Some(34), + })]], + }; + + // Create an empty Receipts object + let receipts_empty = Receipts { receipt_vec: vec![] }; + + // Define the first block number + let first_block = 123; + + // Create a ExecutionOutcome object with the created bundle, receipts, requests, and + // first_block + let exec_res = ExecutionOutcome { + bundle: Default::default(), // Default value for bundle + receipts, // Include the created receipts + requests: vec![], // Empty vector for requests + first_block, // Set the first block number + }; + + // Assert that the length of receipts in exec_res is 1 + assert_eq!(exec_res.len(), 1); + + // Assert that exec_res is not empty + assert!(!exec_res.is_empty()); + + // Create a ExecutionOutcome object with an empty Receipts object + let exec_res_empty_receipts = ExecutionOutcome { + bundle: Default::default(), // Default value for bundle + receipts: receipts_empty, // Include the empty receipts + requests: vec![], // Empty vector for requests + first_block, // Set the first block number + }; + + // Assert that the length of receipts in exec_res_empty_receipts is 0 + assert_eq!(exec_res_empty_receipts.len(), 0); + + // Assert that exec_res_empty_receipts is empty + assert!(exec_res_empty_receipts.is_empty()); + } + + #[test] + fn test_revert_to() { + // Create a random receipt object + let receipt = Receipt { + tx_type: TxType::Legacy, + cumulative_gas_used: 46913, + logs: vec![], + success: true, + deposit_nonce: Some(18), + deposit_receipt_version: Some(34), + }; + + // Create a Receipts object with a vector of receipt vectors + let receipts = Receipts { + receipt_vec: 
vec![vec![Some(receipt.clone())], vec![Some(receipt.clone())]], + }; + + // Define the first block number + let first_block = 123; + + // Create a request. + let request = bytes!("deadbeef"); + + // Create a vector of Requests containing the request. + let requests = + vec![Requests::new(vec![request.clone()]), Requests::new(vec![request.clone()])]; + + // Create a ExecutionOutcome object with the created bundle, receipts, requests, and + // first_block + let mut exec_res = + ExecutionOutcome { bundle: Default::default(), receipts, requests, first_block }; + + // Assert that the revert_to method returns true when reverting to the initial block number. + assert!(exec_res.revert_to(123)); + + // Assert that the receipts are properly cut after reverting to the initial block number. + assert_eq!(exec_res.receipts, Receipts { receipt_vec: vec![vec![Some(receipt)]] }); + + // Assert that the requests are properly cut after reverting to the initial block number. + assert_eq!(exec_res.requests, vec![Requests::new(vec![request])]); + + // Assert that the revert_to method returns false when attempting to revert to a block + // number greater than the initial block number. + assert!(!exec_res.revert_to(133)); + + // Assert that the revert_to method returns false when attempting to revert to a block + // number less than the initial block number. + assert!(!exec_res.revert_to(10)); + } + + #[test] + fn test_extend_execution_outcome() { + // Create a Receipt object with specific attributes. + let receipt = Receipt { + tx_type: TxType::Legacy, + cumulative_gas_used: 46913, + logs: vec![], + success: true, + deposit_nonce: Some(18), + deposit_receipt_version: Some(34), + }; + + // Create a Receipts object containing the receipt. + let receipts = Receipts { receipt_vec: vec![vec![Some(receipt.clone())]] }; + + // Create a request. + let request = bytes!("deadbeef"); + + // Create a vector of Requests containing the request. + let requests = vec![Requests::new(vec![request.clone()])]; + + // Define the initial block number. + let first_block = 123; + + // Create an ExecutionOutcome object. + let mut exec_res = + ExecutionOutcome { bundle: Default::default(), receipts, requests, first_block }; + + // Extend the ExecutionOutcome object by itself. + exec_res.extend(exec_res.clone()); + + // Assert the extended ExecutionOutcome matches the expected outcome. + assert_eq!( + exec_res, + ExecutionOutcome { + bundle: Default::default(), + receipts: Receipts { + receipt_vec: vec![vec![Some(receipt.clone())], vec![Some(receipt)]] + }, + requests: vec![Requests::new(vec![request.clone()]), Requests::new(vec![request])], + first_block: 123, + } + ); + } + + #[test] + fn test_split_at_execution_outcome() { + // Create a random receipt object + let receipt = Receipt { + tx_type: TxType::Legacy, + cumulative_gas_used: 46913, + logs: vec![], + success: true, + deposit_nonce: Some(18), + deposit_receipt_version: Some(34), + }; + + // Create a Receipts object with a vector of receipt vectors + let receipts = Receipts { + receipt_vec: vec![ + vec![Some(receipt.clone())], + vec![Some(receipt.clone())], + vec![Some(receipt.clone())], + ], + }; + + // Define the first block number + let first_block = 123; + + // Create a request. + let request = bytes!("deadbeef"); + + // Create a vector of Requests containing the request. 
+ let requests = vec![ + Requests::new(vec![request.clone()]), + Requests::new(vec![request.clone()]), + Requests::new(vec![request.clone()]), + ]; + + // Create a ExecutionOutcome object with the created bundle, receipts, requests, and + // first_block + let exec_res = + ExecutionOutcome { bundle: Default::default(), receipts, requests, first_block }; + + // Split the ExecutionOutcome at block number 124 + let result = exec_res.clone().split_at(124); + + // Define the expected lower ExecutionOutcome after splitting + let lower_execution_outcome = ExecutionOutcome { + bundle: Default::default(), + receipts: Receipts { receipt_vec: vec![vec![Some(receipt.clone())]] }, + requests: vec![Requests::new(vec![request.clone()])], + first_block, + }; + + // Define the expected higher ExecutionOutcome after splitting + let higher_execution_outcome = ExecutionOutcome { + bundle: Default::default(), + receipts: Receipts { + receipt_vec: vec![vec![Some(receipt.clone())], vec![Some(receipt)]], + }, + requests: vec![Requests::new(vec![request.clone()]), Requests::new(vec![request])], + first_block: 124, + }; + + // Assert that the split result matches the expected lower and higher outcomes + assert_eq!(result.0, Some(lower_execution_outcome)); + assert_eq!(result.1, higher_execution_outcome); + + // Assert that splitting at the first block number returns None for the lower outcome + assert_eq!(exec_res.clone().split_at(123), (None, exec_res)); + } } From d5f5c0f11226879b3c7e46cf7226506871d689da Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Sat, 26 Oct 2024 23:40:46 +0800 Subject: [PATCH 277/425] chore(rpc): set `RpcNodeCore` as supertrait for `LoadState` (#12094) --- crates/optimism/rpc/src/eth/mod.rs | 19 +++-------- crates/rpc/rpc-eth-api/src/helpers/call.rs | 8 ++--- crates/rpc/rpc-eth-api/src/helpers/spec.rs | 11 +++--- crates/rpc/rpc-eth-api/src/helpers/state.rs | 34 ++++++++----------- crates/rpc/rpc-eth-api/src/helpers/trace.rs | 8 ++--- .../rpc-eth-api/src/helpers/transaction.rs | 6 ++-- crates/rpc/rpc-eth-api/src/node.rs | 2 +- crates/rpc/rpc/src/debug.rs | 6 ++-- crates/rpc/rpc/src/eth/helpers/state.rs | 24 +++++-------- 9 files changed, 48 insertions(+), 70 deletions(-) diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index ccff47789..bc1692dff 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -143,7 +143,6 @@ where impl EthApiSpec for OpEthApi where - Self: Send + Sync, N: RpcNodeCore< Provider: ChainSpecProvider + BlockNumReader @@ -213,25 +212,15 @@ where impl LoadState for OpEthApi where - Self: Send + Sync + Clone, - N: FullNodeComponents>, + N: RpcNodeCore< + Provider: StateProviderFactory + ChainSpecProvider, + Pool: TransactionPool, + >, { - #[inline] - fn provider( - &self, - ) -> impl StateProviderFactory + ChainSpecProvider { - self.inner.provider() - } - #[inline] fn cache(&self) -> &EthStateCache { self.inner.cache() } - - #[inline] - fn pool(&self) -> impl TransactionPool { - self.inner.pool() - } } impl EthState for OpEthApi diff --git a/crates/rpc/rpc-eth-api/src/helpers/call.rs b/crates/rpc/rpc-eth-api/src/helpers/call.rs index 1510233c5..89ae1c8ac 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/call.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/call.rs @@ -486,7 +486,7 @@ pub trait Call: LoadState + SpawnBlocking { DB: Database, EthApiError: From, { - let mut evm = self.evm_config().evm_with_env(db, env); + let mut evm = Call::evm_config(self).evm_with_env(db, env); let res = 
evm.transact().map_err(Self::Error::from_evm_err)?; let (_, env) = evm.into_db_and_env_with_handler_cfg(); Ok((res, env)) @@ -504,7 +504,7 @@ pub trait Call: LoadState + SpawnBlocking { DB: Database, EthApiError: From, { - let mut evm = self.evm_config().evm_with_env_and_inspector(db, env, inspector); + let mut evm = Call::evm_config(self).evm_with_env_and_inspector(db, env, inspector); let res = evm.transact().map_err(Self::Error::from_evm_err)?; let (_, env) = evm.into_db_and_env_with_handler_cfg(); Ok((res, env)) @@ -669,7 +669,7 @@ pub trait Call: LoadState + SpawnBlocking { { let env = EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, Default::default()); - let mut evm = self.evm_config().evm_with_env(db, env); + let mut evm = Call::evm_config(self).evm_with_env(db, env); let mut index = 0; for (sender, tx) in transactions { if tx.hash() == target_tx_hash { @@ -677,7 +677,7 @@ pub trait Call: LoadState + SpawnBlocking { break } - self.evm_config().fill_tx_env(evm.tx_mut(), tx, *sender); + Call::evm_config(self).fill_tx_env(evm.tx_mut(), tx, *sender); evm.transact_commit().map_err(Self::Error::from_evm_err)?; index += 1; } diff --git a/crates/rpc/rpc-eth-api/src/helpers/spec.rs b/crates/rpc/rpc-eth-api/src/helpers/spec.rs index 5aa0509e8..a6213017a 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/spec.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/spec.rs @@ -16,12 +16,11 @@ use crate::{helpers::EthSigner, RpcNodeCore}; #[auto_impl::auto_impl(&, Arc)] pub trait EthApiSpec: RpcNodeCore< - Provider: ChainSpecProvider - + BlockNumReader - + StageCheckpointReader, - Network: NetworkInfo, - > + Send - + Sync + Provider: ChainSpecProvider + + BlockNumReader + + StageCheckpointReader, + Network: NetworkInfo, +> { /// Returns the block node is started on. fn starting_block(&self) -> U256; diff --git a/crates/rpc/rpc-eth-api/src/helpers/state.rs b/crates/rpc/rpc-eth-api/src/helpers/state.rs index 080d90dc3..2a15b194f 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/state.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/state.rs @@ -18,7 +18,7 @@ use reth_rpc_types_compat::proof::from_primitive_account_proof; use reth_transaction_pool::TransactionPool; use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, SpecId}; -use crate::{EthApiTypes, FromEthApiError}; +use crate::{EthApiTypes, FromEthApiError, RpcNodeCore}; use super::{EthApiSpec, LoadPendingBlock, SpawnBlocking}; @@ -105,7 +105,8 @@ pub trait EthState: LoadState + SpawnBlocking { let block_id = block_id.unwrap_or_default(); // Check whether the distance to the block exceeds the maximum configured window. - let block_number = LoadState::provider(self) + let block_number = self + .provider() .block_number_for_id(block_id) .map_err(Self::Error::from_eth_err)? .ok_or(EthApiError::HeaderNotFound(block_id))?; @@ -138,9 +139,9 @@ pub trait EthState: LoadState + SpawnBlocking { let Some(account) = account else { return Ok(None) }; // Check whether the distance to the block exceeds the maximum configured proof window. - let chain_info = - LoadState::provider(&this).chain_info().map_err(Self::Error::from_eth_err)?; - let block_number = LoadState::provider(&this) + let chain_info = this.provider().chain_info().map_err(Self::Error::from_eth_err)?; + let block_number = this + .provider() .block_number_for_id(block_id) .map_err(Self::Error::from_eth_err)? .ok_or(EthApiError::HeaderNotFound(block_id))?; @@ -167,24 +168,19 @@ pub trait EthState: LoadState + SpawnBlocking { /// Loads state from database. 
/// /// Behaviour shared by several `eth_` RPC methods, not exclusive to `eth_` state RPC methods. -pub trait LoadState: EthApiTypes { - /// Returns a handle for reading state from database. - /// - /// Data access in default trait method implementations. - fn provider( - &self, - ) -> impl StateProviderFactory + ChainSpecProvider; - +pub trait LoadState: + EthApiTypes + + RpcNodeCore< + Provider: StateProviderFactory + + ChainSpecProvider, + Pool: TransactionPool, + > +{ /// Returns a handle for reading data from memory. /// /// Data access in default (L1) trait method implementations. fn cache(&self) -> &EthStateCache; - /// Returns a handle for reading data from transaction pool. - /// - /// Data access in default trait method implementations. - fn pool(&self) -> impl TransactionPool; - /// Returns the state at the given block number fn state_at_hash(&self, block_hash: B256) -> Result { self.provider().history_by_block_hash(block_hash).map_err(Self::Error::from_eth_err) @@ -266,7 +262,7 @@ pub trait LoadState: EthApiTypes { let (cfg, mut block_env, _) = self.evm_env_at(header.parent_hash.into()).await?; let after_merge = cfg.handler_cfg.spec_id >= SpecId::MERGE; - self.evm_config().fill_block_env(&mut block_env, header, after_merge); + LoadPendingBlock::evm_config(self).fill_block_env(&mut block_env, header, after_merge); Ok((cfg, block_env)) } diff --git a/crates/rpc/rpc-eth-api/src/helpers/trace.rs b/crates/rpc/rpc-eth-api/src/helpers/trace.rs index 64056148c..4f1173484 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/trace.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/trace.rs @@ -2,7 +2,7 @@ use std::sync::Arc; -use crate::FromEvmError; +use crate::{FromEvmError, RpcNodeCore}; use alloy_primitives::B256; use alloy_rpc_types::{BlockId, TransactionInfo}; use futures::Future; @@ -60,7 +60,7 @@ pub trait Trace: LoadState { I: GetInspector, { - let mut evm = self.evm_config().evm_with_env_and_inspector(db, env, inspector); + let mut evm = Trace::evm_config(self).evm_with_env_and_inspector(db, env, inspector); let res = evm.transact().map_err(Self::Error::from_evm_err)?; let (db, env) = evm.into_db_and_env_with_handler_cfg(); Ok((res, env, db)) @@ -202,7 +202,7 @@ pub trait Trace: LoadState { // apply relevant system calls let mut system_caller = SystemCaller::new( Trace::evm_config(&this).clone(), - LoadState::provider(&this).chain_spec(), + RpcNodeCore::provider(&this).chain_spec(), ); system_caller .pre_block_beacon_root_contract_call( @@ -345,7 +345,7 @@ pub trait Trace: LoadState { // apply relevant system calls let mut system_caller = SystemCaller::new( Trace::evm_config(&this).clone(), - LoadState::provider(&this).chain_spec(), + RpcNodeCore::provider(&this).chain_spec(), ); system_caller .pre_block_beacon_root_contract_call( diff --git a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs index 0d16a5c91..a91e4e6fa 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs @@ -21,7 +21,9 @@ use reth_rpc_types_compat::transaction::{from_recovered, from_recovered_with_blo use reth_transaction_pool::{PoolTransaction, TransactionOrigin, TransactionPool}; use std::sync::Arc; -use crate::{FromEthApiError, FullEthApiTypes, IntoEthApiError, RpcReceipt, RpcTransaction}; +use crate::{ + FromEthApiError, FullEthApiTypes, IntoEthApiError, RpcNodeCore, RpcReceipt, RpcTransaction, +}; use super::{ Call, EthApiSpec, EthSigner, LoadBlock, LoadPendingBlock, LoadReceipt, LoadState, 
SpawnBlocking, @@ -235,7 +237,7 @@ pub trait EthTransactions: LoadTransaction { // Check the pool first if include_pending { if let Some(tx) = - LoadState::pool(self).get_transaction_by_sender_and_nonce(sender, nonce) + RpcNodeCore::pool(self).get_transaction_by_sender_and_nonce(sender, nonce) { let transaction = tx.transaction.clone().into_consensus(); return Ok(Some(from_recovered(transaction.into(), self.tx_resp_builder()))); diff --git a/crates/rpc/rpc-eth-api/src/node.rs b/crates/rpc/rpc-eth-api/src/node.rs index 8488677e3..950271dfc 100644 --- a/crates/rpc/rpc-eth-api/src/node.rs +++ b/crates/rpc/rpc-eth-api/src/node.rs @@ -8,7 +8,7 @@ use reth_node_api::FullNodeComponents; /// `N: RpcNodeCore` instead, allows access to all the associated types on [`FullNodeComponents`] /// that are used in RPC, but with more flexibility since they have no trait bounds (asides auto /// traits). -pub trait RpcNodeCore: Clone { +pub trait RpcNodeCore: Clone + Send + Sync { /// The provider type used to interact with the node. type Provider: Send + Sync + Clone + Unpin; /// The transaction pool of the node. diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index 5a20bee97..eeab734a6 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -27,8 +27,8 @@ use reth_provider::{ use reth_revm::database::StateProviderDatabase; use reth_rpc_api::DebugApiServer; use reth_rpc_eth_api::{ - helpers::{Call, EthApiSpec, EthTransactions, LoadState, TraceExt}, - EthApiTypes, FromEthApiError, + helpers::{Call, EthApiSpec, EthTransactions, TraceExt}, + EthApiTypes, FromEthApiError, RpcNodeCore, }; use reth_rpc_eth_types::{EthApiError, StateCacheDb}; use reth_rpc_server_types::{result::internal_rpc_err, ToRpcResult}; @@ -264,7 +264,7 @@ where // apply relevant system calls let mut system_caller = SystemCaller::new( Call::evm_config(this.eth_api()).clone(), - LoadState::provider(this.eth_api()).chain_spec(), + RpcNodeCore::provider(this.eth_api()).chain_spec(), ); system_caller diff --git a/crates/rpc/rpc/src/eth/helpers/state.rs b/crates/rpc/rpc/src/eth/helpers/state.rs index 429a10333..8c958ea2a 100644 --- a/crates/rpc/rpc/src/eth/helpers/state.rs +++ b/crates/rpc/rpc/src/eth/helpers/state.rs @@ -4,7 +4,10 @@ use reth_chainspec::EthereumHardforks; use reth_provider::{ChainSpecProvider, StateProviderFactory}; use reth_transaction_pool::TransactionPool; -use reth_rpc_eth_api::helpers::{EthState, LoadState, SpawnBlocking}; +use reth_rpc_eth_api::{ + helpers::{EthState, LoadState, SpawnBlocking}, + RpcNodeCore, +}; use reth_rpc_eth_types::EthStateCache; use crate::EthApi; @@ -20,26 +23,15 @@ where impl LoadState for EthApi where - Self: Send + Sync, - Provider: StateProviderFactory + ChainSpecProvider, - Pool: TransactionPool, + Self: RpcNodeCore< + Provider: StateProviderFactory + ChainSpecProvider, + Pool: TransactionPool, + >, { - #[inline] - fn provider( - &self, - ) -> impl StateProviderFactory + ChainSpecProvider { - self.inner.provider() - } - #[inline] fn cache(&self) -> &EthStateCache { self.inner.cache() } - - #[inline] - fn pool(&self) -> impl TransactionPool { - self.inner.pool() - } } #[cfg(test)] From 09ebecffc7ffafbe965f6f652032b95e6acac473 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Sat, 26 Oct 2024 18:11:57 +0200 Subject: [PATCH 278/425] prune: add unit tests for `PruneInput` `get_next_tx_num_range` (#12081) --- crates/prune/prune/src/segments/mod.rs | 209 ++++++++++++++++++++++++- 
crates/prune/prune/src/segments/set.rs | 4 +- 2 files changed, 208 insertions(+), 5 deletions(-) diff --git a/crates/prune/prune/src/segments/mod.rs b/crates/prune/prune/src/segments/mod.rs index d1b7819ac..b3b40aab5 100644 --- a/crates/prune/prune/src/segments/mod.rs +++ b/crates/prune/prune/src/segments/mod.rs @@ -23,9 +23,9 @@ pub use user::{ /// A segment represents a pruning of some portion of the data. /// -/// Segments are called from [Pruner](crate::Pruner) with the following lifecycle: +/// Segments are called from [`Pruner`](crate::Pruner) with the following lifecycle: /// 1. Call [`Segment::prune`] with `delete_limit` of [`PruneInput`]. -/// 2. If [`Segment::prune`] returned a [Some] in `checkpoint` of [`SegmentOutput`], call +/// 2. If [`Segment::prune`] returned a [`Some`] in `checkpoint` of [`SegmentOutput`], call /// [`Segment::save_checkpoint`]. /// 3. Subtract `pruned` of [`SegmentOutput`] from `delete_limit` of next [`PruneInput`]. pub trait Segment: Debug + Send + Sync { @@ -88,7 +88,7 @@ impl PruneInput { }, }) // No checkpoint exists, prune from genesis - .unwrap_or(0); + .unwrap_or_default(); let to_tx_number = match provider.block_body_indices(self.to_block)? { Some(body) => { @@ -143,3 +143,206 @@ impl PruneInput { .unwrap_or(0) } } + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::B256; + use reth_provider::{ + providers::BlockchainProvider2, + test_utils::{create_test_provider_factory, MockEthProvider}, + }; + use reth_testing_utils::generators::{self, random_block_range, BlockRangeParams}; + + #[test] + fn test_prune_input_get_next_tx_num_range_no_to_block() { + let input = PruneInput { + previous_checkpoint: None, + to_block: 10, + limiter: PruneLimiter::default(), + }; + + // Default provider with no block corresponding to block 10 + let provider = MockEthProvider::default(); + + // No block body for block 10, expected None + let range = input.get_next_tx_num_range(&provider).expect("Expected range"); + assert!(range.is_none()); + } + + #[test] + fn test_prune_input_get_next_tx_num_range_no_tx() { + let input = PruneInput { + previous_checkpoint: None, + to_block: 10, + limiter: PruneLimiter::default(), + }; + + let mut rng = generators::rng(); + let factory = create_test_provider_factory(); + + // Generate 10 random blocks with no transactions + let blocks = random_block_range( + &mut rng, + 0..=10, + BlockRangeParams { parent: Some(B256::ZERO), tx_count: 0..1, ..Default::default() }, + ); + + // Insert the blocks into the database + let provider_rw = factory.provider_rw().expect("failed to get provider_rw"); + for block in &blocks { + provider_rw + .insert_historical_block( + block.clone().seal_with_senders().expect("failed to seal block with senders"), + ) + .expect("failed to insert block"); + } + provider_rw.commit().expect("failed to commit"); + + // Create a new provider + let provider = BlockchainProvider2::new(factory).unwrap(); + + // Since there are no transactions, expected None + let range = input.get_next_tx_num_range(&provider).expect("Expected range"); + assert!(range.is_none()); + } + + #[test] + fn test_prune_input_get_next_tx_num_range_valid() { + // Create a new prune input + let input = PruneInput { + previous_checkpoint: None, + to_block: 10, + limiter: PruneLimiter::default(), + }; + + let mut rng = generators::rng(); + let factory = create_test_provider_factory(); + + // Generate 10 random blocks with some transactions + let blocks = random_block_range( + &mut rng, + 0..=10, + BlockRangeParams { parent: 
Some(B256::ZERO), tx_count: 0..5, ..Default::default() }, + ); + + // Insert the blocks into the database + let provider_rw = factory.provider_rw().expect("failed to get provider_rw"); + for block in &blocks { + provider_rw + .insert_historical_block( + block.clone().seal_with_senders().expect("failed to seal block with senders"), + ) + .expect("failed to insert block"); + } + provider_rw.commit().expect("failed to commit"); + + // Create a new provider + let provider = BlockchainProvider2::new(factory).unwrap(); + + // Get the next tx number range + let range = input.get_next_tx_num_range(&provider).expect("Expected range").unwrap(); + + // Calculate the total number of transactions + let num_txs = + blocks.iter().map(|block| block.body.transactions().count() as u64).sum::(); + + assert_eq!(range, 0..=num_txs - 1); + } + + #[test] + fn test_prune_input_get_next_tx_checkpoint_without_tx_number() { + // Create a prune input with a previous checkpoint without a tx number (unexpected) + let input = PruneInput { + previous_checkpoint: Some(PruneCheckpoint { + block_number: Some(5), + tx_number: None, + prune_mode: PruneMode::Full, + }), + to_block: 10, + limiter: PruneLimiter::default(), + }; + + let mut rng = generators::rng(); + let factory = create_test_provider_factory(); + + // Generate 10 random blocks + let blocks = random_block_range( + &mut rng, + 0..=10, + BlockRangeParams { parent: Some(B256::ZERO), tx_count: 0..5, ..Default::default() }, + ); + + // Insert the blocks into the database + let provider_rw = factory.provider_rw().expect("failed to get provider_rw"); + for block in &blocks { + provider_rw + .insert_historical_block( + block.clone().seal_with_senders().expect("failed to seal block with senders"), + ) + .expect("failed to insert block"); + } + provider_rw.commit().expect("failed to commit"); + + // Create a new provider + let provider = BlockchainProvider2::new(factory).unwrap(); + + // Fetch the range and check if it is correct + let range = input.get_next_tx_num_range(&provider).expect("Expected range").unwrap(); + + // Calculate the total number of transactions + let num_txs = + blocks.iter().map(|block| block.body.transactions().count() as u64).sum::(); + + assert_eq!(range, 0..=num_txs - 1,); + } + + #[test] + fn test_prune_input_get_next_tx_empty_range() { + // Create a new provider via factory + let mut rng = generators::rng(); + let factory = create_test_provider_factory(); + + // Generate 10 random blocks + let blocks = random_block_range( + &mut rng, + 0..=10, + BlockRangeParams { parent: Some(B256::ZERO), tx_count: 0..5, ..Default::default() }, + ); + + // Insert the blocks into the database + let provider_rw = factory.provider_rw().expect("failed to get provider_rw"); + for block in &blocks { + provider_rw + .insert_historical_block( + block.clone().seal_with_senders().expect("failed to seal block with senders"), + ) + .expect("failed to insert block"); + } + provider_rw.commit().expect("failed to commit"); + + // Create a new provider + let provider = BlockchainProvider2::new(factory).unwrap(); + + // Get the last tx number + // Calculate the total number of transactions + let num_txs = + blocks.iter().map(|block| block.body.transactions().count() as u64).sum::(); + let max_range = num_txs - 1; + + // Create a prune input with a previous checkpoint that is the last tx number + let input = PruneInput { + previous_checkpoint: Some(PruneCheckpoint { + block_number: Some(5), + tx_number: Some(max_range), + prune_mode: PruneMode::Full, + }), + to_block: 10, + 
limiter: PruneLimiter::default(), + }; + + // We expect an empty range since the previous checkpoint is the last tx number + let range = input.get_next_tx_num_range(&provider).expect("Expected range"); + assert!(range.is_none()); + } +} diff --git a/crates/prune/prune/src/segments/set.rs b/crates/prune/prune/src/segments/set.rs index 710b2b721..23d03345b 100644 --- a/crates/prune/prune/src/segments/set.rs +++ b/crates/prune/prune/src/segments/set.rs @@ -11,7 +11,7 @@ use reth_prune_types::PruneModes; use super::{StaticFileHeaders, StaticFileReceipts, StaticFileTransactions}; -/// Collection of [Segment]. Thread-safe, allocated on the heap. +/// Collection of [`Segment`]. Thread-safe, allocated on the heap. #[derive(Debug)] pub struct SegmentSet { inner: Vec>>, @@ -23,7 +23,7 @@ impl SegmentSet { Self::default() } - /// Adds new [Segment] to collection. + /// Adds new [`Segment`] to collection. pub fn segment + 'static>(mut self, segment: S) -> Self { self.inner.push(Box::new(segment)); self From f616de6d94670687aedfd46e43fb991041cd5936 Mon Sep 17 00:00:00 2001 From: Ryan Schneider Date: Sat, 26 Oct 2024 09:15:08 -0700 Subject: [PATCH 279/425] feat(rpc): Start to implement flashbots_validateBuilderSubmissionV3 (#12061) --- Cargo.lock | 1 + book/cli/reth/node.md | 4 +- crates/rpc/rpc-api/src/validation.rs | 9 ++- crates/rpc/rpc-builder/src/lib.rs | 10 ++- crates/rpc/rpc-server-types/src/module.rs | 3 + crates/rpc/rpc/Cargo.toml | 1 + crates/rpc/rpc/src/lib.rs | 3 + crates/rpc/rpc/src/validation.rs | 94 +++++++++++++++++++++++ 8 files changed, 121 insertions(+), 4 deletions(-) create mode 100644 crates/rpc/rpc/src/validation.rs diff --git a/Cargo.lock b/Cargo.lock index f4dc7dae4..b1ca1c925 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8598,6 +8598,7 @@ dependencies = [ "alloy-rlp", "alloy-rpc-types", "alloy-rpc-types-admin", + "alloy-rpc-types-beacon", "alloy-rpc-types-debug", "alloy-rpc-types-eth", "alloy-rpc-types-mev", diff --git a/book/cli/reth/node.md b/book/cli/reth/node.md index 4cd55db1f..a3ff8f6a5 100644 --- a/book/cli/reth/node.md +++ b/book/cli/reth/node.md @@ -245,7 +245,7 @@ RPC: --http.api Rpc Modules to be configured for the HTTP server - [possible values: admin, debug, eth, net, trace, txpool, web3, rpc, reth, ots] + [possible values: admin, debug, eth, net, trace, txpool, web3, rpc, reth, ots, flashbots] --http.corsdomain Http Corsdomain to allow request from @@ -269,7 +269,7 @@ RPC: --ws.api Rpc Modules to be configured for the WS server - [possible values: admin, debug, eth, net, trace, txpool, web3, rpc, reth, ots] + [possible values: admin, debug, eth, net, trace, txpool, web3, rpc, reth, ots, flashbots] --ipcdisable Disable the IPC-RPC server diff --git a/crates/rpc/rpc-api/src/validation.rs b/crates/rpc/rpc-api/src/validation.rs index bbfa673d2..e1819dde4 100644 --- a/crates/rpc/rpc-api/src/validation.rs +++ b/crates/rpc/rpc-api/src/validation.rs @@ -1,7 +1,7 @@ //! API for block submission validation. use alloy_rpc_types_beacon::relay::{ - BuilderBlockValidationRequest, BuilderBlockValidationRequestV2, + BuilderBlockValidationRequest, BuilderBlockValidationRequestV2, BuilderBlockValidationRequestV3, }; use jsonrpsee::proc_macros::rpc; @@ -22,4 +22,11 @@ pub trait BlockSubmissionValidationApi { &self, request: BuilderBlockValidationRequestV2, ) -> jsonrpsee::core::RpcResult<()>; + + /// A Request to validate a block submission. 
+ #[method(name = "validateBuilderSubmissionV3")] + async fn validate_builder_submission_v3( + &self, + request: BuilderBlockValidationRequestV3, + ) -> jsonrpsee::core::RpcResult<()>; } diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index fc98cd6ff..787dce08b 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -180,7 +180,7 @@ use reth_provider::{ }; use reth_rpc::{ AdminApi, DebugApi, EngineEthApi, EthBundle, NetApi, OtterscanApi, RPCApi, RethApi, TraceApi, - TxPoolApi, Web3Api, + TxPoolApi, ValidationApi, Web3Api, }; use reth_rpc_api::servers::*; use reth_rpc_eth_api::{ @@ -1067,6 +1067,11 @@ where pub fn reth_api(&self) -> RethApi { RethApi::new(self.provider.clone(), Box::new(self.executor.clone())) } + + /// Instantiates `ValidationApi` + pub fn validation_api(&self) -> ValidationApi { + ValidationApi::new(self.provider.clone()) + } } impl @@ -1223,6 +1228,9 @@ where .into_rpc() .into() } + RethRpcModule::Flashbots => { + ValidationApi::new(self.provider.clone()).into_rpc().into() + } }) .clone() }) diff --git a/crates/rpc/rpc-server-types/src/module.rs b/crates/rpc/rpc-server-types/src/module.rs index 56417dda7..9f96ff0ce 100644 --- a/crates/rpc/rpc-server-types/src/module.rs +++ b/crates/rpc/rpc-server-types/src/module.rs @@ -258,6 +258,8 @@ pub enum RethRpcModule { Reth, /// `ots_` module Ots, + /// `flashbots_` module + Flashbots, } // === impl RethRpcModule === @@ -306,6 +308,7 @@ impl FromStr for RethRpcModule { "rpc" => Self::Rpc, "reth" => Self::Reth, "ots" => Self::Ots, + "flashbots" => Self::Flashbots, _ => return Err(ParseError::VariantNotFound), }) } diff --git a/crates/rpc/rpc/Cargo.toml b/crates/rpc/rpc/Cargo.toml index dab86ac25..00799d761 100644 --- a/crates/rpc/rpc/Cargo.toml +++ b/crates/rpc/rpc/Cargo.toml @@ -45,6 +45,7 @@ alloy-network.workspace = true alloy-primitives.workspace = true alloy-rlp.workspace = true alloy-rpc-types.workspace = true +alloy-rpc-types-beacon.workspace = true alloy-rpc-types-eth = { workspace = true, features = ["jsonrpsee-types"] } alloy-rpc-types-debug.workspace = true alloy-rpc-types-trace.workspace = true diff --git a/crates/rpc/rpc/src/lib.rs b/crates/rpc/rpc/src/lib.rs index eec14981b..027edea3c 100644 --- a/crates/rpc/rpc/src/lib.rs +++ b/crates/rpc/rpc/src/lib.rs @@ -42,7 +42,9 @@ mod reth; mod rpc; mod trace; mod txpool; +mod validation; mod web3; + pub use admin::AdminApi; pub use debug::DebugApi; pub use engine::{EngineApi, EngineEthApi}; @@ -53,4 +55,5 @@ pub use reth::RethApi; pub use rpc::RPCApi; pub use trace::TraceApi; pub use txpool::TxPoolApi; +pub use validation::ValidationApi; pub use web3::Web3Api; diff --git a/crates/rpc/rpc/src/validation.rs b/crates/rpc/rpc/src/validation.rs new file mode 100644 index 000000000..c6419dc12 --- /dev/null +++ b/crates/rpc/rpc/src/validation.rs @@ -0,0 +1,94 @@ +use alloy_rpc_types_beacon::relay::{ + BuilderBlockValidationRequest, BuilderBlockValidationRequestV2, BuilderBlockValidationRequestV3, +}; +use async_trait::async_trait; +use jsonrpsee::core::RpcResult; +use reth_chainspec::ChainSpecProvider; +use reth_provider::{ + AccountReader, BlockReaderIdExt, HeaderProvider, StateProviderFactory, WithdrawalsProvider, +}; +use reth_rpc_api::BlockSubmissionValidationApiServer; +use reth_rpc_server_types::result::internal_rpc_err; +use std::sync::Arc; +use tracing::warn; + +/// The type that implements the `validation` rpc namespace trait +pub struct ValidationApi { + inner: Arc>, +} + +impl ValidationApi +where + 
Provider: BlockReaderIdExt + + ChainSpecProvider + + StateProviderFactory + + HeaderProvider + + AccountReader + + WithdrawalsProvider + + Clone + + 'static, +{ + /// The provider that can interact with the chain. + pub fn provider(&self) -> Provider { + self.inner.provider.clone() + } + + /// Create a new instance of the [`ValidationApi`] + pub fn new(provider: Provider) -> Self { + let inner = Arc::new(ValidationApiInner { provider }); + Self { inner } + } +} + +#[async_trait] +impl BlockSubmissionValidationApiServer for ValidationApi +where + Provider: BlockReaderIdExt + + ChainSpecProvider + + StateProviderFactory + + HeaderProvider + + AccountReader + + WithdrawalsProvider + + Clone + + 'static, +{ + async fn validate_builder_submission_v1( + &self, + _request: BuilderBlockValidationRequest, + ) -> RpcResult<()> { + Err(internal_rpc_err("unimplemented")) + } + + async fn validate_builder_submission_v2( + &self, + _request: BuilderBlockValidationRequestV2, + ) -> RpcResult<()> { + Err(internal_rpc_err("unimplemented")) + } + + /// Validates a block submitted to the relay + async fn validate_builder_submission_v3( + &self, + request: BuilderBlockValidationRequestV3, + ) -> RpcResult<()> { + warn!("flashbots_validateBuilderSubmissionV3: blindly accepting request without validation {:?}", request); + Ok(()) + } +} + +impl std::fmt::Debug for ValidationApi { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ValidationApi").finish_non_exhaustive() + } +} + +impl Clone for ValidationApi { + fn clone(&self) -> Self { + Self { inner: Arc::clone(&self.inner) } + } +} + +struct ValidationApiInner { + /// The provider that can interact with the chain. + provider: Provider, +} From b2574080609f8fd2584c0c95aa2555b45ff91288 Mon Sep 17 00:00:00 2001 From: lazymio Date: Sun, 27 Oct 2024 00:17:21 +0800 Subject: [PATCH 280/425] Fix readonly check in libmdbx-rs (#12096) --- crates/cli/commands/src/common.rs | 2 +- crates/storage/libmdbx-rs/src/environment.rs | 32 ++++++++++++++----- crates/storage/libmdbx-rs/src/transaction.rs | 5 ++- .../storage/libmdbx-rs/tests/environment.rs | 12 +++++++ 4 files changed, 41 insertions(+), 10 deletions(-) diff --git a/crates/cli/commands/src/common.rs b/crates/cli/commands/src/common.rs index 956a63a5a..49fee347e 100644 --- a/crates/cli/commands/src/common.rs +++ b/crates/cli/commands/src/common.rs @@ -126,7 +126,7 @@ impl> Environmen .static_file_provider() .check_consistency(&factory.provider()?, has_receipt_pruning)? { - if factory.db_ref().is_read_only() { + if factory.db_ref().is_read_only()? { warn!(target: "reth::cli", ?unwind_target, "Inconsistent storage. Restart node to heal."); return Ok(factory) } diff --git a/crates/storage/libmdbx-rs/src/environment.rs b/crates/storage/libmdbx-rs/src/environment.rs index edf9321ac..480f5aaab 100644 --- a/crates/storage/libmdbx-rs/src/environment.rs +++ b/crates/storage/libmdbx-rs/src/environment.rs @@ -4,7 +4,7 @@ use crate::{ flags::EnvironmentFlags, transaction::{RO, RW}, txn_manager::{TxnManager, TxnManagerMessage, TxnPtr}, - Transaction, TransactionKind, + Mode, SyncMode, Transaction, TransactionKind, }; use byteorder::{ByteOrder, NativeEndian}; use mem::size_of; @@ -72,14 +72,14 @@ impl Environment { /// Returns true if the environment was opened in [`crate::Mode::ReadWrite`] mode. #[inline] - pub fn is_read_write(&self) -> bool { - self.inner.env_kind.is_write_map() + pub fn is_read_write(&self) -> Result { + Ok(!self.is_read_only()?) 
} /// Returns true if the environment was opened in [`crate::Mode::ReadOnly`] mode. #[inline] - pub fn is_read_only(&self) -> bool { - !self.inner.env_kind.is_write_map() + pub fn is_read_only(&self) -> Result { + Ok(matches!(self.info()?.mode(), Mode::ReadOnly)) } /// Returns the transaction manager. @@ -425,6 +425,23 @@ impl Info { fsync: self.0.mi_pgop_stat.fsync, } } + + /// Return the mode of the database + #[inline] + pub const fn mode(&self) -> Mode { + let mode = self.0.mi_mode; + if (mode & ffi::MDBX_RDONLY) != 0 { + Mode::ReadOnly + } else if (mode & ffi::MDBX_UTTERLY_NOSYNC) != 0 { + Mode::ReadWrite { sync_mode: SyncMode::UtterlyNoSync } + } else if (mode & ffi::MDBX_NOMETASYNC) != 0 { + Mode::ReadWrite { sync_mode: SyncMode::NoMetaSync } + } else if (mode & ffi::MDBX_SAFE_NOSYNC) != 0 { + Mode::ReadWrite { sync_mode: SyncMode::SafeNoSync } + } else { + Mode::ReadWrite { sync_mode: SyncMode::Durable } + } + } } impl fmt::Debug for Environment { @@ -781,15 +798,14 @@ impl EnvironmentBuilder { } /// Sets the interprocess/shared threshold to force flush the data buffers to disk, if - /// [`SyncMode::SafeNoSync`](crate::flags::SyncMode::SafeNoSync) is used. + /// [`SyncMode::SafeNoSync`] is used. pub fn set_sync_bytes(&mut self, v: usize) -> &mut Self { self.sync_bytes = Some(v as u64); self } /// Sets the interprocess/shared relative period since the last unsteady commit to force flush - /// the data buffers to disk, if [`SyncMode::SafeNoSync`](crate::flags::SyncMode::SafeNoSync) is - /// used. + /// the data buffers to disk, if [`SyncMode::SafeNoSync`] is used. pub fn set_sync_period(&mut self, v: Duration) -> &mut Self { // For this option, mdbx uses units of 1/65536 of a second. let as_mdbx_units = (v.as_secs_f64() * 65536f64) as u64; diff --git a/crates/storage/libmdbx-rs/src/transaction.rs b/crates/storage/libmdbx-rs/src/transaction.rs index 88236ebe9..84b2dabc9 100644 --- a/crates/storage/libmdbx-rs/src/transaction.rs +++ b/crates/storage/libmdbx-rs/src/transaction.rs @@ -6,7 +6,7 @@ use crate::{ txn_manager::{TxnManagerMessage, TxnPtr}, Cursor, Error, Stat, TableObject, }; -use ffi::{mdbx_txn_renew, MDBX_txn_flags_t, MDBX_TXN_RDONLY, MDBX_TXN_READWRITE}; +use ffi::{MDBX_txn_flags_t, MDBX_TXN_RDONLY, MDBX_TXN_READWRITE}; use indexmap::IndexSet; use parking_lot::{Mutex, MutexGuard}; use std::{ @@ -18,6 +18,9 @@ use std::{ time::Duration, }; +#[cfg(feature = "read-tx-timeouts")] +use ffi::mdbx_txn_renew; + mod private { use super::*; diff --git a/crates/storage/libmdbx-rs/tests/environment.rs b/crates/storage/libmdbx-rs/tests/environment.rs index 99453ef11..007418f76 100644 --- a/crates/storage/libmdbx-rs/tests/environment.rs +++ b/crates/storage/libmdbx-rs/tests/environment.rs @@ -128,6 +128,18 @@ fn test_info() { // assert_eq!(info.last_pgno(), 1); // assert_eq!(info.last_txnid(), 0); assert_eq!(info.num_readers(), 0); + assert!(matches!(info.mode(), Mode::ReadWrite { sync_mode: SyncMode::Durable })); + assert!(env.is_read_write().unwrap()); + + drop(env); + let env = Environment::builder() + .set_geometry(Geometry { size: Some(map_size..), ..Default::default() }) + .set_flags(EnvironmentFlags { mode: Mode::ReadOnly, ..Default::default() }) + .open(dir.path()) + .unwrap(); + let info = env.info().unwrap(); + assert!(matches!(info.mode(), Mode::ReadOnly)); + assert!(env.is_read_only().unwrap()); } #[test] From 1bdf429af5092a0b737e900977c1418bc070ec59 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Sun, 27 Oct 2024 00:20:08 +0800 Subject: [PATCH 281/425] Remove trait method 
`Call::evm_config` (#12095) --- crates/optimism/rpc/src/eth/call.rs | 11 +++-------- crates/rpc/rpc-eth-api/src/helpers/call.rs | 20 ++++++++------------ crates/rpc/rpc-eth-api/src/helpers/trace.rs | 2 +- crates/rpc/rpc/src/debug.rs | 11 ++++++----- crates/rpc/rpc/src/eth/bundle.rs | 6 +++--- crates/rpc/rpc/src/eth/helpers/call.rs | 7 +------ crates/rpc/rpc/src/trace.rs | 7 ++----- 7 files changed, 24 insertions(+), 40 deletions(-) diff --git a/crates/optimism/rpc/src/eth/call.rs b/crates/optimism/rpc/src/eth/call.rs index f1c10e6f1..0402165f7 100644 --- a/crates/optimism/rpc/src/eth/call.rs +++ b/crates/optimism/rpc/src/eth/call.rs @@ -9,7 +9,7 @@ use reth_primitives::{ }; use reth_rpc_eth_api::{ helpers::{Call, EthCall, LoadState, SpawnBlocking}, - FromEthApiError, IntoEthApiError, + FromEthApiError, IntoEthApiError, RpcNodeCore, }; use reth_rpc_eth_types::{revm_utils::CallFees, RpcInvalidTransactionError}; @@ -24,9 +24,9 @@ where impl Call for OpEthApi where - Self: LoadState + SpawnBlocking, + N: RpcNodeCore, + Self: LoadState> + SpawnBlocking, Self::Error: From, - N: FullNodeComponents, { #[inline] fn call_gas_limit(&self) -> u64 { @@ -38,11 +38,6 @@ where self.inner.max_simulate_blocks() } - #[inline] - fn evm_config(&self) -> &impl ConfigureEvm
{ - self.inner.evm_config() - } - fn create_txn_env( &self, block_env: &BlockEnv, diff --git a/crates/rpc/rpc-eth-api/src/helpers/call.rs b/crates/rpc/rpc-eth-api/src/helpers/call.rs index 89ae1c8ac..f2662a86a 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/call.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/call.rs @@ -3,6 +3,7 @@ use crate::{ AsEthApiError, FromEthApiError, FromEvmError, FullEthApiTypes, IntoEthApiError, RpcBlock, + RpcNodeCore, }; use alloy_eips::{eip1559::calc_next_block_base_fee, eip2930::AccessListResult}; use alloy_primitives::{Address, Bytes, TxKind, B256, U256}; @@ -300,7 +301,7 @@ pub trait EthCall: Call + LoadPendingBlock { let env = EnvWithHandlerCfg::new_with_cfg_env( cfg.clone(), block_env.clone(), - Call::evm_config(&this).tx_env(tx, *signer), + RpcNodeCore::evm_config(&this).tx_env(tx, *signer), ); let (res, _) = this.transact(&mut db, env)?; db.commit(res.state); @@ -452,7 +453,7 @@ pub trait EthCall: Call + LoadPendingBlock { } /// Executes code on state. -pub trait Call: LoadState + SpawnBlocking { +pub trait Call: LoadState> + SpawnBlocking { /// Returns default gas limit to use for `eth_call` and tracing RPC methods. /// /// Data access in default trait method implementations. @@ -461,11 +462,6 @@ pub trait Call: LoadState + SpawnBlocking { /// Returns the maximum number of blocks accepted for `eth_simulateV1`. fn max_simulate_blocks(&self) -> u64; - /// Returns a handle for reading evm config. - /// - /// Data access in default (L1) trait method implementations. - fn evm_config(&self) -> &impl ConfigureEvm
; - /// Executes the closure with the state that corresponds to the given [`BlockId`]. fn with_state_at_block(&self, at: BlockId, f: F) -> Result where @@ -486,7 +482,7 @@ pub trait Call: LoadState + SpawnBlocking { DB: Database, EthApiError: From, { - let mut evm = Call::evm_config(self).evm_with_env(db, env); + let mut evm = self.evm_config().evm_with_env(db, env); let res = evm.transact().map_err(Self::Error::from_evm_err)?; let (_, env) = evm.into_db_and_env_with_handler_cfg(); Ok((res, env)) @@ -504,7 +500,7 @@ pub trait Call: LoadState + SpawnBlocking { DB: Database, EthApiError: From, { - let mut evm = Call::evm_config(self).evm_with_env_and_inspector(db, env, inspector); + let mut evm = self.evm_config().evm_with_env_and_inspector(db, env, inspector); let res = evm.transact().map_err(Self::Error::from_evm_err)?; let (_, env) = evm.into_db_and_env_with_handler_cfg(); Ok((res, env)) @@ -636,7 +632,7 @@ pub trait Call: LoadState + SpawnBlocking { let env = EnvWithHandlerCfg::new_with_cfg_env( cfg, block_env, - Call::evm_config(&this).tx_env(tx.as_signed(), tx.signer()), + RpcNodeCore::evm_config(&this).tx_env(tx.as_signed(), tx.signer()), ); let (res, _) = this.transact(&mut db, env)?; @@ -669,7 +665,7 @@ pub trait Call: LoadState + SpawnBlocking { { let env = EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, Default::default()); - let mut evm = Call::evm_config(self).evm_with_env(db, env); + let mut evm = self.evm_config().evm_with_env(db, env); let mut index = 0; for (sender, tx) in transactions { if tx.hash() == target_tx_hash { @@ -677,7 +673,7 @@ pub trait Call: LoadState + SpawnBlocking { break } - Call::evm_config(self).fill_tx_env(evm.tx_mut(), tx, *sender); + self.evm_config().fill_tx_env(evm.tx_mut(), tx, *sender); evm.transact_commit().map_err(Self::Error::from_evm_err)?; index += 1; } diff --git a/crates/rpc/rpc-eth-api/src/helpers/trace.rs b/crates/rpc/rpc-eth-api/src/helpers/trace.rs index 4f1173484..6c7dd0f6f 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/trace.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/trace.rs @@ -229,7 +229,7 @@ pub trait Trace: LoadState { let env = EnvWithHandlerCfg::new_with_cfg_env( cfg, block_env, - Call::evm_config(&this).tx_env(tx.as_signed(), tx.signer()), + RpcNodeCore::evm_config(&this).tx_env(tx.as_signed(), tx.signer()), ); let (res, _) = this.inspect(StateCacheDbRefMutWrapper(&mut db), env, &mut inspector)?; diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index eeab734a6..dd1cd9739 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -27,7 +27,7 @@ use reth_provider::{ use reth_revm::database::StateProviderDatabase; use reth_rpc_api::DebugApiServer; use reth_rpc_eth_api::{ - helpers::{Call, EthApiSpec, EthTransactions, TraceExt}, + helpers::{EthApiSpec, EthTransactions, TraceExt}, EthApiTypes, FromEthApiError, RpcNodeCore, }; use reth_rpc_eth_types::{EthApiError, StateCacheDb}; @@ -120,7 +120,8 @@ where env: Env::boxed( cfg.cfg_env.clone(), block_env.clone(), - Call::evm_config(this.eth_api()).tx_env(tx.as_signed(), tx.signer()), + RpcNodeCore::evm_config(this.eth_api()) + .tx_env(tx.as_signed(), tx.signer()), ), handler_cfg: cfg.handler_cfg, }; @@ -263,7 +264,7 @@ where // apply relevant system calls let mut system_caller = SystemCaller::new( - Call::evm_config(this.eth_api()).clone(), + RpcNodeCore::evm_config(this.eth_api()).clone(), RpcNodeCore::provider(this.eth_api()).chain_spec(), ); @@ -293,7 +294,7 @@ where env: Env::boxed( cfg.cfg_env.clone(), block_env, - 
Call::evm_config(this.eth_api()).tx_env(tx.as_signed(), tx.signer()), + RpcNodeCore::evm_config(this.eth_api()).tx_env(tx.as_signed(), tx.signer()), ), handler_cfg: cfg.handler_cfg, }; @@ -562,7 +563,7 @@ where env: Env::boxed( cfg.cfg_env.clone(), block_env.clone(), - Call::evm_config(this.eth_api()).tx_env(tx, *signer), + RpcNodeCore::evm_config(this.eth_api()).tx_env(tx, *signer), ), handler_cfg: cfg.handler_cfg, }; diff --git a/crates/rpc/rpc/src/eth/bundle.rs b/crates/rpc/rpc/src/eth/bundle.rs index e97497786..9ce9ee1eb 100644 --- a/crates/rpc/rpc/src/eth/bundle.rs +++ b/crates/rpc/rpc/src/eth/bundle.rs @@ -12,7 +12,7 @@ use reth_primitives::{ PooledTransactionsElement, }; use reth_revm::database::StateProviderDatabase; -use reth_rpc_eth_api::{FromEthApiError, FromEvmError}; +use reth_rpc_eth_api::{FromEthApiError, FromEvmError, RpcNodeCore}; use reth_tasks::pool::BlockingTaskGuard; use revm::{ db::CacheDB, @@ -166,7 +166,7 @@ where let mut total_gas_fess = U256::ZERO; let mut hasher = Keccak256::new(); - let mut evm = Call::evm_config(ð_api).evm_with_env(db, env); + let mut evm = RpcNodeCore::evm_config(ð_api).evm_with_env(db, env); let mut results = Vec::with_capacity(transactions.len()); let mut transactions = transactions.into_iter().peekable(); @@ -187,7 +187,7 @@ where .effective_tip_per_gas(basefee) .ok_or_else(|| RpcInvalidTransactionError::FeeCapTooLow) .map_err(Eth::Error::from_eth_err)?; - Call::evm_config(ð_api).fill_tx_env(evm.tx_mut(), &tx, signer); + RpcNodeCore::evm_config(ð_api).fill_tx_env(evm.tx_mut(), &tx, signer); let ResultAndState { result, state } = evm.transact().map_err(Eth::Error::from_evm_err)?; diff --git a/crates/rpc/rpc/src/eth/helpers/call.rs b/crates/rpc/rpc/src/eth/helpers/call.rs index 396bf9bd0..d1d33190a 100644 --- a/crates/rpc/rpc/src/eth/helpers/call.rs +++ b/crates/rpc/rpc/src/eth/helpers/call.rs @@ -13,7 +13,7 @@ impl EthCall for EthApi Call for EthApi where - Self: LoadState + SpawnBlocking, + Self: LoadState> + SpawnBlocking, EvmConfig: ConfigureEvm
, { #[inline] @@ -25,9 +25,4 @@ where fn max_simulate_blocks(&self) -> u64 { self.inner.max_simulate_blocks() } - - #[inline] - fn evm_config(&self) -> &impl ConfigureEvm
{ - self.inner.evm_config() - } } diff --git a/crates/rpc/rpc/src/trace.rs b/crates/rpc/rpc/src/trace.rs index b9b15b536..2883818af 100644 --- a/crates/rpc/rpc/src/trace.rs +++ b/crates/rpc/rpc/src/trace.rs @@ -21,10 +21,7 @@ use reth_primitives::{BlockId, Header}; use reth_provider::{BlockReader, ChainSpecProvider, EvmEnvProvider, StateProviderFactory}; use reth_revm::database::StateProviderDatabase; use reth_rpc_api::TraceApiServer; -use reth_rpc_eth_api::{ - helpers::{Call, TraceExt}, - FromEthApiError, -}; +use reth_rpc_eth_api::{helpers::TraceExt, FromEthApiError, RpcNodeCore}; use reth_rpc_eth_types::{error::EthApiError, utils::recover_raw_transaction}; use reth_tasks::pool::BlockingTaskGuard; use revm::{ @@ -124,7 +121,7 @@ where let env = EnvWithHandlerCfg::new_with_cfg_env( cfg, block, - Call::evm_config(self.eth_api()).tx_env(tx.as_signed(), tx.signer()), + RpcNodeCore::evm_config(self.eth_api()).tx_env(tx.as_signed(), tx.signer()), ); let config = TracingInspectorConfig::from_parity_config(&trace_types); From ab07fcfb113cb0b579e1f9f55a5dd9511b576687 Mon Sep 17 00:00:00 2001 From: Hai | RISE <150876604+hai-rise@users.noreply.github.com> Date: Sun, 27 Oct 2024 00:02:14 +0700 Subject: [PATCH 282/425] chore(op): simplify blob fields in newly built block header (#12035) --- Cargo.lock | 1 - crates/optimism/payload/Cargo.toml | 2 -- crates/optimism/payload/src/builder.rs | 36 ++++++++------------------ 3 files changed, 11 insertions(+), 28 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b1ca1c925..b483fd664 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8279,7 +8279,6 @@ dependencies = [ "reth-transaction-pool", "reth-trie", "revm", - "revm-primitives", "sha2 0.10.8", "thiserror", "tracing", diff --git a/crates/optimism/payload/Cargo.toml b/crates/optimism/payload/Cargo.toml index de61def83..ba0b105e8 100644 --- a/crates/optimism/payload/Cargo.toml +++ b/crates/optimism/payload/Cargo.toml @@ -39,7 +39,6 @@ alloy-eips.workspace = true alloy-primitives.workspace = true alloy-rlp.workspace = true op-alloy-rpc-types-engine.workspace = true -revm-primitives.workspace = true alloy-rpc-types-engine.workspace = true alloy-consensus.workspace = true @@ -56,5 +55,4 @@ optimism = [ "revm/optimism", "reth-execution-types/optimism", "reth-optimism-consensus/optimism", - "revm-primitives/optimism" ] diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 85f687aa8..5523770e0 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -7,12 +7,12 @@ use alloy_eips::merge::BEACON_NONCE; use alloy_primitives::U256; use reth_basic_payload_builder::*; use reth_chain_state::ExecutedBlock; -use reth_chainspec::{ChainSpecProvider, EthereumHardforks}; +use reth_chainspec::ChainSpecProvider; use reth_evm::{system_calls::SystemCaller, ConfigureEvm, ConfigureEvmEnv, NextBlockEnvAttributes}; use reth_execution_types::ExecutionOutcome; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::calculate_receipt_root_no_memo_optimism; -use reth_optimism_forks::OptimismHardfork; +use reth_optimism_forks::{OptimismHardfork, OptimismHardforks}; use reth_payload_primitives::{PayloadBuilderAttributes, PayloadBuilderError}; use reth_primitives::{ proofs, @@ -30,7 +30,6 @@ use revm::{ primitives::{EVMError, EnvWithHandlerCfg, InvalidTransaction, ResultAndState}, DatabaseCommit, }; -use revm_primitives::calc_excess_blob_gas; use tracing::{debug, trace, warn}; use crate::{ @@ -460,26 +459,16 @@ where // create the block 
header let transactions_root = proofs::calculate_transaction_root(&executed_txs); - // initialize empty blob sidecars. There are no blob transactions on L2. - let blob_sidecars = Vec::new(); - let mut excess_blob_gas = None; - let mut blob_gas_used = None; - - // only determine cancun fields when active - if chain_spec.is_cancun_active_at_timestamp(attributes.payload_attributes.timestamp) { - excess_blob_gas = if chain_spec.is_cancun_active_at_timestamp(parent_block.timestamp) { - let parent_excess_blob_gas = parent_block.excess_blob_gas.unwrap_or_default(); - let parent_blob_gas_used = parent_block.blob_gas_used.unwrap_or_default(); - Some(calc_excess_blob_gas(parent_excess_blob_gas, parent_blob_gas_used)) + // OP doesn't support blobs/EIP-4844. + // https://specs.optimism.io/protocol/exec-engine.html#ecotone-disable-blob-transactions + // Need [Some] or [None] based on hardfork to match block hash. + let (excess_blob_gas, blob_gas_used) = + if chain_spec.is_ecotone_active_at_timestamp(attributes.payload_attributes.timestamp) { + (Some(0), Some(0)) } else { - // for the first post-fork block, both parent.blob_gas_used and - // parent.excess_blob_gas are evaluated as 0 - Some(calc_excess_blob_gas(0, 0)) + (None, None) }; - blob_gas_used = Some(0); - } - let header = Header { parent_hash: parent_block.hash(), ommers_hash: EMPTY_OMMER_ROOT_HASH, @@ -500,7 +489,7 @@ where extra_data, parent_beacon_block_root: attributes.payload_attributes.parent_beacon_block_root, blob_gas_used, - excess_blob_gas: excess_blob_gas.map(Into::into), + excess_blob_gas, requests_hash: None, }; @@ -524,7 +513,7 @@ where let no_tx_pool = attributes.no_tx_pool; - let mut payload = OptimismBuiltPayload::new( + let payload = OptimismBuiltPayload::new( attributes.payload_attributes.id, sealed_block, total_fees, @@ -533,9 +522,6 @@ where Some(executed), ); - // extend the payload with the blob sidecars from the executed txs - payload.extend_sidecars(blob_sidecars); - if no_tx_pool { // if `no_tx_pool` is set only transactions from the payload attributes will be included in // the payload. 
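For context, the comment above continues into the builder's return path; a minimal sketch of what the `no_tx_pool` branch presumably resolves to (outcome variant names assumed from `reth_basic_payload_builder`, not part of this diff):

    if no_tx_pool {
        // Deterministic payload: nothing from the pool can improve it, so freeze it.
        Ok(BuildOutcome::Freeze(payload))
    } else {
        // Otherwise keep competing against the previous best payload.
        Ok(BuildOutcome::Better { payload, cached_reads })
    }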
In other words, the payload is deterministic and we can freeze it once we've From 923f4ffa92c6fe387050f099773b61dd6983bff2 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 26 Oct 2024 20:22:46 +0200 Subject: [PATCH 283/425] chore: only check for better payload if tx_pool (#12097) --- crates/optimism/payload/src/builder.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 5523770e0..36b7d17a0 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -413,8 +413,8 @@ where } } - // check if we have a better block - if !is_better_payload(best_payload.as_ref(), total_fees) { + // check if we have a better block, but only if we included transactions from the pool + if !attributes.no_tx_pool && !is_better_payload(best_payload.as_ref(), total_fees) { // can skip building the block return Ok(BuildOutcome::Aborted { fees: total_fees, cached_reads }) } From a98dc3973ff733feb11a7c8b50c60459dfaa9351 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Sun, 27 Oct 2024 02:56:29 +0800 Subject: [PATCH 284/425] chore(rpc): simplify trait bounds on `EthApiSpec` impl (#12101) --- crates/rpc/rpc/src/eth/helpers/spec.rs | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/crates/rpc/rpc/src/eth/helpers/spec.rs b/crates/rpc/rpc/src/eth/helpers/spec.rs index c5c8d54c6..a44692e18 100644 --- a/crates/rpc/rpc/src/eth/helpers/spec.rs +++ b/crates/rpc/rpc/src/eth/helpers/spec.rs @@ -3,20 +3,17 @@ use reth_chainspec::EthereumHardforks; use reth_network_api::NetworkInfo; use reth_provider::{BlockNumReader, ChainSpecProvider, StageCheckpointReader}; use reth_rpc_eth_api::{helpers::EthApiSpec, RpcNodeCore}; -use reth_transaction_pool::TransactionPool; use crate::EthApi; impl EthApiSpec for EthApi where - Self: RpcNodeCore, - Pool: TransactionPool + 'static, - Provider: ChainSpecProvider - + BlockNumReader - + StageCheckpointReader - + 'static, - Network: NetworkInfo + 'static, - EvmConfig: Send + Sync, + Self: RpcNodeCore< + Provider: ChainSpecProvider + + BlockNumReader + + StageCheckpointReader, + Network: NetworkInfo, + >, { fn starting_block(&self) -> U256 { self.inner.starting_block() From 988c5ee4c5d4b9e1c71d81aaf54ff7e6c1855895 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Sun, 27 Oct 2024 07:11:53 +0800 Subject: [PATCH 285/425] chore(rpc): Add super trait `RpcNodeCore` to `LoadPendingBlock` (#12098) --- crates/optimism/rpc/src/eth/call.rs | 8 ++-- crates/optimism/rpc/src/eth/pending_block.rs | 34 +++++---------- crates/rpc/rpc-eth-api/src/helpers/block.rs | 6 +-- crates/rpc/rpc-eth-api/src/helpers/call.rs | 4 +- .../rpc-eth-api/src/helpers/pending_block.rs | 34 ++++++--------- crates/rpc/rpc-eth-api/src/helpers/state.rs | 4 +- crates/rpc/rpc/src/eth/bundle.rs | 4 +- .../rpc/rpc/src/eth/helpers/pending_block.rs | 43 ++++++------------- 8 files changed, 48 insertions(+), 89 deletions(-) diff --git a/crates/optimism/rpc/src/eth/call.rs b/crates/optimism/rpc/src/eth/call.rs index 0402165f7..c8f8200bc 100644 --- a/crates/optimism/rpc/src/eth/call.rs +++ b/crates/optimism/rpc/src/eth/call.rs @@ -1,14 +1,12 @@ use alloy_primitives::{Bytes, TxKind, U256}; use alloy_rpc_types_eth::transaction::TransactionRequest; -use reth_chainspec::EthereumHardforks; use reth_evm::ConfigureEvm; -use reth_node_api::{FullNodeComponents, NodeTypes}; use reth_primitives::{ revm_primitives::{BlockEnv, OptimismFields, TxEnv}, Header, }; use reth_rpc_eth_api::{ 
- helpers::{Call, EthCall, LoadState, SpawnBlocking}, + helpers::{Call, EthCall, LoadPendingBlock, LoadState, SpawnBlocking}, FromEthApiError, IntoEthApiError, RpcNodeCore, }; use reth_rpc_eth_types::{revm_utils::CallFees, RpcInvalidTransactionError}; @@ -17,8 +15,8 @@ use crate::{OpEthApi, OpEthApiError}; impl EthCall for OpEthApi where - Self: Call, - N: FullNodeComponents>, + Self: Call + LoadPendingBlock, + N: RpcNodeCore, { } diff --git a/crates/optimism/rpc/src/eth/pending_block.rs b/crates/optimism/rpc/src/eth/pending_block.rs index 5b716f393..3b3b7845c 100644 --- a/crates/optimism/rpc/src/eth/pending_block.rs +++ b/crates/optimism/rpc/src/eth/pending_block.rs @@ -1,9 +1,8 @@ //! Loads OP pending block for a RPC response. use alloy_primitives::{BlockNumber, B256}; -use reth_chainspec::EthereumHardforks; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_evm::ConfigureEvm; -use reth_node_api::{FullNodeComponents, NodeTypes}; use reth_optimism_consensus::calculate_receipt_root_no_memo_optimism; use reth_primitives::{ revm_primitives::BlockEnv, BlockNumberOrTag, Header, Receipt, SealedBlockWithSenders, @@ -14,7 +13,7 @@ use reth_provider::{ }; use reth_rpc_eth_api::{ helpers::{LoadPendingBlock, SpawnBlocking}, - FromEthApiError, + FromEthApiError, RpcNodeCore, }; use reth_rpc_eth_types::{EthApiError, PendingBlock}; use reth_transaction_pool::TransactionPool; @@ -24,33 +23,20 @@ use crate::OpEthApi; impl LoadPendingBlock for OpEthApi where Self: SpawnBlocking, - N: FullNodeComponents>, + N: RpcNodeCore< + Provider: BlockReaderIdExt + + EvmEnvProvider + + ChainSpecProvider + + StateProviderFactory, + Pool: TransactionPool, + Evm: ConfigureEvm
, + >, { - #[inline] - fn provider( - &self, - ) -> impl BlockReaderIdExt - + EvmEnvProvider - + ChainSpecProvider - + StateProviderFactory { - self.inner.provider() - } - - #[inline] - fn pool(&self) -> impl TransactionPool { - self.inner.pool() - } - #[inline] fn pending_block(&self) -> &tokio::sync::Mutex> { self.inner.pending_block() } - #[inline] - fn evm_config(&self) -> &impl ConfigureEvm
{ - self.inner.evm_config() - } - /// Returns the locally built pending block async fn local_pending_block( &self, diff --git a/crates/rpc/rpc-eth-api/src/helpers/block.rs b/crates/rpc/rpc-eth-api/src/helpers/block.rs index da5f275ef..7599cbb59 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/block.rs @@ -9,7 +9,7 @@ use reth_provider::{BlockIdReader, BlockReader, BlockReaderIdExt, HeaderProvider use reth_rpc_eth_types::EthStateCache; use reth_rpc_types_compat::block::{from_block, uncle_block_from_header}; -use crate::{FromEthApiError, FullEthApiTypes, RpcBlock, RpcReceipt}; +use crate::{FromEthApiError, FullEthApiTypes, RpcBlock, RpcNodeCore, RpcReceipt}; use super::{LoadPendingBlock, LoadReceipt, SpawnBlocking}; @@ -220,7 +220,7 @@ pub trait LoadBlock: LoadPendingBlock + SpawnBlocking { async move { if block_id.is_pending() { // Pending block can be fetched directly without need for caching - if let Some(pending_block) = LoadPendingBlock::provider(self) + if let Some(pending_block) = RpcNodeCore::provider(self) .pending_block_with_senders() .map_err(Self::Error::from_eth_err)? { @@ -234,7 +234,7 @@ pub trait LoadBlock: LoadPendingBlock + SpawnBlocking { }; } - let block_hash = match LoadPendingBlock::provider(self) + let block_hash = match RpcNodeCore::provider(self) .block_hash_for_id(block_id) .map_err(Self::Error::from_eth_err)? { diff --git a/crates/rpc/rpc-eth-api/src/helpers/call.rs b/crates/rpc/rpc-eth-api/src/helpers/call.rs index f2662a86a..c2b6524b3 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/call.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/call.rs @@ -97,7 +97,7 @@ pub trait EthCall: Call + LoadPendingBlock { let base_block = self.block_with_senders(block).await?.ok_or(EthApiError::HeaderNotFound(block))?; let mut parent_hash = base_block.header.hash(); - let total_difficulty = LoadPendingBlock::provider(self) + let total_difficulty = RpcNodeCore::provider(self) .header_td_by_number(block_env.number.to()) .map_err(Self::Error::from_eth_err)? .ok_or(EthApiError::HeaderNotFound(block))?; @@ -119,7 +119,7 @@ pub trait EthCall: Call + LoadPendingBlock { block_env.timestamp += U256::from(1); if validation { - let chain_spec = LoadPendingBlock::provider(&this).chain_spec(); + let chain_spec = RpcNodeCore::provider(&this).chain_spec(); let base_fee_params = chain_spec.base_fee_params_at_timestamp(block_env.timestamp.to()); let base_fee = if let Some(latest) = blocks.last() { diff --git a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs index 872f17ee9..2c04f3beb 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs @@ -3,7 +3,7 @@ use std::time::{Duration, Instant}; -use crate::{EthApiTypes, FromEthApiError, FromEvmError}; +use crate::{EthApiTypes, FromEthApiError, FromEvmError, RpcNodeCore}; use alloy_consensus::EMPTY_OMMER_ROOT_HASH; use alloy_eips::{eip7685::EMPTY_REQUESTS_HASH, merge::BEACON_NONCE}; @@ -43,32 +43,22 @@ use super::SpawnBlocking; /// Loads a pending block from database. /// /// Behaviour shared by several `eth_` RPC methods, not exclusive to `eth_` blocks RPC methods. -pub trait LoadPendingBlock: EthApiTypes { - /// Returns a handle for reading data from disk. - /// - /// Data access in default (L1) trait method implementations. 
- fn provider( - &self, - ) -> impl BlockReaderIdExt - + EvmEnvProvider - + ChainSpecProvider - + StateProviderFactory; - - /// Returns a handle for reading data from transaction pool. - /// - /// Data access in default (L1) trait method implementations. - fn pool(&self) -> impl TransactionPool; - +pub trait LoadPendingBlock: + EthApiTypes + + RpcNodeCore< + Provider: BlockReaderIdExt + + EvmEnvProvider + + ChainSpecProvider + + StateProviderFactory, + Pool: TransactionPool, + Evm: ConfigureEvm
, + > +{ /// Returns a handle to the pending block. /// /// Data access in default (L1) trait method implementations. fn pending_block(&self) -> &Mutex>; - /// Returns a handle for reading evm config. - /// - /// Data access in default (L1) trait method implementations. - fn evm_config(&self) -> &impl ConfigureEvm
; - /// Configures the [`CfgEnvWithHandlerCfg`] and [`BlockEnv`] for the pending block /// /// If no pending block is available, this will derive it from the `latest` block diff --git a/crates/rpc/rpc-eth-api/src/helpers/state.rs b/crates/rpc/rpc-eth-api/src/helpers/state.rs index 2a15b194f..87e66cb74 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/state.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/state.rs @@ -233,7 +233,7 @@ pub trait LoadState: Ok((cfg, block_env, origin.state_block_id())) } else { // Use cached values if there is no pending block - let block_hash = LoadPendingBlock::provider(self) + let block_hash = RpcNodeCore::provider(self) .block_hash_for_id(at) .map_err(Self::Error::from_eth_err)? .ok_or(EthApiError::HeaderNotFound(at))?; @@ -262,7 +262,7 @@ pub trait LoadState: let (cfg, mut block_env, _) = self.evm_env_at(header.parent_hash.into()).await?; let after_merge = cfg.handler_cfg.spec_id >= SpecId::MERGE; - LoadPendingBlock::evm_config(self).fill_block_env(&mut block_env, header, after_merge); + self.evm_config().fill_block_env(&mut block_env, header, after_merge); Ok((cfg, block_env)) } diff --git a/crates/rpc/rpc/src/eth/bundle.rs b/crates/rpc/rpc/src/eth/bundle.rs index 9ce9ee1eb..ec1a43c75 100644 --- a/crates/rpc/rpc/src/eth/bundle.rs +++ b/crates/rpc/rpc/src/eth/bundle.rs @@ -130,12 +130,12 @@ where } else if cfg.handler_cfg.spec_id.is_enabled_in(SpecId::LONDON) { let parent_block = block_env.number.saturating_to::(); // here we need to fetch the _next_ block's basefee based on the parent block - let parent = LoadPendingBlock::provider(self.eth_api()) + let parent = RpcNodeCore::provider(self.eth_api()) .header_by_number(parent_block) .map_err(Eth::Error::from_eth_err)? .ok_or(EthApiError::HeaderNotFound(parent_block.into()))?; if let Some(base_fee) = parent.next_block_base_fee( - LoadPendingBlock::provider(self.eth_api()) + RpcNodeCore::provider(self.eth_api()) .chain_spec() .base_fee_params_at_block(parent_block), ) { diff --git a/crates/rpc/rpc/src/eth/helpers/pending_block.rs b/crates/rpc/rpc/src/eth/helpers/pending_block.rs index 69d55f58b..6b28947df 100644 --- a/crates/rpc/rpc/src/eth/helpers/pending_block.rs +++ b/crates/rpc/rpc/src/eth/helpers/pending_block.rs @@ -1,10 +1,13 @@ //! Support for building a pending block with transactions from local view of mempool. -use reth_chainspec::EthereumHardforks; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_evm::ConfigureEvm; use reth_primitives::Header; use reth_provider::{BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, StateProviderFactory}; -use reth_rpc_eth_api::helpers::{LoadPendingBlock, SpawnBlocking}; +use reth_rpc_eth_api::{ + helpers::{LoadPendingBlock, SpawnBlocking}, + RpcNodeCore, +}; use reth_rpc_eth_types::PendingBlock; use reth_transaction_pool::TransactionPool; @@ -13,36 +16,18 @@ use crate::EthApi; impl LoadPendingBlock for EthApi where - Self: SpawnBlocking, - Provider: BlockReaderIdExt - + EvmEnvProvider - + ChainSpecProvider - + StateProviderFactory, - Pool: TransactionPool, - EvmConfig: ConfigureEvm
, + Self: SpawnBlocking + + RpcNodeCore< + Provider: BlockReaderIdExt + + EvmEnvProvider + + ChainSpecProvider + + StateProviderFactory, + Pool: TransactionPool, + Evm: ConfigureEvm
, + >, { - #[inline] - fn provider( - &self, - ) -> impl BlockReaderIdExt - + EvmEnvProvider - + ChainSpecProvider - + StateProviderFactory { - self.inner.provider() - } - - #[inline] - fn pool(&self) -> impl TransactionPool { - self.inner.pool() - } - #[inline] fn pending_block(&self) -> &tokio::sync::Mutex> { self.inner.pending_block() } - - #[inline] - fn evm_config(&self) -> &impl ConfigureEvm
{ - self.inner.evm_config() - } } From 8eb1742284964de1a7a32df523a2d0a4b6df1b4f Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Sun, 27 Oct 2024 02:59:57 +0100 Subject: [PATCH 286/425] refactor(tx-pool): small refactor (#12107) --- crates/transaction-pool/src/maintain.rs | 2 +- crates/transaction-pool/src/pool/best.rs | 2 +- crates/transaction-pool/src/pool/blob.rs | 15 +++++++-------- 3 files changed, 9 insertions(+), 10 deletions(-) diff --git a/crates/transaction-pool/src/maintain.rs b/crates/transaction-pool/src/maintain.rs index 23a8d0dc6..09c042ae6 100644 --- a/crates/transaction-pool/src/maintain.rs +++ b/crates/transaction-pool/src/maintain.rs @@ -575,7 +575,7 @@ where // Filter out errors ::try_from_consensus(tx.into()).ok() }) - .collect::>(); + .collect(); let outcome = pool.add_transactions(crate::TransactionOrigin::Local, pool_transactions).await; diff --git a/crates/transaction-pool/src/pool/best.rs b/crates/transaction-pool/src/pool/best.rs index 77cd35d8a..268e3e262 100644 --- a/crates/transaction-pool/src/pool/best.rs +++ b/crates/transaction-pool/src/pool/best.rs @@ -196,7 +196,7 @@ impl Iterator for BestTransactions { } } -/// A[`BestTransactions`](crate::traits::BestTransactions) implementation that filters the +/// A [`BestTransactions`](crate::traits::BestTransactions) implementation that filters the /// transactions of iter with predicate. /// /// Filter out transactions are marked as invalid: diff --git a/crates/transaction-pool/src/pool/blob.rs b/crates/transaction-pool/src/pool/blob.rs index cb09e8234..ac39c6ab7 100644 --- a/crates/transaction-pool/src/pool/blob.rs +++ b/crates/transaction-pool/src/pool/blob.rs @@ -11,7 +11,7 @@ use std::{ /// A set of validated blob transactions in the pool that are __not pending__. /// -/// The purpose of this pool is keep track of blob transactions that are queued and to evict the +/// The purpose of this pool is to keep track of blob transactions that are queued and to evict the /// worst blob transactions once the sub-pool is full. 
/// /// This expects that certain constraints are met: @@ -198,14 +198,13 @@ impl BlobTransactions { &mut self, pending_fees: &PendingFees, ) -> Vec>> { - let to_remove = self.satisfy_pending_fee_ids(pending_fees); - - let mut removed = Vec::with_capacity(to_remove.len()); - for id in to_remove { - removed.push(self.remove_transaction(&id).expect("transaction exists")); - } + let removed = self + .satisfy_pending_fee_ids(pending_fees) + .into_iter() + .map(|id| self.remove_transaction(&id).expect("transaction exists")) + .collect(); - // set pending fees and reprioritize / resort + // Update pending fees and reprioritize self.pending_fees = pending_fees.clone(); self.reprioritize(); From fae36bd25ff1efa01f0c66d5c73606f63c1f0794 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Sun, 27 Oct 2024 03:00:32 +0100 Subject: [PATCH 287/425] refactor(storage): small refactor (#12106) --- crates/storage/db-api/src/cursor.rs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/crates/storage/db-api/src/cursor.rs b/crates/storage/db-api/src/cursor.rs index 585aa4947..9297f738a 100644 --- a/crates/storage/db-api/src/cursor.rs +++ b/crates/storage/db-api/src/cursor.rs @@ -152,12 +152,7 @@ where impl> Iterator for Walker<'_, T, CURSOR> { type Item = Result, DatabaseError>; fn next(&mut self) -> Option { - let start = self.start.take(); - if start.is_some() { - return start - } - - self.cursor.next().transpose() + self.start.take().or_else(|| self.cursor.next().transpose()) } } From e158369a68102661fdfdb8ecf1ee205360e1104c Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Sun, 27 Oct 2024 15:35:24 +0800 Subject: [PATCH 288/425] chore(rpc): remove redundant trait method `LoadBlock::provider` (#12100) --- crates/optimism/rpc/src/eth/block.rs | 7 +--- crates/optimism/rpc/src/eth/transaction.rs | 15 ++------- crates/rpc/rpc-eth-api/src/helpers/block.rs | 25 +++++++------- crates/rpc/rpc-eth-api/src/helpers/call.rs | 3 +- .../rpc-eth-api/src/helpers/transaction.rs | 33 ++++++++----------- crates/rpc/rpc/src/eth/helpers/block.rs | 5 --- crates/rpc/rpc/src/eth/helpers/transaction.rs | 20 +++-------- 7 files changed, 34 insertions(+), 74 deletions(-) diff --git a/crates/optimism/rpc/src/eth/block.rs b/crates/optimism/rpc/src/eth/block.rs index dfdd09608..42c4789d4 100644 --- a/crates/optimism/rpc/src/eth/block.rs +++ b/crates/optimism/rpc/src/eth/block.rs @@ -7,7 +7,7 @@ use reth_chainspec::ChainSpecProvider; use reth_node_api::{FullNodeComponents, NodeTypes}; use reth_optimism_chainspec::OpChainSpec; use reth_primitives::TransactionMeta; -use reth_provider::{BlockReaderIdExt, HeaderProvider}; +use reth_provider::HeaderProvider; use reth_rpc_eth_api::{ helpers::{EthBlocks, LoadBlock, LoadPendingBlock, LoadReceipt, SpawnBlocking}, RpcReceipt, @@ -87,11 +87,6 @@ where Self: LoadPendingBlock + SpawnBlocking, N: FullNodeComponents, { - #[inline] - fn provider(&self) -> impl BlockReaderIdExt { - self.inner.provider() - } - #[inline] fn cache(&self) -> &EthStateCache { self.inner.cache() diff --git a/crates/optimism/rpc/src/eth/transaction.rs b/crates/optimism/rpc/src/eth/transaction.rs index 4ac2d7e6b..451c8a805 100644 --- a/crates/optimism/rpc/src/eth/transaction.rs +++ b/crates/optimism/rpc/src/eth/transaction.rs @@ -10,7 +10,7 @@ use reth_provider::{BlockReaderIdExt, ReceiptProvider, TransactionsProvider}; use reth_rpc::eth::EthTxBuilder; use reth_rpc_eth_api::{ helpers::{EthSigner, EthTransactions, LoadTransaction, SpawnBlocking}, - 
FromEthApiError, FullEthApiTypes, TransactionCompat, + FromEthApiError, FullEthApiTypes, RpcNodeCore, TransactionCompat, }; use reth_rpc_eth_types::{utils::recover_raw_transaction, EthStateCache}; use reth_transaction_pool::{PoolTransaction, TransactionOrigin, TransactionPool}; @@ -61,21 +61,12 @@ where impl LoadTransaction for OpEthApi where Self: SpawnBlocking + FullEthApiTypes, - N: FullNodeComponents, + N: RpcNodeCore, { - type Pool = N::Pool; - - fn provider(&self) -> impl TransactionsProvider { - self.inner.provider() - } - + #[inline] fn cache(&self) -> &EthStateCache { self.inner.cache() } - - fn pool(&self) -> &Self::Pool { - self.inner.pool() - } } impl OpEthApi diff --git a/crates/rpc/rpc-eth-api/src/helpers/block.rs b/crates/rpc/rpc-eth-api/src/helpers/block.rs index 7599cbb59..839b48919 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/block.rs @@ -85,13 +85,13 @@ pub trait EthBlocks: LoadBlock { async move { if block_id.is_pending() { // Pending block can be fetched directly without need for caching - return Ok(LoadBlock::provider(self) + return Ok(RpcNodeCore::provider(self) .pending_block() .map_err(Self::Error::from_eth_err)? .map(|block| block.body.transactions.len())) } - let block_hash = match LoadBlock::provider(self) + let block_hash = match RpcNodeCore::provider(self) .block_hash_for_id(block_id) .map_err(Self::Error::from_eth_err)? { @@ -132,7 +132,7 @@ pub trait EthBlocks: LoadBlock { if block_id.is_pending() { // First, try to get the pending block from the provider, in case we already // received the actual pending block from the CL. - if let Some((block, receipts)) = LoadBlock::provider(self) + if let Some((block, receipts)) = RpcNodeCore::provider(self) .pending_block_and_receipts() .map_err(Self::Error::from_eth_err)? { @@ -145,7 +145,7 @@ pub trait EthBlocks: LoadBlock { } } - if let Some(block_hash) = LoadBlock::provider(self) + if let Some(block_hash) = RpcNodeCore::provider(self) .block_hash_for_id(block_id) .map_err(Self::Error::from_eth_err)? { @@ -167,7 +167,7 @@ pub trait EthBlocks: LoadBlock { &self, block_id: BlockId, ) -> Result>, Self::Error> { - LoadBlock::provider(self).ommers_by_id(block_id).map_err(Self::Error::from_eth_err) + RpcNodeCore::provider(self).ommers_by_id(block_id).map_err(Self::Error::from_eth_err) } /// Returns uncle block at given index in given block. @@ -182,12 +182,12 @@ pub trait EthBlocks: LoadBlock { async move { let uncles = if block_id.is_pending() { // Pending block can be fetched directly without need for caching - LoadBlock::provider(self) + RpcNodeCore::provider(self) .pending_block() .map_err(Self::Error::from_eth_err)? .map(|block| block.body.ommers) } else { - LoadBlock::provider(self) + RpcNodeCore::provider(self) .ommers_by_id(block_id) .map_err(Self::Error::from_eth_err)? } @@ -202,11 +202,6 @@ pub trait EthBlocks: LoadBlock { /// /// Behaviour shared by several `eth_` RPC methods, not exclusive to `eth_` blocks RPC methods. pub trait LoadBlock: LoadPendingBlock + SpawnBlocking { - // Returns a handle for reading data from disk. - /// - /// Data access in default (L1) trait method implementations. - fn provider(&self) -> impl BlockReaderIdExt; - /// Returns a handle for reading data from memory. /// /// Data access in default (L1) trait method implementations. 
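Across these helper-trait changes the common pattern is that data access now flows through the `RpcNodeCore` supertrait instead of per-trait `provider()`/`pool()`/`evm_config()` accessors. A minimal illustrative sketch (function name and exact bounds are assumptions, import paths as used elsewhere in this series):

    use reth_rpc_eth_api::RpcNodeCore;
    use reth_transaction_pool::TransactionPool;

    /// Reads the pending-pool size through the `RpcNodeCore` components.
    fn pending_pool_size<T>(api: &T) -> usize
    where
        T: RpcNodeCore<Pool: TransactionPool>,
    {
        // `pool()` is provided by `RpcNodeCore`; `pool_size()` by `TransactionPool`.
        api.pool().pool_size().pending
    }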
@@ -220,7 +215,8 @@ pub trait LoadBlock: LoadPendingBlock + SpawnBlocking { async move { if block_id.is_pending() { // Pending block can be fetched directly without need for caching - if let Some(pending_block) = RpcNodeCore::provider(self) + if let Some(pending_block) = self + .provider() .pending_block_with_senders() .map_err(Self::Error::from_eth_err)? { @@ -234,7 +230,8 @@ pub trait LoadBlock: LoadPendingBlock + SpawnBlocking { }; } - let block_hash = match RpcNodeCore::provider(self) + let block_hash = match self + .provider() .block_hash_for_id(block_id) .map_err(Self::Error::from_eth_err)? { diff --git a/crates/rpc/rpc-eth-api/src/helpers/call.rs b/crates/rpc/rpc-eth-api/src/helpers/call.rs index c2b6524b3..b90577c14 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/call.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/call.rs @@ -259,7 +259,8 @@ pub trait EthCall: Call + LoadPendingBlock { // if it's not pending, we should always use block_hash over block_number to ensure that // different provider calls query data related to the same block. if !is_block_target_pending { - target_block = LoadBlock::provider(self) + target_block = self + .provider() .block_hash_for_id(target_block) .map_err(|_| EthApiError::HeaderNotFound(target_block))? .ok_or_else(|| EthApiError::HeaderNotFound(target_block))? diff --git a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs index a91e4e6fa..c693945ca 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs @@ -111,7 +111,7 @@ pub trait EthTransactions: LoadTransaction { } self.spawn_blocking_io(move |ref this| { - Ok(LoadTransaction::provider(this) + Ok(RpcNodeCore::provider(this) .transaction_by_hash(hash) .map_err(Self::Error::from_eth_err)? .map(|tx| tx.encoded_2718().into())) @@ -166,7 +166,7 @@ pub trait EthTransactions: LoadTransaction { { let this = self.clone(); self.spawn_blocking_io(move |_| { - let (tx, meta) = match LoadTransaction::provider(&this) + let (tx, meta) = match RpcNodeCore::provider(&this) .transaction_by_hash_with_meta(hash) .map_err(Self::Error::from_eth_err)? { @@ -257,7 +257,7 @@ pub trait EthTransactions: LoadTransaction { return Ok(None); } - let Ok(high) = LoadBlock::provider(self).best_block_number() else { + let Ok(high) = RpcNodeCore::provider(self).best_block_number() else { return Err(EthApiError::HeaderNotFound(BlockNumberOrTag::Latest.into()).into()); }; @@ -383,10 +383,15 @@ pub trait EthTransactions: LoadTransaction { let transaction = self.sign_request(&from, request).await?.with_signer(from); - let pool_transaction = <::Pool as TransactionPool>::Transaction::try_from_consensus(transaction.into()).map_err(|_| EthApiError::TransactionConversionError)?; + let pool_transaction = + <::Pool as TransactionPool>::Transaction::try_from_consensus( + transaction.into(), + ) + .map_err(|_| EthApiError::TransactionConversionError)?; // submit the transaction to the pool with a `Local` origin - let hash = LoadTransaction::pool(self) + let hash = self + .pool() .add_transaction(TransactionOrigin::Local, pool_transaction) .await .map_err(Self::Error::from_eth_err)?; @@ -460,26 +465,14 @@ pub trait EthTransactions: LoadTransaction { /// /// Behaviour shared by several `eth_` RPC methods, not exclusive to `eth_` transactions RPC /// methods. -pub trait LoadTransaction: SpawnBlocking + FullEthApiTypes { - /// Transaction pool with pending transactions. 
[`TransactionPool::Transaction`] is the - /// supported transaction type. - type Pool: TransactionPool; - - /// Returns a handle for reading data from disk. - /// - /// Data access in default (L1) trait method implementations. - fn provider(&self) -> impl TransactionsProvider; - +pub trait LoadTransaction: + SpawnBlocking + FullEthApiTypes + RpcNodeCore +{ /// Returns a handle for reading data from memory. /// /// Data access in default (L1) trait method implementations. fn cache(&self) -> &EthStateCache; - /// Returns a handle for reading data from pool. - /// - /// Data access in default (L1) trait method implementations. - fn pool(&self) -> &Self::Pool; - /// Returns the transaction by hash. /// /// Checks the pool and state. diff --git a/crates/rpc/rpc/src/eth/helpers/block.rs b/crates/rpc/rpc/src/eth/helpers/block.rs index b29a24c38..22853f263 100644 --- a/crates/rpc/rpc/src/eth/helpers/block.rs +++ b/crates/rpc/rpc/src/eth/helpers/block.rs @@ -73,11 +73,6 @@ where Self: LoadPendingBlock + SpawnBlocking, Provider: BlockReaderIdExt, { - #[inline] - fn provider(&self) -> impl BlockReaderIdExt { - self.inner.provider() - } - #[inline] fn cache(&self) -> &EthStateCache { self.inner.cache() diff --git a/crates/rpc/rpc/src/eth/helpers/transaction.rs b/crates/rpc/rpc/src/eth/helpers/transaction.rs index 24a13cb80..c4505bef0 100644 --- a/crates/rpc/rpc/src/eth/helpers/transaction.rs +++ b/crates/rpc/rpc/src/eth/helpers/transaction.rs @@ -3,7 +3,7 @@ use reth_provider::{BlockReaderIdExt, TransactionsProvider}; use reth_rpc_eth_api::{ helpers::{EthSigner, EthTransactions, LoadTransaction, SpawnBlocking}, - FullEthApiTypes, + FullEthApiTypes, RpcNodeCore, }; use reth_rpc_eth_types::EthStateCache; use reth_transaction_pool::TransactionPool; @@ -31,26 +31,14 @@ where impl LoadTransaction for EthApi where - Self: SpawnBlocking + FullEthApiTypes, - Provider: TransactionsProvider, - Pool: TransactionPool, + Self: SpawnBlocking + + FullEthApiTypes + + RpcNodeCore, { - type Pool = Pool; - - #[inline] - fn provider(&self) -> impl TransactionsProvider { - self.inner.provider() - } - #[inline] fn cache(&self) -> &EthStateCache { self.inner.cache() } - - #[inline] - fn pool(&self) -> &Self::Pool { - self.inner.pool() - } } #[cfg(test)] From 768404c59e7b673d055faf1791bb376b97466ef4 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Sun, 27 Oct 2024 16:30:02 +0800 Subject: [PATCH 289/425] chore(rpc): remove redundant trait bounds in eth api (#12105) --- crates/e2e-test-utils/src/node.rs | 7 ++----- crates/rpc/rpc-eth-api/src/helpers/transaction.rs | 2 +- crates/rpc/rpc/src/eth/filter.rs | 3 +-- 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/crates/e2e-test-utils/src/node.rs b/crates/e2e-test-utils/src/node.rs index 07df36a33..8b385115b 100644 --- a/crates/e2e-test-utils/src/node.rs +++ b/crates/e2e-test-utils/src/node.rs @@ -10,10 +10,7 @@ use reth::{ network::PeersHandleProvider, providers::{BlockReader, BlockReaderIdExt, CanonStateSubscriptions, StageCheckpointReader}, rpc::{ - api::eth::{ - helpers::{EthApiSpec, EthTransactions, TraceExt}, - FullEthApiTypes, - }, + api::eth::helpers::{EthApiSpec, EthTransactions, TraceExt}, types::engine::PayloadStatusEnum, }, }; @@ -97,7 +94,7 @@ where where Engine::ExecutionPayloadEnvelopeV3: From + PayloadEnvelopeExt, Engine::ExecutionPayloadEnvelopeV4: From + PayloadEnvelopeExt, - AddOns::EthApi: EthApiSpec + EthTransactions + TraceExt + FullEthApiTypes, + AddOns::EthApi: EthApiSpec + EthTransactions + TraceExt, { let mut chain = Vec::with_capacity(length 
as usize); for i in 0..length { diff --git a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs index c693945ca..791cb2ae1 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs @@ -231,7 +231,7 @@ pub trait EthTransactions: LoadTransaction { include_pending: bool, ) -> impl Future>, Self::Error>> + Send where - Self: LoadBlock + LoadState + FullEthApiTypes, + Self: LoadBlock + LoadState, { async move { // Check the pool first diff --git a/crates/rpc/rpc/src/eth/filter.rs b/crates/rpc/rpc/src/eth/filter.rs index 5ef224609..3d05cdc72 100644 --- a/crates/rpc/rpc/src/eth/filter.rs +++ b/crates/rpc/rpc/src/eth/filter.rs @@ -145,8 +145,7 @@ where impl EthFilter where Provider: BlockReader + BlockIdReader + EvmEnvProvider + 'static, - Pool: TransactionPool + 'static, - ::Transaction: 'static, + Pool: TransactionPool + 'static, Eth: FullEthApiTypes, { /// Returns all the filter changes for the given id, if any From 131cc5175ebf8c5714e090b230b4926678ef7d97 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Sun, 27 Oct 2024 17:21:34 +0800 Subject: [PATCH 290/425] chore(rpc): remove redundant `EthBlocks::provider` (#12109) --- crates/optimism/rpc/src/eth/block.rs | 11 ++---- crates/rpc/rpc-eth-api/src/helpers/block.rs | 39 +++++++++------------ crates/rpc/rpc/src/eth/helpers/block.rs | 7 +--- 3 files changed, 21 insertions(+), 36 deletions(-) diff --git a/crates/optimism/rpc/src/eth/block.rs b/crates/optimism/rpc/src/eth/block.rs index 42c4789d4..ed31a7509 100644 --- a/crates/optimism/rpc/src/eth/block.rs +++ b/crates/optimism/rpc/src/eth/block.rs @@ -4,13 +4,13 @@ use alloy_rpc_types::BlockId; use op_alloy_network::Network; use op_alloy_rpc_types::OpTransactionReceipt; use reth_chainspec::ChainSpecProvider; -use reth_node_api::{FullNodeComponents, NodeTypes}; +use reth_node_api::FullNodeComponents; use reth_optimism_chainspec::OpChainSpec; use reth_primitives::TransactionMeta; use reth_provider::HeaderProvider; use reth_rpc_eth_api::{ helpers::{EthBlocks, LoadBlock, LoadPendingBlock, LoadReceipt, SpawnBlocking}, - RpcReceipt, + RpcNodeCore, RpcReceipt, }; use reth_rpc_eth_types::EthStateCache; @@ -22,13 +22,8 @@ where Error = OpEthApiError, NetworkTypes: Network, >, - N: FullNodeComponents>, + N: RpcNodeCore + HeaderProvider>, { - #[inline] - fn provider(&self) -> impl HeaderProvider { - self.inner.provider() - } - async fn block_receipts( &self, block_id: BlockId, diff --git a/crates/rpc/rpc-eth-api/src/helpers/block.rs b/crates/rpc/rpc-eth-api/src/helpers/block.rs index 839b48919..861afb3ad 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/block.rs @@ -9,7 +9,7 @@ use reth_provider::{BlockIdReader, BlockReader, BlockReaderIdExt, HeaderProvider use reth_rpc_eth_types::EthStateCache; use reth_rpc_types_compat::block::{from_block, uncle_block_from_header}; -use crate::{FromEthApiError, FullEthApiTypes, RpcBlock, RpcNodeCore, RpcReceipt}; +use crate::{FromEthApiError, FullEthApiTypes, RpcBlock, RpcReceipt}; use super::{LoadPendingBlock, LoadReceipt, SpawnBlocking}; @@ -20,12 +20,7 @@ pub type BlockAndReceiptsResult = Result impl HeaderProvider; - +pub trait EthBlocks: LoadBlock { /// Returns the block header for the given block id. fn rpc_block_header( &self, @@ -52,15 +47,15 @@ pub trait EthBlocks: LoadBlock { async move { let Some(block) = self.block_with_senders(block_id).await? 
else { return Ok(None) }; let block_hash = block.hash(); - let mut total_difficulty = EthBlocks::provider(self) + let mut total_difficulty = self + .provider() .header_td_by_number(block.number) .map_err(Self::Error::from_eth_err)?; if total_difficulty.is_none() { // if we failed to find td after we successfully loaded the block, try again using // the hash this only matters if the chain is currently transitioning the merge block and there's a reorg: - total_difficulty = EthBlocks::provider(self) - .header_td(&block.hash()) - .map_err(Self::Error::from_eth_err)?; + total_difficulty = + self.provider().header_td(&block.hash()).map_err(Self::Error::from_eth_err)?; } let block = from_block( @@ -85,13 +80,15 @@ pub trait EthBlocks: LoadBlock { async move { if block_id.is_pending() { // Pending block can be fetched directly without need for caching - return Ok(RpcNodeCore::provider(self) + return Ok(self + .provider() .pending_block() .map_err(Self::Error::from_eth_err)? .map(|block| block.body.transactions.len())) } - let block_hash = match RpcNodeCore::provider(self) + let block_hash = match self + .provider() .block_hash_for_id(block_id) .map_err(Self::Error::from_eth_err)? { @@ -132,7 +129,8 @@ pub trait EthBlocks: LoadBlock { if block_id.is_pending() { // First, try to get the pending block from the provider, in case we already // received the actual pending block from the CL. - if let Some((block, receipts)) = RpcNodeCore::provider(self) + if let Some((block, receipts)) = self + .provider() .pending_block_and_receipts() .map_err(Self::Error::from_eth_err)? { @@ -145,9 +143,8 @@ pub trait EthBlocks: LoadBlock { } } - if let Some(block_hash) = RpcNodeCore::provider(self) - .block_hash_for_id(block_id) - .map_err(Self::Error::from_eth_err)? + if let Some(block_hash) = + self.provider().block_hash_for_id(block_id).map_err(Self::Error::from_eth_err)? { return LoadReceipt::cache(self) .get_block_and_receipts(block_hash) @@ -167,7 +164,7 @@ pub trait EthBlocks: LoadBlock { &self, block_id: BlockId, ) -> Result>, Self::Error> { - RpcNodeCore::provider(self).ommers_by_id(block_id).map_err(Self::Error::from_eth_err) + self.provider().ommers_by_id(block_id).map_err(Self::Error::from_eth_err) } /// Returns uncle block at given index in given block. @@ -182,14 +179,12 @@ pub trait EthBlocks: LoadBlock { async move { let uncles = if block_id.is_pending() { // Pending block can be fetched directly without need for caching - RpcNodeCore::provider(self) + self.provider() .pending_block() .map_err(Self::Error::from_eth_err)? .map(|block| block.body.ommers) } else { - RpcNodeCore::provider(self) - .ommers_by_id(block_id) - .map_err(Self::Error::from_eth_err)? + self.provider().ommers_by_id(block_id).map_err(Self::Error::from_eth_err)? 
} .unwrap_or_default(); diff --git a/crates/rpc/rpc/src/eth/helpers/block.rs b/crates/rpc/rpc/src/eth/helpers/block.rs index 22853f263..a869cbd54 100644 --- a/crates/rpc/rpc/src/eth/helpers/block.rs +++ b/crates/rpc/rpc/src/eth/helpers/block.rs @@ -17,14 +17,9 @@ where Self: LoadBlock< Error = EthApiError, NetworkTypes: alloy_network::Network, + Provider: HeaderProvider, >, - Provider: HeaderProvider, { - #[inline] - fn provider(&self) -> impl HeaderProvider { - self.inner.provider() - } - async fn block_receipts( &self, block_id: BlockId, From b7b3f8149c8f91c1b075f5d610cf3210bc23bd2e Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Sun, 27 Oct 2024 22:24:21 +0800 Subject: [PATCH 291/425] chore(rpc): remove redundant `Trace::evm_config` (#12102) --- crates/optimism/rpc/src/eth/mod.rs | 8 ++------ crates/rpc/rpc-eth-api/src/helpers/trace.rs | 15 +++++---------- crates/rpc/rpc/src/eth/helpers/trace.rs | 10 ++-------- 3 files changed, 9 insertions(+), 24 deletions(-) diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index bc1692dff..d12a2dd02 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -243,13 +243,9 @@ where impl Trace for OpEthApi where - Self: LoadState, - N: FullNodeComponents, + Self: LoadState>, + N: RpcNodeCore, { - #[inline] - fn evm_config(&self) -> &impl ConfigureEvm
{ - self.inner.evm_config() - } } impl AddDevSigners for OpEthApi diff --git a/crates/rpc/rpc-eth-api/src/helpers/trace.rs b/crates/rpc/rpc-eth-api/src/helpers/trace.rs index 6c7dd0f6f..da1d1cdb9 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/trace.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/trace.rs @@ -21,12 +21,7 @@ use revm_primitives::{EnvWithHandlerCfg, EvmState, ExecutionResult, ResultAndSta use super::{Call, LoadBlock, LoadPendingBlock, LoadState, LoadTransaction}; /// Executes CPU heavy tasks. -pub trait Trace: LoadState { - /// Returns a handle for reading evm config. - /// - /// Data access in default (L1) trait method implementations. - fn evm_config(&self) -> &impl ConfigureEvm
; - +pub trait Trace: LoadState> { /// Executes the [`EnvWithHandlerCfg`] against the given [Database] without committing state /// changes. fn inspect( @@ -60,7 +55,7 @@ pub trait Trace: LoadState { I: GetInspector, { - let mut evm = Trace::evm_config(self).evm_with_env_and_inspector(db, env, inspector); + let mut evm = self.evm_config().evm_with_env_and_inspector(db, env, inspector); let res = evm.transact().map_err(Self::Error::from_evm_err)?; let (db, env) = evm.into_db_and_env_with_handler_cfg(); Ok((res, env, db)) @@ -201,7 +196,7 @@ pub trait Trace: LoadState { // apply relevant system calls let mut system_caller = SystemCaller::new( - Trace::evm_config(&this).clone(), + this.evm_config().clone(), RpcNodeCore::provider(&this).chain_spec(), ); system_caller @@ -344,7 +339,7 @@ pub trait Trace: LoadState { // apply relevant system calls let mut system_caller = SystemCaller::new( - Trace::evm_config(&this).clone(), + this.evm_config().clone(), RpcNodeCore::provider(&this).chain_spec(), ); system_caller @@ -379,7 +374,7 @@ pub trait Trace: LoadState { block_number: Some(block_number), base_fee: Some(base_fee), }; - let tx_env = Trace::evm_config(&this).tx_env(tx, *signer); + let tx_env = this.evm_config().tx_env(tx, *signer); (tx_info, tx_env) }) .peekable(); diff --git a/crates/rpc/rpc/src/eth/helpers/trace.rs b/crates/rpc/rpc/src/eth/helpers/trace.rs index c40b7acf5..b270ed1b2 100644 --- a/crates/rpc/rpc/src/eth/helpers/trace.rs +++ b/crates/rpc/rpc/src/eth/helpers/trace.rs @@ -6,13 +6,7 @@ use reth_rpc_eth_api::helpers::{LoadState, Trace}; use crate::EthApi; -impl Trace for EthApi -where - Self: LoadState, - EvmConfig: ConfigureEvm
, +impl Trace for EthApi where + Self: LoadState> { - #[inline] - fn evm_config(&self) -> &impl ConfigureEvm
{ - self.inner.evm_config() - } } From 0c516091b8f45f07cf4d76879153d3cd90015363 Mon Sep 17 00:00:00 2001 From: Parikalp Bhardwaj <53660958+Parikalp-Bhardwaj@users.noreply.github.com> Date: Sun, 27 Oct 2024 19:11:03 +0400 Subject: [PATCH 292/425] TransactionsHandle propagation commands should not adhere to caching (#12079) Co-authored-by: Matthias Seitz --- crates/net/network/src/transactions/mod.rs | 113 +++++++++++++++++---- 1 file changed, 93 insertions(+), 20 deletions(-) diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index 439f92bad..b6e589c6f 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -416,6 +416,7 @@ where fn propagate_all(&mut self, hashes: Vec) { let propagated = self.propagate_transactions( self.pool.get_all(hashes).into_iter().map(PropagateTransaction::new).collect(), + PropagationMode::Basic, ); // notify pool so events get fired @@ -431,6 +432,7 @@ where fn propagate_transactions( &mut self, to_propagate: Vec, + propagation_mode: PropagationMode, ) -> PropagatedTransactions { let mut propagated = PropagatedTransactions::default(); if self.network.tx_gossip_disabled() { @@ -449,14 +451,18 @@ where PropagateTransactionsBuilder::full(peer.version) }; - // Iterate through the transactions to propagate and fill the hashes and full - // transaction lists, before deciding whether or not to send full transactions to the - // peer. - for tx in &to_propagate { - // Only proceed if the transaction is not in the peer's list of seen transactions - if !peer.seen_transactions.contains(&tx.hash()) { - // add transaction to the list of hashes to propagate - builder.push(tx); + if propagation_mode.is_forced() { + builder.extend(to_propagate.iter()); + } else { + // Iterate through the transactions to propagate and fill the hashes and full + // transaction lists, before deciding whether or not to send full transactions to + // the peer. 
+ for tx in &to_propagate { + // Only proceed if the transaction is not in the peer's list of seen + // transactions + if !peer.seen_transactions.contains(&tx.hash()) { + builder.push(tx); + } } } @@ -514,6 +520,7 @@ where &mut self, txs: Vec, peer_id: PeerId, + propagation_mode: PropagationMode, ) -> Option { trace!(target: "net::tx", ?peer_id, "Propagating transactions to peer"); @@ -525,10 +532,17 @@ where let to_propagate = self.pool.get_all(txs).into_iter().map(PropagateTransaction::new); - // Iterate through the transactions to propagate and fill the hashes and full transaction - for tx in to_propagate { - if !peer.seen_transactions.contains(&tx.hash()) { - full_transactions.push(&tx); + if propagation_mode.is_forced() { + // skip cache check if forced + full_transactions.extend(to_propagate); + } else { + // Iterate through the transactions to propagate and fill the hashes and full + // transaction + for tx in to_propagate { + if !peer.seen_transactions.contains(&tx.hash()) { + // Only include if the peer hasn't seen the transaction + full_transactions.push(&tx); + } } } @@ -546,6 +560,7 @@ where // mark transaction as seen by peer peer.seen_transactions.insert(hash); } + // send hashes of transactions self.network.send_transactions_hashes(peer_id, new_pooled_hashes); } @@ -557,6 +572,7 @@ where // mark transaction as seen by peer peer.seen_transactions.insert(tx.hash()); } + // send full transactions self.network.send_transactions(peer_id, new_full_transactions); } @@ -570,7 +586,12 @@ where /// Propagate the transaction hashes to the given peer /// /// Note: This will only send the hashes for transactions that exist in the pool. - fn propagate_hashes_to(&mut self, hashes: Vec, peer_id: PeerId) { + fn propagate_hashes_to( + &mut self, + hashes: Vec, + peer_id: PeerId, + propagation_mode: PropagationMode, + ) { trace!(target: "net::tx", "Start propagating transactions as hashes"); // This fetches a transactions from the pool, including the blob transactions, which are @@ -589,9 +610,14 @@ where // check if transaction is known to peer let mut hashes = PooledTransactionsHashesBuilder::new(peer.version); - for tx in to_propagate { - if !peer.seen_transactions.insert(tx.hash()) { - hashes.push(&tx); + if propagation_mode.is_forced() { + hashes.extend(to_propagate) + } else { + for tx in to_propagate { + if !peer.seen_transactions.contains(&tx.hash()) { + // Include if the peer hasn't seen it + hashes.push(&tx); + } } } @@ -880,14 +906,16 @@ where self.on_new_pending_transactions(vec![hash]) } TransactionsCommand::PropagateHashesTo(hashes, peer) => { - self.propagate_hashes_to(hashes, peer) + self.propagate_hashes_to(hashes, peer, PropagationMode::Forced) } TransactionsCommand::GetActivePeers(tx) => { let peers = self.peers.keys().copied().collect::>(); tx.send(peers).ok(); } TransactionsCommand::PropagateTransactionsTo(txs, peer) => { - if let Some(propagated) = self.propagate_full_transactions_to_peer(txs, peer) { + if let Some(propagated) = + self.propagate_full_transactions_to_peer(txs, peer, PropagationMode::Forced) + { self.pool.on_propagated(propagated); } } @@ -1395,6 +1423,29 @@ where } } +/// Represents the different modes of transaction propagation. +/// +/// This enum is used to determine how transactions are propagated to peers in the network. +#[derive(Debug, Copy, Clone, Eq, PartialEq)] +enum PropagationMode { + /// Default propagation mode. + /// + /// Transactions are only sent to peers that haven't seen them yet. + Basic, + /// Forced propagation mode. 
+ /// + /// Transactions are sent to all peers regardless of whether they have been sent or received + /// before. + Forced, +} + +impl PropagationMode { + /// Returns `true` if the propagation kind is `Forced`. + const fn is_forced(self) -> bool { + matches!(self, Self::Forced) + } +} + /// A transaction that's about to be propagated to multiple peers. #[derive(Debug, Clone)] struct PropagateTransaction { @@ -1441,6 +1492,13 @@ impl PropagateTransactionsBuilder { Self::Full(FullTransactionsBuilder::new(version)) } + /// Appends all transactions + fn extend<'a>(&mut self, txs: impl IntoIterator) { + for tx in txs { + self.push(tx); + } + } + /// Appends a transaction to the list. fn push(&mut self, transaction: &PropagateTransaction) { match self { @@ -1502,6 +1560,13 @@ impl FullTransactionsBuilder { } } + /// Appends all transactions. + fn extend(&mut self, txs: impl IntoIterator) { + for tx in txs { + self.push(&tx) + } + } + /// Append a transaction to the list of full transaction if the total message bytes size doesn't /// exceed the soft maximum target byte size. The limit is soft, meaning if one single /// transaction goes over the limit, it will be broadcasted in its own [`Transactions`] @@ -1581,6 +1646,13 @@ impl PooledTransactionsHashesBuilder { } } + /// Appends all hashes + fn extend(&mut self, txs: impl IntoIterator) { + for tx in txs { + self.push(&tx); + } + } + fn push(&mut self, tx: &PropagateTransaction) { match self { Self::Eth66(msg) => msg.0.push(tx.hash()), @@ -2388,7 +2460,8 @@ mod tests { let eip4844_tx = Arc::new(factory.create_eip4844()); propagate.push(PropagateTransaction::new(eip4844_tx.clone())); - let propagated = tx_manager.propagate_transactions(propagate.clone()); + let propagated = + tx_manager.propagate_transactions(propagate.clone(), PropagationMode::Basic); assert_eq!(propagated.0.len(), 2); let prop_txs = propagated.0.get(eip1559_tx.transaction.hash()).unwrap(); assert_eq!(prop_txs.len(), 1); @@ -2404,7 +2477,7 @@ mod tests { peer.seen_transactions.contains(eip4844_tx.transaction.hash()); // propagate again - let propagated = tx_manager.propagate_transactions(propagate); + let propagated = tx_manager.propagate_transactions(propagate, PropagationMode::Basic); assert!(propagated.0.is_empty()); } } From 1c36b7161216a76382a082cd91cdf8c006f88609 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 28 Oct 2024 08:13:47 +0100 Subject: [PATCH 293/425] docs: small fix in payload doc (#12116) --- crates/payload/basic/src/lib.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/crates/payload/basic/src/lib.rs b/crates/payload/basic/src/lib.rs index b8eab3c0f..d0bb29502 100644 --- a/crates/payload/basic/src/lib.rs +++ b/crates/payload/basic/src/lib.rs @@ -95,10 +95,11 @@ impl BasicPayloadJobGenerator Client software SHOULD stop the updating process when either a call to engine_getPayload - // > with the build process's payloadId is made or SECONDS_PER_SLOT (12s in the Mainnet - // > configuration) have passed since the point in time identified by the timestamp parameter. - // See also + /// > Client software SHOULD stop the updating process when either a call to engine_getPayload + /// > with the build process's payloadId is made or SECONDS_PER_SLOT (12s in the Mainnet + /// > configuration) have passed since the point in time identified by the timestamp parameter. 
+ /// + /// See also #[inline] fn max_job_duration(&self, unix_timestamp: u64) -> Duration { let duration_until_timestamp = duration_until(unix_timestamp); From 8605d04a09679904ef25594729ac6b83dfcacfcb Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 28 Oct 2024 09:30:06 +0100 Subject: [PATCH 294/425] refactor: rm re-exports of alloy eip 4844 constants (#12120) --- Cargo.lock | 2 +- crates/consensus/common/src/validation.rs | 6 ++---- crates/ethereum/payload/src/lib.rs | 3 +-- crates/node/events/Cargo.toml | 2 +- crates/node/events/src/node.rs | 5 ++--- crates/primitives/src/constants/eip4844.rs | 7 ------- crates/primitives/src/constants/mod.rs | 3 --- crates/primitives/src/transaction/mod.rs | 2 +- crates/primitives/src/transaction/pooled.rs | 2 +- crates/rpc/rpc-eth-api/src/helpers/fee.rs | 2 +- crates/rpc/rpc-eth-api/src/helpers/pending_block.rs | 5 +++-- crates/rpc/rpc-eth-types/src/fee_history.rs | 2 +- crates/transaction-pool/src/pool/txpool.rs | 6 ++++-- crates/transaction-pool/src/test_utils/mock.rs | 8 ++++---- crates/transaction-pool/src/traits.rs | 3 ++- crates/transaction-pool/src/validate/eth.rs | 5 ++--- 16 files changed, 26 insertions(+), 37 deletions(-) delete mode 100644 crates/primitives/src/constants/eip4844.rs diff --git a/Cargo.lock b/Cargo.lock index b483fd664..6762da237 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8033,6 +8033,7 @@ name = "reth-node-events" version = "1.1.0" dependencies = [ "alloy-consensus", + "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", "futures", @@ -8041,7 +8042,6 @@ dependencies = [ "reth-beacon-consensus", "reth-network", "reth-network-api", - "reth-primitives", "reth-primitives-traits", "reth-provider", "reth-prune", diff --git a/crates/consensus/common/src/validation.rs b/crates/consensus/common/src/validation.rs index 1070bbdbc..c6539cdcf 100644 --- a/crates/consensus/common/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -1,12 +1,10 @@ //! Collection of methods for block validation. use alloy_consensus::constants::MAXIMUM_EXTRA_DATA_SIZE; +use alloy_eips::eip4844::{DATA_GAS_PER_BLOB, MAX_DATA_GAS_PER_BLOCK}; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_consensus::ConsensusError; -use reth_primitives::{ - constants::eip4844::{DATA_GAS_PER_BLOB, MAX_DATA_GAS_PER_BLOCK}, - EthereumHardfork, GotExpected, Header, SealedBlock, SealedHeader, -}; +use reth_primitives::{EthereumHardfork, GotExpected, Header, SealedBlock, SealedHeader}; use revm_primitives::calc_excess_blob_gas; /// Gas used needs to be less than gas limit. Gas used is going to be checked after execution. 
diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index f14c14588..73b22efac 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -10,7 +10,7 @@ #![allow(clippy::useless_let_if_seq)] use alloy_consensus::EMPTY_OMMER_ROOT_HASH; -use alloy_eips::{eip7685::Requests, merge::BEACON_NONCE}; +use alloy_eips::{eip4844::MAX_DATA_GAS_PER_BLOCK, eip7685::Requests, merge::BEACON_NONCE}; use alloy_primitives::U256; use reth_basic_payload_builder::{ commit_withdrawals, is_better_payload, BuildArguments, BuildOutcome, PayloadBuilder, @@ -25,7 +25,6 @@ use reth_execution_types::ExecutionOutcome; use reth_payload_builder::{EthBuiltPayload, EthPayloadBuilderAttributes}; use reth_payload_primitives::{PayloadBuilderAttributes, PayloadBuilderError}; use reth_primitives::{ - constants::eip4844::MAX_DATA_GAS_PER_BLOCK, proofs::{self}, revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg}, Block, BlockBody, EthereumHardforks, Header, Receipt, diff --git a/crates/node/events/Cargo.toml b/crates/node/events/Cargo.toml index 3b515b8ab..6af3d8cbe 100644 --- a/crates/node/events/Cargo.toml +++ b/crates/node/events/Cargo.toml @@ -19,13 +19,13 @@ reth-network-api.workspace = true reth-stages.workspace = true reth-prune.workspace = true reth-static-file.workspace = true -reth-primitives.workspace = true reth-primitives-traits.workspace = true # ethereum alloy-primitives.workspace = true alloy-rpc-types-engine.workspace = true alloy-consensus.workspace = true +alloy-eips.workspace = true # async tokio.workspace = true diff --git a/crates/node/events/src/node.rs b/crates/node/events/src/node.rs index 92f8cb5e0..fb0f4d48d 100644 --- a/crates/node/events/src/node.rs +++ b/crates/node/events/src/node.rs @@ -10,7 +10,6 @@ use reth_beacon_consensus::{ }; use reth_network::NetworkEvent; use reth_network_api::PeersInfo; -use reth_primitives::constants; use reth_primitives_traits::{format_gas, format_gas_throughput}; use reth_prune::PrunerEvent; use reth_stages::{EntitiesCheckpoint, ExecOutput, PipelineEvent, StageCheckpoint, StageId}; @@ -265,8 +264,8 @@ impl NodeState { gas_throughput=%format_gas_throughput(block.header.gas_used, elapsed), full=%format!("{:.1}%", block.header.gas_used as f64 * 100.0 / block.header.gas_limit as f64), base_fee=%format!("{:.2}gwei", block.header.base_fee_per_gas.unwrap_or(0) as f64 / GWEI_TO_WEI as f64), - blobs=block.header.blob_gas_used.unwrap_or(0) / constants::eip4844::DATA_GAS_PER_BLOB, - excess_blobs=block.header.excess_blob_gas.unwrap_or(0) / constants::eip4844::DATA_GAS_PER_BLOB, + blobs=block.header.blob_gas_used.unwrap_or(0) / alloy_eips::eip4844::DATA_GAS_PER_BLOB, + excess_blobs=block.header.excess_blob_gas.unwrap_or(0) / alloy_eips::eip4844::DATA_GAS_PER_BLOB, ?elapsed, "Block added to canonical chain" ); diff --git a/crates/primitives/src/constants/eip4844.rs b/crates/primitives/src/constants/eip4844.rs deleted file mode 100644 index 14e892adf..000000000 --- a/crates/primitives/src/constants/eip4844.rs +++ /dev/null @@ -1,7 +0,0 @@ -//! [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844#parameters) protocol constants and utils for shard Blob Transactions. 
- -pub use alloy_eips::eip4844::{ - BLOB_GASPRICE_UPDATE_FRACTION, BLOB_TX_MIN_BLOB_GASPRICE, DATA_GAS_PER_BLOB, - FIELD_ELEMENTS_PER_BLOB, FIELD_ELEMENT_BYTES, MAX_BLOBS_PER_BLOCK, MAX_DATA_GAS_PER_BLOCK, - TARGET_BLOBS_PER_BLOCK, TARGET_DATA_GAS_PER_BLOCK, VERSIONED_HASH_VERSION_KZG, -}; diff --git a/crates/primitives/src/constants/mod.rs b/crates/primitives/src/constants/mod.rs index fd1dc1586..09c488cc2 100644 --- a/crates/primitives/src/constants/mod.rs +++ b/crates/primitives/src/constants/mod.rs @@ -1,6 +1,3 @@ //! Ethereum protocol-related constants pub use reth_primitives_traits::constants::*; - -/// [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844#parameters) constants. -pub mod eip4844; diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index b09fff9e2..3a5c36741 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -297,7 +297,7 @@ impl Transaction { /// transaction. /// /// This is the number of blobs times the - /// [`DATA_GAS_PER_BLOB`](crate::constants::eip4844::DATA_GAS_PER_BLOB) a single blob consumes. + /// [`DATA_GAS_PER_BLOB`](alloy_eips::eip4844::DATA_GAS_PER_BLOB) a single blob consumes. pub fn blob_gas_used(&self) -> Option { self.as_eip4844().map(TxEip4844::blob_gas) } diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index 32d4da659..000ff41fe 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -264,7 +264,7 @@ impl PooledTransactionsElement { /// transaction. /// /// This is the number of blobs times the - /// [`DATA_GAS_PER_BLOB`](crate::constants::eip4844::DATA_GAS_PER_BLOB) a single blob consumes. + /// [`DATA_GAS_PER_BLOB`](alloy_eips::eip4844::DATA_GAS_PER_BLOB) a single blob consumes. 
pub fn blob_gas_used(&self) -> Option { self.as_eip4844().map(TxEip4844::blob_gas) } diff --git a/crates/rpc/rpc-eth-api/src/helpers/fee.rs b/crates/rpc/rpc-eth-api/src/helpers/fee.rs index 34ba6dc7e..20e847a8c 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/fee.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/fee.rs @@ -167,7 +167,7 @@ pub trait EthFees: LoadFee { base_fee_per_blob_gas.push(header.blob_fee().unwrap_or_default()); blob_gas_used_ratio.push( header.blob_gas_used.unwrap_or_default() as f64 - / reth_primitives::constants::eip4844::MAX_DATA_GAS_PER_BLOCK as f64, + / alloy_eips::eip4844::MAX_DATA_GAS_PER_BLOCK as f64, ); // Percentiles were specified, so we need to collect reward percentile ino diff --git a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs index 2c04f3beb..f2d141613 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs @@ -6,7 +6,9 @@ use std::time::{Duration, Instant}; use crate::{EthApiTypes, FromEthApiError, FromEvmError, RpcNodeCore}; use alloy_consensus::EMPTY_OMMER_ROOT_HASH; -use alloy_eips::{eip7685::EMPTY_REQUESTS_HASH, merge::BEACON_NONCE}; +use alloy_eips::{ + eip4844::MAX_DATA_GAS_PER_BLOCK, eip7685::EMPTY_REQUESTS_HASH, merge::BEACON_NONCE, +}; use alloy_primitives::{BlockNumber, B256, U256}; use alloy_rpc_types::BlockNumberOrTag; use futures::Future; @@ -17,7 +19,6 @@ use reth_evm::{ }; use reth_execution_types::ExecutionOutcome; use reth_primitives::{ - constants::eip4844::MAX_DATA_GAS_PER_BLOCK, proofs::calculate_transaction_root, revm_primitives::{ BlockEnv, CfgEnv, CfgEnvWithHandlerCfg, EVMError, Env, ExecutionResult, InvalidTransaction, diff --git a/crates/rpc/rpc-eth-types/src/fee_history.rs b/crates/rpc/rpc-eth-types/src/fee_history.rs index c845d9683..7692d47de 100644 --- a/crates/rpc/rpc-eth-types/src/fee_history.rs +++ b/crates/rpc/rpc-eth-types/src/fee_history.rs @@ -366,7 +366,7 @@ impl FeeHistoryEntry { gas_used_ratio: block.gas_used as f64 / block.gas_limit as f64, base_fee_per_blob_gas: block.blob_fee(), blob_gas_used_ratio: block.blob_gas_used() as f64 / - reth_primitives::constants::eip4844::MAX_DATA_GAS_PER_BLOCK as f64, + alloy_eips::eip4844::MAX_DATA_GAS_PER_BLOCK as f64, excess_blob_gas: block.excess_blob_gas, blob_gas_used: block.blob_gas_used, gas_used: block.gas_used, diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index 42de860db..c6369c98a 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -22,9 +22,11 @@ use alloy_consensus::constants::{ EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, LEGACY_TX_TYPE_ID, }; -use alloy_eips::eip1559::{ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE}; +use alloy_eips::{ + eip1559::{ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE}, + eip4844::BLOB_TX_MIN_BLOB_GASPRICE, +}; use alloy_primitives::{Address, TxHash, B256}; -use reth_primitives::constants::eip4844::BLOB_TX_MIN_BLOB_GASPRICE; use rustc_hash::FxHashMap; use smallvec::SmallVec; use std::{ diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index 99b0caaf4..a3cddaf0a 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -11,7 +11,7 @@ use alloy_consensus::{ constants::{EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, LEGACY_TX_TYPE_ID}, TxEip1559, 
TxEip2930, TxEip4844, TxLegacy, }; -use alloy_eips::{eip1559::MIN_PROTOCOL_BASE_FEE, eip2930::AccessList}; +use alloy_eips::{eip1559::MIN_PROTOCOL_BASE_FEE, eip2930::AccessList, eip4844::DATA_GAS_PER_BLOB}; use alloy_primitives::{Address, Bytes, ChainId, TxHash, TxKind, B256, U256}; use paste::paste; use rand::{ @@ -19,9 +19,9 @@ use rand::{ prelude::Distribution, }; use reth_primitives::{ - constants::eip4844::DATA_GAS_PER_BLOB, transaction::TryFromRecoveredTransactionError, - BlobTransactionSidecar, BlobTransactionValidationError, PooledTransactionsElementEcRecovered, - Signature, Transaction, TransactionSigned, TransactionSignedEcRecovered, TxType, + transaction::TryFromRecoveredTransactionError, BlobTransactionSidecar, + BlobTransactionValidationError, PooledTransactionsElementEcRecovered, Signature, Transaction, + TransactionSigned, TransactionSignedEcRecovered, TxType, }; use std::{ops::Range, sync::Arc, time::Instant, vec::IntoIter}; diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index c21a7a4ea..9db9c53d3 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -1484,7 +1484,8 @@ impl Stream for NewSubpoolTransactionStream { mod tests { use super::*; use alloy_consensus::{TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy}; - use reth_primitives::{constants::eip4844::DATA_GAS_PER_BLOB, Signature, TransactionSigned}; + use alloy_eips::eip4844::DATA_GAS_PER_BLOB; + use reth_primitives::{Signature, TransactionSigned}; #[test] fn test_pool_size_invariants() { diff --git a/crates/transaction-pool/src/validate/eth.rs b/crates/transaction-pool/src/validate/eth.rs index bf7749fb8..62e9f3f29 100644 --- a/crates/transaction-pool/src/validate/eth.rs +++ b/crates/transaction-pool/src/validate/eth.rs @@ -15,10 +15,9 @@ use alloy_consensus::constants::{ EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, LEGACY_TX_TYPE_ID, }; +use alloy_eips::eip4844::MAX_BLOBS_PER_BLOCK; use reth_chainspec::{ChainSpec, EthereumHardforks}; -use reth_primitives::{ - constants::eip4844::MAX_BLOBS_PER_BLOCK, GotExpected, InvalidTransactionError, SealedBlock, -}; +use reth_primitives::{GotExpected, InvalidTransactionError, SealedBlock}; use reth_storage_api::{AccountReader, StateProviderFactory}; use reth_tasks::TaskSpawner; use revm::{ From fbdebe08e02214eb5df7a13d6277878a337c1b86 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 28 Oct 2024 11:16:33 +0100 Subject: [PATCH 295/425] chain-state: fix typo (#12112) --- crates/chain-state/src/in_memory.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/chain-state/src/in_memory.rs b/crates/chain-state/src/in_memory.rs index a850e6652..6bef197be 100644 --- a/crates/chain-state/src/in_memory.rs +++ b/crates/chain-state/src/in_memory.rs @@ -777,7 +777,7 @@ pub struct ExecutedBlock { pub senders: Arc>, /// Block's execution outcome. pub execution_output: Arc, - /// Block's hashedst state. + /// Block's hashed state. pub hashed_state: Arc, /// Trie updates that result of applying the block. 
pub trie: Arc, From 0f86287b65ac11113c47a6a0627a737ff9719106 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Mon, 28 Oct 2024 10:09:47 +0000 Subject: [PATCH 296/425] fix(trie): sparse trie walk should be done in a sorted manner (#12087) --- crates/trie/sparse/src/trie.rs | 49 ++++++++++++++++++---------------- 1 file changed, 26 insertions(+), 23 deletions(-) diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index 8d65378f6..8b214d5f7 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -11,7 +11,7 @@ use reth_trie_common::{ EMPTY_ROOT_HASH, }; use smallvec::SmallVec; -use std::{collections::HashSet, fmt}; +use std::fmt; /// Inner representation of the sparse trie. /// Sparse trie is blind by default until nodes are revealed. @@ -579,19 +579,19 @@ impl RevealedSparseTrie { /// Returns a list of paths to the nodes that are located at the provided depth when counting /// from the root node. If there's a leaf at a depth less than the provided depth, it will be /// included in the result. - fn get_nodes_at_depth(&self, depth: usize) -> HashSet { + fn get_nodes_at_depth(&self, depth: usize) -> Vec { let mut paths = Vec::from([(Nibbles::default(), 0)]); - let mut targets = HashSet::::default(); + let mut targets = Vec::new(); while let Some((mut path, level)) = paths.pop() { match self.nodes.get(&path).unwrap() { SparseNode::Empty | SparseNode::Hash(_) => {} SparseNode::Leaf { .. } => { - targets.insert(path); + targets.push(path); } SparseNode::Extension { key, .. } => { if level >= depth { - targets.insert(path); + targets.push(path); } else { path.extend_from_slice_unchecked(key); paths.push((path, level + 1)); @@ -599,9 +599,9 @@ impl RevealedSparseTrie { } SparseNode::Branch { state_mask, .. } => { if level >= depth { - targets.insert(path); + targets.push(path); } else { - for bit in CHILD_INDEX_RANGE { + for bit in CHILD_INDEX_RANGE.rev() { if state_mask.is_bit_set(bit) { let mut child_path = path.clone(); child_path.push_unchecked(bit); @@ -666,7 +666,9 @@ impl RevealedSparseTrie { } branch_child_buf.clear(); - for bit in CHILD_INDEX_RANGE { + // Walk children in a reverse order from `f` to `0`, so we pop the `0` first + // from the stack. + for bit in CHILD_INDEX_RANGE.rev() { if state_mask.is_bit_set(bit) { let mut child = path.clone(); child.push_unchecked(bit); @@ -674,13 +676,17 @@ impl RevealedSparseTrie { } } - branch_value_stack_buf.clear(); - for child_path in &branch_child_buf { + branch_value_stack_buf.resize(branch_child_buf.len(), Default::default()); + let mut added_children = false; + for (i, child_path) in branch_child_buf.iter().enumerate() { if rlp_node_stack.last().map_or(false, |e| &e.0 == child_path) { let (_, child) = rlp_node_stack.pop().unwrap(); - branch_value_stack_buf.push(child); + // Insert children in the resulting buffer in a normal order, because + // initially we iterated in reverse. 
+ branch_value_stack_buf[branch_child_buf.len() - i - 1] = child; + added_children = true; } else { - debug_assert!(branch_value_stack_buf.is_empty()); + debug_assert!(!added_children); path_stack.push(path); path_stack.extend(branch_child_buf.drain(..)); continue 'main @@ -1568,38 +1574,35 @@ mod tests { .unwrap(); sparse.update_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0]), value).unwrap(); - assert_eq!(sparse.get_nodes_at_depth(0), HashSet::from([Nibbles::default()])); - assert_eq!( - sparse.get_nodes_at_depth(1), - HashSet::from([Nibbles::from_nibbles_unchecked([0x5])]) - ); + assert_eq!(sparse.get_nodes_at_depth(0), vec![Nibbles::default()]); + assert_eq!(sparse.get_nodes_at_depth(1), vec![Nibbles::from_nibbles_unchecked([0x5])]); assert_eq!( sparse.get_nodes_at_depth(2), - HashSet::from([ + vec![ Nibbles::from_nibbles_unchecked([0x5, 0x0]), Nibbles::from_nibbles_unchecked([0x5, 0x2]), Nibbles::from_nibbles_unchecked([0x5, 0x3]) - ]) + ] ); assert_eq!( sparse.get_nodes_at_depth(3), - HashSet::from([ + vec![ Nibbles::from_nibbles_unchecked([0x5, 0x0, 0x2, 0x3]), Nibbles::from_nibbles_unchecked([0x5, 0x2]), Nibbles::from_nibbles_unchecked([0x5, 0x3, 0x1]), Nibbles::from_nibbles_unchecked([0x5, 0x3, 0x3]) - ]) + ] ); assert_eq!( sparse.get_nodes_at_depth(4), - HashSet::from([ + vec![ Nibbles::from_nibbles_unchecked([0x5, 0x0, 0x2, 0x3, 0x1]), Nibbles::from_nibbles_unchecked([0x5, 0x0, 0x2, 0x3, 0x3]), Nibbles::from_nibbles_unchecked([0x5, 0x2]), Nibbles::from_nibbles_unchecked([0x5, 0x3, 0x1]), Nibbles::from_nibbles_unchecked([0x5, 0x3, 0x3, 0x0]), Nibbles::from_nibbles_unchecked([0x5, 0x3, 0x3, 0x2]) - ]) + ] ); } } From e4bd13534df447e5da190c7216ffcd7232ff8e8f Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Mon, 28 Oct 2024 19:13:43 +0900 Subject: [PATCH 297/425] fix(ci): remove import path from type names on `compact-codec` (#12125) --- .github/workflows/compact.yml | 4 +++- crates/cli/commands/src/test_vectors/compact.rs | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/compact.yml b/.github/workflows/compact.yml index 63f6f282f..c5d39f72a 100644 --- a/.github/workflows/compact.yml +++ b/.github/workflows/compact.yml @@ -36,7 +36,9 @@ jobs: ref: ${{ github.base_ref || 'main' }} # On `main` branch, generates test vectors and serializes them to disk using `Compact`. 
- name: Generate compact vectors - run: ${{ matrix.bin }} -- test-vectors compact --write + run: | + ${{ matrix.bin }} -- test-vectors compact --write && + for f in ./testdata/micro/compact/*; do mv "$f" "$(dirname "$f")/$(basename "$f" | awk -F '__' '{print $NF}')"; done - name: Checkout PR uses: actions/checkout@v4 with: diff --git a/crates/cli/commands/src/test_vectors/compact.rs b/crates/cli/commands/src/test_vectors/compact.rs index 162ee1cea..94552d5c2 100644 --- a/crates/cli/commands/src/test_vectors/compact.rs +++ b/crates/cli/commands/src/test_vectors/compact.rs @@ -267,5 +267,5 @@ where } pub fn type_name() -> String { - std::any::type_name::().replace("::", "__") + std::any::type_name::().split("::").last().unwrap_or(std::any::type_name::()).to_string() } From 77e5748124ffd68c646bf775fe086227f7603129 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Mon, 28 Oct 2024 18:14:11 +0800 Subject: [PATCH 298/425] chore(rpc): remove redundant `LoadFee::provider` (#12122) --- crates/optimism/rpc/src/eth/mod.rs | 24 ++++++++++----------- crates/rpc/rpc-eth-api/src/helpers/block.rs | 2 +- crates/rpc/rpc-eth-api/src/helpers/fee.rs | 23 +++++++------------- crates/rpc/rpc/src/eth/helpers/fees.rs | 21 +++++++----------- 4 files changed, 28 insertions(+), 42 deletions(-) diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index d12a2dd02..9b04e1c73 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -14,15 +14,15 @@ use std::{fmt, sync::Arc}; use alloy_primitives::U256; use derive_more::Deref; use op_alloy_network::Optimism; -use reth_chainspec::EthereumHardforks; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_evm::ConfigureEvm; use reth_network_api::NetworkInfo; use reth_node_api::{FullNodeComponents, NodeTypes}; use reth_node_builder::EthApiBuilderCtx; use reth_primitives::Header; use reth_provider::{ - BlockIdReader, BlockNumReader, BlockReaderIdExt, CanonStateSubscriptions, ChainSpecProvider, - HeaderProvider, StageCheckpointReader, StateProviderFactory, + BlockNumReader, BlockReaderIdExt, CanonStateSubscriptions, ChainSpecProvider, EvmEnvProvider, + StageCheckpointReader, StateProviderFactory, }; use reth_rpc::eth::{core::EthApiInner, DevSigner}; use reth_rpc_eth_api::{ @@ -184,23 +184,21 @@ where impl LoadFee for OpEthApi where - Self: LoadBlock, - N: FullNodeComponents>, + Self: LoadBlock, + N: RpcNodeCore< + Provider: BlockReaderIdExt + + EvmEnvProvider + + ChainSpecProvider + + StateProviderFactory, + >, { - #[inline] - fn provider( - &self, - ) -> impl BlockIdReader + HeaderProvider + ChainSpecProvider { - self.inner.provider() - } - #[inline] fn cache(&self) -> &EthStateCache { self.inner.cache() } #[inline] - fn gas_oracle(&self) -> &GasPriceOracle { + fn gas_oracle(&self) -> &GasPriceOracle { self.inner.gas_oracle() } diff --git a/crates/rpc/rpc-eth-api/src/helpers/block.rs b/crates/rpc/rpc-eth-api/src/helpers/block.rs index 861afb3ad..217e84a47 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/block.rs @@ -20,7 +20,7 @@ pub type BlockAndReceiptsResult = Result { +pub trait EthBlocks: LoadBlock { /// Returns the block header for the given block id. 
fn rpc_block_header( &self, diff --git a/crates/rpc/rpc-eth-api/src/helpers/fee.rs b/crates/rpc/rpc-eth-api/src/helpers/fee.rs index 20e847a8c..dcde2214a 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/fee.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/fee.rs @@ -3,8 +3,8 @@ use alloy_primitives::U256; use alloy_rpc_types::{BlockNumberOrTag, FeeHistory}; use futures::Future; -use reth_chainspec::{EthChainSpec, EthereumHardforks}; -use reth_provider::{BlockIdReader, BlockReaderIdExt, ChainSpecProvider, HeaderProvider}; +use reth_chainspec::EthChainSpec; +use reth_provider::{BlockIdReader, ChainSpecProvider, HeaderProvider}; use reth_rpc_eth_types::{ fee_history::calculate_reward_percentiles_for_block, EthApiError, EthStateCache, FeeHistoryCache, FeeHistoryEntry, GasPriceOracle, RpcInvalidTransactionError, @@ -82,7 +82,8 @@ pub trait EthFees: LoadFee { block_count = block_count.saturating_sub(1); } - let end_block = LoadFee::provider(self) + let end_block = self + .provider() .block_number_for_id(newest_block.into()) .map_err(Self::Error::from_eth_err)? .ok_or(EthApiError::HeaderNotFound(newest_block.into()))?; @@ -147,13 +148,12 @@ pub trait EthFees: LoadFee { // Also need to include the `base_fee_per_gas` and `base_fee_per_blob_gas` for the // next block base_fee_per_gas - .push(last_entry.next_block_base_fee(LoadFee::provider(self).chain_spec()) - as u128); + .push(last_entry.next_block_base_fee(self.provider().chain_spec()) as u128); base_fee_per_blob_gas.push(last_entry.next_block_blob_fee().unwrap_or_default()); } else { // read the requested header range - let headers = LoadFee::provider(self) + let headers = self.provider() .sealed_headers_range(start_block..=end_block) .map_err(Self::Error::from_eth_err)?; if headers.len() != block_count as usize { @@ -197,7 +197,7 @@ pub trait EthFees: LoadFee { // The unwrap is safe since we checked earlier that we got at least 1 header. let last_header = headers.last().expect("is present"); base_fee_per_gas.push( - LoadFee::provider(self) + self.provider() .chain_spec() .base_fee_params_at_timestamp(last_header.timestamp) .next_block_base_fee( @@ -242,13 +242,6 @@ pub trait EthFees: LoadFee { /// /// Behaviour shared by several `eth_` RPC methods, not exclusive to `eth_` fees RPC methods. pub trait LoadFee: LoadBlock { - // Returns a handle for reading data from disk. - /// - /// Data access in default (L1) trait method implementations. - fn provider( - &self, - ) -> impl BlockIdReader + HeaderProvider + ChainSpecProvider; - /// Returns a handle for reading data from memory. /// /// Data access in default (L1) trait method implementations. @@ -257,7 +250,7 @@ pub trait LoadFee: LoadBlock { /// Returns a handle for reading gas price. /// /// Data access in default (L1) trait method implementations. - fn gas_oracle(&self) -> &GasPriceOracle; + fn gas_oracle(&self) -> &GasPriceOracle; /// Returns a handle for reading fee history data from memory. /// diff --git a/crates/rpc/rpc/src/eth/helpers/fees.rs b/crates/rpc/rpc/src/eth/helpers/fees.rs index a792f7289..2c5db5bac 100644 --- a/crates/rpc/rpc/src/eth/helpers/fees.rs +++ b/crates/rpc/rpc/src/eth/helpers/fees.rs @@ -1,8 +1,7 @@ //! Contains RPC handler implementations for fee history. 
-use reth_chainspec::EthereumHardforks; -use reth_provider::{BlockIdReader, BlockReaderIdExt, ChainSpecProvider, HeaderProvider}; - +use reth_chainspec::{EthChainSpec, EthereumHardforks}; +use reth_provider::{BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, StateProviderFactory}; use reth_rpc_eth_api::helpers::{EthFees, LoadBlock, LoadFee}; use reth_rpc_eth_types::{EthStateCache, FeeHistoryCache, GasPriceOracle}; @@ -15,23 +14,19 @@ impl EthFees for EthApi LoadFee for EthApi where - Self: LoadBlock, - Provider: BlockReaderIdExt + HeaderProvider + ChainSpecProvider, + Self: LoadBlock, + Provider: BlockReaderIdExt + + EvmEnvProvider + + ChainSpecProvider + + StateProviderFactory, { - #[inline] - fn provider( - &self, - ) -> impl BlockIdReader + HeaderProvider + ChainSpecProvider { - self.inner.provider() - } - #[inline] fn cache(&self) -> &EthStateCache { self.inner.cache() } #[inline] - fn gas_oracle(&self) -> &GasPriceOracle { + fn gas_oracle(&self) -> &GasPriceOracle { self.inner.gas_oracle() } From 8f5fd1d70c1670e576b3103a12b9027fd91fac70 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Mon, 28 Oct 2024 18:14:40 +0800 Subject: [PATCH 299/425] chore(rpc): remove redundant `EthTransactions::provider` (#12121) --- crates/optimism/rpc/src/eth/transaction.rs | 10 +++---- .../rpc-eth-api/src/helpers/transaction.rs | 27 ++++++++----------- crates/rpc/rpc/src/eth/helpers/transaction.rs | 9 +------ 3 files changed, 15 insertions(+), 31 deletions(-) diff --git a/crates/optimism/rpc/src/eth/transaction.rs b/crates/optimism/rpc/src/eth/transaction.rs index 451c8a805..5135b13a2 100644 --- a/crates/optimism/rpc/src/eth/transaction.rs +++ b/crates/optimism/rpc/src/eth/transaction.rs @@ -19,13 +19,9 @@ use crate::{OpEthApi, SequencerClient}; impl EthTransactions for OpEthApi where - Self: LoadTransaction, - N: FullNodeComponents, + Self: LoadTransaction, + N: RpcNodeCore, { - fn provider(&self) -> impl BlockReaderIdExt { - self.inner.provider() - } - fn signers(&self) -> &parking_lot::RwLock>> { self.inner.signers() } @@ -71,7 +67,7 @@ where impl OpEthApi where - N: FullNodeComponents, + N: RpcNodeCore, { /// Returns the [`SequencerClient`] if one is set. pub fn raw_tx_forwarder(&self) -> Option { diff --git a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs index 791cb2ae1..af647fedf 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs @@ -52,12 +52,7 @@ use super::{ /// See also /// /// This implementation follows the behaviour of Geth and disables the basefee check for tracing. -pub trait EthTransactions: LoadTransaction { - /// Returns a handle for reading data from disk. - /// - /// Data access in default (L1) trait method implementations. - fn provider(&self) -> impl BlockReaderIdExt; - +pub trait EthTransactions: LoadTransaction { /// Returns a handle for signing data. /// /// Singer access in default (L1) trait method implementations. @@ -111,7 +106,8 @@ pub trait EthTransactions: LoadTransaction { } self.spawn_blocking_io(move |ref this| { - Ok(RpcNodeCore::provider(this) + Ok(this + .provider() .transaction_by_hash(hash) .map_err(Self::Error::from_eth_err)? 
.map(|tx| tx.encoded_2718().into())) @@ -166,7 +162,8 @@ pub trait EthTransactions: LoadTransaction { { let this = self.clone(); self.spawn_blocking_io(move |_| { - let (tx, meta) = match RpcNodeCore::provider(&this) + let (tx, meta) = match this + .provider() .transaction_by_hash_with_meta(hash) .map_err(Self::Error::from_eth_err)? { @@ -174,13 +171,11 @@ pub trait EthTransactions: LoadTransaction { None => return Ok(None), }; - let receipt = match EthTransactions::provider(&this) - .receipt_by_hash(hash) - .map_err(Self::Error::from_eth_err)? - { - Some(recpt) => recpt, - None => return Ok(None), - }; + let receipt = + match this.provider().receipt_by_hash(hash).map_err(Self::Error::from_eth_err)? { + Some(recpt) => recpt, + None => return Ok(None), + }; Ok(Some((tx, meta, receipt))) }) @@ -257,7 +252,7 @@ pub trait EthTransactions: LoadTransaction { return Ok(None); } - let Ok(high) = RpcNodeCore::provider(self).best_block_number() else { + let Ok(high) = self.provider().best_block_number() else { return Err(EthApiError::HeaderNotFound(BlockNumberOrTag::Latest.into()).into()); }; diff --git a/crates/rpc/rpc/src/eth/helpers/transaction.rs b/crates/rpc/rpc/src/eth/helpers/transaction.rs index c4505bef0..623db35e5 100644 --- a/crates/rpc/rpc/src/eth/helpers/transaction.rs +++ b/crates/rpc/rpc/src/eth/helpers/transaction.rs @@ -13,15 +13,8 @@ use crate::EthApi; impl EthTransactions for EthApi where - Self: LoadTransaction, - Pool: TransactionPool + 'static, - Provider: BlockReaderIdExt, + Self: LoadTransaction, { - #[inline] - fn provider(&self) -> impl BlockReaderIdExt { - self.inner.provider() - } - #[inline] fn signers(&self) -> &parking_lot::RwLock>> { self.inner.signers() From 268090e879f40bf388e3b0f1dac9983b76c2255b Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Mon, 28 Oct 2024 10:44:36 +0000 Subject: [PATCH 300/425] bench(trie): `RevealedSparseTrie::update_rlp_node_level` (#12046) --- crates/trie/sparse/Cargo.toml | 4 ++ crates/trie/sparse/benches/rlp_node.rs | 78 ++++++++++++++++++++++++++ crates/trie/sparse/benches/root.rs | 1 + crates/trie/sparse/src/trie.rs | 2 +- 4 files changed, 84 insertions(+), 1 deletion(-) create mode 100644 crates/trie/sparse/benches/rlp_node.rs diff --git a/crates/trie/sparse/Cargo.toml b/crates/trie/sparse/Cargo.toml index 26d036f57..1c5bb7d8a 100644 --- a/crates/trie/sparse/Cargo.toml +++ b/crates/trie/sparse/Cargo.toml @@ -41,3 +41,7 @@ rand.workspace = true [[bench]] name = "root" harness = false + +[[bench]] +name = "rlp_node" +harness = false diff --git a/crates/trie/sparse/benches/rlp_node.rs b/crates/trie/sparse/benches/rlp_node.rs new file mode 100644 index 000000000..57ab52978 --- /dev/null +++ b/crates/trie/sparse/benches/rlp_node.rs @@ -0,0 +1,78 @@ +#![allow(missing_docs, unreachable_pub)] + +use std::time::{Duration, Instant}; + +use alloy_primitives::{B256, U256}; +use criterion::{criterion_group, criterion_main, Criterion}; +use prop::strategy::ValueTree; +use proptest::{prelude::*, test_runner::TestRunner}; +use rand::seq::IteratorRandom; +use reth_testing_utils::generators; +use reth_trie::Nibbles; +use reth_trie_sparse::RevealedSparseTrie; + +pub fn update_rlp_node_level(c: &mut Criterion) { + let mut rng = generators::rng(); + + let mut group = c.benchmark_group("update rlp node level"); + group.sample_size(20); + + for size in [100_000] { + let mut runner = TestRunner::new(ProptestConfig::default()); + let state = proptest::collection::hash_map(any::(), any::(), size) + .new_tree(&mut runner) + .unwrap() + .current(); + + // 
Create a sparse trie with `size` leaves + let mut sparse = RevealedSparseTrie::default(); + for (key, value) in &state { + sparse + .update_leaf(Nibbles::unpack(key), alloy_rlp::encode_fixed_size(value).to_vec()) + .unwrap(); + } + sparse.root(); + + for updated_leaves in [0.1, 1.0] { + for key in state + .keys() + .choose_multiple(&mut rng, (size as f64 * (updated_leaves / 100.0)) as usize) + { + sparse + .update_leaf( + Nibbles::unpack(key), + alloy_rlp::encode_fixed_size(&rng.gen::()).to_vec(), + ) + .unwrap(); + } + + // Calculate the maximum depth of the trie for the given number of leaves + let max_depth = (size as f64).log(16.0).ceil() as usize; + + for depth in 0..=max_depth { + group.bench_function( + format!("size {size} | updated {updated_leaves}% | depth {depth}"), + |b| { + // Use `iter_custom` to avoid measuring clones and drops + b.iter_custom(|iters| { + let mut elapsed = Duration::ZERO; + + let mut cloned = sparse.clone(); + for _ in 0..iters { + let start = Instant::now(); + cloned.update_rlp_node_level(depth); + elapsed += start.elapsed(); + cloned = sparse.clone(); + } + + elapsed + }) + }, + ); + } + } + } +} + +criterion_group!(rlp_node, update_rlp_node_level); +criterion_main!(rlp_node); diff --git a/crates/trie/sparse/benches/root.rs b/crates/trie/sparse/benches/root.rs index bc221a8f8..30ce566fb 100644 --- a/crates/trie/sparse/benches/root.rs +++ b/crates/trie/sparse/benches/root.rs @@ -1,4 +1,5 @@ #![allow(missing_docs, unreachable_pub)] + use alloy_primitives::{map::HashMap, B256, U256}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; use itertools::Itertools; diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index 8b214d5f7..e7ee66c54 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -77,7 +77,7 @@ impl SparseTrie { /// - Each leaf entry in `nodes` collection must have a corresponding entry in `values` collection. /// The opposite is also true. /// - All keys in `values` collection are full leaf paths. -#[derive(PartialEq, Eq)] +#[derive(Clone, PartialEq, Eq)] pub struct RevealedSparseTrie { /// All trie nodes. nodes: HashMap, From 1b0f625f1d315473b86fc38153a7c5ed27d27ffa Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Mon, 28 Oct 2024 11:06:26 +0000 Subject: [PATCH 301/425] perf(trie): collect only changed sparse nodes at a depth (#12093) --- crates/trie/sparse/src/trie.rs | 47 ++++++++++++++++++++++++---------- 1 file changed, 33 insertions(+), 14 deletions(-) diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index e7ee66c54..91362eed5 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -569,27 +569,36 @@ impl RevealedSparseTrie { /// Update hashes of the nodes that are located at a level deeper than or equal to the provided /// depth. Root node has a level of 0. pub fn update_rlp_node_level(&mut self, depth: usize) { - let targets = self.get_nodes_at_depth(depth); let mut prefix_set = self.prefix_set.clone().freeze(); + + let targets = self.get_changed_nodes_at_depth(&mut prefix_set, depth); for target in targets { self.rlp_node(target, &mut prefix_set); } } - /// Returns a list of paths to the nodes that are located at the provided depth when counting - /// from the root node. If there's a leaf at a depth less than the provided depth, it will be - /// included in the result. 
- fn get_nodes_at_depth(&self, depth: usize) -> Vec { + /// Returns a list of paths to the nodes that were changed according to the prefix set and are + /// located at the provided depth when counting from the root node. If there's a leaf at a + /// depth less than the provided depth, it will be included in the result. + fn get_changed_nodes_at_depth(&self, prefix_set: &mut PrefixSet, depth: usize) -> Vec { let mut paths = Vec::from([(Nibbles::default(), 0)]); let mut targets = Vec::new(); while let Some((mut path, level)) = paths.pop() { match self.nodes.get(&path).unwrap() { SparseNode::Empty | SparseNode::Hash(_) => {} - SparseNode::Leaf { .. } => { + SparseNode::Leaf { hash, .. } => { + if hash.is_some() && !prefix_set.contains(&path) { + continue + } + targets.push(path); } - SparseNode::Extension { key, .. } => { + SparseNode::Extension { key, hash } => { + if hash.is_some() && !prefix_set.contains(&path) { + continue + } + if level >= depth { targets.push(path); } else { @@ -597,7 +606,11 @@ impl RevealedSparseTrie { paths.push((path, level + 1)); } } - SparseNode::Branch { state_mask, .. } => { + SparseNode::Branch { state_mask, hash } => { + if hash.is_some() && !prefix_set.contains(&path) { + continue + } + if level >= depth { targets.push(path); } else { @@ -1540,7 +1553,7 @@ mod tests { } #[test] - fn sparse_trie_get_nodes_at_depth() { + fn sparse_trie_get_changed_nodes_at_depth() { let mut sparse = RevealedSparseTrie::default(); let value = alloy_rlp::encode_fixed_size(&U256::ZERO).to_vec(); @@ -1574,10 +1587,16 @@ mod tests { .unwrap(); sparse.update_leaf(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0]), value).unwrap(); - assert_eq!(sparse.get_nodes_at_depth(0), vec![Nibbles::default()]); - assert_eq!(sparse.get_nodes_at_depth(1), vec![Nibbles::from_nibbles_unchecked([0x5])]); assert_eq!( - sparse.get_nodes_at_depth(2), + sparse.get_changed_nodes_at_depth(&mut PrefixSet::default(), 0), + vec![Nibbles::default()] + ); + assert_eq!( + sparse.get_changed_nodes_at_depth(&mut PrefixSet::default(), 1), + vec![Nibbles::from_nibbles_unchecked([0x5])] + ); + assert_eq!( + sparse.get_changed_nodes_at_depth(&mut PrefixSet::default(), 2), vec![ Nibbles::from_nibbles_unchecked([0x5, 0x0]), Nibbles::from_nibbles_unchecked([0x5, 0x2]), @@ -1585,7 +1604,7 @@ mod tests { ] ); assert_eq!( - sparse.get_nodes_at_depth(3), + sparse.get_changed_nodes_at_depth(&mut PrefixSet::default(), 3), vec![ Nibbles::from_nibbles_unchecked([0x5, 0x0, 0x2, 0x3]), Nibbles::from_nibbles_unchecked([0x5, 0x2]), @@ -1594,7 +1613,7 @@ mod tests { ] ); assert_eq!( - sparse.get_nodes_at_depth(4), + sparse.get_changed_nodes_at_depth(&mut PrefixSet::default(), 4), vec![ Nibbles::from_nibbles_unchecked([0x5, 0x0, 0x2, 0x3, 0x1]), Nibbles::from_nibbles_unchecked([0x5, 0x0, 0x2, 0x3, 0x3]), From 0d07d27f3c5eea7b3ab9dedecf786fcc78954b1f Mon Sep 17 00:00:00 2001 From: 0xOsiris Date: Mon, 28 Oct 2024 05:00:36 -0700 Subject: [PATCH 302/425] feat: Add version to `BeaconEngineMessage` FCU (#12089) Co-authored-by: Dan Cline <6798349+Rjected@users.noreply.github.com> --- .../src/commands/debug_cmd/replay_engine.rs | 13 ++++++++--- crates/consensus/auto-seal/src/task.rs | 3 ++- crates/consensus/beacon/src/engine/handle.rs | 7 ++++-- crates/consensus/beacon/src/engine/message.rs | 4 +++- crates/consensus/beacon/src/engine/mod.rs | 17 +++++++++++--- .../consensus/beacon/src/engine/test_utils.rs | 10 ++++++-- crates/engine/local/src/miner.rs | 4 +++- crates/engine/tree/src/tree/mod.rs | 23 ++++++++++++++----- 
crates/engine/util/src/engine_store.rs | 7 +++++- crates/engine/util/src/reorg.rs | 20 +++++++++++++--- crates/engine/util/src/skip_fcu.rs | 14 +++++++++-- crates/payload/primitives/src/lib.rs | 11 +++++---- crates/payload/primitives/src/traits.rs | 2 +- crates/rpc/rpc-engine-api/src/engine_api.rs | 5 ++-- 14 files changed, 107 insertions(+), 33 deletions(-) diff --git a/bin/reth/src/commands/debug_cmd/replay_engine.rs b/bin/reth/src/commands/debug_cmd/replay_engine.rs index e7b3de6b6..9314a4392 100644 --- a/bin/reth/src/commands/debug_cmd/replay_engine.rs +++ b/bin/reth/src/commands/debug_cmd/replay_engine.rs @@ -18,7 +18,9 @@ use reth_engine_util::engine_store::{EngineMessageStore, StoredEngineApiMessage} use reth_fs_util as fs; use reth_network::{BlockDownloaderProvider, NetworkHandle}; use reth_network_api::NetworkInfo; -use reth_node_api::{NodeTypesWithDB, NodeTypesWithDBAdapter, NodeTypesWithEngine}; +use reth_node_api::{ + EngineApiMessageVersion, NodeTypesWithDB, NodeTypesWithDBAdapter, NodeTypesWithEngine, +}; use reth_node_ethereum::{EthEngineTypes, EthEvmConfig, EthExecutorProvider}; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; use reth_provider::{ @@ -166,8 +168,13 @@ impl> Command { debug!(target: "reth::cli", filepath = %filepath.display(), ?message, "Forwarding Engine API message"); match message { StoredEngineApiMessage::ForkchoiceUpdated { state, payload_attrs } => { - let response = - beacon_engine_handle.fork_choice_updated(state, payload_attrs).await?; + let response = beacon_engine_handle + .fork_choice_updated( + state, + payload_attrs, + EngineApiMessageVersion::default(), + ) + .await?; debug!(target: "reth::cli", ?response, "Received for forkchoice updated"); } StoredEngineApiMessage::NewPayload { payload, sidecar } => { diff --git a/crates/consensus/auto-seal/src/task.rs b/crates/consensus/auto-seal/src/task.rs index cb0586d44..75ddda908 100644 --- a/crates/consensus/auto-seal/src/task.rs +++ b/crates/consensus/auto-seal/src/task.rs @@ -3,7 +3,7 @@ use alloy_rpc_types_engine::ForkchoiceState; use futures_util::{future::BoxFuture, FutureExt}; use reth_beacon_consensus::{BeaconEngineMessage, ForkchoiceStatus}; use reth_chainspec::{EthChainSpec, EthereumHardforks}; -use reth_engine_primitives::EngineTypes; +use reth_engine_primitives::{EngineApiMessageVersion, EngineTypes}; use reth_evm::execute::BlockExecutorProvider; use reth_provider::{CanonChainTracker, StateProviderFactory}; use reth_stages_api::PipelineEvent; @@ -155,6 +155,7 @@ where state, payload_attrs: None, tx, + version: EngineApiMessageVersion::default(), }); debug!(target: "consensus::auto", ?state, "Sent fork choice update"); diff --git a/crates/consensus/beacon/src/engine/handle.rs b/crates/consensus/beacon/src/engine/handle.rs index 4aafc6e07..f8840cf78 100644 --- a/crates/consensus/beacon/src/engine/handle.rs +++ b/crates/consensus/beacon/src/engine/handle.rs @@ -8,7 +8,7 @@ use alloy_rpc_types_engine::{ ExecutionPayload, ExecutionPayloadSidecar, ForkchoiceState, ForkchoiceUpdated, PayloadStatus, }; use futures::TryFutureExt; -use reth_engine_primitives::EngineTypes; +use reth_engine_primitives::{EngineApiMessageVersion, EngineTypes}; use reth_errors::RethResult; use reth_tokio_util::{EventSender, EventStream}; use tokio::sync::{mpsc::UnboundedSender, oneshot}; @@ -60,9 +60,10 @@ where &self, state: ForkchoiceState, payload_attrs: Option, + version: EngineApiMessageVersion, ) -> Result { Ok(self - .send_fork_choice_updated(state, payload_attrs) + 
.send_fork_choice_updated(state, payload_attrs, version) .map_err(|_| BeaconForkChoiceUpdateError::EngineUnavailable) .await?? .await?) @@ -74,12 +75,14 @@ where &self, state: ForkchoiceState, payload_attrs: Option, + version: EngineApiMessageVersion, ) -> oneshot::Receiver> { let (tx, rx) = oneshot::channel(); let _ = self.to_engine.send(BeaconEngineMessage::ForkchoiceUpdated { state, payload_attrs, tx, + version, }); rx } diff --git a/crates/consensus/beacon/src/engine/message.rs b/crates/consensus/beacon/src/engine/message.rs index e33decbd8..fa7457c12 100644 --- a/crates/consensus/beacon/src/engine/message.rs +++ b/crates/consensus/beacon/src/engine/message.rs @@ -4,7 +4,7 @@ use alloy_rpc_types_engine::{ ForkchoiceUpdateError, ForkchoiceUpdated, PayloadId, PayloadStatus, PayloadStatusEnum, }; use futures::{future::Either, FutureExt}; -use reth_engine_primitives::EngineTypes; +use reth_engine_primitives::{EngineApiMessageVersion, EngineTypes}; use reth_errors::RethResult; use reth_payload_primitives::PayloadBuilderError; use std::{ @@ -156,6 +156,8 @@ pub enum BeaconEngineMessage { state: ForkchoiceState, /// The payload attributes for block building. payload_attrs: Option, + /// The Engine API Version. + version: EngineApiMessageVersion, /// The sender for returning forkchoice updated result. tx: oneshot::Sender>, }, diff --git a/crates/consensus/beacon/src/engine/mod.rs b/crates/consensus/beacon/src/engine/mod.rs index 2363b9078..770821de7 100644 --- a/crates/consensus/beacon/src/engine/mod.rs +++ b/crates/consensus/beacon/src/engine/mod.rs @@ -10,7 +10,7 @@ use reth_blockchain_tree_api::{ error::{BlockchainTreeError, CanonicalError, InsertBlockError, InsertBlockErrorKind}, BlockStatus, BlockValidationKind, BlockchainTreeEngine, CanonicalOutcome, InsertPayloadOk, }; -use reth_engine_primitives::{EngineTypes, PayloadTypes}; +use reth_engine_primitives::{EngineApiMessageVersion, EngineTypes, PayloadTypes}; use reth_errors::{BlockValidationError, ProviderResult, RethError, RethResult}; use reth_network_p2p::{ sync::{NetworkSyncUpdater, SyncState}, @@ -428,7 +428,12 @@ where } else if let Some(attrs) = attrs { // the CL requested to build a new payload on top of this new VALID head let head = outcome.into_header().unseal(); - self.process_payload_attributes(attrs, head, state) + self.process_payload_attributes( + attrs, + head, + state, + EngineApiMessageVersion::default(), + ) } else { OnForkChoiceUpdated::valid(PayloadStatus::new( PayloadStatusEnum::Valid, @@ -1160,6 +1165,7 @@ where attrs: ::PayloadAttributes, head: Header, state: ForkchoiceState, + _version: EngineApiMessageVersion, ) -> OnForkChoiceUpdated { // 7. Client software MUST ensure that payloadAttributes.timestamp is greater than timestamp // of a block referenced by forkchoiceState.headBlockHash. If this condition isn't held @@ -1855,7 +1861,12 @@ where // sensitive, hence they are polled first. 
if let Poll::Ready(Some(msg)) = this.engine_message_stream.poll_next_unpin(cx) { match msg { - BeaconEngineMessage::ForkchoiceUpdated { state, payload_attrs, tx } => { + BeaconEngineMessage::ForkchoiceUpdated { + state, + payload_attrs, + tx, + version: _version, + } => { this.on_forkchoice_updated(state, payload_attrs, tx); } BeaconEngineMessage::NewPayload { payload, sidecar, tx } => { diff --git a/crates/consensus/beacon/src/engine/test_utils.rs b/crates/consensus/beacon/src/engine/test_utils.rs index 912f0a871..6e03aebfa 100644 --- a/crates/consensus/beacon/src/engine/test_utils.rs +++ b/crates/consensus/beacon/src/engine/test_utils.rs @@ -19,6 +19,7 @@ use reth_downloaders::{ bodies::bodies::BodiesDownloaderBuilder, headers::reverse_headers::ReverseHeadersDownloaderBuilder, }; +use reth_engine_primitives::EngineApiMessageVersion; use reth_ethereum_engine_primitives::EthEngineTypes; use reth_evm::{either::Either, test_utils::MockExecutorProvider}; use reth_evm_ethereum::execute::EthExecutorProvider; @@ -93,7 +94,9 @@ impl TestEnv { &self, state: ForkchoiceState, ) -> Result { - self.engine_handle.fork_choice_updated(state, None).await + self.engine_handle + .fork_choice_updated(state, None, EngineApiMessageVersion::default()) + .await } /// Sends the `ForkchoiceUpdated` message to the consensus engine and retries if the engine @@ -103,7 +106,10 @@ impl TestEnv { state: ForkchoiceState, ) -> Result { loop { - let result = self.engine_handle.fork_choice_updated(state, None).await?; + let result = self + .engine_handle + .fork_choice_updated(state, None, EngineApiMessageVersion::default()) + .await?; if !result.is_syncing() { return Ok(result) } diff --git a/crates/engine/local/src/miner.rs b/crates/engine/local/src/miner.rs index 706ddc43d..7cebd3063 100644 --- a/crates/engine/local/src/miner.rs +++ b/crates/engine/local/src/miner.rs @@ -6,7 +6,7 @@ use eyre::OptionExt; use futures_util::{stream::Fuse, StreamExt}; use reth_beacon_consensus::BeaconEngineMessage; use reth_chainspec::EthereumHardforks; -use reth_engine_primitives::EngineTypes; +use reth_engine_primitives::{EngineApiMessageVersion, EngineTypes}; use reth_payload_builder::PayloadBuilderHandle; use reth_payload_primitives::{ BuiltPayload, PayloadAttributesBuilder, PayloadBuilder, PayloadKind, PayloadTypes, @@ -167,6 +167,7 @@ where state: self.forkchoice_state(), payload_attrs: None, tx, + version: EngineApiMessageVersion::default(), })?; let res = rx.await??; @@ -193,6 +194,7 @@ where state: self.forkchoice_state(), payload_attrs: Some(self.payload_attributes_builder.build(timestamp)), tx, + version: EngineApiMessageVersion::default(), })?; let res = rx.await??.await?; diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index 555cf8916..dd2f67916 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -26,7 +26,7 @@ use reth_chain_state::{ }; use reth_chainspec::EthereumHardforks; use reth_consensus::{Consensus, PostExecutionInput}; -use reth_engine_primitives::EngineTypes; +use reth_engine_primitives::{EngineApiMessageVersion, EngineTypes}; use reth_errors::{ConsensusError, ProviderResult}; use reth_evm::execute::BlockExecutorProvider; use reth_payload_builder::PayloadBuilderHandle; @@ -969,6 +969,7 @@ where &mut self, state: ForkchoiceState, attrs: Option, + version: EngineApiMessageVersion, ) -> ProviderResult> { trace!(target: "engine::tree", ?attrs, "invoked forkchoice update"); self.metrics.engine.forkchoice_updated_messages.increment(1); @@ -1018,7 
+1019,7 @@ where // to return an error ProviderError::HeaderNotFound(state.head_block_hash.into()) })?; - let updated = self.process_payload_attributes(attr, &tip, state); + let updated = self.process_payload_attributes(attr, &tip, state, version); return Ok(TreeOutcome::new(updated)) } @@ -1038,7 +1039,7 @@ where } if let Some(attr) = attrs { - let updated = self.process_payload_attributes(attr, &tip, state); + let updated = self.process_payload_attributes(attr, &tip, state, version); return Ok(TreeOutcome::new(updated)) } @@ -1054,7 +1055,8 @@ where if self.engine_kind.is_opstack() { if let Some(attr) = attrs { debug!(target: "engine::tree", head = canonical_header.number, "handling payload attributes for canonical head"); - let updated = self.process_payload_attributes(attr, &canonical_header, state); + let updated = + self.process_payload_attributes(attr, &canonical_header, state, version); return Ok(TreeOutcome::new(updated)) } } @@ -1206,8 +1208,14 @@ where } EngineApiRequest::Beacon(request) => { match request { - BeaconEngineMessage::ForkchoiceUpdated { state, payload_attrs, tx } => { - let mut output = self.on_forkchoice_updated(state, payload_attrs); + BeaconEngineMessage::ForkchoiceUpdated { + state, + payload_attrs, + tx, + version, + } => { + let mut output = + self.on_forkchoice_updated(state, payload_attrs, version); if let Ok(res) = &mut output { // track last received forkchoice state @@ -2484,6 +2492,7 @@ where attrs: T::PayloadAttributes, head: &Header, state: ForkchoiceState, + _version: EngineApiMessageVersion, ) -> OnForkChoiceUpdated { // 7. Client software MUST ensure that payloadAttributes.timestamp is greater than timestamp // of a block referenced by forkchoiceState.headBlockHash. If this condition isn't held @@ -2808,6 +2817,7 @@ mod tests { state: fcu_state, payload_attrs: None, tx, + version: EngineApiMessageVersion::default(), } .into(), )) @@ -3097,6 +3107,7 @@ mod tests { }, payload_attrs: None, tx, + version: EngineApiMessageVersion::default(), } .into(), )) diff --git a/crates/engine/util/src/engine_store.rs b/crates/engine/util/src/engine_store.rs index 85c5e126f..6b584f0c1 100644 --- a/crates/engine/util/src/engine_store.rs +++ b/crates/engine/util/src/engine_store.rs @@ -64,7 +64,12 @@ impl EngineMessageStore { fs::create_dir_all(&self.path)?; // ensure that store path had been created let timestamp = received_at.duration_since(SystemTime::UNIX_EPOCH).unwrap().as_millis(); match msg { - BeaconEngineMessage::ForkchoiceUpdated { state, payload_attrs, tx: _tx } => { + BeaconEngineMessage::ForkchoiceUpdated { + state, + payload_attrs, + tx: _tx, + version: _version, + } => { let filename = format!("{}-fcu-{}.json", timestamp, state.head_block_hash); fs::write( self.path.join(filename), diff --git a/crates/engine/util/src/reorg.rs b/crates/engine/util/src/reorg.rs index d109fb9e9..0d51d2dfa 100644 --- a/crates/engine/util/src/reorg.rs +++ b/crates/engine/util/src/reorg.rs @@ -8,7 +8,7 @@ use alloy_rpc_types_engine::{ use futures::{stream::FuturesUnordered, Stream, StreamExt, TryFutureExt}; use itertools::Either; use reth_beacon_consensus::{BeaconEngineMessage, BeaconOnNewPayloadError, OnForkChoiceUpdated}; -use reth_engine_primitives::EngineTypes; +use reth_engine_primitives::{EngineApiMessageVersion, EngineTypes}; use reth_errors::{BlockExecutionError, BlockValidationError, RethError, RethResult}; use reth_ethereum_forks::EthereumHardforks; use reth_evm::{ @@ -211,18 +211,32 @@ where state: reorg_forkchoice_state, payload_attrs: None, tx: reorg_fcu_tx, + 
version: EngineApiMessageVersion::default(), }, ]); *this.state = EngineReorgState::Reorg { queue }; continue } - (Some(BeaconEngineMessage::ForkchoiceUpdated { state, payload_attrs, tx }), _) => { + ( + Some(BeaconEngineMessage::ForkchoiceUpdated { + state, + payload_attrs, + tx, + version, + }), + _, + ) => { // Record last forkchoice state forwarded to the engine. // We do not care if it's valid since engine should be able to handle // reorgs that rely on invalid forkchoice state. *this.last_forkchoice_state = Some(state); *this.forkchoice_states_forwarded += 1; - Some(BeaconEngineMessage::ForkchoiceUpdated { state, payload_attrs, tx }) + Some(BeaconEngineMessage::ForkchoiceUpdated { + state, + payload_attrs, + tx, + version, + }) } (item, _) => item, }; diff --git a/crates/engine/util/src/skip_fcu.rs b/crates/engine/util/src/skip_fcu.rs index e110ceced..adadfb595 100644 --- a/crates/engine/util/src/skip_fcu.rs +++ b/crates/engine/util/src/skip_fcu.rs @@ -45,7 +45,12 @@ where loop { let next = ready!(this.stream.poll_next_unpin(cx)); let item = match next { - Some(BeaconEngineMessage::ForkchoiceUpdated { state, payload_attrs, tx }) => { + Some(BeaconEngineMessage::ForkchoiceUpdated { + state, + payload_attrs, + tx, + version, + }) => { if this.skipped < this.threshold { *this.skipped += 1; tracing::warn!(target: "engine::stream::skip_fcu", ?state, ?payload_attrs, threshold=this.threshold, skipped=this.skipped, "Skipping FCU"); @@ -53,7 +58,12 @@ where continue } *this.skipped = 0; - Some(BeaconEngineMessage::ForkchoiceUpdated { state, payload_attrs, tx }) + Some(BeaconEngineMessage::ForkchoiceUpdated { + state, + payload_attrs, + tx, + version, + }) } next => next, }; diff --git a/crates/payload/primitives/src/lib.rs b/crates/payload/primitives/src/lib.rs index 08aa42800..7013d9fd9 100644 --- a/crates/payload/primitives/src/lib.rs +++ b/crates/payload/primitives/src/lib.rs @@ -324,22 +324,23 @@ where } /// The version of Engine API message. -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)] pub enum EngineApiMessageVersion { /// Version 1 - V1, + V1 = 1, /// Version 2 /// /// Added in the Shanghai hardfork. - V2, + V2 = 2, /// Version 3 /// /// Added in the Cancun hardfork. - V3, + #[default] + V3 = 3, /// Version 4 /// /// Added in the Prague hardfork. - V4, + V4 = 4, } /// Determines how we should choose the payload to return. diff --git a/crates/payload/primitives/src/traits.rs b/crates/payload/primitives/src/traits.rs index df7614902..f6a043755 100644 --- a/crates/payload/primitives/src/traits.rs +++ b/crates/payload/primitives/src/traits.rs @@ -84,7 +84,7 @@ pub trait PayloadBuilderAttributes: Send + Sync + std::fmt::Debug { /// Creates a new payload builder for the given parent block and the attributes. /// - /// Derives the unique [`PayloadId`] for the given parent and attributes + /// Derives the unique [`PayloadId`] for the given parent, attributes and version. fn try_new( parent: B256, rpc_payload_attributes: Self::RpcPayloadAttributes, diff --git a/crates/rpc/rpc-engine-api/src/engine_api.rs b/crates/rpc/rpc-engine-api/src/engine_api.rs index eb280408e..cca9f5d6b 100644 --- a/crates/rpc/rpc-engine-api/src/engine_api.rs +++ b/crates/rpc/rpc-engine-api/src/engine_api.rs @@ -616,7 +616,8 @@ where // To do this, we set the payload attrs to `None` if attribute validation failed, but // we still apply the forkchoice update. 
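// --- Illustrative sketch, not part of the diff above or below ---
// A minimal example of what the new `Default` derive and the explicit u8
// discriminants on `EngineApiMessageVersion` (payload/primitives hunk above)
// provide: `default()` is V3, and the derived ordering makes version gating a
// simple comparison. `requires_parent_beacon_block_root` is a hypothetical
// helper for illustration only, not an existing reth API.
fn requires_parent_beacon_block_root(version: EngineApiMessageVersion) -> bool {
    // The default version is V3 (Cancun), i.e. discriminant 3.
    debug_assert_eq!(EngineApiMessageVersion::default() as u8, 3);
    // Payload attributes from Cancun (V3) onwards include a parent beacon block root.
    version >= EngineApiMessageVersion::V3
}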
if let Err(err) = attr_validation_res { - let fcu_res = self.inner.beacon_consensus.fork_choice_updated(state, None).await?; + let fcu_res = + self.inner.beacon_consensus.fork_choice_updated(state, None, version).await?; // TODO: decide if we want this branch - the FCU INVALID response might be more // useful than the payload attributes INVALID response if fcu_res.is_invalid() { @@ -626,7 +627,7 @@ where } } - Ok(self.inner.beacon_consensus.fork_choice_updated(state, payload_attrs).await?) + Ok(self.inner.beacon_consensus.fork_choice_updated(state, payload_attrs, version).await?) } } From b5c0a46363ad4d32c5f6fbb704c4da3e98e0495b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C3=A9a=20Narzis?= <78718413+lean-apple@users.noreply.github.com> Date: Mon, 28 Oct 2024 19:03:20 +0700 Subject: [PATCH 303/425] feat: add `pending|queued` txs pool helpers (#12128) --- crates/transaction-pool/src/lib.rs | 14 +++++++++++++ crates/transaction-pool/src/noop.rs | 14 +++++++++++++ crates/transaction-pool/src/pool/mod.rs | 18 +++++++++++++++++ crates/transaction-pool/src/pool/txpool.rs | 23 ++++++++++++++++++++++ crates/transaction-pool/src/traits.rs | 14 ++++++++++++- 5 files changed, 82 insertions(+), 1 deletion(-) diff --git a/crates/transaction-pool/src/lib.rs b/crates/transaction-pool/src/lib.rs index 2cffcd33f..609ab987f 100644 --- a/crates/transaction-pool/src/lib.rs +++ b/crates/transaction-pool/src/lib.rs @@ -503,6 +503,20 @@ where self.pool.get_transactions_by_sender(sender) } + fn get_pending_transactions_by_sender( + &self, + sender: Address, + ) -> Vec>> { + self.pool.get_pending_transactions_by_sender(sender) + } + + fn get_queued_transactions_by_sender( + &self, + sender: Address, + ) -> Vec>> { + self.pool.get_queued_transactions_by_sender(sender) + } + fn get_highest_transaction_by_sender( &self, sender: Address, diff --git a/crates/transaction-pool/src/noop.rs b/crates/transaction-pool/src/noop.rs index 11c5e7eea..817ea7bad 100644 --- a/crates/transaction-pool/src/noop.rs +++ b/crates/transaction-pool/src/noop.rs @@ -220,6 +220,20 @@ impl TransactionPool for NoopTransactionPool { vec![] } + fn get_pending_transactions_by_sender( + &self, + _sender: Address, + ) -> Vec>> { + vec![] + } + + fn get_queued_transactions_by_sender( + &self, + _sender: Address, + ) -> Vec>> { + vec![] + } + fn get_highest_transaction_by_sender( &self, _sender: Address, diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index 600a8da93..a408c7684 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -776,6 +776,24 @@ where self.get_pool_data().get_transactions_by_sender(sender_id) } + /// Returns all queued transactions of the address by sender + pub(crate) fn get_queued_transactions_by_sender( + &self, + sender: Address, + ) -> Vec>> { + let sender_id = self.get_sender_id(sender); + self.get_pool_data().pending_txs_by_sender(sender_id) + } + + /// Returns all pending transactions of the address by sender + pub(crate) fn get_pending_transactions_by_sender( + &self, + sender: Address, + ) -> Vec>> { + let sender_id = self.get_sender_id(sender); + self.get_pool_data().queued_txs_by_sender(sender_id) + } + /// Returns the highest transaction of the address pub(crate) fn get_highest_transaction_by_sender( &self, diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index c6369c98a..b11815fc4 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ 
b/crates/transaction-pool/src/pool/txpool.rs @@ -364,11 +364,26 @@ impl TxPool { self.pending_pool.all() } + /// Returns all pending transactions for the specified sender + pub(crate) fn pending_txs_by_sender( + &self, + sender: SenderId, + ) -> Vec>> { + self.pending_transactions_iter().filter(|tx| tx.sender_id() == sender).collect() + } + /// Returns all transactions from parked pools pub(crate) fn queued_transactions(&self) -> Vec>> { self.basefee_pool.all().chain(self.queued_pool.all()).collect() } + /// Returns an iterator over all transactions from parked pools + pub(crate) fn queued_transactions_iter( + &self, + ) -> impl Iterator>> + '_ { + self.basefee_pool.all().chain(self.queued_pool.all()) + } + /// Returns queued and pending transactions for the specified sender pub fn queued_and_pending_txs_by_sender( &self, @@ -377,6 +392,14 @@ impl TxPool { (self.queued_pool.get_txs_by_sender(sender), self.pending_pool.get_txs_by_sender(sender)) } + /// Returns all queued transactions for the specified sender + pub(crate) fn queued_txs_by_sender( + &self, + sender: SenderId, + ) -> Vec>> { + self.queued_transactions_iter().filter(|tx| tx.sender_id() == sender).collect() + } + /// Returns `true` if the transaction with the given hash is already included in this pool. pub(crate) fn contains(&self, tx_hash: &TxHash) -> bool { self.all_transactions.contains(tx_hash) diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index 9db9c53d3..ff6e8855e 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -352,6 +352,18 @@ pub trait TransactionPool: Send + Sync + Clone { sender: Address, ) -> Vec>>; + /// Returns all pending transactions sent by a given user + fn get_pending_transactions_by_sender( + &self, + sender: Address, + ) -> Vec>>; + + /// Returns all queued transactions sent by a given user + fn get_queued_transactions_by_sender( + &self, + sender: Address, + ) -> Vec>>; + /// Returns the highest transaction sent by a given user fn get_highest_transaction_by_sender( &self, @@ -1332,7 +1344,7 @@ impl TryFrom for EthPooledTransaction { } EIP4844_TX_TYPE_ID => { // doesn't have a blob sidecar - return Err(TryFromRecoveredTransactionError::BlobSidecarMissing) + return Err(TryFromRecoveredTransactionError::BlobSidecarMissing); } unsupported => { // unsupported transaction type From 87a615fe265137fab0831fb9047268addbbe4d2c Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Mon, 28 Oct 2024 21:28:52 +0900 Subject: [PATCH 304/425] fix(ci): remove renaming from `compact-codec` (#12133) --- .github/workflows/compact.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/compact.yml b/.github/workflows/compact.yml index c5d39f72a..484b27c82 100644 --- a/.github/workflows/compact.yml +++ b/.github/workflows/compact.yml @@ -37,8 +37,7 @@ jobs: # On `main` branch, generates test vectors and serializes them to disk using `Compact`. 
- name: Generate compact vectors run: | - ${{ matrix.bin }} -- test-vectors compact --write && - for f in ./testdata/micro/compact/*; do mv "$f" "$(dirname "$f")/$(basename "$f" | awk -F '__' '{print $NF}')"; done + ${{ matrix.bin }} -- test-vectors compact --write - name: Checkout PR uses: actions/checkout@v4 with: From d74730af3b8fbbf67d8db25ae43916e67a4b89b6 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Mon, 28 Oct 2024 16:31:08 +0400 Subject: [PATCH 305/425] feat: add a wrapper for `BestTransactions` prioritizing given senders (#12123) --- crates/transaction-pool/src/pool/best.rs | 86 +++++++++++++++++++++++- crates/transaction-pool/src/pool/mod.rs | 2 +- crates/transaction-pool/src/traits.rs | 4 +- 3 files changed, 88 insertions(+), 4 deletions(-) diff --git a/crates/transaction-pool/src/pool/best.rs b/crates/transaction-pool/src/pool/best.rs index 268e3e262..763572e7e 100644 --- a/crates/transaction-pool/src/pool/best.rs +++ b/crates/transaction-pool/src/pool/best.rs @@ -2,10 +2,10 @@ use crate::{ identifier::TransactionId, pool::pending::PendingTransaction, PoolTransaction, TransactionOrdering, ValidPoolTransaction, }; -use alloy_primitives::B256 as TxHash; +use alloy_primitives::{Address, B256 as TxHash}; use core::fmt; use std::{ - collections::{BTreeMap, BTreeSet, HashSet}, + collections::{BTreeMap, BTreeSet, HashSet, VecDeque}, sync::Arc, }; @@ -259,6 +259,88 @@ impl fmt::Debug for BestTransactionFilter { } } +/// Wrapper over [`crate::traits::BestTransactions`] that prioritizes transactions of certain +/// senders capping total gas used by such transactions. +#[derive(Debug)] +pub struct BestTransactionsWithPrioritizedSenders { + /// Inner iterator + inner: I, + /// A set of senders which transactions should be prioritized + prioritized_senders: HashSet
, + /// Maximum total gas limit of prioritized transactions + max_prioritized_gas: u64, + /// Buffer with transactions that are not being prioritized. Those will be the first to be + /// included after the prioritized transactions + buffer: VecDeque, + /// Tracker of total gas limit of prioritized transactions. Once it reaches + /// `max_prioritized_gas` no more transactions will be prioritized + prioritized_gas: u64, +} + +impl BestTransactionsWithPrioritizedSenders { + /// Constructs a new [`BestTransactionsWithPrioritizedSenders`]. + pub fn new(prioritized_senders: HashSet
, max_prioritized_gas: u64, inner: I) -> Self { + Self { + inner, + prioritized_senders, + max_prioritized_gas, + buffer: Default::default(), + prioritized_gas: Default::default(), + } + } +} + +impl Iterator for BestTransactionsWithPrioritizedSenders +where + I: crate::traits::BestTransactions>>, + T: PoolTransaction, +{ + type Item = ::Item; + + fn next(&mut self) -> Option { + // If we have space, try prioritizing transactions + if self.prioritized_gas < self.max_prioritized_gas { + for item in &mut self.inner { + if self.prioritized_senders.contains(&item.transaction.sender()) && + self.prioritized_gas + item.transaction.gas_limit() <= + self.max_prioritized_gas + { + self.prioritized_gas += item.transaction.gas_limit(); + return Some(item) + } + self.buffer.push_back(item); + } + } + + if let Some(item) = self.buffer.pop_front() { + Some(item) + } else { + self.inner.next() + } + } +} + +impl crate::traits::BestTransactions for BestTransactionsWithPrioritizedSenders +where + I: crate::traits::BestTransactions>>, + T: PoolTransaction, +{ + fn mark_invalid(&mut self, tx: &Self::Item) { + self.inner.mark_invalid(tx) + } + + fn no_updates(&mut self) { + self.inner.no_updates() + } + + fn set_skip_blobs(&mut self, skip_blobs: bool) { + if skip_blobs { + self.buffer.retain(|tx| !tx.transaction.is_eip4844()) + } + self.inner.set_skip_blobs(skip_blobs) + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index a408c7684..69f17504f 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -106,7 +106,7 @@ use crate::{ traits::{GetPooledTransactionLimit, NewBlobSidecar, TransactionListenerKind}, validate::ValidTransaction, }; -pub use best::BestTransactionFilter; +pub use best::{BestTransactionFilter, BestTransactionsWithPrioritizedSenders}; pub use blob::{blob_tx_priority, fee_delta}; pub use events::{FullTransactionEvent, TransactionEvent}; pub use listener::{AllTransactionsEvents, TransactionEvents}; diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index ff6e8855e..fbbddb98f 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -776,7 +776,9 @@ pub trait BestTransactions: Iterator + Send { /// If called then the iterator will no longer yield blob transactions. /// /// Note: this will also exclude any transactions that depend on blob transactions. - fn skip_blobs(&mut self); + fn skip_blobs(&mut self) { + self.set_skip_blobs(true); + } /// Controls whether the iterator skips blob transactions or not. /// From 719ca3a68280947dce04de3fea6a58bf246e923a Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 28 Oct 2024 12:56:28 +0100 Subject: [PATCH 306/425] chain-spec: use alloy `MAINNET_DEPOSIT_CONTRACT_ADDRESS` constant (#12113) --- crates/chainspec/src/constants.rs | 5 +++-- crates/chainspec/src/spec.rs | 7 +++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/crates/chainspec/src/constants.rs b/crates/chainspec/src/constants.rs index 2e22b2299..3f46fb6b7 100644 --- a/crates/chainspec/src/constants.rs +++ b/crates/chainspec/src/constants.rs @@ -1,11 +1,12 @@ use crate::spec::DepositContract; -use alloy_primitives::{address, b256}; +use alloy_eips::eip6110::MAINNET_DEPOSIT_CONTRACT_ADDRESS; +use alloy_primitives::b256; /// Gas per transaction not creating a contract. 
pub const MIN_TRANSACTION_GAS: u64 = 21_000u64; /// Deposit contract address: `0x00000000219ab540356cbb839cbe05303d7705fa` pub(crate) const MAINNET_DEPOSIT_CONTRACT: DepositContract = DepositContract::new( - address!("00000000219ab540356cbb839cbe05303d7705fa"), + MAINNET_DEPOSIT_CONTRACT_ADDRESS, 11052984, b256!("649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c5"), ); diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index 02f4b5ca9..b0958d4fb 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -3,7 +3,10 @@ pub use alloy_eips::eip1559::BaseFeeParams; use alloc::{boxed::Box, sync::Arc, vec::Vec}; use alloy_chains::{Chain, NamedChain}; use alloy_consensus::constants::EMPTY_WITHDRAWALS; -use alloy_eips::{eip1559::INITIAL_BASE_FEE, eip7685::EMPTY_REQUESTS_HASH}; +use alloy_eips::{ + eip1559::INITIAL_BASE_FEE, eip6110::MAINNET_DEPOSIT_CONTRACT_ADDRESS, + eip7685::EMPTY_REQUESTS_HASH, +}; use alloy_genesis::Genesis; use alloy_primitives::{address, b256, Address, BlockNumber, B256, U256}; use derive_more::From; @@ -39,7 +42,7 @@ pub static MAINNET: LazyLock> = LazyLock::new(|| { hardforks: EthereumHardfork::mainnet().into(), // https://etherscan.io/tx/0xe75fb554e433e03763a1560646ee22dcb74e5274b34c5ad644e7c0f619a7e1d0 deposit_contract: Some(DepositContract::new( - address!("00000000219ab540356cbb839cbe05303d7705fa"), + MAINNET_DEPOSIT_CONTRACT_ADDRESS, 11052984, b256!("649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c5"), )), From 72096221dfecd611128848f3d4dea0970542e056 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 28 Oct 2024 13:08:57 +0100 Subject: [PATCH 307/425] refactor(chainspec): refac and improved doc for `last_block_fork_before_merge_or_timestamp` (#12114) --- crates/chainspec/src/spec.rs | 44 +++++++++++++++++++----------------- 1 file changed, 23 insertions(+), 21 deletions(-) diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index b0958d4fb..779eb8a37 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -512,34 +512,36 @@ impl ChainSpec { } } - /// An internal helper function that returns the block number of the last block-based - /// fork that occurs before any existing TTD (merge)/timestamp based forks. + /// This internal helper function retrieves the block number of the last block-based fork + /// that occurs before: + /// - Any existing Total Terminal Difficulty (TTD) or + /// - Timestamp-based forks in the current [`ChainSpec`]. /// - /// Note: this returns None if the `ChainSpec` is not configured with a TTD/Timestamp fork. + /// The function operates by examining the configured hard forks in the chain. It iterates + /// through the fork conditions and identifies the most recent block-based fork that + /// precedes any TTD or timestamp-based conditions. + /// + /// If there are no block-based forks found before these conditions, or if the [`ChainSpec`] + /// is not configured with a TTD or timestamp fork, this function will return `None`. pub(crate) fn last_block_fork_before_merge_or_timestamp(&self) -> Option { let mut hardforks_iter = self.hardforks.forks_iter().peekable(); while let Some((_, curr_cond)) = hardforks_iter.next() { if let Some((_, next_cond)) = hardforks_iter.peek() { - // peek and find the first occurrence of ForkCondition::TTD (merge) , or in - // custom ChainSpecs, the first occurrence of - // ForkCondition::Timestamp. 
If curr_cond is ForkCondition::Block at - // this point, which it should be in most "normal" ChainSpecs, - // return its block_num + // Match against the `next_cond` to see if it represents: + // - A TTD (merge) + // - A timestamp-based fork match next_cond { - ForkCondition::TTD { fork_block, .. } => { - // handle Sepolia merge netsplit case - if fork_block.is_some() { - return *fork_block - } - // ensure curr_cond is indeed ForkCondition::Block and return block_num - if let ForkCondition::Block(block_num) = curr_cond { - return Some(block_num) - } - } - ForkCondition::Timestamp(_) => { - // ensure curr_cond is indeed ForkCondition::Block and return block_num + // If the next fork is TTD and specifies a specific block, return that block + // number + ForkCondition::TTD { fork_block: Some(block), .. } => return Some(*block), + + // If the next fork is TTD without a specific block or is timestamp-based, + // return the block number of the current condition if it is block-based. + ForkCondition::TTD { .. } | ForkCondition::Timestamp(_) => { + // Check if `curr_cond` is a block-based fork and return its block number if + // true. if let ForkCondition::Block(block_num) = curr_cond { - return Some(block_num) + return Some(block_num); } } ForkCondition::Block(_) | ForkCondition::Never => continue, From 1f1c68d65e5534e2b351a3af8fc56711d56ee129 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Mon, 28 Oct 2024 12:36:12 +0000 Subject: [PATCH 308/425] perf(trie): cache prefix set lookups in sparse trie (#12088) --- crates/trie/sparse/src/trie.rs | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index 91362eed5..035eeaf73 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -631,7 +631,7 @@ impl RevealedSparseTrie { fn rlp_node(&mut self, path: Nibbles, prefix_set: &mut PrefixSet) -> RlpNode { // stack of paths we need rlp nodes for - let mut path_stack = Vec::from([path]); + let mut path_stack = Vec::from([(path, None)]); // stack of rlp nodes let mut rlp_node_stack = Vec::<(Nibbles, RlpNode)>::new(); // reusable branch child path @@ -639,7 +639,13 @@ impl RevealedSparseTrie { // reusable branch value stack let mut branch_value_stack_buf = SmallVec::<[RlpNode; 16]>::new_const(); - 'main: while let Some(path) = path_stack.pop() { + 'main: while let Some((path, mut is_in_prefix_set)) = path_stack.pop() { + // Check if the path is in the prefix set. + // First, check the cached value. If it's `None`, then check the prefix set, and update + // the cached value. 
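// --- Illustrative sketch, not part of the diff below ---
// The hunk that follows memoizes `prefix_set.contains(..)` in an `Option<bool>`
// via `get_or_insert_with`, so the prefix-set lookup for a given path runs at
// most once per stack entry. The same pattern in isolation, using only std
// types (`sketch_cached_contains` and `lookups` are made-up names):
fn sketch_cached_contains(set: &std::collections::HashSet<String>, path: &str) -> bool {
    let mut lookups = 0usize;
    let mut cached: Option<bool> = None;
    let mut contains = |p: &str| {
        *cached.get_or_insert_with(|| {
            lookups += 1; // the expensive lookup only runs on the first call
            set.contains(p)
        })
    };
    let first = contains(path);
    let second = contains(path); // served from the cached value
    debug_assert_eq!(first, second);
    debug_assert_eq!(lookups, 1);
    first
}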
+ let mut prefix_set_contains = + |path: &Nibbles| *is_in_prefix_set.get_or_insert_with(|| prefix_set.contains(path)); + let rlp_node = match self.nodes.get_mut(&path).unwrap() { SparseNode::Empty => RlpNode::word_rlp(&EMPTY_ROOT_HASH), SparseNode::Hash(hash) => RlpNode::word_rlp(hash), @@ -647,7 +653,7 @@ impl RevealedSparseTrie { self.rlp_buf.clear(); let mut path = path.clone(); path.extend_from_slice_unchecked(key); - if let Some(hash) = hash.filter(|_| !prefix_set.contains(&path)) { + if let Some(hash) = hash.filter(|_| !prefix_set_contains(&path)) { RlpNode::word_rlp(&hash) } else { let value = self.values.get(&path).unwrap(); @@ -659,7 +665,7 @@ impl RevealedSparseTrie { SparseNode::Extension { key, hash } => { let mut child_path = path.clone(); child_path.extend_from_slice_unchecked(key); - if let Some(hash) = hash.filter(|_| !prefix_set.contains(&path)) { + if let Some(hash) = hash.filter(|_| !prefix_set_contains(&path)) { RlpNode::word_rlp(&hash) } else if rlp_node_stack.last().map_or(false, |e| e.0 == child_path) { let (_, child) = rlp_node_stack.pop().unwrap(); @@ -668,12 +674,13 @@ impl RevealedSparseTrie { *hash = rlp_node.as_hash(); rlp_node } else { - path_stack.extend([path, child_path]); // need to get rlp node for child first + // need to get rlp node for child first + path_stack.extend([(path, is_in_prefix_set), (child_path, None)]); continue } } SparseNode::Branch { state_mask, hash } => { - if let Some(hash) = hash.filter(|_| !prefix_set.contains(&path)) { + if let Some(hash) = hash.filter(|_| !prefix_set_contains(&path)) { rlp_node_stack.push((path, RlpNode::word_rlp(&hash))); continue } @@ -700,8 +707,8 @@ impl RevealedSparseTrie { added_children = true; } else { debug_assert!(!added_children); - path_stack.push(path); - path_stack.extend(branch_child_buf.drain(..)); + path_stack.push((path, is_in_prefix_set)); + path_stack.extend(branch_child_buf.drain(..).map(|p| (p, None))); continue 'main } } From e446feb116049c74a9ff21701fe14141e4f8e675 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 12:44:11 +0000 Subject: [PATCH 309/425] chore(deps): weekly `cargo update` (#12108) Co-authored-by: github-merge-queue <118344674+github-merge-queue@users.noreply.github.com> Co-authored-by: joshieDo <93316087+joshieDo@users.noreply.github.com> --- Cargo.lock | 319 ++++++++++++++++++++++++++--------------------------- Cargo.toml | 6 +- 2 files changed, 159 insertions(+), 166 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6762da237..c7a47e8f8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -97,9 +97,9 @@ checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "alloy-chains" -version = "0.1.40" +version = "0.1.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4932d790c723181807738cf1ac68198ab581cd699545b155601332541ee47bd" +checksum = "dca4a1469a3e572e9ba362920ff145f5d0a00a3e71a64ddcb4a3659cf64c76a7" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -150,9 +150,9 @@ dependencies = [ [[package]] name = "alloy-dyn-abi" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6228abfc751a29cde117b0879b805a3e0b3b641358f063272c83ca459a56886" +checksum = "5647fce5a168f9630f935bf7821c4207b1755184edaeba783cb4e11d35058484" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -229,9 +229,9 @@ dependencies = [ [[package]] name = "alloy-json-abi" -version = "0.8.8" 
+version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d46eb5871592c216d39192499c95a99f7175cb94104f88c307e6dc960676d9f1" +checksum = "4b5671117c38b1c2306891f97ad3828d85487087f54ebe2c7591a055ea5bcea7" dependencies = [ "alloy-primitives", "alloy-sol-type-parser", @@ -276,9 +276,9 @@ dependencies = [ [[package]] name = "alloy-network-primitives" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a968c063fcfcb937736665c865a71fc2242b68916156f5ffa41fee7b44bb695" +checksum = "514f70ee2a953db21631cd817b13a1571474ec77ddc03d47616d5e8203489fde" dependencies = [ "alloy-consensus", "alloy-eips", @@ -398,9 +398,9 @@ dependencies = [ [[package]] name = "alloy-rlp" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26154390b1d205a4a7ac7352aa2eb4f81f391399d4e2f546fb81a2f8bb383f62" +checksum = "da0822426598f95e45dd1ea32a738dac057529a709ee645fcc516ffa4cbde08f" dependencies = [ "alloy-rlp-derive", "arrayvec", @@ -409,13 +409,13 @@ dependencies = [ [[package]] name = "alloy-rlp-derive" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d0f2d905ebd295e7effec65e5f6868d153936130ae718352771de3e7d03c75c" +checksum = "2b09cae092c27b6f1bde952653a22708691802e57bfef4a2973b80bea21efd3f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -639,7 +639,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -655,7 +655,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", "syn-solidity", "tiny-keccak", ] @@ -671,15 +671,15 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", "syn-solidity", ] [[package]] name = "alloy-sol-type-parser" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f631f0bd9a9d79619b27c91b6b1ab2c4ef4e606a65192369a1ee05d40dcf81cc" +checksum = "45d1fbee9e698f3ba176b6e7a145f4aefe6d2b746b611e8bb246fe11a0e9f6c4" dependencies = [ "serde", "winnow", @@ -687,9 +687,9 @@ dependencies = [ [[package]] name = "alloy-sol-types" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2841af22d99e2c0f82a78fe107b6481be3dd20b89bfb067290092794734343a" +checksum = "086f41bc6ebcd8cb15f38ba20e47be38dd03692149681ce8061c35d960dbf850" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -813,9 +813,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.6.15" +version = "0.6.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" +checksum = "23a1e53f0f5d86382dafe1cf314783b2044280f406e7e1506368220ad11b1338" dependencies = [ "anstyle", "anstyle-parse", @@ -828,43 +828,43 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.8" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" +checksum = "8365de52b16c035ff4fcafe0092ba9390540e3e352870ac09933bebcaa2c8c56" [[package]] name = "anstyle-parse" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.4" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" +checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.90" +version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37bf3594c4c988a53154954629820791dde498571819ae4ca50ca811e060cc95" +checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8" [[package]] name = "aquamarine" @@ -877,7 +877,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -1053,9 +1053,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.16" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "103db485efc3e41214fe4fda9f3dbeae2eb9082f48fd236e6095627a9422066e" +checksum = "0cb8f1d480b0ea3783ab015936d2a55c87e219676f0c0b7dec61494043f21857" dependencies = [ "brotli", "flate2", @@ -1100,7 +1100,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -1111,7 +1111,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -1149,7 +1149,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -1255,7 +1255,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -1437,7 +1437,7 @@ checksum = "240f4126219a83519bad05c9a40bfc0303921eeb571fc2d7e44c17ffac99d3f1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", "synstructure", ] @@ -1559,7 +1559,7 @@ checksum = "bcfcc3cd946cb52f0bbfdbbcfa2f4e24f75ebb6c0e1002f7c25904fada18b9ec" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -1570,9 +1570,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.7.2" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" +checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" dependencies = [ "serde", ] @@ -1771,7 +1771,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -1833,9 +1833,9 @@ dependencies = [ [[package]] name = "colorchoice" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "combine" @@ -2228,7 +2228,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -2252,7 +2252,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -2263,7 +2263,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -2385,7 +2385,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -2396,7 +2396,7 @@ checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -2417,7 +2417,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", "unicode-xid", ] @@ -2531,7 +2531,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -2679,7 +2679,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -2690,7 +2690,7 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -2747,7 +2747,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -3302,7 +3302,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -3828,7 +3828,7 @@ dependencies = [ "quote", "serde", "serde_json", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -3978,7 +3978,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -4146,7 +4146,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b23a0c8dfe501baac4adf6ebbfa6eddf8f0c07f56b058cc1288017e32397846c" dependencies = [ "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -4394,7 +4394,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -4570,9 +4570,9 @@ dependencies = [ [[package]] name = "libm" -version = "0.2.8" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" +checksum = "a00419de735aac21d53b0de5ce2c03bd3627277cf471300f27ebc89f7d828047" [[package]] name = "libp2p-identity" @@ -4795,9 +4795,9 @@ dependencies = [ [[package]] name = "metrics" -version = "0.23.0" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "884adb57038347dfbaf2d5065887b6cf4312330dc8e94bc30a1a839bd79d3261" +checksum = "8ae428771d17306715c5091d446327d1cfdedc82185c65ba8423ab404e45bf10" dependencies = [ "ahash", "portable-atomic", @@ -4812,14 +4812,14 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] name = "metrics-exporter-prometheus" -version = 
"0.15.3" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4f0c8427b39666bf970460908b213ec09b3b350f20c0c2eabcbba51704a08e6" +checksum = "85b6f8152da6d7892ff1b7a1c0fa3f435e92b5918ad67035c3bb432111d9a29b" dependencies = [ "base64 0.22.1", "indexmap 2.6.0", @@ -4831,9 +4831,9 @@ dependencies = [ [[package]] name = "metrics-process" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e69e6ced169644e186e060ddc15f3923fdf06862c811a867bb1e5e7c7824f4d0" +checksum = "57ca8ecd85575fbb143b2678cb123bb818779391ec0f745b1c4a9dbabadde407" dependencies = [ "libc", "libproc", @@ -4847,15 +4847,14 @@ dependencies = [ [[package]] name = "metrics-util" -version = "0.17.0" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4259040465c955f9f2f1a4a8a16dc46726169bca0f88e8fb2dbeced487c3e828" +checksum = "15b482df36c13dd1869d73d14d28cd4855fbd6cfc32294bee109908a9f4a4ed7" dependencies = [ "crossbeam-epoch", "crossbeam-utils", - "hashbrown 0.14.5", + "hashbrown 0.15.0", "metrics", - "num_cpus", "quanta", "sketches-ddsketch", ] @@ -4959,7 +4958,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -5004,7 +5003,7 @@ dependencies = [ "percent-encoding", "serde", "static_assertions", - "unsigned-varint 0.8.0", + "unsigned-varint", "url", ] @@ -5021,12 +5020,12 @@ dependencies = [ [[package]] name = "multihash" -version = "0.19.1" +version = "0.19.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "076d548d76a0e2a0d4ab471d0b1c36c577786dfc4471242035d97a12a735c492" +checksum = "cc41f430805af9d1cf4adae4ed2149c759b877b01d909a1f40256188d09345d2" dependencies = [ "core2", - "unsigned-varint 0.7.2", + "unsigned-varint", ] [[package]] @@ -5207,7 +5206,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -5260,9 +5259,9 @@ checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "op-alloy-consensus" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99d49163f952491820088dd0e66f3a35d63337c3066eceff0a931bf83a8e2101" +checksum = "ba7c98055fd048073738df0cc6d6537e992a0d8828f39d99a469e870db126dbd" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5278,9 +5277,9 @@ dependencies = [ [[package]] name = "op-alloy-genesis" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e46c2ab105f679f0cbfbc3fb762f3456d4b8556c841e667fc8f3c2226eb6c1e" +checksum = "d631e8113cf88d30e621022677209caa148a9ca3ccb590fd34bbd1c731e3aff3" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5292,9 +5291,9 @@ dependencies = [ [[package]] name = "op-alloy-network" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75ff1ea317441b9eb6317b24d13f9088e3b14ef48b15bfb6a125ca404df036d8" +checksum = "1eabe7683d7e19c7cc5171d664e49fc449176cf1334ffff82808e2a7eea5933a" dependencies = [ "alloy-consensus", "alloy-network", @@ -5306,9 +5305,9 @@ dependencies = [ [[package]] name = "op-alloy-protocol" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c439457b2a1791325603fc18a94cc175e0b4b1127f11ff8a45071f05d044dcb" +checksum = 
"9b39574acb1873315e6bd89df174f6223e897188fb87eeea2ad1eda04f7d28eb" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5323,9 +5322,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c9556293835232b019ec9c6fd84e4265a3151111af60ea09b5b513e3dbed41c" +checksum = "919e9b69212d61f3c8932bfb717c7ad458ea3fc52072b3433d99994f8223d555" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5341,9 +5340,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types-engine" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a42a5ac4e07ed226b6a2aeefaad9b2cc7ec160e372ba626a4214d681a355fc2" +checksum = "0e3a47ea24cee189b4351be247fd138c68571704ee57060cf5a722502f44412c" dependencies = [ "alloy-primitives", "alloy-rpc-types-engine", @@ -5561,7 +5560,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -5575,29 +5574,29 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf123a161dde1e524adf36f90bc5d8d3462824a9c43553ad07a8183161189ec" +checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4502d8515ca9f32f1fb543d987f63d95a14934883db45bdb48060b6b69257f8" +checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" [[package]] name = "pin-utils" @@ -5757,12 +5756,12 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.2.23" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "904afd36257cdb6ce0bee88b7981847bd7b955e5e216bb32f466b302923ad446" +checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ "proc-macro2", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -5813,14 +5812,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] name = "proc-macro2" -version = "1.0.88" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c3a7fc5db1e57d5a779a352c8cdb57b29aa4c40cc69c3a68a7fedc815fbf2f9" +checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" dependencies = [ "unicode-ident", ] @@ -5911,7 +5910,7 @@ checksum = "6ff7ff745a347b87471d859a377a9a404361e7efc2a971d73424a6d183c0fc77" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -6175,9 +6174,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", 
"memchr", @@ -6731,7 +6730,7 @@ dependencies = [ "proc-macro2", "quote", "similar-asserts", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -9303,9 +9302,9 @@ dependencies = [ [[package]] name = "revm" -version = "17.0.0" +version = "17.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eab16cb0a8cd5ac88b11230b20df588b7e8aae7dfab4b3f830e98aebeb4b365" +checksum = "055bee6a81aaeee8c2389ae31f0d4de87f44df24f4444a1116f9755fd87a76ad" dependencies = [ "auto_impl", "cfg-if", @@ -9578,9 +9577,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.37" +version = "0.38.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" +checksum = "aa260229e6538e52293eeb577aabd09945a09d6d9cc0fc550ed7529056c2e32a" dependencies = [ "bitflags 2.6.0", "errno", @@ -9724,9 +9723,9 @@ dependencies = [ [[package]] name = "scc" -version = "2.2.2" +version = "2.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2c1f7fc6deb21665a9060dfc7d271be784669295a31babdcd4dd2c79ae8cbfb" +checksum = "d8d25269dd3a12467afe2e510f69fb0b46b698e5afb296b59f2145259deaf8e8" dependencies = [ "sdd", ] @@ -9868,22 +9867,22 @@ checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" [[package]] name = "serde" -version = "1.0.210" +version = "1.0.213" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" +checksum = "3ea7893ff5e2466df8d720bb615088341b295f849602c6956047f8f80f0e9bc1" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.210" +version = "1.0.213" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" +checksum = "7e85ad2009c50b58e87caa8cd6dac16bdf511bbfb7af6c33df902396aa480fa5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -9918,7 +9917,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -9969,7 +9968,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -9992,7 +9991,7 @@ checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -10155,9 +10154,9 @@ checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" [[package]] name = "sketches-ddsketch" -version = "0.2.2" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85636c14b73d81f541e525f585c0a2109e6744e1565b5c1668e31c70c10ed65c" +checksum = "c1e9a774a6c28142ac54bb25d25562e6bcf957493a184f15ad4eebccb23e410a" [[package]] name = "slab" @@ -10278,7 +10277,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -10336,9 +10335,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.80" +version = "2.0.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6e185e337f816bc8da115b8afcb3324006ccc82eeaddf35113888d3bd8e44ac" +checksum = "5023162dfcd14ef8f32034d8bcd4cc5ddc61ef7a247c024a33e24e1f24d21b56" dependencies = [ "proc-macro2", "quote", @@ -10354,7 +10353,7 @@ dependencies = [ "paste", "proc-macro2", "quote", - "syn 
2.0.80", + "syn 2.0.85", ] [[package]] @@ -10380,7 +10379,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -10457,7 +10456,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -10481,22 +10480,22 @@ checksum = "a38c90d48152c236a3ab59271da4f4ae63d678c5d7ad6b7714d7cb9760be5e4b" [[package]] name = "thiserror" -version = "1.0.64" +version = "1.0.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" +checksum = "5d11abd9594d9b38965ef50805c5e469ca9cc6f197f883f717e0269a3057b3d5" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.64" +version = "1.0.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" +checksum = "ae71770322cbd277e69d762a16c444af02aa0575ac0d174f0b9562d3b37f8602" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -10649,9 +10648,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.40.0" +version = "1.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" +checksum = "145f3413504347a2be84393cc8a7d2fb4d863b375909ea59f2158261aa258bbb" dependencies = [ "backtrace", "bytes", @@ -10673,7 +10672,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -10874,7 +10873,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -11174,12 +11173,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "unsigned-varint" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6889a77d49f1f013504cec6bf97a2c730394adedaeb1deb5ea08949a50541105" - [[package]] name = "unsigned-varint" version = "0.8.0" @@ -11271,7 +11264,7 @@ checksum = "d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -11342,7 +11335,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", "wasm-bindgen-shared", ] @@ -11376,7 +11369,7 @@ checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11389,9 +11382,9 @@ checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" [[package]] name = "wasm-streams" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e072d4e72f700fb3443d8fe94a39315df013eef1104903cdb0a2abd322bbecd" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" dependencies = [ "futures-util", "js-sys", @@ -11532,7 +11525,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -11543,7 +11536,7 @@ checksum = 
"2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -11554,7 +11547,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -11565,7 +11558,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -11840,7 +11833,7 @@ checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", "synstructure", ] @@ -11862,7 +11855,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -11882,7 +11875,7 @@ checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", "synstructure", ] @@ -11903,7 +11896,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] @@ -11925,7 +11918,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.80", + "syn 2.0.85", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index d01ee01ce..c83d76318 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -527,11 +527,11 @@ url = "2.3" zstd = "0.13" # metrics -metrics = "0.23.0" +metrics = "0.24.0" metrics-derive = "0.1" -metrics-exporter-prometheus = { version = "0.15.0", default-features = false } +metrics-exporter-prometheus = { version = "0.16.0", default-features = false } metrics-process = "2.1.0" -metrics-util = { default-features = false, version = "0.17.0" } +metrics-util = { default-features = false, version = "0.18.0" } # proc-macros proc-macro2 = "1.0" From 380e237257e4b6005f2d4146bc73d0f4907a5769 Mon Sep 17 00:00:00 2001 From: Hoa Nguyen Date: Mon, 28 Oct 2024 20:48:32 +0700 Subject: [PATCH 310/425] refactor: replace receipt envelope encoded with trait (#11742) Co-authored-by: Tuan Tran Co-authored-by: Matthias Seitz Co-authored-by: Dan Cline <6798349+Rjected@users.noreply.github.com> --- crates/primitives/src/receipt.rs | 82 ++++++++++++++++++++++++++++---- crates/rpc/rpc/src/debug.rs | 2 +- 2 files changed, 73 insertions(+), 11 deletions(-) diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index 940b491e3..21443f482 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -5,7 +5,8 @@ use alloc::{vec, vec::Vec}; use alloy_consensus::constants::{ EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, }; -use alloy_primitives::{Bloom, Bytes, Log, B256}; +use alloy_eips::eip2718::Encodable2718; +use alloy_primitives::{Bloom, Log, B256}; use alloy_rlp::{length_of_length, Decodable, Encodable, RlpDecodable, RlpEncodable}; use bytes::{Buf, BufMut}; use core::{cmp::Ordering, ops::Deref}; @@ -204,14 +205,20 @@ impl<'a> arbitrary::Arbitrary<'a> for Receipt { } } -impl ReceiptWithBloom { - /// Returns the enveloped encoded receipt. 
- /// - /// See also [`ReceiptWithBloom::encode_enveloped`] - pub fn envelope_encoded(&self) -> Bytes { - let mut buf = Vec::new(); - self.encode_enveloped(&mut buf); - buf.into() +impl Encodable2718 for ReceiptWithBloom { + fn type_flag(&self) -> Option { + match self.receipt.tx_type { + TxType::Legacy => None, + tx_type => Some(tx_type as u8), + } + } + + fn encode_2718_len(&self) -> usize { + let encoder = self.as_encoder(); + match self.receipt.tx_type { + TxType::Legacy => encoder.receipt_length(), + _ => 1 + encoder.receipt_length(), // 1 byte for the type prefix + } } /// Encodes the receipt into its "raw" format. @@ -223,10 +230,18 @@ impl ReceiptWithBloom { /// of the receipt: /// - EIP-1559, 2930 and 4844 transactions: `tx-type || rlp([status, cumulativeGasUsed, /// logsBloom, logs])` - pub fn encode_enveloped(&self, out: &mut dyn bytes::BufMut) { + fn encode_2718(&self, out: &mut dyn BufMut) { self.encode_inner(out, false) } + fn encoded_2718(&self) -> Vec { + let mut out = vec![]; + self.encode_2718(&mut out); + out + } +} + +impl ReceiptWithBloom { /// Encode receipt with or without the header data. pub fn encode_inner(&self, out: &mut dyn BufMut, with_header: bool) { self.as_encoder().encode_inner(out, with_header) @@ -501,6 +516,7 @@ impl Encodable for ReceiptWithBloomEncoder<'_> { #[cfg(test)] mod tests { use super::*; + use crate::revm_primitives::Bytes; use alloy_primitives::{address, b256, bytes, hex_literal::hex}; #[test] @@ -661,4 +677,50 @@ mod tests { let (decoded, _) = Receipt::from_compact(&data[..], data.len()); assert_eq!(decoded, receipt); } + + #[test] + fn test_encode_2718_length() { + let receipt = ReceiptWithBloom { + receipt: Receipt { + tx_type: TxType::Eip1559, + success: true, + cumulative_gas_used: 21000, + logs: vec![], + #[cfg(feature = "optimism")] + deposit_nonce: None, + #[cfg(feature = "optimism")] + deposit_receipt_version: None, + }, + bloom: Bloom::default(), + }; + + let encoded = receipt.encoded_2718(); + assert_eq!( + encoded.len(), + receipt.encode_2718_len(), + "Encoded length should match the actual encoded data length" + ); + + // Test for legacy receipt as well + let legacy_receipt = ReceiptWithBloom { + receipt: Receipt { + tx_type: TxType::Legacy, + success: true, + cumulative_gas_used: 21000, + logs: vec![], + #[cfg(feature = "optimism")] + deposit_nonce: None, + #[cfg(feature = "optimism")] + deposit_receipt_version: None, + }, + bloom: Bloom::default(), + }; + + let legacy_encoded = legacy_receipt.encoded_2718(); + assert_eq!( + legacy_encoded.len(), + legacy_receipt.encode_2718_len(), + "Encoded length for legacy receipt should match the actual encoded data length" + ); + } } diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index dd1cd9739..6da03b046 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -940,7 +940,7 @@ where .to_rpc_result()? 
.unwrap_or_default() .into_iter() - .map(|receipt| receipt.with_bloom().envelope_encoded()) + .map(|receipt| receipt.with_bloom().encoded_2718().into()) .collect()) } From af5ae5a792762d2b02abc31a99807ed185c3b617 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Mon, 28 Oct 2024 13:55:31 +0000 Subject: [PATCH 311/425] perf(trie): reduce allocations in sparse trie rlp node calculation (#12092) --- crates/trie/sparse/src/trie.rs | 86 ++++++++++++++++++++++------------ 1 file changed, 57 insertions(+), 29 deletions(-) diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index 035eeaf73..11ca10079 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -558,7 +558,7 @@ impl RevealedSparseTrie { pub fn root(&mut self) -> B256 { // take the current prefix set. let mut prefix_set = std::mem::take(&mut self.prefix_set).freeze(); - let root_rlp = self.rlp_node(Nibbles::default(), &mut prefix_set); + let root_rlp = self.rlp_node_allocate(Nibbles::default(), &mut prefix_set); if let Some(root_hash) = root_rlp.as_hash() { root_hash } else { @@ -570,10 +570,12 @@ impl RevealedSparseTrie { /// depth. Root node has a level of 0. pub fn update_rlp_node_level(&mut self, depth: usize) { let mut prefix_set = self.prefix_set.clone().freeze(); + let mut buffers = RlpNodeBuffers::default(); let targets = self.get_changed_nodes_at_depth(&mut prefix_set, depth); for target in targets { - self.rlp_node(target, &mut prefix_set); + buffers.path_stack.push((target, Some(true))); + self.rlp_node(&mut prefix_set, &mut buffers); } } @@ -629,17 +631,13 @@ impl RevealedSparseTrie { targets } - fn rlp_node(&mut self, path: Nibbles, prefix_set: &mut PrefixSet) -> RlpNode { - // stack of paths we need rlp nodes for - let mut path_stack = Vec::from([(path, None)]); - // stack of rlp nodes - let mut rlp_node_stack = Vec::<(Nibbles, RlpNode)>::new(); - // reusable branch child path - let mut branch_child_buf = SmallVec::<[Nibbles; 16]>::new_const(); - // reusable branch value stack - let mut branch_value_stack_buf = SmallVec::<[RlpNode; 16]>::new_const(); - - 'main: while let Some((path, mut is_in_prefix_set)) = path_stack.pop() { + fn rlp_node_allocate(&mut self, path: Nibbles, prefix_set: &mut PrefixSet) -> RlpNode { + let mut buffers = RlpNodeBuffers::new_with_path(path); + self.rlp_node(prefix_set, &mut buffers) + } + + fn rlp_node(&mut self, prefix_set: &mut PrefixSet, buffers: &mut RlpNodeBuffers) -> RlpNode { + 'main: while let Some((path, mut is_in_prefix_set)) = buffers.path_stack.pop() { // Check if the path is in the prefix set. // First, check the cached value. If it's `None`, then check the prefix set, and update // the cached value. 
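Note on the hunks above: the refactor threads one set of reusable scratch buffers (`RlpNodeBuffers`) through the RLP walk instead of allocating fresh stacks on every call, which is what lets `update_rlp_node_level` reuse a single allocation across all of its targets. A minimal, self-contained sketch of that buffer-reuse pattern follows; the names (`ScratchBuffers`, `process_with`) are invented for illustration and are not reth APIs.

    // Illustrative sketch only: the worker borrows caller-owned scratch space
    // instead of allocating new Vecs on each invocation.
    #[derive(Default)]
    struct ScratchBuffers {
        /// Work items still to visit (analogous to a path stack).
        stack: Vec<u32>,
        /// Intermediate results for the current call.
        results: Vec<u32>,
    }

    fn process_with(buffers: &mut ScratchBuffers, start: u32) -> u32 {
        // Reset lengths but keep the capacity from previous calls.
        buffers.stack.clear();
        buffers.results.clear();
        buffers.stack.push(start);
        while let Some(item) = buffers.stack.pop() {
            if item > 0 {
                buffers.stack.push(item - 1);
            }
            buffers.results.push(item);
        }
        buffers.results.iter().sum()
    }

    fn main() {
        // One buffer set serves many calls; capacity is retained between them.
        let mut buffers = ScratchBuffers::default();
        for target in [3u32, 5, 7] {
            println!("{}", process_with(&mut buffers, target));
        }
    }

The same split shows up in the diff as `rlp_node_allocate` (allocate a fresh `RlpNodeBuffers` once per root computation) versus `rlp_node` (borrow whatever buffers the caller already owns).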
@@ -667,63 +665,68 @@ impl RevealedSparseTrie { child_path.extend_from_slice_unchecked(key); if let Some(hash) = hash.filter(|_| !prefix_set_contains(&path)) { RlpNode::word_rlp(&hash) - } else if rlp_node_stack.last().map_or(false, |e| e.0 == child_path) { - let (_, child) = rlp_node_stack.pop().unwrap(); + } else if buffers.rlp_node_stack.last().map_or(false, |e| e.0 == child_path) { + let (_, child) = buffers.rlp_node_stack.pop().unwrap(); self.rlp_buf.clear(); let rlp_node = ExtensionNodeRef::new(key, &child).rlp(&mut self.rlp_buf); *hash = rlp_node.as_hash(); rlp_node } else { // need to get rlp node for child first - path_stack.extend([(path, is_in_prefix_set), (child_path, None)]); + buffers.path_stack.extend([(path, is_in_prefix_set), (child_path, None)]); continue } } SparseNode::Branch { state_mask, hash } => { if let Some(hash) = hash.filter(|_| !prefix_set_contains(&path)) { - rlp_node_stack.push((path, RlpNode::word_rlp(&hash))); + buffers.rlp_node_stack.push((path, RlpNode::word_rlp(&hash))); continue } - branch_child_buf.clear(); + buffers.branch_child_buf.clear(); // Walk children in a reverse order from `f` to `0`, so we pop the `0` first // from the stack. for bit in CHILD_INDEX_RANGE.rev() { if state_mask.is_bit_set(bit) { let mut child = path.clone(); child.push_unchecked(bit); - branch_child_buf.push(child); + buffers.branch_child_buf.push(child); } } - branch_value_stack_buf.resize(branch_child_buf.len(), Default::default()); + buffers + .branch_value_stack_buf + .resize(buffers.branch_child_buf.len(), Default::default()); let mut added_children = false; - for (i, child_path) in branch_child_buf.iter().enumerate() { - if rlp_node_stack.last().map_or(false, |e| &e.0 == child_path) { - let (_, child) = rlp_node_stack.pop().unwrap(); + for (i, child_path) in buffers.branch_child_buf.iter().enumerate() { + if buffers.rlp_node_stack.last().map_or(false, |e| &e.0 == child_path) { + let (_, child) = buffers.rlp_node_stack.pop().unwrap(); // Insert children in the resulting buffer in a normal order, because // initially we iterated in reverse. - branch_value_stack_buf[branch_child_buf.len() - i - 1] = child; + buffers.branch_value_stack_buf + [buffers.branch_child_buf.len() - i - 1] = child; added_children = true; } else { debug_assert!(!added_children); - path_stack.push((path, is_in_prefix_set)); - path_stack.extend(branch_child_buf.drain(..).map(|p| (p, None))); + buffers.path_stack.push((path, is_in_prefix_set)); + buffers + .path_stack + .extend(buffers.branch_child_buf.drain(..).map(|p| (p, None))); continue 'main } } self.rlp_buf.clear(); - let rlp_node = BranchNodeRef::new(&branch_value_stack_buf, *state_mask) + let rlp_node = BranchNodeRef::new(&buffers.branch_value_stack_buf, *state_mask) .rlp(&mut self.rlp_buf); *hash = rlp_node.as_hash(); rlp_node } }; - rlp_node_stack.push((path, rlp_node)); + buffers.rlp_node_stack.push((path, rlp_node)); } - rlp_node_stack.pop().unwrap().1 + buffers.rlp_node_stack.pop().unwrap().1 } } @@ -803,6 +806,31 @@ struct RemovedSparseNode { unset_branch_nibble: Option, } +/// Collection of reusable buffers for [`RevealedSparseTrie::rlp_node`]. +#[derive(Debug, Default)] +struct RlpNodeBuffers { + /// Stack of paths we need rlp nodes for and whether the path is in the prefix set. 
+ path_stack: Vec<(Nibbles, Option)>, + /// Stack of rlp nodes + rlp_node_stack: Vec<(Nibbles, RlpNode)>, + /// Reusable branch child path + branch_child_buf: SmallVec<[Nibbles; 16]>, + /// Reusable branch value stack + branch_value_stack_buf: SmallVec<[RlpNode; 16]>, +} + +impl RlpNodeBuffers { + /// Creates a new instance of buffers with the given path on the stack. + fn new_with_path(path: Nibbles) -> Self { + Self { + path_stack: vec![(path, None)], + rlp_node_stack: Vec::new(), + branch_child_buf: SmallVec::<[Nibbles; 16]>::new_const(), + branch_value_stack_buf: SmallVec::<[RlpNode; 16]>::new_const(), + } + } +} + #[cfg(test)] mod tests { use std::collections::BTreeMap; From 3f4634ccbc4cbf64720627872ead3155259ce9cc Mon Sep 17 00:00:00 2001 From: 0xOsiris Date: Mon, 28 Oct 2024 07:37:36 -0700 Subject: [PATCH 312/425] chore: add version to PayloadBuilderAttributes::try_new (#12137) --- bin/reth/src/commands/debug_cmd/build_block.rs | 5 ++++- crates/consensus/beacon/src/engine/mod.rs | 3 ++- crates/engine/tree/src/tree/mod.rs | 3 ++- crates/ethereum/engine-primitives/src/payload.rs | 6 +++++- crates/optimism/payload/src/payload.rs | 6 +++++- crates/payload/primitives/src/traits.rs | 1 + examples/custom-engine-types/src/main.rs | 6 +++++- 7 files changed, 24 insertions(+), 6 deletions(-) diff --git a/bin/reth/src/commands/debug_cmd/build_block.rs b/bin/reth/src/commands/debug_cmd/build_block.rs index 455d8356a..272f107d3 100644 --- a/bin/reth/src/commands/debug_cmd/build_block.rs +++ b/bin/reth/src/commands/debug_cmd/build_block.rs @@ -22,7 +22,9 @@ use reth_errors::RethResult; use reth_evm::execute::{BlockExecutorProvider, Executor}; use reth_execution_types::ExecutionOutcome; use reth_fs_util as fs; -use reth_node_api::{NodeTypesWithDB, NodeTypesWithEngine, PayloadBuilderAttributes}; +use reth_node_api::{ + EngineApiMessageVersion, NodeTypesWithDB, NodeTypesWithEngine, PayloadBuilderAttributes, +}; use reth_node_ethereum::{EthEvmConfig, EthExecutorProvider}; use reth_payload_builder::database::CachedReads; use reth_primitives::{ @@ -227,6 +229,7 @@ impl> Command { reth_payload_builder::EthPayloadBuilderAttributes::try_new( best_block.hash(), payload_attrs, + EngineApiMessageVersion::default() as u8, )?, ); diff --git a/crates/consensus/beacon/src/engine/mod.rs b/crates/consensus/beacon/src/engine/mod.rs index 770821de7..a00f507db 100644 --- a/crates/consensus/beacon/src/engine/mod.rs +++ b/crates/consensus/beacon/src/engine/mod.rs @@ -1165,7 +1165,7 @@ where attrs: ::PayloadAttributes, head: Header, state: ForkchoiceState, - _version: EngineApiMessageVersion, + version: EngineApiMessageVersion, ) -> OnForkChoiceUpdated { // 7. Client software MUST ensure that payloadAttributes.timestamp is greater than timestamp // of a block referenced by forkchoiceState.headBlockHash. If this condition isn't held @@ -1183,6 +1183,7 @@ where match <::PayloadBuilderAttributes as PayloadBuilderAttributes>::try_new( state.head_block_hash, attrs, + version as u8 ) { Ok(attributes) => { // send the payload to the builder and return the receiver for the pending payload diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index dd2f67916..bc070d873 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -2492,7 +2492,7 @@ where attrs: T::PayloadAttributes, head: &Header, state: ForkchoiceState, - _version: EngineApiMessageVersion, + version: EngineApiMessageVersion, ) -> OnForkChoiceUpdated { // 7. 
Client software MUST ensure that payloadAttributes.timestamp is greater than timestamp // of a block referenced by forkchoiceState.headBlockHash. If this condition isn't held @@ -2510,6 +2510,7 @@ where match ::try_new( state.head_block_hash, attrs, + version as u8, ) { Ok(attributes) => { // send the payload to the builder and return the receiver for the pending payload diff --git a/crates/ethereum/engine-primitives/src/payload.rs b/crates/ethereum/engine-primitives/src/payload.rs index 420352cf2..2d162ef15 100644 --- a/crates/ethereum/engine-primitives/src/payload.rs +++ b/crates/ethereum/engine-primitives/src/payload.rs @@ -237,7 +237,11 @@ impl PayloadBuilderAttributes for EthPayloadBuilderAttributes { /// Creates a new payload builder for the given parent block and the attributes. /// /// Derives the unique [`PayloadId`] for the given parent and attributes - fn try_new(parent: B256, attributes: PayloadAttributes) -> Result { + fn try_new( + parent: B256, + attributes: PayloadAttributes, + _version: u8, + ) -> Result { Ok(Self::new(parent, attributes)) } diff --git a/crates/optimism/payload/src/payload.rs b/crates/optimism/payload/src/payload.rs index 98b0e41b0..f46891fdc 100644 --- a/crates/optimism/payload/src/payload.rs +++ b/crates/optimism/payload/src/payload.rs @@ -43,7 +43,11 @@ impl PayloadBuilderAttributes for OptimismPayloadBuilderAttributes { /// Creates a new payload builder for the given parent block and the attributes. /// /// Derives the unique [`PayloadId`] for the given parent and attributes - fn try_new(parent: B256, attributes: OpPayloadAttributes) -> Result { + fn try_new( + parent: B256, + attributes: OpPayloadAttributes, + _version: u8, + ) -> Result { let id = payload_id_optimism(&parent, &attributes); let transactions = attributes diff --git a/crates/payload/primitives/src/traits.rs b/crates/payload/primitives/src/traits.rs index f6a043755..a78dc8c13 100644 --- a/crates/payload/primitives/src/traits.rs +++ b/crates/payload/primitives/src/traits.rs @@ -88,6 +88,7 @@ pub trait PayloadBuilderAttributes: Send + Sync + std::fmt::Debug { fn try_new( parent: B256, rpc_payload_attributes: Self::RpcPayloadAttributes, + version: u8, ) -> Result where Self: Sized; diff --git a/examples/custom-engine-types/src/main.rs b/examples/custom-engine-types/src/main.rs index 46a7d7d9a..0fa2b1658 100644 --- a/examples/custom-engine-types/src/main.rs +++ b/examples/custom-engine-types/src/main.rs @@ -110,7 +110,11 @@ impl PayloadBuilderAttributes for CustomPayloadBuilderAttributes { type RpcPayloadAttributes = CustomPayloadAttributes; type Error = Infallible; - fn try_new(parent: B256, attributes: CustomPayloadAttributes) -> Result { + fn try_new( + parent: B256, + attributes: CustomPayloadAttributes, + _version: u8, + ) -> Result { Ok(Self(EthPayloadBuilderAttributes::new(parent, attributes.inner))) } From 12762775686c6429d13107b69a2708acdcece21f Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Mon, 28 Oct 2024 15:57:58 +0000 Subject: [PATCH 313/425] test(trie): use proptest to generate random values (#12140) --- crates/trie/sparse/src/trie.rs | 126 ++++++++++++++++++--------------- 1 file changed, 69 insertions(+), 57 deletions(-) diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index 11ca10079..9db1dff53 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -836,13 +836,12 @@ mod tests { use std::collections::BTreeMap; use super::*; - use alloy_primitives::U256; + use alloy_primitives::{map::HashSet, U256}; use 
assert_matches::assert_matches; use itertools::Itertools; use prop::sample::SizeRange; use proptest::prelude::*; use rand::seq::IteratorRandom; - use reth_testing_utils::generators; use reth_trie::{BranchNode, ExtensionNode, LeafNode}; use reth_trie_common::{ proof::{ProofNodes, ProofRetainer}, @@ -1304,6 +1303,7 @@ mod tests { ); } + #[allow(clippy::type_complexity)] #[test] fn sparse_trie_fuzz() { // Having only the first 3 nibbles set, we narrow down the range of keys @@ -1311,63 +1311,51 @@ mod tests { // to test the sparse trie updates. const KEY_NIBBLES_LEN: usize = 3; - fn test(updates: I) - where - I: IntoIterator, - T: IntoIterator)> + Clone, - { - let mut rng = generators::rng(); + fn test(updates: Vec<(HashMap>, HashSet)>) { + { + let mut state = BTreeMap::default(); + let mut sparse = RevealedSparseTrie::default(); - let mut state = BTreeMap::default(); - let mut sparse = RevealedSparseTrie::default(); + for (update, keys_to_delete) in updates { + // Insert state updates into the sparse trie and calculate the root + for (key, value) in update.clone() { + sparse.update_leaf(key, value).unwrap(); + } + let sparse_root = sparse.root(); + + // Insert state updates into the hash builder and calculate the root + state.extend(update); + let (hash_builder_root, hash_builder_proof_nodes) = + hash_builder_root_with_proofs( + state.clone(), + state.keys().cloned().collect::>(), + ); - for update in updates { - let mut count = 0; - // Insert state updates into the sparse trie and calculate the root - for (key, value) in update.clone() { - sparse.update_leaf(key, value).unwrap(); - count += 1; - } - let keys_to_delete_len = count / 2; - let sparse_root = sparse.root(); - - // Insert state updates into the hash builder and calculate the root - state.extend(update); - let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( - state.clone(), - state.keys().cloned().collect::>(), - ); - - // Assert that the sparse trie root matches the hash builder root - assert_eq!(sparse_root, hash_builder_root); - // Assert that the sparse trie nodes match the hash builder proof nodes - assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); - - // Delete some keys from both the hash builder and the sparse trie and check - // that the sparse trie root still matches the hash builder root - - let keys_to_delete = state - .keys() - .choose_multiple(&mut rng, keys_to_delete_len) - .into_iter() - .cloned() - .collect::>(); - for key in keys_to_delete { - state.remove(&key).unwrap(); - sparse.remove_leaf(&key).unwrap(); - } + // Assert that the sparse trie root matches the hash builder root + assert_eq!(sparse_root, hash_builder_root); + // Assert that the sparse trie nodes match the hash builder proof nodes + assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); + + // Delete some keys from both the hash builder and the sparse trie and check + // that the sparse trie root still matches the hash builder root + for key in keys_to_delete { + state.remove(&key).unwrap(); + sparse.remove_leaf(&key).unwrap(); + } - let sparse_root = sparse.root(); + let sparse_root = sparse.root(); - let (hash_builder_root, hash_builder_proof_nodes) = hash_builder_root_with_proofs( - state.clone(), - state.keys().cloned().collect::>(), - ); + let (hash_builder_root, hash_builder_proof_nodes) = + hash_builder_root_with_proofs( + state.clone(), + state.keys().cloned().collect::>(), + ); - // Assert that the sparse trie root matches the hash builder root - assert_eq!(sparse_root, 
hash_builder_root); - // Assert that the sparse trie nodes match the hash builder proof nodes - assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); + // Assert that the sparse trie root matches the hash builder root + assert_eq!(sparse_root, hash_builder_root); + // Assert that the sparse trie nodes match the hash builder proof nodes + assert_eq_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes); + } } } @@ -1379,17 +1367,41 @@ mod tests { base } + fn transform_updates( + updates: Vec>>, + mut rng: impl Rng, + ) -> Vec<(HashMap>, HashSet)> { + let mut keys = HashSet::new(); + updates + .into_iter() + .map(|update| { + keys.extend(update.keys().cloned()); + + let keys_to_delete_len = update.len() / 2; + let keys_to_delete = (0..keys_to_delete_len) + .map(|_| { + let key = keys.iter().choose(&mut rng).unwrap().clone(); + keys.take(&key).unwrap() + }) + .collect(); + + (update, keys_to_delete) + }) + .collect::>() + } + proptest!(ProptestConfig::with_cases(10), |( updates in proptest::collection::vec( proptest::collection::hash_map( any_with::(SizeRange::new(KEY_NIBBLES_LEN..=KEY_NIBBLES_LEN)).prop_map(pad_nibbles), any::>(), 1..100, - ), + ).prop_map(HashMap::from_iter), 1..100, - ) + ).prop_perturb(transform_updates) )| { - test(updates) }); + test(updates) + }); } /// We have three leaves that share the same prefix: 0x00, 0x01 and 0x02. Hash builder trie has From b36b021aa27ee8fa931e1833d39806c6773ac63d Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Mon, 28 Oct 2024 23:59:26 +0800 Subject: [PATCH 314/425] chore(rpc): define trait `RpcNodeCoreExt` and replace `LoadBlock::cache` (#12141) --- crates/optimism/rpc/src/eth/block.rs | 8 +------- crates/optimism/rpc/src/eth/mod.rs | 13 +++++++++++-- crates/rpc/rpc-eth-api/src/helpers/block.rs | 10 ++-------- crates/rpc/rpc-eth-api/src/lib.rs | 2 +- crates/rpc/rpc-eth-api/src/node.rs | 8 ++++++++ crates/rpc/rpc/src/eth/core.rs | 12 ++++++++++++ crates/rpc/rpc/src/eth/helpers/block.rs | 6 +----- 7 files changed, 36 insertions(+), 23 deletions(-) diff --git a/crates/optimism/rpc/src/eth/block.rs b/crates/optimism/rpc/src/eth/block.rs index ed31a7509..85f36570f 100644 --- a/crates/optimism/rpc/src/eth/block.rs +++ b/crates/optimism/rpc/src/eth/block.rs @@ -4,7 +4,6 @@ use alloy_rpc_types::BlockId; use op_alloy_network::Network; use op_alloy_rpc_types::OpTransactionReceipt; use reth_chainspec::ChainSpecProvider; -use reth_node_api::FullNodeComponents; use reth_optimism_chainspec::OpChainSpec; use reth_primitives::TransactionMeta; use reth_provider::HeaderProvider; @@ -12,7 +11,6 @@ use reth_rpc_eth_api::{ helpers::{EthBlocks, LoadBlock, LoadPendingBlock, LoadReceipt, SpawnBlocking}, RpcNodeCore, RpcReceipt, }; -use reth_rpc_eth_types::EthStateCache; use crate::{OpEthApi, OpEthApiError, OpReceiptBuilder}; @@ -80,10 +78,6 @@ where impl LoadBlock for OpEthApi where Self: LoadPendingBlock + SpawnBlocking, - N: FullNodeComponents, + N: RpcNodeCore, { - #[inline] - fn cache(&self) -> &EthStateCache { - self.inner.cache() - } } diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index 9b04e1c73..b69b5ef28 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -30,7 +30,7 @@ use reth_rpc_eth_api::{ AddDevSigners, EthApiSpec, EthFees, EthSigner, EthState, LoadBlock, LoadFee, LoadState, SpawnBlocking, Trace, }, - EthApiTypes, RpcNodeCore, + EthApiTypes, RpcNodeCore, RpcNodeCoreExt, }; use reth_rpc_eth_types::{EthStateCache, FeeHistoryCache, GasPriceOracle}; use 
reth_tasks::{ @@ -116,7 +116,6 @@ where impl RpcNodeCore for OpEthApi where - Self: Clone, N: RpcNodeCore, { type Provider = N::Provider; @@ -141,6 +140,16 @@ where } } +impl RpcNodeCoreExt for OpEthApi +where + N: RpcNodeCore, +{ + #[inline] + fn cache(&self) -> &EthStateCache { + self.inner.cache() + } +} + impl EthApiSpec for OpEthApi where N: RpcNodeCore< diff --git a/crates/rpc/rpc-eth-api/src/helpers/block.rs b/crates/rpc/rpc-eth-api/src/helpers/block.rs index 217e84a47..c777c64d4 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/block.rs @@ -6,10 +6,9 @@ use alloy_rpc_types::{Header, Index}; use futures::Future; use reth_primitives::{BlockId, Receipt, SealedBlock, SealedBlockWithSenders}; use reth_provider::{BlockIdReader, BlockReader, BlockReaderIdExt, HeaderProvider}; -use reth_rpc_eth_types::EthStateCache; use reth_rpc_types_compat::block::{from_block, uncle_block_from_header}; -use crate::{FromEthApiError, FullEthApiTypes, RpcBlock, RpcReceipt}; +use crate::{node::RpcNodeCoreExt, FromEthApiError, FullEthApiTypes, RpcBlock, RpcReceipt}; use super::{LoadPendingBlock, LoadReceipt, SpawnBlocking}; @@ -196,12 +195,7 @@ pub trait EthBlocks: LoadBlock { /// Loads a block from database. /// /// Behaviour shared by several `eth_` RPC methods, not exclusive to `eth_` blocks RPC methods. -pub trait LoadBlock: LoadPendingBlock + SpawnBlocking { - /// Returns a handle for reading data from memory. - /// - /// Data access in default (L1) trait method implementations. - fn cache(&self) -> &EthStateCache; - +pub trait LoadBlock: LoadPendingBlock + SpawnBlocking + RpcNodeCoreExt { /// Returns the block object for the given block id. fn block_with_senders( &self, diff --git a/crates/rpc/rpc-eth-api/src/lib.rs b/crates/rpc/rpc-eth-api/src/lib.rs index bc46d526c..fa9737f84 100644 --- a/crates/rpc/rpc-eth-api/src/lib.rs +++ b/crates/rpc/rpc-eth-api/src/lib.rs @@ -26,7 +26,7 @@ pub use bundle::{EthBundleApiServer, EthCallBundleApiServer}; pub use core::{EthApiServer, FullEthApiServer}; pub use filter::EthFilterApiServer; pub use helpers::error::{AsEthApiError, FromEthApiError, FromEvmError, IntoEthApiError}; -pub use node::RpcNodeCore; +pub use node::{RpcNodeCore, RpcNodeCoreExt}; pub use pubsub::EthPubSubApiServer; pub use types::{EthApiTypes, FullEthApiTypes, RpcBlock, RpcReceipt, RpcTransaction}; diff --git a/crates/rpc/rpc-eth-api/src/node.rs b/crates/rpc/rpc-eth-api/src/node.rs index 950271dfc..463f508f7 100644 --- a/crates/rpc/rpc-eth-api/src/node.rs +++ b/crates/rpc/rpc-eth-api/src/node.rs @@ -1,6 +1,7 @@ //! Helper trait for interfacing with [`FullNodeComponents`]. use reth_node_api::FullNodeComponents; +use reth_rpc_eth_types::EthStateCache; /// Helper trait to relax trait bounds on [`FullNodeComponents`]. /// @@ -56,3 +57,10 @@ where FullNodeComponents::provider(self) } } + +/// Additional components, asides the core node components, needed to run `eth_` namespace API +/// server. +pub trait RpcNodeCoreExt: RpcNodeCore { + /// Returns handle to RPC cache service. 
+ fn cache(&self) -> &EthStateCache; +} diff --git a/crates/rpc/rpc/src/eth/core.rs b/crates/rpc/rpc/src/eth/core.rs index 3fca76e8b..339f2200c 100644 --- a/crates/rpc/rpc/src/eth/core.rs +++ b/crates/rpc/rpc/src/eth/core.rs @@ -10,6 +10,7 @@ use reth_primitives::BlockNumberOrTag; use reth_provider::{BlockReaderIdExt, CanonStateSubscriptions, ChainSpecProvider}; use reth_rpc_eth_api::{ helpers::{EthSigner, SpawnBlocking}, + node::RpcNodeCoreExt, EthApiTypes, RpcNodeCore, }; use reth_rpc_eth_types::{ @@ -169,6 +170,17 @@ where } } +impl RpcNodeCoreExt + for EthApi +where + Self: RpcNodeCore, +{ + #[inline] + fn cache(&self) -> &EthStateCache { + self.inner.cache() + } +} + impl std::fmt::Debug for EthApi { diff --git a/crates/rpc/rpc/src/eth/helpers/block.rs b/crates/rpc/rpc/src/eth/helpers/block.rs index a869cbd54..d5341d0b2 100644 --- a/crates/rpc/rpc/src/eth/helpers/block.rs +++ b/crates/rpc/rpc/src/eth/helpers/block.rs @@ -8,7 +8,7 @@ use reth_rpc_eth_api::{ helpers::{EthBlocks, LoadBlock, LoadPendingBlock, LoadReceipt, SpawnBlocking}, RpcReceipt, }; -use reth_rpc_eth_types::{EthApiError, EthStateCache, ReceiptBuilder}; +use reth_rpc_eth_types::{EthApiError, ReceiptBuilder}; use crate::EthApi; @@ -68,8 +68,4 @@ where Self: LoadPendingBlock + SpawnBlocking, Provider: BlockReaderIdExt, { - #[inline] - fn cache(&self) -> &EthStateCache { - self.inner.cache() - } } From 06d73eec8aa4068e039b2c3ec28fe0b7682fd9e2 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Tue, 29 Oct 2024 00:35:26 +0800 Subject: [PATCH 315/425] chore(rpc): inline trait methods of `RpcNodeCore` impl (#12144) --- crates/optimism/rpc/src/eth/mod.rs | 4 ++++ crates/rpc/rpc-eth-api/src/node.rs | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index b69b5ef28..ae463d715 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -123,18 +123,22 @@ where type Network = ::Network; type Evm = ::Evm; + #[inline] fn pool(&self) -> &Self::Pool { self.inner.pool() } + #[inline] fn evm_config(&self) -> &Self::Evm { self.inner.evm_config() } + #[inline] fn network(&self) -> &Self::Network { self.inner.network() } + #[inline] fn provider(&self) -> &Self::Provider { self.inner.provider() } diff --git a/crates/rpc/rpc-eth-api/src/node.rs b/crates/rpc/rpc-eth-api/src/node.rs index 463f508f7..851b26b72 100644 --- a/crates/rpc/rpc-eth-api/src/node.rs +++ b/crates/rpc/rpc-eth-api/src/node.rs @@ -41,18 +41,22 @@ where type Network = ::Network; type Evm = ::Evm; + #[inline] fn pool(&self) -> &Self::Pool { FullNodeComponents::pool(self) } + #[inline] fn evm_config(&self) -> &Self::Evm { FullNodeComponents::evm_config(self) } + #[inline] fn network(&self) -> &Self::Network { FullNodeComponents::network(self) } + #[inline] fn provider(&self) -> &Self::Provider { FullNodeComponents::provider(self) } From 0733da9e12d1daa8febbc6e9fc4131c2acfe6798 Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Tue, 29 Oct 2024 00:37:11 +0800 Subject: [PATCH 316/425] chore(rpc): relax `FullNodeComponents` trait bound on `OpEthApi` to `RpcNodeCore` (#12142) --- crates/optimism/rpc/src/eth/call.rs | 2 +- crates/optimism/rpc/src/eth/mod.rs | 11 +++++------ 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/crates/optimism/rpc/src/eth/call.rs b/crates/optimism/rpc/src/eth/call.rs index c8f8200bc..9ddf7b385 100644 --- a/crates/optimism/rpc/src/eth/call.rs +++ b/crates/optimism/rpc/src/eth/call.rs @@ -22,9 +22,9 @@ where impl Call for OpEthApi where - N: 
RpcNodeCore, Self: LoadState> + SpawnBlocking, Self::Error: From, + N: RpcNodeCore, { #[inline] fn call_gas_limit(&self) -> u64 { diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index ae463d715..83e79078c 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -17,7 +17,6 @@ use op_alloy_network::Optimism; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_evm::ConfigureEvm; use reth_network_api::NetworkInfo; -use reth_node_api::{FullNodeComponents, NodeTypes}; use reth_node_builder::EthApiBuilderCtx; use reth_primitives::Header; use reth_provider::{ @@ -177,7 +176,7 @@ where impl SpawnBlocking for OpEthApi where Self: Send + Sync + Clone + 'static, - N: FullNodeComponents, + N: RpcNodeCore, { #[inline] fn io_task_spawner(&self) -> impl TaskSpawner { @@ -237,7 +236,7 @@ where impl EthState for OpEthApi where Self: LoadState + SpawnBlocking, - N: FullNodeComponents, + N: RpcNodeCore, { #[inline] fn max_proof_window(&self) -> u64 { @@ -248,7 +247,7 @@ where impl EthFees for OpEthApi where Self: LoadFee, - N: FullNodeComponents, + N: RpcNodeCore, { } @@ -261,10 +260,10 @@ where impl AddDevSigners for OpEthApi where - N: FullNodeComponents>, + N: RpcNodeCore, { fn with_dev_accounts(&self) { - *self.signers().write() = DevSigner::random_signers(20) + *self.inner.signers().write() = DevSigner::random_signers(20) } } From 473026f40a665cbdfb1125d50012d6403601f2af Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Tue, 29 Oct 2024 00:51:45 +0800 Subject: [PATCH 317/425] chore(rpc): remove redundant `LoadFee::cache` (#12146) --- crates/optimism/rpc/src/eth/mod.rs | 5 ----- crates/rpc/rpc-eth-api/src/helpers/fee.rs | 11 +++-------- crates/rpc/rpc/src/eth/helpers/fees.rs | 7 +------ 3 files changed, 4 insertions(+), 19 deletions(-) diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index 83e79078c..52acf4240 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -204,11 +204,6 @@ where + StateProviderFactory, >, { - #[inline] - fn cache(&self) -> &EthStateCache { - self.inner.cache() - } - #[inline] fn gas_oracle(&self) -> &GasPriceOracle { self.inner.gas_oracle() diff --git a/crates/rpc/rpc-eth-api/src/helpers/fee.rs b/crates/rpc/rpc-eth-api/src/helpers/fee.rs index dcde2214a..18d2d6311 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/fee.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/fee.rs @@ -6,8 +6,8 @@ use futures::Future; use reth_chainspec::EthChainSpec; use reth_provider::{BlockIdReader, ChainSpecProvider, HeaderProvider}; use reth_rpc_eth_types::{ - fee_history::calculate_reward_percentiles_for_block, EthApiError, EthStateCache, - FeeHistoryCache, FeeHistoryEntry, GasPriceOracle, RpcInvalidTransactionError, + fee_history::calculate_reward_percentiles_for_block, EthApiError, FeeHistoryCache, + FeeHistoryEntry, GasPriceOracle, RpcInvalidTransactionError, }; use tracing::debug; @@ -172,7 +172,7 @@ pub trait EthFees: LoadFee { // Percentiles were specified, so we need to collect reward percentile ino if let Some(percentiles) = &reward_percentiles { - let (block, receipts) = LoadFee::cache(self) + let (block, receipts) = self.cache() .get_block_and_receipts(header.hash()) .await .map_err(Self::Error::from_eth_err)? @@ -242,11 +242,6 @@ pub trait EthFees: LoadFee { /// /// Behaviour shared by several `eth_` RPC methods, not exclusive to `eth_` fees RPC methods. pub trait LoadFee: LoadBlock { - /// Returns a handle for reading data from memory. 
- /// - /// Data access in default (L1) trait method implementations. - fn cache(&self) -> &EthStateCache; - /// Returns a handle for reading gas price. /// /// Data access in default (L1) trait method implementations. diff --git a/crates/rpc/rpc/src/eth/helpers/fees.rs b/crates/rpc/rpc/src/eth/helpers/fees.rs index 2c5db5bac..e1a17ef64 100644 --- a/crates/rpc/rpc/src/eth/helpers/fees.rs +++ b/crates/rpc/rpc/src/eth/helpers/fees.rs @@ -3,7 +3,7 @@ use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_provider::{BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, StateProviderFactory}; use reth_rpc_eth_api::helpers::{EthFees, LoadBlock, LoadFee}; -use reth_rpc_eth_types::{EthStateCache, FeeHistoryCache, GasPriceOracle}; +use reth_rpc_eth_types::{FeeHistoryCache, GasPriceOracle}; use crate::EthApi; @@ -20,11 +20,6 @@ where + ChainSpecProvider + StateProviderFactory, { - #[inline] - fn cache(&self) -> &EthStateCache { - self.inner.cache() - } - #[inline] fn gas_oracle(&self) -> &GasPriceOracle { self.inner.gas_oracle() From 3d62bfde14c469f3fea089b90dc0559ef4e096ae Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Tue, 29 Oct 2024 02:09:58 +0800 Subject: [PATCH 318/425] chore(rpc): add super trait `RpcNodeCoreExt` to `LoadReceipt` (#12149) --- crates/optimism/rpc/src/eth/receipt.rs | 10 +++------- crates/rpc/rpc-eth-api/src/helpers/block.rs | 3 ++- crates/rpc/rpc-eth-api/src/helpers/receipt.rs | 10 ++-------- crates/rpc/rpc/src/eth/helpers/receipt.rs | 11 +++-------- 4 files changed, 10 insertions(+), 24 deletions(-) diff --git a/crates/optimism/rpc/src/eth/receipt.rs b/crates/optimism/rpc/src/eth/receipt.rs index f2f09cdc7..3f2a81573 100644 --- a/crates/optimism/rpc/src/eth/receipt.rs +++ b/crates/optimism/rpc/src/eth/receipt.rs @@ -11,7 +11,7 @@ use reth_optimism_forks::OptimismHardforks; use reth_primitives::{Receipt, TransactionMeta, TransactionSigned, TxType}; use reth_provider::ChainSpecProvider; use reth_rpc_eth_api::{helpers::LoadReceipt, FromEthApiError, RpcReceipt}; -use reth_rpc_eth_types::{EthApiError, EthStateCache, ReceiptBuilder}; +use reth_rpc_eth_types::{EthApiError, ReceiptBuilder}; use crate::{OpEthApi, OpEthApiError}; @@ -20,18 +20,14 @@ where Self: Send + Sync, N: FullNodeComponents>, { - #[inline] - fn cache(&self) -> &EthStateCache { - self.inner.cache() - } - async fn build_transaction_receipt( &self, tx: TransactionSigned, meta: TransactionMeta, receipt: Receipt, ) -> Result, Self::Error> { - let (block, receipts) = LoadReceipt::cache(self) + let (block, receipts) = self + .cache() .get_block_and_receipts(meta.block_hash) .await .map_err(Self::Error::from_eth_err)? diff --git a/crates/rpc/rpc-eth-api/src/helpers/block.rs b/crates/rpc/rpc-eth-api/src/helpers/block.rs index c777c64d4..fa397db35 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/block.rs @@ -145,7 +145,8 @@ pub trait EthBlocks: LoadBlock { if let Some(block_hash) = self.provider().block_hash_for_id(block_id).map_err(Self::Error::from_eth_err)? 
{ - return LoadReceipt::cache(self) + return self + .cache() .get_block_and_receipts(block_hash) .await .map_err(Self::Error::from_eth_err) diff --git a/crates/rpc/rpc-eth-api/src/helpers/receipt.rs b/crates/rpc/rpc-eth-api/src/helpers/receipt.rs index eae99bbe4..48394f1cd 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/receipt.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/receipt.rs @@ -3,19 +3,13 @@ use futures::Future; use reth_primitives::{Receipt, TransactionMeta, TransactionSigned}; -use reth_rpc_eth_types::EthStateCache; -use crate::{EthApiTypes, RpcReceipt}; +use crate::{EthApiTypes, RpcNodeCoreExt, RpcReceipt}; /// Assembles transaction receipt data w.r.t to network. /// /// Behaviour shared by several `eth_` RPC methods, not exclusive to `eth_` receipts RPC methods. -pub trait LoadReceipt: EthApiTypes + Send + Sync { - /// Returns a handle for reading data from memory. - /// - /// Data access in default (L1) trait method implementations. - fn cache(&self) -> &EthStateCache; - +pub trait LoadReceipt: EthApiTypes + RpcNodeCoreExt + Send + Sync { /// Helper method for `eth_getBlockReceipts` and `eth_getTransactionReceipt`. fn build_transaction_receipt( &self, diff --git a/crates/rpc/rpc/src/eth/helpers/receipt.rs b/crates/rpc/rpc/src/eth/helpers/receipt.rs index 570ec4fa3..d0cb5867e 100644 --- a/crates/rpc/rpc/src/eth/helpers/receipt.rs +++ b/crates/rpc/rpc/src/eth/helpers/receipt.rs @@ -2,20 +2,15 @@ use alloy_serde::WithOtherFields; use reth_primitives::{Receipt, TransactionMeta, TransactionSigned}; -use reth_rpc_eth_api::{helpers::LoadReceipt, FromEthApiError, RpcReceipt}; -use reth_rpc_eth_types::{EthApiError, EthStateCache, ReceiptBuilder}; +use reth_rpc_eth_api::{helpers::LoadReceipt, FromEthApiError, RpcNodeCoreExt, RpcReceipt}; +use reth_rpc_eth_types::{EthApiError, ReceiptBuilder}; use crate::EthApi; impl LoadReceipt for EthApi where - Self: Send + Sync, + Self: RpcNodeCoreExt, { - #[inline] - fn cache(&self) -> &EthStateCache { - self.inner.cache() - } - async fn build_transaction_receipt( &self, tx: TransactionSigned, From 28f8c47dc0642e0bbd5f2abd162e003c22349eec Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Tue, 29 Oct 2024 02:10:18 +0800 Subject: [PATCH 319/425] chore(rpc): remove redundant `LoadTransaction::cache` (#12148) --- crates/optimism/rpc/src/eth/transaction.rs | 6 +----- crates/rpc/rpc-eth-api/src/helpers/transaction.rs | 14 ++++++-------- crates/rpc/rpc/src/eth/helpers/transaction.rs | 5 ----- 3 files changed, 7 insertions(+), 18 deletions(-) diff --git a/crates/optimism/rpc/src/eth/transaction.rs b/crates/optimism/rpc/src/eth/transaction.rs index 5135b13a2..3345ac5d4 100644 --- a/crates/optimism/rpc/src/eth/transaction.rs +++ b/crates/optimism/rpc/src/eth/transaction.rs @@ -12,7 +12,7 @@ use reth_rpc_eth_api::{ helpers::{EthSigner, EthTransactions, LoadTransaction, SpawnBlocking}, FromEthApiError, FullEthApiTypes, RpcNodeCore, TransactionCompat, }; -use reth_rpc_eth_types::{utils::recover_raw_transaction, EthStateCache}; +use reth_rpc_eth_types::utils::recover_raw_transaction; use reth_transaction_pool::{PoolTransaction, TransactionOrigin, TransactionPool}; use crate::{OpEthApi, SequencerClient}; @@ -59,10 +59,6 @@ where Self: SpawnBlocking + FullEthApiTypes, N: RpcNodeCore, { - #[inline] - fn cache(&self) -> &EthStateCache { - self.inner.cache() - } } impl OpEthApi diff --git a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs index af647fedf..3c526cbb0 100644 --- 
a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs @@ -15,14 +15,15 @@ use reth_primitives::{ use reth_provider::{BlockNumReader, BlockReaderIdExt, ReceiptProvider, TransactionsProvider}; use reth_rpc_eth_types::{ utils::{binary_search, recover_raw_transaction}, - EthApiError, EthStateCache, SignError, TransactionSource, + EthApiError, SignError, TransactionSource, }; use reth_rpc_types_compat::transaction::{from_recovered, from_recovered_with_block_context}; use reth_transaction_pool::{PoolTransaction, TransactionOrigin, TransactionPool}; use std::sync::Arc; use crate::{ - FromEthApiError, FullEthApiTypes, IntoEthApiError, RpcNodeCore, RpcReceipt, RpcTransaction, + FromEthApiError, FullEthApiTypes, IntoEthApiError, RpcNodeCore, RpcNodeCoreExt, RpcReceipt, + RpcTransaction, }; use super::{ @@ -461,13 +462,10 @@ pub trait EthTransactions: LoadTransaction { /// Behaviour shared by several `eth_` RPC methods, not exclusive to `eth_` transactions RPC /// methods. pub trait LoadTransaction: - SpawnBlocking + FullEthApiTypes + RpcNodeCore + SpawnBlocking + + FullEthApiTypes + + RpcNodeCoreExt { - /// Returns a handle for reading data from memory. - /// - /// Data access in default (L1) trait method implementations. - fn cache(&self) -> &EthStateCache; - /// Returns the transaction by hash. /// /// Checks the pool and state. diff --git a/crates/rpc/rpc/src/eth/helpers/transaction.rs b/crates/rpc/rpc/src/eth/helpers/transaction.rs index 623db35e5..8ac0785b2 100644 --- a/crates/rpc/rpc/src/eth/helpers/transaction.rs +++ b/crates/rpc/rpc/src/eth/helpers/transaction.rs @@ -5,7 +5,6 @@ use reth_rpc_eth_api::{ helpers::{EthSigner, EthTransactions, LoadTransaction, SpawnBlocking}, FullEthApiTypes, RpcNodeCore, }; -use reth_rpc_eth_types::EthStateCache; use reth_transaction_pool::TransactionPool; use crate::EthApi; @@ -28,10 +27,6 @@ where + FullEthApiTypes + RpcNodeCore, { - #[inline] - fn cache(&self) -> &EthStateCache { - self.inner.cache() - } } #[cfg(test)] From 37d96436073134136f625cd8f33862b39480af0f Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Tue, 29 Oct 2024 02:10:30 +0800 Subject: [PATCH 320/425] chore(rpc): remove redundant `LoadState::cache` (#12147) --- crates/optimism/rpc/src/eth/mod.rs | 9 ++------- crates/rpc/rpc-eth-api/src/helpers/state.rs | 11 +++-------- crates/rpc/rpc/src/eth/helpers/state.rs | 10 ++-------- 3 files changed, 7 insertions(+), 23 deletions(-) diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index 52acf4240..7b427467b 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -215,17 +215,12 @@ where } } -impl LoadState for OpEthApi -where +impl LoadState for OpEthApi where N: RpcNodeCore< Provider: StateProviderFactory + ChainSpecProvider, Pool: TransactionPool, - >, + > { - #[inline] - fn cache(&self) -> &EthStateCache { - self.inner.cache() - } } impl EthState for OpEthApi diff --git a/crates/rpc/rpc-eth-api/src/helpers/state.rs b/crates/rpc/rpc-eth-api/src/helpers/state.rs index 87e66cb74..97c94b949 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/state.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/state.rs @@ -13,12 +13,12 @@ use reth_provider::{ BlockIdReader, BlockNumReader, ChainSpecProvider, StateProvider, StateProviderBox, StateProviderFactory, }; -use reth_rpc_eth_types::{EthApiError, EthStateCache, PendingBlockEnv, RpcInvalidTransactionError}; +use reth_rpc_eth_types::{EthApiError, PendingBlockEnv, RpcInvalidTransactionError}; use 
reth_rpc_types_compat::proof::from_primitive_account_proof; use reth_transaction_pool::TransactionPool; use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, SpecId}; -use crate::{EthApiTypes, FromEthApiError, RpcNodeCore}; +use crate::{EthApiTypes, FromEthApiError, RpcNodeCore, RpcNodeCoreExt}; use super::{EthApiSpec, LoadPendingBlock, SpawnBlocking}; @@ -170,17 +170,12 @@ pub trait EthState: LoadState + SpawnBlocking { /// Behaviour shared by several `eth_` RPC methods, not exclusive to `eth_` state RPC methods. pub trait LoadState: EthApiTypes - + RpcNodeCore< + + RpcNodeCoreExt< Provider: StateProviderFactory + ChainSpecProvider, Pool: TransactionPool, > { - /// Returns a handle for reading data from memory. - /// - /// Data access in default (L1) trait method implementations. - fn cache(&self) -> &EthStateCache; - /// Returns the state at the given block number fn state_at_hash(&self, block_hash: B256) -> Result { self.provider().history_by_block_hash(block_hash).map_err(Self::Error::from_eth_err) diff --git a/crates/rpc/rpc/src/eth/helpers/state.rs b/crates/rpc/rpc/src/eth/helpers/state.rs index 8c958ea2a..a3e909cf6 100644 --- a/crates/rpc/rpc/src/eth/helpers/state.rs +++ b/crates/rpc/rpc/src/eth/helpers/state.rs @@ -8,7 +8,6 @@ use reth_rpc_eth_api::{ helpers::{EthState, LoadState, SpawnBlocking}, RpcNodeCore, }; -use reth_rpc_eth_types::EthStateCache; use crate::EthApi; @@ -21,17 +20,12 @@ where } } -impl LoadState for EthApi -where +impl LoadState for EthApi where Self: RpcNodeCore< Provider: StateProviderFactory + ChainSpecProvider, Pool: TransactionPool, - >, + > { - #[inline] - fn cache(&self) -> &EthStateCache { - self.inner.cache() - } } #[cfg(test)] From ddc9bda315c0815369794f51bc232dff0ac9f43e Mon Sep 17 00:00:00 2001 From: Dan Cline <6798349+Rjected@users.noreply.github.com> Date: Mon, 28 Oct 2024 16:27:56 -0400 Subject: [PATCH 321/425] fix(op): fix payload id calculation (#11730) Co-authored-by: Matthias Seitz --- crates/optimism/payload/src/payload.rs | 76 +++++++++++++++++++++++--- 1 file changed, 68 insertions(+), 8 deletions(-) diff --git a/crates/optimism/payload/src/payload.rs b/crates/optimism/payload/src/payload.rs index f46891fdc..7f95d04ad 100644 --- a/crates/optimism/payload/src/payload.rs +++ b/crates/optimism/payload/src/payload.rs @@ -3,7 +3,7 @@ //! Optimism builder support use alloy_eips::{eip2718::Decodable2718, eip7685::Requests}; -use alloy_primitives::{Address, B256, U256}; +use alloy_primitives::{keccak256, Address, B256, U256}; use alloy_rlp::Encodable; use alloy_rpc_types_engine::{ExecutionPayloadEnvelopeV2, ExecutionPayloadV1, PayloadId}; /// Re-export for use in downstream arguments. @@ -46,9 +46,9 @@ impl PayloadBuilderAttributes for OptimismPayloadBuilderAttributes { fn try_new( parent: B256, attributes: OpPayloadAttributes, - _version: u8, + version: u8, ) -> Result { - let id = payload_id_optimism(&parent, &attributes); + let id = payload_id_optimism(&parent, &attributes, version); let transactions = attributes .transactions @@ -281,7 +281,11 @@ impl From for OpExecutionPayloadEnvelopeV4 { /// Generates the payload id for the configured payload from the [`OpPayloadAttributes`]. /// /// Returns an 8-byte identifier by hashing the payload components with sha256 hash. 
-pub(crate) fn payload_id_optimism(parent: &B256, attributes: &OpPayloadAttributes) -> PayloadId { +pub(crate) fn payload_id_optimism( + parent: &B256, + attributes: &OpPayloadAttributes, + payload_version: u8, +) -> PayloadId { use sha2::Digest; let mut hasher = sha2::Sha256::new(); hasher.update(parent.as_slice()); @@ -299,15 +303,71 @@ pub(crate) fn payload_id_optimism(parent: &B256, attributes: &OpPayloadAttribute } let no_tx_pool = attributes.no_tx_pool.unwrap_or_default(); - hasher.update([no_tx_pool as u8]); - if let Some(txs) = &attributes.transactions { - txs.iter().for_each(|tx| hasher.update(tx)); + if no_tx_pool || attributes.transactions.as_ref().is_some_and(|txs| !txs.is_empty()) { + hasher.update([no_tx_pool as u8]); + let txs_len = attributes.transactions.as_ref().map(|txs| txs.len()).unwrap_or_default(); + hasher.update(&txs_len.to_be_bytes()[..]); + if let Some(txs) = &attributes.transactions { + for tx in txs { + // we have to just hash the bytes here because otherwise we would need to decode + // the transactions here which really isn't ideal + let tx_hash = keccak256(tx); + // maybe we can try just taking the hash and not decoding + hasher.update(tx_hash) + } + } } if let Some(gas_limit) = attributes.gas_limit { hasher.update(gas_limit.to_be_bytes()); } - let out = hasher.finalize(); + if let Some(eip_1559_params) = attributes.eip_1559_params { + hasher.update(eip_1559_params.as_slice()); + } + + let mut out = hasher.finalize(); + out[0] = payload_version; PayloadId::new(out.as_slice()[..8].try_into().expect("sufficient length")) } + +#[cfg(test)] +mod tests { + use super::*; + use crate::OpPayloadAttributes; + use alloy_primitives::{address, b256, bytes, FixedBytes}; + use alloy_rpc_types_engine::PayloadAttributes; + use reth_payload_primitives::EngineApiMessageVersion; + use std::str::FromStr; + + #[test] + fn test_payload_id_parity_op_geth() { + // INFO rollup_boost::server:received fork_choice_updated_v3 from builder and l2_client + // payload_id_builder="0x6ef26ca02318dcf9" payload_id_l2="0x03d2dae446d2a86a" + let expected = + PayloadId::new(FixedBytes::<8>::from_str("0x03d2dae446d2a86a").unwrap().into()); + let attrs = OpPayloadAttributes { + payload_attributes: PayloadAttributes { + timestamp: 1728933301, + prev_randao: b256!("9158595abbdab2c90635087619aa7042bbebe47642dfab3c9bfb934f6b082765"), + suggested_fee_recipient: address!("4200000000000000000000000000000000000011"), + withdrawals: Some([].into()), + parent_beacon_block_root: b256!("8fe0193b9bf83cb7e5a08538e494fecc23046aab9a497af3704f4afdae3250ff").into() + }, + transactions: Some([bytes!("7ef8f8a0dc19cfa777d90980e4875d0a548a881baaa3f83f14d1bc0d3038bc329350e54194deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e20000f424000000000000000000000000300000000670d6d890000000000000125000000000000000000000000000000000000000000000000000000000000000700000000000000000000000000000000000000000000000000000000000000014bf9181db6e381d4384bbf69c48b0ee0eed23c6ca26143c6d2544f9d39997a590000000000000000000000007f83d659683caf2767fd3c720981d51f5bc365bc")].into()), + no_tx_pool: None, + gas_limit: Some(30000000), + eip_1559_params: None, + }; + + // Reth's `PayloadId` should match op-geth's `PayloadId`. 
This fails + assert_eq!( + expected, + payload_id_optimism( + &b256!("3533bf30edaf9505d0810bf475cbe4e5f4b9889904b9845e83efdeab4e92eb1e"), + &attrs, + EngineApiMessageVersion::V3 as u8 + ) + ); + } +} From 05ee75f32c95600f0010455ccaf66ed6187b2c59 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 28 Oct 2024 23:31:10 +0100 Subject: [PATCH 322/425] fix: restrict concurrent incoming connections (#12150) --- crates/net/network-types/src/peers/state.rs | 6 ++ crates/net/network/src/peers.rs | 62 +++++++++++++++++++-- 2 files changed, 63 insertions(+), 5 deletions(-) diff --git a/crates/net/network-types/src/peers/state.rs b/crates/net/network-types/src/peers/state.rs index f6ab1a39f..1e2466c80 100644 --- a/crates/net/network-types/src/peers/state.rs +++ b/crates/net/network-types/src/peers/state.rs @@ -31,6 +31,12 @@ impl PeerConnectionState { } } + /// Returns true if this is the idle state. + #[inline] + pub const fn is_idle(&self) -> bool { + matches!(self, Self::Idle) + } + /// Returns true if this is an active incoming connection. #[inline] pub const fn is_incoming(&self) -> bool { diff --git a/crates/net/network/src/peers.rs b/crates/net/network/src/peers.rs index 3d5ff7a0d..016e7fa8d 100644 --- a/crates/net/network/src/peers.rs +++ b/crates/net/network/src/peers.rs @@ -218,6 +218,11 @@ impl PeersManager { self.backed_off_peers.len() } + /// Returns the number of idle trusted peers. + fn num_idle_trusted_peers(&self) -> usize { + self.peers.iter().filter(|(_, peer)| peer.kind.is_trusted() && peer.state.is_idle()).count() + } + /// Invoked when a new _incoming_ tcp connection is accepted. /// /// returns an error if the inbound ip address is on the ban list @@ -229,9 +234,34 @@ impl PeersManager { return Err(InboundConnectionError::IpBanned) } - if !self.connection_info.has_in_capacity() && self.trusted_peer_ids.is_empty() { - // if we don't have any inbound slots and no trusted peers, we don't accept any new - // connections + // check if we even have slots for a new incoming connection + if !self.connection_info.has_in_capacity() { + if self.trusted_peer_ids.is_empty() { + // if we don't have any incoming slots and no trusted peers, we don't accept any new + // connections + return Err(InboundConnectionError::ExceedsCapacity) + } + + // there's an edge case here where no incoming connections besides from trusted peers + // are allowed (max_inbound == 0), in which case we still need to allow new pending + // incoming connections until all trusted peers are connected. + let num_idle_trusted_peers = self.num_idle_trusted_peers(); + if num_idle_trusted_peers <= self.trusted_peer_ids.len() { + // we still want to limit concurrent pending connections + let max_inbound = + self.trusted_peer_ids.len().max(self.connection_info.config.max_inbound); + if self.connection_info.num_pending_in <= max_inbound { + self.connection_info.inc_pending_in(); + } + return Ok(()) + } + + // all trusted peers are either connected or connecting + return Err(InboundConnectionError::ExceedsCapacity) + } + + // also cap the incoming connections we can process at once + if !self.connection_info.has_in_pending_capacity() { return Err(InboundConnectionError::ExceedsCapacity) } @@ -968,17 +998,22 @@ impl ConnectionInfo { Self { config, num_outbound: 0, num_pending_out: 0, num_inbound: 0, num_pending_in: 0 } } - /// Returns `true` if there's still capacity for a new outgoing connection. + /// Returns `true` if there's still capacity to perform an outgoing connection. 
const fn has_out_capacity(&self) -> bool { self.num_pending_out < self.config.max_concurrent_outbound_dials && self.num_outbound < self.config.max_outbound } - /// Returns `true` if there's still capacity for a new incoming connection. + /// Returns `true` if there's still capacity to accept a new incoming connection. const fn has_in_capacity(&self) -> bool { self.num_inbound < self.config.max_inbound } + /// Returns `true` if we can handle an additional incoming pending connection. + const fn has_in_pending_capacity(&self) -> bool { + self.num_pending_in < self.config.max_inbound + } + fn decr_state(&mut self, state: PeerConnectionState) { match state { PeerConnectionState::Idle => {} @@ -1597,6 +1632,23 @@ mod tests { assert_eq!(peers.connection_info.num_pending_in, 0); } + #[tokio::test] + async fn test_reject_incoming_at_pending_capacity() { + let mut peers = PeersManager::default(); + + for count in 1..=peers.connection_info.config.max_inbound { + let socket_addr = + SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 1, count as u8)), 8008); + assert!(peers.on_incoming_pending_session(socket_addr.ip()).is_ok()); + assert_eq!(peers.connection_info.num_pending_in, count); + } + assert!(peers.connection_info.has_in_capacity()); + assert!(!peers.connection_info.has_in_pending_capacity()); + + let socket_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 1, 100)), 8008); + assert!(peers.on_incoming_pending_session(socket_addr.ip()).is_err()); + } + #[tokio::test] async fn test_closed_incoming() { let socket_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 1, 2)), 8008); From 367eb44177b3cba8dae4c98acf0cdc93273e745e Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 29 Oct 2024 00:18:37 +0100 Subject: [PATCH 323/425] chore: remove one unwrap (#12152) --- crates/net/ecies/src/algorithm.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/net/ecies/src/algorithm.rs b/crates/net/ecies/src/algorithm.rs index 83dcc657b..6bf9717fe 100644 --- a/crates/net/ecies/src/algorithm.rs +++ b/crates/net/ecies/src/algorithm.rs @@ -650,6 +650,7 @@ impl ECIES { out.extend_from_slice(tag.as_slice()); } + /// Extracts the header from slice and returns the body size. 
pub fn read_header(&mut self, data: &mut [u8]) -> Result { // If the data is not large enough to fit the header and mac bytes, return an error // @@ -677,7 +678,7 @@ impl ECIES { self.body_size = Some(body_size); - Ok(self.body_size.unwrap()) + Ok(body_size) } pub const fn header_len() -> usize { From 462157880c5cf27408c304d9ce8dd32e10201800 Mon Sep 17 00:00:00 2001 From: greged93 <82421016+greged93@users.noreply.github.com> Date: Tue, 29 Oct 2024 00:51:20 +0100 Subject: [PATCH 324/425] dev: track invalid transactions by sender in pool (#12138) --- crates/transaction-pool/src/pool/best.rs | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/crates/transaction-pool/src/pool/best.rs b/crates/transaction-pool/src/pool/best.rs index 763572e7e..6ade15be7 100644 --- a/crates/transaction-pool/src/pool/best.rs +++ b/crates/transaction-pool/src/pool/best.rs @@ -1,14 +1,14 @@ use crate::{ - identifier::TransactionId, pool::pending::PendingTransaction, PoolTransaction, - TransactionOrdering, ValidPoolTransaction, + identifier::{SenderId, TransactionId}, + pool::pending::PendingTransaction, + PoolTransaction, TransactionOrdering, ValidPoolTransaction, }; -use alloy_primitives::{Address, B256 as TxHash}; +use alloy_primitives::Address; use core::fmt; use std::{ collections::{BTreeMap, BTreeSet, HashSet, VecDeque}, sync::Arc, }; - use tokio::sync::broadcast::{error::TryRecvError, Receiver}; use tracing::debug; @@ -80,7 +80,7 @@ pub(crate) struct BestTransactions { /// then can be moved from the `all` set to the `independent` set. pub(crate) independent: BTreeSet>, /// There might be the case where a yielded transactions is invalid, this will track it. - pub(crate) invalid: HashSet, + pub(crate) invalid: HashSet, /// Used to receive any new pending transactions that have been added to the pool after this /// iterator was static fileted /// @@ -94,7 +94,7 @@ pub(crate) struct BestTransactions { impl BestTransactions { /// Mark the transaction and it's descendants as invalid. pub(crate) fn mark_invalid(&mut self, tx: &Arc>) { - self.invalid.insert(*tx.hash()); + self.invalid.insert(tx.sender_id()); } /// Returns the ancestor the given transaction, the transaction with `nonce - 1`. 
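Note on the `mark_invalid` change above: keying the `invalid` set by sender id instead of transaction hash means a single insertion covers every queued transaction from that sender, including its higher-nonce descendants. A minimal, self-contained sketch of that skip-by-sender idea follows; `Tx` and `next_best` are invented stand-ins, not the pool's real `ValidPoolTransaction` or iterator types.

    use std::collections::HashSet;

    // Illustrative sketch only: once a sender is marked invalid, every
    // remaining transaction from that sender is skipped.
    struct Tx {
        sender: u64,
        nonce: u64,
    }

    fn next_best(queue: &mut Vec<Tx>, invalid_senders: &HashSet<u64>) -> Option<Tx> {
        while let Some(tx) = queue.pop() {
            // Skip transactions whose sender was previously marked invalid.
            if invalid_senders.contains(&tx.sender) {
                continue;
            }
            return Some(tx);
        }
        None
    }

    fn main() {
        // Highest-priority transaction last, so `pop` yields it first.
        let mut queue = vec![
            Tx { sender: 1, nonce: 1 },
            Tx { sender: 2, nonce: 0 },
            Tx { sender: 1, nonce: 0 },
        ];
        let mut invalid_senders = HashSet::new();

        // Marking sender 1 invalid drops both of its queued transactions.
        invalid_senders.insert(1u64);
        while let Some(tx) = next_best(&mut queue, &invalid_senders) {
            println!("yield sender={} nonce={}", tx.sender, tx.nonce);
        }
    }

This mirrors the hunks here: `invalid` now holds sender ids and `mark_invalid` inserts `tx.sender_id()`, so the iterator can skip whole senders when yielding the next best transaction.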
@@ -168,14 +168,14 @@ impl Iterator for BestTransactions { self.add_new_transactions(); // Remove the next independent tx with the highest priority let best = self.independent.pop_last()?; - let hash = best.transaction.hash(); + let sender_id = best.transaction.sender_id(); - // skip transactions that were marked as invalid - if self.invalid.contains(hash) { + // skip transactions for which sender was marked as invalid + if self.invalid.contains(&sender_id) { debug!( target: "txpool", "[{:?}] skipping invalid transaction", - hash + best.transaction.hash() ); continue } @@ -186,7 +186,7 @@ impl Iterator for BestTransactions { } if self.skip_blobs && best.transaction.transaction.is_eip4844() { - // blobs should be skipped, marking the as invalid will ensure that no dependent + // blobs should be skipped, marking them as invalid will ensure that no dependent // transactions are returned self.mark_invalid(&best.transaction) } else { From 0297b8f6949d2354c1d68df5360cc375dc889879 Mon Sep 17 00:00:00 2001 From: zilayo <84344709+zilayo@users.noreply.github.com> Date: Mon, 28 Oct 2024 23:59:52 +0000 Subject: [PATCH 325/425] fix: use net::discv5 for reth's discv5 tracing target namespace (reverts #12045) (#12151) --- crates/net/discv5/src/config.rs | 4 ++-- crates/net/discv5/src/lib.rs | 30 +++++++++++++++--------------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/crates/net/discv5/src/config.rs b/crates/net/discv5/src/config.rs index 0684c263b..203ef7613 100644 --- a/crates/net/discv5/src/config.rs +++ b/crates/net/discv5/src/config.rs @@ -412,7 +412,7 @@ pub fn discv5_sockets_wrt_rlpx_addr( discv5_addr_ipv6.map(|ip| SocketAddrV6::new(ip, discv5_port_ipv6, 0, 0)); if let Some(discv5_addr) = discv5_addr_ipv4 { - warn!(target: "discv5", + warn!(target: "net::discv5", %discv5_addr, %rlpx_addr, "Overwriting discv5 IPv4 address with RLPx IPv4 address, limited to one advertised IP address per IP version" @@ -429,7 +429,7 @@ pub fn discv5_sockets_wrt_rlpx_addr( discv5_addr_ipv4.map(|ip| SocketAddrV4::new(ip, discv5_port_ipv4)); if let Some(discv5_addr) = discv5_addr_ipv6 { - warn!(target: "discv5", + warn!(target: "net::discv5", %discv5_addr, %rlpx_addr, "Overwriting discv5 IPv6 address with RLPx IPv6 address, limited to one advertised IP address per IP version" diff --git a/crates/net/discv5/src/lib.rs b/crates/net/discv5/src/lib.rs index a8154b7bd..da54d0b52 100644 --- a/crates/net/discv5/src/lib.rs +++ b/crates/net/discv5/src/lib.rs @@ -95,14 +95,14 @@ impl Discv5 { /// CAUTION: The value **must** be rlp encoded pub fn set_eip868_in_local_enr(&self, key: Vec, rlp: Bytes) { let Ok(key_str) = std::str::from_utf8(&key) else { - error!(target: "discv5", + error!(target: "net::discv5", err="key not utf-8", "failed to update local enr" ); return }; if let Err(err) = self.discv5.enr_insert(key_str, &rlp) { - error!(target: "discv5", + error!(target: "net::discv5", %err, "failed to update local enr" ); @@ -131,7 +131,7 @@ impl Discv5 { self.discv5.ban_node(&node_id, None); self.ban_ip(ip); } - Err(err) => error!(target: "discv5", + Err(err) => error!(target: "net::discv5", %err, "failed to ban peer" ), @@ -167,7 +167,7 @@ impl Discv5 { // let (enr, bc_enr, fork_key, rlpx_ip_mode) = build_local_enr(sk, &discv5_config); - trace!(target: "discv5", + trace!(target: "net::discv5", ?enr, "local ENR" ); @@ -271,7 +271,7 @@ impl Discv5 { // to them over RLPx, to be compatible with EL discv5 implementations that don't // enforce this security measure. 
- trace!(target: "discv5", + trace!(target: "net::discv5", ?enr, %socket, "discovered unverifiable enr, source socket doesn't match socket advertised in ENR" @@ -296,7 +296,7 @@ impl Discv5 { let node_record = match self.try_into_reachable(enr, socket) { Ok(enr_bc) => enr_bc, Err(err) => { - trace!(target: "discv5", + trace!(target: "net::discv5", %err, ?enr, "discovered peer is unreachable" @@ -308,7 +308,7 @@ impl Discv5 { } }; if let FilterOutcome::Ignore { reason } = self.filter_discovered_peer(enr) { - trace!(target: "discv5", + trace!(target: "net::discv5", ?enr, reason, "filtered out discovered peer" @@ -324,7 +324,7 @@ impl Discv5 { .then(|| self.get_fork_id(enr).ok()) .flatten(); - trace!(target: "discv5", + trace!(target: "net::discv5", ?fork_id, ?enr, "discovered peer" @@ -491,7 +491,7 @@ pub async fn bootstrap( bootstrap_nodes: HashSet, discv5: &Arc, ) -> Result<(), Error> { - trace!(target: "discv5", + trace!(target: "net::discv5", ?bootstrap_nodes, "adding bootstrap nodes .." ); @@ -508,7 +508,7 @@ pub async fn bootstrap( let discv5 = discv5.clone(); enr_requests.push(async move { if let Err(err) = discv5.request_enr(enode.to_string()).await { - debug!(target: "discv5", + debug!(target: "net::discv5", ?enode, %err, "failed adding boot node" @@ -545,7 +545,7 @@ pub fn spawn_populate_kbuckets_bg( for i in (0..bootstrap_lookup_countdown).rev() { let target = discv5::enr::NodeId::random(); - trace!(target: "discv5", + trace!(target: "net::discv5", %target, bootstrap_boost_runs_countdown=i, lookup_interval=format!("{:#?}", pulse_lookup_interval), @@ -563,7 +563,7 @@ pub fn spawn_populate_kbuckets_bg( // selection (ref kademlia) let target = get_lookup_target(kbucket_index, local_node_id); - trace!(target: "discv5", + trace!(target: "net::discv5", %target, lookup_interval=format!("{:#?}", lookup_interval), "starting periodic lookup query" @@ -628,11 +628,11 @@ pub async fn lookup( ); match discv5.find_node(target).await { - Err(err) => trace!(target: "discv5", + Err(err) => trace!(target: "net::discv5", %err, "lookup query failed" ), - Ok(peers) => trace!(target: "discv5", + Ok(peers) => trace!(target: "net::discv5", target=format!("{:#?}", target), peers_count=peers.len(), peers=format!("[{:#}]", peers.iter() @@ -645,7 +645,7 @@ pub async fn lookup( // `Discv5::connected_peers` can be subset of sessions, not all peers make it // into kbuckets, e.g. 
incoming sessions from peers with // unreachable enrs - debug!(target: "discv5", + debug!(target: "net::discv5", connected_peers=discv5.connected_peers(), "connected peers in routing table" ); From fbe04625b9c488e76d41b82cc28bbce268ebc4c1 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 29 Oct 2024 01:04:24 +0100 Subject: [PATCH 326/425] test: use port0 in tests (#12154) --- crates/net/network/tests/it/startup.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/net/network/tests/it/startup.rs b/crates/net/network/tests/it/startup.rs index 89889a869..d84ff492e 100644 --- a/crates/net/network/tests/it/startup.rs +++ b/crates/net/network/tests/it/startup.rs @@ -113,6 +113,7 @@ async fn test_node_record_address_with_nat() { .add_nat(Some(NatResolver::ExternalIp("10.1.1.1".parse().unwrap()))) .disable_discv4_discovery() .disable_dns_discovery() + .listener_port(0) .build_with_noop_provider(MAINNET.clone()); let network = NetworkManager::new(config).await.unwrap(); @@ -127,6 +128,7 @@ async fn test_node_record_address_with_nat_disable_discovery() { let config = NetworkConfigBuilder::new(secret_key) .add_nat(Some(NatResolver::ExternalIp("10.1.1.1".parse().unwrap()))) .disable_discovery() + .listener_port(0) .build_with_noop_provider(MAINNET.clone()); let network = NetworkManager::new(config).await.unwrap(); From cc2a33cfc0c5f06b0d44fef72ffa987e1c97f025 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 29 Oct 2024 03:09:11 +0100 Subject: [PATCH 327/425] feat: rate limit incoming ips (#12153) --- crates/net/network-types/src/peers/config.rs | 9 +++ crates/net/network/src/peers.rs | 63 +++++++++++++++++--- 2 files changed, 63 insertions(+), 9 deletions(-) diff --git a/crates/net/network-types/src/peers/config.rs b/crates/net/network-types/src/peers/config.rs index 97a8bb3ca..890679f5d 100644 --- a/crates/net/network-types/src/peers/config.rs +++ b/crates/net/network-types/src/peers/config.rs @@ -24,6 +24,9 @@ pub const DEFAULT_MAX_COUNT_PEERS_INBOUND: u32 = 30; /// This restricts how many outbound dials can be performed concurrently. pub const DEFAULT_MAX_COUNT_CONCURRENT_OUTBOUND_DIALS: usize = 15; +/// A temporary timeout for ips on incoming connection attempts. +pub const INBOUND_IP_THROTTLE_DURATION: Duration = Duration::from_secs(30); + /// The durations to use when a backoff should be applied to a peer. /// /// See also [`BackoffKind`]. @@ -155,6 +158,11 @@ pub struct PeersConfig { /// /// The backoff duration increases with number of backoff attempts. pub backoff_durations: PeerBackoffDurations, + /// How long to temporarily ban ips on incoming connection attempts. + /// + /// This acts as an IP based rate limit. + #[cfg_attr(feature = "serde", serde(default, with = "humantime_serde"))] + pub incoming_ip_throttle_duration: Duration, } impl Default for PeersConfig { @@ -171,6 +179,7 @@ impl Default for PeersConfig { trusted_nodes_only: false, basic_nodes: Default::default(), max_backoff_count: 5, + incoming_ip_throttle_duration: INBOUND_IP_THROTTLE_DURATION, } } } diff --git a/crates/net/network/src/peers.rs b/crates/net/network/src/peers.rs index 016e7fa8d..4855ff5e7 100644 --- a/crates/net/network/src/peers.rs +++ b/crates/net/network/src/peers.rs @@ -84,6 +84,8 @@ pub struct PeersManager { max_backoff_count: u8, /// Tracks the connection state of the node net_connection_state: NetworkConnectionState, + /// How long to temporarily ban ip on an incoming connection attempt. 
+ incoming_ip_throttle_duration: Duration, } impl PeersManager { @@ -100,6 +102,7 @@ impl PeersManager { trusted_nodes_only, basic_nodes, max_backoff_count, + incoming_ip_throttle_duration, } = config; let (manager_tx, handle_rx) = mpsc::unbounded_channel(); let now = Instant::now(); @@ -148,6 +151,7 @@ impl PeersManager { last_tick: Instant::now(), max_backoff_count, net_connection_state: NetworkConnectionState::default(), + incoming_ip_throttle_duration, } } @@ -265,6 +269,9 @@ impl PeersManager { return Err(InboundConnectionError::ExceedsCapacity) } + // apply the rate limit + self.throttle_incoming_ip(addr); + self.connection_info.inc_pending_in(); Ok(()) } @@ -383,6 +390,12 @@ impl PeersManager { self.ban_list.ban_ip_until(ip, std::time::Instant::now() + self.ban_duration); } + /// Bans the IP temporarily to rate limit inbound connection attempts per IP. + fn throttle_incoming_ip(&mut self, ip: IpAddr) { + self.ban_list + .ban_ip_until(ip, std::time::Instant::now() + self.incoming_ip_throttle_duration); + } + /// Temporarily puts the peer in timeout by inserting it into the backedoff peers set fn backoff_peer_until(&mut self, peer_id: PeerId, until: std::time::Instant) { trace!(target: "net::peers", ?peer_id, "backing off"); @@ -1129,15 +1142,6 @@ impl Display for InboundConnectionError { #[cfg(test)] mod tests { - use std::{ - future::{poll_fn, Future}, - io, - net::{IpAddr, Ipv4Addr, SocketAddr}, - pin::Pin, - task::{Context, Poll}, - time::Duration, - }; - use alloy_primitives::B512; use reth_eth_wire::{ errors::{EthHandshakeError, EthStreamError, P2PHandshakeError, P2PStreamError}, @@ -1149,6 +1153,14 @@ mod tests { use reth_network_types::{ peers::reputation::DEFAULT_REPUTATION, BackoffKind, ReputationChangeKind, }; + use std::{ + future::{poll_fn, Future}, + io, + net::{IpAddr, Ipv4Addr, SocketAddr}, + pin::Pin, + task::{Context, Poll}, + time::Duration, + }; use url::Host; use super::PeersManager; @@ -2330,6 +2342,39 @@ mod tests { ); } + #[tokio::test] + async fn test_incoming_rate_limit() { + let config = PeersConfig { + incoming_ip_throttle_duration: Duration::from_millis(100), + ..PeersConfig::test() + }; + let mut peers = PeersManager::new(config); + + let addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(168, 0, 1, 2)), 8009); + assert!(peers.on_incoming_pending_session(addr.ip()).is_ok()); + assert_eq!( + peers.on_incoming_pending_session(addr.ip()).unwrap_err(), + InboundConnectionError::IpBanned + ); + + peers.release_interval.reset_immediately(); + tokio::time::sleep(peers.incoming_ip_throttle_duration).await; + + // await unban + poll_fn(|cx| loop { + if peers.poll(cx).is_pending() { + return Poll::Ready(()); + } + }) + .await; + + assert!(peers.on_incoming_pending_session(addr.ip()).is_ok()); + assert_eq!( + peers.on_incoming_pending_session(addr.ip()).unwrap_err(), + InboundConnectionError::IpBanned + ); + } + #[tokio::test] async fn test_tick() { let ip = IpAddr::V4(Ipv4Addr::new(127, 0, 1, 2)); From b48fa68f65a606b53ec19eb1dd2cbc090c3cd777 Mon Sep 17 00:00:00 2001 From: Dan Cline <6798349+Rjected@users.noreply.github.com> Date: Tue, 29 Oct 2024 01:14:34 -0400 Subject: [PATCH 328/425] fix(ecies): ecies typo (#12155) --- crates/net/network/src/session/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/net/network/src/session/mod.rs b/crates/net/network/src/session/mod.rs index 74f303df7..3522aa6a7 100644 --- a/crates/net/network/src/session/mod.rs +++ b/crates/net/network/src/session/mod.rs @@ -868,7 +868,7 @@ async fn authenticate( 
extra_handlers: RlpxSubProtocolHandlers, ) { let local_addr = stream.local_addr().ok(); - let stream = match get_eciess_stream(stream, secret_key, direction).await { + let stream = match get_ecies_stream(stream, secret_key, direction).await { Ok(stream) => stream, Err(error) => { let _ = events @@ -917,7 +917,7 @@ async fn authenticate( /// Returns an [`ECIESStream`] if it can be built. If not, send a /// [`PendingSessionEvent::EciesAuthError`] and returns `None` -async fn get_eciess_stream( +async fn get_ecies_stream( stream: Io, secret_key: SecretKey, direction: Direction, From 7880d4ddb016c9befc16c5a8f2c70601d9dd0fea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C3=A9a=20Narzis?= <78718413+lean-apple@users.noreply.github.com> Date: Tue, 29 Oct 2024 16:52:00 +0700 Subject: [PATCH 329/425] refactor: change `PayloadConfig` to use parent header instead of parent block (#12159) --- .../src/commands/debug_cmd/build_block.rs | 5 ++-- crates/ethereum/payload/src/lib.rs | 28 +++++++++---------- crates/optimism/payload/src/builder.rs | 18 ++++++------ crates/payload/basic/src/lib.rs | 20 +++++++------ examples/custom-engine-types/src/main.rs | 8 +++--- .../custom-payload-builder/src/generator.rs | 7 +++-- 6 files changed, 47 insertions(+), 39 deletions(-) diff --git a/bin/reth/src/commands/debug_cmd/build_block.rs b/bin/reth/src/commands/debug_cmd/build_block.rs index 272f107d3..a2dfb5ab3 100644 --- a/bin/reth/src/commands/debug_cmd/build_block.rs +++ b/bin/reth/src/commands/debug_cmd/build_block.rs @@ -29,7 +29,8 @@ use reth_node_ethereum::{EthEvmConfig, EthExecutorProvider}; use reth_payload_builder::database::CachedReads; use reth_primitives::{ revm_primitives::KzgSettings, BlobTransaction, BlobTransactionSidecar, - PooledTransactionsElement, SealedBlock, SealedBlockWithSenders, Transaction, TransactionSigned, + PooledTransactionsElement, SealedBlock, SealedBlockWithSenders, SealedHeader, Transaction, + TransactionSigned, }; use reth_provider::{ providers::BlockchainProvider, BlockHashReader, BlockReader, BlockWriter, ChainSpecProvider, @@ -224,7 +225,7 @@ impl> Command { withdrawals: None, }; let payload_config = PayloadConfig::new( - Arc::clone(&best_block), + Arc::new(SealedHeader::new(best_block.header().clone(), best_block.hash())), Bytes::default(), reth_payload_builder::EthPayloadBuilderAttributes::try_new( best_block.hash(), diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 73b22efac..8e188f890 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -97,7 +97,7 @@ where &self, args: BuildArguments, ) -> Result, PayloadBuilderError> { - let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_block); + let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_header); let pool = args.pool.clone(); default_ethereum_payload(self.evm_config.clone(), args, cfg_env, block_env, |attributes| { @@ -120,7 +120,7 @@ where None, ); - let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_block); + let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_header); let pool = args.pool.clone(); @@ -154,13 +154,13 @@ where let BuildArguments { client, pool, mut cached_reads, config, cancel, best_payload } = args; let chain_spec = client.chain_spec(); - let state_provider = client.state_by_block_hash(config.parent_block.hash())?; + let state_provider = client.state_by_block_hash(config.parent_header.hash())?; let state = 
StateProviderDatabase::new(state_provider); let mut db = State::builder().with_database_ref(cached_reads.as_db(state)).with_bundle_update().build(); - let PayloadConfig { parent_block, extra_data, attributes } = config; + let PayloadConfig { parent_header, extra_data, attributes } = config; - debug!(target: "payload_builder", id=%attributes.id, parent_hash = ?parent_block.hash(), parent_number = parent_block.number, "building new payload"); + debug!(target: "payload_builder", id=%attributes.id, parent_header = ?parent_header.hash(), parent_number = parent_header.number, "building new payload"); let mut cumulative_gas_used = 0; let mut sum_blob_gas_used = 0; let block_gas_limit: u64 = initialized_block_env.gas_limit.to::(); @@ -189,7 +189,7 @@ where ) .map_err(|err| { warn!(target: "payload_builder", - parent_hash=%parent_block.hash(), + parent_hash=%parent_header.hash(), %err, "failed to apply beacon root contract call for payload" ); @@ -201,10 +201,10 @@ where &mut db, &initialized_cfg, &initialized_block_env, - parent_block.hash(), + parent_header.hash(), ) .map_err(|err| { - warn!(target: "payload_builder", parent_hash=%parent_block.hash(), %err, "failed to update blockhashes for payload"); + warn!(target: "payload_builder", parent_hash=%parent_header.hash(), %err, "failed to update parent header blockhashes for payload"); PayloadBuilderError::Internal(err.into()) })?; @@ -371,7 +371,7 @@ where let state_provider = db.database.0.inner.borrow_mut(); state_provider.db.state_root_with_updates(hashed_state.clone()).inspect_err(|err| { warn!(target: "payload_builder", - parent_hash=%parent_block.hash(), + parent_hash=%parent_header.hash(), %err, "failed to calculate state root for payload" ); @@ -393,9 +393,9 @@ where executed_txs.iter().filter(|tx| tx.is_eip4844()).map(|tx| tx.hash).collect(), )?; - excess_blob_gas = if chain_spec.is_cancun_active_at_timestamp(parent_block.timestamp) { - let parent_excess_blob_gas = parent_block.excess_blob_gas.unwrap_or_default(); - let parent_blob_gas_used = parent_block.blob_gas_used.unwrap_or_default(); + excess_blob_gas = if chain_spec.is_cancun_active_at_timestamp(parent_header.timestamp) { + let parent_excess_blob_gas = parent_header.excess_blob_gas.unwrap_or_default(); + let parent_blob_gas_used = parent_header.blob_gas_used.unwrap_or_default(); Some(calc_excess_blob_gas(parent_excess_blob_gas, parent_blob_gas_used)) } else { // for the first post-fork block, both parent.blob_gas_used and @@ -407,7 +407,7 @@ where } let header = Header { - parent_hash: parent_block.hash(), + parent_hash: parent_header.hash(), ommers_hash: EMPTY_OMMER_ROOT_HASH, beneficiary: initialized_block_env.coinbase, state_root, @@ -419,7 +419,7 @@ where mix_hash: attributes.prev_randao, nonce: BEACON_NONCE.into(), base_fee_per_gas: Some(base_fee), - number: parent_block.number + 1, + number: parent_header.number + 1, gas_limit: block_gas_limit, difficulty: U256::ZERO, gas_used: cumulative_gas_used, diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 36b7d17a0..a1536ccf8 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -103,7 +103,7 @@ where &self, args: BuildArguments, ) -> Result, PayloadBuilderError> { - let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_block); + let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_header); optimism_payload(&self.evm_config, args, cfg_env, block_env, self.compute_pending_block) } 
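With the new signature the builder reads everything it needs from the parent header: its hash, its number, and the blob-gas fields that feed the EIP-4844 carry-over shown in the hunk above. A worked illustration of that carry-over, assuming Cancun's 131,072 gas per blob and a three-blob target; it mirrors what a helper like `calc_excess_blob_gas` computes but is not the reth implementation itself:

    /// Assumed EIP-4844 constants (Cancun): 131_072 gas per blob, target of 3 blobs per block.
    const GAS_PER_BLOB: u64 = 131_072;
    const TARGET_BLOB_GAS_PER_BLOCK: u64 = 3 * GAS_PER_BLOB;

    /// Excess blob gas carried into the child block, derived purely from the
    /// parent header's `excess_blob_gas` and `blob_gas_used` fields.
    fn next_excess_blob_gas(parent_excess_blob_gas: u64, parent_blob_gas_used: u64) -> u64 {
        (parent_excess_blob_gas + parent_blob_gas_used)
            .saturating_sub(TARGET_BLOB_GAS_PER_BLOCK)
    }

    // Example: a parent with no prior excess that used 5 blobs leaves 2 blobs'
    // worth of excess for the child:
    // next_excess_blob_gas(0, 5 * GAS_PER_BLOB) == 2 * GAS_PER_BLOB

The `saturating_sub` stands in for the spec's clamp at zero, so a parent at or below the target contributes no excess to the child block.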
@@ -132,7 +132,7 @@ where cancel: Default::default(), best_payload: None, }; - let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_block); + let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_header); optimism_payload(&self.evm_config, args, cfg_env, block_env, false)? .into_payload() .ok_or_else(|| PayloadBuilderError::MissingPayload) @@ -163,13 +163,13 @@ where let BuildArguments { client, pool, mut cached_reads, config, cancel, best_payload } = args; let chain_spec = client.chain_spec(); - let state_provider = client.state_by_block_hash(config.parent_block.hash())?; + let state_provider = client.state_by_block_hash(config.parent_header.hash())?; let state = StateProviderDatabase::new(state_provider); let mut db = State::builder().with_database_ref(cached_reads.as_db(state)).with_bundle_update().build(); - let PayloadConfig { parent_block, attributes, extra_data } = config; + let PayloadConfig { parent_header, attributes, extra_data } = config; - debug!(target: "payload_builder", id=%attributes.payload_attributes.payload_id(), parent_hash = ?parent_block.hash(), parent_number = parent_block.number, "building new payload"); + debug!(target: "payload_builder", id=%attributes.payload_attributes.payload_id(), parent_header = ?parent_header.hash(), parent_number = parent_header.number, "building new payload"); let mut cumulative_gas_used = 0; let block_gas_limit: u64 = attributes.gas_limit.unwrap_or_else(|| { @@ -206,7 +206,7 @@ where ) .map_err(|err| { warn!(target: "payload_builder", - parent_hash=%parent_block.hash(), + parent_header=%parent_header.hash(), %err, "failed to apply beacon root contract call for payload" ); @@ -449,7 +449,7 @@ where let state_provider = db.database.0.inner.borrow_mut(); state_provider.db.state_root_with_updates(hashed_state.clone()).inspect_err(|err| { warn!(target: "payload_builder", - parent_hash=%parent_block.hash(), + parent_header=%parent_header.hash(), %err, "failed to calculate state root for payload" ); @@ -470,7 +470,7 @@ where }; let header = Header { - parent_hash: parent_block.hash(), + parent_hash: parent_header.hash(), ommers_hash: EMPTY_OMMER_ROOT_HASH, beneficiary: initialized_block_env.coinbase, state_root, @@ -482,7 +482,7 @@ where mix_hash: attributes.payload_attributes.prev_randao, nonce: BEACON_NONCE.into(), base_fee_per_gas: Some(base_fee), - number: parent_block.number + 1, + number: parent_header.number + 1, gas_limit: block_gas_limit, difficulty: U256::ZERO, gas_used: cumulative_gas_used, diff --git a/crates/payload/basic/src/lib.rs b/crates/payload/basic/src/lib.rs index d0bb29502..bb8dc0ef6 100644 --- a/crates/payload/basic/src/lib.rs +++ b/crates/payload/basic/src/lib.rs @@ -23,7 +23,7 @@ use reth_payload_primitives::{ BuiltPayload, PayloadBuilderAttributes, PayloadBuilderError, PayloadKind, }; use reth_primitives::{ - constants::RETH_CLIENT_VERSION, proofs, BlockNumberOrTag, SealedBlock, Withdrawals, + constants::RETH_CLIENT_VERSION, proofs, BlockNumberOrTag, SealedHeader, Withdrawals, }; use reth_provider::{ BlockReaderIdExt, BlockSource, CanonStateNotification, ProviderError, StateProviderFactory, @@ -122,7 +122,7 @@ impl BasicPayloadJobGenerator Option { self.pre_cached.as_ref().filter(|pc| pc.block == parent).map(|pc| pc.cached.clone()) @@ -163,13 +163,17 @@ where block.seal(attributes.parent()) }; + let hash = parent_block.hash(); + let parent_header = parent_block.header(); + let header = SealedHeader::new(parent_header.clone(), hash); + let config = - 
PayloadConfig::new(Arc::new(parent_block), self.config.extradata.clone(), attributes); + PayloadConfig::new(Arc::new(header), self.config.extradata.clone(), attributes); let until = self.job_deadline(config.attributes.timestamp()); let deadline = Box::pin(tokio::time::sleep_until(until)); - let cached_reads = self.maybe_pre_cached(config.parent_block.hash()); + let cached_reads = self.maybe_pre_cached(hash); let mut job = BasicPayloadJob { config, @@ -706,8 +710,8 @@ impl Drop for Cancelled { /// Static config for how to build a payload. #[derive(Clone, Debug)] pub struct PayloadConfig { - /// The parent block. - pub parent_block: Arc, + /// The parent header. + pub parent_header: Arc, /// Block extra data. pub extra_data: Bytes, /// Requested attributes for the payload. @@ -727,11 +731,11 @@ where { /// Create new payload config. pub const fn new( - parent_block: Arc, + parent_header: Arc, extra_data: Bytes, attributes: Attributes, ) -> Self { - Self { parent_block, extra_data, attributes } + Self { parent_header, extra_data, attributes } } /// Returns the payload id. diff --git a/examples/custom-engine-types/src/main.rs b/examples/custom-engine-types/src/main.rs index 0fa2b1658..30f89a0b9 100644 --- a/examples/custom-engine-types/src/main.rs +++ b/examples/custom-engine-types/src/main.rs @@ -341,7 +341,7 @@ where args: BuildArguments, ) -> Result, PayloadBuilderError> { let BuildArguments { client, pool, cached_reads, config, cancel, best_payload } = args; - let PayloadConfig { parent_block, extra_data, attributes } = config; + let PayloadConfig { parent_header, extra_data, attributes } = config; let chain_spec = client.chain_spec(); @@ -354,7 +354,7 @@ where client, pool, cached_reads, - config: PayloadConfig { parent_block, extra_data, attributes: attributes.0 }, + config: PayloadConfig { parent_header, extra_data, attributes: attributes.0 }, cancel, best_payload, }) @@ -365,10 +365,10 @@ where client: &Client, config: PayloadConfig, ) -> Result { - let PayloadConfig { parent_block, extra_data, attributes } = config; + let PayloadConfig { parent_header, extra_data, attributes } = config; let chain_spec = client.chain_spec(); >::build_empty_payload(&reth_ethereum_payload_builder::EthereumPayloadBuilder::new(EthEvmConfig::new(chain_spec.clone())),client, - PayloadConfig { parent_block, extra_data, attributes: attributes.0}) + PayloadConfig { parent_header, extra_data, attributes: attributes.0}) } } diff --git a/examples/custom-payload-builder/src/generator.rs b/examples/custom-payload-builder/src/generator.rs index f5d64e41c..734142887 100644 --- a/examples/custom-payload-builder/src/generator.rs +++ b/examples/custom-payload-builder/src/generator.rs @@ -8,7 +8,7 @@ use reth::{ use reth_basic_payload_builder::{BasicPayloadJobGeneratorConfig, PayloadBuilder, PayloadConfig}; use reth_node_api::PayloadBuilderAttributes; use reth_payload_builder::{PayloadBuilderError, PayloadJobGenerator}; -use reth_primitives::BlockNumberOrTag; +use reth_primitives::{BlockNumberOrTag, SealedHeader}; use std::sync::Arc; /// The generator type that creates new jobs that builds empty blocks. 
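Each call site updated above follows the same pattern: the parent header is sealed once, pairing it with its already-known hash, and the payload config carries only that pair instead of the full block. A toy sketch of the idea with placeholder types, not reth's actual `SealedHeader`:

    /// Toy "sealed" header: the hash is supplied at construction and read back
    /// later without re-hashing and without touching the block body.
    struct Sealed<H> {
        header: H,
        hash: [u8; 32],
    }

    impl<H> Sealed<H> {
        fn new(header: H, hash: [u8; 32]) -> Self {
            Self { header, hash }
        }

        /// Cheap accessor; no transactions or withdrawals are needed here.
        fn hash(&self) -> [u8; 32] {
            self.hash
        }

        fn header(&self) -> &H {
            &self.header
        }
    }

In the builders touched by this patch the parent is only consulted for header-level fields, so keeping just the sealed header behind the `Arc` appears to be sufficient and avoids cloning block bodies into every payload job.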
@@ -77,7 +77,10 @@ where // we already know the hash, so we can seal it block.seal(attributes.parent()) }; - let config = PayloadConfig::new(Arc::new(parent_block), Bytes::default(), attributes); + let hash = parent_block.hash(); + let header = SealedHeader::new(parent_block.header().clone(), hash); + + let config = PayloadConfig::new(Arc::new(header), Bytes::default(), attributes); Ok(EmptyBlockPayloadJob { client: self.client.clone(), _pool: self.pool.clone(), From 2dbbd152cbc77f12e0056b8ded5bbe7141d1f6f7 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Tue, 29 Oct 2024 20:00:22 +0900 Subject: [PATCH 330/425] chore(provider): remove unused `BlockExecutionReader` trait (#12156) --- .../src/providers/database/provider.rs | 180 +----------------- .../storage/provider/src/test_utils/mock.rs | 20 +- crates/storage/provider/src/traits/block.rs | 11 -- 3 files changed, 13 insertions(+), 198 deletions(-) diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 308fa364a..e59a4f563 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -6,15 +6,14 @@ use crate::{ AccountExtReader, BlockSource, ChangeSetReader, ReceiptProvider, StageCheckpointWriter, }, writer::UnifiedStorageWriter, - AccountReader, BlockExecutionReader, BlockExecutionWriter, BlockHashReader, BlockNumReader, - BlockReader, BlockWriter, BundleStateInit, ChainStateBlockReader, ChainStateBlockWriter, - DBProvider, EvmEnvProvider, HashingWriter, HeaderProvider, HeaderSyncGap, - HeaderSyncGapProvider, HistoricalStateProvider, HistoricalStateProviderRef, HistoryWriter, - LatestStateProvider, LatestStateProviderRef, OriginalValuesKnown, ProviderError, - PruneCheckpointReader, PruneCheckpointWriter, RevertsInit, StageCheckpointReader, - StateChangeWriter, StateProviderBox, StateReader, StateWriter, StaticFileProviderFactory, - StatsReader, StorageReader, StorageTrieWriter, TransactionVariant, TransactionsProvider, - TransactionsProviderExt, TrieWriter, WithdrawalsProvider, + AccountReader, BlockExecutionWriter, BlockHashReader, BlockNumReader, BlockReader, BlockWriter, + BundleStateInit, ChainStateBlockReader, ChainStateBlockWriter, DBProvider, EvmEnvProvider, + HashingWriter, HeaderProvider, HeaderSyncGap, HeaderSyncGapProvider, HistoricalStateProvider, + HistoricalStateProviderRef, HistoryWriter, LatestStateProvider, LatestStateProviderRef, + OriginalValuesKnown, ProviderError, PruneCheckpointReader, PruneCheckpointWriter, RevertsInit, + StageCheckpointReader, StateChangeWriter, StateProviderBox, StateReader, StateWriter, + StaticFileProviderFactory, StatsReader, StorageReader, StorageTrieWriter, TransactionVariant, + TransactionsProvider, TransactionsProviderExt, TrieWriter, WithdrawalsProvider, }; use alloy_eips::BlockHashOrNumber; use alloy_primitives::{keccak256, Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; @@ -684,152 +683,6 @@ impl DatabaseProvider { }) } - /// Get requested blocks transaction with senders - pub(crate) fn get_block_transaction_range( - &self, - range: impl RangeBounds + Clone, - ) -> ProviderResult)>> { - // Raad range of block bodies to get all transactions id's of this range. 
- let block_bodies = self.get::(range)?; - - if block_bodies.is_empty() { - return Ok(Vec::new()) - } - - // Compute the first and last tx ID in the range - let first_transaction = block_bodies.first().expect("If we have headers").1.first_tx_num(); - let last_transaction = block_bodies.last().expect("Not empty").1.last_tx_num(); - - // If this is the case then all of the blocks in the range are empty - if last_transaction < first_transaction { - return Ok(block_bodies.into_iter().map(|(n, _)| (n, Vec::new())).collect()) - } - - // Get transactions and senders - let transactions = self - .get::(first_transaction..=last_transaction)? - .into_iter() - .map(|(id, tx)| (id, tx.into())) - .collect::>(); - - let mut senders = - self.get::(first_transaction..=last_transaction)?; - - recover_block_senders(&mut senders, &transactions, first_transaction, last_transaction)?; - - // Merge transaction into blocks - let mut block_tx = Vec::with_capacity(block_bodies.len()); - let mut senders = senders.into_iter(); - let mut transactions = transactions.into_iter(); - for (block_number, block_body) in block_bodies { - let mut one_block_tx = Vec::with_capacity(block_body.tx_count as usize); - for _ in block_body.tx_num_range() { - let tx = transactions.next(); - let sender = senders.next(); - - let recovered = match (tx, sender) { - (Some((tx_id, tx)), Some((sender_tx_id, sender))) => { - if tx_id == sender_tx_id { - Ok(TransactionSignedEcRecovered::from_signed_transaction(tx, sender)) - } else { - Err(ProviderError::MismatchOfTransactionAndSenderId { tx_id }) - } - } - (Some((tx_id, _)), _) | (_, Some((tx_id, _))) => { - Err(ProviderError::MismatchOfTransactionAndSenderId { tx_id }) - } - (None, None) => Err(ProviderError::BlockBodyTransactionCount), - }?; - one_block_tx.push(recovered) - } - block_tx.push((block_number, one_block_tx)); - } - - Ok(block_tx) - } - - /// Get the given range of blocks. 
- pub fn get_block_range( - &self, - range: impl RangeBounds + Clone, - ) -> ProviderResult> - where - Spec: EthereumHardforks, - { - // For blocks we need: - // - // - Headers - // - Bodies (transactions) - // - Uncles/ommers - // - Withdrawals - // - Signers - - let block_headers = self.get::(range.clone())?; - if block_headers.is_empty() { - return Ok(Vec::new()) - } - - let block_header_hashes = self.get::(range.clone())?; - let block_ommers = self.get::(range.clone())?; - let block_withdrawals = self.get::(range.clone())?; - - let block_tx = self.get_block_transaction_range(range)?; - let mut blocks = Vec::with_capacity(block_headers.len()); - - // merge all into block - let block_header_iter = block_headers.into_iter(); - let block_header_hashes_iter = block_header_hashes.into_iter(); - let block_tx_iter = block_tx.into_iter(); - - // Ommers can be empty for some blocks - let mut block_ommers_iter = block_ommers.into_iter(); - let mut block_withdrawals_iter = block_withdrawals.into_iter(); - let mut block_ommers = block_ommers_iter.next(); - let mut block_withdrawals = block_withdrawals_iter.next(); - - for ((main_block_number, header), (_, header_hash), (_, tx)) in - izip!(block_header_iter, block_header_hashes_iter, block_tx_iter) - { - let header = SealedHeader::new(header, header_hash); - - let (transactions, senders) = tx.into_iter().map(|tx| tx.to_components()).unzip(); - - // Ommers can be missing - let mut ommers = Vec::new(); - if let Some((block_number, _)) = block_ommers.as_ref() { - if *block_number == main_block_number { - ommers = block_ommers.take().unwrap().1.ommers; - block_ommers = block_ommers_iter.next(); - } - }; - - // withdrawal can be missing - let shanghai_is_active = - self.chain_spec.is_shanghai_active_at_timestamp(header.timestamp); - let mut withdrawals = Some(Withdrawals::default()); - if shanghai_is_active { - if let Some((block_number, _)) = block_withdrawals.as_ref() { - if *block_number == main_block_number { - withdrawals = Some(block_withdrawals.take().unwrap().1.withdrawals); - block_withdrawals = block_withdrawals_iter.next(); - } - } - } else { - withdrawals = None - } - - blocks.push(SealedBlockWithSenders { - block: SealedBlock { - header, - body: BlockBody { transactions, ommers, withdrawals }, - }, - senders, - }) - } - - Ok(blocks) - } - /// Return the last N blocks of state, recreating the [`ExecutionOutcome`]. /// /// 1. 
Iterate over the [`BlockBodyIndices`][tables::BlockBodyIndices] table to get all the @@ -3107,23 +2960,6 @@ impl HistoryWriter for DatabaseProvider BlockExecutionReader - for DatabaseProvider -{ - fn get_block_and_execution_range( - &self, - range: RangeInclusive, - ) -> ProviderResult { - // get blocks - let blocks = self.get_block_range(range.clone())?; - - // get execution res - let execution_state = self.get_state(range)?.unwrap_or_default(); - - Ok(Chain::new(blocks, execution_state, None)) - } -} - impl StateReader for DatabaseProvider { fn get_state(&self, block: BlockNumber) -> ProviderResult> { self.get_state(block..=block) diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs index 08530acf0..ed861f5f1 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -1,10 +1,9 @@ use crate::{ traits::{BlockSource, ReceiptProvider}, - AccountReader, BlockExecutionReader, BlockHashReader, BlockIdReader, BlockNumReader, - BlockReader, BlockReaderIdExt, ChainSpecProvider, ChangeSetReader, DatabaseProvider, - EvmEnvProvider, HeaderProvider, ReceiptProviderIdExt, StateProvider, StateProviderBox, - StateProviderFactory, StateReader, StateRootProvider, TransactionVariant, TransactionsProvider, - WithdrawalsProvider, + AccountReader, BlockHashReader, BlockIdReader, BlockNumReader, BlockReader, BlockReaderIdExt, + ChainSpecProvider, ChangeSetReader, DatabaseProvider, EvmEnvProvider, HeaderProvider, + ReceiptProviderIdExt, StateProvider, StateProviderBox, StateProviderFactory, StateReader, + StateRootProvider, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; use alloy_consensus::constants::EMPTY_ROOT_HASH; use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumberOrTag}; @@ -19,7 +18,7 @@ use reth_chainspec::{ChainInfo, ChainSpec}; use reth_db::mock::{DatabaseMock, TxMock}; use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices}; use reth_evm::ConfigureEvmEnv; -use reth_execution_types::{Chain, ExecutionOutcome}; +use reth_execution_types::ExecutionOutcome; use reth_primitives::{ Account, Block, BlockWithSenders, Bytecode, GotExpected, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, @@ -818,15 +817,6 @@ impl ChangeSetReader for MockEthProvider { } } -impl BlockExecutionReader for MockEthProvider { - fn get_block_and_execution_range( - &self, - _range: RangeInclusive, - ) -> ProviderResult { - Ok(Chain::default()) - } -} - impl StateReader for MockEthProvider { fn get_state(&self, _block: BlockNumber) -> ProviderResult> { Ok(None) diff --git a/crates/storage/provider/src/traits/block.rs b/crates/storage/provider/src/traits/block.rs index 8e3a54d86..7202c405f 100644 --- a/crates/storage/provider/src/traits/block.rs +++ b/crates/storage/provider/src/traits/block.rs @@ -2,7 +2,6 @@ use alloy_primitives::BlockNumber; use reth_db_api::models::StoredBlockBodyIndices; use reth_execution_types::{Chain, ExecutionOutcome}; use reth_primitives::SealedBlockWithSenders; -use reth_storage_api::BlockReader; use reth_storage_errors::provider::ProviderResult; use reth_trie::{updates::TrieUpdates, HashedPostStateSorted}; use std::ops::RangeInclusive; @@ -23,16 +22,6 @@ pub trait BlockExecutionWriter: BlockWriter + Send + Sync { ) -> ProviderResult<()>; } -/// BlockExecution Reader -#[auto_impl::auto_impl(&, Arc, Box)] -pub trait BlockExecutionReader: BlockReader + Send + Sync { - /// Get range of blocks and its execution 
result - fn get_block_and_execution_range( - &self, - range: RangeInclusive, - ) -> ProviderResult; -} - /// This just receives state, or [`ExecutionOutcome`], from the provider #[auto_impl::auto_impl(&, Arc, Box)] pub trait StateReader: Send + Sync { From dd18af1f1617ed855a889fe208ea4ba772409fb5 Mon Sep 17 00:00:00 2001 From: Debjit Bhowal Date: Tue, 29 Oct 2024 16:45:20 +0530 Subject: [PATCH 331/425] feat: without-evm cli option in reth (#12134) Co-authored-by: Matthias Seitz --- Cargo.lock | 1 + book/cli/reth/init-state.md | 16 +++ crates/cli/commands/Cargo.toml | 1 + crates/cli/commands/src/init_state.rs | 79 ----------- crates/cli/commands/src/init_state/mod.rs | 132 ++++++++++++++++++ .../commands/src/init_state/without_evm.rs} | 65 +++++---- .../{init_state/mod.rs => init_state.rs} | 12 +- 7 files changed, 194 insertions(+), 112 deletions(-) delete mode 100644 crates/cli/commands/src/init_state.rs create mode 100644 crates/cli/commands/src/init_state/mod.rs rename crates/{optimism/cli/src/commands/init_state/bedrock.rs => cli/commands/src/init_state/without_evm.rs} (70%) rename crates/optimism/cli/src/commands/{init_state/mod.rs => init_state.rs} (87%) diff --git a/Cargo.lock b/Cargo.lock index c7a47e8f8..8febcffa5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6616,6 +6616,7 @@ dependencies = [ "ahash", "alloy-eips", "alloy-primitives", + "alloy-rlp", "arbitrary", "backon", "clap", diff --git a/book/cli/reth/init-state.md b/book/cli/reth/init-state.md index ddcd3cece..3e0735167 100644 --- a/book/cli/reth/init-state.md +++ b/book/cli/reth/init-state.md @@ -72,6 +72,22 @@ Database: --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout + --without-evm + Specifies whether to initialize the state without relying on EVM historical data. + + When enabled, and before inserting the state, it creates a dummy chain up to the last EVM block specified. It then, appends the first block provided block. + + - **Note**: **Do not** import receipts and blocks beforehand, or this will fail or be ignored. + + --header + Header file containing the header in an RLP encoded format. + + --total-difficulty + Total difficulty of the header. + + --header-hash + Hash of the header. + JSONL file with state dump. diff --git a/crates/cli/commands/Cargo.toml b/crates/cli/commands/Cargo.toml index ef66a9941..a0bc51477 100644 --- a/crates/cli/commands/Cargo.toml +++ b/crates/cli/commands/Cargo.toml @@ -51,6 +51,7 @@ reth-trie-common = { workspace = true, optional = true } # ethereum alloy-eips.workspace = true alloy-primitives.workspace = true +alloy-rlp.workspace = true itertools.workspace = true futures.workspace = true diff --git a/crates/cli/commands/src/init_state.rs b/crates/cli/commands/src/init_state.rs deleted file mode 100644 index 16e99f8fe..000000000 --- a/crates/cli/commands/src/init_state.rs +++ /dev/null @@ -1,79 +0,0 @@ -//! Command that initializes the node from a genesis file. - -use crate::common::{AccessRights, Environment, EnvironmentArgs}; -use alloy_primitives::B256; -use clap::Parser; -use reth_chainspec::{EthChainSpec, EthereumHardforks}; -use reth_cli::chainspec::ChainSpecParser; -use reth_config::config::EtlConfig; -use reth_db_common::init::init_from_state_dump; -use reth_node_builder::NodeTypesWithEngine; -use reth_provider::{providers::ProviderNodeTypes, ProviderFactory}; - -use std::{fs::File, io::BufReader, path::PathBuf}; -use tracing::info; - -/// Initializes the database with the genesis block. 
-#[derive(Debug, Parser)] -pub struct InitStateCommand { - #[command(flatten)] - pub env: EnvironmentArgs, - - /// JSONL file with state dump. - /// - /// Must contain accounts in following format, additional account fields are ignored. Must - /// also contain { "root": \ } as first line. - /// { - /// "balance": "\", - /// "nonce": \, - /// "code": "\", - /// "storage": { - /// "\": "\", - /// .. - /// }, - /// "address": "\", - /// } - /// - /// Allows init at a non-genesis block. Caution! Blocks must be manually imported up until - /// and including the non-genesis block to init chain at. See 'import' command. - #[arg(value_name = "STATE_DUMP_FILE", verbatim_doc_comment)] - pub state: PathBuf, -} - -impl> InitStateCommand { - /// Execute the `init` command - pub async fn execute>( - self, - ) -> eyre::Result<()> { - info!(target: "reth::cli", "Reth init-state starting"); - - let Environment { config, provider_factory, .. } = self.env.init::(AccessRights::RW)?; - - info!(target: "reth::cli", "Initiating state dump"); - - let hash = init_at_state(self.state, provider_factory, config.stages.etl)?; - - info!(target: "reth::cli", hash = ?hash, "Genesis block written"); - Ok(()) - } -} - -/// Initialize chain with state at specific block, from a file with state dump. -pub fn init_at_state( - state_dump_path: PathBuf, - factory: ProviderFactory, - etl_config: EtlConfig, -) -> eyre::Result { - info!(target: "reth::cli", - path=?state_dump_path, - "Opening state dump"); - - let file = File::open(state_dump_path)?; - let reader = BufReader::new(file); - - let provider_rw = factory.provider_rw()?; - let hash = init_from_state_dump(reader, &provider_rw.0, etl_config)?; - provider_rw.commit()?; - - Ok(hash) -} diff --git a/crates/cli/commands/src/init_state/mod.rs b/crates/cli/commands/src/init_state/mod.rs new file mode 100644 index 000000000..adaec3e8b --- /dev/null +++ b/crates/cli/commands/src/init_state/mod.rs @@ -0,0 +1,132 @@ +//! Command that initializes the node from a genesis file. + +use crate::common::{AccessRights, Environment, EnvironmentArgs}; +use alloy_primitives::{B256, U256}; +use clap::Parser; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; +use reth_cli::chainspec::ChainSpecParser; +use reth_db_common::init::init_from_state_dump; +use reth_node_builder::NodeTypesWithEngine; +use reth_primitives::SealedHeader; +use reth_provider::{ + BlockNumReader, DatabaseProviderFactory, StaticFileProviderFactory, StaticFileWriter, +}; + +use std::{fs::File, io::BufReader, path::PathBuf, str::FromStr}; +use tracing::info; + +pub mod without_evm; + +/// Initializes the database with the genesis block. +#[derive(Debug, Parser)] +pub struct InitStateCommand { + #[command(flatten)] + pub env: EnvironmentArgs, + + /// JSONL file with state dump. + /// + /// Must contain accounts in following format, additional account fields are ignored. Must + /// also contain { "root": \ } as first line. + /// { + /// "balance": "\", + /// "nonce": \, + /// "code": "\", + /// "storage": { + /// "\": "\", + /// .. + /// }, + /// "address": "\", + /// } + /// + /// Allows init at a non-genesis block. Caution! Blocks must be manually imported up until + /// and including the non-genesis block to init chain at. See 'import' command. + #[arg(value_name = "STATE_DUMP_FILE", verbatim_doc_comment)] + pub state: PathBuf, + + /// Specifies whether to initialize the state without relying on EVM historical data. 
+ /// + /// When enabled, and before inserting the state, it creates a dummy chain up to the last EVM + /// block specified. It then, appends the first block provided block. + /// + /// - **Note**: **Do not** import receipts and blocks beforehand, or this will fail or be + /// ignored. + #[arg(long, default_value = "false")] + pub without_evm: bool, + + /// Header file containing the header in an RLP encoded format. + #[arg(long, value_name = "HEADER_FILE", verbatim_doc_comment)] + pub header: Option, + + /// Total difficulty of the header. + #[arg(long, value_name = "TOTAL_DIFFICULTY", verbatim_doc_comment)] + pub total_difficulty: Option, + + /// Hash of the header. + #[arg(long, value_name = "HEADER_HASH", verbatim_doc_comment)] + pub header_hash: Option, +} + +impl> InitStateCommand { + /// Execute the `init` command + pub async fn execute>( + self, + ) -> eyre::Result<()> { + info!(target: "reth::cli", "Reth init-state starting"); + + let Environment { config, provider_factory, .. } = self.env.init::(AccessRights::RW)?; + + let static_file_provider = provider_factory.static_file_provider(); + let provider_rw = provider_factory.database_provider_rw()?; + + if self.without_evm { + // ensure header, total difficulty and header hash are provided + let header = self.header.ok_or_else(|| eyre::eyre!("Header file must be provided"))?; + let header = without_evm::read_header_from_file(header)?; + + let header_hash = + self.header_hash.ok_or_else(|| eyre::eyre!("Header hash must be provided"))?; + let header_hash = B256::from_str(&header_hash)?; + + let total_difficulty = self + .total_difficulty + .ok_or_else(|| eyre::eyre!("Total difficulty must be provided"))?; + let total_difficulty = U256::from_str(&total_difficulty)?; + + let last_block_number = provider_rw.last_block_number()?; + + if last_block_number == 0 { + without_evm::setup_without_evm( + &provider_rw, + &static_file_provider, + // &header, + // header_hash, + SealedHeader::new(header, header_hash), + total_difficulty, + )?; + + // SAFETY: it's safe to commit static files, since in the event of a crash, they + // will be unwinded according to database checkpoints. + // + // Necessary to commit, so the header is accessible to provider_rw and + // init_state_dump + static_file_provider.commit()?; + } else if last_block_number > 0 && last_block_number < header.number { + return Err(eyre::eyre!( + "Data directory should be empty when calling init-state with --without-evm-history." 
+ )); + } + } + + info!(target: "reth::cli", "Initiating state dump"); + + let file = File::open(self.state)?; + let reader = BufReader::new(file); + + let hash = init_from_state_dump(reader, &provider_rw, config.stages.etl)?; + + provider_rw.commit()?; + + info!(target: "reth::cli", hash = ?hash, "Genesis block written"); + Ok(()) + } +} diff --git a/crates/optimism/cli/src/commands/init_state/bedrock.rs b/crates/cli/commands/src/init_state/without_evm.rs similarity index 70% rename from crates/optimism/cli/src/commands/init_state/bedrock.rs rename to crates/cli/commands/src/init_state/without_evm.rs index efff065e5..187996653 100644 --- a/crates/optimism/cli/src/commands/init_state/bedrock.rs +++ b/crates/cli/commands/src/init_state/without_evm.rs @@ -1,5 +1,6 @@ use alloy_primitives::{BlockNumber, B256, U256}; -use reth_optimism_primitives::bedrock::{BEDROCK_HEADER, BEDROCK_HEADER_HASH, BEDROCK_HEADER_TTD}; +use alloy_rlp::Decodable; + use reth_primitives::{ BlockBody, Header, SealedBlock, SealedBlockWithSenders, SealedHeader, StaticFileSegment, }; @@ -7,28 +8,42 @@ use reth_provider::{ providers::StaticFileProvider, BlockWriter, StageCheckpointWriter, StaticFileWriter, }; use reth_stages::{StageCheckpoint, StageId}; + +use std::{fs::File, io::Read, path::PathBuf}; use tracing::info; -/// Creates a dummy chain (with no transactions) up to the last OVM block and appends the -/// first valid Bedrock block. -pub(crate) fn setup_op_mainnet_without_ovm( +/// Reads the header RLP from a file and returns the Header. +pub(crate) fn read_header_from_file(path: PathBuf) -> Result { + let mut file = File::open(path)?; + let mut buf = Vec::new(); + file.read_to_end(&mut buf)?; + + let header = Header::decode(&mut &buf[..])?; + Ok(header) +} + +/// Creates a dummy chain (with no transactions) up to the last EVM block and appends the +/// first valid block. +pub fn setup_without_evm( provider_rw: &Provider, static_file_provider: &StaticFileProvider, + header: SealedHeader, + total_difficulty: U256, ) -> Result<(), eyre::Error> where Provider: StageCheckpointWriter + BlockWriter, { - info!(target: "reth::cli", "Setting up dummy OVM chain before importing state."); + info!(target: "reth::cli", "Setting up dummy EVM chain before importing state."); - // Write OVM dummy data up to `BEDROCK_HEADER - 1` block - append_dummy_chain(static_file_provider, BEDROCK_HEADER.number - 1)?; + // Write EVM dummy data up to `header - 1` block + append_dummy_chain(static_file_provider, header.number - 1)?; - info!(target: "reth::cli", "Appending Bedrock block."); + info!(target: "reth::cli", "Appending first valid block."); - append_bedrock_block(provider_rw, static_file_provider)?; + append_first_block(provider_rw, static_file_provider, &header, total_difficulty)?; for stage in StageId::ALL { - provider_rw.save_stage_checkpoint(stage, StageCheckpoint::new(BEDROCK_HEADER.number))?; + provider_rw.save_stage_checkpoint(stage, StageCheckpoint::new(header.number))?; } info!(target: "reth::cli", "Set up finished."); @@ -36,38 +51,30 @@ where Ok(()) } -/// Appends the first bedrock block. +/// Appends the first block. /// /// By appending it, static file writer also verifies that all segments are at the same /// height. 
-fn append_bedrock_block( +fn append_first_block( provider_rw: impl BlockWriter, sf_provider: &StaticFileProvider, + header: &SealedHeader, + total_difficulty: U256, ) -> Result<(), eyre::Error> { provider_rw.insert_block( - SealedBlockWithSenders::new( - SealedBlock::new( - SealedHeader::new(BEDROCK_HEADER, BEDROCK_HEADER_HASH), - BlockBody::default(), - ), - vec![], - ) - .expect("no senders or txes"), + SealedBlockWithSenders::new(SealedBlock::new(header.clone(), BlockBody::default()), vec![]) + .expect("no senders or txes"), )?; sf_provider.latest_writer(StaticFileSegment::Headers)?.append_header( - &BEDROCK_HEADER, - BEDROCK_HEADER_TTD, - &BEDROCK_HEADER_HASH, + header, + total_difficulty, + &header.hash(), )?; - sf_provider - .latest_writer(StaticFileSegment::Receipts)? - .increment_block(BEDROCK_HEADER.number)?; + sf_provider.latest_writer(StaticFileSegment::Receipts)?.increment_block(header.number)?; - sf_provider - .latest_writer(StaticFileSegment::Transactions)? - .increment_block(BEDROCK_HEADER.number)?; + sf_provider.latest_writer(StaticFileSegment::Transactions)?.increment_block(header.number)?; Ok(()) } diff --git a/crates/optimism/cli/src/commands/init_state/mod.rs b/crates/optimism/cli/src/commands/init_state.rs similarity index 87% rename from crates/optimism/cli/src/commands/init_state/mod.rs rename to crates/optimism/cli/src/commands/init_state.rs index 3537f89e7..68f5d9a58 100644 --- a/crates/optimism/cli/src/commands/init_state/mod.rs +++ b/crates/optimism/cli/src/commands/init_state.rs @@ -6,7 +6,8 @@ use reth_cli_commands::common::{AccessRights, Environment}; use reth_db_common::init::init_from_state_dump; use reth_node_builder::NodeTypesWithEngine; use reth_optimism_chainspec::OpChainSpec; -use reth_optimism_primitives::bedrock::BEDROCK_HEADER; +use reth_optimism_primitives::bedrock::{BEDROCK_HEADER, BEDROCK_HEADER_HASH, BEDROCK_HEADER_TTD}; +use reth_primitives::SealedHeader; use reth_provider::{ BlockNumReader, ChainSpecProvider, DatabaseProviderFactory, StaticFileProviderFactory, StaticFileWriter, @@ -14,8 +15,6 @@ use reth_provider::{ use std::{fs::File, io::BufReader}; use tracing::info; -mod bedrock; - /// Initializes the database with the genesis block. #[derive(Debug, Parser)] pub struct InitStateCommandOp { @@ -53,7 +52,12 @@ impl> InitStateCommandOp { let last_block_number = provider_rw.last_block_number()?; if last_block_number == 0 { - bedrock::setup_op_mainnet_without_ovm(&provider_rw, &static_file_provider)?; + reth_cli_commands::init_state::without_evm::setup_without_evm( + &provider_rw, + &static_file_provider, + SealedHeader::new(BEDROCK_HEADER, BEDROCK_HEADER_HASH), + BEDROCK_HEADER_TTD, + )?; // SAFETY: it's safe to commit static files, since in the event of a crash, they // will be unwinded according to database checkpoints. From 3c6077812678e1a63896162db0bab225a809771d Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Tue, 29 Oct 2024 13:01:59 +0100 Subject: [PATCH 332/425] storage: `into_iter` with `self` by value (#12115) --- crates/storage/libmdbx-rs/src/cursor.rs | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/crates/storage/libmdbx-rs/src/cursor.rs b/crates/storage/libmdbx-rs/src/cursor.rs index 3deff0c24..26cfef54d 100644 --- a/crates/storage/libmdbx-rs/src/cursor.rs +++ b/crates/storage/libmdbx-rs/src/cursor.rs @@ -59,19 +59,18 @@ where } /// Returns an iterator over the raw key value slices. 
- #[allow(clippy::needless_lifetimes)] - pub fn iter_slices<'a>(&'a self) -> IntoIter<'a, K, Cow<'a, [u8]>, Cow<'a, [u8]>> { + pub fn iter_slices<'a>(self) -> IntoIter, Cow<'a, [u8]>> { self.into_iter() } /// Returns an iterator over database items. #[allow(clippy::should_implement_trait)] - pub fn into_iter(&self) -> IntoIter<'_, K, Key, Value> + pub fn into_iter(self) -> IntoIter where Key: TableObject, Value: TableObject, { - IntoIter::new(self.clone(), MDBX_NEXT, MDBX_NEXT) + IntoIter::new(self, MDBX_NEXT, MDBX_NEXT) } /// Retrieves a key/data pair from the cursor. Depending on the cursor op, @@ -508,7 +507,7 @@ unsafe impl Sync for Cursor where K: TransactionKind {} /// An iterator over the key/value pairs in an MDBX database. #[derive(Debug)] -pub enum IntoIter<'cur, K, Key, Value> +pub enum IntoIter where K: TransactionKind, Key: TableObject, @@ -535,11 +534,11 @@ where /// The next and subsequent operations to perform. next_op: ffi::MDBX_cursor_op, - _marker: PhantomData<(&'cur (), Key, Value)>, + _marker: PhantomData<(Key, Value)>, }, } -impl IntoIter<'_, K, Key, Value> +impl IntoIter where K: TransactionKind, Key: TableObject, @@ -547,11 +546,11 @@ where { /// Creates a new iterator backed by the given cursor. fn new(cursor: Cursor, op: ffi::MDBX_cursor_op, next_op: ffi::MDBX_cursor_op) -> Self { - IntoIter::Ok { cursor, op, next_op, _marker: Default::default() } + Self::Ok { cursor, op, next_op, _marker: Default::default() } } } -impl Iterator for IntoIter<'_, K, Key, Value> +impl Iterator for IntoIter where K: TransactionKind, Key: TableObject, @@ -747,13 +746,13 @@ where } } -impl<'cur, K, Key, Value> Iterator for IterDup<'cur, K, Key, Value> +impl Iterator for IterDup<'_, K, Key, Value> where K: TransactionKind, Key: TableObject, Value: TableObject, { - type Item = IntoIter<'cur, K, Key, Value>; + type Item = IntoIter; fn next(&mut self) -> Option { match self { From 1653877ed50b7a3d8d31104e18fd39f1e0ba66c9 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Tue, 29 Oct 2024 21:14:41 +0900 Subject: [PATCH 333/425] chore(ci): try to read all vectors on `compact-codec` before exiting in error (#12160) --- .../cli/commands/src/test_vectors/compact.rs | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/crates/cli/commands/src/test_vectors/compact.rs b/crates/cli/commands/src/test_vectors/compact.rs index 94552d5c2..8def25fa3 100644 --- a/crates/cli/commands/src/test_vectors/compact.rs +++ b/crates/cli/commands/src/test_vectors/compact.rs @@ -167,9 +167,22 @@ pub fn generate_vectors_with(gen: &[fn(&mut TestRunner) -> eyre::Result<()>]) -> /// re-encoding. pub fn read_vectors_with(read: &[fn() -> eyre::Result<()>]) -> Result<()> { fs::create_dir_all(VECTORS_FOLDER)?; + let mut errors = None; for read_fn in read { - read_fn()?; + if let Err(err) = read_fn() { + errors.get_or_insert_with(Vec::new).push(err); + } + } + + if let Some(err_list) = errors { + for error in err_list { + eprintln!("{:?}", error); + } + return Err(eyre::eyre!( + "If there are missing types, make sure to run `reth test-vectors compact --write` first.\n + If it happened during CI, ignore IF it's a new proposed type that `main` branch does not have." + )); } Ok(()) @@ -238,9 +251,8 @@ where // Read the file where the vectors are stored let file_path = format!("{VECTORS_FOLDER}/{}.json", &type_name); - let file = File::open(&file_path).wrap_err_with(|| { - "Failed to open vector. Make sure to run `reth test-vectors compact --write` first." 
- })?; + let file = + File::open(&file_path).wrap_err_with(|| format!("Failed to open vector {type_name}."))?; let reader = BufReader::new(file); let stored_values: Vec = serde_json::from_reader(reader)?; From 0f9ba64e954136a5507419ced37d1de96ff5b262 Mon Sep 17 00:00:00 2001 From: Evan Chipman <42247026+evchip@users.noreply.github.com> Date: Tue, 29 Oct 2024 19:17:04 +0700 Subject: [PATCH 334/425] feat: add geometry to database args (#11828) Co-authored-by: Matthias Seitz --- book/cli/reth/db.md | 6 + book/cli/reth/db/diff.md | 6 + book/cli/reth/debug/build-block.md | 6 + book/cli/reth/debug/execution.md | 6 + book/cli/reth/debug/in-memory-merkle.md | 6 + book/cli/reth/debug/merkle.md | 6 + book/cli/reth/debug/replay-engine.md | 6 + book/cli/reth/import.md | 6 + book/cli/reth/init-state.md | 6 + book/cli/reth/init.md | 6 + book/cli/reth/node.md | 6 + book/cli/reth/p2p.md | 6 + book/cli/reth/prune.md | 6 + book/cli/reth/recover/storage-tries.md | 6 + book/cli/reth/stage/drop.md | 6 + book/cli/reth/stage/dump.md | 6 + book/cli/reth/stage/run.md | 6 + book/cli/reth/stage/unwind.md | 6 + crates/node/core/src/args/database.rs | 183 +++++++++++++++++- .../storage/db/src/implementation/mdbx/mod.rs | 48 +++-- 20 files changed, 323 insertions(+), 16 deletions(-) diff --git a/book/cli/reth/db.md b/book/cli/reth/db.md index f9a8a158a..17a6de4e6 100644 --- a/book/cli/reth/db.md +++ b/book/cli/reth/db.md @@ -81,6 +81,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/db/diff.md b/book/cli/reth/db/diff.md index f57c6ac36..efb9e7d32 100644 --- a/book/cli/reth/db/diff.md +++ b/book/cli/reth/db/diff.md @@ -45,6 +45,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/debug/build-block.md b/book/cli/reth/debug/build-block.md index 2e6d637d5..7bceb62b9 100644 --- a/book/cli/reth/debug/build-block.md +++ b/book/cli/reth/debug/build-block.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/debug/execution.md b/book/cli/reth/debug/execution.md index 9ca74897c..b8e1ce05d 100644 --- a/book/cli/reth/debug/execution.md +++ b/book/cli/reth/debug/execution.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/debug/in-memory-merkle.md b/book/cli/reth/debug/in-memory-merkle.md index 3e322a691..a183db997 100644 --- a/book/cli/reth/debug/in-memory-merkle.md +++ b/book/cli/reth/debug/in-memory-merkle.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git 
a/book/cli/reth/debug/merkle.md b/book/cli/reth/debug/merkle.md index d701803b8..d9a72794e 100644 --- a/book/cli/reth/debug/merkle.md +++ b/book/cli/reth/debug/merkle.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/debug/replay-engine.md b/book/cli/reth/debug/replay-engine.md index dd587620a..b7a1266d3 100644 --- a/book/cli/reth/debug/replay-engine.md +++ b/book/cli/reth/debug/replay-engine.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/import.md b/book/cli/reth/import.md index 28e085bda..82a521ac0 100644 --- a/book/cli/reth/import.md +++ b/book/cli/reth/import.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/init-state.md b/book/cli/reth/init-state.md index 3e0735167..533c0f8f8 100644 --- a/book/cli/reth/init-state.md +++ b/book/cli/reth/init-state.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/init.md b/book/cli/reth/init.md index cd01accc0..ebe2a8386 100644 --- a/book/cli/reth/init.md +++ b/book/cli/reth/init.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/node.md b/book/cli/reth/node.md index a3ff8f6a5..52f597279 100644 --- a/book/cli/reth/node.md +++ b/book/cli/reth/node.md @@ -590,6 +590,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/p2p.md b/book/cli/reth/p2p.md index 603b451d9..33639042a 100644 --- a/book/cli/reth/p2p.md +++ b/book/cli/reth/p2p.md @@ -247,6 +247,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/prune.md b/book/cli/reth/prune.md index ed16197a7..41684ecd9 100644 --- a/book/cli/reth/prune.md +++ b/book/cli/reth/prune.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/recover/storage-tries.md b/book/cli/reth/recover/storage-tries.md index 
ecdaabe77..1afe94f55 100644 --- a/book/cli/reth/recover/storage-tries.md +++ b/book/cli/reth/recover/storage-tries.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/stage/drop.md b/book/cli/reth/stage/drop.md index 399b3818c..c22d6be66 100644 --- a/book/cli/reth/stage/drop.md +++ b/book/cli/reth/stage/drop.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/stage/dump.md b/book/cli/reth/stage/dump.md index 4b3de3fb1..e3df5bf2d 100644 --- a/book/cli/reth/stage/dump.md +++ b/book/cli/reth/stage/dump.md @@ -76,6 +76,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/stage/run.md b/book/cli/reth/stage/run.md index 9da3ce0de..204efc968 100644 --- a/book/cli/reth/stage/run.md +++ b/book/cli/reth/stage/run.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/stage/unwind.md b/book/cli/reth/stage/unwind.md index 700ab3d7e..cb72b9313 100644 --- a/book/cli/reth/stage/unwind.md +++ b/book/cli/reth/stage/unwind.md @@ -74,6 +74,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/crates/node/core/src/args/database.rs b/crates/node/core/src/args/database.rs index 0eec6639a..16ba61935 100644 --- a/crates/node/core/src/args/database.rs +++ b/crates/node/core/src/args/database.rs @@ -1,6 +1,6 @@ //! clap [Args](clap::Args) for database configuration -use std::time::Duration; +use std::{fmt, str::FromStr, time::Duration}; use crate::version::default_client_version; use clap::{ @@ -22,6 +22,12 @@ pub struct DatabaseArgs { /// NFS volume. #[arg(long = "db.exclusive")] pub exclusive: Option, + /// Maximum database size (e.g., 4TB, 8MB) + #[arg(long = "db.max-size", value_parser = parse_byte_size)] + pub max_size: Option, + /// Database growth step (e.g., 4GB, 4KB) + #[arg(long = "db.growth-step", value_parser = parse_byte_size)] + pub growth_step: Option, /// Read transaction timeout in seconds, 0 means no timeout. #[arg(long = "db.read-transaction-timeout")] pub read_transaction_timeout: Option, @@ -33,8 +39,9 @@ impl DatabaseArgs { self.get_database_args(default_client_version()) } - /// Returns the database arguments with configured log level and given client version. - pub const fn get_database_args( + /// Returns the database arguments with configured log level, client version, + /// max read transaction duration, and geometry. 
+ pub fn get_database_args( &self, client_version: ClientVersion, ) -> reth_db::mdbx::DatabaseArguments { @@ -48,6 +55,7 @@ impl DatabaseArgs { .with_log_level(self.log_level) .with_exclusive(self.exclusive) .with_max_read_transaction_duration(max_read_transaction_duration) + .with_geometry(self.max_size, self.growth_step) } } @@ -89,10 +97,84 @@ impl TypedValueParser for LogLevelValueParser { Some(Box::new(values)) } } + +/// Size in bytes. +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] +pub struct ByteSize(pub usize); + +impl From for usize { + fn from(s: ByteSize) -> Self { + s.0 + } +} + +impl FromStr for ByteSize { + type Err = String; + + fn from_str(s: &str) -> Result { + let s = s.trim().to_uppercase(); + let parts: Vec<&str> = s.split_whitespace().collect(); + + let (num_str, unit) = match parts.len() { + 1 => { + let (num, unit) = + s.split_at(s.find(|c: char| c.is_alphabetic()).unwrap_or(s.len())); + (num, unit) + } + 2 => (parts[0], parts[1]), + _ => { + return Err("Invalid format. Use '' or ' '.".to_string()) + } + }; + + let num: usize = num_str.parse().map_err(|_| "Invalid number".to_string())?; + + let multiplier = match unit { + "B" | "" => 1, // Assume bytes if no unit is specified + "KB" => 1024, + "MB" => 1024 * 1024, + "GB" => 1024 * 1024 * 1024, + "TB" => 1024 * 1024 * 1024 * 1024, + _ => return Err(format!("Invalid unit: {}. Use B, KB, MB, GB, or TB.", unit)), + }; + + Ok(Self(num * multiplier)) + } +} + +impl fmt::Display for ByteSize { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + const KB: usize = 1024; + const MB: usize = KB * 1024; + const GB: usize = MB * 1024; + const TB: usize = GB * 1024; + + let (size, unit) = if self.0 >= TB { + (self.0 as f64 / TB as f64, "TB") + } else if self.0 >= GB { + (self.0 as f64 / GB as f64, "GB") + } else if self.0 >= MB { + (self.0 as f64 / MB as f64, "MB") + } else if self.0 >= KB { + (self.0 as f64 / KB as f64, "KB") + } else { + (self.0 as f64, "B") + }; + + write!(f, "{:.2}{}", size, unit) + } +} + +/// Value parser function that supports various formats. 
+fn parse_byte_size(s: &str) -> Result { + s.parse::().map(Into::into) +} + #[cfg(test)] mod tests { use super::*; use clap::Parser; + use reth_db::mdbx::{GIGABYTE, KILOBYTE, MEGABYTE, TERABYTE}; /// A helper type to parse Args more easily #[derive(Parser)] @@ -108,6 +190,101 @@ mod tests { assert_eq!(args, default_args); } + #[test] + fn test_command_parser_with_valid_max_size() { + let cmd = CommandParser::::try_parse_from([ + "reth", + "--db.max-size", + "4398046511104", + ]) + .unwrap(); + assert_eq!(cmd.args.max_size, Some(TERABYTE * 4)); + } + + #[test] + fn test_command_parser_with_invalid_max_size() { + let result = + CommandParser::::try_parse_from(["reth", "--db.max-size", "invalid"]); + assert!(result.is_err()); + } + + #[test] + fn test_command_parser_with_valid_growth_step() { + let cmd = CommandParser::::try_parse_from([ + "reth", + "--db.growth-step", + "4294967296", + ]) + .unwrap(); + assert_eq!(cmd.args.growth_step, Some(GIGABYTE * 4)); + } + + #[test] + fn test_command_parser_with_invalid_growth_step() { + let result = + CommandParser::::try_parse_from(["reth", "--db.growth-step", "invalid"]); + assert!(result.is_err()); + } + + #[test] + fn test_command_parser_with_valid_max_size_and_growth_step_from_str() { + let cmd = CommandParser::::try_parse_from([ + "reth", + "--db.max-size", + "2TB", + "--db.growth-step", + "1GB", + ]) + .unwrap(); + assert_eq!(cmd.args.max_size, Some(TERABYTE * 2)); + assert_eq!(cmd.args.growth_step, Some(GIGABYTE)); + + let cmd = CommandParser::::try_parse_from([ + "reth", + "--db.max-size", + "12MB", + "--db.growth-step", + "2KB", + ]) + .unwrap(); + assert_eq!(cmd.args.max_size, Some(MEGABYTE * 12)); + assert_eq!(cmd.args.growth_step, Some(KILOBYTE * 2)); + + // with spaces + let cmd = CommandParser::::try_parse_from([ + "reth", + "--db.max-size", + "12 MB", + "--db.growth-step", + "2 KB", + ]) + .unwrap(); + assert_eq!(cmd.args.max_size, Some(MEGABYTE * 12)); + assert_eq!(cmd.args.growth_step, Some(KILOBYTE * 2)); + + let cmd = CommandParser::::try_parse_from([ + "reth", + "--db.max-size", + "1073741824", + "--db.growth-step", + "1048576", + ]) + .unwrap(); + assert_eq!(cmd.args.max_size, Some(GIGABYTE)); + assert_eq!(cmd.args.growth_step, Some(MEGABYTE)); + } + + #[test] + fn test_command_parser_max_size_and_growth_step_from_str_invalid_unit() { + let result = + CommandParser::::try_parse_from(["reth", "--db.growth-step", "1 PB"]); + assert!(result.is_err()); + + let result = + CommandParser::::try_parse_from(["reth", "--db.max-size", "2PB"]); + assert!(result.is_err()); + } + #[test] fn test_possible_values() { // Initialize the LogLevelValueParser diff --git a/crates/storage/db/src/implementation/mdbx/mod.rs b/crates/storage/db/src/implementation/mdbx/mod.rs index 65b804e6a..92ad20272 100644 --- a/crates/storage/db/src/implementation/mdbx/mod.rs +++ b/crates/storage/db/src/implementation/mdbx/mod.rs @@ -23,7 +23,7 @@ use reth_libmdbx::{ use reth_storage_errors::db::LogLevel; use reth_tracing::tracing::error; use std::{ - ops::Deref, + ops::{Deref, Range}, path::Path, sync::Arc, time::{SystemTime, UNIX_EPOCH}, @@ -33,8 +33,14 @@ use tx::Tx; pub mod cursor; pub mod tx; -const GIGABYTE: usize = 1024 * 1024 * 1024; -const TERABYTE: usize = GIGABYTE * 1024; +/// 1 KB in bytes +pub const KILOBYTE: usize = 1024; +/// 1 MB in bytes +pub const MEGABYTE: usize = KILOBYTE * 1024; +/// 1 GB in bytes +pub const GIGABYTE: usize = MEGABYTE * 1024; +/// 1 TB in bytes +pub const TERABYTE: usize = GIGABYTE * 1024; /// MDBX allows up to 32767 readers 
(`MDBX_READERS_LIMIT`), but we limit it to slightly below that const DEFAULT_MAX_READERS: u64 = 32_000; @@ -64,6 +70,8 @@ impl DatabaseEnvKind { pub struct DatabaseArguments { /// Client version that accesses the database. client_version: ClientVersion, + /// Database geometry settings. + geometry: Geometry>, /// Database log level. If [None], the default value is used. log_level: Option, /// Maximum duration of a read transaction. If [None], the default value is used. @@ -93,15 +101,37 @@ pub struct DatabaseArguments { impl DatabaseArguments { /// Create new database arguments with given client version. - pub const fn new(client_version: ClientVersion) -> Self { + pub fn new(client_version: ClientVersion) -> Self { Self { client_version, + geometry: Geometry { + size: Some(0..(4 * TERABYTE)), + growth_step: Some(4 * GIGABYTE as isize), + shrink_threshold: Some(0), + page_size: Some(PageSize::Set(default_page_size())), + }, log_level: None, max_read_transaction_duration: None, exclusive: None, } } + /// Set the geometry. + /// + /// # Arguments + /// + /// * `max_size` - Maximum database size in bytes + /// * `growth_step` - Database growth step in bytes + pub fn with_geometry(mut self, max_size: Option, growth_step: Option) -> Self { + self.geometry = Geometry { + size: max_size.map(|size| 0..size), + growth_step: growth_step.map(|growth_step| growth_step as isize), + shrink_threshold: Some(0), + page_size: Some(PageSize::Set(default_page_size())), + }; + self + } + /// Set the log level. pub const fn with_log_level(mut self, log_level: Option) -> Self { self.log_level = log_level; @@ -278,15 +308,7 @@ impl DatabaseEnv { // environment creation. debug_assert!(Tables::ALL.len() <= 256, "number of tables exceed max dbs"); inner_env.set_max_dbs(256); - inner_env.set_geometry(Geometry { - // Maximum database size of 4 terabytes - size: Some(0..(4 * TERABYTE)), - // We grow the database in increments of 4 gigabytes - growth_step: Some(4 * GIGABYTE as isize), - // The database never shrinks - shrink_threshold: Some(0), - page_size: Some(PageSize::Set(default_page_size())), - }); + inner_env.set_geometry(args.geometry); fn is_current_process(id: u32) -> bool { #[cfg(unix)] From 4a8799f98bba7aa80a8c333f9b32f23221cb0cb8 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Tue, 29 Oct 2024 13:17:48 +0100 Subject: [PATCH 335/425] consensus: add unit tests for `ForkchoiceStateTracker` and `ForkchoiceStateHash` (#12077) --- .../consensus/beacon/src/engine/forkchoice.rs | 269 ++++++++++++++++-- 1 file changed, 243 insertions(+), 26 deletions(-) diff --git a/crates/consensus/beacon/src/engine/forkchoice.rs b/crates/consensus/beacon/src/engine/forkchoice.rs index 7e49714ba..a9d930173 100644 --- a/crates/consensus/beacon/src/engine/forkchoice.rs +++ b/crates/consensus/beacon/src/engine/forkchoice.rs @@ -8,7 +8,6 @@ pub struct ForkchoiceStateTracker { /// /// Caution: this can be invalid. latest: Option, - /// Tracks the latest forkchoice state that we received to which we need to sync. last_syncing: Option, /// The latest valid forkchoice state that we received and processed as valid. 
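A minimal usage sketch for the `--db.max-size`/`--db.growth-step` flags and the `DatabaseArguments::with_geometry` builder introduced in the database-args patch above. The `DatabaseArguments`, `GIGABYTE` and `TERABYTE` imports match what that patch exposes from `reth_db::mdbx`; the `ClientVersion` import path and the wrapper function are assumptions for illustration only.

    use reth_db::mdbx::{DatabaseArguments, GIGABYTE, TERABYTE};
    use reth_db::models::ClientVersion; // import path assumed for this sketch

    /// Roughly what `reth node --db.max-size 4TB --db.growth-step 4GB` resolves to:
    /// a 4 TiB maximum map size, grown in 4 GiB increments.
    fn geometry_args(version: ClientVersion) -> DatabaseArguments {
        DatabaseArguments::new(version).with_geometry(Some(4 * TERABYTE), Some(4 * GIGABYTE))
    }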
@@ -48,19 +47,19 @@ impl ForkchoiceStateTracker { /// Returns whether the latest received FCU is valid: [`ForkchoiceStatus::Valid`] #[allow(dead_code)] pub(crate) fn is_latest_valid(&self) -> bool { - self.latest_status().map(|s| s.is_valid()).unwrap_or(false) + self.latest_status().map_or(false, |s| s.is_valid()) } /// Returns whether the latest received FCU is syncing: [`ForkchoiceStatus::Syncing`] #[allow(dead_code)] pub(crate) fn is_latest_syncing(&self) -> bool { - self.latest_status().map(|s| s.is_syncing()).unwrap_or(false) + self.latest_status().map_or(false, |s| s.is_syncing()) } /// Returns whether the latest received FCU is syncing: [`ForkchoiceStatus::Invalid`] #[allow(dead_code)] pub(crate) fn is_latest_invalid(&self) -> bool { - self.latest_status().map(|s| s.is_invalid()).unwrap_or(false) + self.latest_status().map_or(false, |s| s.is_invalid()) } /// Returns the last valid head hash. @@ -75,32 +74,28 @@ impl ForkchoiceStateTracker { self.last_syncing.as_ref().map(|s| s.head_block_hash) } - /// Returns the latest received `ForkchoiceState`. + /// Returns the latest received [`ForkchoiceState`]. /// /// Caution: this can be invalid. pub const fn latest_state(&self) -> Option { self.last_valid } - /// Returns the last valid `ForkchoiceState`. + /// Returns the last valid [`ForkchoiceState`]. pub const fn last_valid_state(&self) -> Option { self.last_valid } /// Returns the last valid finalized hash. /// - /// This will return [`None`], if either there is no valid finalized forkchoice state, or the - /// finalized hash for the latest valid forkchoice state is zero. + /// This will return [`None`]: + /// - If either there is no valid finalized forkchoice state, + /// - Or the finalized hash for the latest valid forkchoice state is zero. #[inline] pub fn last_valid_finalized(&self) -> Option { - self.last_valid.and_then(|state| { - // if the hash is zero then we should act like there is no finalized hash - if state.finalized_block_hash.is_zero() { - None - } else { - Some(state.finalized_block_hash) - } - }) + self.last_valid + .filter(|state| !state.finalized_block_hash.is_zero()) + .map(|state| state.finalized_block_hash) } /// Returns the last received `ForkchoiceState` to which we need to sync. @@ -110,18 +105,14 @@ impl ForkchoiceStateTracker { /// Returns the sync target finalized hash. /// - /// This will return [`None`], if either there is no sync target forkchoice state, or the - /// finalized hash for the sync target forkchoice state is zero. + /// This will return [`None`]: + /// - If either there is no sync target forkchoice state, + /// - Or the finalized hash for the sync target forkchoice state is zero. #[inline] pub fn sync_target_finalized(&self) -> Option { - self.last_syncing.and_then(|state| { - // if the hash is zero then we should act like there is no finalized hash - if state.finalized_block_hash.is_zero() { - None - } else { - Some(state.finalized_block_hash) - } - }) + self.last_syncing + .filter(|state| !state.finalized_block_hash.is_zero()) + .map(|state| state.finalized_block_hash) } /// Returns true if no forkchoice state has been received yet. 
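The `and_then` → `filter`/`map` rewrites in the hunks above are behaviour-preserving; a self-contained sketch of the equivalence, using a hypothetical one-field `State` stand-in for `ForkchoiceState`:

    use alloy_primitives::B256;

    #[derive(Clone, Copy)]
    struct State { finalized_block_hash: B256 }

    // Old shape: explicit zero-hash check inside `and_then`.
    fn before(v: Option<State>) -> Option<B256> {
        v.and_then(|s| if s.finalized_block_hash.is_zero() { None } else { Some(s.finalized_block_hash) })
    }

    // New shape: `filter` drops the zero-hash case, `map` projects the hash.
    fn after(v: Option<State>) -> Option<B256> {
        v.filter(|s| !s.finalized_block_hash.is_zero()).map(|s| s.finalized_block_hash)
    }

    // Both return `None` when the state is missing or its finalized hash is zero,
    // and `Some(hash)` otherwise.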
@@ -222,3 +213,229 @@ impl AsRef for ForkchoiceStateHash { } } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_forkchoice_state_tracker_set_latest_valid() { + let mut tracker = ForkchoiceStateTracker::default(); + + // Latest state is None + assert!(tracker.latest_status().is_none()); + + // Create a valid ForkchoiceState + let state = ForkchoiceState { + head_block_hash: B256::from_slice(&[1; 32]), + safe_block_hash: B256::from_slice(&[2; 32]), + finalized_block_hash: B256::from_slice(&[3; 32]), + }; + let status = ForkchoiceStatus::Valid; + + tracker.set_latest(state, status); + + // Assert that the latest state is set + assert!(tracker.latest.is_some()); + assert_eq!(tracker.latest.as_ref().unwrap().state, state); + + // Assert that last valid state is updated + assert!(tracker.last_valid.is_some()); + assert_eq!(tracker.last_valid.as_ref().unwrap(), &state); + + // Assert that last syncing state is None + assert!(tracker.last_syncing.is_none()); + + // Test when there is a latest status and it is valid + assert_eq!(tracker.latest_status(), Some(ForkchoiceStatus::Valid)); + } + + #[test] + fn test_forkchoice_state_tracker_set_latest_syncing() { + let mut tracker = ForkchoiceStateTracker::default(); + + // Create a syncing ForkchoiceState + let state = ForkchoiceState { + head_block_hash: B256::from_slice(&[1; 32]), + safe_block_hash: B256::from_slice(&[2; 32]), + finalized_block_hash: B256::from_slice(&[0; 32]), // Zero to simulate not finalized + }; + let status = ForkchoiceStatus::Syncing; + + tracker.set_latest(state, status); + + // Assert that the latest state is set + assert!(tracker.latest.is_some()); + assert_eq!(tracker.latest.as_ref().unwrap().state, state); + + // Assert that last valid state is None since the status is syncing + assert!(tracker.last_valid.is_none()); + + // Assert that last syncing state is updated + assert!(tracker.last_syncing.is_some()); + assert_eq!(tracker.last_syncing.as_ref().unwrap(), &state); + + // Test when there is a latest status and it is syncing + assert_eq!(tracker.latest_status(), Some(ForkchoiceStatus::Syncing)); + } + + #[test] + fn test_forkchoice_state_tracker_set_latest_invalid() { + let mut tracker = ForkchoiceStateTracker::default(); + + // Create an invalid ForkchoiceState + let state = ForkchoiceState { + head_block_hash: B256::from_slice(&[1; 32]), + safe_block_hash: B256::from_slice(&[2; 32]), + finalized_block_hash: B256::from_slice(&[3; 32]), + }; + let status = ForkchoiceStatus::Invalid; + + tracker.set_latest(state, status); + + // Assert that the latest state is set + assert!(tracker.latest.is_some()); + assert_eq!(tracker.latest.as_ref().unwrap().state, state); + + // Assert that last valid state is None since the status is invalid + assert!(tracker.last_valid.is_none()); + + // Assert that last syncing state is None since the status is invalid + assert!(tracker.last_syncing.is_none()); + + // Test when there is a latest status and it is invalid + assert_eq!(tracker.latest_status(), Some(ForkchoiceStatus::Invalid)); + } + + #[test] + fn test_forkchoice_state_tracker_sync_target() { + let mut tracker = ForkchoiceStateTracker::default(); + + // Test when there is no last syncing state (should return None) + assert!(tracker.sync_target().is_none()); + + // Set a last syncing forkchoice state + let state = ForkchoiceState { + head_block_hash: B256::from_slice(&[1; 32]), + safe_block_hash: B256::from_slice(&[2; 32]), + finalized_block_hash: B256::from_slice(&[3; 32]), + }; + tracker.last_syncing = 
Some(state); + + // Test when the last syncing state is set (should return the head block hash) + assert_eq!(tracker.sync_target(), Some(B256::from_slice(&[1; 32]))); + } + + #[test] + fn test_forkchoice_state_tracker_last_valid_finalized() { + let mut tracker = ForkchoiceStateTracker::default(); + + // No valid finalized state (should return None) + assert!(tracker.last_valid_finalized().is_none()); + + // Valid finalized state, but finalized hash is zero (should return None) + let zero_finalized_state = ForkchoiceState { + head_block_hash: B256::ZERO, + safe_block_hash: B256::ZERO, + finalized_block_hash: B256::ZERO, // Zero finalized hash + }; + tracker.last_valid = Some(zero_finalized_state); + assert!(tracker.last_valid_finalized().is_none()); + + // Valid finalized state with non-zero finalized hash (should return finalized hash) + let valid_finalized_state = ForkchoiceState { + head_block_hash: B256::from_slice(&[1; 32]), + safe_block_hash: B256::from_slice(&[2; 32]), + finalized_block_hash: B256::from_slice(&[123; 32]), // Non-zero finalized hash + }; + tracker.last_valid = Some(valid_finalized_state); + assert_eq!(tracker.last_valid_finalized(), Some(B256::from_slice(&[123; 32]))); + + // Reset the last valid state to None + tracker.last_valid = None; + assert!(tracker.last_valid_finalized().is_none()); + } + + #[test] + fn test_forkchoice_state_tracker_sync_target_finalized() { + let mut tracker = ForkchoiceStateTracker::default(); + + // No sync target state (should return None) + assert!(tracker.sync_target_finalized().is_none()); + + // Sync target state with finalized hash as zero (should return None) + let zero_finalized_sync_target = ForkchoiceState { + head_block_hash: B256::from_slice(&[1; 32]), + safe_block_hash: B256::from_slice(&[2; 32]), + finalized_block_hash: B256::ZERO, // Zero finalized hash + }; + tracker.last_syncing = Some(zero_finalized_sync_target); + assert!(tracker.sync_target_finalized().is_none()); + + // Sync target state with non-zero finalized hash (should return the hash) + let valid_sync_target = ForkchoiceState { + head_block_hash: B256::from_slice(&[1; 32]), + safe_block_hash: B256::from_slice(&[2; 32]), + finalized_block_hash: B256::from_slice(&[22; 32]), // Non-zero finalized hash + }; + tracker.last_syncing = Some(valid_sync_target); + assert_eq!(tracker.sync_target_finalized(), Some(B256::from_slice(&[22; 32]))); + + // Reset the last sync target state to None + tracker.last_syncing = None; + assert!(tracker.sync_target_finalized().is_none()); + } + + #[test] + fn test_forkchoice_state_tracker_is_empty() { + let mut forkchoice = ForkchoiceStateTracker::default(); + + // Initially, no forkchoice state has been received, so it should be empty. + assert!(forkchoice.is_empty()); + + // After setting a forkchoice state, it should no longer be empty. + forkchoice.set_latest(ForkchoiceState::default(), ForkchoiceStatus::Valid); + assert!(!forkchoice.is_empty()); + + // Reset the forkchoice latest, it should be empty again. 
+ forkchoice.latest = None; + assert!(forkchoice.is_empty()); + } + + #[test] + fn test_forkchoice_state_hash_find() { + // Define example hashes + let head_hash = B256::random(); + let safe_hash = B256::random(); + let finalized_hash = B256::random(); + let non_matching_hash = B256::random(); + + // Create a ForkchoiceState with specific hashes + let state = ForkchoiceState { + head_block_hash: head_hash, + safe_block_hash: safe_hash, + finalized_block_hash: finalized_hash, + }; + + // Test finding the head hash + assert_eq!( + ForkchoiceStateHash::find(&state, head_hash), + Some(ForkchoiceStateHash::Head(head_hash)) + ); + + // Test finding the safe hash + assert_eq!( + ForkchoiceStateHash::find(&state, safe_hash), + Some(ForkchoiceStateHash::Safe(safe_hash)) + ); + + // Test finding the finalized hash + assert_eq!( + ForkchoiceStateHash::find(&state, finalized_hash), + Some(ForkchoiceStateHash::Finalized(finalized_hash)) + ); + + // Test with a hash that doesn't match any of the hashes in ForkchoiceState + assert_eq!(ForkchoiceStateHash::find(&state, non_matching_hash), None); + } +} From 6f3600dc38f7548a40999cc4c479f84d69326b76 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 29 Oct 2024 13:58:57 +0100 Subject: [PATCH 336/425] Revert "feat: add geometry to database args" (#12164) --- book/cli/reth/db.md | 6 - book/cli/reth/db/diff.md | 6 - book/cli/reth/debug/build-block.md | 6 - book/cli/reth/debug/execution.md | 6 - book/cli/reth/debug/in-memory-merkle.md | 6 - book/cli/reth/debug/merkle.md | 6 - book/cli/reth/debug/replay-engine.md | 6 - book/cli/reth/import.md | 6 - book/cli/reth/init-state.md | 6 - book/cli/reth/init.md | 6 - book/cli/reth/node.md | 6 - book/cli/reth/p2p.md | 6 - book/cli/reth/prune.md | 6 - book/cli/reth/recover/storage-tries.md | 6 - book/cli/reth/stage/drop.md | 6 - book/cli/reth/stage/dump.md | 6 - book/cli/reth/stage/run.md | 6 - book/cli/reth/stage/unwind.md | 6 - crates/node/core/src/args/database.rs | 183 +----------------- .../storage/db/src/implementation/mdbx/mod.rs | 48 ++--- 20 files changed, 16 insertions(+), 323 deletions(-) diff --git a/book/cli/reth/db.md b/book/cli/reth/db.md index 17a6de4e6..f9a8a158a 100644 --- a/book/cli/reth/db.md +++ b/book/cli/reth/db.md @@ -81,12 +81,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/db/diff.md b/book/cli/reth/db/diff.md index efb9e7d32..f57c6ac36 100644 --- a/book/cli/reth/db/diff.md +++ b/book/cli/reth/db/diff.md @@ -45,12 +45,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/debug/build-block.md b/book/cli/reth/debug/build-block.md index 7bceb62b9..2e6d637d5 100644 --- a/book/cli/reth/debug/build-block.md +++ b/book/cli/reth/debug/build-block.md @@ -69,12 +69,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/debug/execution.md b/book/cli/reth/debug/execution.md index b8e1ce05d..9ca74897c 100644 --- 
a/book/cli/reth/debug/execution.md +++ b/book/cli/reth/debug/execution.md @@ -69,12 +69,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/debug/in-memory-merkle.md b/book/cli/reth/debug/in-memory-merkle.md index a183db997..3e322a691 100644 --- a/book/cli/reth/debug/in-memory-merkle.md +++ b/book/cli/reth/debug/in-memory-merkle.md @@ -69,12 +69,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/debug/merkle.md b/book/cli/reth/debug/merkle.md index d9a72794e..d701803b8 100644 --- a/book/cli/reth/debug/merkle.md +++ b/book/cli/reth/debug/merkle.md @@ -69,12 +69,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/debug/replay-engine.md b/book/cli/reth/debug/replay-engine.md index b7a1266d3..dd587620a 100644 --- a/book/cli/reth/debug/replay-engine.md +++ b/book/cli/reth/debug/replay-engine.md @@ -69,12 +69,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/import.md b/book/cli/reth/import.md index 82a521ac0..28e085bda 100644 --- a/book/cli/reth/import.md +++ b/book/cli/reth/import.md @@ -69,12 +69,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/init-state.md b/book/cli/reth/init-state.md index 533c0f8f8..3e0735167 100644 --- a/book/cli/reth/init-state.md +++ b/book/cli/reth/init-state.md @@ -69,12 +69,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/init.md b/book/cli/reth/init.md index ebe2a8386..cd01accc0 100644 --- a/book/cli/reth/init.md +++ b/book/cli/reth/init.md @@ -69,12 +69,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/node.md b/book/cli/reth/node.md index 52f597279..a3ff8f6a5 100644 --- a/book/cli/reth/node.md +++ b/book/cli/reth/node.md @@ -590,12 +590,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/p2p.md b/book/cli/reth/p2p.md index 33639042a..603b451d9 100644 --- 
a/book/cli/reth/p2p.md +++ b/book/cli/reth/p2p.md @@ -247,12 +247,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/prune.md b/book/cli/reth/prune.md index 41684ecd9..ed16197a7 100644 --- a/book/cli/reth/prune.md +++ b/book/cli/reth/prune.md @@ -69,12 +69,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/recover/storage-tries.md b/book/cli/reth/recover/storage-tries.md index 1afe94f55..ecdaabe77 100644 --- a/book/cli/reth/recover/storage-tries.md +++ b/book/cli/reth/recover/storage-tries.md @@ -69,12 +69,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/stage/drop.md b/book/cli/reth/stage/drop.md index c22d6be66..399b3818c 100644 --- a/book/cli/reth/stage/drop.md +++ b/book/cli/reth/stage/drop.md @@ -69,12 +69,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/stage/dump.md b/book/cli/reth/stage/dump.md index e3df5bf2d..4b3de3fb1 100644 --- a/book/cli/reth/stage/dump.md +++ b/book/cli/reth/stage/dump.md @@ -76,12 +76,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/stage/run.md b/book/cli/reth/stage/run.md index 204efc968..9da3ce0de 100644 --- a/book/cli/reth/stage/run.md +++ b/book/cli/reth/stage/run.md @@ -69,12 +69,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/stage/unwind.md b/book/cli/reth/stage/unwind.md index cb72b9313..700ab3d7e 100644 --- a/book/cli/reth/stage/unwind.md +++ b/book/cli/reth/stage/unwind.md @@ -74,12 +74,6 @@ Database: [possible values: true, false] - --db.max-size - Maximum database size (e.g., 4TB, 8MB) - - --db.growth-step - Database growth step (e.g., 4GB, 4KB) - --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/crates/node/core/src/args/database.rs b/crates/node/core/src/args/database.rs index 16ba61935..0eec6639a 100644 --- a/crates/node/core/src/args/database.rs +++ b/crates/node/core/src/args/database.rs @@ -1,6 +1,6 @@ //! clap [Args](clap::Args) for database configuration -use std::{fmt, str::FromStr, time::Duration}; +use std::time::Duration; use crate::version::default_client_version; use clap::{ @@ -22,12 +22,6 @@ pub struct DatabaseArgs { /// NFS volume. 
#[arg(long = "db.exclusive")] pub exclusive: Option, - /// Maximum database size (e.g., 4TB, 8MB) - #[arg(long = "db.max-size", value_parser = parse_byte_size)] - pub max_size: Option, - /// Database growth step (e.g., 4GB, 4KB) - #[arg(long = "db.growth-step", value_parser = parse_byte_size)] - pub growth_step: Option, /// Read transaction timeout in seconds, 0 means no timeout. #[arg(long = "db.read-transaction-timeout")] pub read_transaction_timeout: Option, @@ -39,9 +33,8 @@ impl DatabaseArgs { self.get_database_args(default_client_version()) } - /// Returns the database arguments with configured log level, client version, - /// max read transaction duration, and geometry. - pub fn get_database_args( + /// Returns the database arguments with configured log level and given client version. + pub const fn get_database_args( &self, client_version: ClientVersion, ) -> reth_db::mdbx::DatabaseArguments { @@ -55,7 +48,6 @@ impl DatabaseArgs { .with_log_level(self.log_level) .with_exclusive(self.exclusive) .with_max_read_transaction_duration(max_read_transaction_duration) - .with_geometry(self.max_size, self.growth_step) } } @@ -97,84 +89,10 @@ impl TypedValueParser for LogLevelValueParser { Some(Box::new(values)) } } - -/// Size in bytes. -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] -pub struct ByteSize(pub usize); - -impl From for usize { - fn from(s: ByteSize) -> Self { - s.0 - } -} - -impl FromStr for ByteSize { - type Err = String; - - fn from_str(s: &str) -> Result { - let s = s.trim().to_uppercase(); - let parts: Vec<&str> = s.split_whitespace().collect(); - - let (num_str, unit) = match parts.len() { - 1 => { - let (num, unit) = - s.split_at(s.find(|c: char| c.is_alphabetic()).unwrap_or(s.len())); - (num, unit) - } - 2 => (parts[0], parts[1]), - _ => { - return Err("Invalid format. Use '' or ' '.".to_string()) - } - }; - - let num: usize = num_str.parse().map_err(|_| "Invalid number".to_string())?; - - let multiplier = match unit { - "B" | "" => 1, // Assume bytes if no unit is specified - "KB" => 1024, - "MB" => 1024 * 1024, - "GB" => 1024 * 1024 * 1024, - "TB" => 1024 * 1024 * 1024 * 1024, - _ => return Err(format!("Invalid unit: {}. Use B, KB, MB, GB, or TB.", unit)), - }; - - Ok(Self(num * multiplier)) - } -} - -impl fmt::Display for ByteSize { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - const KB: usize = 1024; - const MB: usize = KB * 1024; - const GB: usize = MB * 1024; - const TB: usize = GB * 1024; - - let (size, unit) = if self.0 >= TB { - (self.0 as f64 / TB as f64, "TB") - } else if self.0 >= GB { - (self.0 as f64 / GB as f64, "GB") - } else if self.0 >= MB { - (self.0 as f64 / MB as f64, "MB") - } else if self.0 >= KB { - (self.0 as f64 / KB as f64, "KB") - } else { - (self.0 as f64, "B") - }; - - write!(f, "{:.2}{}", size, unit) - } -} - -/// Value parser function that supports various formats. 
-fn parse_byte_size(s: &str) -> Result { - s.parse::().map(Into::into) -} - #[cfg(test)] mod tests { use super::*; use clap::Parser; - use reth_db::mdbx::{GIGABYTE, KILOBYTE, MEGABYTE, TERABYTE}; /// A helper type to parse Args more easily #[derive(Parser)] @@ -190,101 +108,6 @@ mod tests { assert_eq!(args, default_args); } - #[test] - fn test_command_parser_with_valid_max_size() { - let cmd = CommandParser::::try_parse_from([ - "reth", - "--db.max-size", - "4398046511104", - ]) - .unwrap(); - assert_eq!(cmd.args.max_size, Some(TERABYTE * 4)); - } - - #[test] - fn test_command_parser_with_invalid_max_size() { - let result = - CommandParser::::try_parse_from(["reth", "--db.max-size", "invalid"]); - assert!(result.is_err()); - } - - #[test] - fn test_command_parser_with_valid_growth_step() { - let cmd = CommandParser::::try_parse_from([ - "reth", - "--db.growth-step", - "4294967296", - ]) - .unwrap(); - assert_eq!(cmd.args.growth_step, Some(GIGABYTE * 4)); - } - - #[test] - fn test_command_parser_with_invalid_growth_step() { - let result = - CommandParser::::try_parse_from(["reth", "--db.growth-step", "invalid"]); - assert!(result.is_err()); - } - - #[test] - fn test_command_parser_with_valid_max_size_and_growth_step_from_str() { - let cmd = CommandParser::::try_parse_from([ - "reth", - "--db.max-size", - "2TB", - "--db.growth-step", - "1GB", - ]) - .unwrap(); - assert_eq!(cmd.args.max_size, Some(TERABYTE * 2)); - assert_eq!(cmd.args.growth_step, Some(GIGABYTE)); - - let cmd = CommandParser::::try_parse_from([ - "reth", - "--db.max-size", - "12MB", - "--db.growth-step", - "2KB", - ]) - .unwrap(); - assert_eq!(cmd.args.max_size, Some(MEGABYTE * 12)); - assert_eq!(cmd.args.growth_step, Some(KILOBYTE * 2)); - - // with spaces - let cmd = CommandParser::::try_parse_from([ - "reth", - "--db.max-size", - "12 MB", - "--db.growth-step", - "2 KB", - ]) - .unwrap(); - assert_eq!(cmd.args.max_size, Some(MEGABYTE * 12)); - assert_eq!(cmd.args.growth_step, Some(KILOBYTE * 2)); - - let cmd = CommandParser::::try_parse_from([ - "reth", - "--db.max-size", - "1073741824", - "--db.growth-step", - "1048576", - ]) - .unwrap(); - assert_eq!(cmd.args.max_size, Some(GIGABYTE)); - assert_eq!(cmd.args.growth_step, Some(MEGABYTE)); - } - - #[test] - fn test_command_parser_max_size_and_growth_step_from_str_invalid_unit() { - let result = - CommandParser::::try_parse_from(["reth", "--db.growth-step", "1 PB"]); - assert!(result.is_err()); - - let result = - CommandParser::::try_parse_from(["reth", "--db.max-size", "2PB"]); - assert!(result.is_err()); - } - #[test] fn test_possible_values() { // Initialize the LogLevelValueParser diff --git a/crates/storage/db/src/implementation/mdbx/mod.rs b/crates/storage/db/src/implementation/mdbx/mod.rs index 92ad20272..65b804e6a 100644 --- a/crates/storage/db/src/implementation/mdbx/mod.rs +++ b/crates/storage/db/src/implementation/mdbx/mod.rs @@ -23,7 +23,7 @@ use reth_libmdbx::{ use reth_storage_errors::db::LogLevel; use reth_tracing::tracing::error; use std::{ - ops::{Deref, Range}, + ops::Deref, path::Path, sync::Arc, time::{SystemTime, UNIX_EPOCH}, @@ -33,14 +33,8 @@ use tx::Tx; pub mod cursor; pub mod tx; -/// 1 KB in bytes -pub const KILOBYTE: usize = 1024; -/// 1 MB in bytes -pub const MEGABYTE: usize = KILOBYTE * 1024; -/// 1 GB in bytes -pub const GIGABYTE: usize = MEGABYTE * 1024; -/// 1 TB in bytes -pub const TERABYTE: usize = GIGABYTE * 1024; +const GIGABYTE: usize = 1024 * 1024 * 1024; +const TERABYTE: usize = GIGABYTE * 1024; /// MDBX allows up to 32767 readers 
(`MDBX_READERS_LIMIT`), but we limit it to slightly below that const DEFAULT_MAX_READERS: u64 = 32_000; @@ -70,8 +64,6 @@ impl DatabaseEnvKind { pub struct DatabaseArguments { /// Client version that accesses the database. client_version: ClientVersion, - /// Database geometry settings. - geometry: Geometry>, /// Database log level. If [None], the default value is used. log_level: Option, /// Maximum duration of a read transaction. If [None], the default value is used. @@ -101,37 +93,15 @@ pub struct DatabaseArguments { impl DatabaseArguments { /// Create new database arguments with given client version. - pub fn new(client_version: ClientVersion) -> Self { + pub const fn new(client_version: ClientVersion) -> Self { Self { client_version, - geometry: Geometry { - size: Some(0..(4 * TERABYTE)), - growth_step: Some(4 * GIGABYTE as isize), - shrink_threshold: Some(0), - page_size: Some(PageSize::Set(default_page_size())), - }, log_level: None, max_read_transaction_duration: None, exclusive: None, } } - /// Set the geometry. - /// - /// # Arguments - /// - /// * `max_size` - Maximum database size in bytes - /// * `growth_step` - Database growth step in bytes - pub fn with_geometry(mut self, max_size: Option, growth_step: Option) -> Self { - self.geometry = Geometry { - size: max_size.map(|size| 0..size), - growth_step: growth_step.map(|growth_step| growth_step as isize), - shrink_threshold: Some(0), - page_size: Some(PageSize::Set(default_page_size())), - }; - self - } - /// Set the log level. pub const fn with_log_level(mut self, log_level: Option) -> Self { self.log_level = log_level; @@ -308,7 +278,15 @@ impl DatabaseEnv { // environment creation. debug_assert!(Tables::ALL.len() <= 256, "number of tables exceed max dbs"); inner_env.set_max_dbs(256); - inner_env.set_geometry(args.geometry); + inner_env.set_geometry(Geometry { + // Maximum database size of 4 terabytes + size: Some(0..(4 * TERABYTE)), + // We grow the database in increments of 4 gigabytes + growth_step: Some(4 * GIGABYTE as isize), + // The database never shrinks + shrink_threshold: Some(0), + page_size: Some(PageSize::Set(default_page_size())), + }); fn is_current_process(id: u32) -> bool { #[cfg(unix)] From 52328422aad608705e68e118e911362c9adf761d Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Tue, 29 Oct 2024 13:18:12 +0000 Subject: [PATCH 337/425] feat(storage): pass changesets to unwind methods (#7879) --- .../stages/src/stages/hashing_account.rs | 2 +- .../stages/src/stages/hashing_storage.rs | 2 +- .../src/stages/index_account_history.rs | 2 +- .../src/stages/index_storage_history.rs | 2 +- .../src/providers/database/provider.rs | 149 ++++++++++++------ crates/storage/provider/src/traits/hashing.rs | 30 +++- crates/storage/provider/src/traits/history.rs | 27 +++- 7 files changed, 155 insertions(+), 59 deletions(-) diff --git a/crates/stages/stages/src/stages/hashing_account.rs b/crates/stages/stages/src/stages/hashing_account.rs index 14afb37d8..5b4f72097 100644 --- a/crates/stages/stages/src/stages/hashing_account.rs +++ b/crates/stages/stages/src/stages/hashing_account.rs @@ -234,7 +234,7 @@ where input.unwind_block_range_with_threshold(self.commit_threshold); // Aggregate all transition changesets and make a list of accounts that have been changed. 
- provider.unwind_account_hashing(range)?; + provider.unwind_account_hashing_range(range)?; let mut stage_checkpoint = input.checkpoint.account_hashing_stage_checkpoint().unwrap_or_default(); diff --git a/crates/stages/stages/src/stages/hashing_storage.rs b/crates/stages/stages/src/stages/hashing_storage.rs index ef070d30c..dcabbe83e 100644 --- a/crates/stages/stages/src/stages/hashing_storage.rs +++ b/crates/stages/stages/src/stages/hashing_storage.rs @@ -169,7 +169,7 @@ where let (range, unwind_progress, _) = input.unwind_block_range_with_threshold(self.commit_threshold); - provider.unwind_storage_hashing(BlockNumberAddress::range(range))?; + provider.unwind_storage_hashing_range(BlockNumberAddress::range(range))?; let mut stage_checkpoint = input.checkpoint.storage_hashing_stage_checkpoint().unwrap_or_default(); diff --git a/crates/stages/stages/src/stages/index_account_history.rs b/crates/stages/stages/src/stages/index_account_history.rs index 8b10283fb..38c238e5d 100644 --- a/crates/stages/stages/src/stages/index_account_history.rs +++ b/crates/stages/stages/src/stages/index_account_history.rs @@ -134,7 +134,7 @@ where let (range, unwind_progress, _) = input.unwind_block_range_with_threshold(self.commit_threshold); - provider.unwind_account_history_indices(range)?; + provider.unwind_account_history_indices_range(range)?; // from HistoryIndex higher than that number. Ok(UnwindOutput { checkpoint: StageCheckpoint::new(unwind_progress) }) diff --git a/crates/stages/stages/src/stages/index_storage_history.rs b/crates/stages/stages/src/stages/index_storage_history.rs index ac645b8dd..ba61e6312 100644 --- a/crates/stages/stages/src/stages/index_storage_history.rs +++ b/crates/stages/stages/src/stages/index_storage_history.rs @@ -140,7 +140,7 @@ where let (range, unwind_progress, _) = input.unwind_block_range_with_threshold(self.commit_threshold); - provider.unwind_storage_history_indices(BlockNumberAddress::range(range))?; + provider.unwind_storage_history_indices_range(BlockNumberAddress::range(range))?; Ok(UnwindOutput { checkpoint: StageCheckpoint::new(unwind_progress) }) } diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index e59a4f563..2dcc3f92d 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -2639,19 +2639,17 @@ impl StorageTrieWriter for DatabaseProvid } impl HashingWriter for DatabaseProvider { - fn unwind_account_hashing( + fn unwind_account_hashing<'a>( &self, - range: RangeInclusive, + changesets: impl Iterator, ) -> ProviderResult>> { // Aggregate all block changesets and make a list of accounts that have been changed. // Note that collecting and then reversing the order is necessary to ensure that the // changes are applied in the correct order. - let hashed_accounts = self - .tx - .cursor_read::()? - .walk_range(range)? - .map(|entry| entry.map(|(_, e)| (keccak256(e.address), e.info))) - .collect::, _>>()? + let hashed_accounts = changesets + .into_iter() + .map(|(_, e)| (keccak256(e.address), e.info)) + .collect::>() .into_iter() .rev() .collect::>(); @@ -2669,13 +2667,25 @@ impl HashingWriter for DatabaseProvider, + ) -> ProviderResult>> { + let changesets = self + .tx + .cursor_read::()? + .walk_range(range)? 
+ .collect::, _>>()?; + self.unwind_account_hashing(changesets.iter()) + } + fn insert_account_for_hashing( &self, - accounts: impl IntoIterator)>, + changesets: impl IntoIterator)>, ) -> ProviderResult>> { let mut hashed_accounts_cursor = self.tx.cursor_write::()?; let hashed_accounts = - accounts.into_iter().map(|(ad, ac)| (keccak256(ad), ac)).collect::>(); + changesets.into_iter().map(|(ad, ac)| (keccak256(ad), ac)).collect::>(); for (hashed_address, account) in &hashed_accounts { if let Some(account) = account { hashed_accounts_cursor.upsert(*hashed_address, *account)?; @@ -2688,18 +2698,15 @@ impl HashingWriter for DatabaseProvider, + changesets: impl Iterator, ) -> ProviderResult>> { // Aggregate all block changesets and make list of accounts that have been changed. - let mut changesets = self.tx.cursor_read::()?; let mut hashed_storages = changesets - .walk_range(range)? - .map(|entry| { - entry.map(|(BlockNumberAddress((_, address)), storage_entry)| { - (keccak256(address), keccak256(storage_entry.key), storage_entry.value) - }) + .into_iter() + .map(|(BlockNumberAddress((_, address)), storage_entry)| { + (keccak256(address), keccak256(storage_entry.key), storage_entry.value) }) - .collect::, _>>()?; + .collect::>(); hashed_storages.sort_by_key(|(ha, hk, _)| (*ha, *hk)); // Apply values to HashedState, and remove the account if it's None. @@ -2724,6 +2731,18 @@ impl HashingWriter for DatabaseProvider, + ) -> ProviderResult>> { + let changesets = self + .tx + .cursor_read::()? + .walk_range(range)? + .collect::, _>>()?; + self.unwind_storage_hashing(changesets.into_iter()) + } + fn insert_storage_for_hashing( &self, storages: impl IntoIterator)>, @@ -2845,16 +2864,14 @@ impl HashingWriter for DatabaseProvider HistoryWriter for DatabaseProvider { - fn unwind_account_history_indices( + fn unwind_account_history_indices<'a>( &self, - range: RangeInclusive, + changesets: impl Iterator, ) -> ProviderResult { - let mut last_indices = self - .tx - .cursor_read::()? - .walk_range(range)? - .map(|entry| entry.map(|(index, account)| (account.address, index))) - .collect::, _>>()?; + let mut last_indices = changesets + .into_iter() + .map(|(index, account)| (account.address, *index)) + .collect::>(); last_indices.sort_by_key(|(a, _)| *a); // Unwind the account history index. @@ -2881,6 +2898,18 @@ impl HistoryWriter for DatabaseProvider, + ) -> ProviderResult { + let changesets = self + .tx + .cursor_read::()? + .walk_range(range)? + .collect::, _>>()?; + self.unwind_account_history_indices(changesets.iter()) + } + fn insert_account_history_index( &self, account_transitions: impl IntoIterator)>, @@ -2893,16 +2922,12 @@ impl HistoryWriter for DatabaseProvider, + changesets: impl Iterator, ) -> ProviderResult { - let mut storage_changesets = self - .tx - .cursor_read::()? - .walk_range(range)? - .map(|entry| { - entry.map(|(BlockNumberAddress((bn, address)), storage)| (address, storage.key, bn)) - }) - .collect::, _>>()?; + let mut storage_changesets = changesets + .into_iter() + .map(|(BlockNumberAddress((bn, address)), storage)| (address, storage.key, bn)) + .collect::>(); storage_changesets.sort_by_key(|(address, key, _)| (*address, *key)); let mut cursor = self.tx.cursor_write::()?; @@ -2931,6 +2956,18 @@ impl HistoryWriter for DatabaseProvider, + ) -> ProviderResult { + let changesets = self + .tx + .cursor_read::()? + .walk_range(range)? 
+ .collect::, _>>()?; + self.unwind_storage_history_indices(changesets.into_iter()) + } + fn insert_storage_history_index( &self, storage_transitions: impl IntoIterator)>, @@ -2973,10 +3010,14 @@ impl, ) -> ProviderResult { - let storage_range = BlockNumberAddress::range(range.clone()); + let changed_accounts = self + .tx + .cursor_read::()? + .walk_range(range.clone())? + .collect::, _>>()?; // Unwind account hashes. Add changed accounts to account prefix set. - let hashed_addresses = self.unwind_account_hashing(range.clone())?; + let hashed_addresses = self.unwind_account_hashing(changed_accounts.iter())?; let mut account_prefix_set = PrefixSetMut::with_capacity(hashed_addresses.len()); let mut destroyed_accounts = HashSet::default(); for (hashed_address, account) in hashed_addresses { @@ -2987,12 +3028,19 @@ impl()? + .walk_range(storage_range)? + .collect::, _>>()?; // Unwind storage hashes. Add changed account and storage keys to corresponding prefix // sets. let mut storage_prefix_sets = HashMap::::default(); - let storage_entries = self.unwind_storage_hashing(storage_range.clone())?; + let storage_entries = self.unwind_storage_hashing(changed_storages.iter().copied())?; for (hashed_address, hashed_slots) in storage_entries { account_prefix_set.insert(Nibbles::unpack(hashed_address)); let mut storage_prefix_set = PrefixSetMut::with_capacity(hashed_slots.len()); @@ -3003,7 +3051,7 @@ impl, ) -> ProviderResult<()> { - let storage_range = BlockNumberAddress::range(range.clone()); + let changed_accounts = self + .tx + .cursor_read::()? + .walk_range(range.clone())? + .collect::, _>>()?; // Unwind account hashes. Add changed accounts to account prefix set. - let hashed_addresses = self.unwind_account_hashing(range.clone())?; + let hashed_addresses = self.unwind_account_hashing(changed_accounts.iter())?; let mut account_prefix_set = PrefixSetMut::with_capacity(hashed_addresses.len()); let mut destroyed_accounts = HashSet::default(); for (hashed_address, account) in hashed_addresses { @@ -3075,12 +3127,19 @@ impl()? + .walk_range(storage_range)? + .collect::, _>>()?; // Unwind storage hashes. Add changed account and storage keys to corresponding prefix // sets. let mut storage_prefix_sets = HashMap::::default(); - let storage_entries = self.unwind_storage_hashing(storage_range.clone())?; + let storage_entries = self.unwind_storage_hashing(changed_storages.iter().copied())?; for (hashed_address, hashed_slots) in storage_entries { account_prefix_set.insert(Nibbles::unpack(hashed_address)); let mut storage_prefix_set = PrefixSetMut::with_capacity(hashed_slots.len()); @@ -3091,7 +3150,7 @@ impl( &self, - range: RangeInclusive, + changesets: impl Iterator, + ) -> ProviderResult>>; + + /// Unwind and clear account hashing in a given block range. + /// + /// # Returns + /// + /// Set of hashed keys of updated accounts. + fn unwind_account_hashing_range( + &self, + range: impl RangeBounds, ) -> ProviderResult>>; /// Inserts all accounts into [reth_db::tables::AccountsHistory] table. @@ -38,7 +48,17 @@ pub trait HashingWriter: Send + Sync { /// Mapping of hashed keys of updated accounts to their respective updated hashed slots. fn unwind_storage_hashing( &self, - range: Range, + changesets: impl Iterator, + ) -> ProviderResult>>; + + /// Unwind and clear storage hashing in a given block range. + /// + /// # Returns + /// + /// Mapping of hashed keys of updated accounts to their respective updated hashed slots. 
+ fn unwind_storage_hashing_range( + &self, + range: impl RangeBounds, ) -> ProviderResult>>; /// Iterates over storages and inserts them to hashing table. diff --git a/crates/storage/provider/src/traits/history.rs b/crates/storage/provider/src/traits/history.rs index cbf9bece4..4eadd6031 100644 --- a/crates/storage/provider/src/traits/history.rs +++ b/crates/storage/provider/src/traits/history.rs @@ -1,8 +1,9 @@ use alloy_primitives::{Address, BlockNumber, B256}; use auto_impl::auto_impl; -use reth_db_api::models::BlockNumberAddress; +use reth_db::models::{AccountBeforeTx, BlockNumberAddress}; +use reth_primitives::StorageEntry; use reth_storage_errors::provider::ProviderResult; -use std::ops::{Range, RangeInclusive}; +use std::ops::{RangeBounds, RangeInclusive}; /// History Writer #[auto_impl(&, Arc, Box)] @@ -10,9 +11,17 @@ pub trait HistoryWriter: Send + Sync { /// Unwind and clear account history indices. /// /// Returns number of changesets walked. - fn unwind_account_history_indices( + fn unwind_account_history_indices<'a>( &self, - range: RangeInclusive, + changesets: impl Iterator, + ) -> ProviderResult; + + /// Unwind and clear account history indices in a given block range. + /// + /// Returns number of changesets walked. + fn unwind_account_history_indices_range( + &self, + range: impl RangeBounds, ) -> ProviderResult; /// Insert account change index to database. Used inside AccountHistoryIndex stage @@ -26,7 +35,15 @@ pub trait HistoryWriter: Send + Sync { /// Returns number of changesets walked. fn unwind_storage_history_indices( &self, - range: Range, + changesets: impl Iterator, + ) -> ProviderResult; + + /// Unwind and clear storage history indices in a given block range. + /// + /// Returns number of changesets walked. + fn unwind_storage_history_indices_range( + &self, + range: impl RangeBounds, ) -> ProviderResult; /// Insert storage change index to database. 
Used inside StorageHistoryIndex stage From f545877bb82724f60092ec87dbd71b668ab0132e Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 29 Oct 2024 14:23:54 +0100 Subject: [PATCH 338/425] Revert "Revert "feat: add geometry to database args"" (#12165) --- book/cli/reth/db.md | 6 + book/cli/reth/db/diff.md | 6 + book/cli/reth/debug/build-block.md | 6 + book/cli/reth/debug/execution.md | 6 + book/cli/reth/debug/in-memory-merkle.md | 6 + book/cli/reth/debug/merkle.md | 6 + book/cli/reth/debug/replay-engine.md | 6 + book/cli/reth/import.md | 6 + book/cli/reth/init-state.md | 6 + book/cli/reth/init.md | 6 + book/cli/reth/node.md | 6 + book/cli/reth/p2p.md | 6 + book/cli/reth/prune.md | 6 + book/cli/reth/recover/storage-tries.md | 6 + book/cli/reth/stage/drop.md | 6 + book/cli/reth/stage/dump.md | 6 + book/cli/reth/stage/run.md | 6 + book/cli/reth/stage/unwind.md | 6 + crates/node/core/src/args/database.rs | 184 +++++++++++++++++- .../storage/db/src/implementation/mdbx/mod.rs | 48 +++-- crates/storage/libmdbx-rs/src/environment.rs | 2 + 21 files changed, 326 insertions(+), 16 deletions(-) diff --git a/book/cli/reth/db.md b/book/cli/reth/db.md index f9a8a158a..17a6de4e6 100644 --- a/book/cli/reth/db.md +++ b/book/cli/reth/db.md @@ -81,6 +81,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/db/diff.md b/book/cli/reth/db/diff.md index f57c6ac36..efb9e7d32 100644 --- a/book/cli/reth/db/diff.md +++ b/book/cli/reth/db/diff.md @@ -45,6 +45,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/debug/build-block.md b/book/cli/reth/debug/build-block.md index 2e6d637d5..7bceb62b9 100644 --- a/book/cli/reth/debug/build-block.md +++ b/book/cli/reth/debug/build-block.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/debug/execution.md b/book/cli/reth/debug/execution.md index 9ca74897c..b8e1ce05d 100644 --- a/book/cli/reth/debug/execution.md +++ b/book/cli/reth/debug/execution.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/debug/in-memory-merkle.md b/book/cli/reth/debug/in-memory-merkle.md index 3e322a691..a183db997 100644 --- a/book/cli/reth/debug/in-memory-merkle.md +++ b/book/cli/reth/debug/in-memory-merkle.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/debug/merkle.md b/book/cli/reth/debug/merkle.md index d701803b8..d9a72794e 100644 --- a/book/cli/reth/debug/merkle.md +++ b/book/cli/reth/debug/merkle.md @@ -69,6 +69,12 @@ 
Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/debug/replay-engine.md b/book/cli/reth/debug/replay-engine.md index dd587620a..b7a1266d3 100644 --- a/book/cli/reth/debug/replay-engine.md +++ b/book/cli/reth/debug/replay-engine.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/import.md b/book/cli/reth/import.md index 28e085bda..82a521ac0 100644 --- a/book/cli/reth/import.md +++ b/book/cli/reth/import.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/init-state.md b/book/cli/reth/init-state.md index 3e0735167..533c0f8f8 100644 --- a/book/cli/reth/init-state.md +++ b/book/cli/reth/init-state.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/init.md b/book/cli/reth/init.md index cd01accc0..ebe2a8386 100644 --- a/book/cli/reth/init.md +++ b/book/cli/reth/init.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/node.md b/book/cli/reth/node.md index a3ff8f6a5..52f597279 100644 --- a/book/cli/reth/node.md +++ b/book/cli/reth/node.md @@ -590,6 +590,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/p2p.md b/book/cli/reth/p2p.md index 603b451d9..33639042a 100644 --- a/book/cli/reth/p2p.md +++ b/book/cli/reth/p2p.md @@ -247,6 +247,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/prune.md b/book/cli/reth/prune.md index ed16197a7..41684ecd9 100644 --- a/book/cli/reth/prune.md +++ b/book/cli/reth/prune.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/recover/storage-tries.md b/book/cli/reth/recover/storage-tries.md index ecdaabe77..1afe94f55 100644 --- a/book/cli/reth/recover/storage-tries.md +++ b/book/cli/reth/recover/storage-tries.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum 
database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/stage/drop.md b/book/cli/reth/stage/drop.md index 399b3818c..c22d6be66 100644 --- a/book/cli/reth/stage/drop.md +++ b/book/cli/reth/stage/drop.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/stage/dump.md b/book/cli/reth/stage/dump.md index 4b3de3fb1..e3df5bf2d 100644 --- a/book/cli/reth/stage/dump.md +++ b/book/cli/reth/stage/dump.md @@ -76,6 +76,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/stage/run.md b/book/cli/reth/stage/run.md index 9da3ce0de..204efc968 100644 --- a/book/cli/reth/stage/run.md +++ b/book/cli/reth/stage/run.md @@ -69,6 +69,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/book/cli/reth/stage/unwind.md b/book/cli/reth/stage/unwind.md index 700ab3d7e..cb72b9313 100644 --- a/book/cli/reth/stage/unwind.md +++ b/book/cli/reth/stage/unwind.md @@ -74,6 +74,12 @@ Database: [possible values: true, false] + --db.max-size + Maximum database size (e.g., 4TB, 8MB) + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + --db.read-transaction-timeout Read transaction timeout in seconds, 0 means no timeout diff --git a/crates/node/core/src/args/database.rs b/crates/node/core/src/args/database.rs index 0eec6639a..5b9d6ae61 100644 --- a/crates/node/core/src/args/database.rs +++ b/crates/node/core/src/args/database.rs @@ -1,6 +1,6 @@ //! clap [Args](clap::Args) for database configuration -use std::time::Duration; +use std::{fmt, str::FromStr, time::Duration}; use crate::version::default_client_version; use clap::{ @@ -22,6 +22,12 @@ pub struct DatabaseArgs { /// NFS volume. #[arg(long = "db.exclusive")] pub exclusive: Option, + /// Maximum database size (e.g., 4TB, 8MB) + #[arg(long = "db.max-size", value_parser = parse_byte_size)] + pub max_size: Option, + /// Database growth step (e.g., 4GB, 4KB) + #[arg(long = "db.growth-step", value_parser = parse_byte_size)] + pub growth_step: Option, /// Read transaction timeout in seconds, 0 means no timeout. #[arg(long = "db.read-transaction-timeout")] pub read_transaction_timeout: Option, @@ -33,8 +39,9 @@ impl DatabaseArgs { self.get_database_args(default_client_version()) } - /// Returns the database arguments with configured log level and given client version. - pub const fn get_database_args( + /// Returns the database arguments with configured log level, client version, + /// max read transaction duration, and geometry. 
+ pub fn get_database_args( &self, client_version: ClientVersion, ) -> reth_db::mdbx::DatabaseArguments { @@ -48,6 +55,8 @@ impl DatabaseArgs { .with_log_level(self.log_level) .with_exclusive(self.exclusive) .with_max_read_transaction_duration(max_read_transaction_duration) + .with_geometry_max_size(self.max_size) + .with_growth_step(self.growth_step) } } @@ -89,10 +98,84 @@ impl TypedValueParser for LogLevelValueParser { Some(Box::new(values)) } } + +/// Size in bytes. +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] +pub struct ByteSize(pub usize); + +impl From for usize { + fn from(s: ByteSize) -> Self { + s.0 + } +} + +impl FromStr for ByteSize { + type Err = String; + + fn from_str(s: &str) -> Result { + let s = s.trim().to_uppercase(); + let parts: Vec<&str> = s.split_whitespace().collect(); + + let (num_str, unit) = match parts.len() { + 1 => { + let (num, unit) = + s.split_at(s.find(|c: char| c.is_alphabetic()).unwrap_or(s.len())); + (num, unit) + } + 2 => (parts[0], parts[1]), + _ => { + return Err("Invalid format. Use '' or ' '.".to_string()) + } + }; + + let num: usize = num_str.parse().map_err(|_| "Invalid number".to_string())?; + + let multiplier = match unit { + "B" | "" => 1, // Assume bytes if no unit is specified + "KB" => 1024, + "MB" => 1024 * 1024, + "GB" => 1024 * 1024 * 1024, + "TB" => 1024 * 1024 * 1024 * 1024, + _ => return Err(format!("Invalid unit: {}. Use B, KB, MB, GB, or TB.", unit)), + }; + + Ok(Self(num * multiplier)) + } +} + +impl fmt::Display for ByteSize { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + const KB: usize = 1024; + const MB: usize = KB * 1024; + const GB: usize = MB * 1024; + const TB: usize = GB * 1024; + + let (size, unit) = if self.0 >= TB { + (self.0 as f64 / TB as f64, "TB") + } else if self.0 >= GB { + (self.0 as f64 / GB as f64, "GB") + } else if self.0 >= MB { + (self.0 as f64 / MB as f64, "MB") + } else if self.0 >= KB { + (self.0 as f64 / KB as f64, "KB") + } else { + (self.0 as f64, "B") + }; + + write!(f, "{:.2}{}", size, unit) + } +} + +/// Value parser function that supports various formats. 
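
The `ByteSize` parser added above accepts values such as `4TB`, `12 MB`, or a plain byte count. The std-only sketch below restates the same unit rule in isolation so it is easy to test on its own; it is illustrative only, and the actual CLI flags go through `ByteSize::from_str` as shown in the diff.

```rust
/// Std-only sketch of the byte-size rule used by `--db.max-size` and
/// `--db.growth-step`: a number with an optional unit suffix, with or
/// without a space between them.
fn parse_size(s: &str) -> Result<usize, String> {
    let s = s.trim().to_uppercase();
    // Split at the first alphabetic character; everything before it is the number.
    let split = s.find(|c: char| c.is_alphabetic()).unwrap_or(s.len());
    let (num, unit) = s.split_at(split);
    let num: usize = num.trim().parse().map_err(|_| "Invalid number".to_string())?;
    let multiplier: usize = match unit.trim() {
        "" | "B" => 1,
        "KB" => 1 << 10,
        "MB" => 1 << 20,
        "GB" => 1 << 30,
        "TB" => 1 << 40, // assumes a 64-bit target
        other => return Err(format!("Invalid unit: {other}. Use B, KB, MB, GB, or TB.")),
    };
    Ok(num * multiplier)
}

fn main() {
    assert_eq!(parse_size("4GB").unwrap(), 4 << 30);
    assert_eq!(parse_size("12 MB").unwrap(), 12 << 20);
    assert_eq!(parse_size("1048576").unwrap(), 1 << 20);
    assert!(parse_size("1 PB").is_err());
}
```
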
+fn parse_byte_size(s: &str) -> Result { + s.parse::().map(Into::into) +} + #[cfg(test)] mod tests { use super::*; use clap::Parser; + use reth_db::mdbx::{GIGABYTE, KILOBYTE, MEGABYTE, TERABYTE}; /// A helper type to parse Args more easily #[derive(Parser)] @@ -108,6 +191,101 @@ mod tests { assert_eq!(args, default_args); } + #[test] + fn test_command_parser_with_valid_max_size() { + let cmd = CommandParser::::try_parse_from([ + "reth", + "--db.max-size", + "4398046511104", + ]) + .unwrap(); + assert_eq!(cmd.args.max_size, Some(TERABYTE * 4)); + } + + #[test] + fn test_command_parser_with_invalid_max_size() { + let result = + CommandParser::::try_parse_from(["reth", "--db.max-size", "invalid"]); + assert!(result.is_err()); + } + + #[test] + fn test_command_parser_with_valid_growth_step() { + let cmd = CommandParser::::try_parse_from([ + "reth", + "--db.growth-step", + "4294967296", + ]) + .unwrap(); + assert_eq!(cmd.args.growth_step, Some(GIGABYTE * 4)); + } + + #[test] + fn test_command_parser_with_invalid_growth_step() { + let result = + CommandParser::::try_parse_from(["reth", "--db.growth-step", "invalid"]); + assert!(result.is_err()); + } + + #[test] + fn test_command_parser_with_valid_max_size_and_growth_step_from_str() { + let cmd = CommandParser::::try_parse_from([ + "reth", + "--db.max-size", + "2TB", + "--db.growth-step", + "1GB", + ]) + .unwrap(); + assert_eq!(cmd.args.max_size, Some(TERABYTE * 2)); + assert_eq!(cmd.args.growth_step, Some(GIGABYTE)); + + let cmd = CommandParser::::try_parse_from([ + "reth", + "--db.max-size", + "12MB", + "--db.growth-step", + "2KB", + ]) + .unwrap(); + assert_eq!(cmd.args.max_size, Some(MEGABYTE * 12)); + assert_eq!(cmd.args.growth_step, Some(KILOBYTE * 2)); + + // with spaces + let cmd = CommandParser::::try_parse_from([ + "reth", + "--db.max-size", + "12 MB", + "--db.growth-step", + "2 KB", + ]) + .unwrap(); + assert_eq!(cmd.args.max_size, Some(MEGABYTE * 12)); + assert_eq!(cmd.args.growth_step, Some(KILOBYTE * 2)); + + let cmd = CommandParser::::try_parse_from([ + "reth", + "--db.max-size", + "1073741824", + "--db.growth-step", + "1048576", + ]) + .unwrap(); + assert_eq!(cmd.args.max_size, Some(GIGABYTE)); + assert_eq!(cmd.args.growth_step, Some(MEGABYTE)); + } + + #[test] + fn test_command_parser_max_size_and_growth_step_from_str_invalid_unit() { + let result = + CommandParser::::try_parse_from(["reth", "--db.growth-step", "1 PB"]); + assert!(result.is_err()); + + let result = + CommandParser::::try_parse_from(["reth", "--db.max-size", "2PB"]); + assert!(result.is_err()); + } + #[test] fn test_possible_values() { // Initialize the LogLevelValueParser diff --git a/crates/storage/db/src/implementation/mdbx/mod.rs b/crates/storage/db/src/implementation/mdbx/mod.rs index 65b804e6a..78a3f7971 100644 --- a/crates/storage/db/src/implementation/mdbx/mod.rs +++ b/crates/storage/db/src/implementation/mdbx/mod.rs @@ -23,7 +23,7 @@ use reth_libmdbx::{ use reth_storage_errors::db::LogLevel; use reth_tracing::tracing::error; use std::{ - ops::Deref, + ops::{Deref, Range}, path::Path, sync::Arc, time::{SystemTime, UNIX_EPOCH}, @@ -33,8 +33,14 @@ use tx::Tx; pub mod cursor; pub mod tx; -const GIGABYTE: usize = 1024 * 1024 * 1024; -const TERABYTE: usize = GIGABYTE * 1024; +/// 1 KB in bytes +pub const KILOBYTE: usize = 1024; +/// 1 MB in bytes +pub const MEGABYTE: usize = KILOBYTE * 1024; +/// 1 GB in bytes +pub const GIGABYTE: usize = MEGABYTE * 1024; +/// 1 TB in bytes +pub const TERABYTE: usize = GIGABYTE * 1024; /// MDBX allows up to 32767 readers 
(`MDBX_READERS_LIMIT`), but we limit it to slightly below that const DEFAULT_MAX_READERS: u64 = 32_000; @@ -64,6 +70,8 @@ impl DatabaseEnvKind { pub struct DatabaseArguments { /// Client version that accesses the database. client_version: ClientVersion, + /// Database geometry settings. + geometry: Geometry>, /// Database log level. If [None], the default value is used. log_level: Option, /// Maximum duration of a read transaction. If [None], the default value is used. @@ -93,15 +101,37 @@ pub struct DatabaseArguments { impl DatabaseArguments { /// Create new database arguments with given client version. - pub const fn new(client_version: ClientVersion) -> Self { + pub fn new(client_version: ClientVersion) -> Self { Self { client_version, + geometry: Geometry { + size: Some(0..(4 * TERABYTE)), + growth_step: Some(4 * GIGABYTE as isize), + shrink_threshold: Some(0), + page_size: Some(PageSize::Set(default_page_size())), + }, log_level: None, max_read_transaction_duration: None, exclusive: None, } } + /// Sets the upper size limit of the db environment, the maximum database size in bytes. + pub const fn with_geometry_max_size(mut self, max_size: Option) -> Self { + if let Some(max_size) = max_size { + self.geometry.size = Some(0..max_size); + } + self + } + + /// Configures the database growth step in bytes. + pub const fn with_growth_step(mut self, growth_step: Option) -> Self { + if let Some(growth_step) = growth_step { + self.geometry.growth_step = Some(growth_step as isize); + } + self + } + /// Set the log level. pub const fn with_log_level(mut self, log_level: Option) -> Self { self.log_level = log_level; @@ -278,15 +308,7 @@ impl DatabaseEnv { // environment creation. debug_assert!(Tables::ALL.len() <= 256, "number of tables exceed max dbs"); inner_env.set_max_dbs(256); - inner_env.set_geometry(Geometry { - // Maximum database size of 4 terabytes - size: Some(0..(4 * TERABYTE)), - // We grow the database in increments of 4 gigabytes - growth_step: Some(4 * GIGABYTE as isize), - // The database never shrinks - shrink_threshold: Some(0), - page_size: Some(PageSize::Set(default_page_size())), - }); + inner_env.set_geometry(args.geometry); fn is_current_process(id: u32) -> bool { #[cfg(unix)] diff --git a/crates/storage/libmdbx-rs/src/environment.rs b/crates/storage/libmdbx-rs/src/environment.rs index 480f5aaab..6a0b21040 100644 --- a/crates/storage/libmdbx-rs/src/environment.rs +++ b/crates/storage/libmdbx-rs/src/environment.rs @@ -489,8 +489,10 @@ pub struct PageOps { pub mincore: u64, } +/// Represents the geometry settings for the database environment #[derive(Clone, Debug, PartialEq, Eq)] pub struct Geometry { + /// The size range in bytes. 
pub size: Option, pub growth_step: Option, pub shrink_threshold: Option, From e92ecfbc220929f6bddb283b32896c8257a82748 Mon Sep 17 00:00:00 2001 From: Hoa Nguyen Date: Tue, 29 Oct 2024 21:34:12 +0700 Subject: [PATCH 339/425] feat: Introduce trait for OpTransaction (#11745) Co-authored-by: Matthias Seitz --- Cargo.lock | 1 + Cargo.toml | 2 +- crates/optimism/evm/src/execute.rs | 1 + crates/optimism/payload/Cargo.toml | 5 +- crates/optimism/payload/src/builder.rs | 1 + crates/optimism/rpc/src/eth/receipt.rs | 4 +- crates/optimism/rpc/src/eth/transaction.rs | 1 + crates/primitives/src/transaction/compat.rs | 2 + crates/primitives/src/transaction/mod.rs | 63 +++++++++------------ 9 files changed, 40 insertions(+), 40 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8febcffa5..5b6224457 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8260,6 +8260,7 @@ dependencies = [ "alloy-primitives", "alloy-rlp", "alloy-rpc-types-engine", + "op-alloy-consensus", "op-alloy-rpc-types-engine", "reth-basic-payload-builder", "reth-chain-state", diff --git a/Cargo.toml b/Cargo.toml index c83d76318..8c5cf7b0c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -633,4 +633,4 @@ tracy-client = "0.17.3" #op-alloy-rpc-types = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } #op-alloy-rpc-types-engine = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } #op-alloy-network = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } -#op-alloy-consensus = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } +#op-alloy-consensus = { git = "https://github.com/alloy-rs/op-alloy", rev = "6a042e7681b1" } \ No newline at end of file diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index 1cd924098..9c5db9d4b 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -5,6 +5,7 @@ use alloc::{boxed::Box, sync::Arc, vec::Vec}; use alloy_consensus::Transaction as _; use alloy_eips::eip7685::Requests; use core::fmt::Display; +use op_alloy_consensus::DepositTransaction; use reth_chainspec::EthereumHardforks; use reth_consensus::ConsensusError; use reth_evm::{ diff --git a/crates/optimism/payload/Cargo.toml b/crates/optimism/payload/Cargo.toml index ba0b105e8..4b8e64f2d 100644 --- a/crates/optimism/payload/Cargo.toml +++ b/crates/optimism/payload/Cargo.toml @@ -39,6 +39,7 @@ alloy-eips.workspace = true alloy-primitives.workspace = true alloy-rlp.workspace = true op-alloy-rpc-types-engine.workspace = true +op-alloy-consensus.workspace = true alloy-rpc-types-engine.workspace = true alloy-consensus.workspace = true @@ -54,5 +55,5 @@ optimism = [ "reth-optimism-evm/optimism", "revm/optimism", "reth-execution-types/optimism", - "reth-optimism-consensus/optimism", -] + "reth-optimism-consensus/optimism" +] \ No newline at end of file diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index a1536ccf8..0550adeaa 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -36,6 +36,7 @@ use crate::{ error::OptimismPayloadBuilderError, payload::{OptimismBuiltPayload, OptimismPayloadBuilderAttributes}, }; +use op_alloy_consensus::DepositTransaction; /// Optimism's payload builder #[derive(Debug, Clone, PartialEq, Eq)] diff --git a/crates/optimism/rpc/src/eth/receipt.rs b/crates/optimism/rpc/src/eth/receipt.rs index 3f2a81573..2734fb545 100644 --- a/crates/optimism/rpc/src/eth/receipt.rs +++ b/crates/optimism/rpc/src/eth/receipt.rs 
@@ -2,7 +2,9 @@ use alloy_eips::eip2718::Encodable2718; use alloy_rpc_types::{AnyReceiptEnvelope, Log, TransactionReceipt}; -use op_alloy_consensus::{OpDepositReceipt, OpDepositReceiptWithBloom, OpReceiptEnvelope}; +use op_alloy_consensus::{ + DepositTransaction, OpDepositReceipt, OpDepositReceiptWithBloom, OpReceiptEnvelope, +}; use op_alloy_rpc_types::{receipt::L1BlockInfo, OpTransactionReceipt, OpTransactionReceiptFields}; use reth_node_api::{FullNodeComponents, NodeTypes}; use reth_optimism_chainspec::OpChainSpec; diff --git a/crates/optimism/rpc/src/eth/transaction.rs b/crates/optimism/rpc/src/eth/transaction.rs index 3345ac5d4..3994afe19 100644 --- a/crates/optimism/rpc/src/eth/transaction.rs +++ b/crates/optimism/rpc/src/eth/transaction.rs @@ -3,6 +3,7 @@ use alloy_consensus::Transaction as _; use alloy_primitives::{Bytes, B256}; use alloy_rpc_types::TransactionInfo; +use op_alloy_consensus::DepositTransaction; use op_alloy_rpc_types::Transaction; use reth_node_api::FullNodeComponents; use reth_primitives::TransactionSignedEcRecovered; diff --git a/crates/primitives/src/transaction/compat.rs b/crates/primitives/src/transaction/compat.rs index 81281186f..883c89c45 100644 --- a/crates/primitives/src/transaction/compat.rs +++ b/crates/primitives/src/transaction/compat.rs @@ -1,5 +1,7 @@ use crate::{Transaction, TransactionSigned}; use alloy_primitives::{Address, TxKind, U256}; +#[cfg(feature = "optimism")] +use op_alloy_consensus::DepositTransaction; use revm_primitives::{AuthorizationList, TxEnv}; /// Implements behaviour to fill a [`TxEnv`] from another transaction. diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 3a5c36741..e81c5ad33 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -18,6 +18,8 @@ use derive_more::{AsRef, Deref}; use once_cell as _; #[cfg(not(feature = "std"))] use once_cell::sync::Lazy as LazyLock; +#[cfg(feature = "optimism")] +use op_alloy_consensus::DepositTransaction; use rayon::prelude::{IntoParallelIterator, ParallelIterator}; use serde::{Deserialize, Serialize}; use signature::{decode_with_eip155_chain_id, with_eip155_parity}; @@ -136,6 +138,31 @@ pub enum Transaction { Deposit(TxDeposit), } +#[cfg(feature = "optimism")] +impl DepositTransaction for Transaction { + fn source_hash(&self) -> Option { + match self { + Self::Deposit(tx) => tx.source_hash(), + _ => None, + } + } + fn mint(&self) -> Option { + match self { + Self::Deposit(tx) => tx.mint(), + _ => None, + } + } + fn is_system_transaction(&self) -> bool { + match self { + Self::Deposit(tx) => tx.is_system_transaction(), + _ => false, + } + } + fn is_deposit(&self) -> bool { + matches!(self, Self::Deposit(_)) + } +} + #[cfg(any(test, feature = "arbitrary"))] impl<'a> arbitrary::Arbitrary<'a> for Transaction { fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { @@ -361,42 +388,6 @@ impl Transaction { } } - /// Returns the source hash of the transaction, which uniquely identifies its source. - /// If not a deposit transaction, this will always return `None`. - #[cfg(feature = "optimism")] - pub const fn source_hash(&self) -> Option { - match self { - Self::Deposit(TxDeposit { source_hash, .. }) => Some(*source_hash), - _ => None, - } - } - - /// Returns the amount of ETH locked up on L1 that will be minted on L2. If the transaction - /// is not a deposit transaction, this will always return `None`. 
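
The `DepositTransaction` impl added above replaces the inherent deposit helpers removed in the hunk that follows: the enum now answers the trait's accessors by matching on its deposit variant and falling back to `None`/`false` otherwise. A self-contained sketch of that delegation pattern is shown here; `DepositFields` and `Tx` are illustrative stand-ins, not the op-alloy or reth types.

```rust
/// Illustrative trait with deposit-only accessors (stand-in names).
trait DepositFields {
    fn source_hash(&self) -> Option<[u8; 32]>;
    fn mint(&self) -> Option<u128>;
    fn is_deposit(&self) -> bool;
}

enum Tx {
    Legacy { nonce: u64 },
    Deposit { source_hash: [u8; 32], mint: Option<u128> },
}

impl DepositFields for Tx {
    fn source_hash(&self) -> Option<[u8; 32]> {
        match self {
            Self::Deposit { source_hash, .. } => Some(*source_hash),
            _ => None,
        }
    }

    fn mint(&self) -> Option<u128> {
        match self {
            Self::Deposit { mint, .. } => *mint,
            _ => None,
        }
    }

    fn is_deposit(&self) -> bool {
        matches!(self, Self::Deposit { .. })
    }
}

fn main() {
    let deposit = Tx::Deposit { source_hash: [0u8; 32], mint: Some(42) };
    assert!(deposit.is_deposit());
    assert_eq!(deposit.mint(), Some(42));

    let legacy = Tx::Legacy { nonce: 7 };
    assert!(!legacy.is_deposit());
    assert_eq!(legacy.source_hash(), None);
}
```
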
- #[cfg(feature = "optimism")] - pub const fn mint(&self) -> Option { - match self { - Self::Deposit(TxDeposit { mint, .. }) => *mint, - _ => None, - } - } - - /// Returns whether or not the transaction is a system transaction. If the transaction - /// is not a deposit transaction, this will always return `false`. - #[cfg(feature = "optimism")] - pub const fn is_system_transaction(&self) -> bool { - match self { - Self::Deposit(TxDeposit { is_system_transaction, .. }) => *is_system_transaction, - _ => false, - } - } - - /// Returns whether or not the transaction is an Optimism Deposited transaction. - #[cfg(feature = "optimism")] - pub const fn is_deposit(&self) -> bool { - matches!(self, Self::Deposit(_)) - } - /// This encodes the transaction _without_ the signature, and is only suitable for creating a /// hash intended for signing. pub fn encode_without_signature(&self, out: &mut dyn bytes::BufMut) { From 58f24e0056b8113b4762796c005b2a73a75a42c9 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 29 Oct 2024 15:35:42 +0100 Subject: [PATCH 340/425] chore: remove generate sidecar fn (#12167) --- crates/primitives/src/transaction/mod.rs | 2 -- crates/primitives/src/transaction/sidecar.rs | 37 +++----------------- crates/transaction-pool/src/pool/mod.rs | 5 +-- 3 files changed, 7 insertions(+), 37 deletions(-) diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index e81c5ad33..2e0a786fc 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -31,8 +31,6 @@ pub use error::{ }; pub use meta::TransactionMeta; pub use pooled::{PooledTransactionsElement, PooledTransactionsElementEcRecovered}; -#[cfg(all(feature = "c-kzg", any(test, feature = "arbitrary")))] -pub use sidecar::generate_blob_sidecar; #[cfg(feature = "c-kzg")] pub use sidecar::BlobTransactionValidationError; pub use sidecar::{BlobTransaction, BlobTransactionSidecar}; diff --git a/crates/primitives/src/transaction/sidecar.rs b/crates/primitives/src/transaction/sidecar.rs index e901cbfc0..1e6560e15 100644 --- a/crates/primitives/src/transaction/sidecar.rs +++ b/crates/primitives/src/transaction/sidecar.rs @@ -191,35 +191,6 @@ impl BlobTransaction { } } -/// Generates a [`BlobTransactionSidecar`] structure containing blobs, commitments, and proofs. 
-#[cfg(all(feature = "c-kzg", any(test, feature = "arbitrary")))] -pub fn generate_blob_sidecar(blobs: Vec) -> BlobTransactionSidecar { - use alloc::vec::Vec; - use alloy_eips::eip4844::env_settings::EnvKzgSettings; - use c_kzg::{KzgCommitment, KzgProof}; - - let kzg_settings = EnvKzgSettings::Default; - - let commitments: Vec = blobs - .iter() - .map(|blob| { - KzgCommitment::blob_to_kzg_commitment(&blob.clone(), kzg_settings.get()).unwrap() - }) - .map(|commitment| commitment.to_bytes()) - .collect(); - - let proofs: Vec = blobs - .iter() - .zip(commitments.iter()) - .map(|(blob, commitment)| { - KzgProof::compute_blob_kzg_proof(blob, commitment, kzg_settings.get()).unwrap() - }) - .map(|proof| proof.to_bytes()) - .collect(); - - BlobTransactionSidecar::from_kzg(blobs, commitments, proofs) -} - #[cfg(all(test, feature = "c-kzg"))] mod tests { use super::*; @@ -251,7 +222,7 @@ mod tests { .unwrap()]; // Generate a BlobTransactionSidecar from the blobs - let sidecar = generate_blob_sidecar(blobs); + let sidecar = BlobTransactionSidecar::try_from_blobs(blobs).unwrap(); // Assert commitment equality assert_eq!( @@ -300,7 +271,7 @@ mod tests { } // Generate a BlobTransactionSidecar from the blobs - let sidecar = generate_blob_sidecar(blobs.clone()); + let sidecar = BlobTransactionSidecar::try_from_blobs(blobs).unwrap(); // Assert sidecar size assert_eq!(sidecar.size(), 524672); @@ -325,7 +296,7 @@ mod tests { .unwrap()]; // Generate a BlobTransactionSidecar from the blobs - let sidecar = generate_blob_sidecar(blobs); + let sidecar = BlobTransactionSidecar::try_from_blobs(blobs).unwrap(); // Create a vector to store the encoded RLP let mut encoded_rlp = Vec::new(); @@ -356,7 +327,7 @@ mod tests { .unwrap()]; // Generate a BlobTransactionSidecar from the blobs - let sidecar = generate_blob_sidecar(blobs); + let sidecar = BlobTransactionSidecar::try_from_blobs(blobs).unwrap(); // Create a vector to store the encoded RLP let mut encoded_rlp = Vec::new(); diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index 69f17504f..fef0fd0ee 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -1236,7 +1236,8 @@ mod tests { validate::ValidTransaction, BlockInfo, PoolConfig, SubPoolLimit, TransactionOrigin, TransactionValidationOutcome, U256, }; - use reth_primitives::{kzg::Blob, transaction::generate_blob_sidecar}; + use alloy_eips::eip4844::BlobTransactionSidecar; + use reth_primitives::kzg::Blob; use std::{fs, path::PathBuf}; #[test] @@ -1271,7 +1272,7 @@ mod tests { .unwrap()]; // Generate a BlobTransactionSidecar from the blobs. - let sidecar = generate_blob_sidecar(blobs); + let sidecar = BlobTransactionSidecar::try_from_blobs(blobs).unwrap(); // Create an in-memory blob store. 
let blob_store = InMemoryBlobStore::default(); From 1006ce78c530088fd103bf9ffe14cc3ae6123ce9 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Tue, 29 Oct 2024 23:37:41 +0900 Subject: [PATCH 341/425] feat(provider): use `NodeTypes` on `DatabaseProvider` instead (#12166) --- crates/stages/stages/benches/criterion.rs | 6 +- crates/stages/stages/benches/setup/mod.rs | 11 +- .../stages/src/stages/hashing_account.rs | 7 +- crates/stages/stages/src/test_utils/runner.rs | 5 +- .../provider/src/providers/consistent.rs | 8 +- .../provider/src/providers/database/mod.rs | 8 +- .../src/providers/database/provider.rs | 151 +++++++++--------- .../storage/provider/src/test_utils/blocks.rs | 5 +- .../storage/provider/src/test_utils/mock.rs | 14 +- crates/trie/db/tests/trie.rs | 11 +- 10 files changed, 118 insertions(+), 108 deletions(-) diff --git a/crates/stages/stages/benches/criterion.rs b/crates/stages/stages/benches/criterion.rs index 7519d81a3..0f876dd70 100644 --- a/crates/stages/stages/benches/criterion.rs +++ b/crates/stages/stages/benches/criterion.rs @@ -2,12 +2,11 @@ use criterion::{criterion_main, measurement::WallTime, BenchmarkGroup, Criterion}; #[cfg(not(target_os = "windows"))] use pprof::criterion::{Output, PProfProfiler}; -use reth_chainspec::ChainSpec; use reth_config::config::{EtlConfig, TransactionLookupConfig}; use reth_db::{test_utils::TempDatabase, Database, DatabaseEnv}; use alloy_primitives::BlockNumber; -use reth_provider::{DatabaseProvider, DatabaseProviderFactory}; +use reth_provider::{test_utils::MockNodeTypesWithDB, DatabaseProvider, DatabaseProviderFactory}; use reth_stages::{ stages::{MerkleStage, SenderRecoveryStage, TransactionLookupStage}, test_utils::TestStageDB, @@ -148,7 +147,8 @@ fn measure_stage( block_interval: RangeInclusive, label: String, ) where - S: Clone + Stage as Database>::TXMut, ChainSpec>>, + S: Clone + + Stage as Database>::TXMut, MockNodeTypesWithDB>>, F: Fn(S, &TestStageDB, StageRange), { let stage_range = ( diff --git a/crates/stages/stages/benches/setup/mod.rs b/crates/stages/stages/benches/setup/mod.rs index 4812fb13c..e6ae33f9c 100644 --- a/crates/stages/stages/benches/setup/mod.rs +++ b/crates/stages/stages/benches/setup/mod.rs @@ -1,14 +1,15 @@ #![allow(unreachable_pub)] use alloy_primitives::{Address, Sealable, B256, U256}; use itertools::concat; -use reth_chainspec::ChainSpec; use reth_db::{tables, test_utils::TempDatabase, Database, DatabaseEnv}; use reth_db_api::{ cursor::DbCursorRO, transaction::{DbTx, DbTxMut}, }; use reth_primitives::{Account, SealedBlock, SealedHeader}; -use reth_provider::{DatabaseProvider, DatabaseProviderFactory, TrieWriter}; +use reth_provider::{ + test_utils::MockNodeTypesWithDB, DatabaseProvider, DatabaseProviderFactory, TrieWriter, +}; use reth_stages::{ stages::{AccountHashingStage, StorageHashingStage}, test_utils::{StorageKind, TestStageDB}, @@ -31,7 +32,8 @@ use reth_trie_db::DatabaseStateRoot; pub(crate) type StageRange = (ExecInput, UnwindInput); pub(crate) fn stage_unwind< - S: Clone + Stage as Database>::TXMut, ChainSpec>>, + S: Clone + + Stage as Database>::TXMut, MockNodeTypesWithDB>>, >( stage: S, db: &TestStageDB, @@ -63,7 +65,8 @@ pub(crate) fn stage_unwind< pub(crate) fn unwind_hashes(stage: S, db: &TestStageDB, range: StageRange) where - S: Clone + Stage as Database>::TXMut, ChainSpec>>, + S: Clone + + Stage as Database>::TXMut, MockNodeTypesWithDB>>, { let (input, unwind) = range; diff --git a/crates/stages/stages/src/stages/hashing_account.rs 
b/crates/stages/stages/src/stages/hashing_account.rs index 5b4f72097..1ca0e1aa1 100644 --- a/crates/stages/stages/src/stages/hashing_account.rs +++ b/crates/stages/stages/src/stages/hashing_account.rs @@ -58,11 +58,8 @@ impl AccountHashingStage { /// /// Proceeds to go to the `BlockTransitionIndex` end, go back `transitions` and change the /// account state in the `AccountChangeSets` table. - pub fn seed< - Tx: DbTx + DbTxMut + 'static, - Spec: Send + Sync + 'static + reth_chainspec::EthereumHardforks, - >( - provider: &reth_provider::DatabaseProvider, + pub fn seed( + provider: &reth_provider::DatabaseProvider, opts: SeedOpts, ) -> Result, StageError> { use alloy_primitives::U256; diff --git a/crates/stages/stages/src/test_utils/runner.rs b/crates/stages/stages/src/test_utils/runner.rs index 26f245c13..c3d25b995 100644 --- a/crates/stages/stages/src/test_utils/runner.rs +++ b/crates/stages/stages/src/test_utils/runner.rs @@ -1,7 +1,6 @@ use super::TestStageDB; -use reth_chainspec::ChainSpec; use reth_db::{test_utils::TempDatabase, Database, DatabaseEnv}; -use reth_provider::{DatabaseProvider, ProviderError}; +use reth_provider::{test_utils::MockNodeTypesWithDB, DatabaseProvider, ProviderError}; use reth_stages_api::{ ExecInput, ExecOutput, Stage, StageError, StageExt, UnwindInput, UnwindOutput, }; @@ -20,7 +19,7 @@ pub(crate) enum TestRunnerError { /// A generic test runner for stages. pub(crate) trait StageTestRunner { - type S: Stage as Database>::TXMut, ChainSpec>> + type S: Stage as Database>::TXMut, MockNodeTypesWithDB>> + 'static; /// Return a reference to the database. diff --git a/crates/storage/provider/src/providers/consistent.rs b/crates/storage/provider/src/providers/consistent.rs index d6847fa1b..e6ca1a919 100644 --- a/crates/storage/provider/src/providers/consistent.rs +++ b/crates/storage/provider/src/providers/consistent.rs @@ -296,7 +296,7 @@ impl ConsistentProvider { ) -> ProviderResult> where F: FnOnce( - &DatabaseProviderRO, + &DatabaseProviderRO, RangeInclusive, &mut P, ) -> ProviderResult>, @@ -413,7 +413,7 @@ impl ConsistentProvider { ) -> ProviderResult> where S: FnOnce( - &DatabaseProviderRO, + &DatabaseProviderRO, RangeInclusive, ) -> ProviderResult>, M: Fn(RangeInclusive, &BlockState) -> ProviderResult>, @@ -511,7 +511,7 @@ impl ConsistentProvider { fetch_from_block_state: M, ) -> ProviderResult> where - S: FnOnce(&DatabaseProviderRO) -> ProviderResult>, + S: FnOnce(&DatabaseProviderRO) -> ProviderResult>, M: Fn(usize, TxNumber, &BlockState) -> ProviderResult>, { let in_mem_chain = self.head_block.iter().flat_map(|b| b.chain()).collect::>(); @@ -578,7 +578,7 @@ impl ConsistentProvider { fetch_from_block_state: M, ) -> ProviderResult where - S: FnOnce(&DatabaseProviderRO) -> ProviderResult, + S: FnOnce(&DatabaseProviderRO) -> ProviderResult, M: Fn(&BlockState) -> ProviderResult, { if let Some(Some(block_state)) = self.head_block.as_ref().map(|b| b.block_on_chain(id)) { diff --git a/crates/storage/provider/src/providers/database/mod.rs b/crates/storage/provider/src/providers/database/mod.rs index 54186dca6..04a30ce90 100644 --- a/crates/storage/provider/src/providers/database/mod.rs +++ b/crates/storage/provider/src/providers/database/mod.rs @@ -130,7 +130,7 @@ impl ProviderFactory { /// This sets the [`PruneModes`] to [`None`], because they should only be relevant for writing /// data. 
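
The provider refactor in this patch replaces the bare `Spec` parameter on `DatabaseProvider` with an `N: NodeTypes` parameter whose associated `ChainSpec` supplies the chain-spec type. The sketch below shows only the general shape of that change with simplified stand-in names (`NodeKinds`, `Provider`, `MainnetSpec`); it is not the reth definition, just the associated-type pattern it relies on.

```rust
use std::sync::Arc;

/// Stand-in for a `NodeTypes`-style trait: node-level choices are bundled
/// behind one trait with associated types.
trait NodeKinds {
    type ChainSpec;
}

struct MainnetSpec {
    chain_id: u64,
}

struct MyNode;

impl NodeKinds for MyNode {
    type ChainSpec = MainnetSpec;
}

/// A provider generic over the transaction type and the node kind; the
/// chain-spec type is picked up through the associated type instead of
/// being a separate generic parameter.
struct Provider<Tx, N: NodeKinds> {
    tx: Tx,
    chain_spec: Arc<N::ChainSpec>,
}

impl<Tx, N: NodeKinds> Provider<Tx, N> {
    fn new(tx: Tx, chain_spec: Arc<N::ChainSpec>) -> Self {
        Self { tx, chain_spec }
    }

    fn chain_spec(&self) -> &N::ChainSpec {
        &self.chain_spec
    }
}

fn main() {
    // The node kind has to be named explicitly, since it cannot be inferred
    // from the associated type alone.
    let provider: Provider<(), MyNode> = Provider::new((), Arc::new(MainnetSpec { chain_id: 1 }));
    assert_eq!(provider.chain_spec().chain_id, 1);
}
```
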
#[track_caller] - pub fn provider(&self) -> ProviderResult> { + pub fn provider(&self) -> ProviderResult> { Ok(DatabaseProvider::new( self.db.tx()?, self.chain_spec.clone(), @@ -144,7 +144,7 @@ impl ProviderFactory { /// [`BlockHashReader`]. This may fail if the inner read/write database transaction fails to /// open. #[track_caller] - pub fn provider_rw(&self) -> ProviderResult> { + pub fn provider_rw(&self) -> ProviderResult> { Ok(DatabaseProviderRW(DatabaseProvider::new_rw( self.db.tx_mut()?, self.chain_spec.clone(), @@ -186,8 +186,8 @@ impl ProviderFactory { impl DatabaseProviderFactory for ProviderFactory { type DB = N::DB; - type Provider = DatabaseProvider<::TX, N::ChainSpec>; - type ProviderRW = DatabaseProvider<::TXMut, N::ChainSpec>; + type Provider = DatabaseProvider<::TX, N>; + type ProviderRW = DatabaseProvider<::TXMut, N>; fn database_provider_ro(&self) -> ProviderResult { self.provider() diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 2dcc3f92d..81affa0d8 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -38,6 +38,7 @@ use reth_db_api::{ use reth_evm::ConfigureEvmEnv; use reth_execution_types::{Chain, ExecutionOutcome}; use reth_network_p2p::headers::downloader::SyncTarget; +use reth_node_types::NodeTypes; use reth_primitives::{ Account, Block, BlockBody, BlockWithSenders, Bytecode, GotExpected, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, StaticFileSegment, StorageEntry, @@ -70,40 +71,40 @@ use tokio::sync::watch; use tracing::{debug, error, trace, warn}; /// A [`DatabaseProvider`] that holds a read-only database transaction. -pub type DatabaseProviderRO = DatabaseProvider<::TX, Spec>; +pub type DatabaseProviderRO = DatabaseProvider<::TX, N>; /// A [`DatabaseProvider`] that holds a read-write database transaction. /// /// Ideally this would be an alias type. However, there's some weird compiler error (), that forces us to wrap this in a struct instead. /// Once that issue is solved, we can probably revert back to being an alias type. #[derive(Debug)] -pub struct DatabaseProviderRW( - pub DatabaseProvider<::TXMut, Spec>, +pub struct DatabaseProviderRW( + pub DatabaseProvider<::TXMut, N>, ); -impl Deref for DatabaseProviderRW { - type Target = DatabaseProvider<::TXMut, Spec>; +impl Deref for DatabaseProviderRW { + type Target = DatabaseProvider<::TXMut, N>; fn deref(&self) -> &Self::Target { &self.0 } } -impl DerefMut for DatabaseProviderRW { +impl DerefMut for DatabaseProviderRW { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } -impl AsRef::TXMut, Spec>> - for DatabaseProviderRW +impl AsRef::TXMut, N>> + for DatabaseProviderRW { - fn as_ref(&self) -> &DatabaseProvider<::TXMut, Spec> { + fn as_ref(&self) -> &DatabaseProvider<::TXMut, N> { &self.0 } } -impl DatabaseProviderRW { +impl DatabaseProviderRW { /// Commit database transaction and static file if it exists. pub fn commit(self) -> ProviderResult { self.0.commit() @@ -115,10 +116,10 @@ impl DatabaseProviderRW { } } -impl From> - for DatabaseProvider<::TXMut, Spec> +impl From> + for DatabaseProvider<::TXMut, N> { - fn from(provider: DatabaseProviderRW) -> Self { + fn from(provider: DatabaseProviderRW) -> Self { provider.0 } } @@ -126,25 +127,25 @@ impl From> /// A provider struct that fetches data from the database. /// Wrapper around [`DbTx`] and [`DbTxMut`]. 
Example: [`HeaderProvider`] [`BlockHashReader`] #[derive(Debug)] -pub struct DatabaseProvider { +pub struct DatabaseProvider { /// Database transaction. tx: TX, /// Chain spec - chain_spec: Arc, + chain_spec: Arc, /// Static File provider static_file_provider: StaticFileProvider, /// Pruning configuration prune_modes: PruneModes, } -impl DatabaseProvider { +impl DatabaseProvider { /// Returns reference to prune modes. pub const fn prune_modes_ref(&self) -> &PruneModes { &self.prune_modes } } -impl DatabaseProvider { +impl DatabaseProvider { /// State provider for latest block pub fn latest<'a>(&'a self) -> ProviderResult> { trace!(target: "providers::db", "Returning latest state provider"); @@ -202,28 +203,28 @@ impl DatabaseProvider { } } -impl StaticFileProviderFactory for DatabaseProvider { +impl StaticFileProviderFactory for DatabaseProvider { /// Returns a static file provider fn static_file_provider(&self) -> StaticFileProvider { self.static_file_provider.clone() } } -impl ChainSpecProvider - for DatabaseProvider +impl> ChainSpecProvider + for DatabaseProvider { - type ChainSpec = Spec; + type ChainSpec = N::ChainSpec; fn chain_spec(&self) -> Arc { self.chain_spec.clone() } } -impl DatabaseProvider { +impl DatabaseProvider { /// Creates a provider with an inner read-write transaction. pub const fn new_rw( tx: TX, - chain_spec: Arc, + chain_spec: Arc, static_file_provider: StaticFileProvider, prune_modes: PruneModes, ) -> Self { @@ -231,15 +232,13 @@ impl DatabaseProvider { } } -impl AsRef for DatabaseProvider { +impl AsRef for DatabaseProvider { fn as_ref(&self) -> &Self { self } } -impl TryIntoHistoricalStateProvider - for DatabaseProvider -{ +impl TryIntoHistoricalStateProvider for DatabaseProvider { fn try_into_history_at_block( self, mut block_number: BlockNumber, @@ -282,8 +281,8 @@ impl TryIntoHistoricalStateProvider } } -impl - DatabaseProvider +impl + 'static> + DatabaseProvider { // TODO: uncomment below, once `reth debug_cmd` has been feature gated with dev. // #[cfg(any(test, feature = "test-utils"))] @@ -365,11 +364,11 @@ where Ok(Vec::new()) } -impl DatabaseProvider { +impl DatabaseProvider { /// Creates a provider with an inner read-only transaction. pub const fn new( tx: TX, - chain_spec: Arc, + chain_spec: Arc, static_file_provider: StaticFileProvider, prune_modes: PruneModes, ) -> Self { @@ -392,7 +391,7 @@ impl DatabaseProvider { } /// Returns a reference to the chain specification. - pub fn chain_spec(&self) -> &Spec { + pub fn chain_spec(&self) -> &N::ChainSpec { &self.chain_spec } @@ -490,7 +489,7 @@ impl DatabaseProvider { construct_block: BF, ) -> ProviderResult> where - Spec: EthereumHardforks, + N::ChainSpec: EthereumHardforks, H: AsRef
, HF: FnOnce(BlockNumber) -> ProviderResult>, BF: FnOnce( @@ -556,7 +555,7 @@ impl DatabaseProvider { mut assemble_block: F, ) -> ProviderResult> where - Spec: EthereumHardforks, + N::ChainSpec: EthereumHardforks, H: AsRef
, HF: FnOnce(RangeInclusive) -> ProviderResult>, F: FnMut(H, Range, Vec
, Option) -> ProviderResult, @@ -633,7 +632,7 @@ impl DatabaseProvider { assemble_block: BF, ) -> ProviderResult> where - Spec: EthereumHardforks, + N::ChainSpec: EthereumHardforks, H: AsRef
, HF: Fn(RangeInclusive) -> ProviderResult>, BF: Fn( @@ -853,7 +852,7 @@ impl DatabaseProvider { } } -impl DatabaseProvider { +impl DatabaseProvider { /// Commit database transaction. pub fn commit(self) -> ProviderResult { Ok(self.tx.commit()?) @@ -1079,7 +1078,7 @@ impl DatabaseProvider { range: impl RangeBounds + Clone, ) -> ProviderResult> where - Spec: EthereumHardforks, + N::ChainSpec: EthereumHardforks, { // For blocks we need: // @@ -1218,13 +1217,13 @@ impl DatabaseProvider { } } -impl AccountReader for DatabaseProvider { +impl AccountReader for DatabaseProvider { fn basic_account(&self, address: Address) -> ProviderResult> { Ok(self.tx.get::(address)?) } } -impl AccountExtReader for DatabaseProvider { +impl AccountExtReader for DatabaseProvider { fn changed_accounts_with_range( &self, range: impl RangeBounds, @@ -1268,7 +1267,7 @@ impl AccountExtReader for DatabaseProvider StorageChangeSetReader for DatabaseProvider { +impl StorageChangeSetReader for DatabaseProvider { fn storage_changeset( &self, block_number: BlockNumber, @@ -1283,7 +1282,7 @@ impl StorageChangeSetReader for DatabaseProvider ChangeSetReader for DatabaseProvider { +impl ChangeSetReader for DatabaseProvider { fn account_block_changeset( &self, block_number: BlockNumber, @@ -1300,7 +1299,7 @@ impl ChangeSetReader for DatabaseProvider } } -impl HeaderSyncGapProvider for DatabaseProvider { +impl HeaderSyncGapProvider for DatabaseProvider { fn sync_gap( &self, tip: watch::Receiver, @@ -1344,8 +1343,8 @@ impl HeaderSyncGapProvider for DatabaseProvider HeaderProvider - for DatabaseProvider +impl> HeaderProvider + for DatabaseProvider { fn header(&self, block_hash: &BlockHash) -> ProviderResult> { if let Some(num) = self.block_number(*block_hash)? { @@ -1444,7 +1443,7 @@ impl HeaderProvider } } -impl BlockHashReader for DatabaseProvider { +impl BlockHashReader for DatabaseProvider { fn block_hash(&self, number: u64) -> ProviderResult> { self.static_file_provider.get_with_static_file_or_database( StaticFileSegment::Headers, @@ -1471,7 +1470,7 @@ impl BlockHashReader for DatabaseProvider } } -impl BlockNumReader for DatabaseProvider { +impl BlockNumReader for DatabaseProvider { fn chain_info(&self) -> ProviderResult { let best_number = self.best_block_number()?; let best_hash = self.block_hash(best_number)?.unwrap_or_default(); @@ -1502,7 +1501,7 @@ impl BlockNumReader for DatabaseProvider } } -impl BlockReader for DatabaseProvider { +impl> BlockReader for DatabaseProvider { fn find_block_by_hash(&self, hash: B256, source: BlockSource) -> ProviderResult> { if source.is_canonical() { self.block(hash.into()) @@ -1677,8 +1676,8 @@ impl BlockReader for DatabasePr } } -impl TransactionsProviderExt - for DatabaseProvider +impl> TransactionsProviderExt + for DatabaseProvider { /// Recovers transaction hashes by walking through `Transactions` table and /// calculating them in a parallel manner. Returned unsorted. @@ -1747,8 +1746,8 @@ impl TransactionsProviderExt } // Calculates the hash of the given transaction -impl TransactionsProvider - for DatabaseProvider +impl> TransactionsProvider + for DatabaseProvider { fn transaction_id(&self, tx_hash: TxHash) -> ProviderResult> { Ok(self.tx.get::(tx_hash)?) 
@@ -1907,8 +1906,8 @@ impl TransactionsProvider } } -impl ReceiptProvider - for DatabaseProvider +impl> ReceiptProvider + for DatabaseProvider { fn receipt(&self, id: TxNumber) -> ProviderResult> { self.static_file_provider.get_with_static_file_or_database( @@ -1955,8 +1954,8 @@ impl ReceiptProvider } } -impl WithdrawalsProvider - for DatabaseProvider +impl> WithdrawalsProvider + for DatabaseProvider { fn withdrawals_by_block( &self, @@ -1985,8 +1984,8 @@ impl WithdrawalsProvider } } -impl EvmEnvProvider - for DatabaseProvider +impl> EvmEnvProvider + for DatabaseProvider { fn fill_env_at( &self, @@ -2051,7 +2050,7 @@ impl EvmEnvProvider } } -impl StageCheckpointReader for DatabaseProvider { +impl StageCheckpointReader for DatabaseProvider { fn get_stage_checkpoint(&self, id: StageId) -> ProviderResult> { Ok(self.tx.get::(id.to_string())?) } @@ -2070,7 +2069,7 @@ impl StageCheckpointReader for DatabaseProvider StageCheckpointWriter for DatabaseProvider { +impl StageCheckpointWriter for DatabaseProvider { /// Save stage checkpoint. fn save_stage_checkpoint( &self, @@ -2111,7 +2110,7 @@ impl StageCheckpointWriter for DatabaseProvider< } } -impl StorageReader for DatabaseProvider { +impl StorageReader for DatabaseProvider { fn plain_state_storages( &self, addresses_with_keys: impl IntoIterator)>, @@ -2174,7 +2173,7 @@ impl StorageReader for DatabaseProvider { } } -impl StateChangeWriter for DatabaseProvider { +impl StateChangeWriter for DatabaseProvider { fn write_state_reverts( &self, reverts: PlainStateReverts, @@ -2551,7 +2550,7 @@ impl StateChangeWriter for DatabaseProvid } } -impl TrieWriter for DatabaseProvider { +impl TrieWriter for DatabaseProvider { /// Writes trie updates. Returns the number of entries modified. fn write_trie_updates(&self, trie_updates: &TrieUpdates) -> ProviderResult { if trie_updates.is_empty() { @@ -2601,7 +2600,7 @@ impl TrieWriter for DatabaseProvider StorageTrieWriter for DatabaseProvider { +impl StorageTrieWriter for DatabaseProvider { /// Writes storage trie updates from the given storage trie map. First sorts the storage trie /// updates by the hashed address, writing in sorted order. 
fn write_storage_trie_updates( @@ -2638,7 +2637,7 @@ impl StorageTrieWriter for DatabaseProvid } } -impl HashingWriter for DatabaseProvider { +impl HashingWriter for DatabaseProvider { fn unwind_account_hashing<'a>( &self, changesets: impl Iterator, @@ -2863,7 +2862,7 @@ impl HashingWriter for DatabaseProvider HistoryWriter for DatabaseProvider { +impl HistoryWriter for DatabaseProvider { fn unwind_account_history_indices<'a>( &self, changesets: impl Iterator, @@ -2997,14 +2996,14 @@ impl HistoryWriter for DatabaseProvider StateReader for DatabaseProvider { +impl StateReader for DatabaseProvider { fn get_state(&self, block: BlockNumber) -> ProviderResult> { self.get_state(block..=block) } } -impl - BlockExecutionWriter for DatabaseProvider +impl + 'static> + BlockExecutionWriter for DatabaseProvider { fn take_block_and_execution_range( &self, @@ -3205,8 +3204,8 @@ impl BlockWriter - for DatabaseProvider +impl + 'static> BlockWriter + for DatabaseProvider { /// Inserts the block into the database, always modifying the following tables: /// * [`CanonicalHeaders`](tables::CanonicalHeaders) @@ -3418,7 +3417,7 @@ impl PruneCheckpointReader for DatabaseProvider { +impl PruneCheckpointReader for DatabaseProvider { fn get_prune_checkpoint( &self, segment: PruneSegment, @@ -3435,7 +3434,7 @@ impl PruneCheckpointReader for DatabaseProvider PruneCheckpointWriter for DatabaseProvider { +impl PruneCheckpointWriter for DatabaseProvider { fn save_prune_checkpoint( &self, segment: PruneSegment, @@ -3445,7 +3444,7 @@ impl PruneCheckpointWriter for DatabaseProvider< } } -impl StatsReader for DatabaseProvider { +impl StatsReader for DatabaseProvider { fn count_entries(&self) -> ProviderResult { let db_entries = self.tx.entries::()?; let static_file_entries = match self.static_file_provider.count_entries::() { @@ -3458,7 +3457,7 @@ impl StatsReader for DatabaseProvider { } } -impl ChainStateBlockReader for DatabaseProvider { +impl ChainStateBlockReader for DatabaseProvider { fn last_finalized_block_number(&self) -> ProviderResult> { let mut finalized_blocks = self .tx @@ -3484,7 +3483,7 @@ impl ChainStateBlockReader for DatabaseProvider ChainStateBlockWriter for DatabaseProvider { +impl ChainStateBlockWriter for DatabaseProvider { fn save_finalized_block_number(&self, block_number: BlockNumber) -> ProviderResult<()> { Ok(self .tx @@ -3498,7 +3497,7 @@ impl ChainStateBlockWriter for DatabaseProvider< } } -impl DBProvider for DatabaseProvider { +impl DBProvider for DatabaseProvider { type Tx = TX; fn tx_ref(&self) -> &Self::Tx { diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index 07486f555..cacb71b35 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -8,6 +8,7 @@ use alloy_primitives::{ use reth_db::tables; use reth_db_api::{database::Database, models::StoredBlockBodyIndices}; +use reth_node_types::NodeTypes; use reth_primitives::{ Account, BlockBody, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, Signature, Transaction, TransactionSigned, TxType, Withdrawal, Withdrawals, @@ -17,8 +18,8 @@ use revm::{db::BundleState, primitives::AccountInfo}; use std::{str::FromStr, sync::LazyLock}; /// Assert genesis block -pub fn assert_genesis_block( - provider: &DatabaseProviderRW, +pub fn assert_genesis_block( + provider: &DatabaseProviderRW, g: SealedBlock, ) { let n = g.number; diff --git a/crates/storage/provider/src/test_utils/mock.rs 
b/crates/storage/provider/src/test_utils/mock.rs index ed861f5f1..1053b4778 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -19,6 +19,7 @@ use reth_db::mock::{DatabaseMock, TxMock}; use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices}; use reth_evm::ConfigureEvmEnv; use reth_execution_types::ExecutionOutcome; +use reth_node_types::NodeTypes; use reth_primitives::{ Account, Block, BlockWithSenders, Bytecode, GotExpected, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, @@ -149,10 +150,19 @@ impl MockEthProvider { } } +/// Mock node. +#[derive(Debug)] +pub struct MockNode; + +impl NodeTypes for MockNode { + type Primitives = (); + type ChainSpec = ChainSpec; +} + impl DatabaseProviderFactory for MockEthProvider { type DB = DatabaseMock; - type Provider = DatabaseProvider; - type ProviderRW = DatabaseProvider; + type Provider = DatabaseProvider; + type ProviderRW = DatabaseProvider; fn database_provider_ro(&self) -> ProviderResult { Err(ConsistentViewError::Syncing { best_block: GotExpected::new(0, 0) }.into()) diff --git a/crates/trie/db/tests/trie.rs b/crates/trie/db/tests/trie.rs index f5823404c..aee264364 100644 --- a/crates/trie/db/tests/trie.rs +++ b/crates/trie/db/tests/trie.rs @@ -11,7 +11,8 @@ use reth_db_api::{ }; use reth_primitives::{Account, StorageEntry}; use reth_provider::{ - test_utils::create_test_provider_factory, DatabaseProviderRW, StorageTrieWriter, TrieWriter, + providers::ProviderNodeTypes, test_utils::create_test_provider_factory, DatabaseProviderRW, + StorageTrieWriter, TrieWriter, }; use reth_trie::{ prefix_set::PrefixSetMut, @@ -693,8 +694,8 @@ fn storage_trie_around_extension_node() { assert_trie_updates(updates.storage_nodes_ref()); } -fn extension_node_storage_trie( - tx: &DatabaseProviderRW>, Spec>, +fn extension_node_storage_trie( + tx: &DatabaseProviderRW>, N>, hashed_address: B256, ) -> (B256, StorageTrieUpdates) { let value = U256::from(1); @@ -721,8 +722,8 @@ fn extension_node_storage_trie( (root, trie_updates) } -fn extension_node_trie( - tx: &DatabaseProviderRW>, Spec>, +fn extension_node_trie( + tx: &DatabaseProviderRW>, N>, ) -> B256 { let a = Account { nonce: 0, balance: U256::from(1u64), bytecode_hash: Some(B256::random()) }; let val = encode_account(a, None); From 2e750f0ca03043cd2370036cc4ed7c9e1c12f173 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Tue, 29 Oct 2024 15:57:35 +0100 Subject: [PATCH 342/425] test(prune): add unit tests for `ReceiptsLogPruneConfig` (#11916) --- crates/prune/types/src/lib.rs | 230 ++++++++++++++++++++++++++++++++- crates/prune/types/src/mode.rs | 5 + 2 files changed, 231 insertions(+), 4 deletions(-) diff --git a/crates/prune/types/src/lib.rs b/crates/prune/types/src/lib.rs index 6e06d6fc5..8483b7b73 100644 --- a/crates/prune/types/src/lib.rs +++ b/crates/prune/types/src/lib.rs @@ -27,6 +27,7 @@ use std::collections::BTreeMap; pub use target::{PruneModes, MINIMUM_PRUNING_DISTANCE}; use alloy_primitives::{Address, BlockNumber}; +use std::ops::Deref; /// Configuration for pruning receipts not associated with logs emitted by the specified contracts. 
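
The pruning changes and tests that follow exercise `ReceiptsLogPruneConfig::group_by_block`, which buckets each configured address under the first block that still has to be kept for it, never lower than `pruned_block + 1`. A simplified, std-only sketch of that grouping rule is given below; `Mode` and the distance arithmetic are reduced stand-ins (the real code also enforces a minimum pruning distance), so treat this as an illustration rather than the reth API.

```rust
use std::collections::BTreeMap;

/// Simplified stand-in for a per-address prune mode.
#[derive(Clone, Copy)]
enum Mode {
    /// Prune everything before this block.
    Before(u64),
    /// Keep only the last `distance` blocks behind the tip.
    Distance(u64),
}

/// Group addresses by the lowest block that must be retained for them,
/// starting no lower than the block right after the already-pruned one.
fn group_by_block<'a>(
    config: &'a BTreeMap<[u8; 20], Mode>,
    tip: u64,
    pruned_block: Option<u64>,
) -> BTreeMap<u64, Vec<&'a [u8; 20]>> {
    let base_block = pruned_block.unwrap_or_default() + 1;
    let mut map: BTreeMap<u64, Vec<&[u8; 20]>> = BTreeMap::new();
    for (address, mode) in config {
        let target = match mode {
            Mode::Before(block) => *block,
            Mode::Distance(distance) => tip.saturating_sub(*distance) + 1,
        };
        map.entry(base_block.max(target)).or_default().push(address);
    }
    map
}

fn main() {
    let mut config = BTreeMap::new();
    config.insert([1u8; 20], Mode::Before(600));
    config.insert([2u8; 20], Mode::Distance(100_000));

    let grouped = group_by_block(&config, 200_000, Some(400));
    // The `Before(600)` address lands under block 600; the distance-based one
    // under tip - distance + 1 = 100_001.
    assert_eq!(grouped[&600].len(), 1);
    assert_eq!(grouped[&100_001].len(), 1);
}
```
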
#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize)] @@ -59,7 +60,7 @@ impl ReceiptsLogPruneConfig { pruned_block: Option, ) -> Result>, PruneSegmentError> { let mut map = BTreeMap::new(); - let pruned_block = pruned_block.unwrap_or_default(); + let base_block = pruned_block.unwrap_or_default() + 1; for (address, mode) in &self.0 { // Getting `None`, means that there is nothing to prune yet, so we need it to include in @@ -69,7 +70,7 @@ impl ReceiptsLogPruneConfig { // // Reminder, that we increment because the [`BlockNumber`] key of the new map should be // viewed as `PruneMode::Before(block)` - let block = (pruned_block + 1).max( + let block = base_block.max( mode.prune_target_block(tip, PruneSegment::ContractLogs, PrunePurpose::User)? .map(|(block, _)| block) .unwrap_or_default() + @@ -90,8 +91,8 @@ impl ReceiptsLogPruneConfig { let pruned_block = pruned_block.unwrap_or_default(); let mut lowest = None; - for mode in self.0.values() { - if let PruneMode::Distance(_) = mode { + for mode in self.values() { + if mode.is_distance() { if let Some((block, _)) = mode.prune_target_block(tip, PruneSegment::ContractLogs, PrunePurpose::User)? { @@ -103,3 +104,224 @@ impl ReceiptsLogPruneConfig { Ok(lowest.map(|lowest| lowest.max(pruned_block))) } } + +impl Deref for ReceiptsLogPruneConfig { + type Target = BTreeMap; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_group_by_block_empty_config() { + let config = ReceiptsLogPruneConfig(BTreeMap::new()); + let tip = 1000; + let pruned_block = None; + + let result = config.group_by_block(tip, pruned_block).unwrap(); + assert!(result.is_empty(), "The result should be empty when the config is empty"); + } + + #[test] + fn test_group_by_block_single_entry() { + let mut config_map = BTreeMap::new(); + let address = Address::new([1; 20]); + let prune_mode = PruneMode::Before(500); + config_map.insert(address, prune_mode); + + let config = ReceiptsLogPruneConfig(config_map); + // Big tip to have something to prune for the target block + let tip = 3000000; + let pruned_block = Some(400); + + let result = config.group_by_block(tip, pruned_block).unwrap(); + + // Expect one entry with block 500 and the corresponding address + assert_eq!(result.len(), 1); + assert_eq!(result[&500], vec![&address], "Address should be grouped under block 500"); + + // Tip smaller than the target block, so that we have nothing to prune for the block + let tip = 300; + let pruned_block = Some(400); + + let result = config.group_by_block(tip, pruned_block).unwrap(); + + // Expect one entry with block 400 and the corresponding address + assert_eq!(result.len(), 1); + assert_eq!(result[&401], vec![&address], "Address should be grouped under block 400"); + } + + #[test] + fn test_group_by_block_multiple_entries() { + let mut config_map = BTreeMap::new(); + let address1 = Address::new([1; 20]); + let address2 = Address::new([2; 20]); + let prune_mode1 = PruneMode::Before(600); + let prune_mode2 = PruneMode::Before(800); + config_map.insert(address1, prune_mode1); + config_map.insert(address2, prune_mode2); + + let config = ReceiptsLogPruneConfig(config_map); + let tip = 900000; + let pruned_block = Some(400); + + let result = config.group_by_block(tip, pruned_block).unwrap(); + + // Expect two entries: one for block 600 and another for block 800 + assert_eq!(result.len(), 2); + assert_eq!(result[&600], vec![&address1], "Address1 should be grouped under block 600"); + assert_eq!(result[&800], 
vec![&address2], "Address2 should be grouped under block 800"); + } + + #[test] + fn test_group_by_block_with_distance_prune_mode() { + let mut config_map = BTreeMap::new(); + let address = Address::new([1; 20]); + let prune_mode = PruneMode::Distance(100000); + config_map.insert(address, prune_mode); + + let config = ReceiptsLogPruneConfig(config_map); + let tip = 100100; + // Pruned block is smaller than the target block + let pruned_block = Some(50); + + let result = config.group_by_block(tip, pruned_block).unwrap(); + + // Expect the entry to be grouped under block 100 (tip - distance) + assert_eq!(result.len(), 1); + assert_eq!(result[&101], vec![&address], "Address should be grouped under block 100"); + + let tip = 100100; + // Pruned block is larger than the target block + let pruned_block = Some(800); + + let result = config.group_by_block(tip, pruned_block).unwrap(); + + // Expect the entry to be grouped under block 800 which is larger than tip - distance + assert_eq!(result.len(), 1); + assert_eq!(result[&801], vec![&address], "Address should be grouped under block 800"); + } + + #[test] + fn test_lowest_block_with_distance_empty_config() { + let config = ReceiptsLogPruneConfig(BTreeMap::new()); + let tip = 1000; + let pruned_block = None; + + let result = config.lowest_block_with_distance(tip, pruned_block).unwrap(); + assert_eq!(result, None, "The result should be None when the config is empty"); + } + + #[test] + fn test_lowest_block_with_distance_no_distance_mode() { + let mut config_map = BTreeMap::new(); + let address = Address::new([1; 20]); + let prune_mode = PruneMode::Before(500); + config_map.insert(address, prune_mode); + + let config = ReceiptsLogPruneConfig(config_map); + let tip = 1000; + let pruned_block = None; + + let result = config.lowest_block_with_distance(tip, pruned_block).unwrap(); + assert_eq!(result, None, "The result should be None when there are no Distance modes"); + } + + #[test] + fn test_lowest_block_with_distance_single_entry() { + let mut config_map = BTreeMap::new(); + let address = Address::new([1; 20]); + let prune_mode = PruneMode::Distance(100000); + config_map.insert(address, prune_mode); + + let config = ReceiptsLogPruneConfig(config_map); + + let tip = 100100; + let pruned_block = Some(400); + + // Expect the lowest block to be 400 as 400 > 100100 - 100000 (tip - distance) + assert_eq!( + config.lowest_block_with_distance(tip, pruned_block).unwrap(), + Some(400), + "The lowest block should be 400" + ); + + let tip = 100100; + let pruned_block = Some(50); + + // Expect the lowest block to be 100 as 100 > 50 (pruned block) + assert_eq!( + config.lowest_block_with_distance(tip, pruned_block).unwrap(), + Some(100), + "The lowest block should be 100" + ); + } + + #[test] + fn test_lowest_block_with_distance_multiple_entries_last() { + let mut config_map = BTreeMap::new(); + let address1 = Address::new([1; 20]); + let address2 = Address::new([2; 20]); + let prune_mode1 = PruneMode::Distance(100100); + let prune_mode2 = PruneMode::Distance(100300); + config_map.insert(address1, prune_mode1); + config_map.insert(address2, prune_mode2); + + let config = ReceiptsLogPruneConfig(config_map); + let tip = 200300; + let pruned_block = Some(100); + + // The lowest block should be 200300 - 100300 = 100000: + // - First iteration will return 100200 => 200300 - 100100 = 100200 + // - Second iteration will return 100000 => 200300 - 100300 = 100000 < 100200 + // - Final result is 100000 + assert_eq!(config.lowest_block_with_distance(tip, 
pruned_block).unwrap(), Some(100000)); + } + + #[test] + fn test_lowest_block_with_distance_multiple_entries_first() { + let mut config_map = BTreeMap::new(); + let address1 = Address::new([1; 20]); + let address2 = Address::new([2; 20]); + let prune_mode1 = PruneMode::Distance(100400); + let prune_mode2 = PruneMode::Distance(100300); + config_map.insert(address1, prune_mode1); + config_map.insert(address2, prune_mode2); + + let config = ReceiptsLogPruneConfig(config_map); + let tip = 200300; + let pruned_block = Some(100); + + // The lowest block should be 200300 - 100400 = 99900: + // - First iteration, lowest block is 200300 - 100400 = 99900 + // - Second iteration, lowest block is still 99900 < 200300 - 100300 = 100000 + // - Final result is 99900 + assert_eq!(config.lowest_block_with_distance(tip, pruned_block).unwrap(), Some(99900)); + } + + #[test] + fn test_lowest_block_with_distance_multiple_entries_pruned_block() { + let mut config_map = BTreeMap::new(); + let address1 = Address::new([1; 20]); + let address2 = Address::new([2; 20]); + let prune_mode1 = PruneMode::Distance(100400); + let prune_mode2 = PruneMode::Distance(100300); + config_map.insert(address1, prune_mode1); + config_map.insert(address2, prune_mode2); + + let config = ReceiptsLogPruneConfig(config_map); + let tip = 200300; + let pruned_block = Some(100000); + + // The lowest block should be 100000 because: + // - Lowest is 200300 - 100400 = 99900 < 200300 - 100300 = 100000 + // - Lowest is compared to the pruned block 100000: 100000 > 99900 + // - Finally the lowest block is 100000 + assert_eq!(config.lowest_block_with_distance(tip, pruned_block).unwrap(), Some(100000)); + } +} diff --git a/crates/prune/types/src/mode.rs b/crates/prune/types/src/mode.rs index 9a8e55bb3..de9b9e6dc 100644 --- a/crates/prune/types/src/mode.rs +++ b/crates/prune/types/src/mode.rs @@ -74,6 +74,11 @@ impl PruneMode { pub const fn is_full(&self) -> bool { matches!(self, Self::Full) } + + /// Returns true if the prune mode is [`PruneMode::Distance`]. + pub const fn is_distance(&self) -> bool { + matches!(self, Self::Distance(_)) + } } #[cfg(test)] From 82784183e74deb78556eb01da5b015904eb8d194 Mon Sep 17 00:00:00 2001 From: tedison <76473430+edisontim@users.noreply.github.com> Date: Tue, 29 Oct 2024 12:47:12 -0400 Subject: [PATCH 343/425] feat: transaction trait (#11877) --- crates/primitives-traits/Cargo.toml | 1 - crates/primitives-traits/src/lib.rs | 3 +- .../primitives-traits/src/transaction/mod.rs | 55 +++++++++++++++---- 3 files changed, 46 insertions(+), 13 deletions(-) diff --git a/crates/primitives-traits/Cargo.toml b/crates/primitives-traits/Cargo.toml index 4319232f8..6cafe8b8b 100644 --- a/crates/primitives-traits/Cargo.toml +++ b/crates/primitives-traits/Cargo.toml @@ -44,7 +44,6 @@ reth-testing-utils.workspace = true alloy-primitives = { workspace = true, features = ["arbitrary"] } alloy-consensus = { workspace = true, features = ["arbitrary"] } -arbitrary = { workspace = true, features = ["derive"] } bincode.workspace = true proptest-arbitrary-interop.workspace = true proptest.workspace = true diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index 57d1119b0..0489a250b 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -14,6 +14,7 @@ extern crate alloc; /// Common constants. 
pub mod constants; + pub use constants::gas_units::{format_gas, format_gas_throughput}; /// Minimal account @@ -24,7 +25,7 @@ pub mod receipt; pub use receipt::Receipt; pub mod transaction; -pub use transaction::{signed::SignedTransaction, Transaction}; +pub use transaction::{signed::SignedTransaction, FullTransaction, Transaction}; mod integer_list; pub use integer_list::{IntegerList, IntegerListError}; diff --git a/crates/primitives-traits/src/transaction/mod.rs b/crates/primitives-traits/src/transaction/mod.rs index a306c5f76..a1ad81ab3 100644 --- a/crates/primitives-traits/src/transaction/mod.rs +++ b/crates/primitives-traits/src/transaction/mod.rs @@ -1,28 +1,61 @@ //! Transaction abstraction -pub mod signed; +use core::{fmt::Debug, hash::Hash}; -use alloc::fmt; +use alloy_primitives::{TxKind, B256}; use reth_codecs::Compact; use serde::{Deserialize, Serialize}; -/// Helper trait that unifies all behaviour required by transaction to support full node operations. -pub trait FullTransaction: Transaction + Compact {} - -impl FullTransaction for T where T: Transaction + Compact {} +pub mod signed; +#[allow(dead_code)] /// Abstraction of a transaction. pub trait Transaction: - alloy_consensus::Transaction + Debug + + Default + Clone - + fmt::Debug - + PartialEq + Eq - + Default + + PartialEq + + Hash + + Serialize + alloy_rlp::Encodable + alloy_rlp::Decodable - + Serialize + for<'de> Deserialize<'de> + + alloy_consensus::Transaction + + MaybeArbitrary { + /// Heavy operation that return signature hash over rlp encoded transaction. + /// It is only for signature signing or signer recovery. + fn signature_hash(&self) -> B256; + + /// Gets the transaction's [`TxKind`], which is the address of the recipient or + /// [`TxKind::Create`] if the transaction is a contract creation. + fn kind(&self) -> TxKind; + + /// Returns true if the tx supports dynamic fees + fn is_dynamic_fee(&self) -> bool; + + /// Returns the effective gas price for the given base fee. + fn effective_gas_price(&self, base_fee: Option) -> u128; + + /// This encodes the transaction _without_ the signature, and is only suitable for creating a + /// hash intended for signing. + fn encode_without_signature(&self, out: &mut dyn bytes::BufMut); + + /// Calculates a heuristic for the in-memory size of the [Transaction]. + fn size(&self) -> usize; } + +#[cfg(not(feature = "arbitrary"))] +/// Helper trait that requires arbitrary implementation if the feature is enabled. +pub trait MaybeArbitrary {} + +#[cfg(feature = "arbitrary")] +/// Helper trait that requires arbitrary implementation if the feature is enabled. +pub trait MaybeArbitrary: for<'a> arbitrary::Arbitrary<'a> {} + +/// Helper trait that unifies all behaviour required by transaction to support full node operations. 
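// Illustrative sketch (hypothetical, not from the patch): with the `Transaction` trait
// above, downstream helpers can stay generic over the concrete transaction type.
fn spends_to_create<T: Transaction>(tx: &T) -> bool {
    // `kind()` yields the recipient, or `TxKind::Create` for a contract creation.
    tx.kind().is_create()
}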
+pub trait FullTransaction: Transaction + Compact {} + +impl FullTransaction for T where T: Transaction + Compact {} From 734c78fdfb46cc5a97971450ed74c6cbdf62d5af Mon Sep 17 00:00:00 2001 From: Kufre Samuel Date: Tue, 29 Oct 2024 22:24:35 +0100 Subject: [PATCH 344/425] feat(discv4): neighbors packet logging (#12042) Co-authored-by: Oliver Nordbjerg --- Cargo.lock | 1 + crates/net/discv4/Cargo.toml | 1 + crates/net/discv4/src/lib.rs | 13 ++++++++++++- 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 5b6224457..6545563d9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6922,6 +6922,7 @@ dependencies = [ "discv5", "enr", "generic-array", + "itertools 0.13.0", "parking_lot", "rand 0.8.5", "reth-ethereum-forks", diff --git a/crates/net/discv4/Cargo.toml b/crates/net/discv4/Cargo.toml index 6fb669388..f1c8410ee 100644 --- a/crates/net/discv4/Cargo.toml +++ b/crates/net/discv4/Cargo.toml @@ -42,6 +42,7 @@ parking_lot.workspace = true rand = { workspace = true, optional = true } generic-array.workspace = true serde = { workspace = true, optional = true } +itertools.workspace = true [dev-dependencies] assert_matches.workspace = true diff --git a/crates/net/discv4/src/lib.rs b/crates/net/discv4/src/lib.rs index a99906bdf..788e93048 100644 --- a/crates/net/discv4/src/lib.rs +++ b/crates/net/discv4/src/lib.rs @@ -38,6 +38,7 @@ use discv5::{ ConnectionDirection, ConnectionState, }; use enr::Enr; +use itertools::Itertools; use parking_lot::Mutex; use proto::{EnrRequest, EnrResponse}; use reth_ethereum_forks::ForkId; @@ -861,7 +862,7 @@ impl Discv4Service { let Some(bucket) = self.kbuckets.get_bucket(&key) else { return false }; if bucket.num_entries() < MAX_NODES_PER_BUCKET / 2 { // skip half empty bucket - return false; + return false } self.remove_key(node_id, key) } @@ -1406,6 +1407,16 @@ impl Discv4Service { } }; + // log the peers we discovered + trace!(target: "discv4", + target=format!("{:#?}", node_id), + peers_count=msg.nodes.len(), + peers=format!("[{:#}]", msg.nodes.iter() + .map(|node_rec| node_rec.id + ).format(", ")), + "Received peers from Neighbours packet" + ); + // This is the recursive lookup step where we initiate new FindNode requests for new nodes // that were discovered. 
for node in msg.nodes.into_iter().map(NodeRecord::into_ipv4_mapped) { From 129f3ba9116da0fcbfafb78e66b7a0163e5464d3 Mon Sep 17 00:00:00 2001 From: frisitano <35734660+frisitano@users.noreply.github.com> Date: Wed, 30 Oct 2024 16:48:43 +0800 Subject: [PATCH 345/425] feat: introduce StateCommitment type (#11842) --- Cargo.lock | 5 ++ crates/ethereum/node/Cargo.toml | 2 + crates/ethereum/node/src/node.rs | 2 + crates/exex/test-utils/Cargo.toml | 1 + crates/exex/test-utils/src/lib.rs | 1 + crates/node/builder/src/node.rs | 2 + crates/node/types/Cargo.toml | 1 + crates/node/types/src/lib.rs | 49 +++++++++++++------ crates/optimism/node/Cargo.toml | 2 + crates/optimism/node/src/node.rs | 2 + .../storage/provider/src/test_utils/mock.rs | 2 + crates/storage/provider/src/test_utils/mod.rs | 1 + crates/trie/common/src/key.rs | 18 +++++++ crates/trie/common/src/lib.rs | 3 ++ crates/trie/db/src/commitment.rs | 39 +++++++++++++++ crates/trie/db/src/lib.rs | 2 + examples/custom-engine-types/Cargo.toml | 1 + examples/custom-engine-types/src/main.rs | 2 + 18 files changed, 120 insertions(+), 15 deletions(-) create mode 100644 crates/trie/common/src/key.rs create mode 100644 crates/trie/db/src/commitment.rs diff --git a/Cargo.lock b/Cargo.lock index 6545563d9..27581ba3a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2856,6 +2856,7 @@ dependencies = [ "reth-payload-builder", "reth-primitives", "reth-tracing", + "reth-trie-db", "serde", "thiserror", "tokio", @@ -7564,6 +7565,7 @@ dependencies = [ "reth-provider", "reth-tasks", "reth-transaction-pool", + "reth-trie-db", "tempfile", "thiserror", "tokio", @@ -8024,6 +8026,7 @@ dependencies = [ "reth-tasks", "reth-tracing", "reth-transaction-pool", + "reth-trie-db", "revm", "serde_json", "tokio", @@ -8086,6 +8089,7 @@ dependencies = [ "reth-engine-primitives", "reth-primitives", "reth-primitives-traits", + "reth-trie-db", ] [[package]] @@ -8246,6 +8250,7 @@ dependencies = [ "reth-revm", "reth-tracing", "reth-transaction-pool", + "reth-trie-db", "revm", "serde", "serde_json", diff --git a/crates/ethereum/node/Cargo.toml b/crates/ethereum/node/Cargo.toml index 98224b99e..83b620347 100644 --- a/crates/ethereum/node/Cargo.toml +++ b/crates/ethereum/node/Cargo.toml @@ -31,6 +31,7 @@ reth-node-api.workspace = true reth-chainspec.workspace = true reth-primitives.workspace = true reth-revm = { workspace = true, features = ["std"] } +reth-trie-db.workspace = true # revm with required ethereum features revm = { workspace = true, features = ["secp256k1", "blst", "c-kzg"] } @@ -72,6 +73,7 @@ test-utils = [ "reth-db/test-utils", "reth-provider/test-utils", "reth-transaction-pool/test-utils", + "reth-trie-db/test-utils", "revm/test-utils", "reth-evm/test-utils" ] diff --git a/crates/ethereum/node/src/node.rs b/crates/ethereum/node/src/node.rs index dbd6ce0a1..1942f8a9e 100644 --- a/crates/ethereum/node/src/node.rs +++ b/crates/ethereum/node/src/node.rs @@ -34,6 +34,7 @@ use reth_transaction_pool::{ blobstore::DiskFileBlobStore, EthTransactionPool, TransactionPool, TransactionValidationTaskExecutor, }; +use reth_trie_db::MerklePatriciaTrie; use crate::{EthEngineTypes, EthEvmConfig}; @@ -81,6 +82,7 @@ impl EthereumNode { impl NodeTypes for EthereumNode { type Primitives = EthPrimitives; type ChainSpec = ChainSpec; + type StateCommitment = MerklePatriciaTrie; } impl NodeTypesWithEngine for EthereumNode { diff --git a/crates/exex/test-utils/Cargo.toml b/crates/exex/test-utils/Cargo.toml index b850295a3..cd0e0831b 100644 --- a/crates/exex/test-utils/Cargo.toml +++ 
b/crates/exex/test-utils/Cargo.toml @@ -31,6 +31,7 @@ reth-primitives.workspace = true reth-provider = { workspace = true, features = ["test-utils"] } reth-tasks.workspace = true reth-transaction-pool = { workspace = true, features = ["test-utils"] } +reth-trie-db.workspace = true ## async futures-util.workspace = true diff --git a/crates/exex/test-utils/src/lib.rs b/crates/exex/test-utils/src/lib.rs index 1f6ea75ce..06aa8c81c 100644 --- a/crates/exex/test-utils/src/lib.rs +++ b/crates/exex/test-utils/src/lib.rs @@ -119,6 +119,7 @@ pub struct TestNode; impl NodeTypes for TestNode { type Primitives = (); type ChainSpec = ChainSpec; + type StateCommitment = reth_trie_db::MerklePatriciaTrie; } impl NodeTypesWithEngine for TestNode { diff --git a/crates/node/builder/src/node.rs b/crates/node/builder/src/node.rs index 3b2f467d6..62c710ea8 100644 --- a/crates/node/builder/src/node.rs +++ b/crates/node/builder/src/node.rs @@ -69,6 +69,8 @@ where type Primitives = ::Primitives; type ChainSpec = ::ChainSpec; + + type StateCommitment = ::StateCommitment; } impl NodeTypesWithEngine for AnyNode diff --git a/crates/node/types/Cargo.toml b/crates/node/types/Cargo.toml index 5747abe9c..21facae54 100644 --- a/crates/node/types/Cargo.toml +++ b/crates/node/types/Cargo.toml @@ -17,3 +17,4 @@ reth-db-api.workspace = true reth-engine-primitives.workspace = true reth-primitives.workspace = true reth-primitives-traits.workspace = true +reth-trie-db.workspace = true diff --git a/crates/node/types/src/lib.rs b/crates/node/types/src/lib.rs index 5ba03e679..38e194bd4 100644 --- a/crates/node/types/src/lib.rs +++ b/crates/node/types/src/lib.rs @@ -18,6 +18,7 @@ use reth_db_api::{ Database, }; use reth_engine_primitives::EngineTypes; +use reth_trie_db::StateCommitment; /// Configures all the primitive types of the node. pub trait NodePrimitives { @@ -39,6 +40,8 @@ pub trait NodeTypes: Send + Sync + Unpin + 'static { type Primitives: NodePrimitives; /// The type used for configuration of the EVM. type ChainSpec: EthChainSpec; + /// The type used to perform state commitment operations. + type StateCommitment: StateCommitment; } /// The type that configures an Ethereum-like node with an engine for consensus. @@ -89,6 +92,7 @@ where { type Primitives = Types::Primitives; type ChainSpec = Types::ChainSpec; + type StateCommitment = Types::StateCommitment; } impl NodeTypesWithEngine for NodeTypesWithDBAdapter @@ -109,70 +113,85 @@ where /// A [`NodeTypes`] type builder. #[derive(Default, Debug)] -pub struct AnyNodeTypes

<P = (), C = ()>(PhantomData<P>, PhantomData<C>);
+pub struct AnyNodeTypes<P = (), C = (), S = ()>(PhantomData<P>, PhantomData<C>, PhantomData<S>);
 
-impl<P, C> AnyNodeTypes<P, C> {
+impl<P, C, S> AnyNodeTypes<P, C, S> {
     /// Sets the `Primitives` associated type.
-    pub const fn primitives<T>(self) -> AnyNodeTypes<T, C> {
-        AnyNodeTypes::<T, C>(PhantomData::<T>, PhantomData::<C>)
+    pub const fn primitives<T>(self) -> AnyNodeTypes<T, C, S> {
+        AnyNodeTypes::<T, C, S>(PhantomData::<T>, PhantomData::<C>, PhantomData::<S>)
     }
 
     /// Sets the `ChainSpec` associated type.
-    pub const fn chain_spec<T>(self) -> AnyNodeTypes<P, T> {
-        AnyNodeTypes::<P, T>(PhantomData::<P>, PhantomData::<T>)
+    pub const fn chain_spec<T>(self) -> AnyNodeTypes<P, T, S> {
+        AnyNodeTypes::<P, T, S>(PhantomData::<P>, PhantomData::<T>, PhantomData::<S>)
+    }
+
+    /// Sets the `StateCommitment` associated type.
+    pub const fn state_commitment<T>(self) -> AnyNodeTypes<P, C, T> {
+        AnyNodeTypes::<P, C, T>(PhantomData::<P>, PhantomData::<C>, PhantomData::<T>)
     }
 }
 
-impl<P, C> NodeTypes for AnyNodeTypes<P, C>
+impl<P, C, S> NodeTypes for AnyNodeTypes<P, C, S>
 where
     P: NodePrimitives + Send + Sync + Unpin + 'static,
     C: EthChainSpec + 'static,
+    S: StateCommitment,
 {
     type Primitives = P;
     type ChainSpec = C;
+    type StateCommitment = S;
 }
 
 /// A [`NodeTypesWithEngine`] type builder.
 #[derive(Default, Debug)]
-pub struct AnyNodeTypesWithEngine<P = (), E = (), C = ()> {
+pub struct AnyNodeTypesWithEngine<P = (), E = (), C = (), S = ()>

{ /// Embedding the basic node types. - base: AnyNodeTypes, + base: AnyNodeTypes, /// Phantom data for the engine. _engine: PhantomData, } -impl AnyNodeTypesWithEngine { +impl AnyNodeTypesWithEngine { /// Sets the `Primitives` associated type. - pub const fn primitives(self) -> AnyNodeTypesWithEngine { + pub const fn primitives(self) -> AnyNodeTypesWithEngine { AnyNodeTypesWithEngine { base: self.base.primitives::(), _engine: PhantomData } } /// Sets the `Engine` associated type. - pub const fn engine(self) -> AnyNodeTypesWithEngine { + pub const fn engine(self) -> AnyNodeTypesWithEngine { AnyNodeTypesWithEngine { base: self.base, _engine: PhantomData:: } } /// Sets the `ChainSpec` associated type. - pub const fn chain_spec(self) -> AnyNodeTypesWithEngine { + pub const fn chain_spec(self) -> AnyNodeTypesWithEngine { AnyNodeTypesWithEngine { base: self.base.chain_spec::(), _engine: PhantomData } } + + /// Sets the `StateCommitment` associated type. + pub const fn state_commitment(self) -> AnyNodeTypesWithEngine { + AnyNodeTypesWithEngine { base: self.base.state_commitment::(), _engine: PhantomData } + } } -impl NodeTypes for AnyNodeTypesWithEngine +impl NodeTypes for AnyNodeTypesWithEngine where P: NodePrimitives + Send + Sync + Unpin + 'static, E: EngineTypes + Send + Sync + Unpin, C: EthChainSpec + 'static, + S: StateCommitment, { type Primitives = P; type ChainSpec = C; + type StateCommitment = S; } -impl NodeTypesWithEngine for AnyNodeTypesWithEngine +impl NodeTypesWithEngine for AnyNodeTypesWithEngine where P: NodePrimitives + Send + Sync + Unpin + 'static, E: EngineTypes + Send + Sync + Unpin, C: EthChainSpec + 'static, + S: StateCommitment, { type Engine = E; } diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index 37cf4a328..deabbac52 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -28,6 +28,7 @@ reth-network.workspace = true reth-evm.workspace = true reth-revm = { workspace = true, features = ["std"] } reth-beacon-consensus.workspace = true +reth-trie-db.workspace = true # op-reth reth-optimism-payload-builder.workspace = true @@ -99,5 +100,6 @@ test-utils = [ "reth-db/test-utils", "reth-provider/test-utils", "reth-transaction-pool/test-utils", + "reth-trie-db/test-utils", "revm/test-utils" ] diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 9492bb8c4..4328a55fb 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -30,6 +30,7 @@ use reth_transaction_pool::{ blobstore::DiskFileBlobStore, CoinbaseTipOrdering, TransactionPool, TransactionValidationTaskExecutor, }; +use reth_trie_db::MerklePatriciaTrie; use crate::{ args::RollupArgs, @@ -122,6 +123,7 @@ where impl NodeTypes for OptimismNode { type Primitives = OpPrimitives; type ChainSpec = OpChainSpec; + type StateCommitment = MerklePatriciaTrie; } impl NodeTypesWithEngine for OptimismNode { diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs index 1053b4778..e2e08e61a 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -34,6 +34,7 @@ use reth_trie::{ updates::TrieUpdates, AccountProof, HashedPostState, HashedStorage, MultiProof, StorageProof, TrieInput, }; +use reth_trie_db::MerklePatriciaTrie; use revm::primitives::{BlockEnv, CfgEnvWithHandlerCfg}; use std::{ collections::BTreeMap, @@ -157,6 +158,7 @@ pub struct MockNode; impl NodeTypes for MockNode { type Primitives = (); 
type ChainSpec = ChainSpec; + type StateCommitment = MerklePatriciaTrie; } impl DatabaseProviderFactory for MockEthProvider { diff --git a/crates/storage/provider/src/test_utils/mod.rs b/crates/storage/provider/src/test_utils/mod.rs index 220078109..c0e80930b 100644 --- a/crates/storage/provider/src/test_utils/mod.rs +++ b/crates/storage/provider/src/test_utils/mod.rs @@ -25,6 +25,7 @@ pub type MockNodeTypes = reth_node_types::AnyNodeTypesWithEngine< (), reth_ethereum_engine_primitives::EthEngineTypes, reth_chainspec::ChainSpec, + reth_trie_db::MerklePatriciaTrie, >; /// Mock [`reth_node_types::NodeTypesWithDB`] for testing. diff --git a/crates/trie/common/src/key.rs b/crates/trie/common/src/key.rs new file mode 100644 index 000000000..9e440d199 --- /dev/null +++ b/crates/trie/common/src/key.rs @@ -0,0 +1,18 @@ +use alloy_primitives::B256; +use revm_primitives::keccak256; + +/// Trait for hashing keys in state. +pub trait KeyHasher: Default + Clone + Send + Sync + 'static { + /// Hashes the given bytes into a 256-bit hash. + fn hash_key>(bytes: T) -> B256; +} + +/// A key hasher that uses the Keccak-256 hash function. +#[derive(Clone, Debug, Default)] +pub struct KeccakKeyHasher; + +impl KeyHasher for KeccakKeyHasher { + fn hash_key>(bytes: T) -> B256 { + keccak256(bytes) + } +} diff --git a/crates/trie/common/src/lib.rs b/crates/trie/common/src/lib.rs index bdec36028..7645ebd3a 100644 --- a/crates/trie/common/src/lib.rs +++ b/crates/trie/common/src/lib.rs @@ -14,6 +14,9 @@ pub mod hash_builder; mod account; pub use account::TrieAccount; +mod key; +pub use key::{KeccakKeyHasher, KeyHasher}; + mod nibbles; pub use nibbles::{Nibbles, StoredNibbles, StoredNibblesSubKey}; diff --git a/crates/trie/db/src/commitment.rs b/crates/trie/db/src/commitment.rs new file mode 100644 index 000000000..c608aefff --- /dev/null +++ b/crates/trie/db/src/commitment.rs @@ -0,0 +1,39 @@ +use crate::{ + DatabaseHashedCursorFactory, DatabaseProof, DatabaseStateRoot, DatabaseStorageRoot, + DatabaseTrieCursorFactory, DatabaseTrieWitness, +}; +use reth_db::transaction::DbTx; +use reth_trie::{ + proof::Proof, witness::TrieWitness, KeccakKeyHasher, KeyHasher, StateRoot, StorageRoot, +}; + +/// The `StateCommitment` trait provides associated types for state commitment operations. +pub trait StateCommitment: std::fmt::Debug + Send + Sync + Unpin + 'static { + /// The state root type. + type StateRoot<'a, TX: DbTx + 'a>: DatabaseStateRoot<'a, TX>; + /// The storage root type. + type StorageRoot<'a, TX: DbTx + 'a>: DatabaseStorageRoot<'a, TX>; + /// The state proof type. + type StateProof<'a, TX: DbTx + 'a>: DatabaseProof<'a, TX>; + /// The state witness type. + type StateWitness<'a, TX: DbTx + 'a>: DatabaseTrieWitness<'a, TX>; + /// The key hasher type. + type KeyHasher: KeyHasher; +} + +/// The state commitment type for Ethereum's Merkle Patricia Trie. 
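// Illustrative sketch (hypothetical, not from the patch): a `StateCommitment`
// implementation picks a `KeyHasher` that turns raw keys into hashed-state keys;
// for the default `KeccakKeyHasher` this is plain `keccak256`.
fn hash_state_key<H: KeyHasher>(raw: &[u8]) -> B256 {
    // With `H = KeccakKeyHasher` this returns `keccak256(raw)`.
    H::hash_key(raw)
}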
+#[derive(Debug)] +#[non_exhaustive] +pub struct MerklePatriciaTrie; + +impl StateCommitment for MerklePatriciaTrie { + type StateRoot<'a, TX: DbTx + 'a> = + StateRoot, DatabaseHashedCursorFactory<'a, TX>>; + type StorageRoot<'a, TX: DbTx + 'a> = + StorageRoot, DatabaseHashedCursorFactory<'a, TX>>; + type StateProof<'a, TX: DbTx + 'a> = + Proof, DatabaseHashedCursorFactory<'a, TX>>; + type StateWitness<'a, TX: DbTx + 'a> = + TrieWitness, DatabaseHashedCursorFactory<'a, TX>>; + type KeyHasher = KeccakKeyHasher; +} diff --git a/crates/trie/db/src/lib.rs b/crates/trie/db/src/lib.rs index 3a9b1e328..27c18af6c 100644 --- a/crates/trie/db/src/lib.rs +++ b/crates/trie/db/src/lib.rs @@ -1,5 +1,6 @@ //! An integration of [`reth-trie`] with [`reth-db`]. +mod commitment; mod hashed_cursor; mod prefix_set; mod proof; @@ -8,6 +9,7 @@ mod storage; mod trie_cursor; mod witness; +pub use commitment::{MerklePatriciaTrie, StateCommitment}; pub use hashed_cursor::{ DatabaseHashedAccountCursor, DatabaseHashedCursorFactory, DatabaseHashedStorageCursor, }; diff --git a/examples/custom-engine-types/Cargo.toml b/examples/custom-engine-types/Cargo.toml index f826451d2..1fbb3c494 100644 --- a/examples/custom-engine-types/Cargo.toml +++ b/examples/custom-engine-types/Cargo.toml @@ -16,6 +16,7 @@ reth-basic-payload-builder.workspace = true reth-ethereum-payload-builder.workspace = true reth-node-ethereum = { workspace = true, features = ["test-utils"] } reth-tracing.workspace = true +reth-trie-db.workspace = true alloy-genesis.workspace = true alloy-rpc-types = { workspace = true, features = ["engine"] } alloy-primitives.workspace = true diff --git a/examples/custom-engine-types/src/main.rs b/examples/custom-engine-types/src/main.rs index 30f89a0b9..a48c44b20 100644 --- a/examples/custom-engine-types/src/main.rs +++ b/examples/custom-engine-types/src/main.rs @@ -70,6 +70,7 @@ use reth_payload_builder::{ }; use reth_primitives::Withdrawals; use reth_tracing::{RethTracer, Tracer}; +use reth_trie_db::MerklePatriciaTrie; /// A custom payload attributes type. #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] @@ -228,6 +229,7 @@ struct MyCustomNode; impl NodeTypes for MyCustomNode { type Primitives = (); type ChainSpec = ChainSpec; + type StateCommitment = MerklePatriciaTrie; } /// Configure the node types with the custom engine types From 367b4ed18af583305c18adb118be0b303e55aa8f Mon Sep 17 00:00:00 2001 From: DaniPopes <57450786+DaniPopes@users.noreply.github.com> Date: Wed, 30 Oct 2024 11:10:14 +0100 Subject: [PATCH 346/425] chore(meta): update SECURITY.md (#12190) --- SECURITY.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/SECURITY.md b/SECURITY.md index 5260d529f..bea27ad11 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -2,4 +2,4 @@ ## Reporting a Vulnerability -Contact georgios at paradigm.xyz. +Contact [security@ithaca.xyz](mailto:security@ithaca.xyz). 
From 6e794ee6738c4f6c610010330d76e5a845b091b2 Mon Sep 17 00:00:00 2001 From: Dan Cline <6798349+Rjected@users.noreply.github.com> Date: Wed, 30 Oct 2024 07:29:48 -0400 Subject: [PATCH 347/425] fix(ecies): bound initial header body size (#12172) --- crates/net/ecies/src/algorithm.rs | 3 ++- crates/net/ecies/src/codec.rs | 36 +++++++++++++++++++++++++++---- crates/net/ecies/src/error.rs | 8 +++++++ 3 files changed, 42 insertions(+), 5 deletions(-) diff --git a/crates/net/ecies/src/algorithm.rs b/crates/net/ecies/src/algorithm.rs index 6bf9717fe..e4266d9a0 100644 --- a/crates/net/ecies/src/algorithm.rs +++ b/crates/net/ecies/src/algorithm.rs @@ -650,7 +650,8 @@ impl ECIES { out.extend_from_slice(tag.as_slice()); } - /// Extracts the header from slice and returns the body size. + /// Reads the `RLPx` header from the slice, setting up the MAC and AES, returning the body + /// size contained in the header. pub fn read_header(&mut self, data: &mut [u8]) -> Result { // If the data is not large enough to fit the header and mac bytes, return an error // diff --git a/crates/net/ecies/src/codec.rs b/crates/net/ecies/src/codec.rs index c3e9b8d58..b5a10284c 100644 --- a/crates/net/ecies/src/codec.rs +++ b/crates/net/ecies/src/codec.rs @@ -1,12 +1,15 @@ //! This contains the main codec for `RLPx` ECIES messages -use crate::{algorithm::ECIES, ECIESError, EgressECIESValue, IngressECIESValue}; +use crate::{algorithm::ECIES, ECIESError, ECIESErrorImpl, EgressECIESValue, IngressECIESValue}; use alloy_primitives::{bytes::BytesMut, B512 as PeerId}; use secp256k1::SecretKey; use std::{fmt::Debug, io}; use tokio_util::codec::{Decoder, Encoder}; use tracing::{instrument, trace}; +/// The max size that the initial handshake packet can be. Currently 2KiB. +const MAX_INITIAL_HANDSHAKE_SIZE: usize = 2048; + /// Tokio codec for ECIES #[derive(Debug)] pub struct ECIESCodec { @@ -26,6 +29,11 @@ pub enum ECIESState { /// message containing the nonce and other metadata. Ack, + /// This is the same as the [`ECIESState::Header`] stage, but occurs only after the first + /// [`ECIESState::Ack`] message. This is so that the initial handshake message can be properly + /// validated. + InitialHeader, + /// The third stage of the ECIES handshake, where header is parsed, message integrity checks /// performed, and message is decrypted. 
Header, @@ -70,7 +78,7 @@ impl Decoder for ECIESCodec { self.ecies.read_auth(&mut buf.split_to(total_size))?; - self.state = ECIESState::Header; + self.state = ECIESState::InitialHeader; return Ok(Some(IngressECIESValue::AuthReceive(self.ecies.remote_id()))) } ECIESState::Ack => { @@ -89,9 +97,29 @@ impl Decoder for ECIESCodec { self.ecies.read_ack(&mut buf.split_to(total_size))?; - self.state = ECIESState::Header; + self.state = ECIESState::InitialHeader; return Ok(Some(IngressECIESValue::Ack)) } + ECIESState::InitialHeader => { + if buf.len() < ECIES::header_len() { + trace!("current len {}, need {}", buf.len(), ECIES::header_len()); + return Ok(None) + } + + let body_size = + self.ecies.read_header(&mut buf.split_to(ECIES::header_len()))?; + + if body_size > MAX_INITIAL_HANDSHAKE_SIZE { + trace!(?body_size, max=?MAX_INITIAL_HANDSHAKE_SIZE, "Header exceeds max initial handshake size"); + return Err(ECIESErrorImpl::InitialHeaderBodyTooLarge { + body_size, + max_body_size: MAX_INITIAL_HANDSHAKE_SIZE, + } + .into()) + } + + self.state = ECIESState::Body; + } ECIESState::Header => { if buf.len() < ECIES::header_len() { trace!("current len {}, need {}", buf.len(), ECIES::header_len()); @@ -131,7 +159,7 @@ impl Encoder for ECIESCodec { Ok(()) } EgressECIESValue::Ack => { - self.state = ECIESState::Header; + self.state = ECIESState::InitialHeader; self.ecies.write_ack(buf); Ok(()) } diff --git a/crates/net/ecies/src/error.rs b/crates/net/ecies/src/error.rs index 79965f733..9dabfc161 100644 --- a/crates/net/ecies/src/error.rs +++ b/crates/net/ecies/src/error.rs @@ -62,6 +62,14 @@ pub enum ECIESErrorImpl { /// The encrypted data is not large enough for all fields #[error("encrypted data is not large enough for all fields")] EncryptedDataTooSmall, + /// The initial header body is too large. 
+ #[error("initial header body is {body_size} but the max is {max_body_size}")] + InitialHeaderBodyTooLarge { + /// The body size from the header + body_size: usize, + /// The max body size + max_body_size: usize, + }, /// Error when trying to split an array beyond its length #[error("requested {idx} but array len is {len}")] OutOfBounds { From 2778ba3d52605e3837ef1abd29f9ef956e0e8140 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 30 Oct 2024 12:30:49 +0100 Subject: [PATCH 348/425] tx-pool: fix `ExceedsGasLimit` error message order (#12191) --- crates/transaction-pool/src/pool/txpool.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index b11815fc4..bd0aacccf 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -628,8 +628,8 @@ impl TxPool { *transaction.hash(), PoolErrorKind::InvalidTransaction( InvalidPoolTransactionError::ExceedsGasLimit( - block_gas_limit, tx_gas_limit, + block_gas_limit, ), ), )), From 93a9b8a218309a296d96d5841df4f4b7b60701cd Mon Sep 17 00:00:00 2001 From: cody-wang-cb Date: Wed, 30 Oct 2024 08:59:19 -0400 Subject: [PATCH 349/425] feat: Eip1559 params in extradata (#11887) Co-authored-by: Dan Cline <6798349+Rjected@users.noreply.github.com> Co-authored-by: Matthias Seitz --- .../ethereum/engine-primitives/src/payload.rs | 2 +- crates/ethereum/evm/src/lib.rs | 7 +- crates/ethereum/payload/src/lib.rs | 10 +- crates/evm/Cargo.toml | 16 +- crates/evm/src/lib.rs | 5 +- crates/optimism/chainspec/src/lib.rs | 174 +++++++++++++++++- crates/optimism/evm/src/config.rs | 8 +- crates/optimism/evm/src/lib.rs | 15 +- crates/optimism/hardforks/src/hardfork.rs | 4 + crates/optimism/node/src/engine.rs | 137 +++++++++++++- crates/optimism/node/tests/e2e/utils.rs | 1 + crates/optimism/payload/src/builder.rs | 37 +++- crates/optimism/payload/src/error.rs | 14 ++ crates/optimism/payload/src/payload.rs | 64 ++++++- examples/custom-evm/src/main.rs | 5 +- examples/stateful-precompile/src/main.rs | 5 +- 16 files changed, 461 insertions(+), 43 deletions(-) diff --git a/crates/ethereum/engine-primitives/src/payload.rs b/crates/ethereum/engine-primitives/src/payload.rs index 2d162ef15..ed377d003 100644 --- a/crates/ethereum/engine-primitives/src/payload.rs +++ b/crates/ethereum/engine-primitives/src/payload.rs @@ -184,7 +184,7 @@ impl From for ExecutionPayloadEnvelopeV4 { } /// Container type for all components required to build a payload. 
-#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, Default)] pub struct EthPayloadBuilderAttributes { /// Id of the payload pub id: PayloadId, diff --git a/crates/ethereum/evm/src/lib.rs b/crates/ethereum/evm/src/lib.rs index 91da90d99..1c340c092 100644 --- a/crates/ethereum/evm/src/lib.rs +++ b/crates/ethereum/evm/src/lib.rs @@ -17,6 +17,8 @@ extern crate alloc; +use core::convert::Infallible; + use alloc::{sync::Arc, vec::Vec}; use alloy_primitives::{Address, Bytes, TxKind, U256}; use reth_chainspec::{ChainSpec, Head}; @@ -59,6 +61,7 @@ impl EthEvmConfig { impl ConfigureEvmEnv for EthEvmConfig { type Header = Header; + type Error = Infallible; fn fill_tx_env(&self, tx_env: &mut TxEnv, transaction: &TransactionSigned, sender: Address) { transaction.fill_tx_env(tx_env, sender); @@ -131,7 +134,7 @@ impl ConfigureEvmEnv for EthEvmConfig { &self, parent: &Self::Header, attributes: NextBlockEnvAttributes, - ) -> (CfgEnvWithHandlerCfg, BlockEnv) { + ) -> Result<(CfgEnvWithHandlerCfg, BlockEnv), Self::Error> { // configure evm env based on parent block let cfg = CfgEnv::default().with_chain_id(self.chain_spec.chain().id()); @@ -179,7 +182,7 @@ impl ConfigureEvmEnv for EthEvmConfig { blob_excess_gas_and_price, }; - (CfgEnvWithHandlerCfg::new_with_spec_id(cfg, spec_id), block_env) + Ok((CfgEnvWithHandlerCfg::new_with_spec_id(cfg, spec_id), block_env)) } } diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 8e188f890..27d9d98bc 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -73,7 +73,7 @@ where &self, config: &PayloadConfig, parent: &Header, - ) -> (CfgEnvWithHandlerCfg, BlockEnv) { + ) -> Result<(CfgEnvWithHandlerCfg, BlockEnv), EvmConfig::Error> { let next_attributes = NextBlockEnvAttributes { timestamp: config.attributes.timestamp(), suggested_fee_recipient: config.attributes.suggested_fee_recipient(), @@ -97,7 +97,9 @@ where &self, args: BuildArguments, ) -> Result, PayloadBuilderError> { - let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_header); + let (cfg_env, block_env) = self + .cfg_and_block_env(&args.config, &args.config.parent_header) + .map_err(PayloadBuilderError::other)?; let pool = args.pool.clone(); default_ethereum_payload(self.evm_config.clone(), args, cfg_env, block_env, |attributes| { @@ -120,7 +122,9 @@ where None, ); - let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_header); + let (cfg_env, block_env) = self + .cfg_and_block_env(&args.config, &args.config.parent_header) + .map_err(PayloadBuilderError::other)?; let pool = args.pool.clone(); diff --git a/crates/evm/Cargo.toml b/crates/evm/Cargo.toml index 90fd53282..a4ce3c389 100644 --- a/crates/evm/Cargo.toml +++ b/crates/evm/Cargo.toml @@ -57,12 +57,12 @@ std = [ "revm/std", ] test-utils = [ - "dep:parking_lot", - "reth-chainspec/test-utils", - "reth-consensus/test-utils", - "reth-primitives/test-utils", - "reth-primitives-traits/test-utils", - "reth-revm/test-utils", - "revm/test-utils", - "reth-prune-types/test-utils" + "dep:parking_lot", + "reth-chainspec/test-utils", + "reth-consensus/test-utils", + "reth-primitives/test-utils", + "reth-primitives-traits/test-utils", + "reth-revm/test-utils", + "revm/test-utils", + "reth-prune-types/test-utils" ] diff --git a/crates/evm/src/lib.rs b/crates/evm/src/lib.rs index b75feea83..e30ff9b1a 100644 --- a/crates/evm/src/lib.rs +++ b/crates/evm/src/lib.rs @@ -116,6 +116,9 @@ pub trait ConfigureEvmEnv: 
Send + Sync + Unpin + Clone + 'static { /// The header type used by the EVM. type Header: BlockHeader; + /// The error type that is returned by [`Self::next_cfg_and_block_env`]. + type Error: core::error::Error + Send + Sync; + /// Returns a [`TxEnv`] from a [`TransactionSigned`] and [`Address`]. fn tx_env(&self, transaction: &TransactionSigned, signer: Address) -> TxEnv { let mut tx_env = TxEnv::default(); @@ -192,7 +195,7 @@ pub trait ConfigureEvmEnv: Send + Sync + Unpin + Clone + 'static { &self, parent: &Self::Header, attributes: NextBlockEnvAttributes, - ) -> (CfgEnvWithHandlerCfg, BlockEnv); + ) -> Result<(CfgEnvWithHandlerCfg, BlockEnv), Self::Error>; } /// Represents additional attributes required to configure the next block. diff --git a/crates/optimism/chainspec/src/lib.rs b/crates/optimism/chainspec/src/lib.rs index 03ce75aec..70adf2272 100644 --- a/crates/optimism/chainspec/src/lib.rs +++ b/crates/optimism/chainspec/src/lib.rs @@ -20,10 +20,10 @@ mod op_sepolia; use alloc::{vec, vec::Vec}; use alloy_chains::Chain; use alloy_genesis::Genesis; -use alloy_primitives::{B256, U256}; +use alloy_primitives::{Bytes, Parity, Signature, B256, U256}; pub use base::BASE_MAINNET; pub use base_sepolia::BASE_SEPOLIA; -use derive_more::{Constructor, Deref, From, Into}; +use derive_more::{Constructor, Deref, Display, From, Into}; pub use dev::OP_DEV; #[cfg(not(feature = "std"))] pub(crate) use once_cell::sync::Lazy as LazyLock; @@ -159,6 +159,16 @@ impl OpChainSpecBuilder { self } + /// Enable Holocene at genesis + pub fn holocene_activated(mut self) -> Self { + self = self.granite_activated(); + self.inner = self.inner.with_fork( + reth_optimism_forks::OptimismHardfork::Holocene, + ForkCondition::Timestamp(0), + ); + self + } + /// Build the resulting [`OpChainSpec`]. /// /// # Panics @@ -177,6 +187,81 @@ pub struct OpChainSpec { pub inner: ChainSpec, } +impl OpChainSpec { + /// Read from parent to determine the base fee for the next block + pub fn next_block_base_fee( + &self, + parent: &Header, + timestamp: u64, + ) -> Result { + let is_holocene_activated = self.inner.is_fork_active_at_timestamp( + reth_optimism_forks::OptimismHardfork::Holocene, + timestamp, + ); + // If we are in the Holocene, we need to use the base fee params + // from the parent block's extra data. 
+ // Else, use the base fee params (default values) from chainspec + if is_holocene_activated { + let (denominator, elasticity) = decode_holocene_1559_params(parent.extra_data.clone())?; + if elasticity == 0 && denominator == 0 { + return Ok(U256::from( + parent + .next_block_base_fee(self.base_fee_params_at_timestamp(timestamp)) + .unwrap_or_default(), + )); + } + let base_fee_params = BaseFeeParams::new(denominator as u128, elasticity as u128); + Ok(U256::from(parent.next_block_base_fee(base_fee_params).unwrap_or_default())) + } else { + Ok(U256::from( + parent + .next_block_base_fee(self.base_fee_params_at_timestamp(timestamp)) + .unwrap_or_default(), + )) + } + } +} + +#[derive(Clone, Debug, Display, Eq, PartialEq)] +/// Error type for decoding Holocene 1559 parameters +pub enum DecodeError { + #[display("Insufficient data to decode")] + /// Insufficient data to decode + InsufficientData, + #[display("Invalid denominator parameter")] + /// Invalid denominator parameter + InvalidDenominator, + #[display("Invalid elasticity parameter")] + /// Invalid elasticity parameter + InvalidElasticity, +} + +impl core::error::Error for DecodeError { + fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { + // None of the errors have sub-errors + None + } +} + +/// Extracts the Holcene 1599 parameters from the encoded form: +/// +pub fn decode_holocene_1559_params(extra_data: Bytes) -> Result<(u32, u32), DecodeError> { + if extra_data.len() < 9 { + return Err(DecodeError::InsufficientData); + } + let denominator: [u8; 4] = + extra_data[1..5].try_into().map_err(|_| DecodeError::InvalidDenominator)?; + let elasticity: [u8; 4] = + extra_data[5..9].try_into().map_err(|_| DecodeError::InvalidElasticity)?; + Ok((u32::from_be_bytes(denominator), u32::from_be_bytes(elasticity))) +} + +/// Returns the signature for the optimism deposit transactions, which don't include a +/// signature. 
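// Illustrative sketch (hypothetical, not from the patch): the Holocene extra data read by
// `decode_holocene_1559_params` is 9 bytes, laid out as
// <version byte> ++ <u32 BE denominator> ++ <u32 BE elasticity>.
#[test]
fn decode_holocene_extra_data_sketch() {
    let extra_data = Bytes::from_static(&[0, 0, 0, 0, 8, 0, 0, 0, 2]);
    let (denominator, elasticity) =
        decode_holocene_1559_params(extra_data).expect("9 bytes decode");
    assert_eq!((denominator, elasticity), (8, 2));
}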
+pub fn optimism_deposit_tx_signature() -> Signature { + Signature::new(U256::ZERO, U256::ZERO, Parity::Parity(false)) +} + impl EthChainSpec for OpChainSpec { fn chain(&self) -> alloy_chains::Chain { self.inner.chain() @@ -405,6 +490,8 @@ impl OptimismGenesisInfo { #[cfg(test)] mod tests { + use std::sync::Arc; + use alloy_genesis::{ChainConfig, Genesis}; use alloy_primitives::b256; use reth_chainspec::{test_fork_ids, BaseFeeParams, BaseFeeParamsKind}; @@ -919,4 +1006,87 @@ mod tests { .all(|(expected, actual)| &**expected == *actual)); assert_eq!(expected_hardforks.len(), hardforks.len()); } + + #[test] + fn test_get_base_fee_pre_holocene() { + let op_chain_spec = &BASE_SEPOLIA; + let parent = Header { + base_fee_per_gas: Some(1), + gas_used: 15763614, + gas_limit: 144000000, + ..Default::default() + }; + let base_fee = op_chain_spec.next_block_base_fee(&parent, 0); + assert_eq!( + base_fee.unwrap(), + U256::from( + parent + .next_block_base_fee(op_chain_spec.base_fee_params_at_timestamp(0)) + .unwrap_or_default() + ) + ); + } + + fn holocene_chainspec() -> Arc { + let mut hardforks = OptimismHardfork::base_sepolia(); + hardforks.insert(OptimismHardfork::Holocene.boxed(), ForkCondition::Timestamp(1800000000)); + Arc::new(OpChainSpec { + inner: ChainSpec { + chain: BASE_SEPOLIA.inner.chain, + genesis: BASE_SEPOLIA.inner.genesis.clone(), + genesis_hash: BASE_SEPOLIA.inner.genesis_hash.clone(), + paris_block_and_final_difficulty: Some((0, U256::from(0))), + hardforks, + base_fee_params: BASE_SEPOLIA.inner.base_fee_params.clone(), + max_gas_limit: crate::constants::BASE_SEPOLIA_MAX_GAS_LIMIT, + prune_delete_limit: 10000, + ..Default::default() + }, + }) + } + + #[test] + fn test_get_base_fee_holocene_nonce_not_set() { + let op_chain_spec = holocene_chainspec(); + let parent = Header { + base_fee_per_gas: Some(1), + gas_used: 15763614, + gas_limit: 144000000, + timestamp: 1800000003, + extra_data: Bytes::from_static(&[0, 0, 0, 0, 0, 0, 0, 0, 0]), + ..Default::default() + }; + let base_fee = op_chain_spec.next_block_base_fee(&parent, 1800000005); + assert_eq!( + base_fee.unwrap(), + U256::from( + parent + .next_block_base_fee(op_chain_spec.base_fee_params_at_timestamp(0)) + .unwrap_or_default() + ) + ); + } + + #[test] + fn test_get_base_fee_holocene_nonce_set() { + let op_chain_spec = holocene_chainspec(); + let parent = Header { + base_fee_per_gas: Some(1), + gas_used: 15763614, + gas_limit: 144000000, + extra_data: Bytes::from_static(&[0, 0, 0, 0, 8, 0, 0, 0, 8]), + timestamp: 1800000003, + ..Default::default() + }; + + let base_fee = op_chain_spec.next_block_base_fee(&parent, 1800000005); + assert_eq!( + base_fee.unwrap(), + U256::from( + parent + .next_block_base_fee(BaseFeeParams::new(0x00000008, 0x00000008)) + .unwrap_or_default() + ) + ); + } } diff --git a/crates/optimism/evm/src/config.rs b/crates/optimism/evm/src/config.rs index 668fcba4d..b00341ff6 100644 --- a/crates/optimism/evm/src/config.rs +++ b/crates/optimism/evm/src/config.rs @@ -12,7 +12,9 @@ pub fn revm_spec_by_timestamp_after_bedrock( chain_spec: &OpChainSpec, timestamp: u64, ) -> revm_primitives::SpecId { - if chain_spec.fork(OptimismHardfork::Granite).active_at_timestamp(timestamp) { + if chain_spec.fork(OptimismHardfork::Holocene).active_at_timestamp(timestamp) { + revm_primitives::HOLOCENE + } else if chain_spec.fork(OptimismHardfork::Granite).active_at_timestamp(timestamp) { revm_primitives::GRANITE } else if chain_spec.fork(OptimismHardfork::Fjord).active_at_timestamp(timestamp) { revm_primitives::FJORD @@ -29,7 
+31,9 @@ pub fn revm_spec_by_timestamp_after_bedrock( /// Map the latest active hardfork at the given block to a revm [`SpecId`](revm_primitives::SpecId). pub fn revm_spec(chain_spec: &OpChainSpec, block: &Head) -> revm_primitives::SpecId { - if chain_spec.fork(OptimismHardfork::Granite).active_at_head(block) { + if chain_spec.fork(OptimismHardfork::Holocene).active_at_head(block) { + revm_primitives::HOLOCENE + } else if chain_spec.fork(OptimismHardfork::Granite).active_at_head(block) { revm_primitives::GRANITE } else if chain_spec.fork(OptimismHardfork::Fjord).active_at_head(block) { revm_primitives::FJORD diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index bc46f3ea9..03aecf2c8 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -15,7 +15,7 @@ extern crate alloc; use alloc::{sync::Arc, vec::Vec}; use alloy_primitives::{Address, U256}; use reth_evm::{ConfigureEvm, ConfigureEvmEnv, NextBlockEnvAttributes}; -use reth_optimism_chainspec::OpChainSpec; +use reth_optimism_chainspec::{DecodeError, OpChainSpec}; use reth_primitives::{ revm_primitives::{AnalysisKind, CfgEnvWithHandlerCfg, TxEnv}, transaction::FillTxEnv, @@ -56,6 +56,7 @@ impl OptimismEvmConfig { impl ConfigureEvmEnv for OptimismEvmConfig { type Header = Header; + type Error = DecodeError; fn fill_tx_env(&self, tx_env: &mut TxEnv, transaction: &TransactionSigned, sender: Address) { transaction.fill_tx_env(tx_env, sender); @@ -134,7 +135,7 @@ impl ConfigureEvmEnv for OptimismEvmConfig { &self, parent: &Self::Header, attributes: NextBlockEnvAttributes, - ) -> (CfgEnvWithHandlerCfg, BlockEnv) { + ) -> Result<(CfgEnvWithHandlerCfg, BlockEnv), Self::Error> { // configure evm env based on parent block let cfg = CfgEnv::default().with_chain_id(self.chain_spec.chain().id()); @@ -156,13 +157,7 @@ impl ConfigureEvmEnv for OptimismEvmConfig { prevrandao: Some(attributes.prev_randao), gas_limit: U256::from(parent.gas_limit), // calculate basefee based on parent block's gas usage - basefee: U256::from( - parent - .next_block_base_fee( - self.chain_spec.base_fee_params_at_timestamp(attributes.timestamp), - ) - .unwrap_or_default(), - ), + basefee: self.chain_spec.next_block_base_fee(parent, attributes.timestamp)?, // calculate excess gas based on parent block's blob gas usage blob_excess_gas_and_price, }; @@ -175,7 +170,7 @@ impl ConfigureEvmEnv for OptimismEvmConfig { }; } - (cfg_with_handler_cfg, block_env) + Ok((cfg_with_handler_cfg, block_env)) } } diff --git a/crates/optimism/hardforks/src/hardfork.rs b/crates/optimism/hardforks/src/hardfork.rs index 011c4ae72..440314e37 100644 --- a/crates/optimism/hardforks/src/hardfork.rs +++ b/crates/optimism/hardforks/src/hardfork.rs @@ -31,6 +31,8 @@ hardfork!( Fjord, /// Granite: Granite, + /// Holocene: + Holocene, } ); @@ -156,6 +158,7 @@ impl OptimismHardfork { Self::Ecotone => Some(1708534800), Self::Fjord => Some(1716998400), Self::Granite => Some(1723478400), + Self::Holocene => None, }, ) } @@ -190,6 +193,7 @@ impl OptimismHardfork { Self::Ecotone => Some(1710374401), Self::Fjord => Some(1720627201), Self::Granite => Some(1726070401), + Self::Holocene => None, }, ) } diff --git a/crates/optimism/node/src/engine.rs b/crates/optimism/node/src/engine.rs index cec609671..966d87279 100644 --- a/crates/optimism/node/src/engine.rs +++ b/crates/optimism/node/src/engine.rs @@ -15,7 +15,9 @@ use reth_node_api::{ }; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_forks::OptimismHardfork; -use 
reth_optimism_payload_builder::{OptimismBuiltPayload, OptimismPayloadBuilderAttributes}; +use reth_optimism_payload_builder::{ + builder::decode_eip_1559_params, OptimismBuiltPayload, OptimismPayloadBuilderAttributes, +}; /// The types used in the optimism beacon consensus engine. #[derive(Debug, Default, Clone, serde::Deserialize, serde::Serialize)] @@ -147,6 +149,139 @@ where )) } + if self.chain_spec.is_fork_active_at_timestamp( + OptimismHardfork::Holocene, + attributes.payload_attributes.timestamp, + ) { + let Some(eip_1559_params) = attributes.eip_1559_params else { + return Err(EngineObjectValidationError::InvalidParams( + "MissingEip1559ParamsInPayloadAttributes".to_string().into(), + )) + }; + let (elasticity, denominator) = decode_eip_1559_params(eip_1559_params); + if elasticity != 0 && denominator == 0 { + return Err(EngineObjectValidationError::InvalidParams( + "Eip1559ParamsDenominatorZero".to_string().into(), + )) + } + } + Ok(()) } } + +#[cfg(test)] +mod test { + + use crate::engine; + use alloy_primitives::{b64, Address, B256, B64}; + use alloy_rpc_types_engine::PayloadAttributes; + use reth_chainspec::ForkCondition; + use reth_optimism_chainspec::BASE_SEPOLIA; + + use super::*; + + fn get_chainspec(is_holocene: bool) -> Arc { + let mut hardforks = OptimismHardfork::base_sepolia(); + if is_holocene { + hardforks + .insert(OptimismHardfork::Holocene.boxed(), ForkCondition::Timestamp(1800000000)); + } + Arc::new(OpChainSpec { + inner: ChainSpec { + chain: BASE_SEPOLIA.inner.chain, + genesis: BASE_SEPOLIA.inner.genesis.clone(), + genesis_hash: BASE_SEPOLIA.inner.genesis_hash.clone(), + paris_block_and_final_difficulty: BASE_SEPOLIA + .inner + .paris_block_and_final_difficulty, + hardforks, + base_fee_params: BASE_SEPOLIA.inner.base_fee_params.clone(), + max_gas_limit: BASE_SEPOLIA.inner.max_gas_limit, + prune_delete_limit: 10000, + ..Default::default() + }, + }) + } + + const fn get_attributes(eip_1559_params: Option, timestamp: u64) -> OpPayloadAttributes { + OpPayloadAttributes { + gas_limit: Some(1000), + eip_1559_params, + transactions: None, + no_tx_pool: None, + payload_attributes: PayloadAttributes { + timestamp, + prev_randao: B256::ZERO, + suggested_fee_recipient: Address::ZERO, + withdrawals: Some(vec![]), + parent_beacon_block_root: Some(B256::ZERO), + }, + } + } + + #[test] + fn test_well_formed_attributes_pre_holocene() { + let validator = OptimismEngineValidator::new(get_chainspec(false)); + let attributes = get_attributes(None, 1799999999); + + let result = >::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes + ); + assert!(result.is_ok()); + } + + #[test] + fn test_well_formed_attributes_holocene_no_eip1559_params() { + let validator = OptimismEngineValidator::new(get_chainspec(true)); + let attributes = get_attributes(None, 1800000000); + + let result = >::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes + ); + assert!(matches!(result, Err(EngineObjectValidationError::InvalidParams(_)))); + } + + #[test] + fn test_well_formed_attributes_holocene_eip1559_params_zero_denominator() { + let validator = OptimismEngineValidator::new(get_chainspec(true)); + let attributes = get_attributes(Some(b64!("0000000000000008")), 1800000000); + + let result = >::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes + ); + assert!(matches!(result, Err(EngineObjectValidationError::InvalidParams(_)))); + } + + #[test] + fn test_well_formed_attributes_holocene_valid() { + let 
validator = OptimismEngineValidator::new(get_chainspec(true)); + let attributes = get_attributes(Some(b64!("0000000800000008")), 1800000000); + + let result = >::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes + ); + assert!(result.is_ok()); + } + + #[test] + fn test_well_formed_attributes_holocene_valid_all_zero() { + let validator = OptimismEngineValidator::new(get_chainspec(true)); + let attributes = get_attributes(Some(b64!("0000000000000000")), 1800000000); + + let result = >::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes + ); + assert!(result.is_ok()); + } +} diff --git a/crates/optimism/node/tests/e2e/utils.rs b/crates/optimism/node/tests/e2e/utils.rs index 48175e5b2..d4219b0fe 100644 --- a/crates/optimism/node/tests/e2e/utils.rs +++ b/crates/optimism/node/tests/e2e/utils.rs @@ -63,5 +63,6 @@ pub(crate) fn optimism_payload_attributes(timestamp: u64) -> OptimismPayloadBuil transactions: vec![], no_tx_pool: false, gas_limit: Some(30_000_000), + eip_1559_params: None, } } diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 0550adeaa..e9b7e2c76 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -1,10 +1,9 @@ //! Optimism payload builder implementation. - use std::sync::Arc; use alloy_consensus::EMPTY_OMMER_ROOT_HASH; use alloy_eips::merge::BEACON_NONCE; -use alloy_primitives::U256; +use alloy_primitives::{B64, U256}; use reth_basic_payload_builder::*; use reth_chain_state::ExecutedBlock; use reth_chainspec::ChainSpecProvider; @@ -80,7 +79,7 @@ where &self, config: &PayloadConfig, parent: &Header, - ) -> (CfgEnvWithHandlerCfg, BlockEnv) { + ) -> Result<(CfgEnvWithHandlerCfg, BlockEnv), EvmConfig::Error> { let next_attributes = NextBlockEnvAttributes { timestamp: config.attributes.timestamp(), suggested_fee_recipient: config.attributes.suggested_fee_recipient(), @@ -104,7 +103,9 @@ where &self, args: BuildArguments, ) -> Result, PayloadBuilderError> { - let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_header); + let (cfg_env, block_env) = self + .cfg_and_block_env(&args.config, &args.config.parent_header) + .map_err(PayloadBuilderError::other)?; optimism_payload(&self.evm_config, args, cfg_env, block_env, self.compute_pending_block) } @@ -133,7 +134,9 @@ where cancel: Default::default(), best_payload: None, }; - let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_header); + let (cfg_env, block_env) = self + .cfg_and_block_env(&args.config, &args.config.parent_header) + .map_err(PayloadBuilderError::other)?; optimism_payload(&self.evm_config, args, cfg_env, block_env, false)? 
.into_payload() .ok_or_else(|| PayloadBuilderError::MissingPayload) @@ -168,7 +171,7 @@ where let state = StateProviderDatabase::new(state_provider); let mut db = State::builder().with_database_ref(cached_reads.as_db(state)).with_bundle_update().build(); - let PayloadConfig { parent_header, attributes, extra_data } = config; + let PayloadConfig { parent_header, attributes, mut extra_data } = config; debug!(target: "payload_builder", id=%attributes.payload_attributes.payload_id(), parent_header = ?parent_header.hash(), parent_number = parent_header.number, "building new payload"); @@ -470,6 +473,19 @@ where (None, None) }; + let is_holocene = chain_spec.is_fork_active_at_timestamp( + OptimismHardfork::Holocene, + attributes.payload_attributes.timestamp, + ); + + if is_holocene { + extra_data = attributes + .get_holocene_extra_data( + chain_spec.base_fee_params_at_timestamp(attributes.payload_attributes.timestamp), + ) + .map_err(PayloadBuilderError::other)?; + } + let header = Header { parent_hash: parent_header.hash(), ommers_hash: EMPTY_OMMER_ROOT_HASH, @@ -532,3 +548,12 @@ where Ok(BuildOutcome::Better { payload, cached_reads }) } } + +/// Extracts the Holocene 1559 parameters from the encoded form: +/// +pub fn decode_eip_1559_params(eip_1559_params: B64) -> (u32, u32) { + let denominator: [u8; 4] = eip_1559_params.0[..4].try_into().expect("sufficient length"); + let elasticity: [u8; 4] = eip_1559_params.0[4..8].try_into().expect("sufficient length"); + + (u32::from_be_bytes(elasticity), u32::from_be_bytes(denominator)) +} diff --git a/crates/optimism/payload/src/error.rs b/crates/optimism/payload/src/error.rs index 2016fdc6d..ce5f584a1 100644 --- a/crates/optimism/payload/src/error.rs +++ b/crates/optimism/payload/src/error.rs @@ -21,3 +21,17 @@ pub enum OptimismPayloadBuilderError { #[error("blob transaction included in sequencer block")] BlobTransactionRejected, } + +/// Error type for EIP-1559 parameters +#[derive(Debug, thiserror::Error)] +pub enum EIP1559ParamError { + /// No EIP-1559 parameters provided + #[error("No EIP-1559 parameters provided")] + NoEIP1559Params, + /// Denominator overflow + #[error("Denominator overflow")] + DenominatorOverflow, + /// Elasticity overflow + #[error("Elasticity overflow")] + ElasticityOverflow, +} diff --git a/crates/optimism/payload/src/payload.rs b/crates/optimism/payload/src/payload.rs index 7f95d04ad..056edfe7b 100644 --- a/crates/optimism/payload/src/payload.rs +++ b/crates/optimism/payload/src/payload.rs @@ -2,8 +2,9 @@ //! Optimism builder support -use alloy_eips::{eip2718::Decodable2718, eip7685::Requests}; -use alloy_primitives::{keccak256, Address, B256, U256}; +use crate::{builder::decode_eip_1559_params, error::EIP1559ParamError}; +use alloy_eips::{eip1559::BaseFeeParams, eip2718::Decodable2718, eip7685::Requests}; +use alloy_primitives::{keccak256, Address, Bytes, B256, B64, U256}; use alloy_rlp::Encodable; use alloy_rpc_types_engine::{ExecutionPayloadEnvelopeV2, ExecutionPayloadV1, PayloadId}; /// Re-export for use in downstream arguments.
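Worth pinning down the byte layout that `decode_eip_1559_params` above and the `get_holocene_extra_data` helper in the next hunk agree on. The sketch below is illustrative only (plain arrays and hypothetical names instead of `B64`/`Bytes`), but it reproduces the values asserted by the tests in this patch: `eip1559Params` packs a big-endian u32 denominator followed by a big-endian u32 elasticity, and the Holocene `extra_data` is that pair with byte 0 left at zero.

// Illustrative sketch of the layout used above, not reth's API.
fn decode_params(params: [u8; 8]) -> (u32, u32) {
    let denominator = u32::from_be_bytes(params[..4].try_into().unwrap());
    let elasticity = u32::from_be_bytes(params[4..].try_into().unwrap());
    // returned as (elasticity, denominator), matching decode_eip_1559_params above
    (elasticity, denominator)
}

fn encode_extra_data(denominator: u32, elasticity: u32) -> [u8; 9] {
    let mut extra = [0u8; 9]; // extra[0] stays 0
    extra[1..5].copy_from_slice(&denominator.to_be_bytes());
    extra[5..9].copy_from_slice(&elasticity.to_be_bytes());
    extra
}

fn main() {
    // 0x0000000800000008 decodes to elasticity 8, denominator 8 ...
    let (elasticity, denominator) = decode_params([0, 0, 0, 8, 0, 0, 0, 8]);
    assert_eq!((elasticity, denominator), (8, 8));
    // ... and re-encodes to the 9-byte extra_data the payload tests expect.
    assert_eq!(encode_extra_data(denominator, elasticity), [0, 0, 0, 0, 8, 0, 0, 0, 8]);
}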
@@ -23,7 +24,7 @@ use reth_rpc_types_compat::engine::payload::{ use std::sync::Arc; /// Optimism Payload Builder Attributes -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, Default)] pub struct OptimismPayloadBuilderAttributes { /// Inner ethereum payload builder attributes pub payload_attributes: EthPayloadBuilderAttributes, @@ -34,6 +35,42 @@ pub struct OptimismPayloadBuilderAttributes { pub transactions: Vec>, /// The gas limit for the generated payload pub gas_limit: Option, + /// EIP-1559 parameters for the generated payload + pub eip_1559_params: Option, +} + +impl OptimismPayloadBuilderAttributes { + /// Extracts the `eip1559` parameters for the payload. + pub fn get_holocene_extra_data( + &self, + default_base_fee_params: BaseFeeParams, + ) -> Result { + let eip_1559_params = self.eip_1559_params.ok_or(EIP1559ParamError::NoEIP1559Params)?; + + let mut extra_data = [0u8; 9]; + // If eip 1559 params aren't set, use the canyon base fee param constants + // otherwise use them + if eip_1559_params.is_zero() { + // Try casting max_change_denominator to u32 + let max_change_denominator: u32 = (default_base_fee_params.max_change_denominator) + .try_into() + .map_err(|_| EIP1559ParamError::DenominatorOverflow)?; + + // Try casting elasticity_multiplier to u32 + let elasticity_multiplier: u32 = (default_base_fee_params.elasticity_multiplier) + .try_into() + .map_err(|_| EIP1559ParamError::ElasticityOverflow)?; + + // Copy the values safely + extra_data[1..5].copy_from_slice(&max_change_denominator.to_be_bytes()); + extra_data[5..9].copy_from_slice(&elasticity_multiplier.to_be_bytes()); + } else { + let (elasticity, denominator) = decode_eip_1559_params(eip_1559_params); + extra_data[1..5].copy_from_slice(&denominator.to_be_bytes()); + extra_data[5..9].copy_from_slice(&elasticity.to_be_bytes()); + } + Ok(Bytes::copy_from_slice(&extra_data)) + } } impl PayloadBuilderAttributes for OptimismPayloadBuilderAttributes { @@ -82,6 +119,7 @@ impl PayloadBuilderAttributes for OptimismPayloadBuilderAttributes { no_tx_pool: attributes.no_tx_pool.unwrap_or_default(), transactions, gas_limit: attributes.gas_limit, + eip_1559_params: attributes.eip_1559_params, }) } @@ -370,4 +408,24 @@ mod tests { ) ); } + + #[test] + fn test_get_extra_data_post_holocene() { + let attributes = OptimismPayloadBuilderAttributes { + eip_1559_params: Some(B64::from_str("0x0000000800000008").unwrap()), + ..Default::default() + }; + let extra_data = attributes.get_holocene_extra_data(BaseFeeParams::new(80, 60)); + assert_eq!(extra_data.unwrap(), Bytes::copy_from_slice(&[0, 0, 0, 0, 8, 0, 0, 0, 8])); + } + + #[test] + fn test_get_extra_data_post_holocene_default() { + let attributes = OptimismPayloadBuilderAttributes { + eip_1559_params: Some(B64::ZERO), + ..Default::default() + }; + let extra_data = attributes.get_holocene_extra_data(BaseFeeParams::new(80, 60)); + assert_eq!(extra_data.unwrap(), Bytes::copy_from_slice(&[0, 0, 0, 0, 80, 0, 0, 0, 60])); + } } diff --git a/examples/custom-evm/src/main.rs b/examples/custom-evm/src/main.rs index 55063fc9b..16aad63c0 100644 --- a/examples/custom-evm/src/main.rs +++ b/examples/custom-evm/src/main.rs @@ -38,7 +38,7 @@ use reth_primitives::{ Header, TransactionSigned, }; use reth_tracing::{RethTracer, Tracer}; -use std::sync::Arc; +use std::{convert::Infallible, sync::Arc}; /// Custom EVM configuration #[derive(Debug, Clone)] @@ -87,6 +87,7 @@ impl MyEvmConfig { impl ConfigureEvmEnv for MyEvmConfig { type Header = Header; + type Error = Infallible; fn 
fill_tx_env(&self, tx_env: &mut TxEnv, transaction: &TransactionSigned, sender: Address) { self.inner.fill_tx_env(tx_env, transaction, sender); @@ -115,7 +116,7 @@ impl ConfigureEvmEnv for MyEvmConfig { &self, parent: &Self::Header, attributes: NextBlockEnvAttributes, - ) -> (CfgEnvWithHandlerCfg, BlockEnv) { + ) -> Result<(CfgEnvWithHandlerCfg, BlockEnv), Self::Error> { self.inner.next_cfg_and_block_env(parent, attributes) } } diff --git a/examples/stateful-precompile/src/main.rs b/examples/stateful-precompile/src/main.rs index b0165e4de..371fbf4f7 100644 --- a/examples/stateful-precompile/src/main.rs +++ b/examples/stateful-precompile/src/main.rs @@ -30,7 +30,7 @@ use reth_primitives::{ }; use reth_tracing::{RethTracer, Tracer}; use schnellru::{ByLength, LruMap}; -use std::{collections::HashMap, sync::Arc}; +use std::{collections::HashMap, convert::Infallible, sync::Arc}; /// Type alias for the LRU cache used within the [`PrecompileCache`]. type PrecompileLRUCache = LruMap<(Bytes, u64), PrecompileResult>; @@ -147,6 +147,7 @@ impl StatefulPrecompileMut for WrappedPrecompile { impl ConfigureEvmEnv for MyEvmConfig { type Header = Header; + type Error = Infallible; fn fill_tx_env(&self, tx_env: &mut TxEnv, transaction: &TransactionSigned, sender: Address) { self.inner.fill_tx_env(tx_env, transaction, sender) @@ -175,7 +176,7 @@ impl ConfigureEvmEnv for MyEvmConfig { &self, parent: &Self::Header, attributes: NextBlockEnvAttributes, - ) -> (CfgEnvWithHandlerCfg, BlockEnv) { + ) -> Result<(CfgEnvWithHandlerCfg, BlockEnv), Self::Error> { self.inner.next_cfg_and_block_env(parent, attributes) } } From ff9a42ae8fbf441f4fb9ca8dcea84c765b284721 Mon Sep 17 00:00:00 2001 From: Abhishek kochar Date: Wed, 30 Oct 2024 22:13:03 +0800 Subject: [PATCH 350/425] feat(eth69): support for ETH69 (#12158) Signed-off-by: Abhishekkochar Co-authored-by: Matthias Seitz --- crates/net/eth-wire-types/src/message.rs | 10 +++++++++- crates/net/eth-wire-types/src/version.rs | 20 ++++++++++++++++---- crates/net/network/src/transactions/mod.rs | 2 +- 3 files changed, 26 insertions(+), 6 deletions(-) diff --git a/crates/net/eth-wire-types/src/message.rs b/crates/net/eth-wire-types/src/message.rs index 4afcb34e1..8546bfe14 100644 --- a/crates/net/eth-wire-types/src/message.rs +++ b/crates/net/eth-wire-types/src/message.rs @@ -50,9 +50,17 @@ impl ProtocolMessage { let message = match message_type { EthMessageID::Status => EthMessage::Status(Status::decode(buf)?), EthMessageID::NewBlockHashes => { + if version.is_eth69() { + return Err(MessageError::Invalid(version, EthMessageID::NewBlockHashes)); + } EthMessage::NewBlockHashes(NewBlockHashes::decode(buf)?) } - EthMessageID::NewBlock => EthMessage::NewBlock(Box::new(NewBlock::decode(buf)?)), + EthMessageID::NewBlock => { + if version.is_eth69() { + return Err(MessageError::Invalid(version, EthMessageID::NewBlock)); + } + EthMessage::NewBlock(Box::new(NewBlock::decode(buf)?)) + } EthMessageID::Transactions => EthMessage::Transactions(Transactions::decode(buf)?), EthMessageID::NewPooledTransactionHashes => { if version >= EthVersion::Eth68 { diff --git a/crates/net/eth-wire-types/src/version.rs b/crates/net/eth-wire-types/src/version.rs index 4fd3e792d..5a2e0ff96 100644 --- a/crates/net/eth-wire-types/src/version.rs +++ b/crates/net/eth-wire-types/src/version.rs @@ -18,12 +18,12 @@ pub struct ParseVersionError(String); pub enum EthVersion { /// The `eth` protocol version 66. Eth66 = 66, - /// The `eth` protocol version 67. Eth67 = 67, - /// The `eth` protocol version 68. 
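The message-decoder change above makes `NewBlockHashes` and `NewBlock` hard errors once eth/69 is negotiated, which is also why the message count drops to 11 below. A minimal stand-alone sketch of that gate, using hypothetical `MessageId`/`check` names rather than the eth-wire types:

#[derive(Debug)]
enum MessageId { NewBlockHashes, NewBlock, Transactions }

// Mirrors the gate added above: block-announcement messages are invalid on eth/69.
fn check(is_eth69: bool, id: MessageId) -> Result<MessageId, String> {
    if is_eth69 && matches!(id, MessageId::NewBlockHashes | MessageId::NewBlock) {
        return Err(format!("{id:?} is not valid on eth/69"));
    }
    Ok(id)
}

fn main() {
    assert!(check(false, MessageId::NewBlock).is_ok()); // eth/68 still accepts it
    assert!(check(true, MessageId::NewBlock).is_err());
    assert!(check(true, MessageId::Transactions).is_ok());
}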
Eth68 = 68, + /// The `eth` protocol version 69. + Eth69 = 69, } impl EthVersion { @@ -38,6 +38,8 @@ impl EthVersion { // eth/67,68 are eth/66 minus GetNodeData and NodeData messages 13 } + // eth69 is both eth67 and eth68 minus NewBlockHashes and NewBlock + Self::Eth69 => 11, } } @@ -55,6 +57,11 @@ impl EthVersion { pub const fn is_eth68(&self) -> bool { matches!(self, Self::Eth68) } + + /// Returns true if the version is eth/69 + pub const fn is_eth69(&self) -> bool { + matches!(self, Self::Eth69) + } } /// Allow for converting from a `&str` to an `EthVersion`. @@ -75,6 +82,7 @@ impl TryFrom<&str> for EthVersion { "66" => Ok(Self::Eth66), "67" => Ok(Self::Eth67), "68" => Ok(Self::Eth68), + "69" => Ok(Self::Eth69), _ => Err(ParseVersionError(s.to_string())), } } @@ -98,6 +106,7 @@ impl TryFrom for EthVersion { 66 => Ok(Self::Eth66), 67 => Ok(Self::Eth67), 68 => Ok(Self::Eth68), + 69 => Ok(Self::Eth69), _ => Err(ParseVersionError(u.to_string())), } } @@ -126,6 +135,7 @@ impl From for &'static str { EthVersion::Eth66 => "66", EthVersion::Eth67 => "67", EthVersion::Eth68 => "68", + EthVersion::Eth69 => "69", } } } @@ -179,7 +189,8 @@ mod tests { assert_eq!(EthVersion::Eth66, EthVersion::try_from("66").unwrap()); assert_eq!(EthVersion::Eth67, EthVersion::try_from("67").unwrap()); assert_eq!(EthVersion::Eth68, EthVersion::try_from("68").unwrap()); - assert_eq!(Err(ParseVersionError("69".to_string())), EthVersion::try_from("69")); + assert_eq!(EthVersion::Eth69, EthVersion::try_from("69").unwrap()); + assert_eq!(Err(ParseVersionError("70".to_string())), EthVersion::try_from("70")); } #[test] @@ -187,6 +198,7 @@ mod tests { assert_eq!(EthVersion::Eth66, "66".parse().unwrap()); assert_eq!(EthVersion::Eth67, "67".parse().unwrap()); assert_eq!(EthVersion::Eth68, "68".parse().unwrap()); - assert_eq!(Err(ParseVersionError("69".to_string())), "69".parse::()); + assert_eq!(EthVersion::Eth69, "69".parse().unwrap()); + assert_eq!(Err(ParseVersionError("70".to_string())), "70".parse::()); } } diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index b6e589c6f..4e23c8527 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ -1668,7 +1668,7 @@ impl PooledTransactionsHashesBuilder { fn new(version: EthVersion) -> Self { match version { EthVersion::Eth66 | EthVersion::Eth67 => Self::Eth66(Default::default()), - EthVersion::Eth68 => Self::Eth68(Default::default()), + EthVersion::Eth68 | EthVersion::Eth69 => Self::Eth68(Default::default()), } } From bb8da983b0260c8aa2912b20516a8518f2f8cd20 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 30 Oct 2024 16:39:12 +0100 Subject: [PATCH 351/425] feat: add is_ethereum trait fn (#12197) --- crates/chainspec/src/api.rs | 5 +++++ crates/net/discv5/src/network_stack_id.rs | 5 ++--- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/crates/chainspec/src/api.rs b/crates/chainspec/src/api.rs index 3751789ca..36640f34b 100644 --- a/crates/chainspec/src/api.rs +++ b/crates/chainspec/src/api.rs @@ -57,6 +57,11 @@ pub trait EthChainSpec: Send + Sync + Unpin + Debug { fn is_optimism(&self) -> bool { self.chain().is_optimism() } + + /// Returns `true` if this chain contains Ethereum configuration. 
+ fn is_ethereum(&self) -> bool { + self.chain().is_ethereum() + } } impl EthChainSpec for ChainSpec { diff --git a/crates/net/discv5/src/network_stack_id.rs b/crates/net/discv5/src/network_stack_id.rs index f707c7de7..a7b6944f3 100644 --- a/crates/net/discv5/src/network_stack_id.rs +++ b/crates/net/discv5/src/network_stack_id.rs @@ -20,12 +20,11 @@ impl NetworkStackId { /// ENR fork ID kv-pair key, for an Optimism CL node. pub const OPSTACK: &'static [u8] = b"opstack"; - #[allow(clippy::missing_const_for_fn)] /// Returns the [`NetworkStackId`] that matches the given chain spec. pub fn id(chain: impl EthChainSpec) -> Option<&'static [u8]> { - if chain.chain().is_optimism() { + if chain.is_optimism() { return Some(Self::OPEL) - } else if chain.chain().is_ethereum() { + } else if chain.is_ethereum() { return Some(Self::ETH) } From 755fac08ddeec0f4260fa5565bac72d6cb392348 Mon Sep 17 00:00:00 2001 From: Kaushik Donthi Date: Wed, 30 Oct 2024 09:06:37 -0700 Subject: [PATCH 352/425] Wrap sidecar in arcs (#11554) Co-authored-by: Matthias Seitz --- crates/ethereum/payload/src/lib.rs | 2 +- crates/transaction-pool/src/blobstore/disk.rs | 44 ++++++++++++------- crates/transaction-pool/src/blobstore/mem.rs | 21 ++++----- crates/transaction-pool/src/blobstore/mod.rs | 12 +++-- crates/transaction-pool/src/blobstore/noop.rs | 10 +++-- crates/transaction-pool/src/lib.rs | 9 ++-- crates/transaction-pool/src/maintain.rs | 2 + crates/transaction-pool/src/noop.rs | 9 ++-- crates/transaction-pool/src/pool/mod.rs | 4 +- crates/transaction-pool/src/traits.rs | 9 ++-- .../src/mined_sidecar.rs | 8 ++-- 11 files changed, 84 insertions(+), 46 deletions(-) diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 27d9d98bc..8e92d0aa8 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -456,7 +456,7 @@ where EthBuiltPayload::new(attributes.id, sealed_block, total_fees, Some(executed), requests); // extend the payload with the blob sidecars from the executed txs - payload.extend_sidecars(blob_sidecars); + payload.extend_sidecars(blob_sidecars.into_iter().map(Arc::unwrap_or_clone)); Ok(BuildOutcome::Better { payload, cached_reads }) } diff --git a/crates/transaction-pool/src/blobstore/disk.rs b/crates/transaction-pool/src/blobstore/disk.rs index 787d4985f..987264853 100644 --- a/crates/transaction-pool/src/blobstore/disk.rs +++ b/crates/transaction-pool/src/blobstore/disk.rs @@ -103,7 +103,7 @@ impl BlobStore for DiskFileBlobStore { stat } - fn get(&self, tx: B256) -> Result, BlobStoreError> { + fn get(&self, tx: B256) -> Result>, BlobStoreError> { self.inner.get_one(tx) } @@ -114,14 +114,17 @@ impl BlobStore for DiskFileBlobStore { fn get_all( &self, txs: Vec, - ) -> Result, BlobStoreError> { + ) -> Result)>, BlobStoreError> { if txs.is_empty() { return Ok(Vec::new()) } self.inner.get_all(txs) } - fn get_exact(&self, txs: Vec) -> Result, BlobStoreError> { + fn get_exact( + &self, + txs: Vec, + ) -> Result>, BlobStoreError> { if txs.is_empty() { return Ok(Vec::new()) } @@ -164,7 +167,7 @@ impl BlobStore for DiskFileBlobStore { struct DiskFileBlobStoreInner { blob_dir: PathBuf, - blob_cache: Mutex>, + blob_cache: Mutex, ByLength>>, size_tracker: BlobStoreSize, file_lock: RwLock<()>, txs_to_delete: RwLock>, @@ -205,7 +208,7 @@ impl DiskFileBlobStoreInner { fn insert_one(&self, tx: B256, data: BlobTransactionSidecar) -> Result<(), BlobStoreError> { let mut buf = Vec::with_capacity(data.rlp_encoded_fields_length()); data.rlp_encode_fields(&mut buf); - 
self.blob_cache.lock().insert(tx, data); + self.blob_cache.lock().insert(tx, Arc::new(data)); let size = self.write_one_encoded(tx, &buf)?; self.size_tracker.add_size(size); @@ -227,7 +230,7 @@ impl DiskFileBlobStoreInner { { let mut cache = self.blob_cache.lock(); for (tx, data) in txs { - cache.insert(tx, data); + cache.insert(tx, Arc::new(data)); } } let mut add = 0; @@ -278,15 +281,19 @@ impl DiskFileBlobStoreInner { } /// Retrieves the blob for the given transaction hash from the blob cache or disk. - fn get_one(&self, tx: B256) -> Result, BlobStoreError> { + fn get_one(&self, tx: B256) -> Result>, BlobStoreError> { if let Some(blob) = self.blob_cache.lock().get(&tx) { return Ok(Some(blob.clone())) } let blob = self.read_one(tx)?; + if let Some(blob) = &blob { - self.blob_cache.lock().insert(tx, blob.clone()); + let blob_arc = Arc::new(blob.clone()); + self.blob_cache.lock().insert(tx, blob_arc.clone()); + return Ok(Some(blob_arc)) } - Ok(blob) + + Ok(None) } /// Returns the path to the blob file for the given transaction hash. @@ -374,7 +381,7 @@ impl DiskFileBlobStoreInner { fn get_all( &self, txs: Vec, - ) -> Result, BlobStoreError> { + ) -> Result)>, BlobStoreError> { let mut res = Vec::with_capacity(txs.len()); let mut cache_miss = Vec::new(); { @@ -396,8 +403,9 @@ impl DiskFileBlobStoreInner { } let mut cache = self.blob_cache.lock(); for (tx, data) in from_disk { - cache.insert(tx, data.clone()); - res.push((tx, data)); + let arc = Arc::new(data.clone()); + cache.insert(tx, arc.clone()); + res.push((tx, arc.clone())); } Ok(res) @@ -407,7 +415,10 @@ impl DiskFileBlobStoreInner { /// /// Returns an error if there are any missing blobs. #[inline] - fn get_exact(&self, txs: Vec) -> Result, BlobStoreError> { + fn get_exact( + &self, + txs: Vec, + ) -> Result>, BlobStoreError> { txs.into_iter() .map(|tx| self.get_one(tx)?.ok_or(BlobStoreError::MissingSidecar(tx))) .collect() @@ -514,14 +525,17 @@ mod tests { let blobs = rng_blobs(10); let all_hashes = blobs.iter().map(|(tx, _)| *tx).collect::>(); store.insert_all(blobs.clone()).unwrap(); + // all cached for (tx, blob) in &blobs { assert!(store.is_cached(tx)); - assert_eq!(store.get(*tx).unwrap().unwrap(), *blob); + let b = store.get(*tx).unwrap().map(Arc::unwrap_or_clone).unwrap(); + assert_eq!(b, *blob); } + let all = store.get_all(all_hashes.clone()).unwrap(); for (tx, blob) in all { - assert!(blobs.contains(&(tx, blob)), "missing blob {tx:?}"); + assert!(blobs.contains(&(tx, Arc::unwrap_or_clone(blob))), "missing blob {tx:?}"); } assert!(store.contains(all_hashes[0]).unwrap()); diff --git a/crates/transaction-pool/src/blobstore/mem.rs b/crates/transaction-pool/src/blobstore/mem.rs index c98a01b88..cea1837bd 100644 --- a/crates/transaction-pool/src/blobstore/mem.rs +++ b/crates/transaction-pool/src/blobstore/mem.rs @@ -15,7 +15,7 @@ pub struct InMemoryBlobStore { #[derive(Debug, Default)] struct InMemoryBlobStoreInner { /// Storage for all blob data. - store: RwLock>, + store: RwLock>>, size_tracker: BlobStoreSize, } @@ -75,7 +75,7 @@ impl BlobStore for InMemoryBlobStore { } // Retrieves the decoded blob data for the given transaction hash. 
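The reworked `get_one` above is a read-through cache: a hit hands back the cached `Arc`, a miss reads from disk, caches a freshly created `Arc`, and returns that same handle. A toy model of the pattern, with `u64` keys and byte vectors standing in for transaction hashes and sidecars:

use std::{collections::HashMap, sync::Arc};

struct Store {
    cache: HashMap<u64, Arc<Vec<u8>>>,
    disk: HashMap<u64, Vec<u8>>,
}

impl Store {
    // Return the cached Arc if present, otherwise load, cache an Arc, and hand it out.
    fn get_one(&mut self, key: u64) -> Option<Arc<Vec<u8>>> {
        if let Some(hit) = self.cache.get(&key) {
            return Some(hit.clone());
        }
        let loaded = self.disk.get(&key)?.clone();
        let arc = Arc::new(loaded);
        self.cache.insert(key, arc.clone());
        Some(arc)
    }
}

fn main() {
    let mut store = Store { cache: HashMap::new(), disk: HashMap::from([(1, vec![0xAA])]) };
    let first = store.get_one(1).unwrap();
    let second = store.get_one(1).unwrap();
    assert!(Arc::ptr_eq(&first, &second)); // second read is served from the cache
}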
- fn get(&self, tx: B256) -> Result, BlobStoreError> { + fn get(&self, tx: B256) -> Result>, BlobStoreError> { Ok(self.inner.store.read().get(&tx).cloned()) } @@ -86,16 +86,17 @@ impl BlobStore for InMemoryBlobStore { fn get_all( &self, txs: Vec, - ) -> Result, BlobStoreError> { + ) -> Result)>, BlobStoreError> { let store = self.inner.store.read(); Ok(txs.into_iter().filter_map(|tx| store.get(&tx).map(|item| (tx, item.clone()))).collect()) } - fn get_exact(&self, txs: Vec) -> Result, BlobStoreError> { + fn get_exact( + &self, + txs: Vec, + ) -> Result>, BlobStoreError> { let store = self.inner.store.read(); - txs.into_iter() - .map(|tx| store.get(&tx).cloned().ok_or_else(|| BlobStoreError::MissingSidecar(tx))) - .collect() + Ok(txs.into_iter().filter_map(|tx| store.get(&tx).cloned()).collect()) } fn get_by_versioned_hashes( @@ -134,7 +135,7 @@ impl BlobStore for InMemoryBlobStore { /// Removes the given blob from the store and returns the size of the blob that was removed. #[inline] -fn remove_size(store: &mut HashMap, tx: &B256) -> usize { +fn remove_size(store: &mut HashMap>, tx: &B256) -> usize { store.remove(tx).map(|rem| rem.size()).unwrap_or_default() } @@ -143,11 +144,11 @@ fn remove_size(store: &mut HashMap, tx: &B256) -> /// We don't need to handle the size updates for replacements because transactions are unique. #[inline] fn insert_size( - store: &mut HashMap, + store: &mut HashMap>, tx: B256, blob: BlobTransactionSidecar, ) -> usize { let add = blob.size(); - store.insert(tx, blob); + store.insert(tx, Arc::new(blob)); add } diff --git a/crates/transaction-pool/src/blobstore/mod.rs b/crates/transaction-pool/src/blobstore/mod.rs index ee98e3eed..f8d37bfcc 100644 --- a/crates/transaction-pool/src/blobstore/mod.rs +++ b/crates/transaction-pool/src/blobstore/mod.rs @@ -8,7 +8,10 @@ pub use noop::NoopBlobStore; use reth_primitives::BlobTransactionSidecar; use std::{ fmt, - sync::atomic::{AtomicUsize, Ordering}, + sync::{ + atomic::{AtomicUsize, Ordering}, + Arc, + }, }; pub use tracker::{BlobStoreCanonTracker, BlobStoreUpdates}; @@ -44,7 +47,7 @@ pub trait BlobStore: fmt::Debug + Send + Sync + 'static { fn cleanup(&self) -> BlobStoreCleanupStat; /// Retrieves the decoded blob data for the given transaction hash. - fn get(&self, tx: B256) -> Result, BlobStoreError>; + fn get(&self, tx: B256) -> Result>, BlobStoreError>; /// Checks if the given transaction hash is in the blob store. fn contains(&self, tx: B256) -> Result; @@ -58,13 +61,14 @@ pub trait BlobStore: fmt::Debug + Send + Sync + 'static { fn get_all( &self, txs: Vec, - ) -> Result, BlobStoreError>; + ) -> Result)>, BlobStoreError>; /// Returns the exact [`BlobTransactionSidecar`] for the given transaction hashes in the exact /// order they were requested. /// /// Returns an error if any of the blobs are not found in the blob store. - fn get_exact(&self, txs: Vec) -> Result, BlobStoreError>; + fn get_exact(&self, txs: Vec) + -> Result>, BlobStoreError>; /// Return the [`BlobTransactionSidecar`]s for a list of blob versioned hashes. 
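With the store now handing out `Arc<BlobTransactionSidecar>`, callers that still need an owned sidecar (as `maintain.rs` and the sidecar-fetcher example do later in this patch) reach for `Arc::unwrap_or_clone`, which moves the value out when the `Arc` is unique and clones it otherwise. A small sketch with a stand-in `Sidecar` type:

use std::sync::Arc;

// Stand-in for BlobTransactionSidecar; only Clone matters for unwrap_or_clone.
#[derive(Clone, Debug, PartialEq)]
struct Sidecar(Vec<u8>);

fn main() {
    // Unique Arc: the inner value is moved out without cloning.
    let unique = Arc::new(Sidecar(vec![1]));
    let owned = Arc::unwrap_or_clone(unique);
    assert_eq!(owned, Sidecar(vec![1]));

    // Shared Arc: the inner value is cloned and the other handle stays valid.
    let shared = Arc::new(Sidecar(vec![2]));
    let also_shared = Arc::clone(&shared);
    let owned = Arc::unwrap_or_clone(shared);
    assert_eq!(owned, *also_shared);
}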
fn get_by_versioned_hashes( diff --git a/crates/transaction-pool/src/blobstore/noop.rs b/crates/transaction-pool/src/blobstore/noop.rs index 0e99858bd..0f2935735 100644 --- a/crates/transaction-pool/src/blobstore/noop.rs +++ b/crates/transaction-pool/src/blobstore/noop.rs @@ -1,6 +1,7 @@ use crate::blobstore::{BlobStore, BlobStoreCleanupStat, BlobStoreError, BlobTransactionSidecar}; use alloy_eips::eip4844::BlobAndProofV1; use alloy_primitives::B256; +use std::sync::Arc; /// A blobstore implementation that does nothing #[derive(Clone, Copy, Debug, PartialOrd, PartialEq, Eq, Default)] @@ -28,7 +29,7 @@ impl BlobStore for NoopBlobStore { BlobStoreCleanupStat::default() } - fn get(&self, _tx: B256) -> Result, BlobStoreError> { + fn get(&self, _tx: B256) -> Result>, BlobStoreError> { Ok(None) } @@ -39,11 +40,14 @@ impl BlobStore for NoopBlobStore { fn get_all( &self, _txs: Vec, - ) -> Result, BlobStoreError> { + ) -> Result)>, BlobStoreError> { Ok(vec![]) } - fn get_exact(&self, txs: Vec) -> Result, BlobStoreError> { + fn get_exact( + &self, + txs: Vec, + ) -> Result>, BlobStoreError> { if txs.is_empty() { return Ok(vec![]) } diff --git a/crates/transaction-pool/src/lib.rs b/crates/transaction-pool/src/lib.rs index 609ab987f..3a5e547ba 100644 --- a/crates/transaction-pool/src/lib.rs +++ b/crates/transaction-pool/src/lib.rs @@ -561,21 +561,24 @@ where self.pool.unique_senders() } - fn get_blob(&self, tx_hash: TxHash) -> Result, BlobStoreError> { + fn get_blob( + &self, + tx_hash: TxHash, + ) -> Result>, BlobStoreError> { self.pool.blob_store().get(tx_hash) } fn get_all_blobs( &self, tx_hashes: Vec, - ) -> Result, BlobStoreError> { + ) -> Result)>, BlobStoreError> { self.pool.blob_store().get_all(tx_hashes) } fn get_all_blobs_exact( &self, tx_hashes: Vec, - ) -> Result, BlobStoreError> { + ) -> Result>, BlobStoreError> { self.pool.blob_store().get_exact(tx_hashes) } diff --git a/crates/transaction-pool/src/maintain.rs b/crates/transaction-pool/src/maintain.rs index 09c042ae6..b62a6c18c 100644 --- a/crates/transaction-pool/src/maintain.rs +++ b/crates/transaction-pool/src/maintain.rs @@ -27,6 +27,7 @@ use std::{ collections::HashSet, hash::{Hash, Hasher}, path::{Path, PathBuf}, + sync::Arc, }; use tokio::sync::oneshot; use tracing::{debug, error, info, trace, warn}; @@ -328,6 +329,7 @@ pub async fn maintain_transaction_pool( pool.get_blob(tx.hash) .ok() .flatten() + .map(Arc::unwrap_or_clone) .and_then(|sidecar| { PooledTransactionsElementEcRecovered::try_from_blob_transaction( tx, sidecar, diff --git a/crates/transaction-pool/src/noop.rs b/crates/transaction-pool/src/noop.rs index 817ea7bad..4f4e5a381 100644 --- a/crates/transaction-pool/src/noop.rs +++ b/crates/transaction-pool/src/noop.rs @@ -275,21 +275,24 @@ impl TransactionPool for NoopTransactionPool { Default::default() } - fn get_blob(&self, _tx_hash: TxHash) -> Result, BlobStoreError> { + fn get_blob( + &self, + _tx_hash: TxHash, + ) -> Result>, BlobStoreError> { Ok(None) } fn get_all_blobs( &self, _tx_hashes: Vec, - ) -> Result, BlobStoreError> { + ) -> Result)>, BlobStoreError> { Ok(vec![]) } fn get_all_blobs_exact( &self, tx_hashes: Vec, - ) -> Result, BlobStoreError> { + ) -> Result>, BlobStoreError> { if tx_hashes.is_empty() { return Ok(vec![]) } diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index fef0fd0ee..2e7340954 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -307,7 +307,9 @@ where /// Caution: this assumes the given 
transaction is eip-4844 fn get_blob_transaction(&self, transaction: TransactionSigned) -> Option { if let Ok(Some(sidecar)) = self.blob_store.get(transaction.hash()) { - if let Ok(blob) = BlobTransaction::try_from_signed(transaction, sidecar) { + if let Ok(blob) = + BlobTransaction::try_from_signed(transaction, Arc::unwrap_or_clone(sidecar)) + { return Some(blob) } } diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index fbbddb98f..1b3004154 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -443,7 +443,10 @@ pub trait TransactionPool: Send + Sync + Clone { /// Returns the [BlobTransactionSidecar] for the given transaction hash if it exists in the blob /// store. - fn get_blob(&self, tx_hash: TxHash) -> Result, BlobStoreError>; + fn get_blob( + &self, + tx_hash: TxHash, + ) -> Result>, BlobStoreError>; /// Returns all [BlobTransactionSidecar] for the given transaction hashes if they exists in the /// blob store. @@ -453,7 +456,7 @@ pub trait TransactionPool: Send + Sync + Clone { fn get_all_blobs( &self, tx_hashes: Vec, - ) -> Result, BlobStoreError>; + ) -> Result)>, BlobStoreError>; /// Returns the exact [BlobTransactionSidecar] for the given transaction hashes in the order /// they were requested. @@ -462,7 +465,7 @@ pub trait TransactionPool: Send + Sync + Clone { fn get_all_blobs_exact( &self, tx_hashes: Vec, - ) -> Result, BlobStoreError>; + ) -> Result>, BlobStoreError>; /// Return the [`BlobTransactionSidecar`]s for a list of blob versioned hashes. fn get_blobs_for_versioned_hashes( diff --git a/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs b/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs index 1c53e4f41..2436ee021 100644 --- a/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs +++ b/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs @@ -13,6 +13,7 @@ use serde::{Deserialize, Serialize}; use std::{ collections::VecDeque, pin::Pin, + sync::Arc, task::{Context, Poll}, }; use thiserror::Error; @@ -110,9 +111,10 @@ where match self.pool.get_all_blobs_exact(txs.iter().map(|(tx, _)| tx.hash()).collect()) { Ok(blobs) => { actions_to_queue.reserve_exact(txs.len()); - for ((tx, _), sidecar) in txs.iter().zip(blobs.iter()) { - let transaction = BlobTransaction::try_from_signed(tx.clone(), sidecar.clone()) - .expect("should not fail to convert blob tx if it is already eip4844"); + for ((tx, _), sidecar) in txs.iter().zip(blobs.into_iter()) { + let transaction = + BlobTransaction::try_from_signed(tx.clone(), Arc::unwrap_or_clone(sidecar)) + .expect("should not fail to convert blob tx if it is already eip4844"); let block_metadata = BlockMetadata { block_hash: block.hash(), From b3e8327065ccb2570913ec9f99aa142f7bb8a962 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 30 Oct 2024 18:56:36 +0100 Subject: [PATCH 353/425] chore: rename v1 type (#12205) --- crates/engine/primitives/src/lib.rs | 18 ++++++++++++------ crates/ethereum/engine-primitives/src/lib.rs | 2 +- crates/optimism/node/src/engine.rs | 2 +- crates/rpc/rpc-api/src/engine.rs | 5 ++++- crates/rpc/rpc-engine-api/src/engine_api.rs | 4 ++-- examples/custom-engine-types/src/main.rs | 2 +- 6 files changed, 21 insertions(+), 12 deletions(-) diff --git a/crates/engine/primitives/src/lib.rs b/crates/engine/primitives/src/lib.rs index fab96b0d1..949ebf015 100644 --- a/crates/engine/primitives/src/lib.rs +++ b/crates/engine/primitives/src/lib.rs @@ -23,7 +23,7 @@ use serde::{de::DeserializeOwned, ser::Serialize}; /// 
payload job. Hence this trait is also [`PayloadTypes`]. pub trait EngineTypes: PayloadTypes< - BuiltPayload: TryInto + BuiltPayload: TryInto + TryInto + TryInto + TryInto, @@ -31,9 +31,15 @@ pub trait EngineTypes: + Serialize + 'static { - /// Execution Payload V1 type. - type ExecutionPayloadV1: DeserializeOwned + Serialize + Clone + Unpin + Send + Sync + 'static; - /// Execution Payload V2 type. + /// Execution Payload V1 envelope type. + type ExecutionPayloadEnvelopeV1: DeserializeOwned + + Serialize + + Clone + + Unpin + + Send + + Sync + + 'static; + /// Execution Payload V2 envelope type. type ExecutionPayloadEnvelopeV2: DeserializeOwned + Serialize + Clone @@ -41,7 +47,7 @@ pub trait EngineTypes: + Send + Sync + 'static; - /// Execution Payload V3 type. + /// Execution Payload V3 envelope type. type ExecutionPayloadEnvelopeV3: DeserializeOwned + Serialize + Clone @@ -49,7 +55,7 @@ pub trait EngineTypes: + Send + Sync + 'static; - /// Execution Payload V4 type. + /// Execution Payload V4 envelope type. type ExecutionPayloadEnvelopeV4: DeserializeOwned + Serialize + Clone diff --git a/crates/ethereum/engine-primitives/src/lib.rs b/crates/ethereum/engine-primitives/src/lib.rs index 20a558836..5addf2a18 100644 --- a/crates/ethereum/engine-primitives/src/lib.rs +++ b/crates/ethereum/engine-primitives/src/lib.rs @@ -43,7 +43,7 @@ where + TryInto + TryInto, { - type ExecutionPayloadV1 = ExecutionPayloadV1; + type ExecutionPayloadEnvelopeV1 = ExecutionPayloadV1; type ExecutionPayloadEnvelopeV2 = ExecutionPayloadEnvelopeV2; type ExecutionPayloadEnvelopeV3 = ExecutionPayloadEnvelopeV3; type ExecutionPayloadEnvelopeV4 = ExecutionPayloadEnvelopeV4; diff --git a/crates/optimism/node/src/engine.rs b/crates/optimism/node/src/engine.rs index 966d87279..7e19b2f93 100644 --- a/crates/optimism/node/src/engine.rs +++ b/crates/optimism/node/src/engine.rs @@ -39,7 +39,7 @@ where + TryInto + TryInto, { - type ExecutionPayloadV1 = ExecutionPayloadV1; + type ExecutionPayloadEnvelopeV1 = ExecutionPayloadV1; type ExecutionPayloadEnvelopeV2 = ExecutionPayloadEnvelopeV2; type ExecutionPayloadEnvelopeV3 = OpExecutionPayloadEnvelopeV3; type ExecutionPayloadEnvelopeV4 = OpExecutionPayloadEnvelopeV4; diff --git a/crates/rpc/rpc-api/src/engine.rs b/crates/rpc/rpc-api/src/engine.rs index ddf6d8461..d92173112 100644 --- a/crates/rpc/rpc-api/src/engine.rs +++ b/crates/rpc/rpc-api/src/engine.rs @@ -110,7 +110,10 @@ pub trait EngineApi { /// Note: /// > Provider software MAY stop the corresponding build process after serving this call. 
#[method(name = "getPayloadV1")] - async fn get_payload_v1(&self, payload_id: PayloadId) -> RpcResult; + async fn get_payload_v1( + &self, + payload_id: PayloadId, + ) -> RpcResult; /// See also /// diff --git a/crates/rpc/rpc-engine-api/src/engine_api.rs b/crates/rpc/rpc-engine-api/src/engine_api.rs index cca9f5d6b..383da2d21 100644 --- a/crates/rpc/rpc-engine-api/src/engine_api.rs +++ b/crates/rpc/rpc-engine-api/src/engine_api.rs @@ -281,7 +281,7 @@ where pub async fn get_payload_v1( &self, payload_id: PayloadId, - ) -> EngineApiResult { + ) -> EngineApiResult { self.inner .payload_store .resolve(payload_id) @@ -775,7 +775,7 @@ where async fn get_payload_v1( &self, payload_id: PayloadId, - ) -> RpcResult { + ) -> RpcResult { trace!(target: "rpc::engine", "Serving engine_getPayloadV1"); let start = Instant::now(); let res = Self::get_payload_v1(self, payload_id).await; diff --git a/examples/custom-engine-types/src/main.rs b/examples/custom-engine-types/src/main.rs index a48c44b20..896a4b55f 100644 --- a/examples/custom-engine-types/src/main.rs +++ b/examples/custom-engine-types/src/main.rs @@ -161,7 +161,7 @@ impl PayloadTypes for CustomEngineTypes { } impl EngineTypes for CustomEngineTypes { - type ExecutionPayloadV1 = ExecutionPayloadV1; + type ExecutionPayloadEnvelopeV1 = ExecutionPayloadV1; type ExecutionPayloadEnvelopeV2 = ExecutionPayloadEnvelopeV2; type ExecutionPayloadEnvelopeV3 = ExecutionPayloadEnvelopeV3; type ExecutionPayloadEnvelopeV4 = ExecutionPayloadEnvelopeV4; From 0c39704950e370c3c97acd4253f52eca5a57a94c Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 30 Oct 2024 19:04:44 +0100 Subject: [PATCH 354/425] feat: add missing is active at timestamp fns (#12206) --- crates/optimism/hardforks/src/lib.rs | 21 +++++++++++++++++++-- crates/optimism/node/src/engine.rs | 8 +++----- crates/optimism/payload/src/builder.rs | 19 ++++++------------- 3 files changed, 28 insertions(+), 20 deletions(-) diff --git a/crates/optimism/hardforks/src/lib.rs b/crates/optimism/hardforks/src/lib.rs index 91c11d3fd..bac0d0e04 100644 --- a/crates/optimism/hardforks/src/lib.rs +++ b/crates/optimism/hardforks/src/lib.rs @@ -26,18 +26,35 @@ pub trait OptimismHardforks: EthereumHardforks { self.fork(OptimismHardfork::Bedrock).active_at_block(block_number) } + /// Returns `true` if [`Canyon`](OptimismHardfork::Canyon) is active at given block timestamp. + fn is_canyon_active_at_timestamp(&self, timestamp: u64) -> bool { + self.fork(OptimismHardfork::Canyon).active_at_timestamp(timestamp) + } + /// Returns `true` if [`Ecotone`](OptimismHardfork::Ecotone) is active at given block timestamp. fn is_ecotone_active_at_timestamp(&self, timestamp: u64) -> bool { self.fork(OptimismHardfork::Ecotone).active_at_timestamp(timestamp) } - /// Returns `true` if [`Ecotone`](OptimismHardfork::Ecotone) is active at given block timestamp. + /// Returns `true` if [`Fjord`](OptimismHardfork::Fjord) is active at given block timestamp. fn is_fjord_active_at_timestamp(&self, timestamp: u64) -> bool { - self.fork(OptimismHardfork::Ecotone).active_at_timestamp(timestamp) + self.fork(OptimismHardfork::Fjord).active_at_timestamp(timestamp) } /// Returns `true` if [`Granite`](OptimismHardfork::Granite) is active at given block timestamp. fn is_granite_active_at_timestamp(&self, timestamp: u64) -> bool { self.fork(OptimismHardfork::Granite).active_at_timestamp(timestamp) } + + /// Returns `true` if [`Holocene`](OptimismHardfork::Holocene) is active at given block + /// timestamp. 
+ fn is_holocene_active_at_timestamp(&self, timestamp: u64) -> bool { + self.fork(OptimismHardfork::Holocene).active_at_timestamp(timestamp) + } + + /// Returns `true` if [`Regolith`](OptimismHardfork::Regolith) is active at given block + /// timestamp. + fn is_regolith_active_at_timestamp(&self, timestamp: u64) -> bool { + self.fork(OptimismHardfork::Regolith).active_at_timestamp(timestamp) + } } diff --git a/crates/optimism/node/src/engine.rs b/crates/optimism/node/src/engine.rs index 7e19b2f93..da8fde2b4 100644 --- a/crates/optimism/node/src/engine.rs +++ b/crates/optimism/node/src/engine.rs @@ -14,7 +14,7 @@ use reth_node_api::{ validate_version_specific_fields, EngineTypes, EngineValidator, }; use reth_optimism_chainspec::OpChainSpec; -use reth_optimism_forks::OptimismHardfork; +use reth_optimism_forks::{OptimismHardfork, OptimismHardforks}; use reth_optimism_payload_builder::{ builder::decode_eip_1559_params, OptimismBuiltPayload, OptimismPayloadBuilderAttributes, }; @@ -149,10 +149,8 @@ where )) } - if self.chain_spec.is_fork_active_at_timestamp( - OptimismHardfork::Holocene, - attributes.payload_attributes.timestamp, - ) { + if self.chain_spec.is_holocene_active_at_timestamp(attributes.payload_attributes.timestamp) + { let Some(eip_1559_params) = attributes.eip_1559_params else { return Err(EngineObjectValidationError::InvalidParams( "MissingEip1559ParamsInPayloadAttributes".to_string().into(), diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index e9b7e2c76..96ac28c5d 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -11,7 +11,7 @@ use reth_evm::{system_calls::SystemCaller, ConfigureEvm, ConfigureEvmEnv, NextBl use reth_execution_types::ExecutionOutcome; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::calculate_receipt_root_no_memo_optimism; -use reth_optimism_forks::{OptimismHardfork, OptimismHardforks}; +use reth_optimism_forks::OptimismHardforks; use reth_payload_primitives::{PayloadBuilderAttributes, PayloadBuilderError}; use reth_primitives::{ proofs, @@ -193,10 +193,8 @@ where let block_number = initialized_block_env.number.to::(); - let is_regolith = chain_spec.is_fork_active_at_timestamp( - OptimismHardfork::Regolith, - attributes.payload_attributes.timestamp, - ); + let is_regolith = + chain_spec.is_regolith_active_at_timestamp(attributes.payload_attributes.timestamp); // apply eip-4788 pre block contract call let mut system_caller = SystemCaller::new(evm_config.clone(), &chain_spec); @@ -315,10 +313,7 @@ where // receipt hashes should be computed when set. The state transition process // ensures this is only set for post-Canyon deposit transactions. 
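These helpers only ask whether the fork's timestamp condition is active, i.e. whether the block timestamp is at or past the configured activation. A simplified model, using the illustrative 1_800_000_000 Holocene activation that the engine tests above configure (not a real network value):

// Simplified model; the real implementation goes through ForkCondition on the chain spec.
const HOLOCENE_ACTIVATION: u64 = 1_800_000_000; // value used by the tests above

fn is_holocene_active_at_timestamp(timestamp: u64) -> bool {
    timestamp >= HOLOCENE_ACTIVATION
}

fn main() {
    assert!(!is_holocene_active_at_timestamp(1_799_999_999)); // pre-Holocene attributes
    assert!(is_holocene_active_at_timestamp(1_800_000_000)); // Holocene attributes
}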
deposit_receipt_version: chain_spec - .is_fork_active_at_timestamp( - OptimismHardfork::Canyon, - attributes.payload_attributes.timestamp, - ) + .is_canyon_active_at_timestamp(attributes.payload_attributes.timestamp) .then_some(1), })); @@ -473,10 +468,8 @@ where (None, None) }; - let is_holocene = chain_spec.is_fork_active_at_timestamp( - OptimismHardfork::Holocene, - attributes.payload_attributes.timestamp, - ); + let is_holocene = + chain_spec.is_holocene_active_at_timestamp(attributes.payload_attributes.timestamp); if is_holocene { extra_data = attributes From 09c666d676dc857692e883a6b93b87a06524ed36 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 30 Oct 2024 19:36:38 +0100 Subject: [PATCH 355/425] test: add test case for BestTransactions (#12209) --- crates/transaction-pool/src/pool/best.rs | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/crates/transaction-pool/src/pool/best.rs b/crates/transaction-pool/src/pool/best.rs index 6ade15be7..36a14edaa 100644 --- a/crates/transaction-pool/src/pool/best.rs +++ b/crates/transaction-pool/src/pool/best.rs @@ -401,6 +401,29 @@ mod tests { assert!(best.next().is_none()); } + #[test] + fn test_best_transactions_iter_invalid() { + let mut pool = PendingPool::new(MockOrdering::default()); + let mut f = MockTransactionFactory::default(); + + let num_tx = 10; + // insert 10 gapless tx + let tx = MockTransaction::eip1559(); + for nonce in 0..num_tx { + let tx = tx.clone().rng_hash().with_nonce(nonce); + let valid_tx = f.validated(tx); + pool.add_transaction(Arc::new(valid_tx), 0); + } + + let mut best: Box< + dyn crate::traits::BestTransactions>>, + > = Box::new(pool.best()); + + let tx = best.next().unwrap(); + best.mark_invalid(&tx); + assert!(best.next().is_none()); + } + #[test] fn test_best_with_fees_iter_base_fee_satisfied() { let mut pool = PendingPool::new(MockOrdering::default()); From b42b189210a6a4802dbf97521fee6f4fea8d8e11 Mon Sep 17 00:00:00 2001 From: "0xriazaka.eth" <168359025+0xriazaka@users.noreply.github.com> Date: Wed, 30 Oct 2024 22:45:16 +0100 Subject: [PATCH 356/425] Reth primitives traits tx type (#11720) Co-authored-by: Emilia Hane --- crates/primitives-traits/src/lib.rs | 4 ++++ crates/primitives-traits/src/tx_type.rs | 28 +++++++++++++++++++++++++ 2 files changed, 32 insertions(+) create mode 100644 crates/primitives-traits/src/tx_type.rs diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index 0489a250b..9f41bbd47 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -45,6 +45,10 @@ pub use alloy_primitives::{logs_bloom, Log, LogData}; mod storage; pub use storage::StorageEntry; +/// Transaction types +pub mod tx_type; +pub use tx_type::TxType; + /// Common header types pub mod header; #[cfg(any(test, feature = "arbitrary", feature = "test-utils"))] diff --git a/crates/primitives-traits/src/tx_type.rs b/crates/primitives-traits/src/tx_type.rs new file mode 100644 index 000000000..aebf7584f --- /dev/null +++ b/crates/primitives-traits/src/tx_type.rs @@ -0,0 +1,28 @@ +use alloy_eips::eip2718::Eip2718Error; +use alloy_primitives::{U64, U8}; +use alloy_rlp::{Decodable, Encodable}; +use core::fmt::{Debug, Display}; + +/// Trait representing the behavior of a transaction type. 
+pub trait TxType: + Into + + Into + + PartialEq + + Eq + + PartialEq + + TryFrom + + TryFrom + + TryFrom + + From + + Debug + + Display + + Clone + + Copy + + Default + + Encodable + + Decodable + + Send + + Sync + + 'static +{ +} From e5fc048139ad61f96ed581cb89c5aa4f0da12ac2 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 30 Oct 2024 23:43:26 +0100 Subject: [PATCH 357/425] docs: add context truncated input (#12207) --- crates/rpc/rpc/src/otterscan.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/rpc/rpc/src/otterscan.rs b/crates/rpc/rpc/src/otterscan.rs index 024bdd172..a772dd501 100644 --- a/crates/rpc/rpc/src/otterscan.rs +++ b/crates/rpc/rpc/src/otterscan.rs @@ -227,7 +227,8 @@ where *transactions = transactions.drain(page_start..page_end).collect::>(); // The input field returns only the 4 bytes method selector instead of the entire - // calldata byte blob. + // calldata byte blob + // See also: for tx in transactions.iter_mut() { if tx.input().len() > 4 { Eth::TransactionCompat::otterscan_api_truncate_input(tx); From c19af293a63fdfee4e92d7dd9a915e40c0a98afd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C3=A9a=20Narzis?= <78718413+lean-apple@users.noreply.github.com> Date: Thu, 31 Oct 2024 15:33:33 +0700 Subject: [PATCH 358/425] feat: add rlp support for `EthVersion` (#12221) --- crates/net/eth-wire-types/src/status.rs | 22 ++++---- crates/net/eth-wire-types/src/version.rs | 65 ++++++++++++++++++++++++ crates/net/eth-wire/src/errors/eth.rs | 3 +- crates/net/eth-wire/src/ethstream.rs | 12 ++--- crates/net/eth-wire/src/test_utils.rs | 2 +- crates/net/network/tests/it/session.rs | 4 +- examples/manual-p2p/src/main.rs | 3 +- 7 files changed, 89 insertions(+), 22 deletions(-) diff --git a/crates/net/eth-wire-types/src/status.rs b/crates/net/eth-wire-types/src/status.rs index 90e1731c9..d9e8d4319 100644 --- a/crates/net/eth-wire-types/src/status.rs +++ b/crates/net/eth-wire-types/src/status.rs @@ -19,7 +19,7 @@ use std::fmt::{Debug, Display}; pub struct Status { /// The current protocol version. For example, peers running `eth/66` would have a version of /// 66. - pub version: u8, + pub version: EthVersion, /// The chain id, as introduced in /// [EIP155](https://eips.ethereum.org/EIPS/eip-155#list-of-chain-ids). @@ -50,7 +50,7 @@ impl Status { /// Sets the [`EthVersion`] for the status. pub fn set_eth_version(&mut self, version: EthVersion) { - self.version = version as u8; + self.version = version; } /// Create a [`StatusBuilder`] from the given [`EthChainSpec`] and head block. @@ -122,7 +122,7 @@ impl Default for Status { fn default() -> Self { let mainnet_genesis = MAINNET.genesis_hash(); Self { - version: EthVersion::Eth68 as u8, + version: EthVersion::Eth68, chain: Chain::from_named(NamedChain::Mainnet), total_difficulty: U256::from(17_179_869_184u64), blockhash: mainnet_genesis, @@ -145,7 +145,7 @@ impl Default for Status { /// /// // this is just an example status message! /// let status = Status::builder() -/// .version(EthVersion::Eth66.into()) +/// .version(EthVersion::Eth66) /// .chain(Chain::mainnet()) /// .total_difficulty(U256::from(100)) /// .blockhash(B256::from(MAINNET_GENESIS_HASH)) @@ -156,7 +156,7 @@ impl Default for Status { /// assert_eq!( /// status, /// Status { -/// version: EthVersion::Eth66.into(), +/// version: EthVersion::Eth66, /// chain: Chain::mainnet(), /// total_difficulty: U256::from(100), /// blockhash: B256::from(MAINNET_GENESIS_HASH), @@ -177,7 +177,7 @@ impl StatusBuilder { } /// Sets the protocol version. 
- pub const fn version(mut self, version: u8) -> Self { + pub const fn version(mut self, version: EthVersion) -> Self { self.status.version = version; self } @@ -229,7 +229,7 @@ mod tests { fn encode_eth_status_message() { let expected = hex!("f85643018a07aac59dabcdd74bc567a0feb27336ca7923f8fab3bd617fcb6e75841538f71c1bcfc267d7838489d9e13da0d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3c684b715077d80"); let status = Status { - version: EthVersion::Eth67 as u8, + version: EthVersion::Eth67, chain: Chain::from_named(NamedChain::Mainnet), total_difficulty: U256::from(36206751599115524359527u128), blockhash: B256::from_str( @@ -249,7 +249,7 @@ mod tests { fn decode_eth_status_message() { let data = hex!("f85643018a07aac59dabcdd74bc567a0feb27336ca7923f8fab3bd617fcb6e75841538f71c1bcfc267d7838489d9e13da0d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3c684b715077d80"); let expected = Status { - version: EthVersion::Eth67 as u8, + version: EthVersion::Eth67, chain: Chain::from_named(NamedChain::Mainnet), total_difficulty: U256::from(36206751599115524359527u128), blockhash: B256::from_str( @@ -267,7 +267,7 @@ mod tests { fn encode_network_status_message() { let expected = hex!("f850423884024190faa0f8514c4680ef27700751b08f37645309ce65a449616a3ea966bf39dd935bb27ba00d21840abff46b96c84b2ac9e10e4f5cdaeb5693cb665db62a2f3b02d2d57b5bc6845d43d2fd80"); let status = Status { - version: EthVersion::Eth66 as u8, + version: EthVersion::Eth66, chain: Chain::from_named(NamedChain::BinanceSmartChain), total_difficulty: U256::from(37851386u64), blockhash: B256::from_str( @@ -290,7 +290,7 @@ mod tests { fn decode_network_status_message() { let data = hex!("f850423884024190faa0f8514c4680ef27700751b08f37645309ce65a449616a3ea966bf39dd935bb27ba00d21840abff46b96c84b2ac9e10e4f5cdaeb5693cb665db62a2f3b02d2d57b5bc6845d43d2fd80"); let expected = Status { - version: EthVersion::Eth66 as u8, + version: EthVersion::Eth66, chain: Chain::from_named(NamedChain::BinanceSmartChain), total_difficulty: U256::from(37851386u64), blockhash: B256::from_str( @@ -311,7 +311,7 @@ mod tests { fn decode_another_network_status_message() { let data = hex!("f86142820834936d68fcffffffffffffffffffffffffdeab81b8a0523e8163a6d620a4cc152c547a05f28a03fec91a2a615194cb86df9731372c0ca06499dccdc7c7def3ebb1ce4c6ee27ec6bd02aee570625ca391919faf77ef27bdc6841a67ccd880"); let expected = Status { - version: EthVersion::Eth66 as u8, + version: EthVersion::Eth66, chain: Chain::from_id(2100), total_difficulty: U256::from_str( "0x000000000000000000000000006d68fcffffffffffffffffffffffffdeab81b8", diff --git a/crates/net/eth-wire-types/src/version.rs b/crates/net/eth-wire-types/src/version.rs index 5a2e0ff96..40d51cb55 100644 --- a/crates/net/eth-wire-types/src/version.rs +++ b/crates/net/eth-wire-types/src/version.rs @@ -15,6 +15,8 @@ pub struct ParseVersionError(String); /// The `eth` protocol version. #[repr(u8)] #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Display)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))] pub enum EthVersion { /// The `eth` protocol version 66. Eth66 = 66, @@ -64,6 +66,26 @@ impl EthVersion { } } +/// RLP encodes `EthVersion` as a single byte (66-69). +impl Encodable for EthVersion { + fn encode(&self, out: &mut dyn BufMut) { + (*self as u8).encode(out) + } + + fn length(&self) -> usize { + (*self as u8).length() + } +} + +/// RLP decodes a single byte into `EthVersion`. 
+/// Returns error if byte is not a valid version (66-69). +impl Decodable for EthVersion { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { + let version = u8::decode(buf)?; + Self::try_from(version).map_err(|_| RlpError::Custom("invalid eth version")) + } +} + /// Allow for converting from a `&str` to an `EthVersion`. /// /// # Example @@ -183,6 +205,8 @@ impl Decodable for ProtocolVersion { #[cfg(test)] mod tests { use super::{EthVersion, ParseVersionError}; + use alloy_rlp::{Decodable, Encodable, Error as RlpError}; + use bytes::BytesMut; #[test] fn test_eth_version_try_from_str() { @@ -201,4 +225,45 @@ mod tests { assert_eq!(EthVersion::Eth69, "69".parse().unwrap()); assert_eq!(Err(ParseVersionError("70".to_string())), "70".parse::()); } + + #[test] + fn test_eth_version_rlp_encode() { + let versions = [EthVersion::Eth66, EthVersion::Eth67, EthVersion::Eth68, EthVersion::Eth69]; + + for version in versions { + let mut encoded = BytesMut::new(); + version.encode(&mut encoded); + + assert_eq!(encoded.len(), 1); + assert_eq!(encoded[0], version as u8); + } + } + #[test] + fn test_eth_version_rlp_decode() { + let test_cases = [ + (66_u8, Ok(EthVersion::Eth66)), + (67_u8, Ok(EthVersion::Eth67)), + (68_u8, Ok(EthVersion::Eth68)), + (69_u8, Ok(EthVersion::Eth69)), + (70_u8, Err(RlpError::Custom("invalid eth version"))), + (65_u8, Err(RlpError::Custom("invalid eth version"))), + ]; + + for (input, expected) in test_cases { + let mut encoded = BytesMut::new(); + input.encode(&mut encoded); + + let mut slice = encoded.as_ref(); + let result = EthVersion::decode(&mut slice); + assert_eq!(result, expected); + } + } + + #[test] + fn test_eth_version_total_messages() { + assert_eq!(EthVersion::Eth66.total_messages(), 15); + assert_eq!(EthVersion::Eth67.total_messages(), 13); + assert_eq!(EthVersion::Eth68.total_messages(), 13); + assert_eq!(EthVersion::Eth69.total_messages(), 11); + } } diff --git a/crates/net/eth-wire/src/errors/eth.rs b/crates/net/eth-wire/src/errors/eth.rs index 557fbd66a..1f8b995af 100644 --- a/crates/net/eth-wire/src/errors/eth.rs +++ b/crates/net/eth-wire/src/errors/eth.rs @@ -5,6 +5,7 @@ use crate::{ }; use alloy_primitives::B256; use reth_chainspec::Chain; +use reth_eth_wire_types::EthVersion; use reth_primitives::{GotExpected, GotExpectedBoxed, ValidationError}; use std::io; @@ -88,7 +89,7 @@ pub enum EthHandshakeError { MismatchedGenesis(GotExpectedBoxed), #[error("mismatched protocol version in status message: {0}")] /// Mismatched protocol versions in status messages. - MismatchedProtocolVersion(GotExpected), + MismatchedProtocolVersion(GotExpected), #[error("mismatched chain in status message: {0}")] /// Mismatch in chain details in status messages. 
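The new `Encodable`/`Decodable` impls rely on the RLP rule that a value below 0x80 encodes as that single byte, so versions 66-69 round-trip as one byte each. A stand-alone sketch of that round trip using plain `u8`s rather than `alloy_rlp`:

#[derive(Debug, Clone, Copy, PartialEq)]
enum Version { Eth66 = 66, Eth67 = 67, Eth68 = 68, Eth69 = 69 }

// A u8 below 0x80 RLP-encodes as the value itself, so the encoding is one byte.
fn encode(v: Version) -> Vec<u8> {
    vec![v as u8]
}

fn decode(byte: u8) -> Result<Version, &'static str> {
    match byte {
        66 => Ok(Version::Eth66),
        67 => Ok(Version::Eth67),
        68 => Ok(Version::Eth68),
        69 => Ok(Version::Eth69),
        _ => Err("invalid eth version"),
    }
}

fn main() {
    assert_eq!(encode(Version::Eth68), vec![0x44]); // 68 == 0x44
    assert_eq!(decode(0x45), Ok(Version::Eth69));   // 69 == 0x45
    assert!(decode(70).is_err());
}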
MismatchedChain(GotExpected), diff --git a/crates/net/eth-wire/src/ethstream.rs b/crates/net/eth-wire/src/ethstream.rs index 9deca99fb..74f3fab2b 100644 --- a/crates/net/eth-wire/src/ethstream.rs +++ b/crates/net/eth-wire/src/ethstream.rs @@ -102,7 +102,7 @@ where return Err(EthStreamError::MessageTooBig(their_msg.len())) } - let version = EthVersion::try_from(status.version)?; + let version = status.version; let msg = match ProtocolMessage::decode_message(version, &mut their_msg.as_ref()) { Ok(m) => m, Err(err) => { @@ -368,7 +368,7 @@ mod tests { let fork_filter = ForkFilter::new(Head::default(), genesis, 0, Vec::new()); let status = Status { - version: EthVersion::Eth67 as u8, + version: EthVersion::Eth67, chain: NamedChain::Mainnet.into(), total_difficulty: U256::ZERO, blockhash: B256::random(), @@ -415,7 +415,7 @@ mod tests { let fork_filter = ForkFilter::new(Head::default(), genesis, 0, Vec::new()); let status = Status { - version: EthVersion::Eth67 as u8, + version: EthVersion::Eth67, chain: NamedChain::Mainnet.into(), total_difficulty: U256::from(2).pow(U256::from(100)) - U256::from(1), blockhash: B256::random(), @@ -462,7 +462,7 @@ mod tests { let fork_filter = ForkFilter::new(Head::default(), genesis, 0, Vec::new()); let status = Status { - version: EthVersion::Eth67 as u8, + version: EthVersion::Eth67, chain: NamedChain::Mainnet.into(), total_difficulty: U256::from(2).pow(U256::from(100)), blockhash: B256::random(), @@ -603,7 +603,7 @@ mod tests { let fork_filter = ForkFilter::new(Head::default(), genesis, 0, Vec::new()); let status = Status { - version: EthVersion::Eth67 as u8, + version: EthVersion::Eth67, chain: NamedChain::Mainnet.into(), total_difficulty: U256::ZERO, blockhash: B256::random(), @@ -674,7 +674,7 @@ mod tests { let fork_filter = ForkFilter::new(Head::default(), genesis, 0, Vec::new()); let status = Status { - version: EthVersion::Eth67 as u8, + version: EthVersion::Eth67, chain: NamedChain::Mainnet.into(), total_difficulty: U256::ZERO, blockhash: B256::random(), diff --git a/crates/net/eth-wire/src/test_utils.rs b/crates/net/eth-wire/src/test_utils.rs index e516c0aee..d7a3aa582 100644 --- a/crates/net/eth-wire/src/test_utils.rs +++ b/crates/net/eth-wire/src/test_utils.rs @@ -37,7 +37,7 @@ pub fn eth_handshake() -> (Status, ForkFilter) { let fork_filter = ForkFilter::new(Head::default(), genesis, 0, Vec::new()); let status = Status { - version: EthVersion::Eth67 as u8, + version: EthVersion::Eth67, chain: Chain::mainnet(), total_difficulty: U256::ZERO, blockhash: B256::random(), diff --git a/crates/net/network/tests/it/session.rs b/crates/net/network/tests/it/session.rs index 6bc029d8a..3f74db3d3 100644 --- a/crates/net/network/tests/it/session.rs +++ b/crates/net/network/tests/it/session.rs @@ -33,7 +33,7 @@ async fn test_session_established_with_highest_version() { } NetworkEvent::SessionEstablished { peer_id, status, .. } => { assert_eq!(handle1.peer_id(), &peer_id); - assert_eq!(status.version, EthVersion::Eth68 as u8); + assert_eq!(status.version, EthVersion::Eth68); } ev => { panic!("unexpected event {ev:?}") @@ -71,7 +71,7 @@ async fn test_session_established_with_different_capability() { } NetworkEvent::SessionEstablished { peer_id, status, .. 
} => { assert_eq!(handle1.peer_id(), &peer_id); - assert_eq!(status.version, EthVersion::Eth66 as u8); + assert_eq!(status.version, EthVersion::Eth66); } ev => { panic!("unexpected event: {ev:?}") diff --git a/examples/manual-p2p/src/main.rs b/examples/manual-p2p/src/main.rs index 857a8a1c1..79a2ff26a 100644 --- a/examples/manual-p2p/src/main.rs +++ b/examples/manual-p2p/src/main.rs @@ -106,7 +106,8 @@ async fn handshake_eth(p2p_stream: AuthedP2PStream) -> eyre::Result<(AuthedEthSt .forkid(MAINNET.hardfork_fork_id(EthereumHardfork::Shanghai).unwrap()) .build(); - let status = Status { version: p2p_stream.shared_capabilities().eth()?.version(), ..status }; + let status = + Status { version: p2p_stream.shared_capabilities().eth()?.version().try_into()?, ..status }; let eth_unauthed = UnauthedEthStream::new(p2p_stream); Ok(eth_unauthed.handshake(status, fork_filter).await?) } From 9659717e83b58d2655f5f35a714c8ff8d9366f6c Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Thu, 31 Oct 2024 02:34:11 -0600 Subject: [PATCH 359/425] renamed OptimismPayloadBuilderAttributes to OpPayloadBuilderAttributes (#12213) --- crates/optimism/node/src/engine.rs | 4 ++-- crates/optimism/node/src/lib.rs | 2 +- crates/optimism/node/tests/e2e/utils.rs | 8 ++++---- crates/optimism/payload/src/builder.rs | 12 ++++++------ crates/optimism/payload/src/lib.rs | 2 +- crates/optimism/payload/src/payload.rs | 18 ++++++++---------- 6 files changed, 22 insertions(+), 24 deletions(-) diff --git a/crates/optimism/node/src/engine.rs b/crates/optimism/node/src/engine.rs index da8fde2b4..27a609d95 100644 --- a/crates/optimism/node/src/engine.rs +++ b/crates/optimism/node/src/engine.rs @@ -16,7 +16,7 @@ use reth_node_api::{ use reth_optimism_chainspec::OpChainSpec; use reth_optimism_forks::{OptimismHardfork, OptimismHardforks}; use reth_optimism_payload_builder::{ - builder::decode_eip_1559_params, OptimismBuiltPayload, OptimismPayloadBuilderAttributes, + builder::decode_eip_1559_params, OpPayloadBuilderAttributes, OptimismBuiltPayload, }; /// The types used in the optimism beacon consensus engine. @@ -53,7 +53,7 @@ pub struct OptimismPayloadTypes; impl PayloadTypes for OptimismPayloadTypes { type BuiltPayload = OptimismBuiltPayload; type PayloadAttributes = OpPayloadAttributes; - type PayloadBuilderAttributes = OptimismPayloadBuilderAttributes; + type PayloadBuilderAttributes = OpPayloadBuilderAttributes; } /// Validator for Optimism engine API. 
diff --git a/crates/optimism/node/src/lib.rs b/crates/optimism/node/src/lib.rs index 768f4d94e..a8ef472da 100644 --- a/crates/optimism/node/src/lib.rs +++ b/crates/optimism/node/src/lib.rs @@ -23,7 +23,7 @@ pub use node::OptimismNode; pub mod txpool; pub use reth_optimism_payload_builder::{ - OptimismBuiltPayload, OptimismPayloadBuilder, OptimismPayloadBuilderAttributes, + OpPayloadBuilderAttributes, OptimismBuiltPayload, OptimismPayloadBuilder, }; pub use reth_optimism_evm::*; diff --git a/crates/optimism/node/tests/e2e/utils.rs b/crates/optimism/node/tests/e2e/utils.rs index d4219b0fe..b445af33b 100644 --- a/crates/optimism/node/tests/e2e/utils.rs +++ b/crates/optimism/node/tests/e2e/utils.rs @@ -6,7 +6,7 @@ use reth_e2e_test_utils::{ }; use reth_optimism_chainspec::OpChainSpecBuilder; use reth_optimism_node::{ - node::OptimismAddOns, OptimismBuiltPayload, OptimismNode, OptimismPayloadBuilderAttributes, + node::OptimismAddOns, OpPayloadBuilderAttributes, OptimismBuiltPayload, OptimismNode, }; use reth_payload_builder::EthPayloadBuilderAttributes; use std::sync::Arc; @@ -31,7 +31,7 @@ pub(crate) async fn advance_chain( length: usize, node: &mut OpNode, wallet: Arc>, -) -> eyre::Result> { +) -> eyre::Result> { node.advance(length as u64, |_| { let wallet = wallet.clone(); Box::pin(async move { @@ -49,7 +49,7 @@ pub(crate) async fn advance_chain( } /// Helper function to create a new eth payload attributes -pub(crate) fn optimism_payload_attributes(timestamp: u64) -> OptimismPayloadBuilderAttributes { +pub(crate) fn optimism_payload_attributes(timestamp: u64) -> OpPayloadBuilderAttributes { let attributes = PayloadAttributes { timestamp, prev_randao: B256::ZERO, @@ -58,7 +58,7 @@ pub(crate) fn optimism_payload_attributes(timestamp: u64) -> OptimismPayloadBuil parent_beacon_block_root: Some(B256::ZERO), }; - OptimismPayloadBuilderAttributes { + OpPayloadBuilderAttributes { payload_attributes: EthPayloadBuilderAttributes::new(B256::ZERO, attributes), transactions: vec![], no_tx_pool: false, diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 96ac28c5d..3095ce351 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -33,7 +33,7 @@ use tracing::{debug, trace, warn}; use crate::{ error::OptimismPayloadBuilderError, - payload::{OptimismBuiltPayload, OptimismPayloadBuilderAttributes}, + payload::{OpPayloadBuilderAttributes, OptimismBuiltPayload}, }; use op_alloy_consensus::DepositTransaction; @@ -77,7 +77,7 @@ where /// (that has the `parent` as its parent). pub fn cfg_and_block_env( &self, - config: &PayloadConfig, + config: &PayloadConfig, parent: &Header, ) -> Result<(CfgEnvWithHandlerCfg, BlockEnv), EvmConfig::Error> { let next_attributes = NextBlockEnvAttributes { @@ -96,12 +96,12 @@ where Pool: TransactionPool, EvmConfig: ConfigureEvm
<Header = Header>
, { - type Attributes = OptimismPayloadBuilderAttributes; + type Attributes = OpPayloadBuilderAttributes; type BuiltPayload = OptimismBuiltPayload; fn try_build( &self, - args: BuildArguments, + args: BuildArguments, ) -> Result, PayloadBuilderError> { let (cfg_env, block_env) = self .cfg_and_block_env(&args.config, &args.config.parent_header) @@ -111,7 +111,7 @@ where fn on_missing_payload( &self, - _args: BuildArguments, + _args: BuildArguments, ) -> MissingPayloadBehaviour { // we want to await the job that's already in progress because that should be returned as // is, there's no benefit in racing another job @@ -154,7 +154,7 @@ where #[inline] pub(crate) fn optimism_payload( evm_config: &EvmConfig, - args: BuildArguments, + args: BuildArguments, initialized_cfg: CfgEnvWithHandlerCfg, initialized_block_env: BlockEnv, _compute_pending_block: bool, diff --git a/crates/optimism/payload/src/lib.rs b/crates/optimism/payload/src/lib.rs index c06b49c53..1c7bcaf70 100644 --- a/crates/optimism/payload/src/lib.rs +++ b/crates/optimism/payload/src/lib.rs @@ -15,4 +15,4 @@ pub mod builder; pub use builder::OptimismPayloadBuilder; pub mod error; pub mod payload; -pub use payload::{OpPayloadAttributes, OptimismBuiltPayload, OptimismPayloadBuilderAttributes}; +pub use payload::{OpPayloadAttributes, OpPayloadBuilderAttributes, OptimismBuiltPayload}; diff --git a/crates/optimism/payload/src/payload.rs b/crates/optimism/payload/src/payload.rs index 056edfe7b..ecfebdf00 100644 --- a/crates/optimism/payload/src/payload.rs +++ b/crates/optimism/payload/src/payload.rs @@ -25,7 +25,7 @@ use std::sync::Arc; /// Optimism Payload Builder Attributes #[derive(Debug, Clone, PartialEq, Eq, Default)] -pub struct OptimismPayloadBuilderAttributes { +pub struct OpPayloadBuilderAttributes { /// Inner ethereum payload builder attributes pub payload_attributes: EthPayloadBuilderAttributes, /// `NoTxPool` option for the generated payload @@ -39,7 +39,7 @@ pub struct OptimismPayloadBuilderAttributes { pub eip_1559_params: Option, } -impl OptimismPayloadBuilderAttributes { +impl OpPayloadBuilderAttributes { /// Extracts the `eip1559` parameters for the payload. pub fn get_holocene_extra_data( &self, @@ -73,7 +73,7 @@ impl OptimismPayloadBuilderAttributes { } } -impl PayloadBuilderAttributes for OptimismPayloadBuilderAttributes { +impl PayloadBuilderAttributes for OpPayloadBuilderAttributes { type RpcPayloadAttributes = OpPayloadAttributes; type Error = alloy_rlp::Error; @@ -169,7 +169,7 @@ pub struct OptimismBuiltPayload { /// The rollup's chainspec. pub(crate) chain_spec: Arc, /// The payload attributes. 
- pub(crate) attributes: OptimismPayloadBuilderAttributes, + pub(crate) attributes: OpPayloadBuilderAttributes, } // === impl BuiltPayload === @@ -181,7 +181,7 @@ impl OptimismBuiltPayload { block: SealedBlock, fees: U256, chain_spec: Arc, - attributes: OptimismPayloadBuilderAttributes, + attributes: OpPayloadBuilderAttributes, executed_block: Option, ) -> Self { Self { id, block, executed_block, fees, sidecars: Vec::new(), chain_spec, attributes } @@ -411,7 +411,7 @@ mod tests { #[test] fn test_get_extra_data_post_holocene() { - let attributes = OptimismPayloadBuilderAttributes { + let attributes = OpPayloadBuilderAttributes { eip_1559_params: Some(B64::from_str("0x0000000800000008").unwrap()), ..Default::default() }; @@ -421,10 +421,8 @@ mod tests { #[test] fn test_get_extra_data_post_holocene_default() { - let attributes = OptimismPayloadBuilderAttributes { - eip_1559_params: Some(B64::ZERO), - ..Default::default() - }; + let attributes = + OpPayloadBuilderAttributes { eip_1559_params: Some(B64::ZERO), ..Default::default() }; let extra_data = attributes.get_holocene_extra_data(BaseFeeParams::new(80, 60)); assert_eq!(extra_data.unwrap(), Bytes::copy_from_slice(&[0, 0, 0, 0, 80, 0, 0, 0, 60])); } From 66cc619128450a0368d6814a177f3b812758618a Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 31 Oct 2024 09:34:31 +0100 Subject: [PATCH 360/425] chore: rm deprecated txpool fn (#12198) --- crates/transaction-pool/src/lib.rs | 7 ------- crates/transaction-pool/src/noop.rs | 7 ------- crates/transaction-pool/src/traits.rs | 10 ---------- 3 files changed, 24 deletions(-) diff --git a/crates/transaction-pool/src/lib.rs b/crates/transaction-pool/src/lib.rs index 3a5e547ba..f8f06b805 100644 --- a/crates/transaction-pool/src/lib.rs +++ b/crates/transaction-pool/src/lib.rs @@ -430,13 +430,6 @@ where Box::new(self.pool.best_transactions()) } - fn best_transactions_with_base_fee( - &self, - base_fee: u64, - ) -> Box>>> { - self.pool.best_transactions_with_attributes(BestTransactionsAttributes::base_fee(base_fee)) - } - fn best_transactions_with_attributes( &self, best_transactions_attributes: BestTransactionsAttributes, diff --git a/crates/transaction-pool/src/noop.rs b/crates/transaction-pool/src/noop.rs index 4f4e5a381..0f87f06f7 100644 --- a/crates/transaction-pool/src/noop.rs +++ b/crates/transaction-pool/src/noop.rs @@ -150,13 +150,6 @@ impl TransactionPool for NoopTransactionPool { Box::new(std::iter::empty()) } - fn best_transactions_with_base_fee( - &self, - _: u64, - ) -> Box>>> { - Box::new(std::iter::empty()) - } - fn best_transactions_with_attributes( &self, _: BestTransactionsAttributes, diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index 1b3004154..0d8f6dbb5 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -248,16 +248,6 @@ pub trait TransactionPool: Send + Sync + Clone { &self, ) -> Box>>>; - /// Returns an iterator that yields transactions that are ready for block production with the - /// given base fee. - /// - /// Consumer: Block production - #[deprecated(note = "Use best_transactions_with_attributes instead.")] - fn best_transactions_with_base_fee( - &self, - base_fee: u64, - ) -> Box>>>; - /// Returns an iterator that yields transactions that are ready for block production with the /// given base fee and optional blob fee attributes. 
/// From 41044a2601aab03a76838921ff92009debeb27e3 Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Thu, 31 Oct 2024 03:07:20 -0600 Subject: [PATCH 361/425] Apply beacon system call to trace_block (#12030) Co-authored-by: Matthias Seitz --- crates/rpc/rpc/src/debug.rs | 35 ++++++++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index 6da03b046..8b9e16023 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -98,6 +98,7 @@ where cfg: CfgEnvWithHandlerCfg, block_env: BlockEnv, opts: GethDebugTracingOptions, + parent_beacon_block_root: Option, ) -> Result, Eth::Error> { if transactions.is_empty() { // nothing to trace @@ -111,6 +112,26 @@ where let block_hash = at.as_block_hash(); let mut results = Vec::with_capacity(transactions.len()); let mut db = CacheDB::new(StateProviderDatabase::new(state)); + + let mut system_caller = SystemCaller::new( + RpcNodeCore::evm_config(this.eth_api()).clone(), + RpcNodeCore::provider(this.eth_api()).chain_spec(), + ); + + // apply relevant system calls + system_caller + .pre_block_beacon_root_contract_call( + &mut db, + &cfg, + &block_env, + parent_beacon_block_root, + ) + .map_err(|_| { + EthApiError::EvmCustom( + "failed to apply 4788 beacon root system call".to_string(), + ) + })?; + let mut transactions = transactions.into_iter().enumerate().peekable(); let mut inspector = None; while let Some((index, tx)) = transactions.next() { @@ -170,6 +191,9 @@ where // we trace on top the block's parent block let parent = block.parent_hash; + // we need the beacon block root for a system call + let parent_beacon_block_root = block.parent_beacon_block_root; + // Depending on EIP-2 we need to recover the transactions differently let transactions = if self.inner.provider.chain_spec().is_homestead_active_at_block(block.number) { @@ -196,7 +220,15 @@ where .collect::, Eth::Error>>()? }; - self.trace_block(parent.into(), transactions, cfg, block_env, opts).await + self.trace_block( + parent.into(), + transactions, + cfg, + block_env, + opts, + parent_beacon_block_root, + ) + .await } /// Replays a block and returns the trace of each transaction. @@ -228,6 +260,7 @@ where cfg, block_env, opts, + block.parent_beacon_block_root, ) .await } From 76c5aef911d909161be47113cd88b7ea36fafffd Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Thu, 31 Oct 2024 11:53:59 +0100 Subject: [PATCH 362/425] fix(trie): move to sibling on invalid tree mask (#12193) Co-authored-by: Federico Gimenez --- crates/trie/trie/src/metrics.rs | 22 +++++++++++++++++++++- crates/trie/trie/src/walker.rs | 32 ++++++++++++++++++++++++++++++-- 2 files changed, 51 insertions(+), 3 deletions(-) diff --git a/crates/trie/trie/src/metrics.rs b/crates/trie/trie/src/metrics.rs index 7582f3741..006dc7e36 100644 --- a/crates/trie/trie/src/metrics.rs +++ b/crates/trie/trie/src/metrics.rs @@ -1,5 +1,5 @@ use crate::stats::TrieStats; -use metrics::Histogram; +use metrics::{Counter, Histogram}; use reth_metrics::Metrics; /// Wrapper for state root metrics. @@ -63,3 +63,23 @@ impl TrieType { } } } + +/// Metrics for trie walker +#[derive(Clone, Metrics)] +#[metrics(scope = "trie.walker")] +pub struct WalkerMetrics { + /// The number of subnodes out of order due to wrong tree mask. + out_of_order_subnode: Counter, +} + +impl WalkerMetrics { + /// Create new metrics for the given trie type. 
+ pub fn new(ty: TrieType) -> Self { + Self::new_with_labels(&[("type", ty.as_str())]) + } + + /// Increment `out_of_order_subnode`. + pub fn inc_out_of_order_subnode(&self, amount: u64) { + self.out_of_order_subnode.increment(amount); + } +} diff --git a/crates/trie/trie/src/walker.rs b/crates/trie/trie/src/walker.rs index e75a96d0f..aaff293b3 100644 --- a/crates/trie/trie/src/walker.rs +++ b/crates/trie/trie/src/walker.rs @@ -7,6 +7,9 @@ use alloy_primitives::B256; use reth_storage_errors::db::DatabaseError; use std::collections::HashSet; +#[cfg(feature = "metrics")] +use crate::metrics::WalkerMetrics; + /// `TrieWalker` is a structure that enables traversal of a Merkle trie. /// It allows moving through the trie in a depth-first manner, skipping certain branches /// if they have not changed. @@ -24,13 +27,23 @@ pub struct TrieWalker { pub changes: PrefixSet, /// The retained trie node keys that need to be removed. removed_keys: Option>, + #[cfg(feature = "metrics")] + /// Walker metrics. + metrics: WalkerMetrics, } impl TrieWalker { /// Constructs a new `TrieWalker` from existing stack and a cursor. pub fn from_stack(cursor: C, stack: Vec, changes: PrefixSet) -> Self { - let mut this = - Self { cursor, changes, stack, can_skip_current_node: false, removed_keys: None }; + let mut this = Self { + cursor, + changes, + stack, + can_skip_current_node: false, + removed_keys: None, + #[cfg(feature = "metrics")] + metrics: WalkerMetrics::default(), + }; this.update_skip_node(); this } @@ -113,6 +126,8 @@ impl TrieWalker { stack: vec![CursorSubNode::default()], can_skip_current_node: false, removed_keys: None, + #[cfg(feature = "metrics")] + metrics: WalkerMetrics::default(), }; // Set up the root node of the trie in the stack, if it exists. @@ -179,6 +194,19 @@ impl TrieWalker { self.stack[0].set_nibble(key[0] as i8); } + // The current tree mask might have been set incorrectly. + // Sanity check that the newly retrieved trie node key is the child of the last item + // on the stack. If not, advance to the next sibling instead of adding the node to the + // stack. + if let Some(subnode) = self.stack.last() { + if !key.starts_with(subnode.full_key()) { + #[cfg(feature = "metrics")] + self.metrics.inc_out_of_order_subnode(1); + self.move_to_next_sibling(false)?; + return Ok(()) + } + } + // Create a new CursorSubNode and push it to the stack. 
let subnode = CursorSubNode::new(key, Some(node)); let nibble = subnode.nibble(); From 1f1dcc950dc2f1b3bee5a562a1c2c7cd9db04fff Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 31 Oct 2024 14:45:55 +0100 Subject: [PATCH 363/425] chore: simplify SystemCaller setup (#12223) --- crates/rpc/rpc-eth-api/src/helpers/trace.rs | 12 +---- crates/rpc/rpc/src/debug.rs | 60 ++++++++++----------- 2 files changed, 30 insertions(+), 42 deletions(-) diff --git a/crates/rpc/rpc-eth-api/src/helpers/trace.rs b/crates/rpc/rpc-eth-api/src/helpers/trace.rs index da1d1cdb9..fa70b2df2 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/trace.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/trace.rs @@ -195,11 +195,7 @@ pub trait Trace: LoadState> { let block_txs = block.transactions_with_sender(); // apply relevant system calls - let mut system_caller = SystemCaller::new( - this.evm_config().clone(), - RpcNodeCore::provider(&this).chain_spec(), - ); - system_caller + SystemCaller::new(this.evm_config().clone(), this.provider().chain_spec()) .pre_block_beacon_root_contract_call( &mut db, &cfg, @@ -338,11 +334,7 @@ pub trait Trace: LoadState> { CacheDB::new(StateProviderDatabase::new(StateProviderTraitObjWrapper(&state))); // apply relevant system calls - let mut system_caller = SystemCaller::new( - this.evm_config().clone(), - RpcNodeCore::provider(&this).chain_spec(), - ); - system_caller + SystemCaller::new(this.evm_config().clone(), this.provider().chain_spec()) .pre_block_beacon_root_contract_call( &mut db, &cfg, diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index 8b9e16023..e2746a53c 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -113,24 +113,22 @@ where let mut results = Vec::with_capacity(transactions.len()); let mut db = CacheDB::new(StateProviderDatabase::new(state)); - let mut system_caller = SystemCaller::new( - RpcNodeCore::evm_config(this.eth_api()).clone(), - RpcNodeCore::provider(this.eth_api()).chain_spec(), - ); - // apply relevant system calls - system_caller - .pre_block_beacon_root_contract_call( - &mut db, - &cfg, - &block_env, - parent_beacon_block_root, + SystemCaller::new( + this.eth_api().evm_config().clone(), + this.eth_api().provider().chain_spec(), + ) + .pre_block_beacon_root_contract_call( + &mut db, + &cfg, + &block_env, + parent_beacon_block_root, + ) + .map_err(|_| { + EthApiError::EvmCustom( + "failed to apply 4788 beacon root system call".to_string(), ) - .map_err(|_| { - EthApiError::EvmCustom( - "failed to apply 4788 beacon root system call".to_string(), - ) - })?; + })?; let mut transactions = transactions.into_iter().enumerate().peekable(); let mut inspector = None; @@ -296,23 +294,21 @@ where let mut db = CacheDB::new(StateProviderDatabase::new(state)); // apply relevant system calls - let mut system_caller = SystemCaller::new( - RpcNodeCore::evm_config(this.eth_api()).clone(), - RpcNodeCore::provider(this.eth_api()).chain_spec(), - ); - - system_caller - .pre_block_beacon_root_contract_call( - &mut db, - &cfg, - &block_env, - parent_beacon_block_root, + SystemCaller::new( + this.eth_api().evm_config().clone(), + this.eth_api().provider().chain_spec(), + ) + .pre_block_beacon_root_contract_call( + &mut db, + &cfg, + &block_env, + parent_beacon_block_root, + ) + .map_err(|_| { + EthApiError::EvmCustom( + "failed to apply 4788 beacon root system call".to_string(), ) - .map_err(|_| { - EthApiError::EvmCustom( - "failed to apply 4788 beacon root system call".to_string(), - ) - })?; + })?; // replay all transactions prior to 
the targeted transaction let index = this.eth_api().replay_transactions_until( From 460e26cc51c1902cac88b7287c7491976478ca18 Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Thu, 31 Oct 2024 09:51:33 -0600 Subject: [PATCH 364/425] renamed OptimismBuiltPayload to OpBuiltPayload (#12227) --- crates/optimism/node/src/engine.rs | 4 ++-- crates/optimism/node/src/lib.rs | 2 +- crates/optimism/node/tests/e2e/utils.rs | 4 ++-- crates/optimism/payload/src/builder.rs | 18 +++++++-------- crates/optimism/payload/src/lib.rs | 2 +- crates/optimism/payload/src/payload.rs | 30 ++++++++++++------------- 6 files changed, 30 insertions(+), 30 deletions(-) diff --git a/crates/optimism/node/src/engine.rs b/crates/optimism/node/src/engine.rs index 27a609d95..0f48dc347 100644 --- a/crates/optimism/node/src/engine.rs +++ b/crates/optimism/node/src/engine.rs @@ -16,7 +16,7 @@ use reth_node_api::{ use reth_optimism_chainspec::OpChainSpec; use reth_optimism_forks::{OptimismHardfork, OptimismHardforks}; use reth_optimism_payload_builder::{ - builder::decode_eip_1559_params, OpPayloadBuilderAttributes, OptimismBuiltPayload, + builder::decode_eip_1559_params, OpBuiltPayload, OpPayloadBuilderAttributes, }; /// The types used in the optimism beacon consensus engine. @@ -51,7 +51,7 @@ where pub struct OptimismPayloadTypes; impl PayloadTypes for OptimismPayloadTypes { - type BuiltPayload = OptimismBuiltPayload; + type BuiltPayload = OpBuiltPayload; type PayloadAttributes = OpPayloadAttributes; type PayloadBuilderAttributes = OpPayloadBuilderAttributes; } diff --git a/crates/optimism/node/src/lib.rs b/crates/optimism/node/src/lib.rs index a8ef472da..9bc15ef26 100644 --- a/crates/optimism/node/src/lib.rs +++ b/crates/optimism/node/src/lib.rs @@ -23,7 +23,7 @@ pub use node::OptimismNode; pub mod txpool; pub use reth_optimism_payload_builder::{ - OpPayloadBuilderAttributes, OptimismBuiltPayload, OptimismPayloadBuilder, + OpBuiltPayload, OpPayloadBuilderAttributes, OptimismPayloadBuilder, }; pub use reth_optimism_evm::*; diff --git a/crates/optimism/node/tests/e2e/utils.rs b/crates/optimism/node/tests/e2e/utils.rs index b445af33b..16eb97491 100644 --- a/crates/optimism/node/tests/e2e/utils.rs +++ b/crates/optimism/node/tests/e2e/utils.rs @@ -6,7 +6,7 @@ use reth_e2e_test_utils::{ }; use reth_optimism_chainspec::OpChainSpecBuilder; use reth_optimism_node::{ - node::OptimismAddOns, OpPayloadBuilderAttributes, OptimismBuiltPayload, OptimismNode, + node::OptimismAddOns, OpBuiltPayload, OpPayloadBuilderAttributes, OptimismNode, }; use reth_payload_builder::EthPayloadBuilderAttributes; use std::sync::Arc; @@ -31,7 +31,7 @@ pub(crate) async fn advance_chain( length: usize, node: &mut OpNode, wallet: Arc>, -) -> eyre::Result> { +) -> eyre::Result> { node.advance(length as u64, |_| { let wallet = wallet.clone(); Box::pin(async move { diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 3095ce351..a1fa3d47b 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -33,7 +33,7 @@ use tracing::{debug, trace, warn}; use crate::{ error::OptimismPayloadBuilderError, - payload::{OpPayloadBuilderAttributes, OptimismBuiltPayload}, + payload::{OpBuiltPayload, OpPayloadBuilderAttributes}, }; use op_alloy_consensus::DepositTransaction; @@ -97,12 +97,12 @@ where EvmConfig: ConfigureEvm
, { type Attributes = OpPayloadBuilderAttributes; - type BuiltPayload = OptimismBuiltPayload; + type BuiltPayload = OpBuiltPayload; fn try_build( &self, - args: BuildArguments, - ) -> Result, PayloadBuilderError> { + args: BuildArguments, + ) -> Result, PayloadBuilderError> { let (cfg_env, block_env) = self .cfg_and_block_env(&args.config, &args.config.parent_header) .map_err(PayloadBuilderError::other)?; @@ -111,7 +111,7 @@ where fn on_missing_payload( &self, - _args: BuildArguments, + _args: BuildArguments, ) -> MissingPayloadBehaviour { // we want to await the job that's already in progress because that should be returned as // is, there's no benefit in racing another job @@ -124,7 +124,7 @@ where &self, client: &Client, config: PayloadConfig, - ) -> Result { + ) -> Result { let args = BuildArguments { client, config, @@ -154,11 +154,11 @@ where #[inline] pub(crate) fn optimism_payload( evm_config: &EvmConfig, - args: BuildArguments, + args: BuildArguments, initialized_cfg: CfgEnvWithHandlerCfg, initialized_block_env: BlockEnv, _compute_pending_block: bool, -) -> Result, PayloadBuilderError> +) -> Result, PayloadBuilderError> where EvmConfig: ConfigureEvm
, Client: StateProviderFactory + ChainSpecProvider, @@ -523,7 +523,7 @@ where let no_tx_pool = attributes.no_tx_pool; - let payload = OptimismBuiltPayload::new( + let payload = OpBuiltPayload::new( attributes.payload_attributes.id, sealed_block, total_fees, diff --git a/crates/optimism/payload/src/lib.rs b/crates/optimism/payload/src/lib.rs index 1c7bcaf70..e1f55e51b 100644 --- a/crates/optimism/payload/src/lib.rs +++ b/crates/optimism/payload/src/lib.rs @@ -15,4 +15,4 @@ pub mod builder; pub use builder::OptimismPayloadBuilder; pub mod error; pub mod payload; -pub use payload::{OpPayloadAttributes, OpPayloadBuilderAttributes, OptimismBuiltPayload}; +pub use payload::{OpBuiltPayload, OpPayloadAttributes, OpPayloadBuilderAttributes}; diff --git a/crates/optimism/payload/src/payload.rs b/crates/optimism/payload/src/payload.rs index ecfebdf00..3a7d87acc 100644 --- a/crates/optimism/payload/src/payload.rs +++ b/crates/optimism/payload/src/payload.rs @@ -154,7 +154,7 @@ impl PayloadBuilderAttributes for OpPayloadBuilderAttributes { /// Contains the built payload. #[derive(Debug, Clone)] -pub struct OptimismBuiltPayload { +pub struct OpBuiltPayload { /// Identifier of the payload pub(crate) id: PayloadId, /// The built block @@ -174,7 +174,7 @@ pub struct OptimismBuiltPayload { // === impl BuiltPayload === -impl OptimismBuiltPayload { +impl OpBuiltPayload { /// Initializes the payload with the given initial block. pub const fn new( id: PayloadId, @@ -208,7 +208,7 @@ impl OptimismBuiltPayload { } } -impl BuiltPayload for OptimismBuiltPayload { +impl BuiltPayload for OpBuiltPayload { fn block(&self) -> &SealedBlock { &self.block } @@ -226,7 +226,7 @@ impl BuiltPayload for OptimismBuiltPayload { } } -impl BuiltPayload for &OptimismBuiltPayload { +impl BuiltPayload for &OpBuiltPayload { fn block(&self) -> &SealedBlock { (**self).block() } @@ -245,24 +245,24 @@ impl BuiltPayload for &OptimismBuiltPayload { } // V1 engine_getPayloadV1 response -impl From for ExecutionPayloadV1 { - fn from(value: OptimismBuiltPayload) -> Self { +impl From for ExecutionPayloadV1 { + fn from(value: OpBuiltPayload) -> Self { block_to_payload_v1(value.block) } } // V2 engine_getPayloadV2 response -impl From for ExecutionPayloadEnvelopeV2 { - fn from(value: OptimismBuiltPayload) -> Self { - let OptimismBuiltPayload { block, fees, .. } = value; +impl From for ExecutionPayloadEnvelopeV2 { + fn from(value: OpBuiltPayload) -> Self { + let OpBuiltPayload { block, fees, .. } = value; Self { block_value: fees, execution_payload: convert_block_to_payload_field_v2(block) } } } -impl From for OpExecutionPayloadEnvelopeV3 { - fn from(value: OptimismBuiltPayload) -> Self { - let OptimismBuiltPayload { block, fees, sidecars, chain_spec, attributes, .. } = value; +impl From for OpExecutionPayloadEnvelopeV3 { + fn from(value: OpBuiltPayload) -> Self { + let OpBuiltPayload { block, fees, sidecars, chain_spec, attributes, .. } = value; let parent_beacon_block_root = if chain_spec.is_cancun_active_at_timestamp(attributes.timestamp()) { @@ -287,9 +287,9 @@ impl From for OpExecutionPayloadEnvelopeV3 { } } } -impl From for OpExecutionPayloadEnvelopeV4 { - fn from(value: OptimismBuiltPayload) -> Self { - let OptimismBuiltPayload { block, fees, sidecars, chain_spec, attributes, .. } = value; +impl From for OpExecutionPayloadEnvelopeV4 { + fn from(value: OpBuiltPayload) -> Self { + let OpBuiltPayload { block, fees, sidecars, chain_spec, attributes, .. 
} = value; let parent_beacon_block_root = if chain_spec.is_cancun_active_at_timestamp(attributes.timestamp()) { From 219def95821b6ba631e8e1457aca86acdc3589eb Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Thu, 31 Oct 2024 17:21:06 +0100 Subject: [PATCH 365/425] chore(ci): pin kurtosis to working version (#12225) --- .github/workflows/kurtosis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/kurtosis.yml b/.github/workflows/kurtosis.yml index 74d26dbd3..43f5c3605 100644 --- a/.github/workflows/kurtosis.yml +++ b/.github/workflows/kurtosis.yml @@ -79,6 +79,7 @@ jobs: - name: Run kurtosis uses: ethpandaops/kurtosis-assertoor-github-action@v1 with: + kurtosis_version: 1.3.1 ethereum_package_args: '.github/assets/kurtosis_network_params.yaml' notify-on-error: From d555f9ef3a057c9143c7cf7b4ea69eb30859295d Mon Sep 17 00:00:00 2001 From: Dan Cline <6798349+Rjected@users.noreply.github.com> Date: Thu, 31 Oct 2024 15:21:55 -0400 Subject: [PATCH 366/425] chore(book): fix engine api typo (#12231) --- book/run/mainnet.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/book/run/mainnet.md b/book/run/mainnet.md index 4412f51c7..6f1ec144d 100644 --- a/book/run/mainnet.md +++ b/book/run/mainnet.md @@ -83,4 +83,4 @@ In the meantime, consider setting up [observability](./observability.md) to moni ## Running without a Consensus Layer -We provide a method for running Reth without a Consensus Layer via the `--debug.tip ` parameter. If you provide that to your node, it will simulate sending a `engine_forkChoiceUpdated` message _once_ and will trigger syncing to the provided block hash. This is useful for testing and debugging purposes, but in order to have a node that can keep up with the tip you'll need to run a CL alongside it. At the moment we have no plans of including a Consensus Layer implementation in Reth, and we are open to including light clients other methods of syncing like importing Lighthouse as a library. +We provide a method for running Reth without a Consensus Layer via the `--debug.tip ` parameter. If you provide that to your node, it will simulate sending an `engine_forkchoiceUpdated` message _once_ and will trigger syncing to the provided block hash. This is useful for testing and debugging purposes, but in order to have a node that can keep up with the tip you'll need to run a CL alongside it. At the moment we have no plans of including a Consensus Layer implementation in Reth, and we are open to including light clients other methods of syncing like importing Lighthouse as a library. 
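A minimal invocation sketch for the `--debug.tip` workflow described in the book section patched above; `<CANONICAL_BLOCK_HASH>` is a placeholder, not a value taken from this patch.

```bash
# One-shot sync target without a consensus client:
# reth simulates a single engine_forkchoiceUpdated toward the given hash.
# <CANONICAL_BLOCK_HASH> stands in for any block hash on the canonical chain.
reth node --debug.tip <CANONICAL_BLOCK_HASH>
```

As the book section notes, this only targets the given hash once; keeping up with the live tip still requires running a consensus client alongside the node.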
From 998b3b3d3a57b3a567166dda57c4f2d856539ec3 Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Thu, 31 Oct 2024 13:21:38 -0600 Subject: [PATCH 367/425] renamed OptimismPayloadBuilder to OpPayloadBuilder (#12234) --- crates/optimism/node/src/lib.rs | 2 +- crates/optimism/node/src/node.rs | 17 ++++++++--------- crates/optimism/payload/src/builder.rs | 12 ++++++------ crates/optimism/payload/src/lib.rs | 2 +- 4 files changed, 16 insertions(+), 17 deletions(-) diff --git a/crates/optimism/node/src/lib.rs b/crates/optimism/node/src/lib.rs index 9bc15ef26..ff25e7173 100644 --- a/crates/optimism/node/src/lib.rs +++ b/crates/optimism/node/src/lib.rs @@ -23,7 +23,7 @@ pub use node::OptimismNode; pub mod txpool; pub use reth_optimism_payload_builder::{ - OpBuiltPayload, OpPayloadBuilderAttributes, OptimismPayloadBuilder, + OpBuiltPayload, OpPayloadBuilder, OpPayloadBuilderAttributes, }; pub use reth_optimism_evm::*; diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 4328a55fb..8e06dbcc5 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -67,7 +67,7 @@ impl OptimismNode { ) -> ComponentsBuilder< Node, OptimismPoolBuilder, - OptimismPayloadBuilder, + OpPayloadBuilder, OptimismNetworkBuilder, OptimismExecutorBuilder, OptimismConsensusBuilder, @@ -81,7 +81,7 @@ impl OptimismNode { ComponentsBuilder::default() .node_types::() .pool(OptimismPoolBuilder::default()) - .payload(OptimismPayloadBuilder::new(compute_pending_block)) + .payload(OpPayloadBuilder::new(compute_pending_block)) .network(OptimismNetworkBuilder { disable_txpool_gossip, disable_discovery_v4: !discovery_v4, @@ -100,7 +100,7 @@ where type ComponentsBuilder = ComponentsBuilder< N, OptimismPoolBuilder, - OptimismPayloadBuilder, + OpPayloadBuilder, OptimismNetworkBuilder, OptimismExecutorBuilder, OptimismConsensusBuilder, @@ -288,7 +288,7 @@ where /// A basic optimism payload service builder #[derive(Debug, Default, Clone)] -pub struct OptimismPayloadBuilder { +pub struct OpPayloadBuilder { /// By default the pending block equals the latest block /// to save resources and not leak txs from the tx-pool, /// this flag enables computing of the pending block @@ -300,7 +300,7 @@ pub struct OptimismPayloadBuilder { pub compute_pending_block: bool, } -impl OptimismPayloadBuilder { +impl OpPayloadBuilder { /// Create a new instance with the given `compute_pending_block` flag. pub const fn new(compute_pending_block: bool) -> Self { Self { compute_pending_block } @@ -320,9 +320,8 @@ impl OptimismPayloadBuilder { Pool: TransactionPool + Unpin + 'static, Evm: ConfigureEvm
, { - let payload_builder = - reth_optimism_payload_builder::OptimismPayloadBuilder::new(evm_config) - .set_compute_pending_block(self.compute_pending_block); + let payload_builder = reth_optimism_payload_builder::OpPayloadBuilder::new(evm_config) + .set_compute_pending_block(self.compute_pending_block); let conf = ctx.payload_builder_config(); let payload_job_config = BasicPayloadJobGeneratorConfig::default() @@ -348,7 +347,7 @@ impl OptimismPayloadBuilder { } } -impl PayloadServiceBuilder for OptimismPayloadBuilder +impl PayloadServiceBuilder for OpPayloadBuilder where Node: FullNodeTypes< Types: NodeTypesWithEngine, diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index a1fa3d47b..09a443d0f 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -39,7 +39,7 @@ use op_alloy_consensus::DepositTransaction; /// Optimism's payload builder #[derive(Debug, Clone, PartialEq, Eq)] -pub struct OptimismPayloadBuilder { +pub struct OpPayloadBuilder { /// The rollup's compute pending block configuration option. // TODO(clabby): Implement this feature. pub compute_pending_block: bool, @@ -47,8 +47,8 @@ pub struct OptimismPayloadBuilder { pub evm_config: EvmConfig, } -impl OptimismPayloadBuilder { - /// `OptimismPayloadBuilder` constructor. +impl OpPayloadBuilder { + /// `OpPayloadBuilder` constructor. pub const fn new(evm_config: EvmConfig) -> Self { Self { compute_pending_block: true, evm_config } } @@ -69,7 +69,7 @@ impl OptimismPayloadBuilder { self.compute_pending_block } } -impl OptimismPayloadBuilder +impl OpPayloadBuilder where EvmConfig: ConfigureEvmEnv
, { @@ -89,8 +89,8 @@ where } } -/// Implementation of the [`PayloadBuilder`] trait for [`OptimismPayloadBuilder`]. -impl PayloadBuilder for OptimismPayloadBuilder +/// Implementation of the [`PayloadBuilder`] trait for [`OpPayloadBuilder`]. +impl PayloadBuilder for OpPayloadBuilder where Client: StateProviderFactory + ChainSpecProvider, Pool: TransactionPool, diff --git a/crates/optimism/payload/src/lib.rs b/crates/optimism/payload/src/lib.rs index e1f55e51b..8447026d7 100644 --- a/crates/optimism/payload/src/lib.rs +++ b/crates/optimism/payload/src/lib.rs @@ -12,7 +12,7 @@ #![cfg(feature = "optimism")] pub mod builder; -pub use builder::OptimismPayloadBuilder; +pub use builder::OpPayloadBuilder; pub mod error; pub mod payload; pub use payload::{OpBuiltPayload, OpPayloadAttributes, OpPayloadBuilderAttributes}; From d020b41f6aa053dd6a13d3647f7ad708ee35dc66 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 31 Oct 2024 23:22:42 +0400 Subject: [PATCH 368/425] feat: `flashbots_validateBuilderSubmissionV3` (#12168) --- Cargo.lock | 8 + book/cli/reth/node.md | 3 + crates/cli/util/Cargo.toml | 1 + crates/cli/util/src/parsers.rs | 7 + crates/e2e-test-utils/src/lib.rs | 8 +- crates/ethereum/consensus/src/lib.rs | 4 +- crates/ethereum/node/Cargo.toml | 1 + crates/ethereum/node/tests/e2e/rpc.rs | 86 +++- crates/node/builder/src/rpc.rs | 3 + crates/node/core/src/args/rpc_server.rs | 14 + crates/payload/validator/src/lib.rs | 4 +- crates/rpc/rpc-api/Cargo.toml | 2 + crates/rpc/rpc-api/src/lib.rs | 2 +- crates/rpc/rpc-api/src/validation.rs | 21 +- crates/rpc/rpc-builder/Cargo.toml | 1 + crates/rpc/rpc-builder/src/config.rs | 10 +- crates/rpc/rpc-builder/src/lib.rs | 322 +++++++++++--- crates/rpc/rpc-builder/tests/it/utils.rs | 3 + .../rpc/rpc-eth-types/src/builder/config.rs | 2 +- crates/rpc/rpc-server-types/src/result.rs | 2 + crates/rpc/rpc/Cargo.toml | 5 +- crates/rpc/rpc/src/lib.rs | 2 +- crates/rpc/rpc/src/validation.rs | 400 ++++++++++++++++-- examples/rpc-db/src/main.rs | 6 +- 24 files changed, 801 insertions(+), 116 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 27581ba3a..5fc3574eb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6697,6 +6697,7 @@ dependencies = [ "rand 0.8.5", "reth-fs-util", "secp256k1", + "serde", "thiserror", "tikv-jemallocator", "tracy-client", @@ -7997,6 +7998,7 @@ dependencies = [ "alloy-genesis", "alloy-primitives", "alloy-provider", + "alloy-rpc-types-beacon", "alloy-signer", "alloy-sol-types", "eyre", @@ -8626,13 +8628,16 @@ dependencies = [ "pin-project", "rand 0.8.5", "reth-chainspec", + "reth-consensus", "reth-consensus-common", "reth-errors", + "reth-ethereum-consensus", "reth-evm", "reth-evm-ethereum", "reth-network-api", "reth-network-peers", "reth-network-types", + "reth-payload-validator", "reth-primitives", "reth-provider", "reth-revm", @@ -8682,6 +8687,8 @@ dependencies = [ "reth-network-peers", "reth-primitives", "reth-rpc-eth-api", + "serde", + "serde_with", ] [[package]] @@ -8721,6 +8728,7 @@ dependencies = [ "pin-project", "reth-beacon-consensus", "reth-chainspec", + "reth-consensus", "reth-engine-primitives", "reth-ethereum-engine-primitives", "reth-evm", diff --git a/book/cli/reth/node.md b/book/cli/reth/node.md index 52f597279..5f0090ef8 100644 --- a/book/cli/reth/node.md +++ b/book/cli/reth/node.md @@ -367,6 +367,9 @@ RPC: [default: 25] + --builder.disallow + Path to file containing disallowed addresses, json-encoded list of strings. 
Block validation API will reject blocks containing transactions from these addresses + RPC State Cache: --rpc-cache.max-blocks Max number of blocks in cache diff --git a/crates/cli/util/Cargo.toml b/crates/cli/util/Cargo.toml index d96a882a6..70515f83b 100644 --- a/crates/cli/util/Cargo.toml +++ b/crates/cli/util/Cargo.toml @@ -24,6 +24,7 @@ eyre.workspace = true rand.workspace = true secp256k1 = { workspace = true, features = ["rand"] } thiserror.workspace = true +serde.workspace = true tracy-client = { workspace = true, optional = true, features = ["demangle"] } diff --git a/crates/cli/util/src/parsers.rs b/crates/cli/util/src/parsers.rs index 202744a4b..9bb803bcc 100644 --- a/crates/cli/util/src/parsers.rs +++ b/crates/cli/util/src/parsers.rs @@ -1,7 +1,9 @@ use alloy_eips::BlockHashOrNumber; use alloy_primitives::B256; +use reth_fs_util::FsPathError; use std::{ net::{IpAddr, Ipv4Addr, SocketAddr, ToSocketAddrs}, + path::Path, str::FromStr, time::Duration, }; @@ -82,6 +84,11 @@ pub fn parse_socket_address(value: &str) -> eyre::Result(path: &str) -> Result { + reth_fs_util::read_json_file(Path::new(path)) +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/e2e-test-utils/src/lib.rs b/crates/e2e-test-utils/src/lib.rs index 1e9717d08..1e9b39058 100644 --- a/crates/e2e-test-utils/src/lib.rs +++ b/crates/e2e-test-utils/src/lib.rs @@ -7,6 +7,7 @@ use reth::{ args::{DiscoveryArgs, NetworkArgs, RpcServerArgs}, builder::{NodeBuilder, NodeConfig, NodeHandle}, network::PeersHandleProvider, + rpc::server_types::RpcModuleSelection, tasks::TaskManager, }; use reth_chainspec::{EthChainSpec, EthereumHardforks}; @@ -147,7 +148,12 @@ where let node_config = NodeConfig::new(chain_spec.clone()) .with_network(network_config.clone()) .with_unused_ports() - .with_rpc(RpcServerArgs::default().with_unused_ports().with_http()) + .with_rpc( + RpcServerArgs::default() + .with_unused_ports() + .with_http() + .with_http_api(RpcModuleSelection::All), + ) .set_dev(is_dev); let span = span!(Level::INFO, "node", idx); diff --git a/crates/ethereum/consensus/src/lib.rs b/crates/ethereum/consensus/src/lib.rs index dd286584a..07c2a71e8 100644 --- a/crates/ethereum/consensus/src/lib.rs +++ b/crates/ethereum/consensus/src/lib.rs @@ -24,7 +24,7 @@ use reth_primitives::{ use std::{fmt::Debug, sync::Arc, time::SystemTime}; /// The bound divisor of the gas limit, used in update calculations. -const GAS_LIMIT_BOUND_DIVISOR: u64 = 1024; +pub const GAS_LIMIT_BOUND_DIVISOR: u64 = 1024; mod validation; pub use validation::validate_block_post_execution; @@ -32,7 +32,7 @@ pub use validation::validate_block_post_execution; /// Ethereum beacon consensus /// /// This consensus engine does basic checks as outlined in the execution specs. 
-#[derive(Debug)] +#[derive(Debug, Clone)] pub struct EthBeaconConsensus { /// Configuration chain_spec: Arc, diff --git a/crates/ethereum/node/Cargo.toml b/crates/ethereum/node/Cargo.toml index 83b620347..49663ffc2 100644 --- a/crates/ethereum/node/Cargo.toml +++ b/crates/ethereum/node/Cargo.toml @@ -59,6 +59,7 @@ alloy-signer.workspace = true alloy-eips.workspace = true alloy-sol-types.workspace = true alloy-contract.workspace = true +alloy-rpc-types-beacon.workspace = true [features] default = [] diff --git a/crates/ethereum/node/tests/e2e/rpc.rs b/crates/ethereum/node/tests/e2e/rpc.rs index ddf3d5cba..c8b127b9b 100644 --- a/crates/ethereum/node/tests/e2e/rpc.rs +++ b/crates/ethereum/node/tests/e2e/rpc.rs @@ -1,8 +1,14 @@ use crate::utils::eth_payload_attributes; -use alloy_eips::calc_next_block_base_fee; -use alloy_primitives::U256; -use alloy_provider::{network::EthereumWallet, Provider, ProviderBuilder}; +use alloy_eips::{calc_next_block_base_fee, eip2718::Encodable2718}; +use alloy_primitives::{Address, B256, U256}; +use alloy_provider::{network::EthereumWallet, Provider, ProviderBuilder, SendableTx}; +use alloy_rpc_types_beacon::relay::{BidTrace, SignedBidSubmissionV3}; use rand::{rngs::StdRng, Rng, SeedableRng}; +use reth::rpc::{ + api::BuilderBlockValidationRequestV3, + compat::engine::payload::block_to_payload_v3, + types::{engine::BlobsBundleV1, TransactionRequest}, +}; use reth_chainspec::{ChainSpecBuilder, MAINNET}; use reth_e2e_test_utils::setup_engine; use reth_node_ethereum::EthereumNode; @@ -107,3 +113,77 @@ async fn test_fee_history() -> eyre::Result<()> { Ok(()) } + +#[tokio::test] +async fn test_flashbots_validate() -> eyre::Result<()> { + reth_tracing::init_test_tracing(); + + let chain_spec = Arc::new( + ChainSpecBuilder::default() + .chain(MAINNET.chain) + .genesis(serde_json::from_str(include_str!("../assets/genesis.json")).unwrap()) + .cancun_activated() + .build(), + ); + + let (mut nodes, _tasks, wallet) = + setup_engine::(1, chain_spec.clone(), false, eth_payload_attributes).await?; + let mut node = nodes.pop().unwrap(); + let provider = ProviderBuilder::new() + .with_recommended_fillers() + .wallet(EthereumWallet::new(wallet.gen().swap_remove(0))) + .on_http(node.rpc_url()); + + node.advance(100, |_| { + let provider = provider.clone(); + Box::pin(async move { + let SendableTx::Envelope(tx) = + provider.fill(TransactionRequest::default().to(Address::ZERO)).await.unwrap() + else { + unreachable!() + }; + + tx.encoded_2718().into() + }) + }) + .await?; + + let _ = provider.send_transaction(TransactionRequest::default().to(Address::ZERO)).await?; + let (payload, attrs) = node.new_payload().await?; + + let mut request = BuilderBlockValidationRequestV3 { + request: SignedBidSubmissionV3 { + message: BidTrace { + parent_hash: payload.block().parent_hash, + block_hash: payload.block().hash(), + gas_used: payload.block().gas_used, + gas_limit: payload.block().gas_limit, + ..Default::default() + }, + execution_payload: block_to_payload_v3(payload.block().clone()), + blobs_bundle: BlobsBundleV1::new([]), + signature: Default::default(), + }, + parent_beacon_block_root: attrs.parent_beacon_block_root.unwrap(), + registered_gas_limit: payload.block().gas_limit, + }; + + assert!(provider + .raw_request::<_, ()>("flashbots_validateBuilderSubmissionV3".into(), (&request,)) + .await + .is_ok()); + + request.registered_gas_limit -= 1; + assert!(provider + .raw_request::<_, ()>("flashbots_validateBuilderSubmissionV3".into(), (&request,)) + .await + .is_err()); + 
request.registered_gas_limit += 1; + + request.request.execution_payload.payload_inner.payload_inner.state_root = B256::ZERO; + assert!(provider + .raw_request::<_, ()>("flashbots_validateBuilderSubmissionV3".into(), (&request,)) + .await + .is_err()); + Ok(()) +} diff --git a/crates/node/builder/src/rpc.rs b/crates/node/builder/src/rpc.rs index 4c1ea32d0..8819aa4ac 100644 --- a/crates/node/builder/src/rpc.rs +++ b/crates/node/builder/src/rpc.rs @@ -198,6 +198,7 @@ pub struct RpcRegistry { Node::Provider, EthApi, Node::Executor, + Node::Consensus, >, } @@ -214,6 +215,7 @@ where Node::Provider, EthApi, Node::Executor, + Node::Consensus, >; fn deref(&self) -> &Self::Target { @@ -442,6 +444,7 @@ where .with_executor(node.task_executor().clone()) .with_evm_config(node.evm_config().clone()) .with_block_executor(node.block_executor().clone()) + .with_consensus(node.consensus().clone()) .build_with_auth_server(module_config, engine_api, eth_api_builder); // in dev mode we generate 20 random dev-signer accounts diff --git a/crates/node/core/src/args/rpc_server.rs b/crates/node/core/src/args/rpc_server.rs index 382f22d37..fe9b80cec 100644 --- a/crates/node/core/src/args/rpc_server.rs +++ b/crates/node/core/src/args/rpc_server.rs @@ -1,11 +1,13 @@ //! clap [Args](clap::Args) for RPC related arguments. use std::{ + collections::HashSet, ffi::OsStr, net::{IpAddr, Ipv4Addr}, path::PathBuf, }; +use alloy_primitives::Address; use alloy_rpc_types_engine::JwtSecret; use clap::{ builder::{PossibleValue, RangedU64ValueParser, TypedValueParser}, @@ -183,6 +185,11 @@ pub struct RpcServerArgs { #[arg(long = "rpc.proof-permits", alias = "rpc-proof-permits", value_name = "COUNT", default_value_t = constants::DEFAULT_PROOF_PERMITS)] pub rpc_proof_permits: usize, + /// Path to file containing disallowed addresses, json-encoded list of strings. Block + /// validation API will reject blocks containing transactions from these addresses. + #[arg(long = "builder.disallow", value_name = "PATH", value_parser = reth_cli_util::parsers::read_json_from_file::>)] + pub builder_disallow: Option>, + /// State cache configuration. #[command(flatten)] pub rpc_state_cache: RpcStateCacheArgs, @@ -199,6 +206,12 @@ impl RpcServerArgs { self } + /// Configures modules for the HTTP-RPC server. + pub fn with_http_api(mut self, http_api: RpcModuleSelection) -> Self { + self.http_api = Some(http_api); + self + } + /// Enables the WS-RPC server. pub const fn with_ws(mut self) -> Self { self.ws = true; @@ -318,6 +331,7 @@ impl Default for RpcServerArgs { gas_price_oracle: GasPriceOracleArgs::default(), rpc_state_cache: RpcStateCacheArgs::default(), rpc_proof_permits: constants::DEFAULT_PROOF_PERMITS, + builder_disallow: Default::default(), } } } diff --git a/crates/payload/validator/src/lib.rs b/crates/payload/validator/src/lib.rs index 9952815fd..38e53bac4 100644 --- a/crates/payload/validator/src/lib.rs +++ b/crates/payload/validator/src/lib.rs @@ -23,7 +23,7 @@ pub struct ExecutionPayloadValidator { chain_spec: Arc, } -impl ExecutionPayloadValidator { +impl ExecutionPayloadValidator { /// Create a new validator. pub const fn new(chain_spec: Arc) -> Self { Self { chain_spec } @@ -34,7 +34,9 @@ impl ExecutionPayloadValidator { pub fn chain_spec(&self) -> &ChainSpec { &self.chain_spec } +} +impl ExecutionPayloadValidator { /// Returns true if the Cancun hardfork is active at the given timestamp. 
#[inline] fn is_cancun_active_at_timestamp(&self, timestamp: u64) -> bool { diff --git a/crates/rpc/rpc-api/Cargo.toml b/crates/rpc/rpc-api/Cargo.toml index 363e22955..60146e8b2 100644 --- a/crates/rpc/rpc-api/Cargo.toml +++ b/crates/rpc/rpc-api/Cargo.toml @@ -36,6 +36,8 @@ alloy-rpc-types-engine.workspace = true # misc jsonrpsee = { workspace = true, features = ["server", "macros"] } +serde.workspace = true +serde_with.workspace = true [features] client = [ diff --git a/crates/rpc/rpc-api/src/lib.rs b/crates/rpc/rpc-api/src/lib.rs index 73775112d..63e6e5446 100644 --- a/crates/rpc/rpc-api/src/lib.rs +++ b/crates/rpc/rpc-api/src/lib.rs @@ -46,7 +46,7 @@ pub mod servers { rpc::RpcApiServer, trace::TraceApiServer, txpool::TxPoolApiServer, - validation::BlockSubmissionValidationApiServer, + validation::{BlockSubmissionValidationApiServer, BuilderBlockValidationRequestV3}, web3::Web3ApiServer, }; pub use reth_rpc_eth_api::{ diff --git a/crates/rpc/rpc-api/src/validation.rs b/crates/rpc/rpc-api/src/validation.rs index e1819dde4..d8f55b668 100644 --- a/crates/rpc/rpc-api/src/validation.rs +++ b/crates/rpc/rpc-api/src/validation.rs @@ -1,9 +1,28 @@ //! API for block submission validation. +use alloy_primitives::B256; use alloy_rpc_types_beacon::relay::{ - BuilderBlockValidationRequest, BuilderBlockValidationRequestV2, BuilderBlockValidationRequestV3, + BuilderBlockValidationRequest, BuilderBlockValidationRequestV2, SignedBidSubmissionV3, }; use jsonrpsee::proc_macros::rpc; +use serde::{Deserialize, Serialize}; +use serde_with::{serde_as, DisplayFromStr}; + +/// A Request to validate a [`SignedBidSubmissionV3`] +/// +/// +#[serde_as] +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct BuilderBlockValidationRequestV3 { + /// The request to be validated. + #[serde(flatten)] + pub request: SignedBidSubmissionV3, + /// The registered gas limit for the validation request. + #[serde_as(as = "DisplayFromStr")] + pub registered_gas_limit: u64, + /// The parent beacon block root for the validation request. + pub parent_beacon_block_root: B256, +} /// Block validation rpc interface. #[cfg_attr(not(feature = "client"), rpc(server, namespace = "flashbots"))] diff --git a/crates/rpc/rpc-builder/Cargo.toml b/crates/rpc/rpc-builder/Cargo.toml index b9b511a07..711e44381 100644 --- a/crates/rpc/rpc-builder/Cargo.toml +++ b/crates/rpc/rpc-builder/Cargo.toml @@ -15,6 +15,7 @@ workspace = true # reth reth-ipc.workspace = true reth-chainspec.workspace = true +reth-consensus.workspace = true reth-network-api.workspace = true reth-node-core.workspace = true reth-provider.workspace = true diff --git a/crates/rpc/rpc-builder/src/config.rs b/crates/rpc/rpc-builder/src/config.rs index 4ff98ae8d..daff81fa2 100644 --- a/crates/rpc/rpc-builder/src/config.rs +++ b/crates/rpc/rpc-builder/src/config.rs @@ -2,6 +2,7 @@ use std::{net::SocketAddr, path::PathBuf}; use jsonrpsee::server::ServerBuilder; use reth_node_core::{args::RpcServerArgs, utils::get_or_create_jwt_secret_from_path}; +use reth_rpc::ValidationApiConfig; use reth_rpc_eth_types::{EthConfig, EthStateCacheConfig, GasPriceOracleConfig}; use reth_rpc_layer::{JwtError, JwtSecret}; use reth_rpc_server_types::RpcModuleSelection; @@ -27,6 +28,9 @@ pub trait RethRpcServerConfig { /// The configured ethereum RPC settings. fn eth_config(&self) -> EthConfig; + /// The configured ethereum RPC settings. + fn flashbots_config(&self) -> ValidationApiConfig; + /// Returns state cache configuration. 
fn state_cache_config(&self) -> EthStateCacheConfig; @@ -101,6 +105,10 @@ impl RethRpcServerConfig for RpcServerArgs { .proof_permits(self.rpc_proof_permits) } + fn flashbots_config(&self) -> ValidationApiConfig { + ValidationApiConfig { disallow: self.builder_disallow.clone().unwrap_or_default() } + } + fn state_cache_config(&self) -> EthStateCacheConfig { EthStateCacheConfig { max_blocks: self.rpc_state_cache.max_blocks, @@ -124,7 +132,7 @@ impl RethRpcServerConfig for RpcServerArgs { fn transport_rpc_module_config(&self) -> TransportRpcModuleConfig { let mut config = TransportRpcModuleConfig::default() - .with_config(RpcModuleConfig::new(self.eth_config())); + .with_config(RpcModuleConfig::new(self.eth_config(), self.flashbots_config())); if self.http { config = config.with_http( diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index 787dce08b..5d5fd31aa 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -27,13 +27,14 @@ //! use reth_tasks::TokioTaskExecutor; //! use reth_transaction_pool::TransactionPool; //! -//! pub async fn launch( +//! pub async fn launch( //! provider: Provider, //! pool: Pool, //! network: Network, //! events: Events, //! evm_config: EvmConfig, //! block_executor: BlockExecutor, +//! consensus: Consensus, //! ) where //! Provider: FullRpcProvider + AccountReader + ChangeSetReader, //! Pool: TransactionPool + Unpin + 'static, @@ -41,6 +42,7 @@ //! Events: CanonStateSubscriptions + Clone + 'static, //! EvmConfig: ConfigureEvm
, //! BlockExecutor: BlockExecutorProvider, +//! Consensus: reth_consensus::Consensus + Clone + 'static, //! { //! // configure the rpc module per transport //! let transports = TransportRpcModuleConfig::default().with_http(vec![ @@ -57,6 +59,7 @@ //! events, //! evm_config, //! block_executor, +//! consensus, //! ) //! .build(transports, Box::new(EthApi::with_spawner)); //! let handle = RpcServerConfig::default() @@ -95,6 +98,7 @@ //! EngineT, //! EvmConfig, //! BlockExecutor, +//! Consensus, //! >( //! provider: Provider, //! pool: Pool, @@ -103,6 +107,7 @@ //! engine_api: EngineApi, //! evm_config: EvmConfig, //! block_executor: BlockExecutor, +//! consensus: Consensus, //! ) where //! Provider: FullRpcProvider + AccountReader + ChangeSetReader, //! Pool: TransactionPool + Unpin + 'static, @@ -112,6 +117,7 @@ //! EngineT: EngineTypes, //! EvmConfig: ConfigureEvm
, //! BlockExecutor: BlockExecutorProvider, +//! Consensus: reth_consensus::Consensus + Clone + 'static, //! { //! // configure the rpc module per transport //! let transports = TransportRpcModuleConfig::default().with_http(vec![ @@ -128,6 +134,7 @@ //! events, //! evm_config, //! block_executor, +//! consensus, //! ); //! //! // configure the server modules @@ -155,6 +162,7 @@ use std::{ collections::HashMap, fmt::Debug, net::{Ipv4Addr, SocketAddr, SocketAddrV4}, + sync::Arc, time::{Duration, SystemTime, UNIX_EPOCH}, }; @@ -170,6 +178,7 @@ use jsonrpsee::{ Methods, RpcModule, }; use reth_chainspec::EthereumHardforks; +use reth_consensus::Consensus; use reth_engine_primitives::EngineTypes; use reth_evm::{execute::BlockExecutorProvider, ConfigureEvm}; use reth_network_api::{noop::NoopNetwork, NetworkInfo, Peers}; @@ -180,7 +189,7 @@ use reth_provider::{ }; use reth_rpc::{ AdminApi, DebugApi, EngineEthApi, EthBundle, NetApi, OtterscanApi, RPCApi, RethApi, TraceApi, - TxPoolApi, ValidationApi, Web3Api, + TxPoolApi, ValidationApi, ValidationApiConfig, Web3Api, }; use reth_rpc_api::servers::*; use reth_rpc_eth_api::{ @@ -243,6 +252,7 @@ pub async fn launch, block_executor: BlockExecutor, + consensus: Arc, ) -> Result where Provider: FullRpcProvider + AccountReader + ChangeSetReader, @@ -266,6 +276,7 @@ where events, evm_config, block_executor, + consensus, ) .build(module_config, eth), ) @@ -276,7 +287,16 @@ where /// /// This is the main entrypoint and the easiest way to configure an RPC server. #[derive(Debug, Clone)] -pub struct RpcModuleBuilder { +pub struct RpcModuleBuilder< + Provider, + Pool, + Network, + Tasks, + Events, + EvmConfig, + BlockExecutor, + Consensus, +> { /// The Provider type to when creating all rpc handlers provider: Provider, /// The Pool type to when creating all rpc handlers @@ -291,14 +311,17 @@ pub struct RpcModuleBuilder - RpcModuleBuilder +impl + RpcModuleBuilder { /// Create a new instance of the builder + #[allow(clippy::too_many_arguments)] pub const fn new( provider: Provider, pool: Pool, @@ -307,32 +330,54 @@ impl events: Events, evm_config: EvmConfig, block_executor: BlockExecutor, + consensus: Consensus, ) -> Self { - Self { provider, pool, network, executor, events, evm_config, block_executor } + Self { provider, pool, network, executor, events, evm_config, block_executor, consensus } } /// Configure the provider instance. pub fn with_provider
<P>
( self, provider: P, - ) -> RpcModuleBuilder + ) -> RpcModuleBuilder where P: BlockReader + StateProviderFactory + EvmEnvProvider + 'static, { - let Self { pool, network, executor, events, evm_config, block_executor, .. } = self; - RpcModuleBuilder { provider, network, pool, executor, events, evm_config, block_executor } + let Self { pool, network, executor, events, evm_config, block_executor, consensus, .. } = + self; + RpcModuleBuilder { + provider, + network, + pool, + executor, + events, + evm_config, + block_executor, + consensus, + } } /// Configure the transaction pool instance. pub fn with_pool
<P>
( self, pool: P, - ) -> RpcModuleBuilder + ) -> RpcModuleBuilder where P: TransactionPool + 'static, { - let Self { provider, network, executor, events, evm_config, block_executor, .. } = self; - RpcModuleBuilder { provider, network, pool, executor, events, evm_config, block_executor } + let Self { + provider, network, executor, events, evm_config, block_executor, consensus, .. + } = self; + RpcModuleBuilder { + provider, + network, + pool, + executor, + events, + evm_config, + block_executor, + consensus, + } } /// Configure a [`NoopTransactionPool`] instance. @@ -350,8 +395,11 @@ impl Events, EvmConfig, BlockExecutor, + Consensus, > { - let Self { provider, executor, events, network, evm_config, block_executor, .. } = self; + let Self { + provider, executor, events, network, evm_config, block_executor, consensus, .. + } = self; RpcModuleBuilder { provider, executor, @@ -360,6 +408,7 @@ impl evm_config, block_executor, pool: NoopTransactionPool::default(), + consensus, } } @@ -367,12 +416,23 @@ impl pub fn with_network( self, network: N, - ) -> RpcModuleBuilder + ) -> RpcModuleBuilder where N: NetworkInfo + Peers + 'static, { - let Self { provider, pool, executor, events, evm_config, block_executor, .. } = self; - RpcModuleBuilder { provider, network, pool, executor, events, evm_config, block_executor } + let Self { + provider, pool, executor, events, evm_config, block_executor, consensus, .. + } = self; + RpcModuleBuilder { + provider, + network, + pool, + executor, + events, + evm_config, + block_executor, + consensus, + } } /// Configure a [`NoopNetwork`] instance. @@ -382,9 +442,19 @@ impl /// [`EthApi`](reth_rpc::eth::EthApi) which requires a [`NetworkInfo`] implementation. pub fn with_noop_network( self, - ) -> RpcModuleBuilder - { - let Self { provider, pool, executor, events, evm_config, block_executor, .. } = self; + ) -> RpcModuleBuilder< + Provider, + Pool, + NoopNetwork, + Tasks, + Events, + EvmConfig, + BlockExecutor, + Consensus, + > { + let Self { + provider, pool, executor, events, evm_config, block_executor, consensus, .. + } = self; RpcModuleBuilder { provider, pool, @@ -393,6 +463,7 @@ impl network: NoopNetwork::default(), evm_config, block_executor, + consensus, } } @@ -400,12 +471,22 @@ impl pub fn with_executor( self, executor: T, - ) -> RpcModuleBuilder + ) -> RpcModuleBuilder where T: TaskSpawner + 'static, { - let Self { pool, network, provider, events, evm_config, block_executor, .. } = self; - RpcModuleBuilder { provider, network, pool, executor, events, evm_config, block_executor } + let Self { pool, network, provider, events, evm_config, block_executor, consensus, .. } = + self; + RpcModuleBuilder { + provider, + network, + pool, + executor, + events, + evm_config, + block_executor, + consensus, + } } /// Configure [`TokioTaskExecutor`] as the task executor to use for additional tasks. @@ -422,8 +503,10 @@ impl Events, EvmConfig, BlockExecutor, + Consensus, > { - let Self { pool, network, provider, events, evm_config, block_executor, .. } = self; + let Self { pool, network, provider, events, evm_config, block_executor, consensus, .. } = + self; RpcModuleBuilder { provider, network, @@ -432,6 +515,7 @@ impl executor: TokioTaskExecutor::default(), evm_config, block_executor, + consensus, } } @@ -439,41 +523,90 @@ impl pub fn with_events( self, events: E, - ) -> RpcModuleBuilder + ) -> RpcModuleBuilder where E: CanonStateSubscriptions + 'static, { - let Self { provider, pool, executor, network, evm_config, block_executor, .. 
} = self; - RpcModuleBuilder { provider, network, pool, executor, events, evm_config, block_executor } + let Self { + provider, pool, executor, network, evm_config, block_executor, consensus, .. + } = self; + RpcModuleBuilder { + provider, + network, + pool, + executor, + events, + evm_config, + block_executor, + consensus, + } } /// Configure the evm configuration type pub fn with_evm_config( self, evm_config: E, - ) -> RpcModuleBuilder + ) -> RpcModuleBuilder where E: ConfigureEvm + 'static, { - let Self { provider, pool, executor, network, events, block_executor, .. } = self; - RpcModuleBuilder { provider, network, pool, executor, events, evm_config, block_executor } + let Self { provider, pool, executor, network, events, block_executor, consensus, .. } = + self; + RpcModuleBuilder { + provider, + network, + pool, + executor, + events, + evm_config, + block_executor, + consensus, + } } /// Configure the block executor provider pub fn with_block_executor( self, block_executor: BE, - ) -> RpcModuleBuilder + ) -> RpcModuleBuilder where BE: BlockExecutorProvider, { - let Self { provider, network, pool, executor, events, evm_config, .. } = self; - RpcModuleBuilder { provider, network, pool, executor, events, evm_config, block_executor } + let Self { provider, network, pool, executor, events, evm_config, consensus, .. } = self; + RpcModuleBuilder { + provider, + network, + pool, + executor, + events, + evm_config, + block_executor, + consensus, + } + } + + /// Configure the consensus implementation. + pub fn with_consensus( + self, + consensus: C, + ) -> RpcModuleBuilder { + let Self { provider, network, pool, executor, events, evm_config, block_executor, .. } = + self; + RpcModuleBuilder { + provider, + network, + pool, + executor, + events, + evm_config, + block_executor, + consensus, + } } } -impl - RpcModuleBuilder +impl + RpcModuleBuilder where Provider: FullRpcProvider + AccountReader + ChangeSetReader, Pool: TransactionPool + 'static, @@ -482,6 +615,7 @@ where Events: CanonStateSubscriptions + Clone + 'static, EvmConfig: ConfigureEvm
, BlockExecutor: BlockExecutorProvider, + Consensus: reth_consensus::Consensus + Clone + 'static, { /// Configures all [`RpcModule`]s specific to the given [`TransportRpcModuleConfig`] which can /// be used to start the transport server(s). @@ -498,14 +632,23 @@ where ) -> ( TransportRpcModules, AuthRpcModule, - RpcRegistryInner, + RpcRegistryInner, ) where EngineT: EngineTypes, EngineApi: EngineApiServer, EthApi: FullEthApiServer, { - let Self { provider, pool, network, executor, events, evm_config, block_executor } = self; + let Self { + provider, + pool, + network, + executor, + events, + evm_config, + block_executor, + consensus, + } = self; let config = module_config.config.clone().unwrap_or_default(); @@ -515,6 +658,7 @@ where network, executor, events, + consensus, config, evm_config, eth, @@ -536,6 +680,7 @@ where /// # Example /// /// ```no_run + /// use reth_consensus::noop::NoopConsensus; /// use reth_evm::ConfigureEvm; /// use reth_evm_ethereum::execute::EthExecutorProvider; /// use reth_network_api::noop::NoopNetwork; @@ -555,6 +700,7 @@ where /// .with_events(TestCanonStateSubscriptions::default()) /// .with_evm_config(evm) /// .with_block_executor(EthExecutorProvider::mainnet()) + /// .with_consensus(NoopConsensus::default()) /// .into_registry(Default::default(), Box::new(EthApi::with_spawner)); /// /// let eth_api = registry.eth_api(); @@ -564,17 +710,27 @@ where self, config: RpcModuleConfig, eth: DynEthApiBuilder, - ) -> RpcRegistryInner + ) -> RpcRegistryInner where EthApi: EthApiTypes + 'static, { - let Self { provider, pool, network, executor, events, evm_config, block_executor } = self; + let Self { + provider, + pool, + network, + executor, + events, + evm_config, + block_executor, + consensus, + } = self; RpcRegistryInner::new( provider, pool, network, executor, events, + consensus, config, evm_config, eth, @@ -594,7 +750,16 @@ where { let mut modules = TransportRpcModules::default(); - let Self { provider, pool, network, executor, events, evm_config, block_executor } = self; + let Self { + provider, + pool, + network, + executor, + events, + evm_config, + block_executor, + consensus, + } = self; if !module_config.is_empty() { let TransportRpcModuleConfig { http, ws, ipc, config } = module_config.clone(); @@ -605,6 +770,7 @@ where network, executor, events, + consensus, config.unwrap_or_default(), evm_config, eth, @@ -621,9 +787,9 @@ where } } -impl Default for RpcModuleBuilder<(), (), (), (), (), (), ()> { +impl Default for RpcModuleBuilder<(), (), (), (), (), (), (), ()> { fn default() -> Self { - Self::new((), (), (), (), (), (), ()) + Self::new((), (), (), (), (), (), (), ()) } } @@ -632,6 +798,8 @@ impl Default for RpcModuleBuilder<(), (), (), (), (), (), ()> { pub struct RpcModuleConfig { /// `eth` namespace settings eth: EthConfig, + /// `flashbots` namespace settings + flashbots: ValidationApiConfig, } // === impl RpcModuleConfig === @@ -643,8 +811,8 @@ impl RpcModuleConfig { } /// Returns a new RPC module config given the eth namespace config - pub const fn new(eth: EthConfig) -> Self { - Self { eth } + pub const fn new(eth: EthConfig, flashbots: ValidationApiConfig) -> Self { + Self { eth, flashbots } } /// Get a reference to the eth namespace config @@ -662,6 +830,7 @@ impl RpcModuleConfig { #[derive(Clone, Debug, Default)] pub struct RpcModuleConfigBuilder { eth: Option, + flashbots: Option, } // === impl RpcModuleConfigBuilder === @@ -673,10 +842,16 @@ impl RpcModuleConfigBuilder { self } + /// Configures a custom flashbots namespace config + pub fn 
flashbots(mut self, flashbots: ValidationApiConfig) -> Self { + self.flashbots = Some(flashbots); + self + } + /// Consumes the type and creates the [`RpcModuleConfig`] pub fn build(self) -> RpcModuleConfig { - let Self { eth } = self; - RpcModuleConfig { eth: eth.unwrap_or_default() } + let Self { eth, flashbots } = self; + RpcModuleConfig { eth: eth.unwrap_or_default(), flashbots: flashbots.unwrap_or_default() } } /// Get a reference to the eth namespace config, if any @@ -705,6 +880,7 @@ pub struct RpcRegistryInner< Events, EthApi: EthApiTypes, BlockExecutor, + Consensus, > { provider: Provider, pool: Pool, @@ -712,6 +888,9 @@ pub struct RpcRegistryInner< executor: Tasks, events: Events, block_executor: BlockExecutor, + consensus: Consensus, + /// Holds the configuration for the RPC modules + config: RpcModuleConfig, /// Holds a all `eth_` namespace handlers eth: EthHandlers, /// to put trace calls behind semaphore @@ -722,8 +901,8 @@ pub struct RpcRegistryInner< // === impl RpcRegistryInner === -impl - RpcRegistryInner +impl + RpcRegistryInner where Provider: StateProviderFactory + BlockReader + EvmEnvProvider + Clone + Unpin + 'static, Pool: Send + Sync + Clone + 'static, @@ -741,6 +920,7 @@ where network: Network, executor: Tasks, events: Events, + consensus: Consensus, config: RpcModuleConfig, evm_config: EvmConfig, eth_api_builder: DynEthApiBuilder< @@ -776,6 +956,8 @@ where network, eth, executor, + consensus, + config, modules: Default::default(), blocking_pool_guard, events, @@ -784,8 +966,8 @@ where } } -impl - RpcRegistryInner +impl + RpcRegistryInner where EthApi: EthApiTypes, { @@ -842,8 +1024,8 @@ where } } -impl - RpcRegistryInner +impl + RpcRegistryInner where Network: NetworkInfo + Clone + 'static, EthApi: EthApiTypes, @@ -881,8 +1063,8 @@ where } } -impl - RpcRegistryInner +impl + RpcRegistryInner where Provider: FullRpcProvider + AccountReader + ChangeSetReader, Network: NetworkInfo + Peers + Clone + 'static, @@ -994,8 +1176,8 @@ where } } -impl - RpcRegistryInner +impl + RpcRegistryInner where Provider: FullRpcProvider + AccountReader + ChangeSetReader, Network: NetworkInfo + Peers + Clone + 'static, @@ -1069,13 +1251,21 @@ where } /// Instantiates `ValidationApi` - pub fn validation_api(&self) -> ValidationApi { - ValidationApi::new(self.provider.clone()) + pub fn validation_api(&self) -> ValidationApi + where + Consensus: reth_consensus::Consensus + Clone + 'static, + { + ValidationApi::new( + self.provider.clone(), + Arc::new(self.consensus.clone()), + self.block_executor.clone(), + self.config.flashbots.clone(), + ) } } -impl - RpcRegistryInner +impl + RpcRegistryInner where Provider: FullRpcProvider + AccountReader + ChangeSetReader, Pool: TransactionPool + 'static, @@ -1084,6 +1274,7 @@ where Events: CanonStateSubscriptions + Clone + 'static, EthApi: FullEthApiServer, BlockExecutor: BlockExecutorProvider, + Consensus: reth_consensus::Consensus + Clone + 'static, { /// Configures the auth module that includes the /// * `engine_` namespace @@ -1228,9 +1419,14 @@ where .into_rpc() .into() } - RethRpcModule::Flashbots => { - ValidationApi::new(self.provider.clone()).into_rpc().into() - } + RethRpcModule::Flashbots => ValidationApi::new( + self.provider.clone(), + Arc::new(self.consensus.clone()), + self.block_executor.clone(), + self.config.flashbots.clone(), + ) + .into_rpc() + .into(), }) .clone() }) @@ -1688,7 +1884,7 @@ impl TransportRpcModuleConfig { } /// Sets a custom [`RpcModuleConfig`] for the configured modules. 
- pub const fn with_config(mut self, config: RpcModuleConfig) -> Self { + pub fn with_config(mut self, config: RpcModuleConfig) -> Self { self.config = Some(config); self } diff --git a/crates/rpc/rpc-builder/tests/it/utils.rs b/crates/rpc/rpc-builder/tests/it/utils.rs index 44614ea49..175992c0f 100644 --- a/crates/rpc/rpc-builder/tests/it/utils.rs +++ b/crates/rpc/rpc-builder/tests/it/utils.rs @@ -3,6 +3,7 @@ use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4}; use alloy_rpc_types_engine::{ClientCode, ClientVersionV1}; use reth_beacon_consensus::BeaconConsensusEngineHandle; use reth_chainspec::MAINNET; +use reth_consensus::noop::NoopConsensus; use reth_ethereum_engine_primitives::{EthEngineTypes, EthereumEngineValidator}; use reth_evm::execute::BasicBlockExecutorProvider; use reth_evm_ethereum::{execute::EthExecutionStrategyFactory, EthEvmConfig}; @@ -126,6 +127,7 @@ pub fn test_rpc_builder() -> RpcModuleBuilder< TestCanonStateSubscriptions, EthEvmConfig, BasicBlockExecutorProvider, + NoopConsensus, > { RpcModuleBuilder::default() .with_provider(NoopProvider::default()) @@ -137,4 +139,5 @@ pub fn test_rpc_builder() -> RpcModuleBuilder< .with_block_executor( BasicBlockExecutorProvider::new(EthExecutionStrategyFactory::mainnet()), ) + .with_consensus(NoopConsensus::default()) } diff --git a/crates/rpc/rpc-eth-types/src/builder/config.rs b/crates/rpc/rpc-eth-types/src/builder/config.rs index a016d0215..532c10772 100644 --- a/crates/rpc/rpc-eth-types/src/builder/config.rs +++ b/crates/rpc/rpc-eth-types/src/builder/config.rs @@ -15,7 +15,7 @@ use serde::{Deserialize, Serialize}; pub const DEFAULT_STALE_FILTER_TTL: Duration = Duration::from_secs(5 * 60); /// Additional config values for the eth namespace. -#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)] pub struct EthConfig { /// Settings for the caching layer pub cache: EthStateCacheConfig, diff --git a/crates/rpc/rpc-server-types/src/result.rs b/crates/rpc/rpc-server-types/src/result.rs index 78e643664..10ce1650a 100644 --- a/crates/rpc/rpc-server-types/src/result.rs +++ b/crates/rpc/rpc-server-types/src/result.rs @@ -4,6 +4,7 @@ use std::fmt; use alloy_rpc_types_engine::PayloadError; use jsonrpsee_core::RpcResult; +use reth_errors::ConsensusError; use reth_primitives::BlockId; /// Helper trait to easily convert various `Result` types into [`RpcResult`] @@ -102,6 +103,7 @@ macro_rules! 
impl_to_rpc_result { } impl_to_rpc_result!(PayloadError); +impl_to_rpc_result!(ConsensusError); impl_to_rpc_result!(reth_errors::RethError); impl_to_rpc_result!(reth_errors::ProviderError); impl_to_rpc_result!(reth_network_api::NetworkError); diff --git a/crates/rpc/rpc/Cargo.toml b/crates/rpc/rpc/Cargo.toml index 00799d761..876467d1f 100644 --- a/crates/rpc/rpc/Cargo.toml +++ b/crates/rpc/rpc/Cargo.toml @@ -18,6 +18,7 @@ reth-primitives = { workspace = true, features = ["secp256k1"] } reth-rpc-api.workspace = true reth-rpc-eth-api.workspace = true reth-errors.workspace = true +reth-ethereum-consensus.workspace = true reth-provider.workspace = true reth-transaction-pool.workspace = true reth-network-api.workspace = true @@ -33,12 +34,14 @@ reth-rpc-eth-types.workspace = true reth-rpc-server-types.workspace = true reth-network-types.workspace = true reth-trie.workspace = true +reth-consensus.workspace = true +reth-payload-validator.workspace = true # ethereum alloy-consensus.workspace = true alloy-signer.workspace = true alloy-signer-local.workspace = true -alloy-eips.workspace = true +alloy-eips = { workspace = true, features = ["kzg"] } alloy-dyn-abi.workspace = true alloy-genesis.workspace = true alloy-network.workspace = true diff --git a/crates/rpc/rpc/src/lib.rs b/crates/rpc/rpc/src/lib.rs index 027edea3c..76fb96f91 100644 --- a/crates/rpc/rpc/src/lib.rs +++ b/crates/rpc/rpc/src/lib.rs @@ -55,5 +55,5 @@ pub use reth::RethApi; pub use rpc::RPCApi; pub use trace::TraceApi; pub use txpool::TxPoolApi; -pub use validation::ValidationApi; +pub use validation::{ValidationApi, ValidationApiConfig}; pub use web3::Web3Api; diff --git a/crates/rpc/rpc/src/validation.rs b/crates/rpc/rpc/src/validation.rs index c6419dc12..fe9d0eb44 100644 --- a/crates/rpc/rpc/src/validation.rs +++ b/crates/rpc/rpc/src/validation.rs @@ -1,56 +1,376 @@ +use alloy_consensus::{BlobTransactionValidationError, EnvKzgSettings, Transaction}; +use alloy_eips::eip4844::kzg_to_versioned_hash; +use alloy_rpc_types::engine::{ + BlobsBundleV1, CancunPayloadFields, ExecutionPayload, ExecutionPayloadSidecar, +}; use alloy_rpc_types_beacon::relay::{ - BuilderBlockValidationRequest, BuilderBlockValidationRequestV2, BuilderBlockValidationRequestV3, + BidTrace, BuilderBlockValidationRequest, BuilderBlockValidationRequestV2, }; use async_trait::async_trait; use jsonrpsee::core::RpcResult; -use reth_chainspec::ChainSpecProvider; +use reth_chainspec::{ChainSpecProvider, EthereumHardforks}; +use reth_consensus::{Consensus, PostExecutionInput}; +use reth_errors::{BlockExecutionError, ConsensusError, ProviderError, RethError}; +use reth_ethereum_consensus::GAS_LIMIT_BOUND_DIVISOR; +use reth_evm::execute::{BlockExecutorProvider, Executor}; +use reth_payload_validator::ExecutionPayloadValidator; +use reth_primitives::{Block, GotExpected, Receipt, SealedBlockWithSenders, SealedHeader}; use reth_provider::{ - AccountReader, BlockReaderIdExt, HeaderProvider, StateProviderFactory, WithdrawalsProvider, + AccountReader, BlockExecutionInput, BlockExecutionOutput, BlockReaderIdExt, HeaderProvider, + StateProviderFactory, WithdrawalsProvider, }; -use reth_rpc_api::BlockSubmissionValidationApiServer; -use reth_rpc_server_types::result::internal_rpc_err; -use std::sync::Arc; -use tracing::warn; +use reth_revm::database::StateProviderDatabase; +use reth_rpc_api::{BlockSubmissionValidationApiServer, BuilderBlockValidationRequestV3}; +use reth_rpc_eth_types::EthApiError; +use reth_rpc_server_types::{result::internal_rpc_err, ToRpcResult}; +use 
reth_trie::HashedPostState; +use revm_primitives::{Address, B256, U256}; +use serde::{Deserialize, Serialize}; +use std::{collections::HashSet, sync::Arc}; + +/// Configuration for validation API. +#[derive(Debug, Clone, Default, Eq, PartialEq, Serialize, Deserialize)] +pub struct ValidationApiConfig { + /// Disallowed addresses. + pub disallow: HashSet<Address>
, +} + +#[derive(Debug, thiserror::Error)] +pub enum ValidationApiError { + #[error("block gas limit mismatch: {_0}")] + GasLimitMismatch(GotExpected), + #[error("block gas used mismatch: {_0}")] + GasUsedMismatch(GotExpected), + #[error("block parent hash mismatch: {_0}")] + ParentHashMismatch(GotExpected), + #[error("block hash mismatch: {_0}")] + BlockHashMismatch(GotExpected), + #[error("missing latest block in database")] + MissingLatestBlock, + #[error("could not verify proposer payment")] + ProposerPayment, + #[error("invalid blobs bundle")] + InvalidBlobsBundle, + #[error("block accesses blacklisted address: {_0}")] + Blacklist(Address), + #[error(transparent)] + Blob(#[from] BlobTransactionValidationError), + #[error(transparent)] + Consensus(#[from] ConsensusError), + #[error(transparent)] + Provider(#[from] ProviderError), + #[error(transparent)] + Execution(#[from] BlockExecutionError), +} + +#[derive(Debug, Clone)] +pub struct ValidationApiInner { + /// The provider that can interact with the chain. + provider: Provider, + /// Consensus implementation. + consensus: Arc, + /// Execution payload validator. + payload_validator: ExecutionPayloadValidator, + /// Block executor factory. + executor_provider: E, + /// Set of disallowed addresses + disallow: HashSet
, +} /// The type that implements the `validation` rpc namespace trait -pub struct ValidationApi { - inner: Arc>, +#[derive(Clone, Debug, derive_more::Deref)] +pub struct ValidationApi { + #[deref] + inner: Arc>, } -impl ValidationApi +impl ValidationApi +where + Provider: ChainSpecProvider, +{ + /// Create a new instance of the [`ValidationApi`] + pub fn new( + provider: Provider, + consensus: Arc, + executor_provider: E, + config: ValidationApiConfig, + ) -> Self { + let ValidationApiConfig { disallow } = config; + + let payload_validator = ExecutionPayloadValidator::new(provider.chain_spec()); + let inner = Arc::new(ValidationApiInner { + provider, + consensus, + payload_validator, + executor_provider, + disallow, + }); + + Self { inner } + } +} + +impl ValidationApi where Provider: BlockReaderIdExt - + ChainSpecProvider + + ChainSpecProvider + StateProviderFactory + HeaderProvider + AccountReader + WithdrawalsProvider + Clone + 'static, + E: BlockExecutorProvider, { - /// The provider that can interact with the chain. - pub fn provider(&self) -> Provider { - self.inner.provider.clone() + /// Validates the given block and a [`BidTrace`] against it. + pub fn validate_message_against_block( + &self, + block: SealedBlockWithSenders, + message: BidTrace, + registered_gas_limit: u64, + ) -> Result<(), ValidationApiError> { + self.validate_message_against_header(&block.header, &message)?; + + self.consensus.validate_header_with_total_difficulty(&block.header, U256::MAX)?; + self.consensus.validate_header(&block.header)?; + self.consensus.validate_block_pre_execution(&block)?; + + if !self.disallow.is_empty() { + if self.disallow.contains(&block.beneficiary) { + return Err(ValidationApiError::Blacklist(block.beneficiary)) + } + if self.disallow.contains(&message.proposer_fee_recipient) { + return Err(ValidationApiError::Blacklist(message.proposer_fee_recipient)) + } + for (sender, tx) in block.senders.iter().zip(block.transactions()) { + if self.disallow.contains(sender) { + return Err(ValidationApiError::Blacklist(*sender)) + } + if let Some(to) = tx.to() { + if self.disallow.contains(&to) { + return Err(ValidationApiError::Blacklist(to)) + } + } + } + } + + let latest_header = + self.provider.latest_header()?.ok_or_else(|| ValidationApiError::MissingLatestBlock)?; + + if latest_header.hash() != block.header.parent_hash { + return Err(ConsensusError::ParentHashMismatch( + GotExpected { got: block.header.parent_hash, expected: latest_header.hash() } + .into(), + ) + .into()) + } + self.consensus.validate_header_against_parent(&block.header, &latest_header)?; + self.validate_gas_limit(registered_gas_limit, &latest_header, &block.header)?; + + let state_provider = self.provider.state_by_block_hash(latest_header.hash())?; + let executor = self.executor_provider.executor(StateProviderDatabase::new(&state_provider)); + + let block = block.unseal(); + let mut accessed_blacklisted = None; + let output = executor.execute_with_state_closure( + BlockExecutionInput::new(&block, U256::MAX), + |state| { + if !self.disallow.is_empty() { + for account in state.cache.accounts.keys() { + if self.disallow.contains(account) { + accessed_blacklisted = Some(*account); + } + } + } + }, + )?; + + if let Some(account) = accessed_blacklisted { + return Err(ValidationApiError::Blacklist(account)) + } + + self.consensus.validate_block_post_execution( + &block, + PostExecutionInput::new(&output.receipts, &output.requests), + )?; + + self.ensure_payment(&block, &output, &message)?; + + let state_root = + 
state_provider.state_root(HashedPostState::from_bundle_state(&output.state.state))?; + + if state_root != block.state_root { + return Err(ConsensusError::BodyStateRootDiff( + GotExpected { got: state_root, expected: block.state_root }.into(), + ) + .into()) + } + + Ok(()) } - /// Create a new instance of the [`ValidationApi`] - pub fn new(provider: Provider) -> Self { - let inner = Arc::new(ValidationApiInner { provider }); - Self { inner } + /// Ensures that fields of [`BidTrace`] match the fields of the [`SealedHeader`]. + fn validate_message_against_header( + &self, + header: &SealedHeader, + message: &BidTrace, + ) -> Result<(), ValidationApiError> { + if header.hash() != message.block_hash { + Err(ValidationApiError::BlockHashMismatch(GotExpected { + got: message.block_hash, + expected: header.hash(), + })) + } else if header.parent_hash != message.parent_hash { + Err(ValidationApiError::ParentHashMismatch(GotExpected { + got: message.parent_hash, + expected: header.parent_hash, + })) + } else if header.gas_limit != message.gas_limit { + Err(ValidationApiError::GasLimitMismatch(GotExpected { + got: message.gas_limit, + expected: header.gas_limit, + })) + } else if header.gas_used != message.gas_used { + return Err(ValidationApiError::GasUsedMismatch(GotExpected { + got: message.gas_used, + expected: header.gas_used, + })) + } else { + Ok(()) + } + } + + /// Ensures that the chosen gas limit is the closest possible value for the validator's + /// registered gas limit. + /// + /// Ref: + fn validate_gas_limit( + &self, + registered_gas_limit: u64, + parent_header: &SealedHeader, + header: &SealedHeader, + ) -> Result<(), ValidationApiError> { + let max_gas_limit = + parent_header.gas_limit + parent_header.gas_limit / GAS_LIMIT_BOUND_DIVISOR - 1; + let min_gas_limit = + parent_header.gas_limit - parent_header.gas_limit / GAS_LIMIT_BOUND_DIVISOR + 1; + + let best_gas_limit = + std::cmp::max(min_gas_limit, std::cmp::min(max_gas_limit, registered_gas_limit)); + + if best_gas_limit != header.gas_limit { + return Err(ValidationApiError::GasLimitMismatch(GotExpected { + got: header.gas_limit, + expected: best_gas_limit, + })) + } + + Ok(()) + } + + /// Ensures that the proposer has received [`BidTrace::value`] for this block. + /// + /// Firstly attempts to verify the payment by checking the state changes, otherwise falls back + /// to checking the latest block transaction. 
+ fn ensure_payment( + &self, + block: &Block, + output: &BlockExecutionOutput, + message: &BidTrace, + ) -> Result<(), ValidationApiError> { + let (mut balance_before, balance_after) = if let Some(acc) = + output.state.state.get(&message.proposer_fee_recipient) + { + let balance_before = acc.original_info.as_ref().map(|i| i.balance).unwrap_or_default(); + let balance_after = acc.info.as_ref().map(|i| i.balance).unwrap_or_default(); + + (balance_before, balance_after) + } else { + // account might have balance but considering it zero is fine as long as we know + // that balance have not changed + (U256::ZERO, U256::ZERO) + }; + + if let Some(withdrawals) = &block.body.withdrawals { + for withdrawal in withdrawals { + if withdrawal.address == message.proposer_fee_recipient { + balance_before += withdrawal.amount_wei(); + } + } + } + + if balance_after >= balance_before + message.value { + return Ok(()) + } + + let (receipt, tx) = output + .receipts + .last() + .zip(block.body.transactions.last()) + .ok_or(ValidationApiError::ProposerPayment)?; + + if !receipt.success { + return Err(ValidationApiError::ProposerPayment) + } + + if tx.to() != Some(message.proposer_fee_recipient) { + return Err(ValidationApiError::ProposerPayment) + } + + if tx.value() != message.value { + return Err(ValidationApiError::ProposerPayment) + } + + if !tx.input().is_empty() { + return Err(ValidationApiError::ProposerPayment) + } + + if let Some(block_base_fee) = block.base_fee_per_gas { + if tx.effective_tip_per_gas(block_base_fee).unwrap_or_default() != 0 { + return Err(ValidationApiError::ProposerPayment) + } + } + + Ok(()) + } + + /// Validates the given [`BlobsBundleV1`] and returns versioned hashes for blobs. + pub fn validate_blobs_bundle( + &self, + mut blobs_bundle: BlobsBundleV1, + ) -> Result, ValidationApiError> { + if blobs_bundle.commitments.len() != blobs_bundle.proofs.len() || + blobs_bundle.commitments.len() != blobs_bundle.blobs.len() + { + return Err(ValidationApiError::InvalidBlobsBundle) + } + + let versioned_hashes = blobs_bundle + .commitments + .iter() + .map(|c| kzg_to_versioned_hash(c.as_slice())) + .collect::>(); + + let sidecar = blobs_bundle.pop_sidecar(blobs_bundle.blobs.len()); + + sidecar.validate(&versioned_hashes, EnvKzgSettings::default().get())?; + + Ok(versioned_hashes) } } #[async_trait] -impl BlockSubmissionValidationApiServer for ValidationApi +impl BlockSubmissionValidationApiServer for ValidationApi where Provider: BlockReaderIdExt - + ChainSpecProvider + + ChainSpecProvider + StateProviderFactory + HeaderProvider + AccountReader + WithdrawalsProvider + Clone + 'static, + E: BlockExecutorProvider, { async fn validate_builder_submission_v1( &self, @@ -71,24 +391,28 @@ where &self, request: BuilderBlockValidationRequestV3, ) -> RpcResult<()> { - warn!("flashbots_validateBuilderSubmissionV3: blindly accepting request without validation {:?}", request); - Ok(()) - } -} - -impl std::fmt::Debug for ValidationApi { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("ValidationApi").finish_non_exhaustive() - } -} + let block = self + .payload_validator + .ensure_well_formed_payload( + ExecutionPayload::V3(request.request.execution_payload), + ExecutionPayloadSidecar::v3(CancunPayloadFields { + parent_beacon_block_root: request.parent_beacon_block_root, + versioned_hashes: self + .validate_blobs_bundle(request.request.blobs_bundle) + .map_err(|e| RethError::Other(e.into())) + .to_rpc_result()?, + }), + ) + .to_rpc_result()? 
+ .try_seal_with_senders() + .map_err(|_| EthApiError::InvalidTransactionSignature)?; -impl Clone for ValidationApi { - fn clone(&self) -> Self { - Self { inner: Arc::clone(&self.inner) } + self.validate_message_against_block( + block, + request.request.message, + request.registered_gas_limit, + ) + .map_err(|e| RethError::Other(e.into())) + .to_rpc_result() } } - -struct ValidationApiInner { - /// The provider that can interact with the chain. - provider: Provider, -} diff --git a/examples/rpc-db/src/main.rs b/examples/rpc-db/src/main.rs index 1b2899a64..92ae86f00 100644 --- a/examples/rpc-db/src/main.rs +++ b/examples/rpc-db/src/main.rs @@ -16,6 +16,7 @@ use std::{path::Path, sync::Arc}; use reth::{ api::NodeTypesWithDBAdapter, + beacon_consensus::EthBeaconConsensus, providers::{ providers::{BlockchainProvider, StaticFileProvider}, ProviderFactory, @@ -66,9 +67,10 @@ async fn main() -> eyre::Result<()> { .with_noop_pool() .with_noop_network() .with_executor(TokioTaskExecutor::default()) - .with_evm_config(EthEvmConfig::new(spec)) + .with_evm_config(EthEvmConfig::new(spec.clone())) .with_events(TestCanonStateSubscriptions::default()) - .with_block_executor(EthExecutorProvider::ethereum(provider.chain_spec())); + .with_block_executor(EthExecutorProvider::ethereum(provider.chain_spec())) + .with_consensus(EthBeaconConsensus::new(spec)); // Pick which namespaces to expose. let config = TransportRpcModuleConfig::default().with_http([RethRpcModule::Eth]); From bd8c4eceb20c39c6e501d06cf906469329340bb9 Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Thu, 31 Oct 2024 14:20:41 -0600 Subject: [PATCH 369/425] replace DisplayHardforks with Box (#12219) Co-authored-by: Matthias Seitz --- crates/chainspec/src/api.rs | 11 +++++------ crates/optimism/chainspec/src/lib.rs | 10 +++++----- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/crates/chainspec/src/api.rs b/crates/chainspec/src/api.rs index 36640f34b..ee25f72ba 100644 --- a/crates/chainspec/src/api.rs +++ b/crates/chainspec/src/api.rs @@ -1,11 +1,10 @@ use crate::{ChainSpec, DepositContract}; -use alloc::vec::Vec; +use alloc::{boxed::Box, vec::Vec}; use alloy_chains::Chain; use alloy_eips::eip1559::BaseFeeParams; use alloy_genesis::Genesis; use alloy_primitives::B256; -use core::fmt::Debug; -use reth_ethereum_forks::DisplayHardforks; +use core::fmt::{Debug, Display}; use reth_network_peers::NodeRecord; use reth_primitives_traits::Header; @@ -39,7 +38,7 @@ pub trait EthChainSpec: Send + Sync + Unpin + Debug { fn prune_delete_limit(&self) -> usize; /// Returns a string representation of the hardforks. - fn display_hardforks(&self) -> DisplayHardforks; + fn display_hardforks(&self) -> Box; /// The genesis header. 
fn genesis_header(&self) -> &Header; @@ -89,8 +88,8 @@ impl EthChainSpec for ChainSpec { self.prune_delete_limit } - fn display_hardforks(&self) -> DisplayHardforks { - self.display_hardforks() + fn display_hardforks(&self) -> Box { + Box::new(Self::display_hardforks(self)) } fn genesis_header(&self) -> &Header { diff --git a/crates/optimism/chainspec/src/lib.rs b/crates/optimism/chainspec/src/lib.rs index 70adf2272..8ad36c66a 100644 --- a/crates/optimism/chainspec/src/lib.rs +++ b/crates/optimism/chainspec/src/lib.rs @@ -17,7 +17,7 @@ mod dev; mod op; mod op_sepolia; -use alloc::{vec, vec::Vec}; +use alloc::{boxed::Box, vec, vec::Vec}; use alloy_chains::Chain; use alloy_genesis::Genesis; use alloy_primitives::{Bytes, Parity, Signature, B256, U256}; @@ -30,8 +30,8 @@ pub(crate) use once_cell::sync::Lazy as LazyLock; pub use op::OP_MAINNET; pub use op_sepolia::OP_SEPOLIA; use reth_chainspec::{ - BaseFeeParams, BaseFeeParamsKind, ChainSpec, ChainSpecBuilder, DepositContract, - DisplayHardforks, EthChainSpec, EthereumHardforks, ForkFilter, ForkId, Hardforks, Head, + BaseFeeParams, BaseFeeParamsKind, ChainSpec, ChainSpecBuilder, DepositContract, EthChainSpec, + EthereumHardforks, ForkFilter, ForkId, Hardforks, Head, }; use reth_ethereum_forks::{ChainHardforks, EthereumHardfork, ForkCondition, Hardfork}; use reth_network_peers::NodeRecord; @@ -287,8 +287,8 @@ impl EthChainSpec for OpChainSpec { self.inner.prune_delete_limit() } - fn display_hardforks(&self) -> DisplayHardforks { - self.inner.display_hardforks() + fn display_hardforks(&self) -> Box { + Box::new(ChainSpec::display_hardforks(self)) } fn genesis_header(&self) -> &Header { From 921d1cc4b5d37195188f2afcede39fe789cf9582 Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Fri, 1 Nov 2024 03:44:16 -0600 Subject: [PATCH 370/425] renamed OptimismPoolBuilder to OpPoolBuilder (#12246) --- crates/optimism/node/src/node.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 8e06dbcc5..925e7204c 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -66,7 +66,7 @@ impl OptimismNode { args: RollupArgs, ) -> ComponentsBuilder< Node, - OptimismPoolBuilder, + OpPoolBuilder, OpPayloadBuilder, OptimismNetworkBuilder, OptimismExecutorBuilder, @@ -80,7 +80,7 @@ impl OptimismNode { let RollupArgs { disable_txpool_gossip, compute_pending_block, discovery_v4, .. } = args; ComponentsBuilder::default() .node_types::() - .pool(OptimismPoolBuilder::default()) + .pool(OpPoolBuilder::default()) .payload(OpPayloadBuilder::new(compute_pending_block)) .network(OptimismNetworkBuilder { disable_txpool_gossip, @@ -99,7 +99,7 @@ where { type ComponentsBuilder = ComponentsBuilder< N, - OptimismPoolBuilder, + OpPoolBuilder, OpPayloadBuilder, OptimismNetworkBuilder, OptimismExecutorBuilder, @@ -206,12 +206,12 @@ where /// This contains various settings that can be configured and take precedence over the node's /// config. #[derive(Debug, Default, Clone)] -pub struct OptimismPoolBuilder { +pub struct OpPoolBuilder { /// Enforced overrides that are applied to the pool config. 
pub pool_config_overrides: PoolBuilderConfigOverrides, } -impl PoolBuilder for OptimismPoolBuilder +impl PoolBuilder for OpPoolBuilder where Node: FullNodeTypes>, { From 8d31b652425d0e67be1577d41be77331028b93d6 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 1 Nov 2024 11:17:43 +0100 Subject: [PATCH 371/425] chore: clippy happy (#12248) --- crates/rpc/rpc-testing-util/src/debug.rs | 4 ++-- crates/rpc/rpc-testing-util/src/trace.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/rpc/rpc-testing-util/src/debug.rs b/crates/rpc/rpc-testing-util/src/debug.rs index f50064e80..97fe008fa 100644 --- a/crates/rpc/rpc-testing-util/src/debug.rs +++ b/crates/rpc/rpc-testing-util/src/debug.rs @@ -292,7 +292,7 @@ pub struct DebugTraceTransactionsStream<'a> { stream: Pin + 'a>>, } -impl<'a> DebugTraceTransactionsStream<'a> { +impl DebugTraceTransactionsStream<'_> { /// Returns the next error result of the stream. pub async fn next_err(&mut self) -> Option<(RpcError, TxHash)> { loop { @@ -324,7 +324,7 @@ pub struct DebugTraceBlockStream<'a> { stream: Pin + 'a>>, } -impl<'a> DebugTraceBlockStream<'a> { +impl DebugTraceBlockStream<'_> { /// Returns the next error result of the stream. pub async fn next_err(&mut self) -> Option<(RpcError, BlockId)> { loop { diff --git a/crates/rpc/rpc-testing-util/src/trace.rs b/crates/rpc/rpc-testing-util/src/trace.rs index c6dc16cf1..0fefef7c9 100644 --- a/crates/rpc/rpc-testing-util/src/trace.rs +++ b/crates/rpc/rpc-testing-util/src/trace.rs @@ -381,7 +381,7 @@ pub struct TraceBlockStream<'a> { stream: Pin + 'a>>, } -impl<'a> TraceBlockStream<'a> { +impl TraceBlockStream<'_> { /// Returns the next error result of the stream. pub async fn next_err(&mut self) -> Option<(RpcError, BlockId)> { loop { From 39bc8ce81a237fd5452ae31d0362a1777a7d3c5f Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Fri, 1 Nov 2024 11:35:06 +0100 Subject: [PATCH 372/425] refactor(revm): simplify `Database` impl for `StateProviderDatabase` (#12241) --- crates/revm/src/database.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/revm/src/database.rs b/crates/revm/src/database.rs index 8f40d2be8..5f662fea7 100644 --- a/crates/revm/src/database.rs +++ b/crates/revm/src/database.rs @@ -101,21 +101,21 @@ impl Database for StateProviderDatabase { /// Returns `Ok` with `Some(AccountInfo)` if the account exists, /// `None` if it doesn't, or an error if encountered. fn basic(&mut self, address: Address) -> Result, Self::Error> { - DatabaseRef::basic_ref(self, address) + self.basic_ref(address) } /// Retrieves the bytecode associated with a given code hash. /// /// Returns `Ok` with the bytecode if found, or the default bytecode otherwise. fn code_by_hash(&mut self, code_hash: B256) -> Result { - DatabaseRef::code_by_hash_ref(self, code_hash) + self.code_by_hash_ref(code_hash) } /// Retrieves the storage value at a specific index for a given address. /// /// Returns `Ok` with the storage value, or the default value if not found. fn storage(&mut self, address: Address, index: U256) -> Result { - DatabaseRef::storage_ref(self, address, index) + self.storage_ref(address, index) } /// Retrieves the block hash for a given block number. @@ -123,7 +123,7 @@ impl Database for StateProviderDatabase { /// Returns `Ok` with the block hash if found, or the default hash otherwise. /// Note: It safely casts the `number` to `u64`. 
fn block_hash(&mut self, number: u64) -> Result { - DatabaseRef::block_hash_ref(self, number) + self.block_hash_ref(number) } } From f0cef9dc51fa738490d09f9d64bf03ae27a01fd6 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Fri, 1 Nov 2024 11:33:58 +0100 Subject: [PATCH 373/425] revm: add `Database` `Either` helper type (#12240) --- crates/revm/src/either.rs | 52 +++++++++++++++++++++++++++++++++++++++ crates/revm/src/lib.rs | 3 +++ 2 files changed, 55 insertions(+) create mode 100644 crates/revm/src/either.rs diff --git a/crates/revm/src/either.rs b/crates/revm/src/either.rs new file mode 100644 index 000000000..e93ba3a8d --- /dev/null +++ b/crates/revm/src/either.rs @@ -0,0 +1,52 @@ +use alloy_primitives::{Address, B256, U256}; +use revm::{ + primitives::{AccountInfo, Bytecode}, + Database, +}; + +/// An enum type that can hold either of two different [`Database`] implementations. +/// +/// This allows flexible usage of different [`Database`] types in the same context. +#[derive(Debug, Clone)] +pub enum Either { + /// A value of type `L`. + Left(L), + /// A value of type `R`. + Right(R), +} + +impl Database for Either +where + L: Database, + R: Database, +{ + type Error = L::Error; + + fn basic(&mut self, address: Address) -> Result, Self::Error> { + match self { + Self::Left(db) => db.basic(address), + Self::Right(db) => db.basic(address), + } + } + + fn code_by_hash(&mut self, code_hash: B256) -> Result { + match self { + Self::Left(db) => db.code_by_hash(code_hash), + Self::Right(db) => db.code_by_hash(code_hash), + } + } + + fn storage(&mut self, address: Address, index: U256) -> Result { + match self { + Self::Left(db) => db.storage(address, index), + Self::Right(db) => db.storage(address, index), + } + } + + fn block_hash(&mut self, number: u64) -> Result { + match self { + Self::Left(db) => db.block_hash(number), + Self::Right(db) => db.block_hash(number), + } + } +} diff --git a/crates/revm/src/lib.rs b/crates/revm/src/lib.rs index 02eb182ee..8b544a537 100644 --- a/crates/revm/src/lib.rs +++ b/crates/revm/src/lib.rs @@ -22,3 +22,6 @@ pub mod test_utils; // Convenience re-exports. pub use revm::{self, *}; + +/// Either type for flexible usage of different database types in the same context. +pub mod either; From 249c600dd9c8a7ae8556ae004f366b66644ac569 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Fri, 1 Nov 2024 11:34:48 +0100 Subject: [PATCH 374/425] rpc: add `rename` method in `TransportRpcModules` (#12239) --- crates/rpc/rpc-builder/src/lib.rs | 56 +++++++++++++++++++++++++++++-- 1 file changed, 54 insertions(+), 2 deletions(-) diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index 5d5fd31aa..696d35014 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -2015,7 +2015,7 @@ impl TransportRpcModules { Ok(false) } - /// Merge the given [Methods] in all configured methods. + /// Merge the given [`Methods`] in all configured methods. /// /// Fails if any of the methods in other is present already. pub fn merge_configured( @@ -2106,7 +2106,22 @@ impl TransportRpcModules { http_removed || ws_removed || ipc_removed } - /// Replace the given [Methods] in the configured http methods. + /// Renames a method in all configured transports by: + /// 1. Removing the old method name. + /// 2. Adding the new method. 
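// Illustrative usage sketch (not part of the patch above): the new `Either`
// helper from `reth_revm::either` can wrap two different `Database`
// implementations behind a single concrete type. `CacheDB` and `EmptyDB` from
// `revm` are assumed here purely as example databases.
use reth_revm::either::Either;
use revm::db::{CacheDB, EmptyDB};

/// Picks a cached or an empty database at runtime while keeping one return
/// type that still implements `revm::Database`.
fn select_db(use_cache: bool) -> Either<CacheDB<EmptyDB>, EmptyDB> {
    if use_cache {
        Either::Left(CacheDB::new(EmptyDB::default()))
    } else {
        Either::Right(EmptyDB::default())
    }
}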
+ pub fn rename( + &mut self, + old_name: &'static str, + new_method: impl Into, + ) -> Result<(), RegisterMethodError> { + // Remove the old method from all configured transports + self.remove_method_from_configured(old_name); + + // Merge the new method into the configured transports + self.merge_configured(new_method) + } + + /// Replace the given [`Methods`] in the configured http methods. /// /// Fails if any of the methods in other is present already or if the method being removed is /// not present @@ -2478,6 +2493,43 @@ mod tests { assert!(modules.ipc.as_ref().unwrap().method("anything").is_none()); } + #[test] + fn test_transport_rpc_module_rename() { + let mut modules = TransportRpcModules { + http: Some(create_test_module()), + ws: Some(create_test_module()), + ipc: Some(create_test_module()), + ..Default::default() + }; + + // Verify that the old we want to rename exists at the start + assert!(modules.http.as_ref().unwrap().method("anything").is_some()); + assert!(modules.ws.as_ref().unwrap().method("anything").is_some()); + assert!(modules.ipc.as_ref().unwrap().method("anything").is_some()); + + // Verify that the new method does not exist at the start + assert!(modules.http.as_ref().unwrap().method("something").is_none()); + assert!(modules.ws.as_ref().unwrap().method("something").is_none()); + assert!(modules.ipc.as_ref().unwrap().method("something").is_none()); + + // Create another module + let mut other_module = RpcModule::new(()); + other_module.register_method("something", |_, _, _| "fails").unwrap(); + + // Rename the method + modules.rename("anything", other_module).expect("rename failed"); + + // Verify that the old method was removed from all transports + assert!(modules.http.as_ref().unwrap().method("anything").is_none()); + assert!(modules.ws.as_ref().unwrap().method("anything").is_none()); + assert!(modules.ipc.as_ref().unwrap().method("anything").is_none()); + + // Verify that the new method was added to all transports + assert!(modules.http.as_ref().unwrap().method("something").is_some()); + assert!(modules.ws.as_ref().unwrap().method("something").is_some()); + assert!(modules.ipc.as_ref().unwrap().method("something").is_some()); + } + #[test] fn test_replace_http_method() { let mut modules = From 2758a560c0ed1afb9b395123eef788e2426b951b Mon Sep 17 00:00:00 2001 From: Darshan Kathiriya <8559992+lakshya-sky@users.noreply.github.com> Date: Fri, 1 Nov 2024 07:10:55 -0400 Subject: [PATCH 375/425] txpool: added a helper to filter pending txns by predicate (#12204) --- crates/transaction-pool/src/lib.rs | 7 +++++++ crates/transaction-pool/src/noop.rs | 7 +++++++ crates/transaction-pool/src/pool/mod.rs | 8 ++++++++ crates/transaction-pool/src/pool/txpool.rs | 8 ++++++++ crates/transaction-pool/src/traits.rs | 6 ++++++ 5 files changed, 36 insertions(+) diff --git a/crates/transaction-pool/src/lib.rs b/crates/transaction-pool/src/lib.rs index f8f06b805..020375994 100644 --- a/crates/transaction-pool/src/lib.rs +++ b/crates/transaction-pool/src/lib.rs @@ -496,6 +496,13 @@ where self.pool.get_transactions_by_sender(sender) } + fn get_pending_transactions_with_predicate( + &self, + predicate: impl FnMut(&ValidPoolTransaction) -> bool, + ) -> Vec>> { + self.pool.pending_transactions_with_predicate(predicate) + } + fn get_pending_transactions_by_sender( &self, sender: Address, diff --git a/crates/transaction-pool/src/noop.rs b/crates/transaction-pool/src/noop.rs index 0f87f06f7..47a26ee29 100644 --- a/crates/transaction-pool/src/noop.rs +++ 
b/crates/transaction-pool/src/noop.rs @@ -213,6 +213,13 @@ impl TransactionPool for NoopTransactionPool { vec![] } + fn get_pending_transactions_with_predicate( + &self, + _predicate: impl FnMut(&ValidPoolTransaction) -> bool, + ) -> Vec>> { + vec![] + } + fn get_pending_transactions_by_sender( &self, _sender: Address, diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index 2e7340954..77446a523 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -787,6 +787,14 @@ where self.get_pool_data().pending_txs_by_sender(sender_id) } + /// Returns all pending transactions filtered by predicate + pub(crate) fn pending_transactions_with_predicate( + &self, + predicate: impl FnMut(&ValidPoolTransaction) -> bool, + ) -> Vec>> { + self.get_pool_data().pending_transactions_with_predicate(predicate) + } + /// Returns all pending transactions of the address by sender pub(crate) fn get_pending_transactions_by_sender( &self, diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index bd0aacccf..8679a4318 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -364,6 +364,14 @@ impl TxPool { self.pending_pool.all() } + /// Returns all pending transactions filtered by predicate + pub(crate) fn pending_transactions_with_predicate( + &self, + mut predicate: impl FnMut(&ValidPoolTransaction) -> bool, + ) -> Vec>> { + self.pending_transactions_iter().filter(|tx| predicate(tx)).collect() + } + /// Returns all pending transactions for the specified sender pub(crate) fn pending_txs_by_sender( &self, diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index 0d8f6dbb5..2667143b7 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -342,6 +342,12 @@ pub trait TransactionPool: Send + Sync + Clone { sender: Address, ) -> Vec>>; + /// Returns all pending transactions filtered by predicate + fn get_pending_transactions_with_predicate( + &self, + predicate: impl FnMut(&ValidPoolTransaction) -> bool, + ) -> Vec>>; + /// Returns all pending transactions sent by a given user fn get_pending_transactions_by_sender( &self, From c6b740801fda7963baeb33ac395227032825bb7e Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 1 Nov 2024 12:11:54 +0100 Subject: [PATCH 376/425] chore: apply same member order (#12253) --- crates/blockchain-tree/src/noop.rs | 12 ++-- crates/exex/exex/src/notifications.rs | 10 ++-- crates/node/builder/src/builder/states.rs | 14 ++--- crates/optimism/rpc/src/eth/mod.rs | 2 +- crates/primitives/src/block.rs | 14 ++--- crates/rpc/rpc-eth-api/src/node.rs | 2 +- crates/rpc/rpc/src/eth/core.rs | 2 +- .../src/providers/database/provider.rs | 10 ++-- crates/storage/provider/src/providers/mod.rs | 58 +++++++++---------- .../storage/provider/src/test_utils/mock.rs | 24 ++++---- .../storage/provider/src/test_utils/noop.rs | 32 +++++----- crates/trie/trie/src/hashed_cursor/noop.rs | 8 +-- crates/trie/trie/src/prefix_set.rs | 2 +- 13 files changed, 95 insertions(+), 95 deletions(-) diff --git a/crates/blockchain-tree/src/noop.rs b/crates/blockchain-tree/src/noop.rs index 925b8f03a..862b02e76 100644 --- a/crates/blockchain-tree/src/noop.rs +++ b/crates/blockchain-tree/src/noop.rs @@ -60,6 +60,12 @@ impl BlockchainTreeEngine for NoopBlockchainTree { Ok(()) } + fn update_block_hashes_and_clear_buffered( + &self, + ) -> Result, CanonicalError> { + 
Ok(BTreeMap::new()) + } + fn connect_buffered_blocks_to_canonical_hashes(&self) -> Result<(), CanonicalError> { Ok(()) } @@ -67,12 +73,6 @@ impl BlockchainTreeEngine for NoopBlockchainTree { fn make_canonical(&self, block_hash: BlockHash) -> Result { Err(BlockchainTreeError::BlockHashNotFoundInChain { block_hash }.into()) } - - fn update_block_hashes_and_clear_buffered( - &self, - ) -> Result, CanonicalError> { - Ok(BTreeMap::new()) - } } impl BlockchainTreeViewer for NoopBlockchainTree { diff --git a/crates/exex/exex/src/notifications.rs b/crates/exex/exex/src/notifications.rs index 90a0ee230..14cfe9be4 100644 --- a/crates/exex/exex/src/notifications.rs +++ b/crates/exex/exex/src/notifications.rs @@ -108,11 +108,6 @@ where }); } - fn without_head(mut self) -> Self { - self.set_without_head(); - self - } - fn set_with_head(&mut self, exex_head: ExExHead) { let current = std::mem::replace(&mut self.inner, ExExNotificationsInner::Invalid); self.inner = ExExNotificationsInner::WithHead(match current { @@ -131,6 +126,11 @@ where }); } + fn without_head(mut self) -> Self { + self.set_without_head(); + self + } + fn with_head(mut self, exex_head: ExExHead) -> Self { self.set_with_head(exex_head); self diff --git a/crates/node/builder/src/builder/states.rs b/crates/node/builder/src/builder/states.rs index e75a07802..ca5a57d0d 100644 --- a/crates/node/builder/src/builder/states.rs +++ b/crates/node/builder/src/builder/states.rs @@ -95,8 +95,8 @@ impl> FullNodeComponents for NodeAdapter< type Pool = C::Pool; type Evm = C::Evm; type Executor = C::Executor; - type Network = C::Network; type Consensus = C::Consensus; + type Network = C::Network; fn pool(&self) -> &Self::Pool { self.components.pool() @@ -110,8 +110,8 @@ impl> FullNodeComponents for NodeAdapter< self.components.block_executor() } - fn provider(&self) -> &Self::Provider { - &self.provider + fn consensus(&self) -> &Self::Consensus { + self.components.consensus() } fn network(&self) -> &Self::Network { @@ -122,12 +122,12 @@ impl> FullNodeComponents for NodeAdapter< self.components.payload_builder() } - fn task_executor(&self) -> &TaskExecutor { - &self.task_executor + fn provider(&self) -> &Self::Provider { + &self.provider } - fn consensus(&self) -> &Self::Consensus { - self.components.consensus() + fn task_executor(&self) -> &TaskExecutor { + &self.task_executor } } diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index 7b427467b..dc6e8e59f 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -119,8 +119,8 @@ where { type Provider = N::Provider; type Pool = N::Pool; - type Network = ::Network; type Evm = ::Evm; + type Network = ::Network; #[inline] fn pool(&self) -> &Self::Pool { diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index a06979300..7e5e76f1b 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -150,27 +150,27 @@ mod block_rlp { } impl Encodable for Block { - fn length(&self) -> usize { - let helper: HelperRef<'_, _> = self.into(); - helper.length() - } - fn encode(&self, out: &mut dyn bytes::BufMut) { let helper: HelperRef<'_, _> = self.into(); helper.encode(out) } - } - impl Encodable for SealedBlock { fn length(&self) -> usize { let helper: HelperRef<'_, _> = self.into(); helper.length() } + } + impl Encodable for SealedBlock { fn encode(&self, out: &mut dyn bytes::BufMut) { let helper: HelperRef<'_, _> = self.into(); helper.encode(out) } + + fn length(&self) -> usize { + let helper: 
HelperRef<'_, _> = self.into(); + helper.length() + } } } diff --git a/crates/rpc/rpc-eth-api/src/node.rs b/crates/rpc/rpc-eth-api/src/node.rs index 851b26b72..4ae79c083 100644 --- a/crates/rpc/rpc-eth-api/src/node.rs +++ b/crates/rpc/rpc-eth-api/src/node.rs @@ -38,8 +38,8 @@ where { type Provider = T::Provider; type Pool = T::Pool; - type Network = ::Network; type Evm = ::Evm; + type Network = ::Network; #[inline] fn pool(&self) -> &Self::Pool { diff --git a/crates/rpc/rpc/src/eth/core.rs b/crates/rpc/rpc/src/eth/core.rs index 339f2200c..026c87153 100644 --- a/crates/rpc/rpc/src/eth/core.rs +++ b/crates/rpc/rpc/src/eth/core.rs @@ -150,8 +150,8 @@ where { type Provider = Provider; type Pool = Pool; - type Network = Network; type Evm = EvmConfig; + type Network = Network; fn pool(&self) -> &Self::Pool { self.inner.pool() diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 81affa0d8..2af22cec0 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -2055,6 +2055,11 @@ impl StageCheckpointReader for DatabaseProvider { Ok(self.tx.get::(id.to_string())?) } + /// Get stage checkpoint progress. + fn get_stage_checkpoint_progress(&self, id: StageId) -> ProviderResult>> { + Ok(self.tx.get::(id.to_string())?) + } + fn get_all_checkpoints(&self) -> ProviderResult> { self.tx .cursor_read::()? @@ -2062,11 +2067,6 @@ impl StageCheckpointReader for DatabaseProvider { .collect::, _>>() .map_err(ProviderError::Database) } - - /// Get stage checkpoint progress. - fn get_stage_checkpoint_progress(&self, id: StageId) -> ProviderResult>> { - Ok(self.tx.get::(id.to_string())?) - } } impl StageCheckpointWriter for DatabaseProvider { diff --git a/crates/storage/provider/src/providers/mod.rs b/crates/storage/provider/src/providers/mod.rs index c81ef05d2..3b24617fd 100644 --- a/crates/storage/provider/src/providers/mod.rs +++ b/crates/storage/provider/src/providers/mod.rs @@ -601,35 +601,6 @@ impl StateProviderFactory for BlockchainProvider { self.database.latest() } - fn history_by_block_number( - &self, - block_number: BlockNumber, - ) -> ProviderResult { - trace!(target: "providers::blockchain", ?block_number, "Getting history by block number"); - self.ensure_canonical_block(block_number)?; - self.database.history_by_block_number(block_number) - } - - fn history_by_block_hash(&self, block_hash: BlockHash) -> ProviderResult { - trace!(target: "providers::blockchain", ?block_hash, "Getting history by block hash"); - self.database.history_by_block_hash(block_hash) - } - - fn state_by_block_hash(&self, block: BlockHash) -> ProviderResult { - trace!(target: "providers::blockchain", ?block, "Getting state by block hash"); - let mut state = self.history_by_block_hash(block); - - // we failed to get the state by hash, from disk, hash block be the pending block - if state.is_err() { - if let Ok(Some(pending)) = self.pending_state_by_hash(block) { - // we found pending block by hash - state = Ok(pending) - } - } - - state - } - /// Returns a [`StateProviderBox`] indexed by the given block number or tag. /// /// Note: if a number is provided this will only look at historical(canonical) state. 
@@ -662,6 +633,35 @@ impl StateProviderFactory for BlockchainProvider { } } + fn history_by_block_number( + &self, + block_number: BlockNumber, + ) -> ProviderResult { + trace!(target: "providers::blockchain", ?block_number, "Getting history by block number"); + self.ensure_canonical_block(block_number)?; + self.database.history_by_block_number(block_number) + } + + fn history_by_block_hash(&self, block_hash: BlockHash) -> ProviderResult { + trace!(target: "providers::blockchain", ?block_hash, "Getting history by block hash"); + self.database.history_by_block_hash(block_hash) + } + + fn state_by_block_hash(&self, block: BlockHash) -> ProviderResult { + trace!(target: "providers::blockchain", ?block, "Getting state by block hash"); + let mut state = self.history_by_block_hash(block); + + // we failed to get the state by hash, from disk, hash block be the pending block + if state.is_err() { + if let Ok(Some(pending)) = self.pending_state_by_hash(block) { + // we found pending block by hash + state = Ok(pending) + } + } + + state + } + /// Returns the state provider for pending state. /// /// If there's no pending block available then the latest state provider is returned: diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs index e2e08e61a..07fa505b2 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -760,18 +760,6 @@ impl StateProviderFactory for MockEthProvider { Ok(Box::new(self.clone())) } - fn history_by_block_number(&self, _block: BlockNumber) -> ProviderResult { - Ok(Box::new(self.clone())) - } - - fn history_by_block_hash(&self, _block: BlockHash) -> ProviderResult { - Ok(Box::new(self.clone())) - } - - fn state_by_block_hash(&self, _block: BlockHash) -> ProviderResult { - Ok(Box::new(self.clone())) - } - fn state_by_block_number_or_tag( &self, number_or_tag: BlockNumberOrTag, @@ -798,6 +786,18 @@ impl StateProviderFactory for MockEthProvider { } } + fn history_by_block_number(&self, _block: BlockNumber) -> ProviderResult { + Ok(Box::new(self.clone())) + } + + fn history_by_block_hash(&self, _block: BlockHash) -> ProviderResult { + Ok(Box::new(self.clone())) + } + + fn state_by_block_hash(&self, _block: BlockHash) -> ProviderResult { + Ok(Box::new(self.clone())) + } + fn pending(&self) -> ProviderResult { Ok(Box::new(self.clone())) } diff --git a/crates/storage/provider/src/test_utils/noop.rs b/crates/storage/provider/src/test_utils/noop.rs index f6f7e185d..e09437647 100644 --- a/crates/storage/provider/src/test_utils/noop.rs +++ b/crates/storage/provider/src/test_utils/noop.rs @@ -465,22 +465,6 @@ impl StateProviderFactory for NoopProvider { Ok(Box::new(*self)) } - fn history_by_block_number(&self, _block: BlockNumber) -> ProviderResult { - Ok(Box::new(*self)) - } - - fn history_by_block_hash(&self, _block: BlockHash) -> ProviderResult { - Ok(Box::new(*self)) - } - - fn state_by_block_hash(&self, _block: BlockHash) -> ProviderResult { - Ok(Box::new(*self)) - } - - fn pending(&self) -> ProviderResult { - Ok(Box::new(*self)) - } - fn state_by_block_number_or_tag( &self, number_or_tag: BlockNumberOrTag, @@ -507,6 +491,22 @@ impl StateProviderFactory for NoopProvider { } } + fn history_by_block_number(&self, _block: BlockNumber) -> ProviderResult { + Ok(Box::new(*self)) + } + + fn history_by_block_hash(&self, _block: BlockHash) -> ProviderResult { + Ok(Box::new(*self)) + } + + fn state_by_block_hash(&self, _block: BlockHash) -> ProviderResult { + Ok(Box::new(*self)) + 
} + + fn pending(&self) -> ProviderResult { + Ok(Box::new(*self)) + } + fn pending_state_by_hash(&self, _block_hash: B256) -> ProviderResult> { Ok(Some(Box::new(*self))) } diff --git a/crates/trie/trie/src/hashed_cursor/noop.rs b/crates/trie/trie/src/hashed_cursor/noop.rs index 4783d5afd..a21e1026b 100644 --- a/crates/trie/trie/src/hashed_cursor/noop.rs +++ b/crates/trie/trie/src/hashed_cursor/noop.rs @@ -32,11 +32,11 @@ pub struct NoopHashedAccountCursor; impl HashedCursor for NoopHashedAccountCursor { type Value = Account; - fn next(&mut self) -> Result, DatabaseError> { + fn seek(&mut self, _key: B256) -> Result, DatabaseError> { Ok(None) } - fn seek(&mut self, _key: B256) -> Result, DatabaseError> { + fn next(&mut self) -> Result, DatabaseError> { Ok(None) } } @@ -49,11 +49,11 @@ pub struct NoopHashedStorageCursor; impl HashedCursor for NoopHashedStorageCursor { type Value = U256; - fn next(&mut self) -> Result, DatabaseError> { + fn seek(&mut self, _key: B256) -> Result, DatabaseError> { Ok(None) } - fn seek(&mut self, _key: B256) -> Result, DatabaseError> { + fn next(&mut self) -> Result, DatabaseError> { Ok(None) } } diff --git a/crates/trie/trie/src/prefix_set.rs b/crates/trie/trie/src/prefix_set.rs index 0cf16f939..d904ef38f 100644 --- a/crates/trie/trie/src/prefix_set.rs +++ b/crates/trie/trie/src/prefix_set.rs @@ -211,8 +211,8 @@ impl PrefixSet { } impl<'a> IntoIterator for &'a PrefixSet { - type IntoIter = std::slice::Iter<'a, reth_trie_common::Nibbles>; type Item = &'a reth_trie_common::Nibbles; + type IntoIter = std::slice::Iter<'a, reth_trie_common::Nibbles>; fn into_iter(self) -> Self::IntoIter { self.iter() } From f52186cc4dbc1598be8e6a7fa17cf0cedd56a1a3 Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Fri, 1 Nov 2024 19:30:03 +0800 Subject: [PATCH 377/425] db-api: opt StorageShardedKey encode, decode (#12143) --- .../storage/db-api/src/models/storage_sharded_key.rs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/crates/storage/db-api/src/models/storage_sharded_key.rs b/crates/storage/db-api/src/models/storage_sharded_key.rs index 5fd79ba65..a7a1ffb71 100644 --- a/crates/storage/db-api/src/models/storage_sharded_key.rs +++ b/crates/storage/db-api/src/models/storage_sharded_key.rs @@ -12,6 +12,10 @@ use super::ShardedKey; /// Number of indices in one shard. pub const NUM_OF_INDICES_IN_SHARD: usize = 2_000; +/// The size of [`StorageShardedKey`] encode bytes. +/// The fields are: 20-byte address, 32-byte key, and 8-byte block number +const STORAGE_SHARD_KEY_BYTES_SIZE: usize = 20 + 32 + 8; + /// Sometimes data can be too big to be saved for a single key. This helps out by dividing the data /// into different shards. 
Example: /// @@ -53,7 +57,8 @@ impl Encode for StorageShardedKey { type Encoded = Vec; fn encode(self) -> Self::Encoded { - let mut buf: Vec = Encode::encode(self.address).into(); + let mut buf: Vec = Vec::with_capacity(STORAGE_SHARD_KEY_BYTES_SIZE); + buf.extend_from_slice(&Encode::encode(self.address)); buf.extend_from_slice(&Encode::encode(self.sharded_key.key)); buf.extend_from_slice(&self.sharded_key.highest_block_number.to_be_bytes()); buf @@ -62,6 +67,9 @@ impl Encode for StorageShardedKey { impl Decode for StorageShardedKey { fn decode(value: &[u8]) -> Result { + if value.len() != STORAGE_SHARD_KEY_BYTES_SIZE { + return Err(DatabaseError::Decode) + } let tx_num_index = value.len() - 8; let highest_tx_number = u64::from_be_bytes( From 927be855ffbdf8186a69ae90168a784bdd0651cc Mon Sep 17 00:00:00 2001 From: caglarkaya Date: Fri, 1 Nov 2024 14:32:12 +0300 Subject: [PATCH 378/425] feat: track buffered outgoing messages (#12220) --- crates/net/network/src/metrics.rs | 2 ++ crates/net/network/src/session/active.rs | 31 ++++++++++++++++++++++-- crates/net/network/src/session/mod.rs | 5 +++- 3 files changed, 35 insertions(+), 3 deletions(-) diff --git a/crates/net/network/src/metrics.rs b/crates/net/network/src/metrics.rs index 4333cf140..bda5f84c7 100644 --- a/crates/net/network/src/metrics.rs +++ b/crates/net/network/src/metrics.rs @@ -85,6 +85,8 @@ pub struct SessionManagerMetrics { pub(crate) total_dial_successes: Counter, /// Number of dropped outgoing peer messages. pub(crate) total_outgoing_peer_messages_dropped: Counter, + /// Number of queued outgoing messages + pub(crate) queued_outgoing_messages: Gauge, } /// Metrics for the [`TransactionsManager`](crate::transactions::TransactionsManager). diff --git a/crates/net/network/src/session/active.rs b/crates/net/network/src/session/active.rs index e83f5d9f1..10048823c 100644 --- a/crates/net/network/src/session/active.rs +++ b/crates/net/network/src/session/active.rs @@ -12,6 +12,7 @@ use std::{ }; use futures::{stream::Fuse, SinkExt, StreamExt}; +use metrics::Gauge; use reth_eth_wire::{ errors::{EthHandshakeError, EthStreamError, P2PStreamError}, message::{EthBroadcastMessage, RequestPair}, @@ -87,7 +88,7 @@ pub(crate) struct ActiveSession { /// All requests that were sent by the remote peer and we're waiting on an internal response pub(crate) received_requests_from_remote: Vec, /// Buffered messages that should be handled and sent to the peer. - pub(crate) queued_outgoing: VecDeque, + pub(crate) queued_outgoing: QueuedOutgoingMessages, /// The maximum time we wait for a response from a peer. pub(crate) internal_request_timeout: Arc, /// Interval when to check for timed out requests. 
@@ -757,6 +758,32 @@ fn calculate_new_timeout(current_timeout: Duration, estimated_rtt: Duration) -> smoothened_timeout.clamp(MINIMUM_TIMEOUT, MAXIMUM_TIMEOUT) } + +/// A helper struct that wraps the queue of outgoing messages and a metric to track their count +pub(crate) struct QueuedOutgoingMessages { + messages: VecDeque, + count: Gauge, +} + +impl QueuedOutgoingMessages { + pub(crate) const fn new(metric: Gauge) -> Self { + Self { messages: VecDeque::new(), count: metric } + } + + pub(crate) fn push_back(&mut self, message: OutgoingMessage) { + self.messages.push_back(message); + self.count.increment(1); + } + + pub(crate) fn pop_front(&mut self) -> Option { + self.messages.pop_front().inspect(|_| self.count.decrement(1)) + } + + pub(crate) fn shrink_to_fit(&mut self) { + self.messages.shrink_to_fit(); + } +} + #[cfg(test)] mod tests { use super::*; @@ -882,7 +909,7 @@ mod tests { internal_request_tx: ReceiverStream::new(messages_rx).fuse(), inflight_requests: Default::default(), conn, - queued_outgoing: Default::default(), + queued_outgoing: QueuedOutgoingMessages::new(Gauge::noop()), received_requests_from_remote: Default::default(), internal_request_timeout_interval: tokio::time::interval( INITIAL_REQUEST_TIMEOUT, diff --git a/crates/net/network/src/session/mod.rs b/crates/net/network/src/session/mod.rs index 3522aa6a7..712f076b4 100644 --- a/crates/net/network/src/session/mod.rs +++ b/crates/net/network/src/session/mod.rs @@ -5,6 +5,7 @@ mod conn; mod counter; mod handle; +use active::QueuedOutgoingMessages; pub use conn::EthRlpxConnection; pub use handle::{ ActiveSessionHandle, ActiveSessionMessage, PendingSessionEvent, PendingSessionHandle, @@ -495,7 +496,9 @@ impl SessionManager { internal_request_tx: ReceiverStream::new(messages_rx).fuse(), inflight_requests: Default::default(), conn, - queued_outgoing: Default::default(), + queued_outgoing: QueuedOutgoingMessages::new( + self.metrics.queued_outgoing_messages.clone(), + ), received_requests_from_remote: Default::default(), internal_request_timeout_interval: tokio::time::interval( self.initial_internal_request_timeout, From f93dbf54c3b1181b166a0813cfeb95296ec0c6cd Mon Sep 17 00:00:00 2001 From: Emilia Hane Date: Fri, 1 Nov 2024 19:49:37 +0800 Subject: [PATCH 379/425] Remove redundant `SignedTransaction::Signature` (#12185) --- crates/primitives-traits/src/transaction/signed.rs | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/crates/primitives-traits/src/transaction/signed.rs b/crates/primitives-traits/src/transaction/signed.rs index 1bc8308b1..748ef3966 100644 --- a/crates/primitives-traits/src/transaction/signed.rs +++ b/crates/primitives-traits/src/transaction/signed.rs @@ -5,7 +5,7 @@ use core::hash::Hash; use alloy_consensus::Transaction; use alloy_eips::eip2718::{Decodable2718, Encodable2718}; -use alloy_primitives::{keccak256, Address, TxHash, B256}; +use alloy_primitives::{keccak256, Address, Signature, TxHash, B256}; /// A signed transaction. pub trait SignedTransaction: @@ -26,9 +26,6 @@ pub trait SignedTransaction: /// Transaction type that is signed. type Transaction: Transaction; - /// Signature type that results from signing transaction. - type Signature; - /// Returns reference to transaction hash. fn tx_hash(&self) -> &TxHash; @@ -36,7 +33,7 @@ pub trait SignedTransaction: fn transaction(&self) -> &Self::Transaction; /// Returns reference to signature. - fn signature(&self) -> &Self::Signature; + fn signature(&self) -> &Signature; /// Recover signer from signature and hash. 
/// @@ -59,10 +56,8 @@ pub trait SignedTransaction: /// Create a new signed transaction from a transaction and its signature. /// /// This will also calculate the transaction hash using its encoding. - fn from_transaction_and_signature( - transaction: Self::Transaction, - signature: Self::Signature, - ) -> Self; + fn from_transaction_and_signature(transaction: Self::Transaction, signature: Signature) + -> Self; /// Calculate transaction hash, eip2728 transaction does not contain rlp header and start with /// tx type. From f6bd11c7116dbf46a4450cb69121d015b4b705b6 Mon Sep 17 00:00:00 2001 From: Aliaksei Misiukevich Date: Fri, 1 Nov 2024 14:54:55 +0100 Subject: [PATCH 380/425] feature: transaction's input truncate function (#12236) Signed-off-by: nadtech-hub --- crates/rpc/rpc-types-compat/src/transaction/mod.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/rpc/rpc-types-compat/src/transaction/mod.rs b/crates/rpc/rpc-types-compat/src/transaction/mod.rs index f8b46454d..2d92747d4 100644 --- a/crates/rpc/rpc-types-compat/src/transaction/mod.rs +++ b/crates/rpc/rpc-types-compat/src/transaction/mod.rs @@ -102,7 +102,9 @@ impl TransactionCompat for () { WithOtherFields::default() } - fn otterscan_api_truncate_input(_tx: &mut Self::Transaction) {} + fn otterscan_api_truncate_input(tx: &mut Self::Transaction) { + tx.input = tx.input.slice(..4); + } fn tx_type(_tx: &Self::Transaction) -> u8 { 0 From c1a68f23cf2d655cd8137660fc82c7d83c094b20 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C3=A9a=20Narzis?= <78718413+lean-apple@users.noreply.github.com> Date: Fri, 1 Nov 2024 20:55:23 +0700 Subject: [PATCH 381/425] refactor: move `payload/builder/src/database.rs` to `revm/src/cached.rs` (#12252) Co-authored-by: Matthias Seitz --- Cargo.lock | 237 +++++++++--------- .../src/commands/debug_cmd/build_block.rs | 3 +- crates/payload/basic/Cargo.toml | 1 + crates/payload/basic/src/lib.rs | 5 +- crates/payload/builder/Cargo.toml | 10 +- crates/payload/builder/src/lib.rs | 1 - .../src/database.rs => revm/src/cached.rs} | 12 +- crates/revm/src/lib.rs | 8 +- 8 files changed, 147 insertions(+), 130 deletions(-) rename crates/{payload/builder/src/database.rs => revm/src/cached.rs} (96%) diff --git a/Cargo.lock b/Cargo.lock index 5fc3574eb..4c469cd21 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -97,9 +97,9 @@ checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "alloy-chains" -version = "0.1.42" +version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dca4a1469a3e572e9ba362920ff145f5d0a00a3e71a64ddcb4a3659cf64c76a7" +checksum = "4feb7c662fd0be3d0c926a456be4ac44e9cf8e05cbd91df6db7f7140b861016a" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -150,9 +150,9 @@ dependencies = [ [[package]] name = "alloy-dyn-abi" -version = "0.8.9" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5647fce5a168f9630f935bf7821c4207b1755184edaeba783cb4e11d35058484" +checksum = "f5228b189b18b85761340dc9eaac0141148a8503657b36f9bc3a869413d987ca" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -229,9 +229,9 @@ dependencies = [ [[package]] name = "alloy-json-abi" -version = "0.8.9" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b5671117c38b1c2306891f97ad3828d85487087f54ebe2c7591a055ea5bcea7" +checksum = "31a0f0d51db8a1a30a4d98a9f90e090a94c8f44cb4d9eafc7e03aa6d00aae984" dependencies = [ "alloy-primitives", 
"alloy-sol-type-parser", @@ -306,9 +306,9 @@ dependencies = [ [[package]] name = "alloy-primitives" -version = "0.8.9" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c71738eb20c42c5fb149571e76536a0f309d142f3957c28791662b96baf77a3d" +checksum = "8edae627382349b56cd6a7a2106f4fd69b243a9233e560c55c2e03cabb7e1d3c" dependencies = [ "alloy-rlp", "arbitrary", @@ -415,7 +415,7 @@ checksum = "2b09cae092c27b6f1bde952653a22708691802e57bfef4a2973b80bea21efd3f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -630,23 +630,23 @@ dependencies = [ [[package]] name = "alloy-sol-macro" -version = "0.8.9" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0900b83f4ee1f45c640ceee596afbc118051921b9438fdb5a3175c1a7e05f8b" +checksum = "841eabaa4710f719fddbc24c95d386eae313f07e6da4babc25830ee37945be0c" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] name = "alloy-sol-macro-expander" -version = "0.8.9" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a41b1e78dde06b5e12e6702fa8c1d30621bf07728ba75b801fb801c9c6a0ba10" +checksum = "6672337f19d837b9f7073c45853aeb528ed9f7dd6a4154ce683e9e5cb7794014" dependencies = [ "alloy-sol-macro-input", "const-hex", @@ -655,31 +655,31 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", "syn-solidity", "tiny-keccak", ] [[package]] name = "alloy-sol-macro-input" -version = "0.8.9" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91dc311a561a306664393407b88d3e53ae58581624128afd8a15faa5de3627dc" +checksum = "0dff37dd20bfb118b777c96eda83b2067f4226d2644c5cfa00187b3bc01770ba" dependencies = [ "const-hex", "dunce", "heck", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", "syn-solidity", ] [[package]] name = "alloy-sol-type-parser" -version = "0.8.9" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45d1fbee9e698f3ba176b6e7a145f4aefe6d2b746b611e8bb246fe11a0e9f6c4" +checksum = "5b853d42292dbb159671a3edae3b2750277ff130f32b726fe07dc2b17aa6f2b5" dependencies = [ "serde", "winnow", @@ -687,9 +687,9 @@ dependencies = [ [[package]] name = "alloy-sol-types" -version = "0.8.9" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "086f41bc6ebcd8cb15f38ba20e47be38dd03692149681ce8061c35d960dbf850" +checksum = "aa828bb1b9a6dc52208fbb18084fb9ce2c30facc2bfda6a5d922349b4990354f" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -862,9 +862,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.91" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8" +checksum = "74f37166d7d48a0284b99dd824694c26119c700b53bf0d1540cdb147dbdaaf13" [[package]] name = "aquamarine" @@ -877,7 +877,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -1100,7 +1100,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -1111,7 +1111,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", 
"quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -1149,7 +1149,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -1255,7 +1255,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -1437,7 +1437,7 @@ checksum = "240f4126219a83519bad05c9a40bfc0303921eeb571fc2d7e44c17ffac99d3f1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", "synstructure", ] @@ -1559,7 +1559,7 @@ checksum = "bcfcc3cd946cb52f0bbfdbbcfa2f4e24f75ebb6c0e1002f7c25904fada18b9ec" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -1677,6 +1677,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + [[package]] name = "chrono" version = "0.4.38" @@ -1771,7 +1777,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -2228,7 +2234,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -2252,7 +2258,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -2263,7 +2269,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -2385,7 +2391,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -2396,7 +2402,7 @@ checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -2417,7 +2423,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", "unicode-xid", ] @@ -2531,7 +2537,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -2679,7 +2685,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -2690,7 +2696,7 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -2747,7 +2753,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -3303,7 +3309,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -3788,9 +3794,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", "futures-channel", @@ -3829,7 +3835,7 @@ dependencies = [ "quote", "serde", "serde_json", - "syn 2.0.85", + 
"syn 2.0.86", ] [[package]] @@ -3979,7 +3985,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -4147,7 +4153,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b23a0c8dfe501baac4adf6ebbfa6eddf8f0c07f56b058cc1288017e32397846c" dependencies = [ "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -4395,7 +4401,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -4571,9 +4577,9 @@ dependencies = [ [[package]] name = "libm" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a00419de735aac21d53b0de5ce2c03bd3627277cf471300f27ebc89f7d828047" +checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" [[package]] name = "libp2p-identity" @@ -4813,7 +4819,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -4959,7 +4965,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -5207,7 +5213,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -5260,9 +5266,9 @@ checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "op-alloy-consensus" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba7c98055fd048073738df0cc6d6537e992a0d8828f39d99a469e870db126dbd" +checksum = "f26c3b35b7b3e36d15e0563eebffe13c1d9ca16b7aaffcb6a64354633547e16b" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5278,9 +5284,9 @@ dependencies = [ [[package]] name = "op-alloy-genesis" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d631e8113cf88d30e621022677209caa148a9ca3ccb590fd34bbd1c731e3aff3" +checksum = "ccacc2efed3d60d98ea581bddb885df1c6c62a592e55de049cfefd94116112cd" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5292,9 +5298,9 @@ dependencies = [ [[package]] name = "op-alloy-network" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1eabe7683d7e19c7cc5171d664e49fc449176cf1334ffff82808e2a7eea5933a" +checksum = "5ff6fc0f94702ea0f4d8466bffdc990067ae6df9213465df9b7957f74f1e5461" dependencies = [ "alloy-consensus", "alloy-network", @@ -5306,26 +5312,29 @@ dependencies = [ [[package]] name = "op-alloy-protocol" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b39574acb1873315e6bd89df174f6223e897188fb87eeea2ad1eda04f7d28eb" +checksum = "f5f8e6ec6b91c6aaeb20860b455a52fd8e300acfe5d534e96e9073a24f853e74" dependencies = [ "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-rlp", "alloy-serde", + "async-trait", "derive_more 1.0.0", "op-alloy-consensus", "op-alloy-genesis", "serde", + "tracing", + "unsigned-varint", ] [[package]] name = "op-alloy-rpc-types" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "919e9b69212d61f3c8932bfb717c7ad458ea3fc52072b3433d99994f8223d555" +checksum = "94bae9bf91b620e1e2c2291562e5998bc1247bd8ada011773e1997b31a95de99" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5341,9 +5350,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types-engine" -version = 
"0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e3a47ea24cee189b4351be247fd138c68571704ee57060cf5a722502f44412c" +checksum = "4b52ee59c86537cff83e8c7f2a6aa287a94f3608bb40c06d442aafd0c2e807a4" dependencies = [ "alloy-primitives", "alloy-rpc-types-engine", @@ -5561,7 +5570,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -5590,7 +5599,7 @@ checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -5762,7 +5771,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ "proc-macro2", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -5813,7 +5822,7 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -5911,7 +5920,7 @@ checksum = "6ff7ff745a347b87471d859a377a9a404361e7efc2a971d73424a6d183c0fc77" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -5990,10 +5999,11 @@ dependencies = [ [[package]] name = "quinn-udp" -version = "0.5.5" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fe68c2e9e1a1234e218683dbdf9f9dfcb094113c5ac2b938dfcb9bab4c4140b" +checksum = "e346e016eacfff12233c243718197ca12f148c84e1e84268a896699b41c71780" dependencies = [ + "cfg_aliases", "libc", "once_cell", "socket2", @@ -6229,9 +6239,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.8" +version = "0.12.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f713147fbe92361e52392c73b8c9e48c04c6625bce969ef54dc901e58e042a7b" +checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" dependencies = [ "base64 0.22.1", "bytes", @@ -6403,6 +6413,7 @@ dependencies = [ "reth-payload-primitives", "reth-primitives", "reth-provider", + "reth-revm", "reth-tasks", "reth-transaction-pool", "revm", @@ -6733,7 +6744,7 @@ dependencies = [ "proc-macro2", "quote", "similar-asserts", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -9606,9 +9617,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.15" +version = "0.23.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fbb44d7acc4e873d613422379f69f237a1b141928c02f6bc6ccfddddc2d7993" +checksum = "eee87ff5d9b36712a58574e12e9f0ea80f915a5b0ac518d322b24a465617925e" dependencies = [ "log", "once_cell", @@ -9883,22 +9894,22 @@ checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" [[package]] name = "serde" -version = "1.0.213" +version = "1.0.214" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ea7893ff5e2466df8d720bb615088341b295f849602c6956047f8f80f0e9bc1" +checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.213" +version = "1.0.214" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e85ad2009c50b58e87caa8cd6dac16bdf511bbfb7af6c33df902396aa480fa5" +checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -9933,7 +9944,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ 
"proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -9984,7 +9995,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -10007,7 +10018,7 @@ checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -10293,7 +10304,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -10351,9 +10362,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.85" +version = "2.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5023162dfcd14ef8f32034d8bcd4cc5ddc61ef7a247c024a33e24e1f24d21b56" +checksum = "e89275301d38033efb81a6e60e3497e734dfcc62571f2854bf4b16690398824c" dependencies = [ "proc-macro2", "quote", @@ -10362,14 +10373,14 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.8.9" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d5e0c2ea8db64b2898b62ea2fbd60204ca95e0b2c6bdf53ff768bbe916fbe4d" +checksum = "16320d4a2021ba1a32470b3759676114a918885e9800e68ad60f2c67969fba62" dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -10395,7 +10406,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -10472,7 +10483,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -10496,22 +10507,22 @@ checksum = "a38c90d48152c236a3ab59271da4f4ae63d678c5d7ad6b7714d7cb9760be5e4b" [[package]] name = "thiserror" -version = "1.0.65" +version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d11abd9594d9b38965ef50805c5e469ca9cc6f197f883f717e0269a3057b3d5" +checksum = "5d171f59dbaa811dbbb1aee1e73db92ec2b122911a48e1390dfe327a821ddede" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.65" +version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae71770322cbd277e69d762a16c444af02aa0575ac0d174f0b9562d3b37f8602" +checksum = "b08be0f17bd307950653ce45db00cd31200d82b624b36e181337d9c7d92765b5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -10688,7 +10699,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -10889,7 +10900,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -11280,7 +11291,7 @@ checksum = "d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -11351,7 +11362,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", "wasm-bindgen-shared", ] @@ -11385,7 +11396,7 @@ checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11541,7 +11552,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] 
[[package]] @@ -11552,7 +11563,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -11563,7 +11574,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -11574,7 +11585,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -11849,7 +11860,7 @@ checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", "synstructure", ] @@ -11871,7 +11882,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -11891,7 +11902,7 @@ checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", "synstructure", ] @@ -11912,7 +11923,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] @@ -11934,7 +11945,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.85", + "syn 2.0.86", ] [[package]] diff --git a/bin/reth/src/commands/debug_cmd/build_block.rs b/bin/reth/src/commands/debug_cmd/build_block.rs index a2dfb5ab3..0559d473f 100644 --- a/bin/reth/src/commands/debug_cmd/build_block.rs +++ b/bin/reth/src/commands/debug_cmd/build_block.rs @@ -26,7 +26,6 @@ use reth_node_api::{ EngineApiMessageVersion, NodeTypesWithDB, NodeTypesWithEngine, PayloadBuilderAttributes, }; use reth_node_ethereum::{EthEvmConfig, EthExecutorProvider}; -use reth_payload_builder::database::CachedReads; use reth_primitives::{ revm_primitives::KzgSettings, BlobTransaction, BlobTransactionSidecar, PooledTransactionsElement, SealedBlock, SealedBlockWithSenders, SealedHeader, Transaction, @@ -36,7 +35,7 @@ use reth_provider::{ providers::BlockchainProvider, BlockHashReader, BlockReader, BlockWriter, ChainSpecProvider, ProviderFactory, StageCheckpointReader, StateProviderFactory, }; -use reth_revm::{database::StateProviderDatabase, primitives::EnvKzgSettings}; +use reth_revm::{cached::CachedReads, database::StateProviderDatabase, primitives::EnvKzgSettings}; use reth_stages::StageId; use reth_transaction_pool::{ blobstore::InMemoryBlobStore, BlobStore, EthPooledTransaction, PoolConfig, TransactionOrigin, diff --git a/crates/payload/basic/Cargo.toml b/crates/payload/basic/Cargo.toml index 88ab99272..74dea45d1 100644 --- a/crates/payload/basic/Cargo.toml +++ b/crates/payload/basic/Cargo.toml @@ -21,6 +21,7 @@ reth-payload-builder.workspace = true reth-payload-primitives.workspace = true reth-tasks.workspace = true reth-evm.workspace = true +reth-revm.workspace=true # ethereum alloy-rlp.workspace = true diff --git a/crates/payload/basic/src/lib.rs b/crates/payload/basic/src/lib.rs index bb8dc0ef6..7b1de980c 100644 --- a/crates/payload/basic/src/lib.rs +++ b/crates/payload/basic/src/lib.rs @@ -16,9 +16,7 @@ use futures_core::ready; use futures_util::FutureExt; use reth_chainspec::{ChainSpec, EthereumHardforks}; use reth_evm::state_change::post_block_withdrawals_balance_increments; -use reth_payload_builder::{ - 
database::CachedReads, KeepPayloadJobAlive, PayloadId, PayloadJob, PayloadJobGenerator, -}; +use reth_payload_builder::{KeepPayloadJobAlive, PayloadId, PayloadJob, PayloadJobGenerator}; use reth_payload_primitives::{ BuiltPayload, PayloadBuilderAttributes, PayloadBuilderError, PayloadKind, }; @@ -28,6 +26,7 @@ use reth_primitives::{ use reth_provider::{ BlockReaderIdExt, BlockSource, CanonStateNotification, ProviderError, StateProviderFactory, }; +use reth_revm::cached::CachedReads; use reth_tasks::TaskSpawner; use reth_transaction_pool::TransactionPool; use revm::{Database, State}; diff --git a/crates/payload/builder/Cargo.toml b/crates/payload/builder/Cargo.toml index 3b71011e0..08399b6f9 100644 --- a/crates/payload/builder/Cargo.toml +++ b/crates/payload/builder/Cargo.toml @@ -13,15 +13,15 @@ workspace = true [dependencies] # reth -reth-primitives.workspace = true +reth-primitives = { workspace = true, optional = true } reth-provider.workspace = true reth-payload-primitives.workspace = true reth-ethereum-engine-primitives.workspace = true reth-chain-state = { workspace = true, optional = true } # alloy +alloy-primitives = { workspace = true, optional = true } alloy-rpc-types = { workspace = true, features = ["engine"] } -alloy-primitives.workspace = true # async async-trait.workspace = true @@ -37,12 +37,16 @@ metrics.workspace = true tracing.workspace = true [dev-dependencies] +reth-primitives.workspace = true +reth-chain-state.workspace = true +alloy-primitives.workspace = true revm.workspace = true [features] test-utils = [ + "alloy-primitives", "reth-chain-state", - "reth-chain-state?/test-utils", + "reth-chain-state/test-utils", "reth-primitives/test-utils", "reth-provider/test-utils", "revm/test-utils" diff --git a/crates/payload/builder/src/lib.rs b/crates/payload/builder/src/lib.rs index 7af61ac4c..2c46a4a9e 100644 --- a/crates/payload/builder/src/lib.rs +++ b/crates/payload/builder/src/lib.rs @@ -101,7 +101,6 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -pub mod database; mod metrics; mod service; mod traits; diff --git a/crates/payload/builder/src/database.rs b/crates/revm/src/cached.rs similarity index 96% rename from crates/payload/builder/src/database.rs rename to crates/revm/src/cached.rs index d63f7322d..b0eac39c4 100644 --- a/crates/payload/builder/src/database.rs +++ b/crates/revm/src/cached.rs @@ -1,13 +1,13 @@ //! Database adapters for payload building. -use alloy_primitives::{Address, B256, U256}; +use alloy_primitives::{ + map::{Entry, HashMap}, + Address, B256, U256, +}; +use core::cell::RefCell; use reth_primitives::revm_primitives::{ db::{Database, DatabaseRef}, AccountInfo, Bytecode, }; -use std::{ - cell::RefCell, - collections::{hash_map::Entry, HashMap}, -}; /// A container type that caches reads from an underlying [`DatabaseRef`]. /// @@ -17,7 +17,7 @@ use std::{ /// # Example /// /// ``` -/// use reth_payload_builder::database::CachedReads; +/// use reth_revm::cached::CachedReads; /// use revm::db::{DatabaseRef, State}; /// /// fn build_payload(db: DB) { diff --git a/crates/revm/src/lib.rs b/crates/revm/src/lib.rs index 8b544a537..b06ee816f 100644 --- a/crates/revm/src/lib.rs +++ b/crates/revm/src/lib.rs @@ -11,11 +11,15 @@ extern crate alloc; +pub mod batch; + +/// Cache database that reads from an underlying [`DatabaseRef`]. +/// Database adapters for payload building. +pub mod cached; + /// Contains glue code for integrating reth database into revm's [Database]. 
pub mod database; -pub mod batch; - /// Common test helpers #[cfg(any(test, feature = "test-utils"))] pub mod test_utils; From d5a3a3a849265496af53b7f4fd460b572fb4fca3 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Fri, 1 Nov 2024 14:57:34 +0100 Subject: [PATCH 382/425] test(chain-state): add unit tests for `CanonStateNotification` (#12110) --- crates/chain-state/src/notifications.rs | 226 ++++++++++++++++++++++++ crates/evm/execution-types/src/chain.rs | 2 +- 2 files changed, 227 insertions(+), 1 deletion(-) diff --git a/crates/chain-state/src/notifications.rs b/crates/chain-state/src/notifications.rs index fc717314b..6e24bcbb4 100644 --- a/crates/chain-state/src/notifications.rs +++ b/crates/chain-state/src/notifications.rs @@ -190,3 +190,229 @@ impl Stream for ForkChoiceStream { } } } + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::B256; + use reth_execution_types::ExecutionOutcome; + use reth_primitives::{Receipt, Receipts, TransactionSigned, TxType}; + + #[test] + fn test_commit_notification() { + let block = SealedBlockWithSenders::default(); + let block1_hash = B256::new([0x01; 32]); + let block2_hash = B256::new([0x02; 32]); + + let mut block1 = block.clone(); + block1.set_block_number(1); + block1.set_hash(block1_hash); + + let mut block2 = block; + block2.set_block_number(2); + block2.set_hash(block2_hash); + + let chain = Arc::new(Chain::new( + vec![block1.clone(), block2.clone()], + ExecutionOutcome::default(), + None, + )); + + // Create a commit notification + let notification = CanonStateNotification::Commit { new: chain.clone() }; + + // Test that `committed` returns the correct chain + assert_eq!(notification.committed(), chain); + + // Test that `reverted` returns None for `Commit` + assert!(notification.reverted().is_none()); + + // Test that `tip` returns the correct block + assert_eq!(*notification.tip(), block2); + } + + #[test] + fn test_reorg_notification() { + let block = SealedBlockWithSenders::default(); + let block1_hash = B256::new([0x01; 32]); + let block2_hash = B256::new([0x02; 32]); + let block3_hash = B256::new([0x03; 32]); + + let mut block1 = block.clone(); + block1.set_block_number(1); + block1.set_hash(block1_hash); + + let mut block2 = block.clone(); + block2.set_block_number(2); + block2.set_hash(block2_hash); + + let mut block3 = block; + block3.set_block_number(3); + block3.set_hash(block3_hash); + + let old_chain = + Arc::new(Chain::new(vec![block1.clone()], ExecutionOutcome::default(), None)); + let new_chain = Arc::new(Chain::new( + vec![block2.clone(), block3.clone()], + ExecutionOutcome::default(), + None, + )); + + // Create a reorg notification + let notification = + CanonStateNotification::Reorg { old: old_chain.clone(), new: new_chain.clone() }; + + // Test that `reverted` returns the old chain + assert_eq!(notification.reverted(), Some(old_chain)); + + // Test that `committed` returns the new chain + assert_eq!(notification.committed(), new_chain); + + // Test that `tip` returns the tip of the new chain (last block in the new chain) + assert_eq!(*notification.tip(), block3); + } + + #[test] + fn test_block_receipts_commit() { + // Create a default block instance for use in block definitions. + let block = SealedBlockWithSenders::default(); + + // Define unique hashes for two blocks to differentiate them in the chain. 
+ let block1_hash = B256::new([0x01; 32]); + let block2_hash = B256::new([0x02; 32]); + + // Create a default transaction to include in block1's transactions. + let tx = TransactionSigned::default(); + + // Create a clone of the default block and customize it to act as block1. + let mut block1 = block.clone(); + block1.set_block_number(1); + block1.set_hash(block1_hash); + // Add the transaction to block1's transactions. + block1.block.body.transactions.push(tx); + + // Clone the default block and customize it to act as block2. + let mut block2 = block; + block2.set_block_number(2); + block2.set_hash(block2_hash); + + // Create a receipt for the transaction in block1. + let receipt1 = Receipt { + tx_type: TxType::Legacy, + cumulative_gas_used: 12345, + logs: vec![], + success: true, + ..Default::default() + }; + + // Wrap the receipt in a `Receipts` structure, as expected in the `ExecutionOutcome`. + let receipts = Receipts { receipt_vec: vec![vec![Some(receipt1.clone())]] }; + + // Define an `ExecutionOutcome` with the created receipts. + let execution_outcome = ExecutionOutcome { receipts, ..Default::default() }; + + // Create a new chain segment with `block1` and `block2` and the execution outcome. + let new_chain = + Arc::new(Chain::new(vec![block1.clone(), block2.clone()], execution_outcome, None)); + + // Create a commit notification containing the new chain segment. + let notification = CanonStateNotification::Commit { new: new_chain }; + + // Call `block_receipts` on the commit notification to retrieve block receipts. + let block_receipts = notification.block_receipts(); + + // Assert that only one receipt entry exists in the `block_receipts` list. + assert_eq!(block_receipts.len(), 1); + + // Verify that the first entry matches block1's hash and transaction receipt. + assert_eq!( + block_receipts[0].0, + BlockReceipts { + block: block1.num_hash(), + tx_receipts: vec![(B256::default(), receipt1)] + } + ); + + // Assert that the receipt is from the committed segment (not reverted). + assert!(!block_receipts[0].1); + } + + #[test] + fn test_block_receipts_reorg() { + // Define block1 for the old chain segment, which will be reverted. + let mut old_block1 = SealedBlockWithSenders::default(); + old_block1.set_block_number(1); + old_block1.set_hash(B256::new([0x01; 32])); + old_block1.block.body.transactions.push(TransactionSigned::default()); + + // Create a receipt for a transaction in the reverted block. + let old_receipt = Receipt { + tx_type: TxType::Legacy, + cumulative_gas_used: 54321, + logs: vec![], + success: false, + ..Default::default() + }; + let old_receipts = Receipts { receipt_vec: vec![vec![Some(old_receipt.clone())]] }; + + let old_execution_outcome = + ExecutionOutcome { receipts: old_receipts, ..Default::default() }; + + // Create an old chain segment to be reverted, containing `old_block1`. + let old_chain = Arc::new(Chain::new(vec![old_block1.clone()], old_execution_outcome, None)); + + // Define block2 for the new chain segment, which will be committed. + let mut new_block1 = SealedBlockWithSenders::default(); + new_block1.set_block_number(2); + new_block1.set_hash(B256::new([0x02; 32])); + new_block1.block.body.transactions.push(TransactionSigned::default()); + + // Create a receipt for a transaction in the new committed block. 
+ let new_receipt = Receipt { + tx_type: TxType::Legacy, + cumulative_gas_used: 12345, + logs: vec![], + success: true, + ..Default::default() + }; + let new_receipts = Receipts { receipt_vec: vec![vec![Some(new_receipt.clone())]] }; + + let new_execution_outcome = + ExecutionOutcome { receipts: new_receipts, ..Default::default() }; + + // Create a new chain segment to be committed, containing `new_block1`. + let new_chain = Arc::new(Chain::new(vec![new_block1.clone()], new_execution_outcome, None)); + + // Create a reorg notification with both reverted (old) and committed (new) chain segments. + let notification = CanonStateNotification::Reorg { old: old_chain, new: new_chain }; + + // Retrieve receipts from both old (reverted) and new (committed) segments. + let block_receipts = notification.block_receipts(); + + // Assert there are two receipt entries, one from each chain segment. + assert_eq!(block_receipts.len(), 2); + + // Verify that the first entry matches old_block1 and its receipt from the reverted segment. + assert_eq!( + block_receipts[0].0, + BlockReceipts { + block: old_block1.num_hash(), + tx_receipts: vec![(B256::default(), old_receipt)] + } + ); + // Confirm this is from the reverted segment. + assert!(block_receipts[0].1); + + // Verify that the second entry matches new_block1 and its receipt from the committed + // segment. + assert_eq!( + block_receipts[1].0, + BlockReceipts { + block: new_block1.num_hash(), + tx_receipts: vec![(B256::default(), new_receipt)] + } + ); + // Confirm this is from the committed segment. + assert!(!block_receipts[1].1); + } +} diff --git a/crates/evm/execution-types/src/chain.rs b/crates/evm/execution-types/src/chain.rs index 65f96ff56..dc633e2d7 100644 --- a/crates/evm/execution-types/src/chain.rs +++ b/crates/evm/execution-types/src/chain.rs @@ -453,7 +453,7 @@ impl IntoIterator for ChainBlocks<'_> { } /// Used to hold receipts and their attachment. 
-#[derive(Default, Clone, Debug)] +#[derive(Default, Clone, Debug, PartialEq, Eq)] pub struct BlockReceipts { /// Block identifier pub block: BlockNumHash, From eaac2aa2cfb6ea5a61eea2d9a36a8b66b7854ce4 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 1 Nov 2024 15:17:31 +0100 Subject: [PATCH 383/425] chore: simplify cached db usage (#12242) --- crates/ethereum/payload/src/lib.rs | 5 ++--- crates/optimism/payload/src/builder.rs | 5 ++--- crates/revm/src/cached.rs | 24 +++++++++++++++++++----- 3 files changed, 23 insertions(+), 11 deletions(-) diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 8e92d0aa8..ed33292ef 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -161,7 +161,7 @@ where let state_provider = client.state_by_block_hash(config.parent_header.hash())?; let state = StateProviderDatabase::new(state_provider); let mut db = - State::builder().with_database_ref(cached_reads.as_db(state)).with_bundle_update().build(); + State::builder().with_database(cached_reads.as_db_mut(state)).with_bundle_update().build(); let PayloadConfig { parent_header, extra_data, attributes } = config; debug!(target: "payload_builder", id=%attributes.id, parent_header = ?parent_header.hash(), parent_number = parent_header.number, "building new payload"); @@ -372,8 +372,7 @@ where // calculate the state root let hashed_state = HashedPostState::from_bundle_state(&execution_outcome.state().state); let (state_root, trie_output) = { - let state_provider = db.database.0.inner.borrow_mut(); - state_provider.db.state_root_with_updates(hashed_state.clone()).inspect_err(|err| { + db.database.inner().state_root_with_updates(hashed_state.clone()).inspect_err(|err| { warn!(target: "payload_builder", parent_hash=%parent_header.hash(), %err, diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 09a443d0f..cae2d34bd 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -170,7 +170,7 @@ where let state_provider = client.state_by_block_hash(config.parent_header.hash())?; let state = StateProviderDatabase::new(state_provider); let mut db = - State::builder().with_database_ref(cached_reads.as_db(state)).with_bundle_update().build(); + State::builder().with_database(cached_reads.as_db_mut(state)).with_bundle_update().build(); let PayloadConfig { parent_header, attributes, mut extra_data } = config; debug!(target: "payload_builder", id=%attributes.payload_attributes.payload_id(), parent_header = ?parent_header.hash(), parent_number = parent_header.number, "building new payload"); @@ -445,8 +445,7 @@ where // calculate the state root let hashed_state = HashedPostState::from_bundle_state(&execution_outcome.state().state); let (state_root, trie_output) = { - let state_provider = db.database.0.inner.borrow_mut(); - state_provider.db.state_root_with_updates(hashed_state.clone()).inspect_err(|err| { + db.database.inner().state_root_with_updates(hashed_state.clone()).inspect_err(|err| { warn!(target: "payload_builder", parent_header=%parent_header.hash(), %err, diff --git a/crates/revm/src/cached.rs b/crates/revm/src/cached.rs index b0eac39c4..2152ca5bd 100644 --- a/crates/revm/src/cached.rs +++ b/crates/revm/src/cached.rs @@ -22,10 +22,10 @@ use reth_primitives::revm_primitives::{ /// /// fn build_payload(db: DB) { /// let mut cached_reads = CachedReads::default(); -/// let db_ref = cached_reads.as_db(db); -/// // this is `Database` and can be used to build a 
payload, it never writes to `CachedReads` or the underlying database, but all reads from the underlying database are cached in `CachedReads`. +/// let db = cached_reads.as_db_mut(db); +/// // this is `Database` and can be used to build a payload, it never commits to `CachedReads` or the underlying database, but all reads from the underlying database are cached in `CachedReads`. /// // Subsequent payload build attempts can use cached reads and avoid hitting the underlying database. -/// let db = State::builder().with_database_ref(db_ref).build(); +/// let state = State::builder().with_database(db).build(); /// } /// ``` #[derive(Debug, Clone, Default)] @@ -40,10 +40,11 @@ pub struct CachedReads { impl CachedReads { /// Gets a [`DatabaseRef`] that will cache reads from the given database. pub fn as_db(&mut self, db: DB) -> CachedReadsDBRef<'_, DB> { - CachedReadsDBRef { inner: RefCell::new(self.as_db_mut(db)) } + self.as_db_mut(db).into_db() } - fn as_db_mut(&mut self, db: DB) -> CachedReadsDbMut<'_, DB> { + /// Gets a mutable [`Database`] that will cache reads from the underlying database. + pub fn as_db_mut(&mut self, db: DB) -> CachedReadsDbMut<'_, DB> { CachedReadsDbMut { cached: self, db } } @@ -67,6 +68,19 @@ pub struct CachedReadsDbMut<'a, DB> { pub db: DB, } +impl<'a, DB> CachedReadsDbMut<'a, DB> { + /// Converts this [`Database`] implementation into a [`DatabaseRef`] that will still cache + /// reads. + pub const fn into_db(self) -> CachedReadsDBRef<'a, DB> { + CachedReadsDBRef { inner: RefCell::new(self) } + } + + /// Returns access to wrapped [`DatabaseRef`]. + pub const fn inner(&self) -> &DB { + &self.db + } +} + impl Database for CachedReadsDbMut<'_, DB> { type Error = ::Error; From d8100012798c540f71266e757bc5dbcddd508b63 Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Fri, 1 Nov 2024 09:34:48 -0600 Subject: [PATCH 384/425] renamed OptimismNetworkBuilder to OpNetworkBuilder (#12255) --- crates/optimism/node/src/node.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 925e7204c..56487fb24 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -68,7 +68,7 @@ impl OptimismNode { Node, OpPoolBuilder, OpPayloadBuilder, - OptimismNetworkBuilder, + OpNetworkBuilder, OptimismExecutorBuilder, OptimismConsensusBuilder, > @@ -82,7 +82,7 @@ impl OptimismNode { .node_types::() .pool(OpPoolBuilder::default()) .payload(OpPayloadBuilder::new(compute_pending_block)) - .network(OptimismNetworkBuilder { + .network(OpNetworkBuilder { disable_txpool_gossip, disable_discovery_v4: !discovery_v4, }) @@ -101,7 +101,7 @@ where N, OpPoolBuilder, OpPayloadBuilder, - OptimismNetworkBuilder, + OpNetworkBuilder, OptimismExecutorBuilder, OptimismConsensusBuilder, >; @@ -365,17 +365,17 @@ where /// A basic optimism network builder. #[derive(Debug, Default, Clone)] -pub struct OptimismNetworkBuilder { +pub struct OpNetworkBuilder { /// Disable transaction pool gossip pub disable_txpool_gossip: bool, /// Disable discovery v4 pub disable_discovery_v4: bool, } -impl OptimismNetworkBuilder { +impl OpNetworkBuilder { /// Returns the [`NetworkConfig`] that contains the settings to launch the p2p network. /// - /// This applies the configured [`OptimismNetworkBuilder`] settings. + /// This applies the configured [`OpNetworkBuilder`] settings. 
pub fn network_config( &self, ctx: &BuilderContext, @@ -420,7 +420,7 @@ impl OptimismNetworkBuilder { } } -impl NetworkBuilder for OptimismNetworkBuilder +impl NetworkBuilder for OpNetworkBuilder where Node: FullNodeTypes>, Pool: TransactionPool + Unpin + 'static, From 41c4bab0f781f7b67fbc430797ff96839149c603 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 1 Nov 2024 17:02:49 +0100 Subject: [PATCH 385/425] chore: use deref directly (#12256) --- crates/rpc/rpc-eth-api/src/helpers/pending_block.rs | 4 +--- crates/rpc/rpc-eth-types/src/simulate.rs | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs index f2d141613..a0065d793 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs @@ -399,9 +399,7 @@ pub trait LoadPendingBlock: execution_outcome.block_logs_bloom(block_number).expect("Block is present"); // calculate the state root - let state_provider = &db.database; - let state_root = - state_provider.state_root(hashed_state).map_err(Self::Error::from_eth_err)?; + let state_root = db.database.state_root(hashed_state).map_err(Self::Error::from_eth_err)?; // create the block header let transactions_root = calculate_transaction_root(&executed_txs); diff --git a/crates/rpc/rpc-eth-types/src/simulate.rs b/crates/rpc/rpc-eth-types/src/simulate.rs index 4249c78fe..62f0e24b1 100644 --- a/crates/rpc/rpc-eth-types/src/simulate.rs +++ b/crates/rpc/rpc-eth-types/src/simulate.rs @@ -274,7 +274,7 @@ pub fn build_block( } } - let state_root = db.db.0.state_root(hashed_state)?; + let state_root = db.db.state_root(hashed_state)?; let header = reth_primitives::Header { beneficiary: block_env.coinbase, From bc69f6348f553802c94a774e0305ca57b018aaf9 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 1 Nov 2024 18:08:41 +0100 Subject: [PATCH 386/425] feat: add asref impls (#12257) --- crates/revm/src/cached.rs | 9 +++++++++ crates/revm/src/database.rs | 6 ++++++ 2 files changed, 15 insertions(+) diff --git a/crates/revm/src/cached.rs b/crates/revm/src/cached.rs index 2152ca5bd..807b163c4 100644 --- a/crates/revm/src/cached.rs +++ b/crates/revm/src/cached.rs @@ -81,6 +81,15 @@ impl<'a, DB> CachedReadsDbMut<'a, DB> { } } +impl AsRef for CachedReadsDbMut<'_, DB> +where + DB: AsRef, +{ + fn as_ref(&self) -> &T { + self.inner().as_ref() + } +} + impl Database for CachedReadsDbMut<'_, DB> { type Error = ::Error; diff --git a/crates/revm/src/database.rs b/crates/revm/src/database.rs index 5f662fea7..682aca6cf 100644 --- a/crates/revm/src/database.rs +++ b/crates/revm/src/database.rs @@ -79,6 +79,12 @@ impl StateProviderDatabase { } } +impl AsRef for StateProviderDatabase { + fn as_ref(&self) -> &DB { + self + } +} + impl Deref for StateProviderDatabase { type Target = DB; From 969ca3e63b2203abf7537808bf3513971ebf210b Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Sat, 2 Nov 2024 02:14:22 +0900 Subject: [PATCH 387/425] fix: check hashed state for loading `TriePrefixSets::destroyed_accounts` (#12235) --- crates/trie/db/src/prefix_set.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/trie/db/src/prefix_set.rs b/crates/trie/db/src/prefix_set.rs index 079fe3937..cd50503bc 100644 --- a/crates/trie/db/src/prefix_set.rs +++ b/crates/trie/db/src/prefix_set.rs @@ -36,13 +36,13 @@ impl PrefixSetLoader<'_, TX> { // Walk account changeset and insert account 
prefixes. let mut account_changeset_cursor = self.cursor_read::()?; - let mut account_plain_state_cursor = self.cursor_read::()?; + let mut account_hashed_state_cursor = self.cursor_read::()?; for account_entry in account_changeset_cursor.walk_range(range.clone())? { let (_, AccountBeforeTx { address, .. }) = account_entry?; let hashed_address = keccak256(address); account_prefix_set.insert(Nibbles::unpack(hashed_address)); - if account_plain_state_cursor.seek_exact(address)?.is_none() { + if account_hashed_state_cursor.seek_exact(hashed_address)?.is_none() { destroyed_accounts.insert(hashed_address); } } From 166a2346dcd2a9c8d5fea6995a9fc5e6426c62bb Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Fri, 1 Nov 2024 14:19:47 -0600 Subject: [PATCH 388/425] renamed OptimismExecutorBuilder to OpExecutorBuilder (#12258) --- crates/optimism/node/src/node.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 56487fb24..b4a4d2730 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -69,7 +69,7 @@ impl OptimismNode { OpPoolBuilder, OpPayloadBuilder, OpNetworkBuilder, - OptimismExecutorBuilder, + OpExecutorBuilder, OptimismConsensusBuilder, > where @@ -86,7 +86,7 @@ impl OptimismNode { disable_txpool_gossip, disable_discovery_v4: !discovery_v4, }) - .executor(OptimismExecutorBuilder::default()) + .executor(OpExecutorBuilder::default()) .consensus(OptimismConsensusBuilder::default()) } } @@ -102,7 +102,7 @@ where OpPoolBuilder, OpPayloadBuilder, OpNetworkBuilder, - OptimismExecutorBuilder, + OpExecutorBuilder, OptimismConsensusBuilder, >; @@ -179,9 +179,9 @@ where /// A regular optimism evm and executor builder. #[derive(Debug, Default, Clone, Copy)] #[non_exhaustive] -pub struct OptimismExecutorBuilder; +pub struct OpExecutorBuilder; -impl ExecutorBuilder for OptimismExecutorBuilder +impl ExecutorBuilder for OpExecutorBuilder where Node: FullNodeTypes>, { From fdf10a7dc26bdaf669341eb095f3a801b595b9a4 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 1 Nov 2024 21:58:59 +0100 Subject: [PATCH 389/425] test: make cargo t compile in codecs (#12261) --- crates/storage/codecs/src/alloy/mod.rs | 2 +- crates/storage/codecs/src/alloy/transaction/eip4844.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/storage/codecs/src/alloy/mod.rs b/crates/storage/codecs/src/alloy/mod.rs index f1bf6a00e..697bac901 100644 --- a/crates/storage/codecs/src/alloy/mod.rs +++ b/crates/storage/codecs/src/alloy/mod.rs @@ -7,7 +7,7 @@ macro_rules! 
cond_mod { #[cfg(feature = "test-utils")] pub mod $mod_name; #[cfg(not(feature = "test-utils"))] - mod $mod_name; + pub(crate) mod $mod_name; )* }; } diff --git a/crates/storage/codecs/src/alloy/transaction/eip4844.rs b/crates/storage/codecs/src/alloy/transaction/eip4844.rs index 5ec36e06b..c89e2b078 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip4844.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip4844.rs @@ -106,7 +106,7 @@ impl<'a> arbitrary::Arbitrary<'a> for TxEip4844 { } } -#[cfg(any(test, feature = "test-utils"))] +#[cfg(feature = "test-utils")] fn serialize_placeholder(value: &Option<()>, serializer: S) -> Result where S: serde::Serializer, @@ -119,7 +119,7 @@ where } } -#[cfg(any(test, feature = "test-utils"))] +#[cfg(feature = "test-utils")] fn deserialize_placeholder<'de, D>(deserializer: D) -> Result, D::Error> where D: serde::Deserializer<'de>, From a911104fe9ccb04c0946e0b11ac14337eec6b7e0 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 1 Nov 2024 22:03:17 +0100 Subject: [PATCH 390/425] test: make cargo t compile in db-models (#12263) --- Cargo.lock | 2 +- crates/storage/db-models/Cargo.toml | 10 ++++++---- crates/storage/db-models/src/accounts.rs | 2 +- crates/storage/db-models/src/blocks.rs | 2 +- 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4c469cd21..6491a47eb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6920,7 +6920,7 @@ dependencies = [ "proptest", "proptest-arbitrary-interop", "reth-codecs", - "reth-primitives", + "reth-primitives-traits", "serde", "test-fuzz", ] diff --git a/crates/storage/db-models/Cargo.toml b/crates/storage/db-models/Cargo.toml index d5f773347..44b291959 100644 --- a/crates/storage/db-models/Cargo.toml +++ b/crates/storage/db-models/Cargo.toml @@ -14,7 +14,7 @@ workspace = true [dependencies] # reth reth-codecs.workspace = true -reth-primitives = { workspace = true, features = ["reth-codec"] } +reth-primitives-traits.workspace = true # ethereum alloy-primitives.workspace = true @@ -32,20 +32,22 @@ proptest = { workspace = true, optional = true } [dev-dependencies] # reth -reth-primitives = { workspace = true, features = ["arbitrary"] } +reth-primitives-traits = { workspace = true, features = ["arbitrary"] } reth-codecs.workspace = true +arbitrary = { workspace = true, features = ["derive"] } +proptest.workspace = true proptest-arbitrary-interop.workspace = true test-fuzz.workspace = true [features] test-utils = [ + "reth-primitives-traits/test-utils", "arbitrary", - "reth-primitives/test-utils", "reth-codecs/test-utils" ] arbitrary = [ - "reth-primitives/arbitrary", + "reth-primitives-traits/arbitrary", "dep:arbitrary", "dep:proptest", "alloy-primitives/arbitrary", diff --git a/crates/storage/db-models/src/accounts.rs b/crates/storage/db-models/src/accounts.rs index acfd45fe3..29a5cf305 100644 --- a/crates/storage/db-models/src/accounts.rs +++ b/crates/storage/db-models/src/accounts.rs @@ -2,7 +2,7 @@ use reth_codecs::{add_arbitrary_tests, Compact}; use serde::Serialize; use alloy_primitives::{bytes::Buf, Address}; -use reth_primitives::Account; +use reth_primitives_traits::Account; /// Account as it is saved in the database. 
/// diff --git a/crates/storage/db-models/src/blocks.rs b/crates/storage/db-models/src/blocks.rs index 3e740a2e1..b4399dc1e 100644 --- a/crates/storage/db-models/src/blocks.rs +++ b/crates/storage/db-models/src/blocks.rs @@ -2,7 +2,7 @@ use std::ops::Range; use alloy_primitives::TxNumber; use reth_codecs::{add_arbitrary_tests, Compact}; -use reth_primitives::Withdrawals; +use reth_primitives_traits::Withdrawals; use serde::{Deserialize, Serialize}; /// Total number of transactions. From c72f11cc851073518d670960b2930b74aa36b9b5 Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Fri, 1 Nov 2024 15:34:14 -0600 Subject: [PATCH 391/425] renamed OptimismConsensusBuilder to OpConsensusBuilder (#12265) --- crates/optimism/node/src/node.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index b4a4d2730..6a1f94cb7 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -70,7 +70,7 @@ impl OptimismNode { OpPayloadBuilder, OpNetworkBuilder, OpExecutorBuilder, - OptimismConsensusBuilder, + OpConsensusBuilder, > where Node: FullNodeTypes< @@ -87,7 +87,7 @@ impl OptimismNode { disable_discovery_v4: !discovery_v4, }) .executor(OpExecutorBuilder::default()) - .consensus(OptimismConsensusBuilder::default()) + .consensus(OpConsensusBuilder::default()) } } @@ -103,7 +103,7 @@ where OpPayloadBuilder, OpNetworkBuilder, OpExecutorBuilder, - OptimismConsensusBuilder, + OpConsensusBuilder, >; type AddOns = OptimismAddOns< @@ -442,9 +442,9 @@ where /// A basic optimism consensus builder. #[derive(Debug, Default, Clone)] #[non_exhaustive] -pub struct OptimismConsensusBuilder; +pub struct OpConsensusBuilder; -impl ConsensusBuilder for OptimismConsensusBuilder +impl ConsensusBuilder for OpConsensusBuilder where Node: FullNodeTypes>, { From d8bbd36b2f081b3e7d4eb5c7cb90c4e8ab9343e0 Mon Sep 17 00:00:00 2001 From: Ryan Schneider Date: Fri, 1 Nov 2024 14:35:47 -0700 Subject: [PATCH 392/425] feat: flashbots_validateBuilderSubmissionV4 (#12243) --- crates/ethereum/node/tests/e2e/rpc.rs | 90 +++++++++++++++++++++++++-- crates/rpc/rpc-api/src/lib.rs | 5 +- crates/rpc/rpc-api/src/validation.rs | 24 +++++++ crates/rpc/rpc/src/validation.rs | 38 ++++++++++- 4 files changed, 149 insertions(+), 8 deletions(-) diff --git a/crates/ethereum/node/tests/e2e/rpc.rs b/crates/ethereum/node/tests/e2e/rpc.rs index c8b127b9b..1f7ac32e0 100644 --- a/crates/ethereum/node/tests/e2e/rpc.rs +++ b/crates/ethereum/node/tests/e2e/rpc.rs @@ -2,12 +2,15 @@ use crate::utils::eth_payload_attributes; use alloy_eips::{calc_next_block_base_fee, eip2718::Encodable2718}; use alloy_primitives::{Address, B256, U256}; use alloy_provider::{network::EthereumWallet, Provider, ProviderBuilder, SendableTx}; -use alloy_rpc_types_beacon::relay::{BidTrace, SignedBidSubmissionV3}; +use alloy_rpc_types_beacon::relay::{BidTrace, SignedBidSubmissionV3, SignedBidSubmissionV4}; use rand::{rngs::StdRng, Rng, SeedableRng}; -use reth::rpc::{ - api::BuilderBlockValidationRequestV3, - compat::engine::payload::block_to_payload_v3, - types::{engine::BlobsBundleV1, TransactionRequest}, +use reth::{ + payload::BuiltPayload, + rpc::{ + api::{BuilderBlockValidationRequestV3, BuilderBlockValidationRequestV4}, + compat::engine::payload::block_to_payload_v3, + types::{engine::BlobsBundleV1, TransactionRequest}, + }, }; use reth_chainspec::{ChainSpecBuilder, MAINNET}; use reth_e2e_test_utils::setup_engine; @@ -115,7 
+118,7 @@ async fn test_fee_history() -> eyre::Result<()> { } #[tokio::test] -async fn test_flashbots_validate() -> eyre::Result<()> { +async fn test_flashbots_validate_v3() -> eyre::Result<()> { reth_tracing::init_test_tracing(); let chain_spec = Arc::new( @@ -187,3 +190,78 @@ async fn test_flashbots_validate() -> eyre::Result<()> { .is_err()); Ok(()) } + +#[tokio::test] +async fn test_flashbots_validate_v4() -> eyre::Result<()> { + reth_tracing::init_test_tracing(); + + let chain_spec = Arc::new( + ChainSpecBuilder::default() + .chain(MAINNET.chain) + .genesis(serde_json::from_str(include_str!("../assets/genesis.json")).unwrap()) + .prague_activated() + .build(), + ); + + let (mut nodes, _tasks, wallet) = + setup_engine::(1, chain_spec.clone(), false, eth_payload_attributes).await?; + let mut node = nodes.pop().unwrap(); + let provider = ProviderBuilder::new() + .with_recommended_fillers() + .wallet(EthereumWallet::new(wallet.gen().swap_remove(0))) + .on_http(node.rpc_url()); + + node.advance(100, |_| { + let provider = provider.clone(); + Box::pin(async move { + let SendableTx::Envelope(tx) = + provider.fill(TransactionRequest::default().to(Address::ZERO)).await.unwrap() + else { + unreachable!() + }; + + tx.encoded_2718().into() + }) + }) + .await?; + + let _ = provider.send_transaction(TransactionRequest::default().to(Address::ZERO)).await?; + let (payload, attrs) = node.new_payload().await?; + + let mut request = BuilderBlockValidationRequestV4 { + request: SignedBidSubmissionV4 { + message: BidTrace { + parent_hash: payload.block().parent_hash, + block_hash: payload.block().hash(), + gas_used: payload.block().gas_used, + gas_limit: payload.block().gas_limit, + ..Default::default() + }, + execution_payload: block_to_payload_v3(payload.block().clone()), + blobs_bundle: BlobsBundleV1::new([]), + execution_requests: payload.requests().unwrap_or_default().to_vec(), + signature: Default::default(), + }, + parent_beacon_block_root: attrs.parent_beacon_block_root.unwrap(), + registered_gas_limit: payload.block().gas_limit, + }; + + provider + .raw_request::<_, ()>("flashbots_validateBuilderSubmissionV4".into(), (&request,)) + .await + .expect("request should validate"); + + request.registered_gas_limit -= 1; + assert!(provider + .raw_request::<_, ()>("flashbots_validateBuilderSubmissionV4".into(), (&request,)) + .await + .is_err()); + request.registered_gas_limit += 1; + + request.request.execution_payload.payload_inner.payload_inner.state_root = B256::ZERO; + assert!(provider + .raw_request::<_, ()>("flashbots_validateBuilderSubmissionV4".into(), (&request,)) + .await + .is_err()); + Ok(()) +} diff --git a/crates/rpc/rpc-api/src/lib.rs b/crates/rpc/rpc-api/src/lib.rs index 63e6e5446..0a4fa9f66 100644 --- a/crates/rpc/rpc-api/src/lib.rs +++ b/crates/rpc/rpc-api/src/lib.rs @@ -46,7 +46,10 @@ pub mod servers { rpc::RpcApiServer, trace::TraceApiServer, txpool::TxPoolApiServer, - validation::{BlockSubmissionValidationApiServer, BuilderBlockValidationRequestV3}, + validation::{ + BlockSubmissionValidationApiServer, BuilderBlockValidationRequestV3, + BuilderBlockValidationRequestV4, + }, web3::Web3ApiServer, }; pub use reth_rpc_eth_api::{ diff --git a/crates/rpc/rpc-api/src/validation.rs b/crates/rpc/rpc-api/src/validation.rs index d8f55b668..797eee7ae 100644 --- a/crates/rpc/rpc-api/src/validation.rs +++ b/crates/rpc/rpc-api/src/validation.rs @@ -3,6 +3,7 @@ use alloy_primitives::B256; use alloy_rpc_types_beacon::relay::{ BuilderBlockValidationRequest, BuilderBlockValidationRequestV2, 
SignedBidSubmissionV3, + SignedBidSubmissionV4, }; use jsonrpsee::proc_macros::rpc; use serde::{Deserialize, Serialize}; @@ -24,6 +25,22 @@ pub struct BuilderBlockValidationRequestV3 { pub parent_beacon_block_root: B256, } +/// A Request to validate a [`SignedBidSubmissionV4`] +/// +/// +#[serde_as] +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct BuilderBlockValidationRequestV4 { + /// The request to be validated. + #[serde(flatten)] + pub request: SignedBidSubmissionV4, + /// The registered gas limit for the validation request. + #[serde_as(as = "DisplayFromStr")] + pub registered_gas_limit: u64, + /// The parent beacon block root for the validation request. + pub parent_beacon_block_root: B256, +} + /// Block validation rpc interface. #[cfg_attr(not(feature = "client"), rpc(server, namespace = "flashbots"))] #[cfg_attr(feature = "client", rpc(server, client, namespace = "flashbots"))] @@ -48,4 +65,11 @@ pub trait BlockSubmissionValidationApi { &self, request: BuilderBlockValidationRequestV3, ) -> jsonrpsee::core::RpcResult<()>; + + /// A Request to validate a block submission. + #[method(name = "validateBuilderSubmissionV4")] + async fn validate_builder_submission_v4( + &self, + request: BuilderBlockValidationRequestV4, + ) -> jsonrpsee::core::RpcResult<()>; } diff --git a/crates/rpc/rpc/src/validation.rs b/crates/rpc/rpc/src/validation.rs index fe9d0eb44..1476180d4 100644 --- a/crates/rpc/rpc/src/validation.rs +++ b/crates/rpc/rpc/src/validation.rs @@ -20,7 +20,10 @@ use reth_provider::{ StateProviderFactory, WithdrawalsProvider, }; use reth_revm::database::StateProviderDatabase; -use reth_rpc_api::{BlockSubmissionValidationApiServer, BuilderBlockValidationRequestV3}; +use reth_rpc_api::{ + BlockSubmissionValidationApiServer, BuilderBlockValidationRequestV3, + BuilderBlockValidationRequestV4, +}; use reth_rpc_eth_types::EthApiError; use reth_rpc_server_types::{result::internal_rpc_err, ToRpcResult}; use reth_trie::HashedPostState; @@ -415,4 +418,37 @@ where .map_err(|e| RethError::Other(e.into())) .to_rpc_result() } + + /// Validates a block submitted to the relay + async fn validate_builder_submission_v4( + &self, + request: BuilderBlockValidationRequestV4, + ) -> RpcResult<()> { + let block = self + .payload_validator + .ensure_well_formed_payload( + ExecutionPayload::V3(request.request.execution_payload), + ExecutionPayloadSidecar::v4( + CancunPayloadFields { + parent_beacon_block_root: request.parent_beacon_block_root, + versioned_hashes: self + .validate_blobs_bundle(request.request.blobs_bundle) + .map_err(|e| RethError::Other(e.into())) + .to_rpc_result()?, + }, + request.request.execution_requests.into(), + ), + ) + .to_rpc_result()? 
+ .try_seal_with_senders() + .map_err(|_| EthApiError::InvalidTransactionSignature)?; + + self.validate_message_against_block( + block, + request.request.message, + request.registered_gas_limit, + ) + .map_err(|e| RethError::Other(e.into())) + .to_rpc_result() + } } From d7ead13bda6551958d5595cd20eac5a873debd48 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Sat, 2 Nov 2024 11:17:31 +0400 Subject: [PATCH 393/425] fix: clippy lints (#12269) --- .github/workflows/lint.yml | 4 ++-- crates/chain-state/src/notifications.rs | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 4723d8a4d..fa7b4f9f4 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -18,10 +18,10 @@ jobs: matrix: include: - type: ethereum - args: --bin reth --workspace --locked + args: --bin reth --workspace --lib --examples --tests --benches --locked features: "ethereum asm-keccak jemalloc jemalloc-prof min-error-logs min-warn-logs min-info-logs min-debug-logs min-trace-logs" - type: optimism - args: --bin op-reth --workspace --locked + args: --bin op-reth --workspace --lib --examples --tests --benches --locked features: "optimism asm-keccak jemalloc jemalloc-prof min-error-logs min-warn-logs min-info-logs min-debug-logs min-trace-logs" - type: book args: --manifest-path book/sources/Cargo.toml --workspace --bins diff --git a/crates/chain-state/src/notifications.rs b/crates/chain-state/src/notifications.rs index 6e24bcbb4..582e1d2a0 100644 --- a/crates/chain-state/src/notifications.rs +++ b/crates/chain-state/src/notifications.rs @@ -297,6 +297,7 @@ mod tests { block2.set_hash(block2_hash); // Create a receipt for the transaction in block1. + #[allow(clippy::needless_update)] let receipt1 = Receipt { tx_type: TxType::Legacy, cumulative_gas_used: 12345, @@ -346,6 +347,7 @@ mod tests { old_block1.block.body.transactions.push(TransactionSigned::default()); // Create a receipt for a transaction in the reverted block. + #[allow(clippy::needless_update)] let old_receipt = Receipt { tx_type: TxType::Legacy, cumulative_gas_used: 54321, @@ -368,6 +370,7 @@ mod tests { new_block1.block.body.transactions.push(TransactionSigned::default()); // Create a receipt for a transaction in the new committed block. 
+ #[allow(clippy::needless_update)] let new_receipt = Receipt { tx_type: TxType::Legacy, cumulative_gas_used: 12345, From 962fa6685bd5763f8a162a1f9f9e67a92c9548a3 Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Sat, 2 Nov 2024 01:41:11 -0600 Subject: [PATCH 394/425] Use Arc in SystemCaller (#12268) Co-authored-by: Matthias Seitz --- crates/engine/util/src/reorg.rs | 2 +- crates/ethereum/evm/src/execute.rs | 2 +- crates/evm/src/system_calls/mod.rs | 6 +++--- crates/optimism/evm/src/execute.rs | 2 +- crates/optimism/payload/src/builder.rs | 2 +- crates/payload/validator/src/lib.rs | 2 +- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/crates/engine/util/src/reorg.rs b/crates/engine/util/src/reorg.rs index 0d51d2dfa..69831389a 100644 --- a/crates/engine/util/src/reorg.rs +++ b/crates/engine/util/src/reorg.rs @@ -303,7 +303,7 @@ where let mut evm = evm_config.evm_with_env(&mut state, env); // apply eip-4788 pre block contract call - let mut system_caller = SystemCaller::new(evm_config.clone(), chain_spec); + let mut system_caller = SystemCaller::new(evm_config.clone(), chain_spec.clone()); system_caller.apply_beacon_root_contract_call( reorg_target.timestamp, diff --git a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs index f082a3a70..fa14e260d 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -95,7 +95,7 @@ where { /// Creates a new [`EthExecutionStrategy`] pub fn new(state: State, chain_spec: Arc, evm_config: EvmConfig) -> Self { - let system_caller = SystemCaller::new(evm_config.clone(), (*chain_spec).clone()); + let system_caller = SystemCaller::new(evm_config.clone(), chain_spec.clone()); Self { state, chain_spec, evm_config, system_caller } } } diff --git a/crates/evm/src/system_calls/mod.rs b/crates/evm/src/system_calls/mod.rs index daaf1d141..7fdb31d96 100644 --- a/crates/evm/src/system_calls/mod.rs +++ b/crates/evm/src/system_calls/mod.rs @@ -1,7 +1,7 @@ //! System contract call functions. use crate::ConfigureEvm; -use alloc::{boxed::Box, vec}; +use alloc::{boxed::Box, sync::Arc, vec}; use alloy_eips::eip7685::Requests; use alloy_primitives::Bytes; use core::fmt::Display; @@ -46,7 +46,7 @@ impl OnStateHook for NoopHook { #[allow(missing_debug_implementations)] pub struct SystemCaller { evm_config: EvmConfig, - chain_spec: Chainspec, + chain_spec: Arc, /// Optional hook to be called after each state change. hook: Option>, } @@ -54,7 +54,7 @@ pub struct SystemCaller { impl SystemCaller { /// Create a new system caller with the given EVM config, database, and chain spec, and creates /// the EVM with the given initialized config and block environment. 
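    /// After this change the chain spec is passed as an `Arc`, so call sites clone the handle
    /// instead of the spec itself. A minimal sketch, assuming `evm_config` and an `Arc`-wrapped
    /// `chain_spec` are in scope (this mirrors the call sites updated in this patch):
    ///
    /// let mut system_caller = SystemCaller::new(evm_config.clone(), chain_spec.clone());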
- pub const fn new(evm_config: EvmConfig, chain_spec: Chainspec) -> Self { + pub const fn new(evm_config: EvmConfig, chain_spec: Arc) -> Self { Self { evm_config, chain_spec, hook: None } } diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index 9c5db9d4b..c2b006144 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -90,7 +90,7 @@ where { /// Creates a new [`OpExecutionStrategy`] pub fn new(state: State, chain_spec: Arc, evm_config: EvmConfig) -> Self { - let system_caller = SystemCaller::new(evm_config.clone(), (*chain_spec).clone()); + let system_caller = SystemCaller::new(evm_config.clone(), chain_spec.clone()); Self { state, chain_spec, evm_config, system_caller } } } diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index cae2d34bd..0cf45835a 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -197,7 +197,7 @@ where chain_spec.is_regolith_active_at_timestamp(attributes.payload_attributes.timestamp); // apply eip-4788 pre block contract call - let mut system_caller = SystemCaller::new(evm_config.clone(), &chain_spec); + let mut system_caller = SystemCaller::new(evm_config.clone(), chain_spec.clone()); system_caller .pre_block_beacon_root_contract_call( diff --git a/crates/payload/validator/src/lib.rs b/crates/payload/validator/src/lib.rs index 38e53bac4..e74b5f48d 100644 --- a/crates/payload/validator/src/lib.rs +++ b/crates/payload/validator/src/lib.rs @@ -31,7 +31,7 @@ impl ExecutionPayloadValidator { /// Returns the chain spec used by the validator. #[inline] - pub fn chain_spec(&self) -> &ChainSpec { + pub const fn chain_spec(&self) -> &Arc { &self.chain_spec } } From 9593f4d08d25018eac0173fadff8768ce7d241b7 Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Sat, 2 Nov 2024 02:42:42 -0600 Subject: [PATCH 395/425] renamed OptimismGenesisInfo to OpGenesisInfo (#12270) --- crates/optimism/chainspec/src/lib.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/optimism/chainspec/src/lib.rs b/crates/optimism/chainspec/src/lib.rs index 8ad36c66a..3248625d6 100644 --- a/crates/optimism/chainspec/src/lib.rs +++ b/crates/optimism/chainspec/src/lib.rs @@ -351,7 +351,7 @@ impl OptimismHardforks for OpChainSpec {} impl From for OpChainSpec { fn from(genesis: Genesis) -> Self { use reth_optimism_forks::OptimismHardfork; - let optimism_genesis_info = OptimismGenesisInfo::extract_from(&genesis); + let optimism_genesis_info = OpGenesisInfo::extract_from(&genesis); let genesis_info = optimism_genesis_info.optimism_chain_info.genesis_info.unwrap_or_default(); @@ -441,12 +441,12 @@ impl From for OpChainSpec { } #[derive(Default, Debug)] -struct OptimismGenesisInfo { +struct OpGenesisInfo { optimism_chain_info: op_alloy_rpc_types::genesis::OpChainInfo, base_fee_params: BaseFeeParamsKind, } -impl OptimismGenesisInfo { +impl OpGenesisInfo { fn extract_from(genesis: &Genesis) -> Self { let mut info = Self { optimism_chain_info: op_alloy_rpc_types::genesis::OpChainInfo::extract_from( From dbdf60ba119c04df52dddba0e1e9a8f2843b063f Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Sat, 2 Nov 2024 03:22:58 -0600 Subject: [PATCH 396/425] renamed OptimismBeaconConsensus to OpBeaconConsensus (#12271) --- crates/optimism/consensus/src/lib.rs | 8 ++++---- crates/optimism/node/src/node.rs | 4 ++-- 2 files changed, 6 insertions(+), 6 
deletions(-) diff --git a/crates/optimism/consensus/src/lib.rs b/crates/optimism/consensus/src/lib.rs index 16c1d5d37..bf1428815 100644 --- a/crates/optimism/consensus/src/lib.rs +++ b/crates/optimism/consensus/src/lib.rs @@ -34,19 +34,19 @@ pub use validation::validate_block_post_execution; /// /// Provides basic checks as outlined in the execution specs. #[derive(Debug, Clone, PartialEq, Eq)] -pub struct OptimismBeaconConsensus { +pub struct OpBeaconConsensus { /// Configuration chain_spec: Arc, } -impl OptimismBeaconConsensus { - /// Create a new instance of [`OptimismBeaconConsensus`] +impl OpBeaconConsensus { + /// Create a new instance of [`OpBeaconConsensus`] pub const fn new(chain_spec: Arc) -> Self { Self { chain_spec } } } -impl Consensus for OptimismBeaconConsensus { +impl Consensus for OpBeaconConsensus { fn validate_header(&self, header: &SealedHeader) -> Result<(), ConsensusError> { validate_header_gas(header)?; validate_header_base_fee(header, &self.chain_spec) diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 6a1f94cb7..87266767d 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -19,7 +19,7 @@ use reth_node_builder::{ BuilderContext, Node, NodeAdapter, NodeComponentsBuilder, PayloadBuilderConfig, }; use reth_optimism_chainspec::OpChainSpec; -use reth_optimism_consensus::OptimismBeaconConsensus; +use reth_optimism_consensus::OpBeaconConsensus; use reth_optimism_evm::{OpExecutionStrategyFactory, OptimismEvmConfig}; use reth_optimism_rpc::OpEthApi; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; @@ -454,7 +454,7 @@ where if ctx.is_dev() { Ok(Arc::new(reth_auto_seal_consensus::AutoSealConsensus::new(ctx.chain_spec()))) } else { - Ok(Arc::new(OptimismBeaconConsensus::new(ctx.chain_spec()))) + Ok(Arc::new(OpBeaconConsensus::new(ctx.chain_spec()))) } } } From c74d2a06f2d84a96f779e62293ef90b3c984c56a Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Sat, 2 Nov 2024 11:30:17 +0100 Subject: [PATCH 397/425] chore(ci): unpin kurtosis (#12272) --- .github/workflows/kurtosis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/kurtosis.yml b/.github/workflows/kurtosis.yml index 43f5c3605..74d26dbd3 100644 --- a/.github/workflows/kurtosis.yml +++ b/.github/workflows/kurtosis.yml @@ -79,7 +79,6 @@ jobs: - name: Run kurtosis uses: ethpandaops/kurtosis-assertoor-github-action@v1 with: - kurtosis_version: 1.3.1 ethereum_package_args: '.github/assets/kurtosis_network_params.yaml' notify-on-error: From f2de55d8fe4988e8580c18a8302d642ce87a031d Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 2 Nov 2024 13:16:08 +0100 Subject: [PATCH 398/425] chore: make some fields owned (#12274) --- crates/node/api/src/node.rs | 8 ++++---- crates/node/builder/src/launch/engine.rs | 6 +++--- crates/node/builder/src/launch/mod.rs | 6 +++--- crates/node/builder/src/rpc.rs | 10 ++++++---- 4 files changed, 16 insertions(+), 14 deletions(-) diff --git a/crates/node/api/src/node.rs b/crates/node/api/src/node.rs index 3173fd2b3..b016e01c2 100644 --- a/crates/node/api/src/node.rs +++ b/crates/node/api/src/node.rs @@ -91,17 +91,17 @@ pub trait FullNodeComponents: FullNodeTypes + Clone + 'static { } /// Context passed to [`NodeAddOns::launch_add_ons`], -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct AddOnsContext<'a, N: FullNodeComponents> { /// Node with all configured components. - pub node: &'a N, + pub node: N, /// Node configuration. 
pub config: &'a NodeConfig<::ChainSpec>, /// Handle to the beacon consensus engine. pub beacon_engine_handle: - &'a BeaconConsensusEngineHandle<::Engine>, + BeaconConsensusEngineHandle<::Engine>, /// JWT secret for the node. - pub jwt_secret: &'a JwtSecret, + pub jwt_secret: JwtSecret, } /// Customizable node add-on types. diff --git a/crates/node/builder/src/launch/engine.rs b/crates/node/builder/src/launch/engine.rs index 3de651cdc..85401b8b9 100644 --- a/crates/node/builder/src/launch/engine.rs +++ b/crates/node/builder/src/launch/engine.rs @@ -286,10 +286,10 @@ where let jwt_secret = ctx.auth_jwt_secret()?; let add_ons_ctx = AddOnsContext { - node: ctx.node_adapter(), + node: ctx.node_adapter().clone(), config: ctx.node_config(), - beacon_engine_handle: &beacon_engine_handle, - jwt_secret: &jwt_secret, + beacon_engine_handle, + jwt_secret, }; let RpcHandle { rpc_server_handles, rpc_registry } = diff --git a/crates/node/builder/src/launch/mod.rs b/crates/node/builder/src/launch/mod.rs index 50438e79d..a623a7a9f 100644 --- a/crates/node/builder/src/launch/mod.rs +++ b/crates/node/builder/src/launch/mod.rs @@ -329,10 +329,10 @@ where let jwt_secret = ctx.auth_jwt_secret()?; let add_ons_ctx = AddOnsContext { - node: ctx.node_adapter(), + node: ctx.node_adapter().clone(), config: ctx.node_config(), - beacon_engine_handle: &beacon_engine_handle, - jwt_secret: &jwt_secret, + beacon_engine_handle, + jwt_secret, }; let RpcHandle { rpc_server_handles, rpc_registry } = diff --git a/crates/node/builder/src/rpc.rs b/crates/node/builder/src/rpc.rs index 8819aa4ac..8af1527cb 100644 --- a/crates/node/builder/src/rpc.rs +++ b/crates/node/builder/src/rpc.rs @@ -409,9 +409,11 @@ where type Handle = RpcHandle; async fn launch_add_ons(self, ctx: AddOnsContext<'_, N>) -> eyre::Result { - let AddOnsContext { node, config, beacon_engine_handle, jwt_secret } = ctx; let Self { eth_api_builder, engine_validator_builder, hooks, _pd: _ } = self; + let engine_validator = engine_validator_builder.build(&ctx).await?; + let AddOnsContext { node, config, beacon_engine_handle, jwt_secret } = ctx; + let client = ClientVersionV1 { code: CLIENT_CODE, name: NAME_CLIENT.to_string(), @@ -422,17 +424,17 @@ where let engine_api = EngineApi::new( node.provider().clone(), config.chain.clone(), - beacon_engine_handle.clone(), + beacon_engine_handle, node.payload_builder().clone().into(), node.pool().clone(), Box::new(node.task_executor().clone()), client, EngineCapabilities::default(), - engine_validator_builder.build(&ctx).await?, + engine_validator, ); info!(target: "reth::cli", "Engine API handler initialized"); - let auth_config = config.rpc.auth_server_config(*jwt_secret)?; + let auth_config = config.rpc.auth_server_config(jwt_secret)?; let module_config = config.rpc.transport_rpc_module_config(); debug!(target: "reth::cli", http=?module_config.http(), ws=?module_config.ws(), "Using RPC module config"); From 2aba40b17c57c6a6f6bd68f1fa32279d8e7b0d0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C3=A9a=20Narzis?= <78718413+lean-apple@users.noreply.github.com> Date: Sat, 2 Nov 2024 21:22:17 +0700 Subject: [PATCH 399/425] feat: add `CachedReads::extend` (#12277) Co-authored-by: Matthias Seitz --- crates/revm/src/cached.rs | 63 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/crates/revm/src/cached.rs b/crates/revm/src/cached.rs index 807b163c4..88a41e1d8 100644 --- a/crates/revm/src/cached.rs +++ b/crates/revm/src/cached.rs @@ -57,6 +57,15 @@ impl CachedReads { ) { self.accounts.insert(address, 
CachedAccount { info: Some(info), storage }); } + + /// Extends current cache with entries from another [`CachedReads`] instance. + /// + /// Note: It is expected that both instances are based on the exact same state. + pub fn extend(&mut self, other: Self) { + self.accounts.extend(other.accounts); + self.contracts.extend(other.contracts); + self.block_hashes.extend(other.block_hashes); + } } /// A [Database] that caches reads inside [`CachedReads`]. @@ -184,3 +193,57 @@ impl CachedAccount { Self { info, storage: HashMap::default() } } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_extend_with_two_cached_reads() { + // Setup test data + let hash1 = B256::from_slice(&[1u8; 32]); + let hash2 = B256::from_slice(&[2u8; 32]); + let address1 = Address::from_slice(&[1u8; 20]); + let address2 = Address::from_slice(&[2u8; 20]); + + // Create primary cache + let mut primary = { + let mut cache = CachedReads::default(); + cache.accounts.insert(address1, CachedAccount::new(Some(AccountInfo::default()))); + cache.contracts.insert(hash1, Bytecode::default()); + cache.block_hashes.insert(1, hash1); + cache + }; + + // Create additional cache + let additional = { + let mut cache = CachedReads::default(); + cache.accounts.insert(address2, CachedAccount::new(Some(AccountInfo::default()))); + cache.contracts.insert(hash2, Bytecode::default()); + cache.block_hashes.insert(2, hash2); + cache + }; + + // Extending primary with additional cache + primary.extend(additional); + + // Verify the combined state + assert!( + primary.accounts.len() == 2 && + primary.contracts.len() == 2 && + primary.block_hashes.len() == 2, + "All maps should contain 2 entries" + ); + + // Verify specific entries + assert!( + primary.accounts.contains_key(&address1) && + primary.accounts.contains_key(&address2) && + primary.contracts.contains_key(&hash1) && + primary.contracts.contains_key(&hash2) && + primary.block_hashes.get(&1) == Some(&hash1) && + primary.block_hashes.get(&2) == Some(&hash2), + "All expected entries should be present" + ); + } +} From af38a1a2d0631d0f37c4da29d6aa8f86f7dec0d9 Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Sat, 2 Nov 2024 12:04:25 -0600 Subject: [PATCH 400/425] renamed OptimismPayloadTypes to OpPayloadTypes (#12281) --- crates/optimism/node/src/engine.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/optimism/node/src/engine.rs b/crates/optimism/node/src/engine.rs index 0f48dc347..af517b00e 100644 --- a/crates/optimism/node/src/engine.rs +++ b/crates/optimism/node/src/engine.rs @@ -22,7 +22,7 @@ use reth_optimism_payload_builder::{ /// The types used in the optimism beacon consensus engine. 
#[derive(Debug, Default, Clone, serde::Deserialize, serde::Serialize)] #[non_exhaustive] -pub struct OptimismEngineTypes { +pub struct OptimismEngineTypes { _marker: std::marker::PhantomData, } @@ -48,9 +48,9 @@ where /// A default payload type for [`OptimismEngineTypes`] #[derive(Debug, Default, Clone, serde::Deserialize, serde::Serialize)] #[non_exhaustive] -pub struct OptimismPayloadTypes; +pub struct OpPayloadTypes; -impl PayloadTypes for OptimismPayloadTypes { +impl PayloadTypes for OpPayloadTypes { type BuiltPayload = OpBuiltPayload; type PayloadAttributes = OpPayloadAttributes; type PayloadBuilderAttributes = OpPayloadBuilderAttributes; From adff5a9429e2f5f65e0b183526a554664161b4f4 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Sat, 2 Nov 2024 22:14:10 +0400 Subject: [PATCH 401/425] feat: remove AnyNetwork usage (#12280) --- Cargo.lock | 5 - crates/optimism/rpc/src/eth/receipt.rs | 96 +++------ crates/optimism/rpc/src/eth/transaction.rs | 2 +- crates/rpc/rpc-builder/Cargo.toml | 5 - crates/rpc/rpc-builder/src/lib.rs | 9 +- crates/rpc/rpc-builder/tests/it/http.rs | 7 +- crates/rpc/rpc-builder/tests/it/middleware.rs | 2 +- crates/rpc/rpc-eth-api/src/types.rs | 15 +- crates/rpc/rpc-eth-types/Cargo.toml | 1 - crates/rpc/rpc-eth-types/src/lib.rs | 2 +- crates/rpc/rpc-eth-types/src/receipt.rs | 186 ++++++++++-------- crates/rpc/rpc-types-compat/Cargo.toml | 3 +- .../rpc-types-compat/src/transaction/mod.rs | 25 +-- crates/rpc/rpc/src/eth/core.rs | 5 +- crates/rpc/rpc/src/eth/helpers/block.rs | 10 +- crates/rpc/rpc/src/eth/helpers/receipt.rs | 5 +- crates/rpc/rpc/src/eth/helpers/types.rs | 60 +++--- crates/rpc/rpc/src/eth/pubsub.rs | 19 +- 18 files changed, 183 insertions(+), 274 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6491a47eb..8aab2c0a8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8725,13 +8725,10 @@ dependencies = [ name = "reth-rpc-builder" version = "1.1.0" dependencies = [ - "alloy-network", "alloy-primitives", - "alloy-rpc-types", "alloy-rpc-types-engine", "alloy-rpc-types-eth", "alloy-rpc-types-trace", - "alloy-serde", "clap", "http", "jsonrpsee", @@ -8860,7 +8857,6 @@ dependencies = [ "alloy-primitives", "alloy-rpc-types", "alloy-rpc-types-eth", - "alloy-serde", "alloy-sol-types", "derive_more 1.0.0", "futures", @@ -8936,7 +8932,6 @@ dependencies = [ "alloy-rpc-types", "alloy-rpc-types-engine", "alloy-rpc-types-eth", - "alloy-serde", "reth-primitives", "reth-trie-common", "serde", diff --git a/crates/optimism/rpc/src/eth/receipt.rs b/crates/optimism/rpc/src/eth/receipt.rs index 2734fb545..f8e6b7fc2 100644 --- a/crates/optimism/rpc/src/eth/receipt.rs +++ b/crates/optimism/rpc/src/eth/receipt.rs @@ -1,7 +1,7 @@ //! Loads and formats OP receipt RPC response. 
use alloy_eips::eip2718::Encodable2718; -use alloy_rpc_types::{AnyReceiptEnvelope, Log, TransactionReceipt}; +use alloy_rpc_types::{Log, TransactionReceipt}; use op_alloy_consensus::{ DepositTransaction, OpDepositReceipt, OpDepositReceiptWithBloom, OpReceiptEnvelope, }; @@ -13,7 +13,7 @@ use reth_optimism_forks::OptimismHardforks; use reth_primitives::{Receipt, TransactionMeta, TransactionSigned, TxType}; use reth_provider::ChainSpecProvider; use reth_rpc_eth_api::{helpers::LoadReceipt, FromEthApiError, RpcReceipt}; -use reth_rpc_eth_types::{EthApiError, ReceiptBuilder}; +use reth_rpc_eth_types::{receipt::build_receipt, EthApiError}; use crate::{OpEthApi, OpEthApiError}; @@ -172,9 +172,7 @@ impl OpReceiptFieldsBuilder { #[derive(Debug)] pub struct OpReceiptBuilder { /// Core receipt, has all the fields of an L1 receipt and is the basis for the OP receipt. - pub core_receipt: TransactionReceipt>, - /// Transaction type. - pub tx_type: TxType, + pub core_receipt: TransactionReceipt>, /// Additional OP receipt fields. pub op_receipt_fields: OpTransactionReceiptFields, } @@ -189,11 +187,29 @@ impl OpReceiptBuilder { all_receipts: &[Receipt], l1_block_info: revm::L1BlockInfo, ) -> Result { - let ReceiptBuilder { base: core_receipt, .. } = - ReceiptBuilder::new(transaction, meta, receipt, all_receipts) - .map_err(OpEthApiError::Eth)?; - - let tx_type = transaction.tx_type(); + let core_receipt = + build_receipt(transaction, meta, receipt, all_receipts, |receipt_with_bloom| { + match receipt.tx_type { + TxType::Legacy => OpReceiptEnvelope::::Legacy(receipt_with_bloom), + TxType::Eip2930 => OpReceiptEnvelope::::Eip2930(receipt_with_bloom), + TxType::Eip1559 => OpReceiptEnvelope::::Eip1559(receipt_with_bloom), + TxType::Eip4844 => { + // TODO: unreachable + OpReceiptEnvelope::::Eip1559(receipt_with_bloom) + } + TxType::Eip7702 => OpReceiptEnvelope::::Eip7702(receipt_with_bloom), + TxType::Deposit => { + OpReceiptEnvelope::::Deposit(OpDepositReceiptWithBloom:: { + receipt: OpDepositReceipt:: { + inner: receipt_with_bloom.receipt, + deposit_nonce: receipt.deposit_nonce, + deposit_receipt_version: receipt.deposit_receipt_version, + }, + logs_bloom: receipt_with_bloom.logs_bloom, + }) + } + } + })?; let op_receipt_fields = OpReceiptFieldsBuilder::default() .l1_block_info(chain_spec, transaction, l1_block_info)? @@ -201,69 +217,15 @@ impl OpReceiptBuilder { .deposit_version(receipt.deposit_receipt_version) .build(); - Ok(Self { core_receipt, tx_type, op_receipt_fields }) + Ok(Self { core_receipt, op_receipt_fields }) } /// Builds [`OpTransactionReceipt`] by combing core (l1) receipt fields and additional OP /// receipt fields. pub fn build(self) -> OpTransactionReceipt { - let Self { core_receipt, tx_type, op_receipt_fields } = self; - - let OpTransactionReceiptFields { l1_block_info, deposit_nonce, deposit_receipt_version } = - op_receipt_fields; - - let TransactionReceipt { - inner: AnyReceiptEnvelope { inner: receipt_with_bloom, .. 
}, - transaction_hash, - transaction_index, - block_hash, - block_number, - gas_used, - effective_gas_price, - blob_gas_used, - blob_gas_price, - from, - to, - contract_address, - authorization_list, - } = core_receipt; - - let inner = match tx_type { - TxType::Legacy => OpReceiptEnvelope::::Legacy(receipt_with_bloom), - TxType::Eip2930 => OpReceiptEnvelope::::Eip2930(receipt_with_bloom), - TxType::Eip1559 => OpReceiptEnvelope::::Eip1559(receipt_with_bloom), - TxType::Eip4844 => { - // TODO: unreachable - OpReceiptEnvelope::::Eip1559(receipt_with_bloom) - } - TxType::Eip7702 => OpReceiptEnvelope::::Eip7702(receipt_with_bloom), - TxType::Deposit => { - OpReceiptEnvelope::::Deposit(OpDepositReceiptWithBloom:: { - receipt: OpDepositReceipt:: { - inner: receipt_with_bloom.receipt, - deposit_nonce, - deposit_receipt_version, - }, - logs_bloom: receipt_with_bloom.logs_bloom, - }) - } - }; + let Self { core_receipt: inner, op_receipt_fields } = self; - let inner = TransactionReceipt::> { - inner, - transaction_hash, - transaction_index, - block_hash, - block_number, - gas_used, - effective_gas_price, - blob_gas_used, - blob_gas_price, - from, - to, - contract_address, - authorization_list, - }; + let OpTransactionReceiptFields { l1_block_info, .. } = op_receipt_fields; OpTransactionReceipt { inner, l1_block_info } } diff --git a/crates/optimism/rpc/src/eth/transaction.rs b/crates/optimism/rpc/src/eth/transaction.rs index 3994afe19..6b5954391 100644 --- a/crates/optimism/rpc/src/eth/transaction.rs +++ b/crates/optimism/rpc/src/eth/transaction.rs @@ -86,7 +86,7 @@ where let signed_tx = tx.clone().into_signed(); let hash = tx.hash; - let mut inner = EthTxBuilder.fill(tx, tx_info).inner; + let mut inner = EthTxBuilder.fill(tx, tx_info); if signed_tx.is_deposit() { inner.gas_price = Some(signed_tx.max_fee_per_gas()) diff --git a/crates/rpc/rpc-builder/Cargo.toml b/crates/rpc/rpc-builder/Cargo.toml index 711e44381..2d10dabf8 100644 --- a/crates/rpc/rpc-builder/Cargo.toml +++ b/crates/rpc/rpc-builder/Cargo.toml @@ -31,11 +31,6 @@ reth-evm.workspace = true reth-engine-primitives.workspace = true reth-primitives.workspace = true -# ethereum -alloy-network.workspace = true -alloy-rpc-types.workspace = true -alloy-serde.workspace = true - # rpc/net jsonrpsee = { workspace = true, features = ["server"] } tower-http = { workspace = true, features = ["full"] } diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index 696d35014..385b92af3 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -1094,14 +1094,7 @@ where /// If called outside of the tokio runtime. 
See also [`Self::eth_api`] pub fn register_ots(&mut self) -> &mut Self where - EthApi: TraceExt - + EthTransactions< - NetworkTypes: alloy_network::Network< - TransactionResponse = alloy_serde::WithOtherFields< - alloy_rpc_types::Transaction, - >, - >, - >, + EthApi: TraceExt + EthTransactions, { let otterscan_api = self.otterscan_api(); self.modules.insert(RethRpcModule::Ots, otterscan_api.into_rpc().into()); diff --git a/crates/rpc/rpc-builder/tests/it/http.rs b/crates/rpc/rpc-builder/tests/it/http.rs index 7a8093c50..ed9ef56d6 100644 --- a/crates/rpc/rpc-builder/tests/it/http.rs +++ b/crates/rpc/rpc-builder/tests/it/http.rs @@ -3,11 +3,10 @@ use crate::utils::{launch_http, launch_http_ws, launch_ws}; use alloy_primitives::{hex_literal::hex, Address, Bytes, TxHash, B256, B64, U256, U64}; -use alloy_rpc_types::{ - Block, FeeHistory, Filter, Index, Log, PendingTransactionFilterKind, SyncStatus, Transaction, - TransactionReceipt, +use alloy_rpc_types_eth::{ + transaction::TransactionRequest, Block, FeeHistory, Filter, Index, Log, + PendingTransactionFilterKind, SyncStatus, Transaction, TransactionReceipt, }; -use alloy_rpc_types_eth::transaction::TransactionRequest; use alloy_rpc_types_trace::filter::TraceFilter; use jsonrpsee::{ core::{ diff --git a/crates/rpc/rpc-builder/tests/it/middleware.rs b/crates/rpc/rpc-builder/tests/it/middleware.rs index bcc26dcad..0e2186e56 100644 --- a/crates/rpc/rpc-builder/tests/it/middleware.rs +++ b/crates/rpc/rpc-builder/tests/it/middleware.rs @@ -1,5 +1,5 @@ use crate::utils::{test_address, test_rpc_builder}; -use alloy_rpc_types::{Block, Receipt, Transaction}; +use alloy_rpc_types_eth::{Block, Receipt, Transaction}; use jsonrpsee::{ server::{middleware::rpc::RpcServiceT, RpcServiceBuilder}, types::Request, diff --git a/crates/rpc/rpc-eth-api/src/types.rs b/crates/rpc/rpc-eth-api/src/types.rs index 653730ed3..1d176dd1e 100644 --- a/crates/rpc/rpc-eth-api/src/types.rs +++ b/crates/rpc/rpc-eth-api/src/types.rs @@ -2,16 +2,15 @@ use std::{error::Error, fmt}; -use alloy_network::{AnyNetwork, Network}; +use alloy_network::Network; use alloy_rpc_types::Block; -use reth_rpc_eth_types::EthApiError; use reth_rpc_types_compat::TransactionCompat; use crate::{AsEthApiError, FromEthApiError, FromEvmError}; /// Network specific `eth` API types. pub trait EthApiTypes: Send + Sync + Clone { - /// Extension of [`EthApiError`], with network specific errors. + /// Extension of [`FromEthApiError`], with network specific errors. type Error: Into> + FromEthApiError + AsEthApiError @@ -28,16 +27,6 @@ pub trait EthApiTypes: Send + Sync + Clone { fn tx_resp_builder(&self) -> &Self::TransactionCompat; } -impl EthApiTypes for () { - type Error = EthApiError; - type NetworkTypes = AnyNetwork; - type TransactionCompat = (); - - fn tx_resp_builder(&self) -> &Self::TransactionCompat { - self - } -} - /// Adapter for network specific transaction type. 
pub type RpcTransaction = ::TransactionResponse; diff --git a/crates/rpc/rpc-eth-types/Cargo.toml b/crates/rpc/rpc-eth-types/Cargo.toml index 46a0d7b5c..80901bcf8 100644 --- a/crates/rpc/rpc-eth-types/Cargo.toml +++ b/crates/rpc/rpc-eth-types/Cargo.toml @@ -36,7 +36,6 @@ revm.workspace = true revm-inspectors.workspace = true revm-primitives = { workspace = true, features = ["dev"] } alloy-rpc-types.workspace = true -alloy-serde.workspace = true alloy-eips.workspace = true # rpc diff --git a/crates/rpc/rpc-eth-types/src/lib.rs b/crates/rpc/rpc-eth-types/src/lib.rs index fa36dae4c..03c23dc34 100644 --- a/crates/rpc/rpc-eth-types/src/lib.rs +++ b/crates/rpc/rpc-eth-types/src/lib.rs @@ -37,5 +37,5 @@ pub use gas_oracle::{ }; pub use id_provider::EthSubscriptionIdProvider; pub use pending_block::{PendingBlock, PendingBlockEnv, PendingBlockEnvOrigin}; -pub use receipt::ReceiptBuilder; +pub use receipt::EthReceiptBuilder; pub use transaction::TransactionSource; diff --git a/crates/rpc/rpc-eth-types/src/receipt.rs b/crates/rpc/rpc-eth-types/src/receipt.rs index 198ca79aa..0734b547e 100644 --- a/crates/rpc/rpc-eth-types/src/receipt.rs +++ b/crates/rpc/rpc-eth-types/src/receipt.rs @@ -1,24 +1,101 @@ //! RPC receipt response builder, extends a layer one receipt with layer two data. -use alloy_consensus::Transaction; +use alloy_consensus::{ReceiptEnvelope, Transaction}; use alloy_primitives::{Address, TxKind}; -use alloy_rpc_types::{AnyReceiptEnvelope, Log, ReceiptWithBloom, TransactionReceipt}; -use alloy_serde::OtherFields; -use reth_primitives::{Receipt, TransactionMeta, TransactionSigned}; +use alloy_rpc_types::{Log, ReceiptWithBloom, TransactionReceipt}; +use reth_primitives::{Receipt, TransactionMeta, TransactionSigned, TxType}; use revm_primitives::calc_blob_gasprice; use super::{EthApiError, EthResult}; +/// Builds an [`TransactionReceipt`] obtaining the inner receipt envelope from the given closure. 
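/// A minimal sketch of a call site (assuming `tx`, `meta`, `receipt` and `all_receipts` are in
/// scope, as in the Ethereum and OP receipt builders touched by this change):
///
/// let rpc_receipt = build_receipt(&tx, meta, &receipt, &all_receipts, |with_bloom| {
///     ReceiptEnvelope::Legacy(with_bloom)
/// })?;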
+pub fn build_receipt( + transaction: &TransactionSigned, + meta: TransactionMeta, + receipt: &Receipt, + all_receipts: &[Receipt], + build_envelope: impl FnOnce(ReceiptWithBloom) -> T, +) -> EthResult> { + // Note: we assume this transaction is valid, because it's mined (or part of pending block) + // and we don't need to check for pre EIP-2 + let from = + transaction.recover_signer_unchecked().ok_or(EthApiError::InvalidTransactionSignature)?; + + // get the previous transaction cumulative gas used + let gas_used = if meta.index == 0 { + receipt.cumulative_gas_used + } else { + let prev_tx_idx = (meta.index - 1) as usize; + all_receipts + .get(prev_tx_idx) + .map(|prev_receipt| receipt.cumulative_gas_used - prev_receipt.cumulative_gas_used) + .unwrap_or_default() + }; + + let blob_gas_used = transaction.transaction.blob_gas_used(); + // Blob gas price should only be present if the transaction is a blob transaction + let blob_gas_price = blob_gas_used.and_then(|_| meta.excess_blob_gas.map(calc_blob_gasprice)); + let logs_bloom = receipt.bloom_slow(); + + // get number of logs in the block + let mut num_logs = 0; + for prev_receipt in all_receipts.iter().take(meta.index as usize) { + num_logs += prev_receipt.logs.len(); + } + + let logs: Vec = receipt + .logs + .iter() + .enumerate() + .map(|(tx_log_idx, log)| Log { + inner: log.clone(), + block_hash: Some(meta.block_hash), + block_number: Some(meta.block_number), + block_timestamp: Some(meta.timestamp), + transaction_hash: Some(meta.tx_hash), + transaction_index: Some(meta.index), + log_index: Some((num_logs + tx_log_idx) as u64), + removed: false, + }) + .collect(); + + let rpc_receipt = alloy_rpc_types::Receipt { + status: receipt.success.into(), + cumulative_gas_used: receipt.cumulative_gas_used as u128, + logs, + }; + + let (contract_address, to) = match transaction.transaction.kind() { + TxKind::Create => (Some(from.create(transaction.transaction.nonce())), None), + TxKind::Call(addr) => (None, Some(Address(*addr))), + }; + + Ok(TransactionReceipt { + inner: build_envelope(ReceiptWithBloom { receipt: rpc_receipt, logs_bloom }), + transaction_hash: meta.tx_hash, + transaction_index: Some(meta.index), + block_hash: Some(meta.block_hash), + block_number: Some(meta.block_number), + from, + to, + gas_used: gas_used as u128, + contract_address, + effective_gas_price: transaction.effective_gas_price(meta.base_fee), + // EIP-4844 fields + blob_gas_price, + blob_gas_used: blob_gas_used.map(u128::from), + authorization_list: transaction.authorization_list().map(|l| l.to_vec()), + }) +} + /// Receipt response builder. #[derive(Debug)] -pub struct ReceiptBuilder { +pub struct EthReceiptBuilder { /// The base response body, contains L1 fields. - pub base: TransactionReceipt>, - /// Additional L2 fields. - pub other: OtherFields, + pub base: TransactionReceipt, } -impl ReceiptBuilder { +impl EthReceiptBuilder { /// Returns a new builder with the base response body (L1 fields) set. 
/// /// Note: This requires _all_ block receipts because we need to calculate the gas used by the @@ -29,88 +106,23 @@ impl ReceiptBuilder { receipt: &Receipt, all_receipts: &[Receipt], ) -> EthResult { - // Note: we assume this transaction is valid, because it's mined (or part of pending block) - // and we don't need to check for pre EIP-2 - let from = transaction - .recover_signer_unchecked() - .ok_or(EthApiError::InvalidTransactionSignature)?; - - // get the previous transaction cumulative gas used - let gas_used = if meta.index == 0 { - receipt.cumulative_gas_used - } else { - let prev_tx_idx = (meta.index - 1) as usize; - all_receipts - .get(prev_tx_idx) - .map(|prev_receipt| receipt.cumulative_gas_used - prev_receipt.cumulative_gas_used) - .unwrap_or_default() - }; - - let blob_gas_used = transaction.transaction.blob_gas_used(); - // Blob gas price should only be present if the transaction is a blob transaction - let blob_gas_price = - blob_gas_used.and_then(|_| meta.excess_blob_gas.map(calc_blob_gasprice)); - let logs_bloom = receipt.bloom_slow(); - - // get number of logs in the block - let mut num_logs = 0; - for prev_receipt in all_receipts.iter().take(meta.index as usize) { - num_logs += prev_receipt.logs.len(); - } - - let logs: Vec = receipt - .logs - .iter() - .enumerate() - .map(|(tx_log_idx, log)| Log { - inner: log.clone(), - block_hash: Some(meta.block_hash), - block_number: Some(meta.block_number), - block_timestamp: Some(meta.timestamp), - transaction_hash: Some(meta.tx_hash), - transaction_index: Some(meta.index), - log_index: Some((num_logs + tx_log_idx) as u64), - removed: false, - }) - .collect(); - - let rpc_receipt = alloy_rpc_types::Receipt { - status: receipt.success.into(), - cumulative_gas_used: receipt.cumulative_gas_used as u128, - logs, - }; - - let (contract_address, to) = match transaction.transaction.kind() { - TxKind::Create => (Some(from.create(transaction.transaction.nonce())), None), - TxKind::Call(addr) => (None, Some(Address(*addr))), - }; - - #[allow(clippy::needless_update)] - let base = TransactionReceipt { - inner: AnyReceiptEnvelope { - inner: ReceiptWithBloom { receipt: rpc_receipt, logs_bloom }, - r#type: transaction.transaction.tx_type().into(), - }, - transaction_hash: meta.tx_hash, - transaction_index: Some(meta.index), - block_hash: Some(meta.block_hash), - block_number: Some(meta.block_number), - from, - to, - gas_used: gas_used as u128, - contract_address, - effective_gas_price: transaction.effective_gas_price(meta.base_fee), - // EIP-4844 fields - blob_gas_price, - blob_gas_used: blob_gas_used.map(u128::from), - authorization_list: transaction.authorization_list().map(|l| l.to_vec()), - }; + let base = build_receipt(transaction, meta, receipt, all_receipts, |receipt_with_bloom| { + match receipt.tx_type { + TxType::Legacy => ReceiptEnvelope::Legacy(receipt_with_bloom), + TxType::Eip2930 => ReceiptEnvelope::Eip2930(receipt_with_bloom), + TxType::Eip1559 => ReceiptEnvelope::Eip1559(receipt_with_bloom), + TxType::Eip4844 => ReceiptEnvelope::Eip4844(receipt_with_bloom), + TxType::Eip7702 => ReceiptEnvelope::Eip7702(receipt_with_bloom), + #[allow(unreachable_patterns)] + _ => unreachable!(), + } + })?; - Ok(Self { base, other: Default::default() }) + Ok(Self { base }) } /// Builds a receipt response from the base response body, and any set additional fields. 
- pub fn build(self) -> TransactionReceipt> { + pub fn build(self) -> TransactionReceipt { self.base } } diff --git a/crates/rpc/rpc-types-compat/Cargo.toml b/crates/rpc/rpc-types-compat/Cargo.toml index 7d5eac9db..b9a9e5f03 100644 --- a/crates/rpc/rpc-types-compat/Cargo.toml +++ b/crates/rpc/rpc-types-compat/Cargo.toml @@ -22,7 +22,6 @@ alloy-primitives.workspace = true alloy-rlp.workspace = true alloy-rpc-types.workspace = true alloy-rpc-types-eth = { workspace = true, default-features = false, features = ["serde"] } -alloy-serde.workspace = true alloy-rpc-types-engine.workspace = true alloy-consensus.workspace = true @@ -30,4 +29,4 @@ alloy-consensus.workspace = true serde.workspace = true [dev-dependencies] -serde_json.workspace = true \ No newline at end of file +serde_json.workspace = true diff --git a/crates/rpc/rpc-types-compat/src/transaction/mod.rs b/crates/rpc/rpc-types-compat/src/transaction/mod.rs index 2d92747d4..16742144f 100644 --- a/crates/rpc/rpc-types-compat/src/transaction/mod.rs +++ b/crates/rpc/rpc-types-compat/src/transaction/mod.rs @@ -8,9 +8,8 @@ use std::fmt; use alloy_consensus::Transaction as _; use alloy_rpc_types::{ request::{TransactionInput, TransactionRequest}, - Transaction, TransactionInfo, + TransactionInfo, }; -use alloy_serde::WithOtherFields; use reth_primitives::{TransactionSigned, TransactionSignedEcRecovered, TxType}; use serde::{Deserialize, Serialize}; @@ -89,28 +88,6 @@ pub trait TransactionCompat: Send + Sync + Unpin + Clone + fmt::Debug { fn tx_type(tx: &Self::Transaction) -> u8; } -impl TransactionCompat for () { - // this noop impl depends on integration in `reth_rpc_eth_api::EthApiTypes` noop impl, and - // `alloy_network::AnyNetwork` - type Transaction = WithOtherFields; - - fn fill( - &self, - _tx: TransactionSignedEcRecovered, - _tx_info: TransactionInfo, - ) -> Self::Transaction { - WithOtherFields::default() - } - - fn otterscan_api_truncate_input(tx: &mut Self::Transaction) { - tx.input = tx.input.slice(..4); - } - - fn tx_type(_tx: &Self::Transaction) -> u8 { - 0 - } -} - /// Gas price and max fee per gas for a transaction. Helper type to format transaction RPC response. #[derive(Debug, Default)] pub struct GasPrice { diff --git a/crates/rpc/rpc/src/eth/core.rs b/crates/rpc/rpc/src/eth/core.rs index 026c87153..98ac9e9f4 100644 --- a/crates/rpc/rpc/src/eth/core.rs +++ b/crates/rpc/rpc/src/eth/core.rs @@ -3,7 +3,7 @@ use std::sync::Arc; -use alloy_network::AnyNetwork; +use alloy_network::Ethereum; use alloy_primitives::U256; use derive_more::Deref; use reth_primitives::BlockNumberOrTag; @@ -132,8 +132,7 @@ where Self: Send + Sync, { type Error = EthApiError; - // todo: replace with alloy_network::Ethereum - type NetworkTypes = AnyNetwork; + type NetworkTypes = Ethereum; type TransactionCompat = EthTxBuilder; fn tx_resp_builder(&self) -> &Self::TransactionCompat { diff --git a/crates/rpc/rpc/src/eth/helpers/block.rs b/crates/rpc/rpc/src/eth/helpers/block.rs index d5341d0b2..1e2d1802e 100644 --- a/crates/rpc/rpc/src/eth/helpers/block.rs +++ b/crates/rpc/rpc/src/eth/helpers/block.rs @@ -1,14 +1,13 @@ //! Contains RPC handler implementations specific to blocks. 
-use alloy_rpc_types::{AnyTransactionReceipt, BlockId}; -use alloy_serde::WithOtherFields; +use alloy_rpc_types::{BlockId, TransactionReceipt}; use reth_primitives::TransactionMeta; use reth_provider::{BlockReaderIdExt, HeaderProvider}; use reth_rpc_eth_api::{ helpers::{EthBlocks, LoadBlock, LoadPendingBlock, LoadReceipt, SpawnBlocking}, RpcReceipt, }; -use reth_rpc_eth_types::{EthApiError, ReceiptBuilder}; +use reth_rpc_eth_types::{EthApiError, EthReceiptBuilder}; use crate::EthApi; @@ -16,7 +15,7 @@ impl EthBlocks for EthApi, + NetworkTypes: alloy_network::Network, Provider: HeaderProvider, >, { @@ -51,9 +50,8 @@ where excess_blob_gas, timestamp, }; - ReceiptBuilder::new(&tx, meta, receipt, &receipts) + EthReceiptBuilder::new(&tx, meta, receipt, &receipts) .map(|builder| builder.build()) - .map(WithOtherFields::new) }) .collect::, Self::Error>>() .map(Some) diff --git a/crates/rpc/rpc/src/eth/helpers/receipt.rs b/crates/rpc/rpc/src/eth/helpers/receipt.rs index d0cb5867e..594cffd09 100644 --- a/crates/rpc/rpc/src/eth/helpers/receipt.rs +++ b/crates/rpc/rpc/src/eth/helpers/receipt.rs @@ -1,9 +1,8 @@ //! Builds an RPC receipt response w.r.t. data layout of network. -use alloy_serde::WithOtherFields; use reth_primitives::{Receipt, TransactionMeta, TransactionSigned}; use reth_rpc_eth_api::{helpers::LoadReceipt, FromEthApiError, RpcNodeCoreExt, RpcReceipt}; -use reth_rpc_eth_types::{EthApiError, ReceiptBuilder}; +use reth_rpc_eth_types::{EthApiError, EthReceiptBuilder}; use crate::EthApi; @@ -26,6 +25,6 @@ where .map_err(Self::Error::from_eth_err)? .ok_or(EthApiError::HeaderNotFound(hash.into()))?; - Ok(WithOtherFields::new(ReceiptBuilder::new(&tx, meta, &receipt, &all_receipts)?.build())) + Ok(EthReceiptBuilder::new(&tx, meta, &receipt, &all_receipts)?.build()) } } diff --git a/crates/rpc/rpc/src/eth/helpers/types.rs b/crates/rpc/rpc/src/eth/helpers/types.rs index 848bcdc36..0998c057e 100644 --- a/crates/rpc/rpc/src/eth/helpers/types.rs +++ b/crates/rpc/rpc/src/eth/helpers/types.rs @@ -1,10 +1,9 @@ //! L1 `eth` API types. 
use alloy_consensus::Transaction as _; -use alloy_network::{AnyNetwork, Network}; +use alloy_network::{Ethereum, Network}; use alloy_primitives::{Address, TxKind}; use alloy_rpc_types::{Transaction, TransactionInfo}; -use alloy_serde::WithOtherFields; use reth_primitives::TransactionSignedEcRecovered; use reth_rpc_types_compat::{ transaction::{from_primitive_signature, GasPrice}, @@ -19,7 +18,7 @@ impl TransactionCompat for EthTxBuilder where Self: Send + Sync, { - type Transaction = ::TransactionResponse; + type Transaction = ::TransactionResponse; fn fill( &self, @@ -53,41 +52,38 @@ where signed_tx.chain_id(), ); - WithOtherFields { - inner: Transaction { - hash: signed_tx.hash(), - nonce: signed_tx.nonce(), - from: signer, - to, - value: signed_tx.value(), - gas_price, - max_fee_per_gas, - max_priority_fee_per_gas: signed_tx.max_priority_fee_per_gas(), - signature: Some(signature), - gas: signed_tx.gas_limit(), - input, - chain_id, - access_list, - transaction_type: Some(signed_tx.tx_type() as u8), - // These fields are set to None because they are not stored as part of the - // transaction - block_hash, - block_number, - transaction_index, - // EIP-4844 fields - max_fee_per_blob_gas: signed_tx.max_fee_per_blob_gas(), - blob_versioned_hashes, - authorization_list, - }, - ..Default::default() + Transaction { + hash: signed_tx.hash(), + nonce: signed_tx.nonce(), + from: signer, + to, + value: signed_tx.value(), + gas_price, + max_fee_per_gas, + max_priority_fee_per_gas: signed_tx.max_priority_fee_per_gas(), + signature: Some(signature), + gas: signed_tx.gas_limit(), + input, + chain_id, + access_list, + transaction_type: Some(signed_tx.tx_type() as u8), + // These fields are set to None because they are not stored as part of the + // transaction + block_hash, + block_number, + transaction_index, + // EIP-4844 fields + max_fee_per_blob_gas: signed_tx.max_fee_per_blob_gas(), + blob_versioned_hashes, + authorization_list, } } fn otterscan_api_truncate_input(tx: &mut Self::Transaction) { - tx.inner.input = tx.inner.input.slice(..4); + tx.input = tx.input.slice(..4); } fn tx_type(tx: &Self::Transaction) -> u8 { - tx.inner.transaction_type.unwrap_or(0) + tx.transaction_type.unwrap_or(0) } } diff --git a/crates/rpc/rpc/src/eth/pubsub.rs b/crates/rpc/rpc/src/eth/pubsub.rs index ac962610e..663ec0b99 100644 --- a/crates/rpc/rpc/src/eth/pubsub.rs +++ b/crates/rpc/rpc/src/eth/pubsub.rs @@ -8,9 +8,8 @@ use alloy_rpc_types::{ Params, PubSubSyncStatus, SubscriptionKind, SubscriptionResult as EthSubscriptionResult, SyncStatusMetadata, }, - FilteredParams, Header, Log, Transaction, + FilteredParams, Header, Log, }; -use alloy_serde::WithOtherFields; use futures::StreamExt; use jsonrpsee::{ server::SubscriptionMessage, types::ErrorObject, PendingSubscriptionSink, SubscriptionSink, @@ -123,11 +122,9 @@ where { match kind { SubscriptionKind::NewHeads => { - let stream = pubsub.new_headers_stream().map(|header| { - EthSubscriptionResult::>::Header(Box::new( - header.into(), - )) - }); + let stream = pubsub + .new_headers_stream() + .map(|header| EthSubscriptionResult::<()>::Header(Box::new(header.into()))); pipe_from_stream(accepted_sink, stream).await } SubscriptionKind::Logs => { @@ -139,9 +136,9 @@ where } _ => FilteredParams::default(), }; - let stream = pubsub.log_stream(filter).map(|log| { - EthSubscriptionResult::>::Log(Box::new(log)) - }); + let stream = pubsub + .log_stream(filter) + .map(|log| EthSubscriptionResult::<()>::Log(Box::new(log))); pipe_from_stream(accepted_sink, stream).await } 
SubscriptionKind::NewPendingTransactions => { @@ -170,7 +167,7 @@ where let stream = pubsub .pending_transaction_hashes_stream() - .map(EthSubscriptionResult::>::TransactionHash); + .map(EthSubscriptionResult::<()>::TransactionHash); pipe_from_stream(accepted_sink, stream).await } SubscriptionKind::Syncing => { From e374e4bfe9c4779828c73f39ffc30d99850286ec Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Sat, 2 Nov 2024 13:18:31 -0600 Subject: [PATCH 402/425] renamed OptimismEvmConfig to OpEvmConfig (#12284) --- crates/optimism/evm/src/execute.rs | 8 ++++---- crates/optimism/evm/src/lib.rs | 20 ++++++++++---------- crates/optimism/node/src/node.rs | 8 ++++---- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index c2b006144..91cdb1bd2 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -1,6 +1,6 @@ //! Optimism block execution strategy. -use crate::{l1::ensure_create2_deployer, OptimismBlockExecutionError, OptimismEvmConfig}; +use crate::{l1::ensure_create2_deployer, OpEvmConfig, OptimismBlockExecutionError}; use alloc::{boxed::Box, sync::Arc, vec::Vec}; use alloy_consensus::Transaction as _; use alloy_eips::eip7685::Requests; @@ -29,7 +29,7 @@ use tracing::trace; /// Factory for [`OpExecutionStrategy`]. #[derive(Debug, Clone)] -pub struct OpExecutionStrategyFactory { +pub struct OpExecutionStrategyFactory { /// The chainspec chain_spec: Arc, /// How to create an EVM. @@ -39,7 +39,7 @@ pub struct OpExecutionStrategyFactory { impl OpExecutionStrategyFactory { /// Creates a new default optimism executor strategy factory. pub fn optimism(chain_spec: Arc) -> Self { - Self::new(chain_spec.clone(), OptimismEvmConfig::new(chain_spec)) + Self::new(chain_spec.clone(), OpEvmConfig::new(chain_spec)) } } @@ -339,7 +339,7 @@ mod tests { chain_spec: Arc, ) -> BasicBlockExecutorProvider { let strategy_factory = - OpExecutionStrategyFactory::new(chain_spec.clone(), OptimismEvmConfig::new(chain_spec)); + OpExecutionStrategyFactory::new(chain_spec.clone(), OpEvmConfig::new(chain_spec)); BasicBlockExecutorProvider::new(strategy_factory) } diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index 03aecf2c8..1adb5db74 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -38,12 +38,12 @@ use revm_primitives::{ /// Optimism-related EVM configuration. #[derive(Debug, Clone)] -pub struct OptimismEvmConfig { +pub struct OpEvmConfig { chain_spec: Arc, } -impl OptimismEvmConfig { - /// Creates a new [`OptimismEvmConfig`] with the given chain spec. +impl OpEvmConfig { + /// Creates a new [`OpEvmConfig`] with the given chain spec. 
pub const fn new(chain_spec: Arc) -> Self { Self { chain_spec } } @@ -54,7 +54,7 @@ impl OptimismEvmConfig { } } -impl ConfigureEvmEnv for OptimismEvmConfig { +impl ConfigureEvmEnv for OpEvmConfig { type Header = Header; type Error = DecodeError; @@ -174,7 +174,7 @@ impl ConfigureEvmEnv for OptimismEvmConfig { } } -impl ConfigureEvm for OptimismEvmConfig { +impl ConfigureEvm for OpEvmConfig { type DefaultExternalContext<'a> = (); fn evm(&self, db: DB) -> Evm<'_, Self::DefaultExternalContext<'_>, DB> { @@ -226,8 +226,8 @@ mod tests { sync::Arc, }; - fn test_evm_config() -> OptimismEvmConfig { - OptimismEvmConfig::new(BASE_MAINNET.clone()) + fn test_evm_config() -> OpEvmConfig { + OpEvmConfig::new(BASE_MAINNET.clone()) } #[test] @@ -254,9 +254,9 @@ mod tests { // Define the total difficulty as zero (default) let total_difficulty = U256::ZERO; - // Use the `OptimismEvmConfig` to fill the `cfg_env` and `block_env` based on the ChainSpec, + // Use the `OpEvmConfig` to fill the `cfg_env` and `block_env` based on the ChainSpec, // Header, and total difficulty - OptimismEvmConfig::new(Arc::new(OpChainSpec { inner: chain_spec.clone() })) + OpEvmConfig::new(Arc::new(OpChainSpec { inner: chain_spec.clone() })) .fill_cfg_and_block_env(&mut cfg_env, &mut block_env, &header, total_difficulty); // Assert that the chain ID in the `cfg_env` is correctly set to the chain ID of the @@ -266,7 +266,7 @@ mod tests { #[test] fn test_evm_configure() { - // Create a default `OptimismEvmConfig` + // Create a default `OpEvmConfig` let evm_config = test_evm_config(); // Initialize an empty database wrapped in CacheDB diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 87266767d..7437ec676 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -20,7 +20,7 @@ use reth_node_builder::{ }; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::OpBeaconConsensus; -use reth_optimism_evm::{OpExecutionStrategyFactory, OptimismEvmConfig}; +use reth_optimism_evm::{OpEvmConfig, OpExecutionStrategyFactory}; use reth_optimism_rpc::OpEthApi; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; use reth_primitives::{Block, Header}; @@ -185,14 +185,14 @@ impl ExecutorBuilder for OpExecutorBuilder where Node: FullNodeTypes>, { - type EVM = OptimismEvmConfig; + type EVM = OpEvmConfig; type Executor = BasicBlockExecutorProvider; async fn build_evm( self, ctx: &BuilderContext, ) -> eyre::Result<(Self::EVM, Self::Executor)> { - let evm_config = OptimismEvmConfig::new(ctx.chain_spec()); + let evm_config = OpEvmConfig::new(ctx.chain_spec()); let strategy_factory = OpExecutionStrategyFactory::new(ctx.chain_spec(), evm_config.clone()); let executor = BasicBlockExecutorProvider::new(strategy_factory); @@ -359,7 +359,7 @@ where ctx: &BuilderContext, pool: Pool, ) -> eyre::Result> { - self.spawn(OptimismEvmConfig::new(ctx.chain_spec()), ctx, pool) + self.spawn(OpEvmConfig::new(ctx.chain_spec()), ctx, pool) } } From e167cc9b0976e86963c294b3b102f4e6f4859ff7 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 13:51:20 +0000 Subject: [PATCH 403/425] chore(deps): weekly `cargo update` (#12289) Co-authored-by: github-merge-queue <118344674+github-merge-queue@users.noreply.github.com> --- Cargo.lock | 134 ++++++++++++++++++++++++++--------------------------- 1 file changed, 67 insertions(+), 67 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8aab2c0a8..0b3318c4a 
100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -97,9 +97,9 @@ checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "alloy-chains" -version = "0.1.45" +version = "0.1.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4feb7c662fd0be3d0c926a456be4ac44e9cf8e05cbd91df6db7f7140b861016a" +checksum = "836cf02383d9ebb35502d379bcd1ae803155094077eaab9c29131d888cd5fa3e" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -415,7 +415,7 @@ checksum = "2b09cae092c27b6f1bde952653a22708691802e57bfef4a2973b80bea21efd3f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -639,7 +639,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -655,7 +655,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", "syn-solidity", "tiny-keccak", ] @@ -671,7 +671,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", "syn-solidity", ] @@ -828,9 +828,9 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8365de52b16c035ff4fcafe0092ba9390540e3e352870ac09933bebcaa2c8c56" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anstyle-parse" @@ -877,7 +877,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -1100,7 +1100,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -1111,7 +1111,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -1149,7 +1149,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -1255,7 +1255,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -1437,7 +1437,7 @@ checksum = "240f4126219a83519bad05c9a40bfc0303921eeb571fc2d7e44c17ffac99d3f1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", "synstructure", ] @@ -1559,7 +1559,7 @@ checksum = "bcfcc3cd946cb52f0bbfdbbcfa2f4e24f75ebb6c0e1002f7c25904fada18b9ec" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -1647,9 +1647,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.31" +version = "1.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2e7962b54006dcfcc61cb72735f4d89bb97061dd6a7ed882ec6b8ee53714c6f" +checksum = "67b9470d453346108f93a59222a9a1a5724db32d0a4727b7ab7ace4b4d822dc9" dependencies = [ "jobserver", "libc", @@ -1777,7 +1777,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -2234,7 +2234,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -2258,7 +2258,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -2269,7 +2269,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 
2.0.86", + "syn 2.0.87", ] [[package]] @@ -2391,7 +2391,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -2402,7 +2402,7 @@ checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -2423,7 +2423,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", "unicode-xid", ] @@ -2537,7 +2537,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -2685,7 +2685,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -2696,7 +2696,7 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -2753,7 +2753,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -3309,7 +3309,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -3835,7 +3835,7 @@ dependencies = [ "quote", "serde", "serde_json", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -3985,7 +3985,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -4153,7 +4153,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b23a0c8dfe501baac4adf6ebbfa6eddf8f0c07f56b058cc1288017e32397846c" dependencies = [ "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -4401,7 +4401,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -4819,7 +4819,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -4965,7 +4965,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -5213,7 +5213,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -5570,7 +5570,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -5599,7 +5599,7 @@ checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -5771,7 +5771,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ "proc-macro2", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -5822,7 +5822,7 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -5920,7 +5920,7 @@ checksum = "6ff7ff745a347b87471d859a377a9a404361e7efc2a971d73424a6d183c0fc77" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -6744,7 +6744,7 @@ dependencies = [ "proc-macro2", "quote", "similar-asserts", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -9904,7 +9904,7 @@ checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] 
[[package]] @@ -9939,7 +9939,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -9990,7 +9990,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -10013,7 +10013,7 @@ checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -10299,7 +10299,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -10357,9 +10357,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.86" +version = "2.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89275301d38033efb81a6e60e3497e734dfcc62571f2854bf4b16690398824c" +checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" dependencies = [ "proc-macro2", "quote", @@ -10375,7 +10375,7 @@ dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -10401,7 +10401,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -10478,7 +10478,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -10517,7 +10517,7 @@ checksum = "b08be0f17bd307950653ce45db00cd31200d82b624b36e181337d9c7d92765b5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -10694,7 +10694,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -10895,7 +10895,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -11286,7 +11286,7 @@ checksum = "d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -11357,7 +11357,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", "wasm-bindgen-shared", ] @@ -11391,7 +11391,7 @@ checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11547,7 +11547,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -11558,7 +11558,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -11569,7 +11569,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -11580,7 +11580,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -11855,7 +11855,7 @@ checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", "synstructure", ] @@ -11877,7 +11877,7 @@ checksum = 
"fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -11897,7 +11897,7 @@ checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", "synstructure", ] @@ -11918,7 +11918,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] @@ -11940,7 +11940,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.86", + "syn 2.0.87", ] [[package]] From 20d3b21904dc5412c3526f72caa999be488a6886 Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Sun, 3 Nov 2024 09:26:31 -0600 Subject: [PATCH 404/425] renamed OptimismEngineTypes to OpEngineTypes (#12285) --- crates/optimism/node/src/engine.rs | 18 +++++++++--------- crates/optimism/node/src/lib.rs | 2 +- crates/optimism/node/src/node.rs | 21 +++++++++------------ 3 files changed, 19 insertions(+), 22 deletions(-) diff --git a/crates/optimism/node/src/engine.rs b/crates/optimism/node/src/engine.rs index af517b00e..d956f0cd5 100644 --- a/crates/optimism/node/src/engine.rs +++ b/crates/optimism/node/src/engine.rs @@ -22,17 +22,17 @@ use reth_optimism_payload_builder::{ /// The types used in the optimism beacon consensus engine. #[derive(Debug, Default, Clone, serde::Deserialize, serde::Serialize)] #[non_exhaustive] -pub struct OptimismEngineTypes { +pub struct OpEngineTypes { _marker: std::marker::PhantomData, } -impl PayloadTypes for OptimismEngineTypes { +impl PayloadTypes for OpEngineTypes { type BuiltPayload = T::BuiltPayload; type PayloadAttributes = T::PayloadAttributes; type PayloadBuilderAttributes = T::PayloadBuilderAttributes; } -impl EngineTypes for OptimismEngineTypes +impl EngineTypes for OpEngineTypes where T::BuiltPayload: TryInto + TryInto @@ -45,7 +45,7 @@ where type ExecutionPayloadEnvelopeV4 = OpExecutionPayloadEnvelopeV4; } -/// A default payload type for [`OptimismEngineTypes`] +/// A default payload type for [`OpEngineTypes`] #[derive(Debug, Default, Clone, serde::Deserialize, serde::Serialize)] #[non_exhaustive] pub struct OpPayloadTypes; @@ -224,7 +224,7 @@ mod test { let attributes = get_attributes(None, 1799999999); let result = >::ensure_well_formed_attributes( &validator, EngineApiMessageVersion::V3, &attributes ); @@ -237,7 +237,7 @@ mod test { let attributes = get_attributes(None, 1800000000); let result = >::ensure_well_formed_attributes( &validator, EngineApiMessageVersion::V3, &attributes ); @@ -250,7 +250,7 @@ mod test { let attributes = get_attributes(Some(b64!("0000000000000008")), 1800000000); let result = >::ensure_well_formed_attributes( &validator, EngineApiMessageVersion::V3, &attributes ); @@ -263,7 +263,7 @@ mod test { let attributes = get_attributes(Some(b64!("0000000800000008")), 1800000000); let result = >::ensure_well_formed_attributes( &validator, EngineApiMessageVersion::V3, &attributes ); @@ -276,7 +276,7 @@ mod test { let attributes = get_attributes(Some(b64!("0000000000000000")), 1800000000); let result = >::ensure_well_formed_attributes( &validator, EngineApiMessageVersion::V3, &attributes ); diff --git a/crates/optimism/node/src/lib.rs b/crates/optimism/node/src/lib.rs index ff25e7173..f2870d0b8 100644 --- a/crates/optimism/node/src/lib.rs +++ b/crates/optimism/node/src/lib.rs @@ 
-15,7 +15,7 @@ pub mod args; /// Exports optimism-specific implementations of the [`EngineTypes`](reth_node_api::EngineTypes) /// trait. pub mod engine; -pub use engine::OptimismEngineTypes; +pub use engine::OpEngineTypes; pub mod node; pub use node::OptimismNode; diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 7437ec676..7e6f1d098 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -36,7 +36,7 @@ use crate::{ args::RollupArgs, engine::OptimismEngineValidator, txpool::{OpTransactionPool, OpTransactionValidator}, - OptimismEngineTypes, + OpEngineTypes, }; /// Optimism primitive types. @@ -74,7 +74,7 @@ impl OptimismNode { > where Node: FullNodeTypes< - Types: NodeTypesWithEngine, + Types: NodeTypesWithEngine, >, { let RollupArgs { disable_txpool_gossip, compute_pending_block, discovery_v4, .. } = args; @@ -93,9 +93,7 @@ impl OptimismNode { impl Node for OptimismNode where - N: FullNodeTypes< - Types: NodeTypesWithEngine, - >, + N: FullNodeTypes>, { type ComponentsBuilder = ComponentsBuilder< N, @@ -127,7 +125,7 @@ impl NodeTypes for OptimismNode { } impl NodeTypesWithEngine for OptimismNode { - type Engine = OptimismEngineTypes; + type Engine = OpEngineTypes; } /// Add-ons w.r.t. optimism. @@ -312,10 +310,10 @@ impl OpPayloadBuilder { evm_config: Evm, ctx: &BuilderContext, pool: Pool, - ) -> eyre::Result> + ) -> eyre::Result> where Node: FullNodeTypes< - Types: NodeTypesWithEngine, + Types: NodeTypesWithEngine, >, Pool: TransactionPool + Unpin + 'static, Evm: ConfigureEvm
, @@ -349,16 +347,15 @@ impl OpPayloadBuilder { impl PayloadServiceBuilder for OpPayloadBuilder where - Node: FullNodeTypes< - Types: NodeTypesWithEngine, - >, + Node: + FullNodeTypes>, Pool: TransactionPool + Unpin + 'static, { async fn spawn_payload_service( self, ctx: &BuilderContext, pool: Pool, - ) -> eyre::Result> { + ) -> eyre::Result> { self.spawn(OpEvmConfig::new(ctx.chain_spec()), ctx, pool) } } From 4e3b32c5afe27ee9643d5e6afbc81bf670041f0f Mon Sep 17 00:00:00 2001 From: tedison <76473430+edisontim@users.noreply.github.com> Date: Sun, 3 Nov 2024 10:40:49 -0500 Subject: [PATCH 405/425] feat: add more decimals to database growth (#12288) --- etc/grafana/dashboards/overview.json | 1 + 1 file changed, 1 insertion(+) diff --git a/etc/grafana/dashboards/overview.json b/etc/grafana/dashboards/overview.json index 8c77f5979..25cc280fe 100644 --- a/etc/grafana/dashboards/overview.json +++ b/etc/grafana/dashboards/overview.json @@ -1953,6 +1953,7 @@ "mode": "off" } }, + "decimals": 4, "mappings": [], "thresholds": { "mode": "absolute", From 61f19ab2dcfe06a23dd7a88acf1ac8ce49ef75dd Mon Sep 17 00:00:00 2001 From: Deon <110722148+DanielEmmanuel1@users.noreply.github.com> Date: Sun, 3 Nov 2024 18:03:45 +0100 Subject: [PATCH 406/425] Refactor: use fully-qualified paths in Compact derives(Deon Branch) (#12279) Co-authored-by: Matthias Seitz --- crates/primitives/src/receipt.rs | 5 ++- crates/primitives/src/transaction/mod.rs | 3 -- crates/primitives/src/transaction/tx_type.rs | 3 -- crates/storage/codecs/derive/src/arbitrary.rs | 20 +++++++++-- .../codecs/derive/src/compact/generator.rs | 33 +++++++++++++++++-- .../storage/codecs/derive/src/compact/mod.rs | 11 ++++--- crates/storage/codecs/derive/src/lib.rs | 4 +-- .../codecs/src/alloy/authorization_list.rs | 3 +- .../codecs/src/alloy/genesis_account.rs | 10 ++++-- crates/storage/codecs/src/alloy/header.rs | 2 ++ .../codecs/src/alloy/transaction/eip1559.rs | 3 +- .../codecs/src/alloy/transaction/eip2930.rs | 3 +- .../codecs/src/alloy/transaction/eip4844.rs | 3 +- .../codecs/src/alloy/transaction/eip7702.rs | 3 +- .../codecs/src/alloy/transaction/legacy.rs | 3 +- .../codecs/src/alloy/transaction/optimism.rs | 3 +- crates/storage/codecs/src/alloy/withdrawal.rs | 3 +- crates/storage/codecs/src/lib.rs | 6 ++-- 18 files changed, 87 insertions(+), 34 deletions(-) diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index 21443f482..e60bddb9d 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -11,8 +11,6 @@ use alloy_rlp::{length_of_length, Decodable, Encodable, RlpDecodable, RlpEncodab use bytes::{Buf, BufMut}; use core::{cmp::Ordering, ops::Deref}; use derive_more::{DerefMut, From, IntoIterator}; -#[cfg(feature = "reth-codec")] -use reth_codecs::Compact; use serde::{Deserialize, Serialize}; /// Receipt containing result of transaction execution. @@ -91,7 +89,7 @@ impl Receipts { self.receipt_vec.len() } - /// Returns `true` if the `Receipts` vector is empty. + /// Returns true if the `Receipts` vector is empty. 
pub fn is_empty(&self) -> bool { self.receipt_vec.is_empty() } @@ -518,6 +516,7 @@ mod tests { use super::*; use crate::revm_primitives::Bytes; use alloy_primitives::{address, b256, bytes, hex_literal::hex}; + use reth_codecs::Compact; #[test] fn test_decode_receipt() { diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 2e0a786fc..59d3b9e12 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -63,9 +63,6 @@ use tx_type::{ COMPACT_IDENTIFIER_LEGACY, }; -#[cfg(test)] -use reth_codecs::Compact; - use alloc::vec::Vec; /// Either a transaction hash or number. diff --git a/crates/primitives/src/transaction/tx_type.rs b/crates/primitives/src/transaction/tx_type.rs index b6e1ecbf2..eff1c17a7 100644 --- a/crates/primitives/src/transaction/tx_type.rs +++ b/crates/primitives/src/transaction/tx_type.rs @@ -6,9 +6,6 @@ use alloy_primitives::{U64, U8}; use alloy_rlp::{Decodable, Encodable}; use serde::{Deserialize, Serialize}; -#[cfg(test)] -use reth_codecs::Compact; - /// Identifier parameter for legacy transaction #[cfg(any(test, feature = "reth-codec"))] pub(crate) const COMPACT_IDENTIFIER_LEGACY: usize = 0; diff --git a/crates/storage/codecs/derive/src/arbitrary.rs b/crates/storage/codecs/derive/src/arbitrary.rs index 8aa44062e..753bb1e33 100644 --- a/crates/storage/codecs/derive/src/arbitrary.rs +++ b/crates/storage/codecs/derive/src/arbitrary.rs @@ -18,10 +18,26 @@ pub fn maybe_generate_tests( let mut traits = vec![]; let mut roundtrips = vec![]; let mut additional_tests = vec![]; + let mut is_crate = false; - for arg in args { + let mut iter = args.into_iter().peekable(); + + // we check if there's a crate argument which is used from inside the codecs crate directly + if let Some(arg) = iter.peek() { + if arg.to_string() == "crate" { + is_crate = true; + iter.next(); + } + } + + for arg in iter { if arg.to_string() == "compact" { - traits.push(quote! { use super::Compact; }); + let path = if is_crate { + quote! { use crate::Compact; } + } else { + quote! { use reth_codecs::Compact; } + }; + traits.push(path); roundtrips.push(quote! { { let mut buf = vec![]; diff --git a/crates/storage/codecs/derive/src/compact/generator.rs b/crates/storage/codecs/derive/src/compact/generator.rs index 1fb6d40fa..cf9bcc0c6 100644 --- a/crates/storage/codecs/derive/src/compact/generator.rs +++ b/crates/storage/codecs/derive/src/compact/generator.rs @@ -2,10 +2,12 @@ use super::*; use convert_case::{Case, Casing}; +use syn::{Attribute, LitStr}; /// Generates code to implement the `Compact` trait for a data type. pub fn generate_from_to( ident: &Ident, + attrs: &[Attribute], has_lifetime: bool, fields: &FieldList, is_zstd: bool, @@ -20,6 +22,8 @@ pub fn generate_from_to( let fuzz = format_ident!("fuzz_test_{snake_case_ident}"); let test = format_ident!("fuzz_{snake_case_ident}"); + let reth_codecs = parse_reth_codecs_path(attrs).unwrap(); + let lifetime = if has_lifetime { quote! { 'a } } else { @@ -28,11 +32,11 @@ pub fn generate_from_to( let impl_compact = if has_lifetime { quote! { - impl<#lifetime> Compact for #ident<#lifetime> + impl<#lifetime> #reth_codecs::Compact for #ident<#lifetime> } } else { quote! 
{ - impl Compact for #ident + impl #reth_codecs::Compact for #ident } }; @@ -53,6 +57,7 @@ pub fn generate_from_to( #[allow(dead_code)] #[test_fuzz::test_fuzz] fn #fuzz(obj: #ident) { + use #reth_codecs::Compact; let mut buf = vec![]; let len = obj.clone().to_compact(&mut buf); let (same_obj, buf) = #ident::from_compact(buf.as_ref(), len); @@ -191,7 +196,7 @@ fn generate_to_compact(fields: &FieldList, ident: &Ident, is_zstd: bool) -> Vec< } // Just because a type supports compression, doesn't mean all its values are to be compressed. - // We skip the smaller ones, and thus require a flag `__zstd` to specify if this value is + // We skip the smaller ones, and thus require a flag` __zstd` to specify if this value is // compressed or not. if is_zstd { lines.push(quote! { @@ -232,3 +237,25 @@ fn generate_to_compact(fields: &FieldList, ident: &Ident, is_zstd: bool) -> Vec< lines } + +/// Function to extract the crate path from `reth_codecs(crate = "...")` attribute. +fn parse_reth_codecs_path(attrs: &[Attribute]) -> syn::Result { + // let default_crate_path: syn::Path = syn::parse_str("reth-codecs").unwrap(); + let mut reth_codecs_path: syn::Path = syn::parse_quote!(reth_codecs); + for attr in attrs { + if attr.path().is_ident("reth_codecs") { + attr.parse_nested_meta(|meta| { + if meta.path.is_ident("crate") { + let value = meta.value()?; + let lit: LitStr = value.parse()?; + reth_codecs_path = syn::parse_str(&lit.value())?; + Ok(()) + } else { + Err(meta.error("unsupported attribute")) + } + })?; + } + } + + Ok(reth_codecs_path) +} diff --git a/crates/storage/codecs/derive/src/compact/mod.rs b/crates/storage/codecs/derive/src/compact/mod.rs index e5a79b3fe..b9d5cf18d 100644 --- a/crates/storage/codecs/derive/src/compact/mod.rs +++ b/crates/storage/codecs/derive/src/compact/mod.rs @@ -43,13 +43,13 @@ pub enum FieldTypes { pub fn derive(input: TokenStream, is_zstd: bool) -> TokenStream { let mut output = quote! {}; - let DeriveInput { ident, data, generics, .. } = parse_macro_input!(input); + let DeriveInput { ident, data, generics, attrs, .. } = parse_macro_input!(input); let has_lifetime = has_lifetime(&generics); let fields = get_fields(&data); output.extend(generate_flag_struct(&ident, has_lifetime, &fields, is_zstd)); - output.extend(generate_from_to(&ident, has_lifetime, &fields, is_zstd)); + output.extend(generate_from_to(&ident, &attrs, has_lifetime, &fields, is_zstd)); output.into() } @@ -233,10 +233,10 @@ mod tests { // Generate code that will impl the `Compact` trait. let mut output = quote! {}; - let DeriveInput { ident, data, .. } = parse2(f_struct).unwrap(); + let DeriveInput { ident, data, attrs, .. } = parse2(f_struct).unwrap(); let fields = get_fields(&data); output.extend(generate_flag_struct(&ident, false, &fields, false)); - output.extend(generate_from_to(&ident, false, &fields, false)); + output.extend(generate_from_to(&ident, &attrs, false, &fields, false)); // Expected output in a TokenStream format. Commas matter! let should_output = quote! 
{ @@ -285,6 +285,7 @@ mod tests { #[allow(dead_code)] #[test_fuzz::test_fuzz] fn fuzz_test_test_struct(obj: TestStruct) { + use reth_codecs::Compact; let mut buf = vec![]; let len = obj.clone().to_compact(&mut buf); let (same_obj, buf) = TestStruct::from_compact(buf.as_ref(), len); @@ -295,7 +296,7 @@ mod tests { pub fn fuzz_test_struct() { fuzz_test_test_struct(TestStruct::default()) } - impl Compact for TestStruct { + impl reth_codecs::Compact for TestStruct { fn to_compact(&self, buf: &mut B) -> usize where B: bytes::BufMut + AsMut<[u8]> { let mut flags = TestStructFlags::default(); let mut total_length = 0; diff --git a/crates/storage/codecs/derive/src/lib.rs b/crates/storage/codecs/derive/src/lib.rs index 4ffdbfd6e..0b4015830 100644 --- a/crates/storage/codecs/derive/src/lib.rs +++ b/crates/storage/codecs/derive/src/lib.rs @@ -49,14 +49,14 @@ mod compact; /// own encoding and do not rely on the bitflag struct. /// - `Bytes` fields and any types containing a `Bytes` field should be placed last to ensure /// efficient decoding. -#[proc_macro_derive(Compact, attributes(maybe_zero))] +#[proc_macro_derive(Compact, attributes(maybe_zero, reth_codecs))] pub fn derive(input: TokenStream) -> TokenStream { let is_zstd = false; compact::derive(input, is_zstd) } /// Adds `zstd` compression to derived [`Compact`]. -#[proc_macro_derive(CompactZstd, attributes(maybe_zero))] +#[proc_macro_derive(CompactZstd, attributes(maybe_zero, reth_codecs))] pub fn derive_zstd(input: TokenStream) -> TokenStream { let is_zstd = true; compact::derive(input, is_zstd) diff --git a/crates/storage/codecs/src/alloy/authorization_list.rs b/crates/storage/codecs/src/alloy/authorization_list.rs index 3fc9518a6..e17c0fb32 100644 --- a/crates/storage/codecs/src/alloy/authorization_list.rs +++ b/crates/storage/codecs/src/alloy/authorization_list.rs @@ -11,12 +11,13 @@ use reth_codecs_derive::add_arbitrary_tests; /// /// Notice: Make sure this struct is 1:1 with `alloy_eips::eip7702::Authorization` #[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] +#[reth_codecs(crate = "crate")] #[cfg_attr( any(test, feature = "test-utils"), derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) )] #[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] -#[add_arbitrary_tests(compact)] +#[add_arbitrary_tests(crate, compact)] pub(crate) struct Authorization { chain_id: u64, address: Address, diff --git a/crates/storage/codecs/src/alloy/genesis_account.rs b/crates/storage/codecs/src/alloy/genesis_account.rs index b29fe526d..a35d4947d 100644 --- a/crates/storage/codecs/src/alloy/genesis_account.rs +++ b/crates/storage/codecs/src/alloy/genesis_account.rs @@ -11,6 +11,7 @@ use reth_codecs_derive::add_arbitrary_tests; /// /// Notice: Make sure this struct is 1:1 with `alloy_genesis::GenesisAccount` #[derive(Debug, Clone, PartialEq, Eq, Compact)] +#[reth_codecs(crate = "crate")] pub(crate) struct GenesisAccountRef<'a> { /// The nonce of the account at genesis. nonce: Option, @@ -27,12 +28,13 @@ pub(crate) struct GenesisAccountRef<'a> { /// Acts as bridge which simplifies Compact implementation for /// `AlloyGenesisAccount`. 
#[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] +#[reth_codecs(crate = "crate")] #[cfg_attr( any(test, feature = "test-utils"), derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) )] #[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] -#[add_arbitrary_tests(compact)] +#[add_arbitrary_tests(crate, compact)] pub(crate) struct GenesisAccount { /// The nonce of the account at genesis. nonce: Option, @@ -47,21 +49,23 @@ pub(crate) struct GenesisAccount { } #[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] +#[reth_codecs(crate = "crate")] #[cfg_attr( any(test, feature = "test-utils"), derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) )] -#[add_arbitrary_tests(compact)] +#[add_arbitrary_tests(crate, compact)] pub(crate) struct StorageEntries { entries: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] +#[reth_codecs(crate = "crate")] #[cfg_attr( any(test, feature = "test-utils"), derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) )] -#[add_arbitrary_tests(compact)] +#[add_arbitrary_tests(crate, compact)] pub(crate) struct StorageEntry { key: B256, value: B256, diff --git a/crates/storage/codecs/src/alloy/header.rs b/crates/storage/codecs/src/alloy/header.rs index 78f2029c3..04b7d6ab7 100644 --- a/crates/storage/codecs/src/alloy/header.rs +++ b/crates/storage/codecs/src/alloy/header.rs @@ -18,6 +18,7 @@ use alloy_primitives::{Address, BlockNumber, Bloom, Bytes, B256, U256}; )] #[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] +#[reth_codecs(crate = "crate")] pub(crate) struct Header { parent_hash: B256, ommers_hash: B256, @@ -54,6 +55,7 @@ pub(crate) struct Header { )] #[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] +#[reth_codecs(crate = "crate")] pub(crate) struct HeaderExt { requests_hash: Option, } diff --git a/crates/storage/codecs/src/alloy/transaction/eip1559.rs b/crates/storage/codecs/src/alloy/transaction/eip1559.rs index 0e7f44cde..6d910a690 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip1559.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip1559.rs @@ -13,11 +13,12 @@ use alloy_primitives::{Bytes, ChainId, TxKind, U256}; /// /// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip1559`] #[derive(Debug, Clone, PartialEq, Eq, Hash, Compact, Default)] +#[reth_codecs(crate = "crate")] #[cfg_attr( any(test, feature = "test-utils"), derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) )] -#[cfg_attr(any(test, feature = "test-utils"), crate::add_arbitrary_tests(compact))] +#[cfg_attr(any(test, feature = "test-utils"), crate::add_arbitrary_tests(crate, compact))] #[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] pub(crate) struct TxEip1559 { chain_id: ChainId, diff --git a/crates/storage/codecs/src/alloy/transaction/eip2930.rs b/crates/storage/codecs/src/alloy/transaction/eip2930.rs index 75cab9e8a..aeb08f361 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip2930.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip2930.rs @@ -15,12 +15,13 @@ use reth_codecs_derive::add_arbitrary_tests; /// /// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip2930`] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] +#[reth_codecs(crate = "crate")] #[cfg_attr( any(test, feature = "test-utils"), 
derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) )] #[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] -#[add_arbitrary_tests(compact)] +#[add_arbitrary_tests(crate, compact)] pub(crate) struct TxEip2930 { chain_id: ChainId, nonce: u64, diff --git a/crates/storage/codecs/src/alloy/transaction/eip4844.rs b/crates/storage/codecs/src/alloy/transaction/eip4844.rs index c89e2b078..fac9ab9a1 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip4844.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip4844.rs @@ -16,9 +16,10 @@ use reth_codecs_derive::add_arbitrary_tests; /// /// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip4844`] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] +#[reth_codecs(crate = "crate")] #[cfg_attr(any(test, feature = "test-utils"), derive(serde::Serialize, serde::Deserialize))] #[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] -#[add_arbitrary_tests(compact)] +#[add_arbitrary_tests(crate, compact)] pub(crate) struct TxEip4844 { chain_id: ChainId, nonce: u64, diff --git a/crates/storage/codecs/src/alloy/transaction/eip7702.rs b/crates/storage/codecs/src/alloy/transaction/eip7702.rs index 8acf59425..eab10af0b 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip7702.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip7702.rs @@ -16,12 +16,13 @@ use reth_codecs_derive::add_arbitrary_tests; /// /// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip7702`] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] +#[reth_codecs(crate = "crate")] #[cfg_attr( any(test, feature = "test-utils"), derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) )] #[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] -#[add_arbitrary_tests(compact)] +#[add_arbitrary_tests(crate, compact)] pub(crate) struct TxEip7702 { chain_id: ChainId, nonce: u64, diff --git a/crates/storage/codecs/src/alloy/transaction/legacy.rs b/crates/storage/codecs/src/alloy/transaction/legacy.rs index c83626aa4..60250ba64 100644 --- a/crates/storage/codecs/src/alloy/transaction/legacy.rs +++ b/crates/storage/codecs/src/alloy/transaction/legacy.rs @@ -6,10 +6,11 @@ use alloy_primitives::{Bytes, ChainId, TxKind, U256}; /// Legacy transaction. 
#[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] +#[reth_codecs(crate = "crate")] #[cfg_attr( any(test, feature = "test-utils"), derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize), - crate::add_arbitrary_tests(compact) + crate::add_arbitrary_tests(crate, compact) )] #[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] pub(crate) struct TxLegacy { diff --git a/crates/storage/codecs/src/alloy/transaction/optimism.rs b/crates/storage/codecs/src/alloy/transaction/optimism.rs index 22f508fd4..bb970b581 100644 --- a/crates/storage/codecs/src/alloy/transaction/optimism.rs +++ b/crates/storage/codecs/src/alloy/transaction/optimism.rs @@ -19,7 +19,8 @@ use reth_codecs_derive::add_arbitrary_tests; derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) )] #[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] -#[add_arbitrary_tests(compact)] +#[reth_codecs(crate = "crate")] +#[add_arbitrary_tests(crate, compact)] pub(crate) struct TxDeposit { source_hash: B256, from: Address, diff --git a/crates/storage/codecs/src/alloy/withdrawal.rs b/crates/storage/codecs/src/alloy/withdrawal.rs index 0f3347cec..8aa567179 100644 --- a/crates/storage/codecs/src/alloy/withdrawal.rs +++ b/crates/storage/codecs/src/alloy/withdrawal.rs @@ -13,8 +13,9 @@ use reth_codecs_derive::add_arbitrary_tests; any(test, feature = "test-utils"), derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize) )] +#[reth_codecs(crate = "crate")] #[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] -#[add_arbitrary_tests(compact)] +#[add_arbitrary_tests(crate, compact)] pub(crate) struct Withdrawal { /// Monotonically increasing identifier issued by consensus layer. index: u64, diff --git a/crates/storage/codecs/src/lib.rs b/crates/storage/codecs/src/lib.rs index 598f2131b..284c6454f 100644 --- a/crates/storage/codecs/src/lib.rs +++ b/crates/storage/codecs/src/lib.rs @@ -662,7 +662,8 @@ mod tests { } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Compact, arbitrary::Arbitrary)] - #[add_arbitrary_tests(compact)] + #[add_arbitrary_tests(crate, compact)] + #[reth_codecs(crate = "crate")] struct TestStruct { f_u64: u64, f_u256: U256, @@ -714,7 +715,8 @@ mod tests { #[derive( Debug, PartialEq, Clone, Default, Serialize, Deserialize, Compact, arbitrary::Arbitrary, )] - #[add_arbitrary_tests(compact)] + #[add_arbitrary_tests(crate, compact)] + #[reth_codecs(crate = "crate")] enum TestEnum { #[default] Var0, From 21d911abb2cf594834c1818d2f723718b5109f8b Mon Sep 17 00:00:00 2001 From: Kien Trinh <51135161+kien6034@users.noreply.github.com> Date: Mon, 4 Nov 2024 00:49:07 +0700 Subject: [PATCH 407/425] docs: add debug.etherscan in the book (#12293) Co-authored-by: Matthias Seitz --- book/run/mainnet.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/book/run/mainnet.md b/book/run/mainnet.md index 6f1ec144d..c4908971f 100644 --- a/book/run/mainnet.md +++ b/book/run/mainnet.md @@ -84,3 +84,13 @@ In the meantime, consider setting up [observability](./observability.md) to moni ## Running without a Consensus Layer We provide a method for running Reth without a Consensus Layer via the `--debug.tip ` parameter. If you provide that to your node, it will simulate sending an `engine_forkchoiceUpdated` message _once_ and will trigger syncing to the provided block hash. 
This is useful for testing and debugging purposes, but in order to have a node that can keep up with the tip you'll need to run a CL alongside it. At the moment we have no plans of including a Consensus Layer implementation in Reth, and we are open to including light clients other methods of syncing like importing Lighthouse as a library. + +## Running with Etherscan as Block Source + +You can use `--debug.etherscan` to run Reth with a fake consensus client that advances the chain using recent blocks on Etherscan. This requires an Etherscan API key (set via `ETHERSCAN_API_KEY` environment variable). Optionally, specify a custom API URL with `--debug.etherscan `. + +Example: +```bash +export ETHERSCAN_API_KEY=your_api_key_here +reth node --debug.etherscan +``` \ No newline at end of file From bb03578eed90d2eff4d75a420cb4bee224102167 Mon Sep 17 00:00:00 2001 From: garwah <14845405+garwahl@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:01:40 +1100 Subject: [PATCH 408/425] chore: Move FillTxEnv::fill_tx_env into SignedTransaction trait and implement in TransactionSigned (#12186) Co-authored-by: garwah Co-authored-by: Emilia Hane --- .../src/transaction/signed.rs | 4 + crates/primitives/src/transaction/mod.rs | 117 ++++++++++++++++++ 2 files changed, 121 insertions(+) diff --git a/crates/primitives-traits/src/transaction/signed.rs b/crates/primitives-traits/src/transaction/signed.rs index 748ef3966..4c1243721 100644 --- a/crates/primitives-traits/src/transaction/signed.rs +++ b/crates/primitives-traits/src/transaction/signed.rs @@ -6,6 +6,7 @@ use core::hash::Hash; use alloy_consensus::Transaction; use alloy_eips::eip2718::{Decodable2718, Encodable2718}; use alloy_primitives::{keccak256, Address, Signature, TxHash, B256}; +use revm_primitives::TxEnv; /// A signed transaction. pub trait SignedTransaction: @@ -64,4 +65,7 @@ pub trait SignedTransaction: fn recalculate_hash(&self) -> B256 { keccak256(self.encoded_2718()) } + + /// Fills [`TxEnv`] with an [`Address`] and transaction. + fn fill_tx_env(&self, tx_env: &mut TxEnv, sender: Address); } diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 59d3b9e12..b9a24316c 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -64,6 +64,8 @@ use tx_type::{ }; use alloc::vec::Vec; +use reth_primitives_traits::SignedTransaction; +use revm_primitives::{AuthorizationList, TxEnv}; /// Either a transaction hash or number. pub type TxHashOrNumber = BlockHashOrNumber; @@ -1123,6 +1125,11 @@ impl TransactionSigned { &self.signature } + /// Transaction + pub const fn transaction(&self) -> &Transaction { + &self.transaction + } + /// Transaction hash. Used to identify transaction. pub const fn hash(&self) -> TxHash { self.hash @@ -1398,6 +1405,116 @@ impl alloy_consensus::Transaction for TransactionSigned { } } +impl SignedTransaction for TransactionSigned { + type Transaction = Transaction; + + fn tx_hash(&self) -> &TxHash { + Self::hash_ref(self) + } + + fn transaction(&self) -> &Self::Transaction { + Self::transaction(self) + } + + fn signature(&self) -> &Signature { + Self::signature(self) + } + + fn recover_signer(&self) -> Option
{ + Self::recover_signer(self) + } + + fn recover_signer_unchecked(&self) -> Option<Address>
{ + Self::recover_signer_unchecked(self) + } + + fn from_transaction_and_signature( + transaction: Self::Transaction, + signature: Signature, + ) -> Self { + Self::from_transaction_and_signature(transaction, signature) + } + + fn fill_tx_env(&self, tx_env: &mut TxEnv, sender: Address) { + tx_env.caller = sender; + match self.as_ref() { + Transaction::Legacy(tx) => { + tx_env.gas_limit = tx.gas_limit; + tx_env.gas_price = U256::from(tx.gas_price); + tx_env.gas_priority_fee = None; + tx_env.transact_to = tx.to; + tx_env.value = tx.value; + tx_env.data = tx.input.clone(); + tx_env.chain_id = tx.chain_id; + tx_env.nonce = Some(tx.nonce); + tx_env.access_list.clear(); + tx_env.blob_hashes.clear(); + tx_env.max_fee_per_blob_gas.take(); + tx_env.authorization_list = None; + } + Transaction::Eip2930(tx) => { + tx_env.gas_limit = tx.gas_limit; + tx_env.gas_price = U256::from(tx.gas_price); + tx_env.gas_priority_fee = None; + tx_env.transact_to = tx.to; + tx_env.value = tx.value; + tx_env.data = tx.input.clone(); + tx_env.chain_id = Some(tx.chain_id); + tx_env.nonce = Some(tx.nonce); + tx_env.access_list.clone_from(&tx.access_list.0); + tx_env.blob_hashes.clear(); + tx_env.max_fee_per_blob_gas.take(); + tx_env.authorization_list = None; + } + Transaction::Eip1559(tx) => { + tx_env.gas_limit = tx.gas_limit; + tx_env.gas_price = U256::from(tx.max_fee_per_gas); + tx_env.gas_priority_fee = Some(U256::from(tx.max_priority_fee_per_gas)); + tx_env.transact_to = tx.to; + tx_env.value = tx.value; + tx_env.data = tx.input.clone(); + tx_env.chain_id = Some(tx.chain_id); + tx_env.nonce = Some(tx.nonce); + tx_env.access_list.clone_from(&tx.access_list.0); + tx_env.blob_hashes.clear(); + tx_env.max_fee_per_blob_gas.take(); + tx_env.authorization_list = None; + } + Transaction::Eip4844(tx) => { + tx_env.gas_limit = tx.gas_limit; + tx_env.gas_price = U256::from(tx.max_fee_per_gas); + tx_env.gas_priority_fee = Some(U256::from(tx.max_priority_fee_per_gas)); + tx_env.transact_to = TxKind::Call(tx.to); + tx_env.value = tx.value; + tx_env.data = tx.input.clone(); + tx_env.chain_id = Some(tx.chain_id); + tx_env.nonce = Some(tx.nonce); + tx_env.access_list.clone_from(&tx.access_list.0); + tx_env.blob_hashes.clone_from(&tx.blob_versioned_hashes); + tx_env.max_fee_per_blob_gas = Some(U256::from(tx.max_fee_per_blob_gas)); + tx_env.authorization_list = None; + } + Transaction::Eip7702(tx) => { + tx_env.gas_limit = tx.gas_limit; + tx_env.gas_price = U256::from(tx.max_fee_per_gas); + tx_env.gas_priority_fee = Some(U256::from(tx.max_priority_fee_per_gas)); + tx_env.transact_to = tx.to.into(); + tx_env.value = tx.value; + tx_env.data = tx.input.clone(); + tx_env.chain_id = Some(tx.chain_id); + tx_env.nonce = Some(tx.nonce); + tx_env.access_list.clone_from(&tx.access_list.0); + tx_env.blob_hashes.clear(); + tx_env.max_fee_per_blob_gas.take(); + tx_env.authorization_list = + Some(AuthorizationList::Signed(tx.authorization_list.clone())); + } + #[cfg(feature = "optimism")] + Transaction::Deposit(_) => {} + } + } +} + impl From for TransactionSigned { fn from(recovered: TransactionSignedEcRecovered) -> Self { recovered.signed_transaction From 98140416032dd0e964c28fe87675e453baa815d2 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 4 Nov 2024 09:18:13 +0100 Subject: [PATCH 409/425] primitives: rm useless `OP_` constants (#12298) --- crates/primitives-traits/src/constants/mod.rs | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git 
a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index f3dd28e19..94eaf95c2 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -1,6 +1,6 @@ //! Ethereum protocol-related constants -use alloy_primitives::{address, b256, Address, B256}; +use alloy_primitives::{b256, B256}; /// Gas units, for example [`GIGAGAS`]. pub mod gas_units; @@ -16,12 +16,6 @@ pub const MINIMUM_GAS_LIMIT: u64 = 5000; pub const HOLESKY_GENESIS_HASH: B256 = b256!("b5f7f912443c940f21fd611f12828d75b534364ed9e95ca4e307729a4661bde4"); -/// From address from Optimism system txs: `0xdeaddeaddeaddeaddeaddeaddeaddeaddead0001` -pub const OP_SYSTEM_TX_FROM_ADDR: Address = address!("deaddeaddeaddeaddeaddeaddeaddeaddead0001"); - -/// To address from Optimism system txs: `0x4200000000000000000000000000000000000015` -pub const OP_SYSTEM_TX_TO_ADDR: Address = address!("4200000000000000000000000000000000000015"); - /// The number of blocks to unwind during a reorg that already became a part of canonical chain. /// /// In reality, the node can end up in this particular situation very rarely. It would happen only From 56b76871edd9680290242dd0f7d66d7a8b8b6cba Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 4 Nov 2024 09:21:17 +0100 Subject: [PATCH 410/425] primitives: rm alloy `Withdrawal` reexport (#12296) --- crates/consensus/common/src/validation.rs | 3 ++- crates/evm/src/state_change.rs | 3 ++- crates/primitives-traits/src/lib.rs | 2 +- crates/primitives-traits/src/withdrawal.rs | 5 +---- crates/primitives/src/lib.rs | 2 +- crates/primitives/src/proofs.rs | 6 ++---- .../storage/provider/src/providers/blockchain_provider.rs | 4 ++-- crates/storage/provider/src/providers/consistent.rs | 6 ++++-- crates/storage/provider/src/providers/database/mod.rs | 5 ++--- crates/storage/provider/src/providers/database/provider.rs | 4 ++-- crates/storage/provider/src/providers/mod.rs | 5 ++--- .../storage/provider/src/providers/static_file/manager.rs | 5 ++--- crates/storage/provider/src/test_utils/blocks.rs | 3 ++- crates/storage/provider/src/test_utils/mock.rs | 4 ++-- crates/storage/provider/src/test_utils/noop.rs | 4 ++-- crates/storage/storage-api/src/withdrawals.rs | 4 ++-- examples/custom-beacon-withdrawals/src/main.rs | 4 ++-- testing/testing-utils/Cargo.toml | 1 + testing/testing-utils/src/generators.rs | 3 ++- 19 files changed, 36 insertions(+), 37 deletions(-) diff --git a/crates/consensus/common/src/validation.rs b/crates/consensus/common/src/validation.rs index c6539cdcf..a6e5d2158 100644 --- a/crates/consensus/common/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -275,6 +275,7 @@ pub fn validate_against_parent_4844( mod tests { use super::*; use alloy_consensus::{TxEip4844, EMPTY_OMMER_ROOT_HASH, EMPTY_ROOT_HASH}; + use alloy_eips::eip4895::Withdrawal; use alloy_primitives::{ hex_literal::hex, Address, BlockHash, BlockNumber, Bytes, Parity, Sealable, U256, }; @@ -283,7 +284,7 @@ mod tests { use reth_chainspec::ChainSpecBuilder; use reth_primitives::{ proofs, Account, BlockBody, BlockHashOrNumber, Signature, Transaction, TransactionSigned, - Withdrawal, Withdrawals, + Withdrawals, }; use reth_storage_api::{ errors::provider::ProviderResult, AccountReader, HeaderProvider, WithdrawalsProvider, diff --git a/crates/evm/src/state_change.rs b/crates/evm/src/state_change.rs index 2a3d93f94..2d91ac30e 100644 --- a/crates/evm/src/state_change.rs +++ 
b/crates/evm/src/state_change.rs @@ -1,9 +1,10 @@ //! State changes that are not related to transactions. +use alloy_eips::eip4895::Withdrawal; use alloy_primitives::{map::HashMap, Address, U256}; use reth_chainspec::EthereumHardforks; use reth_consensus_common::calc; -use reth_primitives::{Block, Withdrawal, Withdrawals}; +use reth_primitives::{Block, Withdrawals}; /// Collect all balance changes at the end of the block. /// diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index 9f41bbd47..90a6935ae 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -34,7 +34,7 @@ pub mod block; pub use block::{body::BlockBody, Block}; mod withdrawal; -pub use withdrawal::{Withdrawal, Withdrawals}; +pub use withdrawal::Withdrawals; mod error; pub use error::{GotExpected, GotExpectedBoxed}; diff --git a/crates/primitives-traits/src/withdrawal.rs b/crates/primitives-traits/src/withdrawal.rs index 995e60292..8f072afa5 100644 --- a/crates/primitives-traits/src/withdrawal.rs +++ b/crates/primitives-traits/src/withdrawal.rs @@ -1,13 +1,10 @@ //! [EIP-4895](https://eips.ethereum.org/EIPS/eip-4895) Withdrawal types. use alloc::vec::Vec; +use alloy_eips::eip4895::Withdrawal; use alloy_rlp::{RlpDecodableWrapper, RlpEncodableWrapper}; use derive_more::{AsRef, Deref, DerefMut, From, IntoIterator}; use reth_codecs::{add_arbitrary_tests, Compact}; - -/// Re-export from `alloy_eips`. -#[doc(inline)] -pub use alloy_eips::eip4895::Withdrawal; use serde::{Deserialize, Serialize}; /// Represents a collection of Withdrawals. diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 4e3f1d3bd..9bb27e658 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -45,7 +45,7 @@ pub use receipt::{ }; pub use reth_primitives_traits::{ logs_bloom, Account, Bytecode, GotExpected, GotExpectedBoxed, Header, HeaderError, Log, - LogData, SealedHeader, StorageEntry, Withdrawal, Withdrawals, + LogData, SealedHeader, StorageEntry, Withdrawals, }; pub use static_file::StaticFileSegment; diff --git a/crates/primitives/src/proofs.rs b/crates/primitives/src/proofs.rs index 169724670..000244d2c 100644 --- a/crates/primitives/src/proofs.rs +++ b/crates/primitives/src/proofs.rs @@ -1,11 +1,9 @@ //! Helper function for calculating Merkle proofs and hashes. 
-use crate::{ - Header, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, TransactionSigned, Withdrawal, -}; +use crate::{Header, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, TransactionSigned}; use alloc::vec::Vec; use alloy_consensus::EMPTY_OMMER_ROOT_HASH; -use alloy_eips::eip2718::Encodable2718; +use alloy_eips::{eip2718::Encodable2718, eip4895::Withdrawal}; use alloy_primitives::{keccak256, B256}; use reth_trie_common::root::{ordered_trie_root, ordered_trie_root_with_encoder}; diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs b/crates/storage/provider/src/providers/blockchain_provider.rs index 64a8a204a..85f759310 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -9,7 +9,7 @@ use crate::{ StageCheckpointReader, StateProviderBox, StateProviderFactory, StateReader, StaticFileProviderFactory, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; -use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag}; +use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag}; use alloy_primitives::{Address, BlockHash, BlockNumber, Sealable, TxHash, TxNumber, B256, U256}; use alloy_rpc_types_engine::ForkchoiceState; use reth_chain_state::{ @@ -25,7 +25,7 @@ use reth_node_types::NodeTypesWithDB; use reth_primitives::{ Account, Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, StorageEntry, TransactionMeta, TransactionSigned, TransactionSignedNoHash, - Withdrawal, Withdrawals, + Withdrawals, }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; diff --git a/crates/storage/provider/src/providers/consistent.rs b/crates/storage/provider/src/providers/consistent.rs index e6ca1a919..0784bdb63 100644 --- a/crates/storage/provider/src/providers/consistent.rs +++ b/crates/storage/provider/src/providers/consistent.rs @@ -6,7 +6,9 @@ use crate::{ StageCheckpointReader, StateReader, StaticFileProviderFactory, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; -use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag, HashOrNumber}; +use alloy_eips::{ + eip4895::Withdrawal, BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag, HashOrNumber, +}; use alloy_primitives::{Address, BlockHash, BlockNumber, Sealable, TxHash, TxNumber, B256, U256}; use reth_chain_state::{BlockState, CanonicalInMemoryState, MemoryOverlayStateProviderRef}; use reth_chainspec::{ChainInfo, EthereumHardforks}; @@ -17,7 +19,7 @@ use reth_execution_types::{BundleStateInit, ExecutionOutcome, RevertsInit}; use reth_primitives::{ Account, Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, StorageEntry, TransactionMeta, TransactionSigned, TransactionSignedNoHash, - Withdrawal, Withdrawals, + Withdrawals, }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; diff --git a/crates/storage/provider/src/providers/database/mod.rs b/crates/storage/provider/src/providers/database/mod.rs index 04a30ce90..bd0466ff9 100644 --- a/crates/storage/provider/src/providers/database/mod.rs +++ b/crates/storage/provider/src/providers/database/mod.rs @@ -7,7 +7,7 @@ use crate::{ PruneCheckpointReader, StageCheckpointReader, StateProviderBox, StaticFileProviderFactory, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; -use 
alloy_eips::BlockHashOrNumber; +use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber}; use alloy_primitives::{Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; use core::fmt; use reth_chainspec::{ChainInfo, EthereumHardforks}; @@ -18,8 +18,7 @@ use reth_evm::ConfigureEvmEnv; use reth_node_types::NodeTypesWithDB; use reth_primitives::{ Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, - StaticFileSegment, TransactionMeta, TransactionSigned, TransactionSignedNoHash, Withdrawal, - Withdrawals, + StaticFileSegment, TransactionMeta, TransactionSigned, TransactionSignedNoHash, Withdrawals, }; use reth_prune_types::{PruneCheckpoint, PruneModes, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 2af22cec0..d47532e71 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -15,7 +15,7 @@ use crate::{ StaticFileProviderFactory, StatsReader, StorageReader, StorageTrieWriter, TransactionVariant, TransactionsProvider, TransactionsProviderExt, TrieWriter, WithdrawalsProvider, }; -use alloy_eips::BlockHashOrNumber; +use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber}; use alloy_primitives::{keccak256, Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; use itertools::{izip, Itertools}; use rayon::slice::ParallelSliceMut; @@ -43,7 +43,7 @@ use reth_primitives::{ Account, Block, BlockBody, BlockWithSenders, Bytecode, GotExpected, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, StaticFileSegment, StorageEntry, TransactionMeta, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, - Withdrawal, Withdrawals, + Withdrawals, }; use reth_prune_types::{PruneCheckpoint, PruneModes, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; diff --git a/crates/storage/provider/src/providers/mod.rs b/crates/storage/provider/src/providers/mod.rs index 3b24617fd..5e95c6ce0 100644 --- a/crates/storage/provider/src/providers/mod.rs +++ b/crates/storage/provider/src/providers/mod.rs @@ -7,7 +7,7 @@ use crate::{ StageCheckpointReader, StateProviderBox, StateProviderFactory, StaticFileProviderFactory, TransactionVariant, TransactionsProvider, TreeViewer, WithdrawalsProvider, }; -use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag}; +use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag}; use alloy_primitives::{Address, BlockHash, BlockNumber, Sealable, TxHash, TxNumber, B256, U256}; use reth_blockchain_tree_api::{ error::{CanonicalError, InsertBlockError}, @@ -21,8 +21,7 @@ use reth_evm::ConfigureEvmEnv; use reth_node_types::NodeTypesWithDB; use reth_primitives::{ Account, Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders, - SealedHeader, TransactionMeta, TransactionSigned, TransactionSignedNoHash, Withdrawal, - Withdrawals, + SealedHeader, TransactionMeta, TransactionSigned, TransactionSignedNoHash, Withdrawals, }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; diff --git a/crates/storage/provider/src/providers/static_file/manager.rs b/crates/storage/provider/src/providers/static_file/manager.rs index e81dc01f7..70c1e38f6 100644 --- a/crates/storage/provider/src/providers/static_file/manager.rs +++ 
b/crates/storage/provider/src/providers/static_file/manager.rs @@ -7,7 +7,7 @@ use crate::{ ReceiptProvider, StageCheckpointReader, StatsReader, TransactionVariant, TransactionsProvider, TransactionsProviderExt, WithdrawalsProvider, }; -use alloy_eips::BlockHashOrNumber; +use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber}; use alloy_primitives::{keccak256, Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; use dashmap::DashMap; use notify::{RecommendedWatcher, RecursiveMode, Watcher}; @@ -31,8 +31,7 @@ use reth_primitives::{ DEFAULT_BLOCKS_PER_STATIC_FILE, }, Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, - StaticFileSegment, TransactionMeta, TransactionSigned, TransactionSignedNoHash, Withdrawal, - Withdrawals, + StaticFileSegment, TransactionMeta, TransactionSigned, TransactionSignedNoHash, Withdrawals, }; use reth_stages_types::{PipelineTarget, StageId}; use reth_storage_api::DBProvider; diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index cacb71b35..d524f47cc 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -6,12 +6,13 @@ use alloy_primitives::{ TxKind, B256, U256, }; +use alloy_eips::eip4895::Withdrawal; use reth_db::tables; use reth_db_api::{database::Database, models::StoredBlockBodyIndices}; use reth_node_types::NodeTypes; use reth_primitives::{ Account, BlockBody, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, - Signature, Transaction, TransactionSigned, TxType, Withdrawal, Withdrawals, + Signature, Transaction, TransactionSigned, TxType, Withdrawals, }; use reth_trie::root::{state_root_unhashed, storage_root_unhashed}; use revm::{db::BundleState, primitives::AccountInfo}; diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs index 07fa505b2..2e7cd6a06 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -6,7 +6,7 @@ use crate::{ StateRootProvider, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; use alloy_consensus::constants::EMPTY_ROOT_HASH; -use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumberOrTag}; +use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber, BlockId, BlockNumberOrTag}; use alloy_primitives::{ keccak256, map::{HashMap, HashSet}, @@ -23,7 +23,7 @@ use reth_node_types::NodeTypes; use reth_primitives::{ Account, Block, BlockWithSenders, Bytecode, GotExpected, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, - TransactionSignedNoHash, Withdrawal, Withdrawals, + TransactionSignedNoHash, Withdrawals, }; use reth_stages_types::{StageCheckpoint, StageId}; use reth_storage_api::{ diff --git a/crates/storage/provider/src/test_utils/noop.rs b/crates/storage/provider/src/test_utils/noop.rs index e09437647..8e6dc7425 100644 --- a/crates/storage/provider/src/test_utils/noop.rs +++ b/crates/storage/provider/src/test_utils/noop.rs @@ -4,7 +4,7 @@ use std::{ sync::Arc, }; -use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumberOrTag}; +use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber, BlockId, BlockNumberOrTag}; use alloy_primitives::{ map::{HashMap, HashSet}, Address, BlockHash, BlockNumber, Bytes, StorageKey, StorageValue, TxHash, TxNumber, B256, U256, @@ -20,7 +20,7 @@ use reth_evm::ConfigureEvmEnv; use reth_primitives::{ Account, Block, BlockWithSenders, 
Bytecode, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, - TransactionSignedNoHash, Withdrawal, Withdrawals, + TransactionSignedNoHash, Withdrawals, }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; diff --git a/crates/storage/storage-api/src/withdrawals.rs b/crates/storage/storage-api/src/withdrawals.rs index 2de69b34e..ba422a3b3 100644 --- a/crates/storage/storage-api/src/withdrawals.rs +++ b/crates/storage/storage-api/src/withdrawals.rs @@ -1,5 +1,5 @@ -use alloy_eips::BlockHashOrNumber; -use reth_primitives::{Withdrawal, Withdrawals}; +use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber}; +use reth_primitives::Withdrawals; use reth_storage_errors::provider::ProviderResult; /// Client trait for fetching [Withdrawal] related data. diff --git a/examples/custom-beacon-withdrawals/src/main.rs b/examples/custom-beacon-withdrawals/src/main.rs index 09dad2f70..43e5f7428 100644 --- a/examples/custom-beacon-withdrawals/src/main.rs +++ b/examples/custom-beacon-withdrawals/src/main.rs @@ -3,7 +3,7 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] -use alloy_eips::eip7685::Requests; +use alloy_eips::{eip4895::Withdrawal, eip7685::Requests}; use alloy_sol_macro::sol; use alloy_sol_types::SolCall; #[cfg(feature = "optimism")] @@ -30,7 +30,7 @@ use reth_primitives::{ revm_primitives::{ address, Address, BlockEnv, Bytes, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, U256, }, - BlockWithSenders, Receipt, Withdrawal, + BlockWithSenders, Receipt, }; use std::{fmt::Display, sync::Arc}; diff --git a/testing/testing-utils/Cargo.toml b/testing/testing-utils/Cargo.toml index 98bfeabdf..3e0f58a7b 100644 --- a/testing/testing-utils/Cargo.toml +++ b/testing/testing-utils/Cargo.toml @@ -17,6 +17,7 @@ reth-primitives = { workspace = true, features = ["secp256k1"] } alloy-genesis.workspace = true alloy-primitives.workspace = true alloy-consensus.workspace = true +alloy-eips.workspace = true rand.workspace = true secp256k1 = { workspace = true, features = ["rand"] } diff --git a/testing/testing-utils/src/generators.rs b/testing/testing-utils/src/generators.rs index 571727cb2..84225ea72 100644 --- a/testing/testing-utils/src/generators.rs +++ b/testing/testing-utils/src/generators.rs @@ -1,6 +1,7 @@ //! Generators for different data structures like block headers, block bodies and ranges of those. 
use alloy_consensus::{Transaction as _, TxLegacy}; +use alloy_eips::eip4895::Withdrawal; use alloy_primitives::{Address, BlockNumber, Bytes, Parity, Sealable, TxKind, B256, U256}; pub use rand::Rng; use rand::{ @@ -8,7 +9,7 @@ use rand::{ }; use reth_primitives::{ proofs, sign_message, Account, BlockBody, Header, Log, Receipt, SealedBlock, SealedHeader, - StorageEntry, Transaction, TransactionSigned, Withdrawal, Withdrawals, + StorageEntry, Transaction, TransactionSigned, Withdrawals, }; use secp256k1::{Keypair, Secp256k1}; use std::{ From 566f2b4950d9e86c83dd527cde8d99b3a35f6a49 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:15:56 +0100 Subject: [PATCH 411/425] primitives: rm alloy `BlockHashOrNumber` reexport (#12302) --- bin/reth/src/commands/debug_cmd/execution.rs | 2 +- bin/reth/src/commands/debug_cmd/in_memory_merkle.rs | 2 +- bin/reth/src/commands/debug_cmd/merkle.rs | 2 +- crates/consensus/auto-seal/src/client.rs | 3 ++- crates/consensus/auto-seal/src/lib.rs | 6 +++--- crates/consensus/common/src/validation.rs | 5 ++--- crates/engine/local/Cargo.toml | 6 +++--- crates/evm/src/provider.rs | 2 +- crates/net/eth-wire-types/src/blocks.rs | 3 ++- crates/node/core/src/node_config.rs | 3 ++- crates/node/core/src/utils.rs | 3 ++- crates/primitives/src/block.rs | 4 +--- crates/primitives/src/lib.rs | 4 ++-- crates/primitives/src/transaction/mod.rs | 2 +- crates/rpc/rpc-engine-api/src/engine_api.rs | 4 ++-- crates/rpc/rpc-eth-types/src/cache/mod.rs | 5 ++--- .../storage/provider/src/providers/blockchain_provider.rs | 4 ++-- 17 files changed, 30 insertions(+), 30 deletions(-) diff --git a/bin/reth/src/commands/debug_cmd/execution.rs b/bin/reth/src/commands/debug_cmd/execution.rs index 215afacb5..cc584c892 100644 --- a/bin/reth/src/commands/debug_cmd/execution.rs +++ b/bin/reth/src/commands/debug_cmd/execution.rs @@ -1,6 +1,7 @@ //! Command for debugging execution. 
use crate::{args::NetworkArgs, utils::get_single_header}; +use alloy_eips::BlockHashOrNumber; use alloy_primitives::{BlockNumber, B256}; use clap::Parser; use futures::{stream::select as stream_select, StreamExt}; @@ -23,7 +24,6 @@ use reth_network_api::NetworkInfo; use reth_network_p2p::{headers::client::HeadersClient, BlockClient}; use reth_node_api::{NodeTypesWithDB, NodeTypesWithDBAdapter, NodeTypesWithEngine}; use reth_node_ethereum::EthExecutorProvider; -use reth_primitives::BlockHashOrNumber; use reth_provider::{ BlockExecutionWriter, ChainSpecProvider, ProviderFactory, StageCheckpointReader, }; diff --git a/bin/reth/src/commands/debug_cmd/in_memory_merkle.rs b/bin/reth/src/commands/debug_cmd/in_memory_merkle.rs index 51851c0b0..2c56da9b4 100644 --- a/bin/reth/src/commands/debug_cmd/in_memory_merkle.rs +++ b/bin/reth/src/commands/debug_cmd/in_memory_merkle.rs @@ -4,6 +4,7 @@ use crate::{ args::NetworkArgs, utils::{get_single_body, get_single_header}, }; +use alloy_eips::BlockHashOrNumber; use backon::{ConstantBuilder, Retryable}; use clap::Parser; use reth_chainspec::ChainSpec; @@ -19,7 +20,6 @@ use reth_network::{BlockDownloaderProvider, NetworkHandle}; use reth_network_api::NetworkInfo; use reth_node_api::{NodeTypesWithDB, NodeTypesWithEngine}; use reth_node_ethereum::EthExecutorProvider; -use reth_primitives::BlockHashOrNumber; use reth_provider::{ writer::UnifiedStorageWriter, AccountExtReader, ChainSpecProvider, HashingWriter, HeaderProvider, LatestStateProviderRef, OriginalValuesKnown, ProviderFactory, diff --git a/bin/reth/src/commands/debug_cmd/merkle.rs b/bin/reth/src/commands/debug_cmd/merkle.rs index 8e02a52ea..3c6e38512 100644 --- a/bin/reth/src/commands/debug_cmd/merkle.rs +++ b/bin/reth/src/commands/debug_cmd/merkle.rs @@ -1,5 +1,6 @@ //! Command for debugging merkle trie calculation. use crate::{args::NetworkArgs, utils::get_single_header}; +use alloy_eips::BlockHashOrNumber; use backon::{ConstantBuilder, Retryable}; use clap::Parser; use reth_beacon_consensus::EthBeaconConsensus; @@ -18,7 +19,6 @@ use reth_network_api::NetworkInfo; use reth_network_p2p::full_block::FullBlockClient; use reth_node_api::{NodeTypesWithDB, NodeTypesWithEngine}; use reth_node_ethereum::EthExecutorProvider; -use reth_primitives::BlockHashOrNumber; use reth_provider::{ writer::UnifiedStorageWriter, BlockNumReader, BlockWriter, ChainSpecProvider, DatabaseProviderFactory, HeaderProvider, LatestStateProviderRef, OriginalValuesKnown, diff --git a/crates/consensus/auto-seal/src/client.rs b/crates/consensus/auto-seal/src/client.rs index f9b80f10b..0083192d7 100644 --- a/crates/consensus/auto-seal/src/client.rs +++ b/crates/consensus/auto-seal/src/client.rs @@ -1,6 +1,7 @@ //! This includes download client implementations for auto sealing miners. 
use crate::Storage; +use alloy_eips::BlockHashOrNumber; use alloy_primitives::B256; use reth_network_p2p::{ bodies::client::{BodiesClient, BodiesFut}, @@ -9,7 +10,7 @@ use reth_network_p2p::{ priority::Priority, }; use reth_network_peers::{PeerId, WithPeerId}; -use reth_primitives::{BlockBody, BlockHashOrNumber, Header}; +use reth_primitives::{BlockBody, Header}; use std::fmt::Debug; use tracing::{trace, warn}; diff --git a/crates/consensus/auto-seal/src/lib.rs b/crates/consensus/auto-seal/src/lib.rs index 16299e19b..ad7e66acc 100644 --- a/crates/consensus/auto-seal/src/lib.rs +++ b/crates/consensus/auto-seal/src/lib.rs @@ -15,7 +15,7 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -use alloy_eips::eip7685::Requests; +use alloy_eips::{eip1898::BlockHashOrNumber, eip7685::Requests}; use alloy_primitives::{BlockHash, BlockNumber, Bloom, B256, U256}; use reth_beacon_consensus::BeaconEngineMessage; use reth_chainspec::{EthChainSpec, EthereumHardforks}; @@ -26,8 +26,8 @@ use reth_execution_errors::{ }; use reth_execution_types::ExecutionOutcome; use reth_primitives::{ - proofs, Block, BlockBody, BlockHashOrNumber, BlockWithSenders, Header, SealedBlock, - SealedHeader, TransactionSigned, Withdrawals, + proofs, Block, BlockBody, BlockWithSenders, Header, SealedBlock, SealedHeader, + TransactionSigned, Withdrawals, }; use reth_provider::{BlockReaderIdExt, StateProviderFactory, StateRootProvider}; use reth_revm::database::StateProviderDatabase; diff --git a/crates/consensus/common/src/validation.rs b/crates/consensus/common/src/validation.rs index a6e5d2158..092330595 100644 --- a/crates/consensus/common/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -275,7 +275,7 @@ pub fn validate_against_parent_4844( mod tests { use super::*; use alloy_consensus::{TxEip4844, EMPTY_OMMER_ROOT_HASH, EMPTY_ROOT_HASH}; - use alloy_eips::eip4895::Withdrawal; + use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber}; use alloy_primitives::{ hex_literal::hex, Address, BlockHash, BlockNumber, Bytes, Parity, Sealable, U256, }; @@ -283,8 +283,7 @@ mod tests { use rand::Rng; use reth_chainspec::ChainSpecBuilder; use reth_primitives::{ - proofs, Account, BlockBody, BlockHashOrNumber, Signature, Transaction, TransactionSigned, - Withdrawals, + proofs, Account, BlockBody, Signature, Transaction, TransactionSigned, Withdrawals, }; use reth_storage_api::{ errors::provider::ProviderResult, AccountReader, HeaderProvider, WithdrawalsProvider, diff --git a/crates/engine/local/Cargo.toml b/crates/engine/local/Cargo.toml index d9dc63253..2ab448e3b 100644 --- a/crates/engine/local/Cargo.toml +++ b/crates/engine/local/Cargo.toml @@ -47,7 +47,7 @@ workspace = true [features] optimism = [ - "op-alloy-rpc-types-engine", - "reth-beacon-consensus/optimism", - "reth-provider/optimism" + "op-alloy-rpc-types-engine", + "reth-beacon-consensus/optimism", + "reth-provider/optimism", ] diff --git a/crates/evm/src/provider.rs b/crates/evm/src/provider.rs index 8db828ec4..84c38db0d 100644 --- a/crates/evm/src/provider.rs +++ b/crates/evm/src/provider.rs @@ -1,7 +1,7 @@ //! Provider trait for populating the EVM environment. 
use crate::ConfigureEvmEnv; -use alloy_eips::eip1898::BlockHashOrNumber; +use alloy_eips::BlockHashOrNumber; use reth_primitives::Header; use reth_storage_errors::provider::ProviderResult; use revm::primitives::{BlockEnv, CfgEnv, CfgEnvWithHandlerCfg, SpecId}; diff --git a/crates/net/eth-wire-types/src/blocks.rs b/crates/net/eth-wire-types/src/blocks.rs index 878b4573f..5ae843190 100644 --- a/crates/net/eth-wire-types/src/blocks.rs +++ b/crates/net/eth-wire-types/src/blocks.rs @@ -113,9 +113,10 @@ mod tests { HeadersDirection, }; use alloy_consensus::TxLegacy; + use alloy_eips::BlockHashOrNumber; use alloy_primitives::{hex, Parity, TxKind, U256}; use alloy_rlp::{Decodable, Encodable}; - use reth_primitives::{BlockHashOrNumber, Header, Signature, Transaction, TransactionSigned}; + use reth_primitives::{Header, Signature, Transaction, TransactionSigned}; use std::str::FromStr; use super::BlockBody; diff --git a/crates/node/core/src/node_config.rs b/crates/node/core/src/node_config.rs index 80fb5152e..3848772c4 100644 --- a/crates/node/core/src/node_config.rs +++ b/crates/node/core/src/node_config.rs @@ -15,8 +15,9 @@ use reth_network_p2p::headers::client::HeadersClient; use serde::{de::DeserializeOwned, Serialize}; use std::{fs, path::Path}; +use alloy_eips::BlockHashOrNumber; use alloy_primitives::{BlockNumber, B256}; -use reth_primitives::{BlockHashOrNumber, Head, SealedHeader}; +use reth_primitives::{Head, SealedHeader}; use reth_stages_types::StageId; use reth_storage_api::{ BlockHashReader, DatabaseProviderFactory, HeaderProvider, StageCheckpointReader, diff --git a/crates/node/core/src/utils.rs b/crates/node/core/src/utils.rs index a64d12114..a04d4e324 100644 --- a/crates/node/core/src/utils.rs +++ b/crates/node/core/src/utils.rs @@ -1,6 +1,7 @@ //! Utility functions for node startup and shutdown, for example path parsing and retrieving single //! blocks from the network. 
+use alloy_eips::BlockHashOrNumber; use alloy_primitives::Sealable; use alloy_rpc_types_engine::{JwtError, JwtSecret}; use eyre::Result; @@ -11,7 +12,7 @@ use reth_network_p2p::{ headers::client::{HeadersClient, HeadersDirection, HeadersRequest}, priority::Priority, }; -use reth_primitives::{BlockHashOrNumber, SealedBlock, SealedHeader}; +use reth_primitives::{SealedBlock, SealedHeader}; use std::{ env::VarError, path::{Path, PathBuf}, diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index 7e5e76f1b..153f67b03 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -2,9 +2,7 @@ use crate::{ GotExpected, Header, SealedHeader, TransactionSigned, TransactionSignedEcRecovered, Withdrawals, }; use alloc::vec::Vec; -pub use alloy_eips::eip1898::{ - BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag, ForkBlock, RpcBlockHash, -}; +pub use alloy_eips::eip1898::{BlockId, BlockNumHash, BlockNumberOrTag, ForkBlock, RpcBlockHash}; use alloy_eips::eip2718::Encodable2718; use alloy_primitives::{Address, Bytes, Sealable, B256}; use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable}; diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 9bb27e658..3b8589f8f 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -34,8 +34,8 @@ pub mod transaction; #[cfg(any(test, feature = "arbitrary"))] pub use block::{generate_valid_header, valid_header_strategy}; pub use block::{ - Block, BlockBody, BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag, BlockWithSenders, - ForkBlock, RpcBlockHash, SealedBlock, SealedBlockWithSenders, + Block, BlockBody, BlockId, BlockNumHash, BlockNumberOrTag, BlockWithSenders, ForkBlock, + RpcBlockHash, SealedBlock, SealedBlockWithSenders, }; #[cfg(feature = "reth-codec")] pub use compression::*; diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index b9a24316c..f7d36be0a 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1,12 +1,12 @@ //! Transaction types. 
-use crate::BlockHashOrNumber; #[cfg(any(test, feature = "reth-codec"))] use alloy_consensus::constants::{EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID}; use alloy_consensus::{ SignableTransaction, Transaction as _, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy, }; use alloy_eips::{ + eip1898::BlockHashOrNumber, eip2718::{Decodable2718, Eip2718Error, Eip2718Result, Encodable2718}, eip2930::AccessList, eip7702::SignedAuthorization, diff --git a/crates/rpc/rpc-engine-api/src/engine_api.rs b/crates/rpc/rpc-engine-api/src/engine_api.rs index 383da2d21..20eeb390a 100644 --- a/crates/rpc/rpc-engine-api/src/engine_api.rs +++ b/crates/rpc/rpc-engine-api/src/engine_api.rs @@ -1,7 +1,7 @@ use crate::{ capabilities::EngineCapabilities, metrics::EngineApiMetrics, EngineApiError, EngineApiResult, }; -use alloy_eips::{eip4844::BlobAndProofV1, eip7685::Requests}; +use alloy_eips::{eip1898::BlockHashOrNumber, eip4844::BlobAndProofV1, eip7685::Requests}; use alloy_primitives::{BlockHash, BlockNumber, B256, U64}; use alloy_rpc_types_engine::{ CancunPayloadFields, ClientVersionV1, ExecutionPayload, ExecutionPayloadBodiesV1, @@ -19,7 +19,7 @@ use reth_payload_primitives::{ validate_payload_timestamp, EngineApiMessageVersion, PayloadBuilderAttributes, PayloadOrAttributes, }; -use reth_primitives::{Block, BlockHashOrNumber, EthereumHardfork}; +use reth_primitives::{Block, EthereumHardfork}; use reth_rpc_api::EngineApiServer; use reth_rpc_types_compat::engine::payload::{ convert_payload_input_v2_to_payload, convert_to_payload_body_v1, diff --git a/crates/rpc/rpc-eth-types/src/cache/mod.rs b/crates/rpc/rpc-eth-types/src/cache/mod.rs index cbf05f276..b6b0364c4 100644 --- a/crates/rpc/rpc-eth-types/src/cache/mod.rs +++ b/crates/rpc/rpc-eth-types/src/cache/mod.rs @@ -1,14 +1,13 @@ //! Async caching support for eth RPC +use alloy_eips::BlockHashOrNumber; use alloy_primitives::B256; use futures::{future::Either, Stream, StreamExt}; use reth_chain_state::CanonStateNotification; use reth_errors::{ProviderError, ProviderResult}; use reth_evm::{provider::EvmEnvProvider, ConfigureEvm}; use reth_execution_types::Chain; -use reth_primitives::{ - BlockHashOrNumber, Header, Receipt, SealedBlockWithSenders, TransactionSigned, -}; +use reth_primitives::{Header, Receipt, SealedBlockWithSenders, TransactionSigned}; use reth_storage_api::{BlockReader, StateProviderFactory, TransactionVariant}; use reth_tasks::{TaskSpawner, TokioTaskExecutor}; use revm::primitives::{BlockEnv, CfgEnv, CfgEnvWithHandlerCfg, SpecId}; diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs b/crates/storage/provider/src/providers/blockchain_provider.rs index 85f759310..669a35559 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -1444,7 +1444,7 @@ mod tests { assert_eq!( provider .withdrawals_by_block( - reth_primitives::BlockHashOrNumber::Number(15), + alloy_eips::BlockHashOrNumber::Number(15), shainghai_timestamp ) .expect("could not call withdrawals by block"), @@ -1456,7 +1456,7 @@ mod tests { assert_eq!( provider .withdrawals_by_block( - reth_primitives::BlockHashOrNumber::Number(block.number), + alloy_eips::BlockHashOrNumber::Number(block.number), shainghai_timestamp )? 
.unwrap(), From 0475af8bdb605ef8deadeb3452a59f9d71373d49 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 4 Nov 2024 12:47:18 +0100 Subject: [PATCH 412/425] primitives: rm alloy `BlockId` reexport (#12303) --- Cargo.lock | 4 +++- crates/primitives/src/block.rs | 4 ++-- crates/primitives/src/lib.rs | 4 ++-- crates/rpc/rpc-api/src/debug.rs | 3 ++- crates/rpc/rpc-api/src/otterscan.rs | 2 +- crates/rpc/rpc-api/src/reth.rs | 2 +- crates/rpc/rpc-api/src/trace.rs | 2 +- crates/rpc/rpc-builder/Cargo.toml | 1 + crates/rpc/rpc-builder/tests/it/http.rs | 3 ++- crates/rpc/rpc-eth-api/src/core.rs | 4 ++-- crates/rpc/rpc-eth-api/src/helpers/block.rs | 3 ++- crates/rpc/rpc-eth-api/src/helpers/state.rs | 3 ++- crates/rpc/rpc-eth-api/src/helpers/transaction.rs | 6 ++---- crates/rpc/rpc-eth-types/src/error.rs | 3 ++- crates/rpc/rpc-eth-types/src/pending_block.rs | 3 ++- crates/rpc/rpc-server-types/Cargo.toml | 3 +-- crates/rpc/rpc-server-types/src/result.rs | 2 +- crates/rpc/rpc-testing-util/Cargo.toml | 3 ++- crates/rpc/rpc-testing-util/src/debug.rs | 3 ++- crates/rpc/rpc-testing-util/src/trace.rs | 2 +- crates/rpc/rpc/src/debug.rs | 4 ++-- crates/rpc/rpc/src/engine.rs | 3 ++- crates/rpc/rpc/src/otterscan.rs | 3 ++- crates/rpc/rpc/src/reth.rs | 2 +- crates/rpc/rpc/src/trace.rs | 3 ++- 25 files changed, 43 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0b3318c4a..9b5a5350c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8706,6 +8706,7 @@ dependencies = [ name = "reth-rpc-api-testing-util" version = "1.1.0" dependencies = [ + "alloy-eips", "alloy-primitives", "alloy-rpc-types", "alloy-rpc-types-eth", @@ -8725,6 +8726,7 @@ dependencies = [ name = "reth-rpc-builder" version = "1.1.0" dependencies = [ + "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", "alloy-rpc-types-eth", @@ -8910,13 +8912,13 @@ dependencies = [ name = "reth-rpc-server-types" version = "1.1.0" dependencies = [ + "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", "jsonrpsee-core", "jsonrpsee-types", "reth-errors", "reth-network-api", - "reth-primitives", "serde", "strum", ] diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index 153f67b03..2b651652d 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -2,7 +2,7 @@ use crate::{ GotExpected, Header, SealedHeader, TransactionSigned, TransactionSignedEcRecovered, Withdrawals, }; use alloc::vec::Vec; -pub use alloy_eips::eip1898::{BlockId, BlockNumHash, BlockNumberOrTag, ForkBlock, RpcBlockHash}; +pub use alloy_eips::eip1898::{BlockNumHash, BlockNumberOrTag, ForkBlock, RpcBlockHash}; use alloy_eips::eip2718::Encodable2718; use alloy_primitives::{Address, Bytes, Sealable, B256}; use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable}; @@ -905,7 +905,7 @@ pub(super) mod serde_bincode_compat { #[cfg(test)] mod tests { use super::{BlockNumberOrTag::*, *}; - use alloy_eips::eip1898::HexStringMissingPrefixError; + use alloy_eips::{eip1898::HexStringMissingPrefixError, BlockId}; use alloy_primitives::hex_literal::hex; use alloy_rlp::{Decodable, Encodable}; use std::str::FromStr; diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 3b8589f8f..679b24abd 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -34,8 +34,8 @@ pub mod transaction; #[cfg(any(test, feature = "arbitrary"))] pub use block::{generate_valid_header, valid_header_strategy}; pub use block::{ - Block, BlockBody, BlockId, 
BlockNumHash, BlockNumberOrTag, BlockWithSenders, ForkBlock, - RpcBlockHash, SealedBlock, SealedBlockWithSenders, + Block, BlockBody, BlockNumHash, BlockNumberOrTag, BlockWithSenders, ForkBlock, RpcBlockHash, + SealedBlock, SealedBlockWithSenders, }; #[cfg(feature = "reth-codec")] pub use compression::*; diff --git a/crates/rpc/rpc-api/src/debug.rs b/crates/rpc/rpc-api/src/debug.rs index 3e03210f1..162699c6e 100644 --- a/crates/rpc/rpc-api/src/debug.rs +++ b/crates/rpc/rpc-api/src/debug.rs @@ -1,3 +1,4 @@ +use alloy_eips::BlockId; use alloy_primitives::{Address, Bytes, B256}; use alloy_rpc_types::{Block, Bundle, StateContext}; use alloy_rpc_types_debug::ExecutionWitness; @@ -6,7 +7,7 @@ use alloy_rpc_types_trace::geth::{ BlockTraceResult, GethDebugTracingCallOptions, GethDebugTracingOptions, GethTrace, TraceResult, }; use jsonrpsee::{core::RpcResult, proc_macros::rpc}; -use reth_primitives::{BlockId, BlockNumberOrTag}; +use reth_primitives::BlockNumberOrTag; /// Debug rpc interface. #[cfg_attr(not(feature = "client"), rpc(server, namespace = "debug"))] diff --git a/crates/rpc/rpc-api/src/otterscan.rs b/crates/rpc/rpc-api/src/otterscan.rs index ee805b482..d3e61c031 100644 --- a/crates/rpc/rpc-api/src/otterscan.rs +++ b/crates/rpc/rpc-api/src/otterscan.rs @@ -1,3 +1,4 @@ +use alloy_eips::BlockId; use alloy_json_rpc::RpcObject; use alloy_primitives::{Address, Bytes, TxHash, B256}; use alloy_rpc_types::Header; @@ -6,7 +7,6 @@ use alloy_rpc_types_trace::otterscan::{ TransactionsWithReceipts, }; use jsonrpsee::{core::RpcResult, proc_macros::rpc}; -use reth_primitives::BlockId; /// Otterscan rpc interface. #[cfg_attr(not(feature = "client"), rpc(server, namespace = "ots"))] diff --git a/crates/rpc/rpc-api/src/reth.rs b/crates/rpc/rpc-api/src/reth.rs index 98c31b78f..0589ffc00 100644 --- a/crates/rpc/rpc-api/src/reth.rs +++ b/crates/rpc/rpc-api/src/reth.rs @@ -1,6 +1,6 @@ +use alloy_eips::BlockId; use alloy_primitives::{Address, U256}; use jsonrpsee::{core::RpcResult, proc_macros::rpc}; -use reth_primitives::BlockId; use std::collections::HashMap; /// Reth API namespace for reth-specific methods diff --git a/crates/rpc/rpc-api/src/trace.rs b/crates/rpc/rpc-api/src/trace.rs index 58dda422a..45059284a 100644 --- a/crates/rpc/rpc-api/src/trace.rs +++ b/crates/rpc/rpc-api/src/trace.rs @@ -1,3 +1,4 @@ +use alloy_eips::BlockId; use alloy_primitives::{map::HashSet, Bytes, B256}; use alloy_rpc_types::{state::StateOverride, BlockOverrides, Index}; use alloy_rpc_types_eth::transaction::TransactionRequest; @@ -7,7 +8,6 @@ use alloy_rpc_types_trace::{ parity::*, }; use jsonrpsee::{core::RpcResult, proc_macros::rpc}; -use reth_primitives::BlockId; /// Ethereum trace API #[cfg_attr(not(feature = "client"), rpc(server, namespace = "trace"))] diff --git a/crates/rpc/rpc-builder/Cargo.toml b/crates/rpc/rpc-builder/Cargo.toml index 2d10dabf8..b6ae86c74 100644 --- a/crates/rpc/rpc-builder/Cargo.toml +++ b/crates/rpc/rpc-builder/Cargo.toml @@ -68,6 +68,7 @@ alloy-primitives.workspace = true alloy-rpc-types-eth.workspace = true alloy-rpc-types-trace.workspace = true alloy-rpc-types-engine.workspace = true +alloy-eips.workspace = true tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } serde_json.workspace = true diff --git a/crates/rpc/rpc-builder/tests/it/http.rs b/crates/rpc/rpc-builder/tests/it/http.rs index ed9ef56d6..5c33a5d34 100644 --- a/crates/rpc/rpc-builder/tests/it/http.rs +++ b/crates/rpc/rpc-builder/tests/it/http.rs @@ -2,6 +2,7 @@ //! 
Standalone http tests use crate::utils::{launch_http, launch_http_ws, launch_ws}; +use alloy_eips::BlockId; use alloy_primitives::{hex_literal::hex, Address, Bytes, TxHash, B256, B64, U256, U64}; use alloy_rpc_types_eth::{ transaction::TransactionRequest, Block, FeeHistory, Filter, Index, Log, @@ -18,7 +19,7 @@ use jsonrpsee::{ types::error::ErrorCode, }; use reth_network_peers::NodeRecord; -use reth_primitives::{BlockId, BlockNumberOrTag, Receipt}; +use reth_primitives::{BlockNumberOrTag, Receipt}; use reth_rpc_api::{ clients::{AdminApiClient, EthApiClient}, DebugApiClient, EthFilterApiClient, NetApiClient, OtterscanClient, TraceApiClient, diff --git a/crates/rpc/rpc-eth-api/src/core.rs b/crates/rpc/rpc-eth-api/src/core.rs index 66bc5a44d..185297c22 100644 --- a/crates/rpc/rpc-eth-api/src/core.rs +++ b/crates/rpc/rpc-eth-api/src/core.rs @@ -1,7 +1,7 @@ //! Implementation of the [`jsonrpsee`] generated [`EthApiServer`] trait. Handles RPC requests for //! the `eth_` namespace. use alloy_dyn_abi::TypedData; -use alloy_eips::eip2930::AccessListResult; +use alloy_eips::{eip2930::AccessListResult, BlockId}; use alloy_json_rpc::RpcObject; use alloy_primitives::{Address, Bytes, B256, B64, U256, U64}; use alloy_rpc_types::{ @@ -13,7 +13,7 @@ use alloy_rpc_types::{ }; use alloy_rpc_types_eth::transaction::TransactionRequest; use jsonrpsee::{core::RpcResult, proc_macros::rpc}; -use reth_primitives::{BlockId, BlockNumberOrTag}; +use reth_primitives::BlockNumberOrTag; use reth_rpc_server_types::{result::internal_rpc_err, ToRpcResult}; use tracing::trace; diff --git a/crates/rpc/rpc-eth-api/src/helpers/block.rs b/crates/rpc/rpc-eth-api/src/helpers/block.rs index fa397db35..bb8fd08ed 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/block.rs @@ -2,9 +2,10 @@ use std::sync::Arc; +use alloy_eips::BlockId; use alloy_rpc_types::{Header, Index}; use futures::Future; -use reth_primitives::{BlockId, Receipt, SealedBlock, SealedBlockWithSenders}; +use reth_primitives::{Receipt, SealedBlock, SealedBlockWithSenders}; use reth_provider::{BlockIdReader, BlockReader, BlockReaderIdExt, HeaderProvider}; use reth_rpc_types_compat::block::{from_block, uncle_block_from_header}; diff --git a/crates/rpc/rpc-eth-api/src/helpers/state.rs b/crates/rpc/rpc-eth-api/src/helpers/state.rs index 97c94b949..d980b9114 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/state.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/state.rs @@ -2,13 +2,14 @@ //! RPC methods. 
use alloy_consensus::constants::KECCAK_EMPTY; +use alloy_eips::BlockId; use alloy_primitives::{Address, Bytes, B256, U256}; use alloy_rpc_types::{serde_helpers::JsonStorageKey, Account, EIP1186AccountProofResponse}; use futures::Future; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_errors::RethError; use reth_evm::ConfigureEvmEnv; -use reth_primitives::{BlockId, Header}; +use reth_primitives::Header; use reth_provider::{ BlockIdReader, BlockNumReader, ChainSpecProvider, StateProvider, StateProviderBox, StateProviderFactory, diff --git a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs index 3c526cbb0..ab94e3dd1 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs @@ -3,15 +3,13 @@ use alloy_consensus::Transaction; use alloy_dyn_abi::TypedData; -use alloy_eips::eip2718::Encodable2718; +use alloy_eips::{eip2718::Encodable2718, BlockId}; use alloy_network::TransactionBuilder; use alloy_primitives::{Address, Bytes, TxHash, B256}; use alloy_rpc_types::{BlockNumberOrTag, TransactionInfo}; use alloy_rpc_types_eth::transaction::TransactionRequest; use futures::Future; -use reth_primitives::{ - BlockId, Receipt, SealedBlockWithSenders, TransactionMeta, TransactionSigned, -}; +use reth_primitives::{Receipt, SealedBlockWithSenders, TransactionMeta, TransactionSigned}; use reth_provider::{BlockNumReader, BlockReaderIdExt, ReceiptProvider, TransactionsProvider}; use reth_rpc_eth_types::{ utils::{binary_search, recover_raw_transaction}, diff --git a/crates/rpc/rpc-eth-types/src/error.rs b/crates/rpc/rpc-eth-types/src/error.rs index b38b31227..9241e9e0b 100644 --- a/crates/rpc/rpc-eth-types/src/error.rs +++ b/crates/rpc/rpc-eth-types/src/error.rs @@ -2,11 +2,12 @@ use std::time::Duration; +use alloy_eips::BlockId; use alloy_primitives::{Address, Bytes, U256}; use alloy_rpc_types::{error::EthRpcErrorCode, request::TransactionInputError, BlockError}; use alloy_sol_types::decode_revert_reason; use reth_errors::RethError; -use reth_primitives::{revm_primitives::InvalidHeader, BlockId}; +use reth_primitives::revm_primitives::InvalidHeader; use reth_rpc_server_types::result::{ block_id_to_str, internal_rpc_err, invalid_params_rpc_err, rpc_err, rpc_error_with_code, }; diff --git a/crates/rpc/rpc-eth-types/src/pending_block.rs b/crates/rpc/rpc-eth-types/src/pending_block.rs index 949e205dc..d3e7c4158 100644 --- a/crates/rpc/rpc-eth-types/src/pending_block.rs +++ b/crates/rpc/rpc-eth-types/src/pending_block.rs @@ -4,9 +4,10 @@ use std::time::Instant; +use alloy_eips::BlockId; use alloy_primitives::B256; use derive_more::Constructor; -use reth_primitives::{BlockId, BlockNumberOrTag, Receipt, SealedBlockWithSenders, SealedHeader}; +use reth_primitives::{BlockNumberOrTag, Receipt, SealedBlockWithSenders, SealedHeader}; use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg}; /// Configured [`BlockEnv`] and [`CfgEnvWithHandlerCfg`] for a pending block. 
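Patches 410, 411, 412, and 414 in this series apply the same downstream migration: alloy types are no longer pulled through the `reth_primitives` reexports but imported from `alloy_eips` directly, while reth-native types such as `Withdrawals` keep their `reth_primitives` path. A minimal before/after sketch of that import change, distilled from the hunks in these patches rather than copied from any single file:

// Before: alloy types reexported through reth_primitives.
// use reth_primitives::{BlockHashOrNumber, BlockId, BlockNumHash, Withdrawal, Withdrawals};

// After: alloy types come from alloy_eips; only reth-native types remain in reth_primitives.
use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber, BlockId, BlockNumHash};
use reth_primitives::Withdrawals;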
diff --git a/crates/rpc/rpc-server-types/Cargo.toml b/crates/rpc/rpc-server-types/Cargo.toml index 08ecd3947..275d8ea56 100644 --- a/crates/rpc/rpc-server-types/Cargo.toml +++ b/crates/rpc/rpc-server-types/Cargo.toml @@ -14,11 +14,11 @@ workspace = true [dependencies] reth-errors.workspace = true reth-network-api.workspace = true -reth-primitives.workspace = true # ethereum alloy-primitives.workspace = true alloy-rpc-types-engine.workspace = true +alloy-eips.workspace = true # rpc jsonrpsee-core.workspace = true @@ -27,4 +27,3 @@ jsonrpsee-types.workspace = true # misc strum = { workspace = true, features = ["derive"] } serde = { workspace = true, features = ["derive"] } - diff --git a/crates/rpc/rpc-server-types/src/result.rs b/crates/rpc/rpc-server-types/src/result.rs index 10ce1650a..5d1b702e9 100644 --- a/crates/rpc/rpc-server-types/src/result.rs +++ b/crates/rpc/rpc-server-types/src/result.rs @@ -2,10 +2,10 @@ use std::fmt; +use alloy_eips::BlockId; use alloy_rpc_types_engine::PayloadError; use jsonrpsee_core::RpcResult; use reth_errors::ConsensusError; -use reth_primitives::BlockId; /// Helper trait to easily convert various `Result` types into [`RpcResult`] pub trait ToRpcResult: Sized { diff --git a/crates/rpc/rpc-testing-util/Cargo.toml b/crates/rpc/rpc-testing-util/Cargo.toml index 4977c3a2c..e5c57502e 100644 --- a/crates/rpc/rpc-testing-util/Cargo.toml +++ b/crates/rpc/rpc-testing-util/Cargo.toml @@ -21,6 +21,7 @@ alloy-primitives.workspace = true alloy-rpc-types-eth.workspace = true alloy-rpc-types.workspace = true alloy-rpc-types-trace.workspace = true +alloy-eips.workspace = true # async futures.workspace = true @@ -36,4 +37,4 @@ similar-asserts.workspace = true tokio = { workspace = true, features = ["rt-multi-thread", "macros", "rt"] } reth-rpc-eth-api.workspace = true jsonrpsee-http-client.workspace = true -alloy-rpc-types-trace.workspace = true \ No newline at end of file +alloy-rpc-types-trace.workspace = true diff --git a/crates/rpc/rpc-testing-util/src/debug.rs b/crates/rpc/rpc-testing-util/src/debug.rs index 97fe008fa..d4c7dce86 100644 --- a/crates/rpc/rpc-testing-util/src/debug.rs +++ b/crates/rpc/rpc-testing-util/src/debug.rs @@ -6,6 +6,7 @@ use std::{ task::{Context, Poll}, }; +use alloy_eips::BlockId; use alloy_primitives::{TxHash, B256}; use alloy_rpc_types::{Block, Transaction}; use alloy_rpc_types_eth::transaction::TransactionRequest; @@ -15,7 +16,7 @@ use alloy_rpc_types_trace::{ }; use futures::{Stream, StreamExt}; use jsonrpsee::core::client::Error as RpcError; -use reth_primitives::{BlockId, Receipt}; +use reth_primitives::Receipt; use reth_rpc_api::{clients::DebugApiClient, EthApiClient}; const NOOP_TRACER: &str = include_str!("../assets/noop-tracer.js"); diff --git a/crates/rpc/rpc-testing-util/src/trace.rs b/crates/rpc/rpc-testing-util/src/trace.rs index 0fefef7c9..efb1f3674 100644 --- a/crates/rpc/rpc-testing-util/src/trace.rs +++ b/crates/rpc/rpc-testing-util/src/trace.rs @@ -1,5 +1,6 @@ //! Helpers for testing trace calls. 
+use alloy_eips::BlockId; use alloy_primitives::{map::HashSet, Bytes, TxHash, B256}; use alloy_rpc_types::Index; use alloy_rpc_types_eth::transaction::TransactionRequest; @@ -10,7 +11,6 @@ use alloy_rpc_types_trace::{ }; use futures::{Stream, StreamExt}; use jsonrpsee::core::client::Error as RpcError; -use reth_primitives::BlockId; use reth_rpc_api::clients::TraceApiClient; use std::{ pin::Pin, diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index e2746a53c..6a73af69d 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -1,4 +1,4 @@ -use alloy_eips::eip2718::Encodable2718; +use alloy_eips::{eip2718::Encodable2718, BlockId}; use alloy_primitives::{Address, Bytes, B256, U256}; use alloy_rlp::{Decodable, Encodable}; use alloy_rpc_types::{ @@ -19,7 +19,7 @@ use reth_evm::{ system_calls::SystemCaller, ConfigureEvmEnv, }; -use reth_primitives::{Block, BlockId, BlockNumberOrTag, TransactionSignedEcRecovered}; +use reth_primitives::{Block, BlockNumberOrTag, TransactionSignedEcRecovered}; use reth_provider::{ BlockReaderIdExt, ChainSpecProvider, HeaderProvider, StateProofProvider, StateProviderFactory, TransactionVariant, diff --git a/crates/rpc/rpc/src/engine.rs b/crates/rpc/rpc/src/engine.rs index 928e2050a..0ff90d399 100644 --- a/crates/rpc/rpc/src/engine.rs +++ b/crates/rpc/rpc/src/engine.rs @@ -1,3 +1,4 @@ +use alloy_eips::BlockId; use alloy_primitives::{Address, Bytes, B256, U256, U64}; use alloy_rpc_types::{ state::StateOverride, BlockOverrides, EIP1186AccountProofResponse, Filter, Log, SyncStatus, @@ -5,7 +6,7 @@ use alloy_rpc_types::{ use alloy_rpc_types_eth::transaction::TransactionRequest; use alloy_serde::JsonStorageKey; use jsonrpsee::core::RpcResult as Result; -use reth_primitives::{BlockId, BlockNumberOrTag}; +use reth_primitives::BlockNumberOrTag; use reth_rpc_api::{EngineEthApiServer, EthApiServer, EthFilterApiServer}; /// Re-export for convenience pub use reth_rpc_engine_api::EngineApi; diff --git a/crates/rpc/rpc/src/otterscan.rs b/crates/rpc/rpc/src/otterscan.rs index a772dd501..da33bf5d3 100644 --- a/crates/rpc/rpc/src/otterscan.rs +++ b/crates/rpc/rpc/src/otterscan.rs @@ -1,4 +1,5 @@ use alloy_consensus::Transaction; +use alloy_eips::BlockId; use alloy_network::{ReceiptResponse, TransactionResponse}; use alloy_primitives::{Address, Bytes, TxHash, B256, U256}; use alloy_rpc_types::{BlockTransactions, Header, TransactionReceipt}; @@ -11,7 +12,7 @@ use alloy_rpc_types_trace::{ }; use async_trait::async_trait; use jsonrpsee::{core::RpcResult, types::ErrorObjectOwned}; -use reth_primitives::{BlockId, BlockNumberOrTag}; +use reth_primitives::BlockNumberOrTag; use reth_rpc_api::{EthApiServer, OtterscanServer}; use reth_rpc_eth_api::{ helpers::{EthTransactions, TraceExt}, diff --git a/crates/rpc/rpc/src/reth.rs b/crates/rpc/rpc/src/reth.rs index 6d5897df1..c33f97f53 100644 --- a/crates/rpc/rpc/src/reth.rs +++ b/crates/rpc/rpc/src/reth.rs @@ -1,10 +1,10 @@ use std::{collections::HashMap, future::Future, sync::Arc}; +use alloy_eips::BlockId; use alloy_primitives::{Address, U256}; use async_trait::async_trait; use jsonrpsee::core::RpcResult; use reth_errors::RethResult; -use reth_primitives::BlockId; use reth_provider::{BlockReaderIdExt, ChangeSetReader, StateProviderFactory}; use reth_rpc_api::RethApiServer; use reth_rpc_eth_types::{EthApiError, EthResult}; diff --git a/crates/rpc/rpc/src/trace.rs b/crates/rpc/rpc/src/trace.rs index 2883818af..38c73b0f5 100644 --- a/crates/rpc/rpc/src/trace.rs +++ b/crates/rpc/rpc/src/trace.rs @@ -1,3 +1,4 
@@
+use alloy_eips::BlockId;
 use alloy_primitives::{map::HashSet, Bytes, B256, U256};
 use alloy_rpc_types::{
     state::{EvmOverrides, StateOverride},
@@ -17,7 +18,7 @@ use reth_consensus_common::calc::{
     base_block_reward, base_block_reward_pre_merge, block_reward, ommer_reward,
 };
 use reth_evm::ConfigureEvmEnv;
-use reth_primitives::{BlockId, Header};
+use reth_primitives::Header;
 use reth_provider::{BlockReader, ChainSpecProvider, EvmEnvProvider, StateProviderFactory};
 use reth_revm::database::StateProviderDatabase;
 use reth_rpc_api::TraceApiServer;

From 3fe22343f1b96ef97c4199741e0b44b6567799b4 Mon Sep 17 00:00:00 2001
From: Federico Gimenez
Date: Mon, 4 Nov 2024 13:48:32 +0100
Subject: [PATCH 413/425] feat(engine): add StateRootTask skeleton (#12305)

---
 Cargo.lock                          |  2 +
 crates/engine/tree/Cargo.toml       |  3 ++
 crates/engine/tree/src/tree/mod.rs  |  2 +
 crates/engine/tree/src/tree/root.rs | 60 +++++++++++++++++++++++++++++
 4 files changed, 67 insertions(+)
 create mode 100644 crates/engine/tree/src/tree/root.rs

diff --git a/Cargo.lock b/Cargo.lock
index 9b5a5350c..a885081cd 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -7218,8 +7218,10 @@ dependencies = [
  "reth-tracing",
  "reth-trie",
  "reth-trie-parallel",
+ "revm-primitives",
  "thiserror",
  "tokio",
+ "tokio-stream",
  "tracing",
 ]

diff --git a/crates/engine/tree/Cargo.toml b/crates/engine/tree/Cargo.toml
index dee0bcaf7..293883c03 100644
--- a/crates/engine/tree/Cargo.toml
+++ b/crates/engine/tree/Cargo.toml
@@ -39,9 +39,12 @@ alloy-primitives.workspace = true
 alloy-eips.workspace = true
 alloy-rpc-types-engine.workspace = true

+revm-primitives.workspace = true
+
 # common
 futures.workspace = true
 tokio = { workspace = true, features = ["macros", "sync"] }
+tokio-stream.workspace = true
 thiserror.workspace = true

 # metrics
diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs
index bc070d873..c3e922d11 100644
--- a/crates/engine/tree/src/tree/mod.rs
+++ b/crates/engine/tree/src/tree/mod.rs
@@ -75,6 +75,8 @@ pub use invalid_block_hook::{InvalidBlockHooks, NoopInvalidBlockHook};
 pub use persistence_state::PersistenceState;
 pub use reth_engine_primitives::InvalidBlockHook;

+mod root;
+
 /// Keeps track of the state of the tree.
 ///
 /// ## Invariants
diff --git a/crates/engine/tree/src/tree/root.rs b/crates/engine/tree/src/tree/root.rs
new file mode 100644
index 000000000..48b2eccdf
--- /dev/null
+++ b/crates/engine/tree/src/tree/root.rs
@@ -0,0 +1,60 @@
+//! State root task related functionality.
+
+use reth_provider::providers::ConsistentDbView;
+use reth_trie::{updates::TrieUpdates, TrieInput};
+use reth_trie_parallel::parallel_root::ParallelStateRootError;
+use revm_primitives::{EvmState, B256};
+use std::{
+    future::Future,
+    pin::Pin,
+    sync::Arc,
+    task::{Context, Poll},
+};
+use tokio_stream::wrappers::UnboundedReceiverStream;
+
+/// Standalone task that receives a transaction state stream and updates relevant
+/// data structures to calculate state root.
+///
+/// It is responsible for initializing a blinded sparse trie and subscribing to the
+/// transaction state stream. As it receives transaction execution results, it
+/// fetches the proofs for relevant accounts from the database and reveals them
+/// to the tree.
+/// Then it updates relevant leaves according to the result of the transaction.
+#[allow(dead_code)]
+pub(crate) struct StateRootTask<Factory> {
+    /// View over the state in the database.
+    consistent_view: ConsistentDbView<Factory>,
+    /// Incoming state updates.
+    state_stream: UnboundedReceiverStream<EvmState>,
+    /// Latest trie input.
+    input: Arc<TrieInput>,
+}
+
+#[allow(dead_code)]
+impl<Factory> StateRootTask<Factory> {
+    /// Creates a new `StateRootTask`.
+    pub(crate) const fn new(
+        consistent_view: ConsistentDbView<Factory>,
+        input: Arc<TrieInput>,
+        state_stream: UnboundedReceiverStream<EvmState>,
+    ) -> Self {
+        Self { consistent_view, state_stream, input }
+    }
+
+    /// Handles state updates.
+    pub(crate) fn on_state_update(&self, _update: EvmState) {
+        // TODO: calculate hashed state update and dispatch proof gathering for it.
+    }
+}
+
+impl<Factory> Future for StateRootTask<Factory> {
+    type Output = Result<(B256, TrieUpdates), ParallelStateRootError>;
+
+    fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {
+        // TODO:
+        // * poll incoming state updates stream
+        // * keep track of proof calculation
+        // * keep track of intermediate root computation
+        Poll::Pending
+    }
+}

From d5f01036016263fbad6a3af9dc3707b2701adc1d Mon Sep 17 00:00:00 2001
From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com>
Date: Mon, 4 Nov 2024 15:54:58 +0100
Subject: [PATCH 414/425] primitives: rm alloy `BlockNumHash` reexport (#12304)

---
 Cargo.lock                                     | 2 ++
 crates/blockchain-tree-api/Cargo.toml          | 1 +
 crates/blockchain-tree-api/src/lib.rs          | 3 ++-
 crates/consensus/beacon/src/engine/mod.rs      | 4 ++--
 crates/exex/exex/src/event.rs                  | 2 +-
 crates/exex/exex/src/manager.rs                | 3 ++-
 crates/exex/test-utils/Cargo.toml              | 3 +++
 crates/exex/test-utils/src/lib.rs              | 3 ++-
 crates/primitives/src/block.rs                 | 2 +-
 crates/primitives/src/lib.rs                   | 4 ++--
 crates/rpc/rpc-eth-types/src/logs_utils.rs     | 3 ++-
 crates/storage/provider/src/test_utils/mock.rs | 6 +++---
 crates/storage/provider/src/test_utils/noop.rs | 6 +++---
 crates/storage/storage-api/src/block_id.rs     | 6 +++---
 14 files changed, 29 insertions(+), 19 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index a885081cd..9d40ebfb2 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -6550,6 +6550,7 @@ dependencies = [
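For orientation, a rough sketch of how engine code might wire up the `StateRootTask` skeleton from patch 413 once its TODOs are filled in; the `spawn_state_root_task` helper and the channel plumbing are illustrative assumptions, not part of the patch (its `poll` still returns `Pending`):

// Hypothetical glue inside the same module as `StateRootTask`, so the pub(crate)
// constructor and the imports from root.rs are in scope.
fn spawn_state_root_task<Factory>(
    consistent_view: ConsistentDbView<Factory>,
    trie_input: Arc<TrieInput>,
) -> (tokio::sync::mpsc::UnboundedSender<EvmState>, StateRootTask<Factory>) {
    // Execution pushes each transaction's `EvmState` into this channel.
    let (state_tx, state_rx) = tokio::sync::mpsc::unbounded_channel();
    let task = StateRootTask::new(
        consistent_view,
        trie_input,
        UnboundedReceiverStream::new(state_rx),
    );
    // The caller forwards execution results through `state_tx` and later awaits the
    // task, a `Future` resolving to the computed state root and trie updates (or an error).
    (state_tx, task)
}

Because the task is exposed as a plain `Future`, the caller decides how to drive it, for example by spawning it on the runtime alongside block execution.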
alloy_primitives::{BlockNumber, B256}; use alloy_rpc_types_engine::{ ExecutionPayload, ExecutionPayloadSidecar, ForkchoiceState, PayloadStatus, PayloadStatusEnum, @@ -20,7 +20,7 @@ use reth_node_types::NodeTypesWithEngine; use reth_payload_builder::PayloadBuilderHandle; use reth_payload_primitives::{PayloadAttributes, PayloadBuilder, PayloadBuilderAttributes}; use reth_payload_validator::ExecutionPayloadValidator; -use reth_primitives::{BlockNumHash, Head, Header, SealedBlock, SealedHeader}; +use reth_primitives::{Head, Header, SealedBlock, SealedHeader}; use reth_provider::{ providers::ProviderNodeTypes, BlockIdReader, BlockReader, BlockSource, CanonChainTracker, ChainSpecProvider, ProviderError, StageCheckpointReader, diff --git a/crates/exex/exex/src/event.rs b/crates/exex/exex/src/event.rs index 1215ea2a5..bbd79addc 100644 --- a/crates/exex/exex/src/event.rs +++ b/crates/exex/exex/src/event.rs @@ -1,4 +1,4 @@ -use reth_primitives::BlockNumHash; +use alloy_eips::BlockNumHash; /// Events emitted by an `ExEx`. #[derive(Debug, Clone, Copy, PartialEq, Eq)] diff --git a/crates/exex/exex/src/manager.rs b/crates/exex/exex/src/manager.rs index 8c1518f30..a17de6608 100644 --- a/crates/exex/exex/src/manager.rs +++ b/crates/exex/exex/src/manager.rs @@ -1,13 +1,14 @@ use crate::{ wal::Wal, ExExEvent, ExExNotification, ExExNotifications, FinishedExExHeight, WalHandle, }; +use alloy_eips::BlockNumHash; use futures::StreamExt; use itertools::Itertools; use metrics::Gauge; use reth_chain_state::ForkChoiceStream; use reth_chainspec::Head; use reth_metrics::{metrics::Counter, Metrics}; -use reth_primitives::{BlockNumHash, SealedHeader}; +use reth_primitives::SealedHeader; use reth_provider::HeaderProvider; use reth_tracing::tracing::debug; use std::{ diff --git a/crates/exex/test-utils/Cargo.toml b/crates/exex/test-utils/Cargo.toml index cd0e0831b..6e5af981b 100644 --- a/crates/exex/test-utils/Cargo.toml +++ b/crates/exex/test-utils/Cargo.toml @@ -33,6 +33,9 @@ reth-tasks.workspace = true reth-transaction-pool = { workspace = true, features = ["test-utils"] } reth-trie-db.workspace = true +## alloy +alloy-eips.workspace = true + ## async futures-util.workspace = true tokio.workspace = true diff --git a/crates/exex/test-utils/src/lib.rs b/crates/exex/test-utils/src/lib.rs index 06aa8c81c..5c3468a3c 100644 --- a/crates/exex/test-utils/src/lib.rs +++ b/crates/exex/test-utils/src/lib.rs @@ -15,6 +15,7 @@ use std::{ task::Poll, }; +use alloy_eips::BlockNumHash; use futures_util::FutureExt; use reth_blockchain_tree::noop::NoopBlockchainTree; use reth_chainspec::{ChainSpec, MAINNET}; @@ -44,7 +45,7 @@ use reth_node_ethereum::{ EthEngineTypes, EthEvmConfig, }; use reth_payload_builder::noop::NoopPayloadBuilderService; -use reth_primitives::{BlockNumHash, Head, SealedBlockWithSenders}; +use reth_primitives::{Head, SealedBlockWithSenders}; use reth_provider::{ providers::{BlockchainProvider, StaticFileProvider}, BlockReader, ProviderFactory, diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index 2b651652d..b9f43df5d 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -2,7 +2,7 @@ use crate::{ GotExpected, Header, SealedHeader, TransactionSigned, TransactionSignedEcRecovered, Withdrawals, }; use alloc::vec::Vec; -pub use alloy_eips::eip1898::{BlockNumHash, BlockNumberOrTag, ForkBlock, RpcBlockHash}; +pub use alloy_eips::eip1898::{BlockNumberOrTag, ForkBlock, RpcBlockHash}; use alloy_eips::eip2718::Encodable2718; use alloy_primitives::{Address, Bytes, 
Sealable, B256}; use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable}; diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 679b24abd..4f56b1ac4 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -34,8 +34,8 @@ pub mod transaction; #[cfg(any(test, feature = "arbitrary"))] pub use block::{generate_valid_header, valid_header_strategy}; pub use block::{ - Block, BlockBody, BlockNumHash, BlockNumberOrTag, BlockWithSenders, ForkBlock, RpcBlockHash, - SealedBlock, SealedBlockWithSenders, + Block, BlockBody, BlockNumberOrTag, BlockWithSenders, ForkBlock, RpcBlockHash, SealedBlock, + SealedBlockWithSenders, }; #[cfg(feature = "reth-codec")] pub use compression::*; diff --git a/crates/rpc/rpc-eth-types/src/logs_utils.rs b/crates/rpc/rpc-eth-types/src/logs_utils.rs index 205e2bba3..aa132675c 100644 --- a/crates/rpc/rpc-eth-types/src/logs_utils.rs +++ b/crates/rpc/rpc-eth-types/src/logs_utils.rs @@ -2,11 +2,12 @@ //! //! Log parsing for building filter. +use alloy_eips::BlockNumHash; use alloy_primitives::TxHash; use alloy_rpc_types::{FilteredParams, Log}; use reth_chainspec::ChainInfo; use reth_errors::ProviderError; -use reth_primitives::{BlockNumHash, Receipt, SealedBlockWithSenders}; +use reth_primitives::{Receipt, SealedBlockWithSenders}; use reth_storage_api::BlockReader; use std::sync::Arc; diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs index 2e7cd6a06..b5593b904 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -463,15 +463,15 @@ impl BlockNumReader for MockEthProvider { } impl BlockIdReader for MockEthProvider { - fn pending_block_num_hash(&self) -> ProviderResult> { + fn pending_block_num_hash(&self) -> ProviderResult> { Ok(None) } - fn safe_block_num_hash(&self) -> ProviderResult> { + fn safe_block_num_hash(&self) -> ProviderResult> { Ok(None) } - fn finalized_block_num_hash(&self) -> ProviderResult> { + fn finalized_block_num_hash(&self) -> ProviderResult> { Ok(None) } } diff --git a/crates/storage/provider/src/test_utils/noop.rs b/crates/storage/provider/src/test_utils/noop.rs index 8e6dc7425..65c083062 100644 --- a/crates/storage/provider/src/test_utils/noop.rs +++ b/crates/storage/provider/src/test_utils/noop.rs @@ -175,15 +175,15 @@ impl BlockReaderIdExt for NoopProvider { } impl BlockIdReader for NoopProvider { - fn pending_block_num_hash(&self) -> ProviderResult> { + fn pending_block_num_hash(&self) -> ProviderResult> { Ok(None) } - fn safe_block_num_hash(&self) -> ProviderResult> { + fn safe_block_num_hash(&self) -> ProviderResult> { Ok(None) } - fn finalized_block_num_hash(&self) -> ProviderResult> { + fn finalized_block_num_hash(&self) -> ProviderResult> { Ok(None) } } diff --git a/crates/storage/storage-api/src/block_id.rs b/crates/storage/storage-api/src/block_id.rs index 55cd6ab1c..00856d348 100644 --- a/crates/storage/storage-api/src/block_id.rs +++ b/crates/storage/storage-api/src/block_id.rs @@ -99,13 +99,13 @@ pub trait BlockIdReader: BlockNumReader + Send + Sync { } /// Get the current pending block number and hash. - fn pending_block_num_hash(&self) -> ProviderResult>; + fn pending_block_num_hash(&self) -> ProviderResult>; /// Get the current safe block number and hash. - fn safe_block_num_hash(&self) -> ProviderResult>; + fn safe_block_num_hash(&self) -> ProviderResult>; /// Get the current finalized block number and hash. 
- fn finalized_block_num_hash(&self) -> ProviderResult>; + fn finalized_block_num_hash(&self) -> ProviderResult>; /// Get the safe block number. fn safe_block_number(&self) -> ProviderResult> { From 967cbc4e974ee2084f3342a0fbc37dd549861d05 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Tue, 5 Nov 2024 05:15:15 +0100 Subject: [PATCH 415/425] primitives: rm alloy `Signature` reexport (#12313) --- crates/blockchain-tree/src/blockchain_tree.rs | 6 +++--- crates/consensus/common/src/validation.rs | 4 ++-- crates/net/eth-wire-types/src/blocks.rs | 4 ++-- crates/net/eth-wire-types/src/transactions.rs | 4 ++-- crates/net/network/tests/it/big_pooled_txs_req.rs | 4 ++-- crates/net/network/tests/it/requests.rs | 4 ++-- crates/net/network/tests/it/txgossip.rs | 4 ++-- crates/optimism/evm/src/execute.rs | 4 ++-- crates/optimism/node/src/txpool.rs | 3 +-- crates/primitives/src/alloy_compat.rs | 4 ++-- crates/primitives/src/lib.rs | 2 +- crates/primitives/src/transaction/mod.rs | 12 ++++++------ crates/primitives/src/transaction/pooled.rs | 4 ++-- crates/primitives/src/transaction/sidecar.rs | 4 ++-- crates/primitives/src/transaction/signature.rs | 13 ++++--------- crates/primitives/src/transaction/util.rs | 3 +-- crates/rpc/rpc-eth-api/src/helpers/signer.rs | 4 ++-- crates/rpc/rpc-eth-types/src/simulate.rs | 5 ++--- .../rpc-types-compat/src/transaction/signature.rs | 10 +++++----- crates/rpc/rpc/src/eth/helpers/signer.rs | 4 ++-- crates/storage/provider/src/test_utils/blocks.rs | 3 ++- crates/transaction-pool/src/test_utils/mock.rs | 4 ++-- crates/transaction-pool/src/traits.rs | 3 ++- testing/testing-utils/src/generators.rs | 4 ++-- 24 files changed, 55 insertions(+), 61 deletions(-) diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index 4468d8205..1674081fe 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -1377,7 +1377,7 @@ mod tests { use alloy_consensus::{TxEip1559, EMPTY_ROOT_HASH}; use alloy_eips::eip1559::INITIAL_BASE_FEE; use alloy_genesis::{Genesis, GenesisAccount}; - use alloy_primitives::{keccak256, Address, Sealable, B256}; + use alloy_primitives::{keccak256, Address, Sealable, Signature, B256}; use assert_matches::assert_matches; use linked_hash_set::LinkedHashSet; use reth_chainspec::{ChainSpecBuilder, MAINNET, MIN_TRANSACTION_GAS}; @@ -1389,8 +1389,8 @@ mod tests { use reth_primitives::{ proofs::{calculate_receipt_root, calculate_transaction_root}, revm_primitives::AccountInfo, - Account, BlockBody, Header, Signature, Transaction, TransactionSigned, - TransactionSignedEcRecovered, Withdrawals, + Account, BlockBody, Header, Transaction, TransactionSigned, TransactionSignedEcRecovered, + Withdrawals, }; use reth_provider::{ test_utils::{ diff --git a/crates/consensus/common/src/validation.rs b/crates/consensus/common/src/validation.rs index 092330595..d4dea07dc 100644 --- a/crates/consensus/common/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -277,13 +277,13 @@ mod tests { use alloy_consensus::{TxEip4844, EMPTY_OMMER_ROOT_HASH, EMPTY_ROOT_HASH}; use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber}; use alloy_primitives::{ - hex_literal::hex, Address, BlockHash, BlockNumber, Bytes, Parity, Sealable, U256, + hex_literal::hex, Address, BlockHash, BlockNumber, Bytes, Parity, Sealable, Signature, U256, }; use mockall::mock; use rand::Rng; use reth_chainspec::ChainSpecBuilder; use 
reth_primitives::{ - proofs, Account, BlockBody, Signature, Transaction, TransactionSigned, Withdrawals, + proofs, Account, BlockBody, Transaction, TransactionSigned, Withdrawals, }; use reth_storage_api::{ errors::provider::ProviderResult, AccountReader, HeaderProvider, WithdrawalsProvider, diff --git a/crates/net/eth-wire-types/src/blocks.rs b/crates/net/eth-wire-types/src/blocks.rs index 5ae843190..ce23dfe70 100644 --- a/crates/net/eth-wire-types/src/blocks.rs +++ b/crates/net/eth-wire-types/src/blocks.rs @@ -114,9 +114,9 @@ mod tests { }; use alloy_consensus::TxLegacy; use alloy_eips::BlockHashOrNumber; - use alloy_primitives::{hex, Parity, TxKind, U256}; + use alloy_primitives::{hex, Parity, Signature, TxKind, U256}; use alloy_rlp::{Decodable, Encodable}; - use reth_primitives::{Header, Signature, Transaction, TransactionSigned}; + use reth_primitives::{Header, Transaction, TransactionSigned}; use std::str::FromStr; use super::BlockBody; diff --git a/crates/net/eth-wire-types/src/transactions.rs b/crates/net/eth-wire-types/src/transactions.rs index ab65aa178..7c66f657a 100644 --- a/crates/net/eth-wire-types/src/transactions.rs +++ b/crates/net/eth-wire-types/src/transactions.rs @@ -78,10 +78,10 @@ impl FromIterator for PooledTransactions { mod tests { use crate::{message::RequestPair, GetPooledTransactions, PooledTransactions}; use alloy_consensus::{TxEip1559, TxLegacy}; - use alloy_primitives::{hex, Parity, TxKind, U256}; + use alloy_primitives::{hex, Parity, Signature, TxKind, U256}; use alloy_rlp::{Decodable, Encodable}; use reth_chainspec::MIN_TRANSACTION_GAS; - use reth_primitives::{PooledTransactionsElement, Signature, Transaction, TransactionSigned}; + use reth_primitives::{PooledTransactionsElement, Transaction, TransactionSigned}; use std::str::FromStr; #[test] diff --git a/crates/net/network/tests/it/big_pooled_txs_req.rs b/crates/net/network/tests/it/big_pooled_txs_req.rs index 3a645da6c..29b62708e 100644 --- a/crates/net/network/tests/it/big_pooled_txs_req.rs +++ b/crates/net/network/tests/it/big_pooled_txs_req.rs @@ -1,4 +1,4 @@ -use alloy_primitives::B256; +use alloy_primitives::{Signature, B256}; use reth_eth_wire::{GetPooledTransactions, PooledTransactions}; use reth_network::{ test_utils::{NetworkEventStream, Testnet}, @@ -6,7 +6,7 @@ use reth_network::{ }; use reth_network_api::{NetworkInfo, Peers}; use reth_network_p2p::sync::{NetworkSyncUpdater, SyncState}; -use reth_primitives::{Signature, TransactionSigned}; +use reth_primitives::TransactionSigned; use reth_provider::test_utils::MockEthProvider; use reth_transaction_pool::{ test_utils::{testing_pool, MockTransaction}, diff --git a/crates/net/network/tests/it/requests.rs b/crates/net/network/tests/it/requests.rs index 61241f02d..8c00302f7 100644 --- a/crates/net/network/tests/it/requests.rs +++ b/crates/net/network/tests/it/requests.rs @@ -4,7 +4,7 @@ use std::sync::Arc; use alloy_consensus::TxEip2930; -use alloy_primitives::{Bytes, Parity, TxKind, U256}; +use alloy_primitives::{Bytes, Parity, Signature, TxKind, U256}; use rand::Rng; use reth_eth_wire::HeadersDirection; use reth_network::{ @@ -16,7 +16,7 @@ use reth_network_p2p::{ bodies::client::BodiesClient, headers::client::{HeadersClient, HeadersRequest}, }; -use reth_primitives::{Block, Header, Signature, Transaction, TransactionSigned}; +use reth_primitives::{Block, Header, Transaction, TransactionSigned}; use reth_provider::test_utils::MockEthProvider; /// Returns a new [`TransactionSigned`] with some random parameters diff --git 
a/crates/net/network/tests/it/txgossip.rs b/crates/net/network/tests/it/txgossip.rs index 70ac67bb5..f08a2b2eb 100644 --- a/crates/net/network/tests/it/txgossip.rs +++ b/crates/net/network/tests/it/txgossip.rs @@ -3,12 +3,12 @@ use std::sync::Arc; use alloy_consensus::TxLegacy; -use alloy_primitives::U256; +use alloy_primitives::{Signature, U256}; use futures::StreamExt; use rand::thread_rng; use reth_network::{test_utils::Testnet, NetworkEvent, NetworkEventListenerProvider}; use reth_network_api::PeersInfo; -use reth_primitives::{Signature, TransactionSigned}; +use reth_primitives::TransactionSigned; use reth_provider::test_utils::{ExtendedAccount, MockEthProvider}; use reth_transaction_pool::{test_utils::TransactionGenerator, PoolTransaction, TransactionPool}; diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index 91cdb1bd2..d64f4bd5e 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -297,12 +297,12 @@ mod tests { use super::*; use crate::OpChainSpec; use alloy_consensus::TxEip1559; - use alloy_primitives::{b256, Address, StorageKey, StorageValue}; + use alloy_primitives::{b256, Address, Signature, StorageKey, StorageValue}; use op_alloy_consensus::TxDeposit; use reth_chainspec::MIN_TRANSACTION_GAS; use reth_evm::execute::{BasicBlockExecutorProvider, BatchExecutor, BlockExecutorProvider}; use reth_optimism_chainspec::OpChainSpecBuilder; - use reth_primitives::{Account, Block, BlockBody, Signature, Transaction, TransactionSigned}; + use reth_primitives::{Account, Block, BlockBody, Transaction, TransactionSigned}; use reth_revm::{ database::StateProviderDatabase, test_utils::StateProviderTest, L1_BLOCK_CONTRACT, }; diff --git a/crates/optimism/node/src/txpool.rs b/crates/optimism/node/src/txpool.rs index 09aa76fef..011654909 100644 --- a/crates/optimism/node/src/txpool.rs +++ b/crates/optimism/node/src/txpool.rs @@ -231,9 +231,8 @@ pub struct OpL1BlockInfo { mod tests { use crate::txpool::OpTransactionValidator; use alloy_eips::eip2718::Encodable2718; - use alloy_primitives::{TxKind, U256}; + use alloy_primitives::{Signature, TxKind, U256}; use op_alloy_consensus::TxDeposit; - use reth::primitives::Signature; use reth_chainspec::MAINNET; use reth_primitives::{Transaction, TransactionSigned, TransactionSignedEcRecovered}; use reth_provider::test_utils::MockEthProvider; diff --git a/crates/primitives/src/alloy_compat.rs b/crates/primitives/src/alloy_compat.rs index 917baef66..d86bd04c7 100644 --- a/crates/primitives/src/alloy_compat.rs +++ b/crates/primitives/src/alloy_compat.rs @@ -1,14 +1,14 @@ //! Common conversions from alloy types. 
use crate::{ - transaction::extract_chain_id, Block, BlockBody, Signature, Transaction, TransactionSigned, + transaction::extract_chain_id, Block, BlockBody, Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, TxType, }; use alloc::{string::ToString, vec::Vec}; use alloy_consensus::{ constants::EMPTY_TRANSACTIONS, Transaction as _, TxEip1559, TxEip2930, TxEip4844, TxLegacy, }; -use alloy_primitives::{Parity, TxKind}; +use alloy_primitives::{Parity, Signature, TxKind}; use alloy_rlp::Error as RlpError; use alloy_serde::WithOtherFields; use op_alloy_rpc_types as _; diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 4f56b1ac4..be592e1c1 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -59,7 +59,7 @@ pub use transaction::BlobTransactionValidationError; pub use transaction::{ util::secp256k1::{public_key_to_address, recover_signer_unchecked, sign_message}, - InvalidTransactionError, Signature, Transaction, TransactionMeta, TransactionSigned, + InvalidTransactionError, Transaction, TransactionMeta, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, TxHashOrNumber, TxType, }; diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index f7d36be0a..adbd8f0d0 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -11,7 +11,7 @@ use alloy_eips::{ eip2930::AccessList, eip7702::SignedAuthorization, }; -use alloy_primitives::{keccak256, Address, Bytes, ChainId, TxHash, TxKind, B256, U256}; +use alloy_primitives::{keccak256, Address, Bytes, ChainId, Signature, TxHash, TxKind, B256, U256}; use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header}; use core::mem; use derive_more::{AsRef, Deref}; @@ -36,9 +36,7 @@ pub use sidecar::BlobTransactionValidationError; pub use sidecar::{BlobTransaction, BlobTransactionSidecar}; pub use compat::FillTxEnv; -pub use signature::{ - extract_chain_id, legacy_parity, recover_signer, recover_signer_unchecked, Signature, -}; +pub use signature::{extract_chain_id, legacy_parity, recover_signer, recover_signer_unchecked}; pub use tx_type::TxType; pub use variant::TransactionSignedVariant; @@ -2016,12 +2014,14 @@ pub mod serde_bincode_compat { #[cfg(test)] mod tests { use crate::{ - transaction::{signature::Signature, TxEip1559, TxKind, TxLegacy}, + transaction::{TxEip1559, TxKind, TxLegacy}, Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, }; use alloy_consensus::Transaction as _; use alloy_eips::eip2718::{Decodable2718, Encodable2718}; - use alloy_primitives::{address, b256, bytes, hex, Address, Bytes, Parity, B256, U256}; + use alloy_primitives::{ + address, b256, bytes, hex, Address, Bytes, Parity, Signature, B256, U256, + }; use alloy_rlp::{Decodable, Encodable, Error as RlpError}; use reth_chainspec::MIN_TRANSACTION_GAS; use reth_codecs::Compact; diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index 000ff41fe..11da5d838 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -7,7 +7,7 @@ use super::{ TxEip7702, }; use crate::{ - BlobTransaction, BlobTransactionSidecar, Signature, Transaction, TransactionSigned, + BlobTransaction, BlobTransactionSidecar, Transaction, TransactionSigned, TransactionSignedEcRecovered, }; use alloy_consensus::{ @@ -16,7 +16,7 @@ use alloy_consensus::{ SignableTransaction, 
TxEip4844WithSidecar, }; use alloy_eips::eip2718::{Decodable2718, Eip2718Result, Encodable2718}; -use alloy_primitives::{Address, TxHash, B256}; +use alloy_primitives::{Address, Signature, TxHash, B256}; use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header}; use bytes::Buf; use derive_more::{AsRef, Deref}; diff --git a/crates/primitives/src/transaction/sidecar.rs b/crates/primitives/src/transaction/sidecar.rs index 1e6560e15..aa473ef8a 100644 --- a/crates/primitives/src/transaction/sidecar.rs +++ b/crates/primitives/src/transaction/sidecar.rs @@ -1,8 +1,8 @@ #![cfg_attr(docsrs, doc(cfg(feature = "c-kzg")))] -use crate::{Signature, Transaction, TransactionSigned}; +use crate::{Transaction, TransactionSigned}; use alloy_consensus::{constants::EIP4844_TX_TYPE_ID, TxEip4844WithSidecar}; -use alloy_primitives::TxHash; +use alloy_primitives::{Signature, TxHash}; use alloy_rlp::Header; use serde::{Deserialize, Serialize}; diff --git a/crates/primitives/src/transaction/signature.rs b/crates/primitives/src/transaction/signature.rs index 5bfdab8e6..b73206e6e 100644 --- a/crates/primitives/src/transaction/signature.rs +++ b/crates/primitives/src/transaction/signature.rs @@ -1,9 +1,7 @@ use crate::transaction::util::secp256k1; -use alloy_primitives::{Address, Parity, B256, U256}; +use alloy_primitives::{Address, Parity, Signature, B256, U256}; use alloy_rlp::{Decodable, Error as RlpError}; -pub use alloy_primitives::Signature; - /// The order of the secp256k1 curve, divided by two. Signatures that should be checked according /// to EIP-2 should have an S value less than or equal to this. /// @@ -111,14 +109,11 @@ pub const fn extract_chain_id(v: u64) -> alloy_rlp::Result<(bool, Option)> #[cfg(test)] mod tests { - use crate::{ - transaction::signature::{ - legacy_parity, recover_signer, recover_signer_unchecked, SECP256K1N_HALF, - }, - Signature, + use crate::transaction::signature::{ + legacy_parity, recover_signer, recover_signer_unchecked, SECP256K1N_HALF, }; use alloy_eips::eip2718::Decodable2718; - use alloy_primitives::{hex, Address, Parity, B256, U256}; + use alloy_primitives::{hex, Address, Parity, Signature, B256, U256}; use std::str::FromStr; #[test] diff --git a/crates/primitives/src/transaction/util.rs b/crates/primitives/src/transaction/util.rs index 7569400e9..ff2c2e0da 100644 --- a/crates/primitives/src/transaction/util.rs +++ b/crates/primitives/src/transaction/util.rs @@ -1,5 +1,4 @@ -use crate::Signature; -use alloy_primitives::Address; +use alloy_primitives::{Address, Signature}; #[cfg(feature = "secp256k1")] pub(crate) mod secp256k1 { diff --git a/crates/rpc/rpc-eth-api/src/helpers/signer.rs b/crates/rpc/rpc-eth-api/src/helpers/signer.rs index ab11e62d5..36e927740 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/signer.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/signer.rs @@ -1,10 +1,10 @@ //! An abstraction over ethereum signers. use alloy_dyn_abi::TypedData; -use alloy_primitives::Address; +use alloy_primitives::{Address, Signature}; use alloy_rpc_types_eth::TransactionRequest; use dyn_clone::DynClone; -use reth_primitives::{Signature, TransactionSigned}; +use reth_primitives::TransactionSigned; use reth_rpc_eth_types::SignError; use std::result; diff --git a/crates/rpc/rpc-eth-types/src/simulate.rs b/crates/rpc/rpc-eth-types/src/simulate.rs index 62f0e24b1..20952413c 100644 --- a/crates/rpc/rpc-eth-types/src/simulate.rs +++ b/crates/rpc/rpc-eth-types/src/simulate.rs @@ -1,7 +1,7 @@ //! 
Utilities for serving `eth_simulateV1` use alloy_consensus::{Transaction as _, TxEip4844Variant, TxType, TypedTransaction}; -use alloy_primitives::Parity; +use alloy_primitives::{Parity, Signature}; use alloy_rpc_types::{ simulate::{SimCallResult, SimulateError, SimulatedBlock}, Block, BlockTransactionsKind, @@ -10,8 +10,7 @@ use alloy_rpc_types_eth::transaction::TransactionRequest; use jsonrpsee_types::ErrorObject; use reth_primitives::{ proofs::{calculate_receipt_root, calculate_transaction_root}, - BlockBody, BlockWithSenders, Receipt, Signature, Transaction, TransactionSigned, - TransactionSignedNoHash, + BlockBody, BlockWithSenders, Receipt, Transaction, TransactionSigned, TransactionSignedNoHash, }; use reth_revm::database::StateProviderDatabase; use reth_rpc_server_types::result::rpc_err; diff --git a/crates/rpc/rpc-types-compat/src/transaction/signature.rs b/crates/rpc/rpc-types-compat/src/transaction/signature.rs index 536f6ac5e..77ae365b2 100644 --- a/crates/rpc/rpc-types-compat/src/transaction/signature.rs +++ b/crates/rpc/rpc-types-compat/src/transaction/signature.rs @@ -1,9 +1,9 @@ -use alloy_primitives::U256; +use alloy_primitives::{Signature as PrimitiveSignature, U256}; use alloy_rpc_types::{Parity, Signature}; -use reth_primitives::{transaction::legacy_parity, Signature as PrimitiveSignature, TxType}; +use reth_primitives::{transaction::legacy_parity, TxType}; /// Creates a new rpc signature from a legacy [primitive -/// signature](reth_primitives::Signature), using the give chain id to compute the signature's +/// signature](alloy_primitives::Signature), using the give chain id to compute the signature's /// recovery id. /// /// If the chain id is `Some`, the recovery id is computed according to [EIP-155](https://eips.ethereum.org/EIPS/eip-155). @@ -20,7 +20,7 @@ pub fn from_legacy_primitive_signature( } /// Creates a new rpc signature from a non-legacy [primitive -/// signature](reth_primitives::Signature). This sets the `v` value to `0` or `1` depending on +/// signature](alloy_primitives::Signature). This sets the `v` value to `0` or `1` depending on /// the signature's `odd_y_parity`. pub fn from_typed_primitive_signature(signature: PrimitiveSignature) -> Signature { Signature { @@ -32,7 +32,7 @@ pub fn from_typed_primitive_signature(signature: PrimitiveSignature) -> Signatur } /// Creates a new rpc signature from a legacy [primitive -/// signature](reth_primitives::Signature). +/// signature](alloy_primitives::Signature). /// /// The tx type is used to determine whether or not to use the `chain_id` to compute the /// signature's recovery id. 
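A note on the recovery id math referenced in the signature doc comments above: for legacy transactions the `v` value either encodes only the y-parity (27/28) or, when a chain id is known, folds it in per EIP-155. Below is a minimal standalone sketch of that computation, not the crate's implementation; the function name and test are illustrative only.

```rust
/// Illustrative sketch: derive the legacy `v` value from the y-parity,
/// applying the EIP-155 encoding when a chain id is available.
fn legacy_v(y_parity: bool, chain_id: Option<u64>) -> u64 {
    match chain_id {
        // EIP-155: v = y_parity + chain_id * 2 + 35
        Some(id) => y_parity as u64 + id * 2 + 35,
        // pre-EIP-155: v = y_parity + 27
        None => y_parity as u64 + 27,
    }
}

#[test]
fn legacy_v_values() {
    // mainnet (chain id 1), odd parity: 1 + 2 + 35 = 38
    assert_eq!(legacy_v(true, Some(1)), 38);
    // no chain id, even parity: plain 27
    assert_eq!(legacy_v(false, None), 27);
}
```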
diff --git a/crates/rpc/rpc/src/eth/helpers/signer.rs b/crates/rpc/rpc/src/eth/helpers/signer.rs index e59be0ac2..c6c603127 100644 --- a/crates/rpc/rpc/src/eth/helpers/signer.rs +++ b/crates/rpc/rpc/src/eth/helpers/signer.rs @@ -6,11 +6,11 @@ use crate::EthApi; use alloy_dyn_abi::TypedData; use alloy_eips::eip2718::Decodable2718; use alloy_network::{eip2718::Encodable2718, EthereumWallet, TransactionBuilder}; -use alloy_primitives::{eip191_hash_message, Address, B256}; +use alloy_primitives::{eip191_hash_message, Address, Signature, B256}; use alloy_rpc_types_eth::TransactionRequest; use alloy_signer::SignerSync; use alloy_signer_local::PrivateKeySigner; -use reth_primitives::{Signature, TransactionSigned}; +use reth_primitives::TransactionSigned; use reth_rpc_eth_api::helpers::{signer::Result, AddDevSigners, EthSigner}; use reth_rpc_eth_types::SignError; diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index d524f47cc..19f885e27 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -7,12 +7,13 @@ use alloy_primitives::{ }; use alloy_eips::eip4895::Withdrawal; +use alloy_primitives::Signature; use reth_db::tables; use reth_db_api::{database::Database, models::StoredBlockBodyIndices}; use reth_node_types::NodeTypes; use reth_primitives::{ Account, BlockBody, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, - Signature, Transaction, TransactionSigned, TxType, Withdrawals, + Transaction, TransactionSigned, TxType, Withdrawals, }; use reth_trie::root::{state_root_unhashed, storage_root_unhashed}; use revm::{db::BundleState, primitives::AccountInfo}; diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index a3cddaf0a..c6143ff16 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -12,7 +12,7 @@ use alloy_consensus::{ TxEip1559, TxEip2930, TxEip4844, TxLegacy, }; use alloy_eips::{eip1559::MIN_PROTOCOL_BASE_FEE, eip2930::AccessList, eip4844::DATA_GAS_PER_BLOB}; -use alloy_primitives::{Address, Bytes, ChainId, TxHash, TxKind, B256, U256}; +use alloy_primitives::{Address, Bytes, ChainId, Signature, TxHash, TxKind, B256, U256}; use paste::paste; use rand::{ distributions::{Uniform, WeightedIndex}, @@ -20,7 +20,7 @@ use rand::{ }; use reth_primitives::{ transaction::TryFromRecoveredTransactionError, BlobTransactionSidecar, - BlobTransactionValidationError, PooledTransactionsElementEcRecovered, Signature, Transaction, + BlobTransactionValidationError, PooledTransactionsElementEcRecovered, Transaction, TransactionSigned, TransactionSignedEcRecovered, TxType, }; diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index 2667143b7..709b43e71 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -1498,7 +1498,8 @@ mod tests { use super::*; use alloy_consensus::{TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy}; use alloy_eips::eip4844::DATA_GAS_PER_BLOB; - use reth_primitives::{Signature, TransactionSigned}; + use alloy_primitives::Signature; + use reth_primitives::TransactionSigned; #[test] fn test_pool_size_invariants() { diff --git a/testing/testing-utils/src/generators.rs b/testing/testing-utils/src/generators.rs index 84225ea72..83fcf4484 100644 --- a/testing/testing-utils/src/generators.rs +++ b/testing/testing-utils/src/generators.rs @@ -464,8 +464,8 
@@ mod tests { use super::*; use alloy_consensus::TxEip1559; use alloy_eips::eip2930::AccessList; - use alloy_primitives::{hex, Parity}; - use reth_primitives::{public_key_to_address, Signature}; + use alloy_primitives::{hex, Parity, Signature}; + use reth_primitives::public_key_to_address; use std::str::FromStr; #[test] From e3173407e103fd036ab6240d599668292cf1190a Mon Sep 17 00:00:00 2001 From: Steven <112043913+stevencartavia@users.noreply.github.com> Date: Tue, 5 Nov 2024 04:07:58 -0600 Subject: [PATCH 416/425] renamed OptimismEngineValidator to OpEngineValidator (#12312) --- crates/optimism/node/src/engine.rs | 26 +++++++++++++------------- crates/optimism/node/src/node.rs | 14 +++++++------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/crates/optimism/node/src/engine.rs b/crates/optimism/node/src/engine.rs index d956f0cd5..eb356e86e 100644 --- a/crates/optimism/node/src/engine.rs +++ b/crates/optimism/node/src/engine.rs @@ -58,11 +58,11 @@ impl PayloadTypes for OpPayloadTypes { /// Validator for Optimism engine API. #[derive(Debug, Clone)] -pub struct OptimismEngineValidator { +pub struct OpEngineValidator { chain_spec: Arc, } -impl OptimismEngineValidator { +impl OpEngineValidator { /// Instantiates a new validator. pub const fn new(chain_spec: Arc) -> Self { Self { chain_spec } @@ -111,7 +111,7 @@ pub fn validate_withdrawals_presence( Ok(()) } -impl EngineValidator for OptimismEngineValidator +impl EngineValidator for OpEngineValidator where Types: EngineTypes, { @@ -220,10 +220,10 @@ mod test { #[test] fn test_well_formed_attributes_pre_holocene() { - let validator = OptimismEngineValidator::new(get_chainspec(false)); + let validator = OpEngineValidator::new(get_chainspec(false)); let attributes = get_attributes(None, 1799999999); - let result = >::ensure_well_formed_attributes( &validator, EngineApiMessageVersion::V3, &attributes @@ -233,10 +233,10 @@ mod test { #[test] fn test_well_formed_attributes_holocene_no_eip1559_params() { - let validator = OptimismEngineValidator::new(get_chainspec(true)); + let validator = OpEngineValidator::new(get_chainspec(true)); let attributes = get_attributes(None, 1800000000); - let result = >::ensure_well_formed_attributes( &validator, EngineApiMessageVersion::V3, &attributes @@ -246,10 +246,10 @@ mod test { #[test] fn test_well_formed_attributes_holocene_eip1559_params_zero_denominator() { - let validator = OptimismEngineValidator::new(get_chainspec(true)); + let validator = OpEngineValidator::new(get_chainspec(true)); let attributes = get_attributes(Some(b64!("0000000000000008")), 1800000000); - let result = >::ensure_well_formed_attributes( &validator, EngineApiMessageVersion::V3, &attributes @@ -259,10 +259,10 @@ mod test { #[test] fn test_well_formed_attributes_holocene_valid() { - let validator = OptimismEngineValidator::new(get_chainspec(true)); + let validator = OpEngineValidator::new(get_chainspec(true)); let attributes = get_attributes(Some(b64!("0000000800000008")), 1800000000); - let result = >::ensure_well_formed_attributes( &validator, EngineApiMessageVersion::V3, &attributes @@ -272,10 +272,10 @@ mod test { #[test] fn test_well_formed_attributes_holocene_valid_all_zero() { - let validator = OptimismEngineValidator::new(get_chainspec(true)); + let validator = OpEngineValidator::new(get_chainspec(true)); let attributes = get_attributes(Some(b64!("0000000000000000")), 1800000000); - let result = >::ensure_well_formed_attributes( &validator, EngineApiMessageVersion::V3, &attributes diff --git 
a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 7e6f1d098..14088e636 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -34,7 +34,7 @@ use reth_trie_db::MerklePatriciaTrie; use crate::{ args::RollupArgs, - engine::OptimismEngineValidator, + engine::OpEngineValidator, txpool::{OpTransactionPool, OpTransactionValidator}, OpEngineTypes, }; @@ -150,7 +150,7 @@ impl OptimismAddOns { impl NodeAddOns for OptimismAddOns where N: FullNodeComponents>, - OptimismEngineValidator: EngineValidator<::Engine>, + OpEngineValidator: EngineValidator<::Engine>, { type Handle = RpcHandle>; @@ -165,7 +165,7 @@ where impl RethRpcAddOns for OptimismAddOns where N: FullNodeComponents>, - OptimismEngineValidator: EngineValidator<::Engine>, + OpEngineValidator: EngineValidator<::Engine>, { type EthApi = OpEthApi; @@ -456,7 +456,7 @@ where } } -/// Builder for [`OptimismEngineValidator`]. +/// Builder for [`OpEngineValidator`]. #[derive(Debug, Default, Clone)] #[non_exhaustive] pub struct OptimismEngineValidatorBuilder; @@ -465,11 +465,11 @@ impl EngineValidatorBuilder for OptimismEngineValidatorBuilde where Types: NodeTypesWithEngine, Node: FullNodeComponents, - OptimismEngineValidator: EngineValidator, + OpEngineValidator: EngineValidator, { - type Validator = OptimismEngineValidator; + type Validator = OpEngineValidator; async fn build(self, ctx: &AddOnsContext<'_, Node>) -> eyre::Result { - Ok(OptimismEngineValidator::new(ctx.config.chain.clone())) + Ok(OpEngineValidator::new(ctx.config.chain.clone())) } } From 4222cbe6826715a5b7b70e8505984ffb613b7078 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 5 Nov 2024 12:11:04 +0100 Subject: [PATCH 417/425] chore: switch op to new engine (#12329) --- crates/optimism/bin/src/main.rs | 11 +++++++---- crates/optimism/node/src/args.rs | 14 +++++++++++--- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/crates/optimism/bin/src/main.rs b/crates/optimism/bin/src/main.rs index c6d3e32b7..6c440f434 100644 --- a/crates/optimism/bin/src/main.rs +++ b/crates/optimism/bin/src/main.rs @@ -23,10 +23,13 @@ fn main() { if let Err(err) = Cli::::parse().run(|builder, rollup_args| async move { - let enable_engine2 = rollup_args.experimental; + if rollup_args.experimental { + tracing::warn!(target: "reth::cli", "Experimental engine is default now, and the --engine.experimental flag is deprecated. 
To enable the legacy functionality, use --engine.legacy."); + } + let use_legacy_engine = rollup_args.legacy; let sequencer_http_arg = rollup_args.sequencer_http.clone(); - match enable_engine2 { - true => { + match use_legacy_engine { + false => { let engine_tree_config = TreeConfig::default() .with_persistence_threshold(rollup_args.persistence_threshold) .with_memory_block_buffer_target(rollup_args.memory_block_buffer_target); @@ -46,7 +49,7 @@ fn main() { handle.node_exit_future.await } - false => { + true => { let handle = builder.node(OptimismNode::new(rollup_args.clone())).launch().await?; diff --git a/crates/optimism/node/src/args.rs b/crates/optimism/node/src/args.rs index 54be83dc5..b84e98d28 100644 --- a/crates/optimism/node/src/args.rs +++ b/crates/optimism/node/src/args.rs @@ -38,16 +38,23 @@ pub struct RollupArgs { #[arg(long = "rollup.discovery.v4", default_value = "false")] pub discovery_v4: bool, - /// Enable the engine2 experimental features on op-reth binary + /// Enable the experimental engine features on reth binary + /// + /// DEPRECATED: experimental engine is default now, use --engine.legacy to enable the legacy + /// functionality #[arg(long = "engine.experimental", default_value = "false")] pub experimental: bool, + /// Enable the legacy engine on reth binary + #[arg(long = "engine.legacy", default_value = "false")] + pub legacy: bool, + /// Configure persistence threshold for engine experimental. - #[arg(long = "engine.persistence-threshold", requires = "experimental", default_value_t = DEFAULT_PERSISTENCE_THRESHOLD)] + #[arg(long = "engine.persistence-threshold", conflicts_with = "legacy", default_value_t = DEFAULT_PERSISTENCE_THRESHOLD)] pub persistence_threshold: u64, /// Configure the target number of blocks to keep in memory. - #[arg(long = "engine.memory-block-buffer-target", requires = "experimental", default_value_t = DEFAULT_MEMORY_BLOCK_BUFFER_TARGET)] + #[arg(long = "engine.memory-block-buffer-target", conflicts_with = "legacy", default_value_t = DEFAULT_MEMORY_BLOCK_BUFFER_TARGET)] pub memory_block_buffer_target: u64, } @@ -60,6 +67,7 @@ impl Default for RollupArgs { compute_pending_block: false, discovery_v4: false, experimental: false, + legacy: false, persistence_threshold: DEFAULT_PERSISTENCE_THRESHOLD, memory_block_buffer_target: DEFAULT_MEMORY_BLOCK_BUFFER_TARGET, } From 556995fc5d8df8f6614a3cd2a7fc152cac3d3efe Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Tue, 5 Nov 2024 12:23:20 +0100 Subject: [PATCH 418/425] chore: limit initial status size (#12324) --- crates/net/eth-wire/src/ethstream.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/net/eth-wire/src/ethstream.rs b/crates/net/eth-wire/src/ethstream.rs index 74f3fab2b..8ae599b67 100644 --- a/crates/net/eth-wire/src/ethstream.rs +++ b/crates/net/eth-wire/src/ethstream.rs @@ -21,6 +21,9 @@ use tracing::{debug, trace}; // https://github.com/ethereum/go-ethereum/blob/30602163d5d8321fbc68afdcbbaf2362b2641bde/eth/protocols/eth/protocol.go#L50 pub const MAX_MESSAGE_SIZE: usize = 10 * 1024 * 1024; +/// [`MAX_STATUS_SIZE`] is the maximum cap on the size of the initial status message +pub(crate) const MAX_STATUS_SIZE: usize = 500 * 1024; + /// An un-authenticated [`EthStream`]. This is consumed and returns a [`EthStream`] after the /// `Status` handshake is completed. 
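For context on the new constant above: the 500 KiB status cap sits well below the general 10 MiB protocol message cap already defined in this module. A standalone sketch with the two values copied from the diff; the compile-time assertion itself is only illustrative and not part of the patch.

```rust
// Values mirrored from crates/net/eth-wire/src/ethstream.rs in this patch.
const MAX_MESSAGE_SIZE: usize = 10 * 1024 * 1024;
const MAX_STATUS_SIZE: usize = 500 * 1024;

// Compile-time sanity check: the status cap stays below the general message cap.
const _: () = assert!(MAX_STATUS_SIZE < MAX_MESSAGE_SIZE);
```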
#[pin_project] @@ -97,7 +100,7 @@ where } }?; - if their_msg.len() > MAX_MESSAGE_SIZE { + if their_msg.len() > MAX_STATUS_SIZE { self.inner.disconnect(DisconnectReason::ProtocolBreach).await?; return Err(EthStreamError::MessageTooBig(their_msg.len())) } From 441ddbf0858e809556cbb6e85cde860df592c8b2 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Tue, 5 Nov 2024 12:37:56 +0100 Subject: [PATCH 419/425] primitives: rm more alloy block reexports (#12308) --- Cargo.lock | 3 ++- crates/optimism/node/src/txpool.rs | 2 +- crates/optimism/rpc/src/eth/pending_block.rs | 5 ++--- crates/payload/basic/src/lib.rs | 6 ++---- crates/primitives/src/block.rs | 8 +++++--- crates/primitives/src/lib.rs | 5 +---- crates/rpc/rpc-api/Cargo.toml | 1 - crates/rpc/rpc-api/src/debug.rs | 3 +-- crates/rpc/rpc-builder/tests/it/http.rs | 4 ++-- crates/rpc/rpc-eth-api/src/core.rs | 3 +-- crates/rpc/rpc-eth-api/src/helpers/state.rs | 6 +++--- crates/rpc/rpc-eth-types/src/gas_oracle.rs | 2 +- crates/rpc/rpc-eth-types/src/pending_block.rs | 4 ++-- crates/rpc/rpc-testing-util/src/trace.rs | 2 +- crates/rpc/rpc/src/debug.rs | 4 ++-- crates/rpc/rpc/src/engine.rs | 3 +-- crates/rpc/rpc/src/eth/core.rs | 5 +++-- crates/rpc/rpc/src/otterscan.rs | 3 +-- crates/transaction-pool/src/maintain.rs | 3 ++- examples/custom-inspector/Cargo.toml | 1 + examples/custom-inspector/src/main.rs | 2 +- examples/custom-payload-builder/Cargo.toml | 1 + examples/custom-payload-builder/src/generator.rs | 3 ++- 23 files changed, 38 insertions(+), 41 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9d40ebfb2..7c0c4f318 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2890,6 +2890,7 @@ dependencies = [ name = "example-custom-inspector" version = "0.0.0" dependencies = [ + "alloy-eips", "alloy-primitives", "alloy-rpc-types", "clap", @@ -2913,6 +2914,7 @@ dependencies = [ name = "example-custom-payload-builder" version = "0.0.0" dependencies = [ + "alloy-eips", "alloy-primitives", "eyre", "futures-util", @@ -8700,7 +8702,6 @@ dependencies = [ "jsonrpsee", "reth-engine-primitives", "reth-network-peers", - "reth-primitives", "reth-rpc-eth-api", "serde", "serde_with", diff --git a/crates/optimism/node/src/txpool.rs b/crates/optimism/node/src/txpool.rs index 011654909..b1255a987 100644 --- a/crates/optimism/node/src/txpool.rs +++ b/crates/optimism/node/src/txpool.rs @@ -74,7 +74,7 @@ where pub fn new(inner: EthTransactionValidator) -> Self { let this = Self::with_block_info(inner, OpL1BlockInfo::default()); if let Ok(Some(block)) = - this.inner.client().block_by_number_or_tag(reth_primitives::BlockNumberOrTag::Latest) + this.inner.client().block_by_number_or_tag(alloy_eips::BlockNumberOrTag::Latest) { // genesis block has no txs, so we can't extract L1 info, we set the block info to empty // so that we will accept txs into the pool before the first block diff --git a/crates/optimism/rpc/src/eth/pending_block.rs b/crates/optimism/rpc/src/eth/pending_block.rs index 3b3b7845c..c90b3f7b7 100644 --- a/crates/optimism/rpc/src/eth/pending_block.rs +++ b/crates/optimism/rpc/src/eth/pending_block.rs @@ -1,12 +1,11 @@ //! Loads OP pending block for a RPC response. 
+use alloy_eips::BlockNumberOrTag; use alloy_primitives::{BlockNumber, B256}; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_evm::ConfigureEvm; use reth_optimism_consensus::calculate_receipt_root_no_memo_optimism; -use reth_primitives::{ - revm_primitives::BlockEnv, BlockNumberOrTag, Header, Receipt, SealedBlockWithSenders, -}; +use reth_primitives::{revm_primitives::BlockEnv, Header, Receipt, SealedBlockWithSenders}; use reth_provider::{ BlockReader, BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, ExecutionOutcome, ReceiptProvider, StateProviderFactory, diff --git a/crates/payload/basic/src/lib.rs b/crates/payload/basic/src/lib.rs index 7b1de980c..d6dcdf114 100644 --- a/crates/payload/basic/src/lib.rs +++ b/crates/payload/basic/src/lib.rs @@ -10,7 +10,7 @@ use crate::metrics::PayloadBuilderMetrics; use alloy_consensus::constants::EMPTY_WITHDRAWALS; -use alloy_eips::merge::SLOT_DURATION; +use alloy_eips::{merge::SLOT_DURATION, BlockNumberOrTag}; use alloy_primitives::{Bytes, B256, U256}; use futures_core::ready; use futures_util::FutureExt; @@ -20,9 +20,7 @@ use reth_payload_builder::{KeepPayloadJobAlive, PayloadId, PayloadJob, PayloadJo use reth_payload_primitives::{ BuiltPayload, PayloadBuilderAttributes, PayloadBuilderError, PayloadKind, }; -use reth_primitives::{ - constants::RETH_CLIENT_VERSION, proofs, BlockNumberOrTag, SealedHeader, Withdrawals, -}; +use reth_primitives::{constants::RETH_CLIENT_VERSION, proofs, SealedHeader, Withdrawals}; use reth_provider::{ BlockReaderIdExt, BlockSource, CanonStateNotification, ProviderError, StateProviderFactory, }; diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index b9f43df5d..5f3272848 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -2,7 +2,6 @@ use crate::{ GotExpected, Header, SealedHeader, TransactionSigned, TransactionSignedEcRecovered, Withdrawals, }; use alloc::vec::Vec; -pub use alloy_eips::eip1898::{BlockNumberOrTag, ForkBlock, RpcBlockHash}; use alloy_eips::eip2718::Encodable2718; use alloy_primitives::{Address, Bytes, Sealable, B256}; use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable}; @@ -904,8 +903,11 @@ pub(super) mod serde_bincode_compat { #[cfg(test)] mod tests { - use super::{BlockNumberOrTag::*, *}; - use alloy_eips::{eip1898::HexStringMissingPrefixError, BlockId}; + use super::*; + use alloy_eips::{ + eip1898::HexStringMissingPrefixError, BlockId, BlockNumberOrTag, BlockNumberOrTag::*, + RpcBlockHash, + }; use alloy_primitives::hex_literal::hex; use alloy_rlp::{Decodable, Encodable}; use std::str::FromStr; diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index be592e1c1..9b121a56f 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -33,10 +33,7 @@ pub use reth_static_file_types as static_file; pub mod transaction; #[cfg(any(test, feature = "arbitrary"))] pub use block::{generate_valid_header, valid_header_strategy}; -pub use block::{ - Block, BlockBody, BlockNumberOrTag, BlockWithSenders, ForkBlock, RpcBlockHash, SealedBlock, - SealedBlockWithSenders, -}; +pub use block::{Block, BlockBody, BlockWithSenders, SealedBlock, SealedBlockWithSenders}; #[cfg(feature = "reth-codec")] pub use compression::*; pub use constants::HOLESKY_GENESIS_HASH; diff --git a/crates/rpc/rpc-api/Cargo.toml b/crates/rpc/rpc-api/Cargo.toml index 60146e8b2..75c06a255 100644 --- a/crates/rpc/rpc-api/Cargo.toml +++ b/crates/rpc/rpc-api/Cargo.toml @@ -13,7 +13,6 @@ workspace = true [dependencies] # reth 
-reth-primitives.workspace = true reth-rpc-eth-api.workspace = true reth-engine-primitives.workspace = true reth-network-peers.workspace = true diff --git a/crates/rpc/rpc-api/src/debug.rs b/crates/rpc/rpc-api/src/debug.rs index 162699c6e..d1837787d 100644 --- a/crates/rpc/rpc-api/src/debug.rs +++ b/crates/rpc/rpc-api/src/debug.rs @@ -1,4 +1,4 @@ -use alloy_eips::BlockId; +use alloy_eips::{BlockId, BlockNumberOrTag}; use alloy_primitives::{Address, Bytes, B256}; use alloy_rpc_types::{Block, Bundle, StateContext}; use alloy_rpc_types_debug::ExecutionWitness; @@ -7,7 +7,6 @@ use alloy_rpc_types_trace::geth::{ BlockTraceResult, GethDebugTracingCallOptions, GethDebugTracingOptions, GethTrace, TraceResult, }; use jsonrpsee::{core::RpcResult, proc_macros::rpc}; -use reth_primitives::BlockNumberOrTag; /// Debug rpc interface. #[cfg_attr(not(feature = "client"), rpc(server, namespace = "debug"))] diff --git a/crates/rpc/rpc-builder/tests/it/http.rs b/crates/rpc/rpc-builder/tests/it/http.rs index 5c33a5d34..b5faa71cc 100644 --- a/crates/rpc/rpc-builder/tests/it/http.rs +++ b/crates/rpc/rpc-builder/tests/it/http.rs @@ -2,7 +2,7 @@ //! Standalone http tests use crate::utils::{launch_http, launch_http_ws, launch_ws}; -use alloy_eips::BlockId; +use alloy_eips::{BlockId, BlockNumberOrTag}; use alloy_primitives::{hex_literal::hex, Address, Bytes, TxHash, B256, B64, U256, U64}; use alloy_rpc_types_eth::{ transaction::TransactionRequest, Block, FeeHistory, Filter, Index, Log, @@ -19,7 +19,7 @@ use jsonrpsee::{ types::error::ErrorCode, }; use reth_network_peers::NodeRecord; -use reth_primitives::{BlockNumberOrTag, Receipt}; +use reth_primitives::Receipt; use reth_rpc_api::{ clients::{AdminApiClient, EthApiClient}, DebugApiClient, EthFilterApiClient, NetApiClient, OtterscanClient, TraceApiClient, diff --git a/crates/rpc/rpc-eth-api/src/core.rs b/crates/rpc/rpc-eth-api/src/core.rs index 185297c22..a89364a15 100644 --- a/crates/rpc/rpc-eth-api/src/core.rs +++ b/crates/rpc/rpc-eth-api/src/core.rs @@ -1,7 +1,7 @@ //! Implementation of the [`jsonrpsee`] generated [`EthApiServer`] trait. Handles RPC requests for //! the `eth_` namespace. use alloy_dyn_abi::TypedData; -use alloy_eips::{eip2930::AccessListResult, BlockId}; +use alloy_eips::{eip2930::AccessListResult, BlockId, BlockNumberOrTag}; use alloy_json_rpc::RpcObject; use alloy_primitives::{Address, Bytes, B256, B64, U256, U64}; use alloy_rpc_types::{ @@ -13,7 +13,6 @@ use alloy_rpc_types::{ }; use alloy_rpc_types_eth::transaction::TransactionRequest; use jsonrpsee::{core::RpcResult, proc_macros::rpc}; -use reth_primitives::BlockNumberOrTag; use reth_rpc_server_types::{result::internal_rpc_err, ToRpcResult}; use tracing::trace; diff --git a/crates/rpc/rpc-eth-api/src/helpers/state.rs b/crates/rpc/rpc-eth-api/src/helpers/state.rs index d980b9114..702572064 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/state.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/state.rs @@ -30,7 +30,7 @@ pub trait EthState: LoadState + SpawnBlocking { /// Returns the number of transactions sent from an address at the given block identifier. /// - /// If this is [`BlockNumberOrTag::Pending`](reth_primitives::BlockNumberOrTag) then this will + /// If this is [`BlockNumberOrTag::Pending`](alloy_eips::BlockNumberOrTag) then this will /// look up the highest transaction in pool and return the next nonce (highest + 1). fn transaction_count( &self, @@ -184,7 +184,7 @@ pub trait LoadState: /// Returns the state at the given [`BlockId`] enum. 
/// - /// Note: if not [`BlockNumberOrTag::Pending`](reth_primitives::BlockNumberOrTag) then this + /// Note: if not [`BlockNumberOrTag::Pending`](alloy_eips::BlockNumberOrTag) then this /// will only return canonical state. See also fn state_at_block_id(&self, at: BlockId) -> Result { self.provider().state_by_block_id(at).map_err(Self::Error::from_eth_err) @@ -302,7 +302,7 @@ pub trait LoadState: /// Returns the number of transactions sent from an address at the given block identifier. /// - /// If this is [`BlockNumberOrTag::Pending`](reth_primitives::BlockNumberOrTag) then this will + /// If this is [`BlockNumberOrTag::Pending`](alloy_eips::BlockNumberOrTag) then this will /// look up the highest transaction in pool and return the next nonce (highest + 1). fn transaction_count( &self, diff --git a/crates/rpc/rpc-eth-types/src/gas_oracle.rs b/crates/rpc/rpc-eth-types/src/gas_oracle.rs index 065ac1acc..9da373376 100644 --- a/crates/rpc/rpc-eth-types/src/gas_oracle.rs +++ b/crates/rpc/rpc-eth-types/src/gas_oracle.rs @@ -2,11 +2,11 @@ //! previous blocks. use alloy_consensus::constants::GWEI_TO_WEI; +use alloy_eips::BlockNumberOrTag; use alloy_primitives::{B256, U256}; use alloy_rpc_types::BlockId; use derive_more::{Deref, DerefMut, From, Into}; use itertools::Itertools; -use reth_primitives::BlockNumberOrTag; use reth_rpc_server_types::constants; use reth_storage_api::BlockReaderIdExt; use schnellru::{ByLength, LruMap}; diff --git a/crates/rpc/rpc-eth-types/src/pending_block.rs b/crates/rpc/rpc-eth-types/src/pending_block.rs index d3e7c4158..d8f413650 100644 --- a/crates/rpc/rpc-eth-types/src/pending_block.rs +++ b/crates/rpc/rpc-eth-types/src/pending_block.rs @@ -4,10 +4,10 @@ use std::time::Instant; -use alloy_eips::BlockId; +use alloy_eips::{BlockId, BlockNumberOrTag}; use alloy_primitives::B256; use derive_more::Constructor; -use reth_primitives::{BlockNumberOrTag, Receipt, SealedBlockWithSenders, SealedHeader}; +use reth_primitives::{Receipt, SealedBlockWithSenders, SealedHeader}; use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg}; /// Configured [`BlockEnv`] and [`CfgEnvWithHandlerCfg`] for a pending block. 
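The `transaction_count` doc comments above describe the pending-tag behavior: use the highest pooled nonce plus one when the sender already has transactions in the pool, otherwise fall back to the on-chain nonce. A minimal sketch of that selection logic under assumed inputs (the function and parameter names are hypothetical, not part of the trait):

```rust
/// Illustrative only: choose the nonce to report for the `pending` block tag.
/// `on_chain_nonce` is the account nonce from latest state; `pool_highest` is
/// the highest nonce currently queued for this sender in the local pool.
fn pending_nonce(on_chain_nonce: u64, pool_highest: Option<u64>) -> u64 {
    match pool_highest {
        // sender already has pooled transactions: next usable nonce is highest + 1
        Some(highest) => highest + 1,
        // nothing pooled: report the nonce from the latest canonical state
        None => on_chain_nonce,
    }
}
```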
diff --git a/crates/rpc/rpc-testing-util/src/trace.rs b/crates/rpc/rpc-testing-util/src/trace.rs index efb1f3674..097d582df 100644 --- a/crates/rpc/rpc-testing-util/src/trace.rs +++ b/crates/rpc/rpc-testing-util/src/trace.rs @@ -514,9 +514,9 @@ where #[cfg(test)] mod tests { use super::*; + use alloy_eips::BlockNumberOrTag; use alloy_rpc_types_trace::filter::TraceFilterMode; use jsonrpsee::http_client::HttpClientBuilder; - use reth_primitives::BlockNumberOrTag; const fn assert_is_stream(_: &St) {} diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index 6a73af69d..7e4c8fb82 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -1,4 +1,4 @@ -use alloy_eips::{eip2718::Encodable2718, BlockId}; +use alloy_eips::{eip2718::Encodable2718, BlockId, BlockNumberOrTag}; use alloy_primitives::{Address, Bytes, B256, U256}; use alloy_rlp::{Decodable, Encodable}; use alloy_rpc_types::{ @@ -19,7 +19,7 @@ use reth_evm::{ system_calls::SystemCaller, ConfigureEvmEnv, }; -use reth_primitives::{Block, BlockNumberOrTag, TransactionSignedEcRecovered}; +use reth_primitives::{Block, TransactionSignedEcRecovered}; use reth_provider::{ BlockReaderIdExt, ChainSpecProvider, HeaderProvider, StateProofProvider, StateProviderFactory, TransactionVariant, diff --git a/crates/rpc/rpc/src/engine.rs b/crates/rpc/rpc/src/engine.rs index 0ff90d399..ac4de7c74 100644 --- a/crates/rpc/rpc/src/engine.rs +++ b/crates/rpc/rpc/src/engine.rs @@ -1,4 +1,4 @@ -use alloy_eips::BlockId; +use alloy_eips::{BlockId, BlockNumberOrTag}; use alloy_primitives::{Address, Bytes, B256, U256, U64}; use alloy_rpc_types::{ state::StateOverride, BlockOverrides, EIP1186AccountProofResponse, Filter, Log, SyncStatus, @@ -6,7 +6,6 @@ use alloy_rpc_types::{ use alloy_rpc_types_eth::transaction::TransactionRequest; use alloy_serde::JsonStorageKey; use jsonrpsee::core::RpcResult as Result; -use reth_primitives::BlockNumberOrTag; use reth_rpc_api::{EngineEthApiServer, EthApiServer, EthFilterApiServer}; /// Re-export for convenience pub use reth_rpc_engine_api::EngineApi; diff --git a/crates/rpc/rpc/src/eth/core.rs b/crates/rpc/rpc/src/eth/core.rs index 98ac9e9f4..c491ca21d 100644 --- a/crates/rpc/rpc/src/eth/core.rs +++ b/crates/rpc/rpc/src/eth/core.rs @@ -3,10 +3,10 @@ use std::sync::Arc; +use alloy_eips::BlockNumberOrTag; use alloy_network::Ethereum; use alloy_primitives::U256; use derive_more::Deref; -use reth_primitives::BlockNumberOrTag; use reth_provider::{BlockReaderIdExt, CanonStateSubscriptions, ChainSpecProvider}; use reth_rpc_eth_api::{ helpers::{EthSigner, SpawnBlocking}, @@ -400,13 +400,14 @@ impl EthApiInner Date: Tue, 5 Nov 2024 13:00:16 +0100 Subject: [PATCH 420/425] chore: 1.1.1 (#12334) --- Cargo.lock | 238 ++++++++++++++++++++++++++--------------------------- Cargo.toml | 2 +- 2 files changed, 120 insertions(+), 120 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7c0c4f318..d3fcd59e7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2605,7 +2605,7 @@ dependencies = [ [[package]] name = "ef-tests" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -5368,7 +5368,7 @@ dependencies = [ [[package]] name = "op-reth" -version = "1.1.0" +version = "1.1.1" dependencies = [ "clap", "reth-cli-util", @@ -6296,7 +6296,7 @@ dependencies = [ [[package]] name = "reth" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6368,7 +6368,7 @@ dependencies = [ [[package]] name = "reth-auto-seal-consensus" -version = "1.1.0" +version = 
"1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -6399,7 +6399,7 @@ dependencies = [ [[package]] name = "reth-basic-payload-builder" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6425,7 +6425,7 @@ dependencies = [ [[package]] name = "reth-beacon-consensus" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-genesis", @@ -6477,7 +6477,7 @@ dependencies = [ [[package]] name = "reth-bench" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-json-rpc", @@ -6512,7 +6512,7 @@ dependencies = [ [[package]] name = "reth-blockchain-tree" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6550,7 +6550,7 @@ dependencies = [ [[package]] name = "reth-blockchain-tree-api" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -6563,7 +6563,7 @@ dependencies = [ [[package]] name = "reth-chain-state" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6592,7 +6592,7 @@ dependencies = [ [[package]] name = "reth-chainspec" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-chains", "alloy-consensus", @@ -6613,7 +6613,7 @@ dependencies = [ [[package]] name = "reth-cli" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-genesis", "clap", @@ -6626,7 +6626,7 @@ dependencies = [ [[package]] name = "reth-cli-commands" -version = "1.1.0" +version = "1.1.1" dependencies = [ "ahash", "alloy-eips", @@ -6692,7 +6692,7 @@ dependencies = [ [[package]] name = "reth-cli-runner" -version = "1.1.0" +version = "1.1.1" dependencies = [ "reth-tasks", "tokio", @@ -6701,7 +6701,7 @@ dependencies = [ [[package]] name = "reth-cli-util" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -6719,7 +6719,7 @@ dependencies = [ [[package]] name = "reth-codecs" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6741,7 +6741,7 @@ dependencies = [ [[package]] name = "reth-codecs-derive" -version = "1.1.0" +version = "1.1.1" dependencies = [ "convert_case", "proc-macro2", @@ -6752,7 +6752,7 @@ dependencies = [ [[package]] name = "reth-config" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "eyre", @@ -6768,7 +6768,7 @@ dependencies = [ [[package]] name = "reth-consensus" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -6779,7 +6779,7 @@ dependencies = [ [[package]] name = "reth-consensus-common" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6795,7 +6795,7 @@ dependencies = [ [[package]] name = "reth-consensus-debug-client" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -6818,7 +6818,7 @@ dependencies = [ [[package]] name = "reth-db" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "arbitrary", @@ -6858,7 +6858,7 @@ dependencies = [ [[package]] name = "reth-db-api" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-genesis", "alloy-primitives", @@ -6885,7 +6885,7 @@ dependencies = [ [[package]] name = "reth-db-common" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-genesis", @@ -6914,7 +6914,7 @@ dependencies = [ [[package]] name = "reth-db-models" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "arbitrary", @@ -6930,7 +6930,7 @@ dependencies = [ [[package]] name = 
"reth-discv4" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -6957,7 +6957,7 @@ dependencies = [ [[package]] name = "reth-discv5" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -6981,7 +6981,7 @@ dependencies = [ [[package]] name = "reth-dns-discovery" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-chains", "alloy-primitives", @@ -7009,7 +7009,7 @@ dependencies = [ [[package]] name = "reth-downloaders" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -7046,7 +7046,7 @@ dependencies = [ [[package]] name = "reth-e2e-test-utils" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7082,7 +7082,7 @@ dependencies = [ [[package]] name = "reth-ecies" -version = "1.1.0" +version = "1.1.1" dependencies = [ "aes", "alloy-primitives", @@ -7112,7 +7112,7 @@ dependencies = [ [[package]] name = "reth-engine-local" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "alloy-rpc-types-engine", @@ -7142,7 +7142,7 @@ dependencies = [ [[package]] name = "reth-engine-primitives" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "reth-execution-types", @@ -7154,7 +7154,7 @@ dependencies = [ [[package]] name = "reth-engine-service" -version = "1.1.0" +version = "1.1.1" dependencies = [ "futures", "pin-project", @@ -7182,7 +7182,7 @@ dependencies = [ [[package]] name = "reth-engine-tree" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -7230,7 +7230,7 @@ dependencies = [ [[package]] name = "reth-engine-util" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7262,7 +7262,7 @@ dependencies = [ [[package]] name = "reth-errors" -version = "1.1.0" +version = "1.1.1" dependencies = [ "reth-blockchain-tree-api", "reth-consensus", @@ -7274,7 +7274,7 @@ dependencies = [ [[package]] name = "reth-eth-wire" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -7309,7 +7309,7 @@ dependencies = [ [[package]] name = "reth-eth-wire-types" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-chains", "alloy-consensus", @@ -7332,7 +7332,7 @@ dependencies = [ [[package]] name = "reth-ethereum-cli" -version = "1.1.0" +version = "1.1.1" dependencies = [ "clap", "eyre", @@ -7343,7 +7343,7 @@ dependencies = [ [[package]] name = "reth-ethereum-consensus" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7357,7 +7357,7 @@ dependencies = [ [[package]] name = "reth-ethereum-engine-primitives" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -7376,7 +7376,7 @@ dependencies = [ [[package]] name = "reth-ethereum-forks" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-chains", "alloy-consensus", @@ -7396,7 +7396,7 @@ dependencies = [ [[package]] name = "reth-ethereum-payload-builder" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7422,7 +7422,7 @@ dependencies = [ [[package]] name = "reth-etl" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "rayon", @@ -7432,7 +7432,7 @@ dependencies = [ [[package]] name = "reth-evm" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7459,7 +7459,7 @@ dependencies = [ [[package]] name = "reth-evm-ethereum" -version = "1.1.0" +version = "1.1.1" 
dependencies = [ "alloy-consensus", "alloy-eips", @@ -7482,7 +7482,7 @@ dependencies = [ [[package]] name = "reth-execution-errors" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -7497,7 +7497,7 @@ dependencies = [ [[package]] name = "reth-execution-types" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -7514,7 +7514,7 @@ dependencies = [ [[package]] name = "reth-exex" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7558,7 +7558,7 @@ dependencies = [ [[package]] name = "reth-exex-test-utils" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "eyre", @@ -7591,7 +7591,7 @@ dependencies = [ [[package]] name = "reth-exex-types" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -7607,7 +7607,7 @@ dependencies = [ [[package]] name = "reth-fs-util" -version = "1.1.0" +version = "1.1.1" dependencies = [ "serde", "serde_json", @@ -7616,7 +7616,7 @@ dependencies = [ [[package]] name = "reth-invalid-block-hooks" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -7640,7 +7640,7 @@ dependencies = [ [[package]] name = "reth-ipc" -version = "1.1.0" +version = "1.1.1" dependencies = [ "async-trait", "bytes", @@ -7662,7 +7662,7 @@ dependencies = [ [[package]] name = "reth-libmdbx" -version = "1.1.0" +version = "1.1.1" dependencies = [ "bitflags 2.6.0", "byteorder", @@ -7683,7 +7683,7 @@ dependencies = [ [[package]] name = "reth-mdbx-sys" -version = "1.1.0" +version = "1.1.1" dependencies = [ "bindgen", "cc", @@ -7691,7 +7691,7 @@ dependencies = [ [[package]] name = "reth-metrics" -version = "1.1.0" +version = "1.1.1" dependencies = [ "futures", "metrics", @@ -7702,14 +7702,14 @@ dependencies = [ [[package]] name = "reth-net-banlist" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", ] [[package]] name = "reth-net-nat" -version = "1.1.0" +version = "1.1.1" dependencies = [ "futures-util", "if-addrs", @@ -7723,7 +7723,7 @@ dependencies = [ [[package]] name = "reth-network" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -7783,7 +7783,7 @@ dependencies = [ [[package]] name = "reth-network-api" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "alloy-rpc-types-admin", @@ -7805,7 +7805,7 @@ dependencies = [ [[package]] name = "reth-network-p2p" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -7825,7 +7825,7 @@ dependencies = [ [[package]] name = "reth-network-peers" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -7841,7 +7841,7 @@ dependencies = [ [[package]] name = "reth-network-types" -version = "1.1.0" +version = "1.1.1" dependencies = [ "humantime-serde", "reth-ethereum-forks", @@ -7854,7 +7854,7 @@ dependencies = [ [[package]] name = "reth-nippy-jar" -version = "1.1.0" +version = "1.1.1" dependencies = [ "anyhow", "bincode", @@ -7872,7 +7872,7 @@ dependencies = [ [[package]] name = "reth-node-api" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-rpc-types-engine", "eyre", @@ -7893,7 +7893,7 @@ dependencies = [ [[package]] name = "reth-node-builder" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "alloy-rpc-types", @@ -7958,7 +7958,7 @@ dependencies = [ [[package]] name = "reth-node-core" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", 
"alloy-eips", @@ -8007,7 +8007,7 @@ dependencies = [ [[package]] name = "reth-node-ethereum" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-contract", @@ -8053,7 +8053,7 @@ dependencies = [ [[package]] name = "reth-node-events" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8076,7 +8076,7 @@ dependencies = [ [[package]] name = "reth-node-metrics" -version = "1.1.0" +version = "1.1.1" dependencies = [ "eyre", "http", @@ -8101,7 +8101,7 @@ dependencies = [ [[package]] name = "reth-node-types" -version = "1.1.0" +version = "1.1.1" dependencies = [ "reth-chainspec", "reth-db-api", @@ -8113,7 +8113,7 @@ dependencies = [ [[package]] name = "reth-optimism-chainspec" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-chains", "alloy-consensus", @@ -8133,7 +8133,7 @@ dependencies = [ [[package]] name = "reth-optimism-cli" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -8179,7 +8179,7 @@ dependencies = [ [[package]] name = "reth-optimism-consensus" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-primitives", @@ -8195,7 +8195,7 @@ dependencies = [ [[package]] name = "reth-optimism-evm" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8222,7 +8222,7 @@ dependencies = [ [[package]] name = "reth-optimism-forks" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-chains", "alloy-primitives", @@ -8233,7 +8233,7 @@ dependencies = [ [[package]] name = "reth-optimism-node" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-genesis", @@ -8278,7 +8278,7 @@ dependencies = [ [[package]] name = "reth-optimism-payload-builder" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8312,7 +8312,7 @@ dependencies = [ [[package]] name = "reth-optimism-primitives" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-primitives", @@ -8321,7 +8321,7 @@ dependencies = [ [[package]] name = "reth-optimism-rpc" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8361,7 +8361,7 @@ dependencies = [ [[package]] name = "reth-optimism-storage" -version = "1.1.0" +version = "1.1.1" dependencies = [ "reth-codecs", "reth-db-api", @@ -8372,7 +8372,7 @@ dependencies = [ [[package]] name = "reth-payload-builder" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "alloy-rpc-types", @@ -8393,7 +8393,7 @@ dependencies = [ [[package]] name = "reth-payload-primitives" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -8415,7 +8415,7 @@ dependencies = [ [[package]] name = "reth-payload-validator" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-rpc-types", "reth-chainspec", @@ -8425,7 +8425,7 @@ dependencies = [ [[package]] name = "reth-primitives" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8469,7 +8469,7 @@ dependencies = [ [[package]] name = "reth-primitives-traits" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8497,7 +8497,7 @@ dependencies = [ [[package]] name = "reth-provider" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8546,7 +8546,7 @@ dependencies = [ [[package]] name = "reth-prune" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", 
"assert_matches", @@ -8575,7 +8575,7 @@ dependencies = [ [[package]] name = "reth-prune-types" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "arbitrary", @@ -8595,7 +8595,7 @@ dependencies = [ [[package]] name = "reth-revm" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8612,7 +8612,7 @@ dependencies = [ [[package]] name = "reth-rpc" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-dyn-abi", @@ -8683,7 +8683,7 @@ dependencies = [ [[package]] name = "reth-rpc-api" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-json-rpc", @@ -8709,7 +8709,7 @@ dependencies = [ [[package]] name = "reth-rpc-api-testing-util" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -8729,7 +8729,7 @@ dependencies = [ [[package]] name = "reth-rpc-builder" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -8779,7 +8779,7 @@ dependencies = [ [[package]] name = "reth-rpc-engine-api" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -8815,7 +8815,7 @@ dependencies = [ [[package]] name = "reth-rpc-eth-api" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-dyn-abi", @@ -8857,7 +8857,7 @@ dependencies = [ [[package]] name = "reth-rpc-eth-types" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8900,7 +8900,7 @@ dependencies = [ [[package]] name = "reth-rpc-layer" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-rpc-types-engine", "http", @@ -8915,7 +8915,7 @@ dependencies = [ [[package]] name = "reth-rpc-server-types" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -8930,7 +8930,7 @@ dependencies = [ [[package]] name = "reth-rpc-types-compat" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -8947,7 +8947,7 @@ dependencies = [ [[package]] name = "reth-stages" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -8996,7 +8996,7 @@ dependencies = [ [[package]] name = "reth-stages-api" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "aquamarine", @@ -9024,7 +9024,7 @@ dependencies = [ [[package]] name = "reth-stages-types" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "arbitrary", @@ -9041,7 +9041,7 @@ dependencies = [ [[package]] name = "reth-static-file" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "assert_matches", @@ -9063,7 +9063,7 @@ dependencies = [ [[package]] name = "reth-static-file-types" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "clap", @@ -9074,7 +9074,7 @@ dependencies = [ [[package]] name = "reth-storage-api" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -9093,7 +9093,7 @@ dependencies = [ [[package]] name = "reth-storage-errors" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-eips", "alloy-primitives", @@ -9105,7 +9105,7 @@ dependencies = [ [[package]] name = "reth-tasks" -version = "1.1.0" +version = "1.1.1" dependencies = [ "auto_impl", "dyn-clone", @@ -9122,7 +9122,7 @@ dependencies = [ [[package]] name = "reth-testing-utils" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -9135,7 +9135,7 @@ dependencies = [ [[package]] name = 
"reth-tokio-util" -version = "1.1.0" +version = "1.1.1" dependencies = [ "tokio", "tokio-stream", @@ -9144,7 +9144,7 @@ dependencies = [ [[package]] name = "reth-tracing" -version = "1.1.0" +version = "1.1.1" dependencies = [ "clap", "eyre", @@ -9158,7 +9158,7 @@ dependencies = [ [[package]] name = "reth-transaction-pool" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-eips", @@ -9203,7 +9203,7 @@ dependencies = [ [[package]] name = "reth-trie" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-primitives", @@ -9232,7 +9232,7 @@ dependencies = [ [[package]] name = "reth-trie-common" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-genesis", @@ -9256,7 +9256,7 @@ dependencies = [ [[package]] name = "reth-trie-db" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-consensus", "alloy-primitives", @@ -9285,7 +9285,7 @@ dependencies = [ [[package]] name = "reth-trie-parallel" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -9311,7 +9311,7 @@ dependencies = [ [[package]] name = "reth-trie-sparse" -version = "1.1.0" +version = "1.1.1" dependencies = [ "alloy-primitives", "alloy-rlp", diff --git a/Cargo.toml b/Cargo.toml index 8c5cf7b0c..10d39b11b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [workspace.package] -version = "1.1.0" +version = "1.1.1" edition = "2021" rust-version = "1.82" license = "MIT OR Apache-2.0" From 15c230bac20e2b1b3532c8b0d470e815fbc0cc22 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Tue, 5 Nov 2024 13:17:28 +0100 Subject: [PATCH 421/425] primitives: rm alloy `BlobTransactionValidationError` reexport (#12311) --- crates/primitives/src/lib.rs | 3 --- crates/primitives/src/transaction/mod.rs | 2 -- crates/primitives/src/transaction/sidecar.rs | 5 +---- crates/transaction-pool/src/error.rs | 3 ++- crates/transaction-pool/src/test_utils/mock.rs | 12 ++++++++---- crates/transaction-pool/src/traits.rs | 10 +++++++--- 6 files changed, 18 insertions(+), 17 deletions(-) diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 9b121a56f..09610bf74 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -51,9 +51,6 @@ pub use transaction::{ PooledTransactionsElementEcRecovered, }; -#[cfg(feature = "c-kzg")] -pub use transaction::BlobTransactionValidationError; - pub use transaction::{ util::secp256k1::{public_key_to_address, recover_signer_unchecked, sign_message}, InvalidTransactionError, Transaction, TransactionMeta, TransactionSigned, diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index adbd8f0d0..aa6aaa2d8 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -31,8 +31,6 @@ pub use error::{ }; pub use meta::TransactionMeta; pub use pooled::{PooledTransactionsElement, PooledTransactionsElementEcRecovered}; -#[cfg(feature = "c-kzg")] -pub use sidecar::BlobTransactionValidationError; pub use sidecar::{BlobTransaction, BlobTransactionSidecar}; pub use compat::FillTxEnv; diff --git a/crates/primitives/src/transaction/sidecar.rs b/crates/primitives/src/transaction/sidecar.rs index aa473ef8a..e7ff7a9b5 100644 --- a/crates/primitives/src/transaction/sidecar.rs +++ b/crates/primitives/src/transaction/sidecar.rs @@ -9,9 +9,6 @@ use serde::{Deserialize, Serialize}; #[doc(inline)] pub use alloy_eips::eip4844::BlobTransactionSidecar; 
-#[cfg(feature = "c-kzg")] -pub use alloy_eips::eip4844::BlobTransactionValidationError; - /// A response to `GetPooledTransactions` that includes blob data, their commitments, and their /// corresponding proofs. /// @@ -58,7 +55,7 @@ impl BlobTransaction { pub fn validate( &self, proof_settings: &c_kzg::KzgSettings, - ) -> Result<(), BlobTransactionValidationError> { + ) -> Result<(), alloy_eips::eip4844::BlobTransactionValidationError> { self.transaction.validate_blob(proof_settings) } diff --git a/crates/transaction-pool/src/error.rs b/crates/transaction-pool/src/error.rs index a4766a89d..f71bf0188 100644 --- a/crates/transaction-pool/src/error.rs +++ b/crates/transaction-pool/src/error.rs @@ -1,7 +1,8 @@ //! Transaction pool errors +use alloy_eips::eip4844::BlobTransactionValidationError; use alloy_primitives::{Address, TxHash, U256}; -use reth_primitives::{BlobTransactionValidationError, InvalidTransactionError}; +use reth_primitives::InvalidTransactionError; /// Transaction pool result type. pub type PoolResult = Result; diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index c6143ff16..a272e8d00 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -11,7 +11,11 @@ use alloy_consensus::{ constants::{EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, LEGACY_TX_TYPE_ID}, TxEip1559, TxEip2930, TxEip4844, TxLegacy, }; -use alloy_eips::{eip1559::MIN_PROTOCOL_BASE_FEE, eip2930::AccessList, eip4844::DATA_GAS_PER_BLOB}; +use alloy_eips::{ + eip1559::MIN_PROTOCOL_BASE_FEE, + eip2930::AccessList, + eip4844::{BlobTransactionValidationError, DATA_GAS_PER_BLOB}, +}; use alloy_primitives::{Address, Bytes, ChainId, Signature, TxHash, TxKind, B256, U256}; use paste::paste; use rand::{ @@ -20,8 +24,8 @@ use rand::{ }; use reth_primitives::{ transaction::TryFromRecoveredTransactionError, BlobTransactionSidecar, - BlobTransactionValidationError, PooledTransactionsElementEcRecovered, Transaction, - TransactionSigned, TransactionSignedEcRecovered, TxType, + PooledTransactionsElementEcRecovered, Transaction, TransactionSigned, + TransactionSignedEcRecovered, TxType, }; use std::{ops::Range, sync::Arc, time::Instant, vec::IntoIter}; @@ -761,7 +765,7 @@ impl EthPoolTransaction for MockTransaction { &self, _blob: &BlobTransactionSidecar, _settings: &reth_primitives::kzg::KzgSettings, - ) -> Result<(), reth_primitives::BlobTransactionValidationError> { + ) -> Result<(), alloy_eips::eip4844::BlobTransactionValidationError> { match &self { Self::Eip4844 { .. 
} => Ok(()), _ => Err(BlobTransactionValidationError::NotBlobTransaction(self.tx_type())), diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index 709b43e71..512e3e31f 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -11,15 +11,19 @@ use alloy_consensus::{ constants::{EIP1559_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID}, Transaction as _, }; -use alloy_eips::{eip2718::Encodable2718, eip2930::AccessList, eip4844::BlobAndProofV1}; +use alloy_eips::{ + eip2718::Encodable2718, + eip2930::AccessList, + eip4844::{BlobAndProofV1, BlobTransactionValidationError}, +}; use alloy_primitives::{Address, TxHash, TxKind, B256, U256}; use futures_util::{ready, Stream}; use reth_eth_wire_types::HandleMempoolData; use reth_execution_types::ChangedAccount; use reth_primitives::{ kzg::KzgSettings, transaction::TryFromRecoveredTransactionError, BlobTransactionSidecar, - BlobTransactionValidationError, PooledTransactionsElement, - PooledTransactionsElementEcRecovered, SealedBlock, Transaction, TransactionSignedEcRecovered, + PooledTransactionsElement, PooledTransactionsElementEcRecovered, SealedBlock, Transaction, + TransactionSignedEcRecovered, }; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; From f170052e46a93dc824826e58ee1ee398388523b6 Mon Sep 17 00:00:00 2001 From: yutianwu Date: Fri, 15 Nov 2024 10:35:16 +0800 Subject: [PATCH 422/425] fix: fix conflicts and ci issues Co-authored-by: j75689 --- .github/workflows/bench.yml | 2 +- .github/workflows/build-check.yml | 2 +- .github/workflows/compact.yml | 47 - .github/workflows/dependencies.yml | 2 +- .github/workflows/docker.yml | 2 +- .github/workflows/hive.yml | 2 +- .github/workflows/integration.yml | 2 +- .github/workflows/kurtosis.yml | 2 +- .github/workflows/release.yml | 2 +- .github/workflows/stage.yml | 2 +- .github/workflows/unit.yml | 2 +- .github/workflows/windows.yml | 2 +- Cargo.lock | 938 +++++++++++------- Cargo.toml | 8 +- bin/reth/src/commands/debug_cmd/merkle.rs | 9 +- bsc.Dockerfile | 2 +- crates/blockchain-tree/Cargo.toml | 3 +- crates/blockchain-tree/src/chain.rs | 2 +- crates/bsc/bin/src/main.rs | 4 +- crates/bsc/chainspec/Cargo.toml | 9 +- crates/bsc/chainspec/src/dev.rs | 2 +- crates/bsc/chainspec/src/lib.rs | 4 +- crates/bsc/cli/Cargo.toml | 5 +- crates/bsc/consensus/Cargo.toml | 1 + crates/bsc/consensus/src/constants.rs | 2 +- crates/bsc/consensus/src/lib.rs | 3 +- crates/bsc/consensus/src/trace_helper.rs | 75 +- crates/bsc/consensus/src/validation.rs | 2 +- crates/bsc/engine/Cargo.toml | 2 + crates/bsc/engine/src/lib.rs | 28 +- crates/bsc/engine/src/task.rs | 30 +- crates/bsc/evm/Cargo.toml | 6 +- crates/bsc/evm/src/execute.rs | 16 +- crates/bsc/evm/src/lib.rs | 16 +- crates/bsc/hardforks/Cargo.toml | 12 +- crates/bsc/node/Cargo.toml | 7 +- crates/bsc/node/src/node.rs | 72 +- crates/bsc/payload/Cargo.toml | 5 +- crates/bsc/payload/src/builder.rs | 58 +- crates/bsc/payload/src/payload.rs | 32 +- crates/bsc/primitives/Cargo.toml | 1 + .../primitives/src/system_contracts/mod.rs | 1 + crates/chain-state/Cargo.toml | 4 +- crates/chainspec/src/api.rs | 4 +- crates/config/src/config.rs | 20 +- crates/consensus/auto-seal/Cargo.toml | 3 +- crates/consensus/auto-seal/src/lib.rs | 10 +- crates/consensus/beacon/Cargo.toml | 15 +- crates/consensus/common/src/validation.rs | 7 +- crates/e2e-test-utils/Cargo.toml | 5 +- crates/engine/local/Cargo.toml | 7 +- crates/engine/local/src/service.rs | 8 +- crates/engine/service/src/service.rs | 
1 + crates/ethereum/evm/src/execute.rs | 26 +- crates/ethereum/node/src/node.rs | 4 +- crates/ethereum/payload/src/lib.rs | 7 +- .../execution-types/src/execution_outcome.rs | 4 +- crates/evm/src/either.rs | 10 +- crates/evm/src/execute.rs | 103 +- crates/evm/src/noop.rs | 6 +- crates/evm/src/test_utils.rs | 6 +- crates/exex/exex/src/backfill/job.rs | 9 +- crates/exex/exex/src/backfill/test_utils.rs | 11 +- crates/exex/test-utils/src/lib.rs | 2 +- crates/node/api/src/node.rs | 4 +- crates/node/builder/src/components/builder.rs | 48 +- crates/node/builder/src/components/mod.rs | 2 + crates/node/builder/src/components/parlia.rs | 27 + crates/node/builder/src/launch/engine.rs | 8 +- crates/node/builder/src/launch/mod.rs | 4 +- crates/node/builder/src/rpc.rs | 6 +- crates/node/core/Cargo.toml | 6 +- crates/node/core/src/args/payload_builder.rs | 2 +- crates/node/core/src/node_config.rs | 3 + crates/optimism/cli/Cargo.toml | 3 +- crates/optimism/consensus/Cargo.toml | 5 +- crates/optimism/evm/Cargo.toml | 3 +- crates/optimism/evm/src/execute.rs | 424 +------- crates/optimism/evm/src/lib.rs | 38 +- crates/optimism/hardforks/src/hardfork.rs | 8 - crates/optimism/hardforks/src/lib.rs | 13 +- crates/optimism/node/Cargo.toml | 22 +- crates/optimism/node/src/args.rs | 11 +- crates/optimism/node/src/node.rs | 38 +- crates/optimism/node/src/txpool.rs | 4 +- crates/optimism/payload/Cargo.toml | 20 +- crates/optimism/payload/src/builder.rs | 12 +- crates/optimism/rpc/Cargo.toml | 3 +- crates/optimism/rpc/src/eth/receipt.rs | 4 - crates/primitives-traits/src/blob_sidecar.rs | 6 +- crates/primitives-traits/src/constants/mod.rs | 74 -- crates/primitives/Cargo.toml | 14 +- crates/primitives/src/block.rs | 40 +- crates/primitives/src/lib.rs | 6 +- crates/rpc/rpc-builder/src/lib.rs | 166 +++- crates/rpc/rpc-eth-api/src/helpers/block.rs | 5 +- crates/rpc/rpc-eth-api/src/helpers/call.rs | 10 +- .../rpc-eth-api/src/helpers/pending_block.rs | 7 +- crates/rpc/rpc-eth-api/src/helpers/trace.rs | 15 +- .../rpc-eth-api/src/helpers/transaction.rs | 12 +- crates/rpc/rpc-eth-types/src/builder/ctx.rs | 1 - crates/rpc/rpc-eth-types/src/cache/mod.rs | 15 +- crates/rpc/rpc/Cargo.toml | 7 +- crates/rpc/rpc/src/debug.rs | 28 +- crates/rpc/rpc/src/eth/core.rs | 14 +- crates/rpc/rpc/src/eth/helpers/call.rs | 30 +- crates/rpc/rpc/src/validation.rs | 5 +- crates/stages/stages/src/stages/execution.rs | 2 +- crates/static-file/types/src/lib.rs | 24 +- crates/storage/db-api/src/models/mod.rs | 3 +- crates/storage/db/src/tables/mod.rs | 4 +- crates/storage/provider/Cargo.toml | 3 +- .../src/providers/blockchain_provider.rs | 15 +- .../provider/src/providers/consistent.rs | 20 +- .../provider/src/providers/database/mod.rs | 11 +- .../src/providers/database/provider.rs | 110 +- crates/storage/provider/src/providers/mod.rs | 12 +- .../src/providers/static_file/manager.rs | 5 +- .../provider/src/providers/static_file/mod.rs | 2 +- .../storage/provider/src/test_utils/mock.rs | 4 +- .../storage/provider/src/test_utils/noop.rs | 12 +- crates/storage/storage-api/src/block.rs | 4 +- examples/custom-beacon-withdrawals/Cargo.toml | 10 +- .../custom-beacon-withdrawals/src/main.rs | 6 +- examples/custom-engine-types/src/main.rs | 2 +- op.Dockerfile | 2 +- 126 files changed, 1559 insertions(+), 1561 deletions(-) delete mode 100644 .github/workflows/compact.yml create mode 100644 crates/node/builder/src/components/parlia.rs diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index ab031da32..fc7b794ee 100644 --- 
a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -10,7 +10,7 @@ env: CARGO_TERM_COLOR: always BASELINE: base IAI_CALLGRIND_RUNNER: iai-callgrind-runner - TOOL_CHAIN: "1.81" + TOOL_CHAIN: "1.82" concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} diff --git a/.github/workflows/build-check.yml b/.github/workflows/build-check.yml index 52be427ff..0f80292ba 100644 --- a/.github/workflows/build-check.yml +++ b/.github/workflows/build-check.yml @@ -6,7 +6,7 @@ on: env: CARGO_TERM_COLOR: always - TOOL_CHAIN: "1.81" + TOOL_CHAIN: "1.82" jobs: extract-version: diff --git a/.github/workflows/compact.yml b/.github/workflows/compact.yml deleted file mode 100644 index 484b27c82..000000000 --- a/.github/workflows/compact.yml +++ /dev/null @@ -1,47 +0,0 @@ -# Ensures that `Compact` codec changes are backwards compatible. -# -# 1) checkout `main` -# 2) randomly generate and serialize to disk many different type vectors with `Compact` (eg. Header, Transaction, etc) -# 3) checkout `pr` -# 4) deserialize previously generated test vectors - -on: - - pull_request: - merge_group: - push: - branches: [main] - -env: - CARGO_TERM_COLOR: always - -name: compact-codec -jobs: - compact-codec: - runs-on: - group: Reth - strategy: - matrix: - bin: - - cargo run --bin reth --features "dev" - - cargo run --bin op-reth --features "optimism dev" --manifest-path crates/optimism/bin/Cargo.toml - steps: - - uses: dtolnay/rust-toolchain@stable - - uses: Swatinem/rust-cache@v2 - with: - cache-on-failure: true - - name: Checkout base - uses: actions/checkout@v4 - with: - ref: ${{ github.base_ref || 'main' }} - # On `main` branch, generates test vectors and serializes them to disk using `Compact`. - - name: Generate compact vectors - run: | - ${{ matrix.bin }} -- test-vectors compact --write - - name: Checkout PR - uses: actions/checkout@v4 - with: - clean: false - # On incoming merge try to read and decode previously generated vectors with `Compact` - - name: Read vectors - run: ${{ matrix.bin }} -- test-vectors compact --read diff --git a/.github/workflows/dependencies.yml b/.github/workflows/dependencies.yml index da3574329..cfe38de4e 100644 --- a/.github/workflows/dependencies.yml +++ b/.github/workflows/dependencies.yml @@ -10,7 +10,7 @@ on: # Needed so we can run it manually env: - TOOL_CHAIN: "1.81" + TOOL_CHAIN: "1.82" GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} BRANCH: cargo-update TITLE: "chore(deps): weekly `cargo update`" diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index bbe84b494..18a4ec664 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -18,7 +18,7 @@ env: OP_DOCKER_IMAGE_NAME: ghcr.io/${{ github.repository_owner }}/op-reth BSC_DOCKER_IMAGE_NAME: ghcr.io/${{ github.repository_owner }}/bsc-reth DOCKER_USERNAME: ${{ github.actor }} - TOOL_CHAIN: "1.81" + TOOL_CHAIN: "1.82" jobs: build: diff --git a/.github/workflows/hive.yml b/.github/workflows/hive.yml index 419d1b6a8..46b358071 100644 --- a/.github/workflows/hive.yml +++ b/.github/workflows/hive.yml @@ -10,7 +10,7 @@ on: env: CARGO_TERM_COLOR: always - TOOL_CHAIN: "1.81" + TOOL_CHAIN: "1.82" concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 883944a36..5d2b6931f 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -12,7 +12,7 @@ on: env: CARGO_TERM_COLOR: always SEED: rustethereumethereumrust - 
TOOL_CHAIN: "1.81" + TOOL_CHAIN: "1.82" concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} diff --git a/.github/workflows/kurtosis.yml b/.github/workflows/kurtosis.yml index 2e33b6d62..d2f51225c 100644 --- a/.github/workflows/kurtosis.yml +++ b/.github/workflows/kurtosis.yml @@ -10,7 +10,7 @@ on: env: CARGO_TERM_COLOR: always - TOOL_CHAIN: "1.81" + TOOL_CHAIN: "1.82" concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 32262d034..14b79297c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -13,7 +13,7 @@ env: OP_IMAGE_NAME: ${{ github.repository_owner }}/op-reth IMAGE_NAME: ${{ github.repository_owner }}/bsc-reth CARGO_TERM_COLOR: always - TOOL_CHAIN: "1.81" + TOOL_CHAIN: "1.82" jobs: extract-version: diff --git a/.github/workflows/stage.yml b/.github/workflows/stage.yml index c35e00144..2700ff1cb 100644 --- a/.github/workflows/stage.yml +++ b/.github/workflows/stage.yml @@ -12,7 +12,7 @@ env: CARGO_TERM_COLOR: always FROM_BLOCK: 0 TO_BLOCK: 50000 - TOOL_CHAIN: "1.81" + TOOL_CHAIN: "1.82" concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} diff --git a/.github/workflows/unit.yml b/.github/workflows/unit.yml index 499047e48..6da824815 100644 --- a/.github/workflows/unit.yml +++ b/.github/workflows/unit.yml @@ -12,7 +12,7 @@ on: env: CARGO_TERM_COLOR: always SEED: rustethereumethereumrust - TOOL_CHAIN: "1.81" + TOOL_CHAIN: "1.82" concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index 6850ace92..a25c4c9d4 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -10,7 +10,7 @@ on: merge_group: env: - TOOL_CHAIN: "1.81" + TOOL_CHAIN: "1.82" jobs: check-reth: diff --git a/Cargo.lock b/Cargo.lock index b5b590e17..658d7b869 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -111,8 +111,8 @@ dependencies = [ [[package]] name = "alloy-consensus" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-eips", "alloy-primitives", @@ -126,6 +126,26 @@ dependencies = [ "serde_with", ] +[[package]] +name = "alloy-contract" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460ab80ce4bda1c80bcf96fe7460520476f2c7b734581c6567fac2708e2a60ef" +dependencies = [ + "alloy-dyn-abi", + "alloy-json-abi", + "alloy-network", + "alloy-network-primitives", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-types-eth", + "alloy-sol-types", + "alloy-transport", + "futures", + "futures-util", + "thiserror", +] + [[package]] name = "alloy-dyn-abi" version = "0.8.5" @@ -159,13 +179,14 @@ dependencies = [ [[package]] name = "alloy-eip7702" -version = "0.1.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea59dc42102bc9a1905dc57901edc6dd48b9f38115df86c7d252acba70d71d04" +checksum = "64ffc577390ce50234e02d841214b3dc0bea6aaaae8e04bbf3cb82e9a45da9eb" dependencies = [ "alloy-primitives", "alloy-rlp", "arbitrary", + "derive_more 1.0.0", "k256", "rand 0.8.5", "serde", @@ -174,8 +195,8 @@ dependencies = [ [[package]] name = 
"alloy-eips" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-eip2930", "alloy-eip7702", @@ -185,6 +206,8 @@ dependencies = [ "arbitrary", "c-kzg", "derive_more 1.0.0", + "ethereum_ssz", + "ethereum_ssz_derive", "once_cell", "serde", "sha2 0.10.8", @@ -192,9 +215,9 @@ dependencies = [ [[package]] name = "alloy-genesis" -version = "0.4.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8429cf4554eed9b40feec7f4451113e76596086447550275e3def933faf47ce3" +checksum = "dde15e14944a88bd6a57d325e9a49b75558746fe16aaccc79713ae50a6a9574c" dependencies = [ "alloy-primitives", "alloy-serde", @@ -203,8 +226,8 @@ dependencies = [ [[package]] name = "alloy-genesis" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-primitives", "alloy-serde", @@ -225,8 +248,8 @@ dependencies = [ [[package]] name = "alloy-json-rpc" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-primitives", "alloy-sol-types", @@ -238,8 +261,8 @@ dependencies = [ [[package]] name = "alloy-network" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-consensus", "alloy-eips", @@ -258,8 +281,8 @@ dependencies = [ [[package]] name = "alloy-network-primitives" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-consensus", "alloy-eips", @@ -270,11 +293,11 @@ dependencies = [ [[package]] name = "alloy-node-bindings" -version = "0.4.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f1334a738aa1710cb8227441b3fcc319202ce78e967ef37406940242df4a454" +checksum = "27444ea67d360508753022807cdd0b49a95c878924c9c5f8f32668b7d7768245" dependencies = [ - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "k256", "rand 0.8.5", @@ -287,9 +310,9 @@ dependencies = [ [[package]] name = "alloy-primitives" -version = "0.8.7" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb848c43f6b06ae3de2e4a67496cbbabd78ae87db0f1248934f15d76192c6a" +checksum = 
"fd58d377699e6cfeab52c4a9d28bdc4ef37e2bd235ff2db525071fe37a2e9af5" dependencies = [ "alloy-rlp", "arbitrary", @@ -302,7 +325,7 @@ dependencies = [ "getrandom 0.2.15", "hashbrown 0.15.0", "hex-literal", - "indexmap 2.5.0", + "indexmap 2.6.0", "itoa", "k256", "keccak-asm", @@ -319,8 +342,8 @@ dependencies = [ [[package]] name = "alloy-provider" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-chains", "alloy-consensus", @@ -329,14 +352,14 @@ dependencies = [ "alloy-network", "alloy-network-primitives", "alloy-primitives", - "alloy-pubsub 0.4.2 (git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36)", + "alloy-pubsub 0.5.4 (git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444)", "alloy-rpc-client", - "alloy-rpc-types-admin 0.4.2 (git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36)", + "alloy-rpc-types-admin 0.5.4 (git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444)", "alloy-rpc-types-engine", "alloy-rpc-types-eth", "alloy-transport", "alloy-transport-http", - "alloy-transport-ws 0.4.2 (git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36)", + "alloy-transport-ws 0.5.4 (git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444)", "async-stream", "async-trait", "auto_impl", @@ -344,21 +367,24 @@ dependencies = [ "futures", "futures-utils-wasm", "lru", + "parking_lot", "pin-project", "reqwest 0.12.8", + "schnellru", "serde", "serde_json", "thiserror", "tokio", "tracing", "url", + "wasmtimer", ] [[package]] name = "alloy-pubsub" -version = "0.4.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f32cef487122ae75c91eb50154c70801d71fabdb976fec6c49e0af5e6486ab15" +checksum = "96ba46eb69ddf7a9925b81f15229cb74658e6eebe5dd30a5b74e2cd040380573" dependencies = [ "alloy-json-rpc", "alloy-primitives", @@ -375,8 +401,8 @@ dependencies = [ [[package]] name = "alloy-pubsub" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-json-rpc", "alloy-primitives", @@ -415,15 +441,15 @@ dependencies = [ [[package]] name = "alloy-rpc-client" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-json-rpc", "alloy-primitives", - "alloy-pubsub 0.4.2 (git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36)", + "alloy-pubsub 0.5.4 (git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444)", "alloy-transport", "alloy-transport-http", - "alloy-transport-ws 0.4.2 (git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36)", + "alloy-transport-ws 0.5.4 
(git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444)", "futures", "pin-project", "reqwest 0.12.8", @@ -434,13 +460,14 @@ dependencies = [ "tower 0.5.1", "tracing", "url", + "wasmtimer", ] [[package]] name = "alloy-rpc-types" -version = "0.4.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ffc534b7919e18f35e3aa1f507b6f3d9d92ec298463a9f6beaac112809d8d06" +checksum = "eea9bf1abdd506f985a53533f5ac01296bcd6102c5e139bbc5d40bc468d2c916" dependencies = [ "alloy-primitives", "alloy-rpc-types-engine", @@ -451,11 +478,11 @@ dependencies = [ [[package]] name = "alloy-rpc-types-admin" -version = "0.4.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb520ed46cc5b7d8c014a73fdd77b6a310383a2a5c0a5ae3c9b8055881f062b7" +checksum = "ea02c25541fb19eaac4278aa5c41d2d7e0245898887e54a74bfc0f3103e99415" dependencies = [ - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "serde", "serde_json", @@ -463,10 +490,10 @@ dependencies = [ [[package]] name = "alloy-rpc-types-admin" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ - "alloy-genesis 0.4.2 (git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36)", + "alloy-genesis 0.5.4 (git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444)", "alloy-primitives", "serde", "serde_json", @@ -474,9 +501,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-anvil" -version = "0.4.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d780adaa5d95b07ad92006b2feb68ecfa7e2015f7d5976ceaac4c906c73ebd07" +checksum = "2382fc63fb0cf3e02818d547b80cb66cc49a31f8803d0c328402b2008bc13650" dependencies = [ "alloy-primitives", "alloy-serde", @@ -485,9 +512,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-beacon" -version = "0.4.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a8dc5980fe30203d698627cddb5f0cedc57f900c8b5e1229c8b9448e37acb4a" +checksum = "45357a642081c8ce235c0ad990c4e9279f5f18a723545076b38cfcc05cc25234" dependencies = [ "alloy-eips", "alloy-primitives", @@ -499,9 +526,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-debug" -version = "0.4.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59d8f8c5bfb160081a772f1f68eb9a37e8929c4ef74e5d01f5b78c2b645a5c5e" +checksum = "a5afe3ab1038f90faf56304aa0adf1e6a8c9844615d8f83967f932f3a70390b1" dependencies = [ "alloy-primitives", "serde", @@ -509,8 +536,8 @@ dependencies = [ [[package]] name = "alloy-rpc-types-engine" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-consensus", "alloy-eips", @@ -518,6 +545,8 @@ dependencies = [ "alloy-rlp", "alloy-serde", "derive_more 1.0.0", + "ethereum_ssz", + 
"ethereum_ssz_derive", "jsonrpsee-types", "jsonwebtoken", "rand 0.8.5", @@ -527,8 +556,8 @@ dependencies = [ [[package]] name = "alloy-rpc-types-eth" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-consensus", "alloy-eips", @@ -537,6 +566,7 @@ dependencies = [ "alloy-rlp", "alloy-serde", "alloy-sol-types", + "arbitrary", "derive_more 1.0.0", "itertools 0.13.0", "jsonrpsee-types", @@ -546,9 +576,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-mev" -version = "0.4.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cec23ce56c869eec5f6b6fd6a8a92b5aa0cfaf8d7be3a96502e537554dc7430" +checksum = "3246948dfa5f5060a9abe04233d741ea656ef076b12958f3242416ce9f375058" dependencies = [ "alloy-eips", "alloy-primitives", @@ -559,9 +589,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-trace" -version = "0.4.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "017cad3e5793c5613588c1f9732bcbad77e820ba7d0feaba3527749f856fdbc5" +checksum = "4e5fb6c5c401321f802f69dcdb95b932f30f8158f6798793f914baac5995628e" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -573,9 +603,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-txpool" -version = "0.4.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b230e321c416be7f50530159392b4c41a45596d40d97e185575bcd0b545e521" +checksum = "9ad066b49c3b1b5f64cdd2399177a19926a6a15db2dbf11e2098de621f9e7480" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -585,8 +615,8 @@ dependencies = [ [[package]] name = "alloy-serde" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-primitives", "arbitrary", @@ -596,8 +626,8 @@ dependencies = [ [[package]] name = "alloy-signer" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-primitives", "async-trait", @@ -609,8 +639,8 @@ dependencies = [ [[package]] name = "alloy-signer-local" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-consensus", "alloy-network", @@ -626,9 +656,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro" -version = "0.8.5" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68e7f6e8fe5b443f82b3f1e15abfa191128f71569148428e49449d01f6f49e8b" +checksum = "8a1b42ac8f45e2f49f4bcdd72cbfde0bb148f5481d403774ffa546e48b83efc1" dependencies = [ "alloy-sol-macro-expander", 
"alloy-sol-macro-input", @@ -640,14 +670,14 @@ dependencies = [ [[package]] name = "alloy-sol-macro-expander" -version = "0.8.5" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b96ce28d2fde09abb6135f410c41fad670a3a770b6776869bd852f1df102e6f" +checksum = "06318f1778e57f36333e850aa71bd1bb5e560c10279e236622faae0470c50412" dependencies = [ "alloy-sol-macro-input", "const-hex", "heck", - "indexmap 2.5.0", + "indexmap 2.6.0", "proc-macro-error2", "proc-macro2", "quote", @@ -658,9 +688,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro-input" -version = "0.8.5" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "906746396a8296537745711630d9185746c0b50c033d5e9d18b0a6eba3d53f90" +checksum = "eaebb9b0ad61a41345a22c9279975c0cdd231b97947b10d7aad1cf0a7181e4a5" dependencies = [ "const-hex", "dunce", @@ -696,8 +726,8 @@ dependencies = [ [[package]] name = "alloy-transport" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-json-rpc", "base64 0.22.1", @@ -710,12 +740,13 @@ dependencies = [ "tower 0.5.1", "tracing", "url", + "wasmtimer", ] [[package]] name = "alloy-transport-http" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ "alloy-json-rpc", "alloy-transport", @@ -728,12 +759,12 @@ dependencies = [ [[package]] name = "alloy-transport-ipc" -version = "0.4.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b90cf9cde7f2fce617da52768ee28f522264b282d148384a4ca0ea85af04fa3a" +checksum = "8073d1186bfeeb8fbdd1292b6f1a0731f3aed8e21e1463905abfae0b96a887a6" dependencies = [ "alloy-json-rpc", - "alloy-pubsub 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-pubsub 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-transport", "bytes 1.7.2", "futures", @@ -747,11 +778,11 @@ dependencies = [ [[package]] name = "alloy-transport-ws" -version = "0.4.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7153b88690de6a50bba81c11e1d706bc41dbb90126d607404d60b763f6a3947f" +checksum = "61f27837bb4a1d6c83a28231c94493e814882f0e9058648a97e908a5f3fc9fcf" dependencies = [ - "alloy-pubsub 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-pubsub 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-transport", "futures", "http 1.1.0", @@ -765,10 +796,10 @@ dependencies = [ [[package]] name = "alloy-transport-ws" -version = "0.4.2" -source = "git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36#718060680134e6bb40d97d3c6fb56fd1950ced36" +version = "0.5.4" +source = "git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444#b6ca35f241857d220f530b2100581586bac05444" dependencies = [ - "alloy-pubsub 0.4.2 (git+https://github.com/bnb-chain/alloy?rev=718060680134e6bb40d97d3c6fb56fd1950ced36)", + "alloy-pubsub 0.5.4 
(git+https://github.com/bnb-chain/alloy?rev=b6ca35f241857d220f530b2100581586bac05444)", "alloy-transport", "futures", "http 1.1.0", @@ -782,13 +813,14 @@ dependencies = [ [[package]] name = "alloy-trie" -version = "0.6.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9703ce68b97f8faae6f7739d1e003fc97621b856953cbcdbb2b515743f23288" +checksum = "cdd7f8b3a7c65ca09b3c7bdd7c7d72d7423d026f5247eda96af53d24e58315c1" dependencies = [ "alloy-primitives", "alloy-rlp", "arbitrary", + "arrayvec", "derive_arbitrary", "derive_more 1.0.0", "nybbles", @@ -886,13 +918,13 @@ checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" [[package]] name = "aquamarine" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21cc1548309245035eb18aa7f0967da6bc65587005170c56e6ef2788a4cf3f4e" +checksum = "0f50776554130342de4836ba542aa85a4ddb361690d7e8df13774d7284c3d5c2" dependencies = [ "include_dir", "itertools 0.10.5", - "proc-macro-error", + "proc-macro-error2", "proc-macro2", "quote", "syn 2.0.79", @@ -1096,6 +1128,9 @@ name = "arrayvec" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +dependencies = [ + "serde", +] [[package]] name = "asn1_der" @@ -1483,7 +1518,7 @@ dependencies = [ "bitflags 2.6.0", "boa_interner", "boa_macros", - "indexmap 2.5.0", + "indexmap 2.6.0", "num-bigint", "rustc-hash 2.0.0", ] @@ -1509,7 +1544,7 @@ dependencies = [ "fast-float", "hashbrown 0.14.5", "icu_normalizer", - "indexmap 2.5.0", + "indexmap 2.6.0", "intrusive-collections", "itertools 0.13.0", "num-bigint", @@ -1555,7 +1590,7 @@ dependencies = [ "boa_gc", "boa_macros", "hashbrown 0.14.5", - "indexmap 2.5.0", + "indexmap 2.6.0", "once_cell", "phf", "rustc-hash 2.0.0", @@ -1681,6 +1716,17 @@ dependencies = [ "regex-automata 0.1.10", ] +[[package]] +name = "bstr" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" +dependencies = [ + "memchr", + "regex-automata 0.4.8", + "serde", +] + [[package]] name = "bumpalo" version = "3.16.0" @@ -2357,6 +2403,12 @@ dependencies = [ "itertools 0.10.5", ] +[[package]] +name = "critical-section" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" + [[package]] name = "crossbeam-channel" version = "0.5.13" @@ -2400,7 +2452,7 @@ dependencies = [ "bitflags 2.6.0", "crossterm_winapi", "libc", - "parking_lot 0.12.3", + "parking_lot", "winapi", ] @@ -2413,7 +2465,7 @@ dependencies = [ "bitflags 2.6.0", "crossterm_winapi", "mio 1.0.2", - "parking_lot 0.12.3", + "parking_lot", "rustix", "signal-hook", "signal-hook-mio", @@ -2596,7 +2648,7 @@ dependencies = [ "hashbrown 0.14.5", "lock_api", "once_cell", - "parking_lot_core 0.9.10", + "parking_lot_core", ] [[package]] @@ -2610,7 +2662,8 @@ dependencies = [ "hashbrown 0.14.5", "lock_api", "once_cell", - "parking_lot_core 0.9.10", + "parking_lot_core", + "serde", ] [[package]] @@ -2656,11 +2709,12 @@ dependencies = [ [[package]] name = "delay_map" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4355c25cbf99edcb6b4a0e906f6bdc6956eda149e84455bea49696429b2f8e8" +checksum = 
"df941644b671f05f59433e481ba0d31ac10e3667de725236a4c0d587c496fba1" dependencies = [ "futures", + "tokio", "tokio-util", ] @@ -2819,9 +2873,9 @@ dependencies = [ [[package]] name = "discv5" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f569b8c367554666c8652305621e8bae3634a2ff5c6378081d5bd8c399c99f23" +checksum = "23e6b70634e26c909d1edbb3142b3eaf3b89da0e52f284f00ca7c80d9901ad9e" dependencies = [ "aes", "aes-gcm", @@ -2840,13 +2894,13 @@ dependencies = [ "lru", "more-asserts", "multiaddr", - "parking_lot 0.11.2", + "parking_lot", "rand 0.8.5", "smallvec", - "socket2 0.4.10", + "socket2", "tokio", "tracing", - "uint", + "uint 0.10.0", "zeroize", ] @@ -2975,7 +3029,9 @@ dependencies = [ "reth-evm-ethereum", "reth-primitives", "reth-provider", + "reth-revm", "reth-stages", + "revm", "serde", "serde_json", "thiserror", @@ -3081,6 +3137,47 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "ethereum_serde_utils" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70cbccfccf81d67bff0ab36e591fa536c8a935b078a7b0e58c1d00d418332fc9" +dependencies = [ + "alloy-primitives", + "hex 0.4.3", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "ethereum_ssz" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfbba28f4f3f32d92c06a64f5bf6c4537b5d4e21f28c689bd2bbaecfea4e0d3e" +dependencies = [ + "alloy-primitives", + "derivative", + "ethereum_serde_utils", + "itertools 0.13.0", + "serde", + "serde_derive", + "smallvec", + "typenum", +] + +[[package]] +name = "ethereum_ssz_derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d37845ba7c16bf4be8be4b5786f03a2ba5f2fda0d7f9e7cb2282f69cff420d7" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.79", +] + [[package]] name = "event-listener" version = "2.5.3" @@ -3136,11 +3233,29 @@ dependencies = [ "tokio-stream", ] +[[package]] +name = "example-custom-beacon-withdrawals" +version = "0.0.0" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-sol-macro", + "alloy-sol-types", + "eyre", + "reth", + "reth-chainspec", + "reth-evm", + "reth-evm-ethereum", + "reth-node-ethereum", + "reth-primitives", + "tokio", +] + [[package]] name = "example-custom-dev-node" version = "0.0.0" dependencies = [ - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "eyre", "futures-util", @@ -3156,7 +3271,7 @@ dependencies = [ name = "example-custom-engine-types" version = "0.0.0" dependencies = [ - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "alloy-rpc-types", "eyre", @@ -3170,6 +3285,7 @@ dependencies = [ "reth-payload-builder", "reth-primitives", "reth-tracing", + "reth-trie-db", "serde", "thiserror", "tokio", @@ -3179,7 +3295,7 @@ dependencies = [ name = "example-custom-evm" version = "0.0.0" dependencies = [ - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "eyre", "reth", @@ -3197,6 +3313,7 @@ dependencies = [ name = "example-custom-inspector" version = "0.0.0" dependencies = [ + "alloy-eips", "alloy-primitives", 
"alloy-rpc-types", "clap", @@ -3220,6 +3337,7 @@ dependencies = [ name = "example-custom-payload-builder" version = "0.0.0" dependencies = [ + "alloy-eips", "alloy-primitives", "eyre", "futures-util", @@ -3247,8 +3365,6 @@ dependencies = [ "reth-network", "reth-network-api", "reth-node-ethereum", - "reth-primitives", - "reth-provider", "tokio", "tokio-stream", "tracing", @@ -3273,6 +3389,7 @@ dependencies = [ name = "example-manual-p2p" version = "0.0.0" dependencies = [ + "alloy-consensus", "eyre", "futures", "reth-chainspec", @@ -3337,7 +3454,6 @@ dependencies = [ "reth-discv4", "reth-network", "reth-primitives", - "reth-provider", "reth-tracing", "secp256k1", "serde_json", @@ -3364,10 +3480,10 @@ dependencies = [ name = "example-stateful-precompile" version = "0.0.0" dependencies = [ - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "eyre", - "parking_lot 0.12.3", + "parking_lot", "reth", "reth-chainspec", "reth-node-api", @@ -3732,6 +3848,7 @@ version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ + "serde", "typenum", "version_check", "zeroize", @@ -3852,7 +3969,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.5.0", + "indexmap 2.6.0", "slab", "tokio", "tokio-util", @@ -3871,7 +3988,7 @@ dependencies = [ "futures-core", "futures-sink", "http 1.1.0", - "indexmap 2.5.0", + "indexmap 2.6.0", "slab", "tokio", "tokio-util", @@ -3937,9 +4054,9 @@ dependencies = [ [[package]] name = "hashlink" -version = "0.8.4" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" dependencies = [ "hashbrown 0.14.5", ] @@ -4176,7 +4293,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.5.7", + "socket2", "tokio", "tower-service", "tracing", @@ -4251,7 +4368,7 @@ dependencies = [ "http-body 1.0.1", "hyper 1.4.1", "pin-project-lite", - "socket2 0.5.7", + "socket2", "tokio", "tower-service", "tracing", @@ -4543,13 +4660,13 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" +checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" dependencies = [ "arbitrary", "equivalent", - "hashbrown 0.14.5", + "hashbrown 0.15.0", "serde", ] @@ -4566,7 +4683,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "232929e1d75fe899576a3d5c7416ad0d88dbfbb3c3d6aa00873a7408a50ddb88" dependencies = [ "ahash", - "indexmap 2.5.0", + "indexmap 2.6.0", "is-terminal", "itoa", "log", @@ -4656,7 +4773,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2 0.5.7", + "socket2", "widestring", "windows-sys 0.48.0", "winreg", @@ -4841,7 +4958,7 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "jsonrpsee-types", - "parking_lot 0.12.3", + "parking_lot", "pin-project", "rand 0.8.5", "rustc-hash 2.0.0", @@ -5113,7 +5230,7 @@ checksum = 
"c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags 2.6.0", "libc", - "redox_syscall 0.5.7", + "redox_syscall", ] [[package]] @@ -5177,6 +5294,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47186c6da4d81ca383c7c47c1bfc80f4b95f4720514d860a5407aaf4233f9588" dependencies = [ "linked-hash-map", + "serde", ] [[package]] @@ -5199,6 +5317,7 @@ checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", + "serde", ] [[package]] @@ -5294,9 +5413,9 @@ dependencies = [ [[package]] name = "metrics" -version = "0.23.0" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "884adb57038347dfbaf2d5065887b6cf4312330dc8e94bc30a1a839bd79d3261" +checksum = "8ae428771d17306715c5091d446327d1cfdedc82185c65ba8423ab404e45bf10" dependencies = [ "ahash", "portable-atomic", @@ -5316,12 +5435,12 @@ dependencies = [ [[package]] name = "metrics-exporter-prometheus" -version = "0.15.3" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4f0c8427b39666bf970460908b213ec09b3b350f20c0c2eabcbba51704a08e6" +checksum = "85b6f8152da6d7892ff1b7a1c0fa3f435e92b5918ad67035c3bb432111d9a29b" dependencies = [ "base64 0.22.1", - "indexmap 2.5.0", + "indexmap 2.6.0", "metrics", "metrics-util", "quanta", @@ -5330,30 +5449,30 @@ dependencies = [ [[package]] name = "metrics-process" -version = "2.1.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb524e5438255eaa8aa74214d5a62713b77b2c3c6e3c0bbeee65cfd9a58948ba" +checksum = "57ca8ecd85575fbb143b2678cb123bb818779391ec0f745b1c4a9dbabadde407" dependencies = [ + "libc", "libproc", "mach2", "metrics", "once_cell", - "procfs", + "procfs 0.17.0", "rlimit", - "windows 0.57.0", + "windows 0.58.0", ] [[package]] name = "metrics-util" -version = "0.17.0" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4259040465c955f9f2f1a4a8a16dc46726169bca0f88e8fb2dbeced487c3e828" +checksum = "15b482df36c13dd1869d73d14d28cd4855fbd6cfc32294bee109908a9f4a4ed7" dependencies = [ "crossbeam-epoch", "crossbeam-utils", - "hashbrown 0.14.5", + "hashbrown 0.15.0", "metrics", - "num_cpus", "quanta", "sketches-ddsketch", ] @@ -5589,6 +5708,7 @@ dependencies = [ "libc", "log", "mio 0.8.11", + "serde", "walkdir", "windows-sys 0.48.0", ] @@ -5794,6 +5914,7 @@ version = "1.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82881c4be219ab5faaf2ad5e5e5ecdff8c66bd7402ca3160975c93b24961afd1" dependencies = [ + "critical-section", "portable-atomic", ] @@ -5805,9 +5926,9 @@ checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "op-alloy-consensus" -version = "0.4.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ea7162170c6f3cad8f67f4dd7108e3f78349fd553da5b8bebff1e7ef8f38896" +checksum = "f26c3b35b7b3e36d15e0563eebffe13c1d9ca16b7aaffcb6a64354633547e16b" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5823,9 +5944,9 @@ dependencies = [ [[package]] name = "op-alloy-genesis" -version = "0.4.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f3d31dfbbd8dd898c7512f8ce7d30103980485416f668566100b0ed0994b958" +checksum = "ccacc2efed3d60d98ea581bddb885df1c6c62a592e55de049cfefd94116112cd" dependencies = [ "alloy-consensus", "alloy-eips", @@ 
-5837,9 +5958,9 @@ dependencies = [ [[package]] name = "op-alloy-network" -version = "0.4.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d113b325527ba7da271a8793f1c14bdf7f035ce9e0611e668c36fc6812568c7f" +checksum = "5ff6fc0f94702ea0f4d8466bffdc990067ae6df9213465df9b7957f74f1e5461" dependencies = [ "alloy-consensus", "alloy-network", @@ -5851,26 +5972,29 @@ dependencies = [ [[package]] name = "op-alloy-protocol" -version = "0.4.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "310873e4fbfc41986716c4fb6000a8b49d025d932d2c261af58271c434b05288" +checksum = "f5f8e6ec6b91c6aaeb20860b455a52fd8e300acfe5d534e96e9073a24f853e74" dependencies = [ "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-rlp", "alloy-serde", + "async-trait", "derive_more 1.0.0", "op-alloy-consensus", "op-alloy-genesis", "serde", + "tracing", + "unsigned-varint 0.8.0", ] [[package]] name = "op-alloy-rpc-types" -version = "0.4.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "323c65880e2561aa87f74f8af260fd15b9cc930c448c88a60ae95af86c88c634" +checksum = "94bae9bf91b620e1e2c2291562e5998bc1247bd8ada011773e1997b31a95de99" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5878,6 +6002,7 @@ dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", "alloy-serde", + "arbitrary", "op-alloy-consensus", "serde", "serde_json", @@ -5885,16 +6010,18 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types-engine" -version = "0.4.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349e7b420f45d1a00216ec4c65fcf3f0057a841bc39732c405c85ae782b94121" +checksum = "4b52ee59c86537cff83e8c7f2a6aa287a94f3608bb40c06d442aafd0c2e807a4" dependencies = [ "alloy-primitives", "alloy-rpc-types-engine", "alloy-serde", "derive_more 1.0.0", + "ethereum_ssz", "op-alloy-protocol", "serde", + "snap", ] [[package]] @@ -5981,6 +6108,7 @@ version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" dependencies = [ + "arbitrary", "arrayvec", "bitvec", "byte-slice-cast", @@ -6008,17 +6136,6 @@ version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.6", -] - [[package]] name = "parking_lot" version = "0.12.3" @@ -6026,21 +6143,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", - "parking_lot_core 0.9.10", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if", - "instant", - "libc", - "redox_syscall 0.2.16", - "smallvec", - "winapi", + "parking_lot_core", ] [[package]] @@ -6051,7 +6154,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.7", + "redox_syscall", "smallvec", "windows-targets 0.52.6", ] @@ 
-6308,7 +6411,7 @@ dependencies = [ "log", "nix", "once_cell", - "parking_lot 0.12.3", + "parking_lot", "smallvec", "symbolic-demangle", "tempfile", @@ -6387,7 +6490,7 @@ checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" dependencies = [ "fixed-hash", "impl-codec", - "uint", + "uint 0.9.5", ] [[package]] @@ -6399,30 +6502,6 @@ dependencies = [ "toml_edit", ] -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn 1.0.109", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - [[package]] name = "proc-macro-error-attr2" version = "2.0.0" @@ -6465,7 +6544,19 @@ dependencies = [ "flate2", "hex 0.4.3", "lazy_static", - "procfs-core", + "procfs-core 0.16.0", + "rustix", +] + +[[package]] +name = "procfs" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc5b72d8145275d844d4b5f6d4e1eef00c8cd889edb6035c21675d1bb1f45c9f" +dependencies = [ + "bitflags 2.6.0", + "hex 0.4.3", + "procfs-core 0.17.0", "rustix", ] @@ -6480,6 +6571,16 @@ dependencies = [ "hex 0.4.3", ] +[[package]] +name = "procfs-core" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "239df02d8349b06fc07398a3a1697b06418223b1c7725085e801e7c0fc6a12ec" +dependencies = [ + "bitflags 2.6.0", + "hex 0.4.3", +] + [[package]] name = "proptest" version = "1.5.0" @@ -6658,7 +6759,7 @@ dependencies = [ "ahash", "equivalent", "hashbrown 0.14.5", - "parking_lot 0.12.3", + "parking_lot", ] [[package]] @@ -6673,7 +6774,7 @@ dependencies = [ "quinn-udp", "rustc-hash 2.0.0", "rustls 0.23.13", - "socket2 0.5.7", + "socket2", "thiserror", "tokio", "tracing", @@ -6704,7 +6805,7 @@ checksum = "4fe68c2e9e1a1234e218683dbdf9f9dfcb094113c5ac2b938dfcb9bab4c4140b" dependencies = [ "libc", "once_cell", - "socket2 0.5.7", + "socket2", "tracing", "windows-sys 0.59.0", ] @@ -6861,15 +6962,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3edd4d5d42c92f0a659926464d4cce56b562761267ecf0f469d85b7de384175" -[[package]] -name = "redox_syscall" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "redox_syscall" version = "0.5.7" @@ -7067,7 +7159,6 @@ dependencies = [ "reth-consensus-common", "reth-db", "reth-db-api", - "reth-discv4", "reth-downloaders", "reth-engine-util", "reth-errors", @@ -7117,6 +7208,7 @@ dependencies = [ name = "reth-auto-seal-consensus" version = "1.0.6" dependencies = [ + "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", "futures-util", @@ -7147,12 +7239,15 @@ dependencies = [ name = "reth-basic-payload-builder" version = "1.0.6" dependencies = [ + "alloy-consensus", + "alloy-eips", "alloy-primitives", "alloy-rlp", "futures-core", "futures-util", "metrics", "reth-chainspec", + "reth-evm", "reth-metrics", "reth-payload-builder", "reth-payload-primitives", @@ -7170,7 +7265,8 @@ dependencies = [ name = "reth-beacon-consensus" 
version = "1.0.6" dependencies = [ - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-eips", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "alloy-rpc-types-engine", "assert_matches", @@ -7225,13 +7321,13 @@ dependencies = [ "alloy-json-rpc", "alloy-primitives", "alloy-provider", - "alloy-pubsub 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-pubsub 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-rpc-client", "alloy-rpc-types-engine", "alloy-transport", "alloy-transport-http", "alloy-transport-ipc", - "alloy-transport-ws 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-transport-ws 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "async-trait", "clap", "csv", @@ -7258,14 +7354,14 @@ version = "1.0.6" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "aquamarine", "assert_matches", "dashmap 6.1.0", "linked_hash_set", "metrics", - "parking_lot 0.12.3", + "parking_lot", "reth-blockchain-tree-api", "reth-chainspec", "reth-consensus", @@ -7296,6 +7392,7 @@ dependencies = [ name = "reth-blockchain-tree-api" version = "1.0.6" dependencies = [ + "alloy-eips", "alloy-primitives", "reth-consensus", "reth-execution-errors", @@ -7309,7 +7406,8 @@ name = "reth-bsc-chainspec" version = "1.0.6" dependencies = [ "alloy-chains", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-consensus", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "derive_more 1.0.0", "once_cell", @@ -7371,6 +7469,7 @@ version = "1.0.6" dependencies = [ "alloy-consensus", "alloy-dyn-abi", + "alloy-eips", "alloy-json-abi", "alloy-primitives", "alloy-rlp", @@ -7381,7 +7480,7 @@ dependencies = [ "lazy_static", "lru", "mockall 0.12.1", - "parking_lot 0.12.3", + "parking_lot", "rand 0.8.5", "reth-bsc-chainspec", "reth-bsc-forks", @@ -7414,10 +7513,12 @@ name = "reth-bsc-engine" version = "1.0.6" dependencies = [ "alloy-dyn-abi", + "alloy-eips", "alloy-json-abi", "alloy-primitives", "alloy-rlp", "alloy-rpc-types", + "alloy-rpc-types-engine", "bitset", "blst", "bytes 1.7.2", @@ -7425,7 +7526,7 @@ dependencies = [ "lazy_static", "lru", "mockall 0.12.1", - "parking_lot 0.12.3", + "parking_lot", "rand 0.8.5", "reth-beacon-consensus", "reth-bsc-chainspec", @@ -7461,13 +7562,14 @@ dependencies = [ name = "reth-bsc-evm" version = "1.0.6" dependencies = [ - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-consensus", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "bitset", "blst", "lazy_static", "lru", - "parking_lot 0.12.3", + "parking_lot", "reth-bsc-chainspec", "reth-bsc-consensus", "reth-bsc-forks", @@ -7528,6 +7630,7 @@ dependencies = [ "reth-rpc", "reth-tracing", "reth-transaction-pool", + "reth-trie-db", "serde_json", "tokio", ] @@ -7536,6 +7639,7 @@ dependencies = [ name = "reth-bsc-payload-builder" version = "1.0.6" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", @@ -7563,6 +7667,7 @@ name = "reth-bsc-primitives" version = "1.0.6" dependencies = [ "alloy-chains", + "alloy-consensus", "alloy-primitives", "include_dir", "lazy_static", @@ -7589,7 +7694,7 
@@ dependencies = [ "derive_more 1.0.0", "lazy_static", "metrics", - "parking_lot 0.12.3", + "parking_lot", "pin-project", "quick_cache", "rand 0.8.5", @@ -7601,6 +7706,7 @@ dependencies = [ "reth-provider", "reth-revm", "reth-storage-api", + "reth-testing-utils", "reth-trie", "revm", "serial_test", @@ -7614,8 +7720,9 @@ name = "reth-chainspec" version = "1.0.6" dependencies = [ "alloy-chains", + "alloy-consensus", "alloy-eips", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "alloy-rlp", "alloy-trie", @@ -7635,10 +7742,11 @@ dependencies = [ name = "reth-cli" version = "1.0.6" dependencies = [ - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "clap", "eyre", "reth-cli-runner", + "reth-db", "serde_json", "shellexpand", ] @@ -7650,6 +7758,7 @@ dependencies = [ "ahash", "alloy-eips", "alloy-primitives", + "alloy-rlp", "arbitrary", "backon", "clap", @@ -7668,6 +7777,7 @@ dependencies = [ "reth-cli", "reth-cli-runner", "reth-cli-util", + "reth-codecs", "reth-config", "reth-consensus", "reth-db", @@ -7691,10 +7801,13 @@ dependencies = [ "reth-primitives", "reth-provider", "reth-prune", + "reth-prune-types", "reth-stages", + "reth-stages-types", "reth-static-file", "reth-static-file-types", "reth-trie", + "reth-trie-common", "reth-trie-db", "secp256k1", "serde", @@ -7725,6 +7838,7 @@ dependencies = [ "rand 0.8.5", "reth-fs-util", "secp256k1", + "serde", "thiserror", "tikv-jemallocator", "tracy-client", @@ -7736,9 +7850,8 @@ version = "1.0.6" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", - "alloy-rlp", "alloy-trie", "arbitrary", "bytes 1.7.2", @@ -7746,11 +7859,11 @@ dependencies = [ "op-alloy-consensus", "proptest", "proptest-arbitrary-interop", - "rand 0.8.5", "reth-codecs-derive", "serde", "serde_json", "test-fuzz", + "visibility", ] [[package]] @@ -7785,6 +7898,7 @@ dependencies = [ name = "reth-consensus" version = "1.0.6" dependencies = [ + "alloy-eips", "alloy-primitives", "auto_impl", "derive_more 1.0.0", @@ -7796,6 +7910,7 @@ name = "reth-consensus-common" version = "1.0.6" dependencies = [ "alloy-consensus", + "alloy-eips", "alloy-primitives", "mockall 0.13.0", "rand 0.8.5", @@ -7843,11 +7958,10 @@ dependencies = [ "iai-callgrind", "metrics", "page_size", - "parking_lot 0.12.3", + "parking_lot", "paste", "pprof", "proptest", - "rand 0.8.5", "reth-db-api", "reth-fs-util", "reth-libmdbx", @@ -7874,7 +7988,7 @@ dependencies = [ name = "reth-db-api" version = "1.0.6" dependencies = [ - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "arbitrary", "bytes 1.7.2", @@ -7902,7 +8016,8 @@ dependencies = [ name = "reth-db-common" version = "1.0.6" dependencies = [ - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-consensus", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "boyer-moore-magiclen", "eyre", @@ -7937,7 +8052,7 @@ dependencies = [ "proptest", "proptest-arbitrary-interop", "reth-codecs", - "reth-primitives", + "reth-primitives-traits", "serde", 
"test-fuzz", ] @@ -7952,7 +8067,8 @@ dependencies = [ "discv5", "enr", "generic-array 0.14.7", - "parking_lot 0.12.3", + "itertools 0.13.0", + "parking_lot", "rand 0.8.5", "reth-ethereum-forks", "reth-net-banlist", @@ -8002,7 +8118,7 @@ dependencies = [ "data-encoding", "enr", "linked_hash_set", - "parking_lot 0.12.3", + "parking_lot", "rand 0.8.5", "reth-chainspec", "reth-ethereum-forks", @@ -8068,24 +8184,21 @@ dependencies = [ "alloy-rpc-types", "alloy-signer", "alloy-signer-local", + "derive_more 1.0.0", "eyre", "futures-util", "jsonrpsee", - "jsonrpsee-types", "op-alloy-rpc-types-engine", "reth", "reth-chainspec", "reth-db", + "reth-engine-local", "reth-network-peers", "reth-node-builder", - "reth-node-ethereum", "reth-payload-builder", "reth-payload-primitives", - "reth-primitives", "reth-provider", - "reth-rpc", "reth-rpc-layer", - "reth-rpc-types-compat", "reth-stages-types", "reth-tokio-util", "reth-tracing", @@ -8093,6 +8206,7 @@ dependencies = [ "tokio", "tokio-stream", "tracing", + "url", ] [[package]] @@ -8133,22 +8247,22 @@ dependencies = [ "alloy-rpc-types-engine", "eyre", "futures-util", + "op-alloy-rpc-types-engine", "reth-beacon-consensus", - "reth-chain-state", "reth-chainspec", - "reth-config", - "reth-db", + "reth-consensus", + "reth-engine-primitives", + "reth-engine-service", "reth-engine-tree", "reth-ethereum-engine-primitives", - "reth-exex-test-utils", - "reth-node-types", + "reth-evm", "reth-payload-builder", "reth-payload-primitives", - "reth-primitives", + "reth-payload-validator", "reth-provider", "reth-prune", + "reth-rpc-types-compat", "reth-stages-api", - "reth-tracing", "reth-transaction-pool", "tokio", "tokio-stream", @@ -8207,7 +8321,6 @@ dependencies = [ "dashmap 6.1.0", "futures", "metrics", - "rand 0.8.5", "reth-beacon-consensus", "reth-blockchain-tree", "reth-blockchain-tree-api", @@ -8239,8 +8352,10 @@ dependencies = [ "reth-trie", "reth-trie-parallel", "reth-trie-prefetch", + "revm-primitives", "thiserror", "tokio", + "tokio-stream", "tracing", ] @@ -8248,6 +8363,8 @@ dependencies = [ name = "reth-engine-util" version = "1.0.6" dependencies = [ + "alloy-consensus", + "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", "eyre", @@ -8328,7 +8445,7 @@ dependencies = [ "alloy-chains", "alloy-consensus", "alloy-eips", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "alloy-rlp", "arbitrary", @@ -8359,6 +8476,8 @@ dependencies = [ name = "reth-ethereum-consensus" version = "1.0.6" dependencies = [ + "alloy-consensus", + "alloy-eips", "alloy-primitives", "reth-chainspec", "reth-consensus", @@ -8391,6 +8510,7 @@ name = "reth-ethereum-forks" version = "1.0.6" dependencies = [ "alloy-chains", + "alloy-consensus", "alloy-primitives", "alloy-rlp", "arbitrary", @@ -8409,6 +8529,8 @@ dependencies = [ name = "reth-ethereum-payload-builder" version = "1.0.6" dependencies = [ + "alloy-consensus", + "alloy-eips", "alloy-primitives", "reth-basic-payload-builder", "reth-chain-state", @@ -8443,19 +8565,24 @@ dependencies = [ name = "reth-evm" version = "1.0.6" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "auto_impl", "futures-util", "metrics", - "parking_lot 0.12.3", + "parking_lot", "reth-chainspec", + "reth-consensus", + "reth-consensus-common", + "reth-ethereum-forks", "reth-execution-errors", "reth-execution-types", "reth-metrics", "reth-primitives", "reth-primitives-traits", "reth-prune-types", + 
"reth-revm", "reth-storage-errors", "revm", "revm-primitives", @@ -8468,16 +8595,16 @@ version = "1.0.6" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "alloy-sol-types", "reth-chainspec", + "reth-consensus", "reth-ethereum-consensus", "reth-ethereum-forks", "reth-evm", "reth-execution-types", "reth-primitives", - "reth-prune-types", "reth-revm", "reth-testing-utils", "revm-primitives", @@ -8525,19 +8652,18 @@ version = "1.0.6" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "eyre", "futures", "itertools 0.13.0", "metrics", - "parking_lot 0.12.3", + "parking_lot", "rand 0.8.5", "reth-blockchain-tree", "reth-chain-state", "reth-chainspec", "reth-config", - "reth-db-api", "reth-db-common", "reth-evm", "reth-evm-ethereum", @@ -8568,6 +8694,7 @@ dependencies = [ name = "reth-exex-test-utils" version = "1.0.6" dependencies = [ + "alloy-eips", "eyre", "futures-util", "rand 0.8.5", @@ -8577,7 +8704,6 @@ dependencies = [ "reth-consensus", "reth-db", "reth-db-common", - "reth-ethereum-engine-primitives", "reth-evm", "reth-execution-types", "reth-exex", @@ -8591,6 +8717,7 @@ dependencies = [ "reth-provider", "reth-tasks", "reth-transaction-pool", + "reth-trie-db", "tempfile", "thiserror", "tokio", @@ -8676,8 +8803,8 @@ dependencies = [ "criterion", "dashmap 6.1.0", "derive_more 1.0.0", - "indexmap 2.5.0", - "parking_lot 0.12.3", + "indexmap 2.6.0", + "parking_lot", "pprof", "rand 0.8.5", "rand_xorshift", @@ -8747,7 +8874,7 @@ dependencies = [ "futures", "itertools 0.13.0", "metrics", - "parking_lot 0.12.3", + "parking_lot", "pin-project", "pprof", "rand 0.8.5", @@ -8793,7 +8920,7 @@ name = "reth-network-api" version = "1.0.6" dependencies = [ "alloy-primitives", - "alloy-rpc-types-admin 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-rpc-types-admin 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "auto_impl", "derive_more 1.0.0", "enr", @@ -8819,7 +8946,7 @@ dependencies = [ "auto_impl", "derive_more 1.0.0", "futures", - "parking_lot 0.12.3", + "parking_lot", "reth-consensus", "reth-eth-wire-types", "reth-network-peers", @@ -8881,15 +9008,20 @@ dependencies = [ name = "reth-node-api" version = "1.0.6" dependencies = [ + "alloy-rpc-types-engine", + "eyre", + "reth-beacon-consensus", + "reth-bsc-consensus", + "reth-consensus", "reth-engine-primitives", "reth-evm", "reth-network-api", + "reth-node-core", "reth-node-types", "reth-payload-builder", "reth-payload-primitives", "reth-primitives", "reth-provider", - "reth-rpc-eth-api", "reth-tasks", "reth-transaction-pool", ] @@ -8921,6 +9053,7 @@ dependencies = [ "reth-db-api", "reth-db-common", "reth-downloaders", + "reth-engine-local", "reth-engine-service", "reth-engine-tree", "reth-engine-util", @@ -8964,6 +9097,8 @@ dependencies = [ name = "reth-node-core" version = "1.0.6" dependencies = [ + "alloy-consensus", + "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", "clap", @@ -8988,8 +9123,6 @@ dependencies = [ "reth-network-peers", "reth-primitives", "reth-prune-types", - "reth-rpc-api", - "reth-rpc-eth-api", "reth-rpc-eth-types", "reth-rpc-server-types", "reth-rpc-types-compat", @@ -9002,7 +9135,6 @@ dependencies = [ "serde", "shellexpand", 
"strum", - "tempfile", "thiserror", "tokio", "toml 0.8.19", @@ -9014,11 +9146,18 @@ dependencies = [ name = "reth-node-ethereum" version = "1.0.6" dependencies = [ - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-consensus", + "alloy-contract", + "alloy-eips", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", + "alloy-provider", + "alloy-rpc-types-beacon", + "alloy-signer", + "alloy-sol-types", "eyre", "futures", - "futures-util", + "rand 0.8.5", "reth", "reth-auto-seal-consensus", "reth-basic-payload-builder", @@ -9030,19 +9169,22 @@ dependencies = [ "reth-e2e-test-utils", "reth-ethereum-engine-primitives", "reth-ethereum-payload-builder", + "reth-evm", "reth-evm-ethereum", "reth-exex", "reth-network", "reth-node-api", "reth-node-builder", - "reth-node-core", "reth-payload-builder", "reth-primitives", "reth-provider", + "reth-revm", "reth-rpc", "reth-tasks", "reth-tracing", "reth-transaction-pool", + "reth-trie-db", + "revm", "serde_json", "tokio", ] @@ -9051,6 +9193,8 @@ dependencies = [ name = "reth-node-events" version = "1.0.6" dependencies = [ + "alloy-consensus", + "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", "futures", @@ -9059,7 +9203,6 @@ dependencies = [ "reth-beacon-consensus", "reth-network", "reth-network-api", - "reth-primitives", "reth-primitives-traits", "reth-provider", "reth-prune", @@ -9080,14 +9223,13 @@ dependencies = [ "metrics-exporter-prometheus", "metrics-process", "metrics-util", - "procfs", + "procfs 0.16.0", "reqwest 0.12.8", - "reth-chainspec", "reth-db-api", "reth-metrics", "reth-provider", "reth-tasks", - "socket2 0.5.7", + "socket2", "tikv-jemalloc-ctl", "tokio", "tower 0.4.13", @@ -9102,6 +9244,9 @@ dependencies = [ "reth-chainspec", "reth-db-api", "reth-engine-primitives", + "reth-primitives", + "reth-primitives-traits", + "reth-trie-db", ] [[package]] @@ -9109,7 +9254,9 @@ name = "reth-optimism-chainspec" version = "1.0.6" dependencies = [ "alloy-chains", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-consensus", + "alloy-eips", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "derive_more 1.0.0", "once_cell", @@ -9131,6 +9278,8 @@ dependencies = [ "clap", "eyre", "futures-util", + "op-alloy-consensus", + "proptest", "reth-chainspec", "reth-cli", "reth-cli-commands", @@ -9170,6 +9319,7 @@ dependencies = [ name = "reth-optimism-consensus" version = "1.0.6" dependencies = [ + "alloy-consensus", "alloy-primitives", "reth-chainspec", "reth-consensus", @@ -9187,10 +9337,12 @@ version = "1.0.6" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", + "derive_more 1.0.0", "op-alloy-consensus", "reth-chainspec", + "reth-consensus", "reth-ethereum-forks", "reth-evm", "reth-execution-errors", @@ -9203,7 +9355,6 @@ dependencies = [ "reth-revm", "revm", "revm-primitives", - "thiserror", "tokio", "tracing", ] @@ -9224,18 +9375,14 @@ name = "reth-optimism-node" version = "1.0.6" dependencies = [ "alloy-eips", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "alloy-rpc-types-engine", - "async-trait", "clap", "eyre", - "jsonrpsee", - "jsonrpsee-types", 
"op-alloy-consensus", "op-alloy-rpc-types-engine", - "parking_lot 0.12.3", - "reqwest 0.12.8", + "parking_lot", "reth", "reth-auto-seal-consensus", "reth-basic-payload-builder", @@ -9244,8 +9391,8 @@ dependencies = [ "reth-chainspec", "reth-consensus", "reth-db", - "reth-discv5", "reth-e2e-test-utils", + "reth-engine-local", "reth-evm", "reth-network", "reth-node-api", @@ -9260,27 +9407,25 @@ dependencies = [ "reth-primitives", "reth-provider", "reth-revm", - "reth-rpc", - "reth-rpc-eth-api", - "reth-rpc-eth-types", - "reth-rpc-types-compat", "reth-tracing", "reth-transaction-pool", + "reth-trie-db", + "revm", "serde", "serde_json", - "thiserror", "tokio", - "tracing", ] [[package]] name = "reth-optimism-payload-builder" version = "1.0.6" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-rlp", "alloy-rpc-types-engine", + "op-alloy-consensus", "op-alloy-rpc-types-engine", "reth-basic-payload-builder", "reth-chain-state", @@ -9300,7 +9445,6 @@ dependencies = [ "reth-transaction-pool", "reth-trie", "revm", - "revm-primitives", "sha2 0.10.8", "thiserror", "tracing", @@ -9310,15 +9454,16 @@ dependencies = [ name = "reth-optimism-primitives" version = "1.0.6" dependencies = [ + "alloy-consensus", "alloy-primitives", "reth-primitives", - "reth-primitives-traits", ] [[package]] name = "reth-optimism-rpc" version = "1.0.6" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-rpc-types", @@ -9328,7 +9473,7 @@ dependencies = [ "op-alloy-consensus", "op-alloy-network", "op-alloy-rpc-types", - "parking_lot 0.12.3", + "parking_lot", "reqwest 0.12.8", "reth-chainspec", "reth-evm", @@ -9390,6 +9535,7 @@ dependencies = [ name = "reth-payload-primitives" version = "1.0.6" dependencies = [ + "alloy-eips", "alloy-primitives", "alloy-rpc-types", "async-trait", @@ -9423,7 +9569,7 @@ version = "1.0.6" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "alloy-rlp", "alloy-rpc-types", @@ -9448,7 +9594,6 @@ dependencies = [ "reth-chainspec", "reth-codecs", "reth-ethereum-forks", - "reth-optimism-chainspec", "reth-primitives-traits", "reth-static-file-types", "reth-testing-utils", @@ -9469,7 +9614,7 @@ version = "1.0.6" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "alloy-rlp", "arbitrary", @@ -9506,8 +9651,7 @@ dependencies = [ "itertools 0.13.0", "metrics", "notify", - "once_cell", - "parking_lot 0.12.3", + "parking_lot", "rand 0.8.5", "rayon", "reth-blockchain-tree-api", @@ -9594,9 +9738,9 @@ dependencies = [ name = "reth-revm" version = "1.0.6" dependencies = [ + "alloy-consensus", + "alloy-eips", "alloy-primitives", - "reth-chainspec", - "reth-consensus-common", "reth-ethereum-forks", "reth-execution-errors", "reth-primitives", @@ -9614,12 +9758,13 @@ dependencies = [ "alloy-consensus", "alloy-dyn-abi", "alloy-eips", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-network", "alloy-primitives", "alloy-rlp", "alloy-rpc-types", - "alloy-rpc-types-admin 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-rpc-types-admin 0.5.4 
(registry+https://github.com/rust-lang/crates.io-index)", + "alloy-rpc-types-beacon", "alloy-rpc-types-debug", "alloy-rpc-types-eth", "alloy-rpc-types-mev", @@ -9637,20 +9782,22 @@ dependencies = [ "jsonrpsee", "jsonrpsee-types", "jsonwebtoken", - "parking_lot 0.12.3", + "parking_lot", "pin-project", "rand 0.8.5", "reth-bsc-consensus", "reth-bsc-primitives", "reth-chainspec", + "reth-consensus", "reth-consensus-common", "reth-errors", + "reth-ethereum-consensus", "reth-evm", "reth-evm-ethereum", "reth-network-api", "reth-network-peers", "reth-network-types", - "reth-node-api", + "reth-payload-validator", "reth-primitives", "reth-provider", "reth-revm", @@ -9685,7 +9832,7 @@ dependencies = [ "alloy-json-rpc", "alloy-primitives", "alloy-rpc-types", - "alloy-rpc-types-admin 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-rpc-types-admin 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-rpc-types-anvil", "alloy-rpc-types-beacon", "alloy-rpc-types-debug", @@ -9698,15 +9845,16 @@ dependencies = [ "jsonrpsee", "reth-engine-primitives", "reth-network-peers", - "reth-primitives", "reth-rpc-eth-api", - "serde_json", + "serde", + "serde_with", ] [[package]] name = "reth-rpc-api-testing-util" version = "1.0.6" dependencies = [ + "alloy-eips", "alloy-primitives", "alloy-rpc-types", "alloy-rpc-types-eth", @@ -9726,13 +9874,11 @@ dependencies = [ name = "reth-rpc-builder" version = "1.0.6" dependencies = [ - "alloy-network", + "alloy-eips", "alloy-primitives", - "alloy-rpc-types", "alloy-rpc-types-engine", "alloy-rpc-types-eth", "alloy-rpc-types-trace", - "alloy-serde", "clap", "http 1.1.0", "jsonrpsee", @@ -9741,6 +9887,7 @@ dependencies = [ "reth-beacon-consensus", "reth-bsc-consensus", "reth-chainspec", + "reth-consensus", "reth-engine-primitives", "reth-ethereum-engine-primitives", "reth-evm", @@ -9749,7 +9896,6 @@ dependencies = [ "reth-metrics", "reth-network-api", "reth-network-peers", - "reth-node-api", "reth-node-core", "reth-payload-builder", "reth-primitives", @@ -9763,13 +9909,13 @@ dependencies = [ "reth-rpc-server-types", "reth-rpc-types-compat", "reth-tasks", - "reth-tokio-util", "reth-tracing", "reth-transaction-pool", "serde", "serde_json", "thiserror", "tokio", + "tokio-util", "tower 0.4.13", "tower-http", "tracing", @@ -9815,6 +9961,7 @@ dependencies = [ name = "reth-rpc-eth-api" version = "1.0.6" dependencies = [ + "alloy-consensus", "alloy-dyn-abi", "alloy-eips", "alloy-json-rpc", @@ -9829,13 +9976,14 @@ dependencies = [ "futures", "jsonrpsee", "jsonrpsee-types", - "parking_lot 0.12.3", + "parking_lot", "reth-bsc-primitives", "reth-chainspec", "reth-errors", "reth-evm", "reth-execution-types", "reth-network-api", + "reth-node-api", "reth-primitives", "reth-provider", "reth-revm", @@ -9861,10 +10009,10 @@ dependencies = [ "alloy-primitives", "alloy-rpc-types", "alloy-rpc-types-eth", - "alloy-serde", "alloy-sol-types", "derive_more 1.0.0", "futures", + "itertools 0.13.0", "jsonrpsee-core", "jsonrpsee-types", "metrics", @@ -9915,13 +10063,13 @@ dependencies = [ name = "reth-rpc-server-types" version = "1.0.6" dependencies = [ + "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", "jsonrpsee-core", "jsonrpsee-types", "reth-errors", "reth-network-api", - "reth-primitives", "serde", "strum", ] @@ -9930,15 +10078,16 @@ dependencies = [ name = "reth-rpc-types-compat" version = "1.0.6" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-rlp", "alloy-rpc-types", "alloy-rpc-types-engine", "alloy-rpc-types-eth", - 
"alloy-serde", "reth-primitives", "reth-trie-common", + "serde", "serde_json", ] @@ -9986,7 +10135,6 @@ dependencies = [ "reth-testing-utils", "reth-trie", "reth-trie-db", - "serde_json", "tempfile", "thiserror", "tokio", @@ -10044,13 +10192,10 @@ version = "1.0.6" dependencies = [ "alloy-primitives", "assert_matches", - "parking_lot 0.12.3", + "parking_lot", "rayon", - "reth-chainspec", "reth-db", "reth-db-api", - "reth-nippy-jar", - "reth-node-types", "reth-provider", "reth-prune-types", "reth-stages", @@ -10078,6 +10223,7 @@ dependencies = [ name = "reth-storage-api" version = "1.0.6" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "auto_impl", @@ -10127,7 +10273,7 @@ version = "1.0.6" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "rand 0.8.5", "reth-primitives", @@ -10172,7 +10318,7 @@ dependencies = [ "criterion", "futures-util", "metrics", - "parking_lot 0.12.3", + "parking_lot", "paste", "pprof", "proptest", @@ -10206,18 +10352,17 @@ dependencies = [ name = "reth-trie" version = "1.0.6" dependencies = [ + "alloy-consensus", "alloy-primitives", "alloy-rlp", "auto_impl", "bincode", "criterion", - "derive_more 1.0.0", "itertools 0.13.0", "metrics", "proptest", "proptest-arbitrary-interop", "rayon", - "reth-chainspec", "reth-execution-errors", "reth-metrics", "reth-primitives", @@ -10228,7 +10373,6 @@ dependencies = [ "serde", "serde_json", "serde_with", - "tokio", "tracing", "triehash", ] @@ -10238,7 +10382,7 @@ name = "reth-trie-common" version = "1.0.6" dependencies = [ "alloy-consensus", - "alloy-genesis 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "alloy-genesis 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "alloy-primitives", "alloy-rlp", "alloy-trie", @@ -10261,24 +10405,20 @@ dependencies = [ name = "reth-trie-db" version = "1.0.6" dependencies = [ + "alloy-consensus", "alloy-primitives", "alloy-rlp", - "auto_impl", "derive_more 1.0.0", - "itertools 0.13.0", "metrics", "proptest", "proptest-arbitrary-interop", - "rayon", "reth-chainspec", "reth-db", "reth-db-api", "reth-execution-errors", "reth-metrics", - "reth-node-types", "reth-primitives", "reth-provider", - "reth-stages-types", "reth-storage-errors", "reth-trie", "reth-trie-common", @@ -10286,8 +10426,6 @@ dependencies = [ "serde", "serde_json", "similar-asserts", - "tokio", - "tokio-stream", "tracing", "triehash", ] @@ -10308,7 +10446,6 @@ dependencies = [ "rand 0.8.5", "rayon", "reth-db", - "reth-db-api", "reth-execution-errors", "reth-metrics", "reth-primitives", @@ -10347,10 +10484,30 @@ dependencies = [ "tracing", ] +[[package]] +name = "reth-trie-sparse" +version = "1.0.6" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "assert_matches", + "criterion", + "itertools 0.13.0", + "pretty_assertions", + "proptest", + "rand 0.8.5", + "reth-testing-utils", + "reth-tracing", + "reth-trie", + "reth-trie-common", + "smallvec", + "thiserror", +] + [[package]] name = "revm" -version = "14.0.3" -source = "git+https://github.com/bnb-chain/revm?rev=0978bcb4189c547e7b71b2f11389ff2214fbe319#0978bcb4189c547e7b71b2f11389ff2214fbe319" +version = "17.0.0" +source = "git+https://github.com/bnb-chain/revm?rev=add28b2bb131aa4a37ab4daf817e46448158dfc8#add28b2bb131aa4a37ab4daf817e46448158dfc8" dependencies = [ "auto_impl", "cfg-if", @@ -10363,9 +10520,9 @@ dependencies = [ [[package]] name 
= "revm-inspectors" -version = "0.8.1" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43c44af0bf801f48d25f7baf25cf72aff4c02d610f83b428175228162fef0246" +checksum = "1e29c662f7887f3b659d4b0fd234673419a8fcbeaa1ecc29bf7034c0a75cc8ea" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -10382,8 +10539,8 @@ dependencies = [ [[package]] name = "revm-interpreter" -version = "10.0.3" -source = "git+https://github.com/bnb-chain/revm?rev=0978bcb4189c547e7b71b2f11389ff2214fbe319#0978bcb4189c547e7b71b2f11389ff2214fbe319" +version = "13.0.0" +source = "git+https://github.com/bnb-chain/revm?rev=add28b2bb131aa4a37ab4daf817e46448158dfc8#add28b2bb131aa4a37ab4daf817e46448158dfc8" dependencies = [ "revm-primitives", "serde", @@ -10391,8 +10548,8 @@ dependencies = [ [[package]] name = "revm-precompile" -version = "11.0.3" -source = "git+https://github.com/bnb-chain/revm?rev=0978bcb4189c547e7b71b2f11389ff2214fbe319#0978bcb4189c547e7b71b2f11389ff2214fbe319" +version = "14.0.0" +source = "git+https://github.com/bnb-chain/revm?rev=add28b2bb131aa4a37ab4daf817e46448158dfc8#add28b2bb131aa4a37ab4daf817e46448158dfc8" dependencies = [ "alloy-rlp", "aurora-engine-modexp", @@ -10419,8 +10576,8 @@ dependencies = [ [[package]] name = "revm-primitives" -version = "10.0.0" -source = "git+https://github.com/bnb-chain/revm?rev=0978bcb4189c547e7b71b2f11389ff2214fbe319#0978bcb4189c547e7b71b2f11389ff2214fbe319" +version = "13.0.0" +source = "git+https://github.com/bnb-chain/revm?rev=add28b2bb131aa4a37ab4daf817e46448158dfc8#add28b2bb131aa4a37ab4daf817e46448158dfc8" dependencies = [ "alloy-eip2930", "alloy-eip7702", @@ -11037,7 +11194,7 @@ version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" dependencies = [ - "indexmap 2.5.0", + "indexmap 2.6.0", "itoa", "memchr", "ryu", @@ -11089,15 +11246,15 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.10.0" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9720086b3357bcb44fce40117d769a4d068c70ecfa190850a980a71755f66fcc" +checksum = "8e28bdad6db2b8340e449f7108f020b3b092e8583a9e3fb82713e1d4e71fe817" dependencies = [ "base64 0.22.1", "chrono", "hex 0.4.3", "indexmap 1.9.3", - "indexmap 2.5.0", + "indexmap 2.6.0", "serde", "serde_derive", "serde_json", @@ -11107,9 +11264,9 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.10.0" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f1abbfe725f27678f4663bcacb75a83e829fd464c25d78dd038a3a29e307cec" +checksum = "9d846214a9854ef724f3da161b426242d8de7c1fc7de2f89bb1efcb154dca79d" dependencies = [ "darling", "proc-macro2", @@ -11126,7 +11283,7 @@ dependencies = [ "futures", "log", "once_cell", - "parking_lot 0.12.3", + "parking_lot", "scc", "serial_test_derive", ] @@ -11297,6 +11454,10 @@ name = "similar" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" +dependencies = [ + "bstr 1.10.0", + "unicode-segmentation", +] [[package]] name = "similar-asserts" @@ -11305,6 +11466,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe85670573cd6f0fa97940f26e7e6601213c3b0555246c24234131f88c5709e" dependencies = [ "console", + "serde", "similar", ] @@ -11328,9 +11490,9 @@ checksum = 
"38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" [[package]] name = "sketches-ddsketch" -version = "0.2.2" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85636c14b73d81f541e525f585c0a2109e6744e1565b5c1668e31c70c10ed65c" +checksum = "c1e9a774a6c28142ac54bb25d25562e6bcf957493a184f15ad4eebccb23e410a" [[package]] name = "slab" @@ -11357,16 +11519,6 @@ version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" -[[package]] -name = "socket2" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "socket2" version = "0.5.7" @@ -11545,9 +11697,9 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.8.5" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ab661c8148c2261222a4d641ad5477fd4bea79406a99056096a0b41b35617a5" +checksum = "edf42e81491fb8871b74df3d222c64ae8cbc1269ea509fa768a3ed3e1b0ac8cb" dependencies = [ "paste", "proc-macro2", @@ -11653,7 +11805,7 @@ source = "git+https://github.com/bnb-chain/tendermint-rs-parlia?rev=8c21ccbd58a1 dependencies = [ "anomaly", "async-trait", - "bstr", + "bstr 0.2.17", "byteorder", "bytes 0.5.6", "chrono", @@ -11947,10 +12099,10 @@ dependencies = [ "bytes 1.7.2", "libc", "mio 1.0.2", - "parking_lot 0.12.3", + "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.7", + "socket2", "tokio-macros", "windows-sys 0.52.0", ] @@ -12066,7 +12218,7 @@ version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ - "indexmap 2.5.0", + "indexmap 2.6.0", "serde", "serde_spanned", "toml_datetime", @@ -12338,9 +12490,10 @@ dependencies = [ "ipconfig", "lru-cache", "once_cell", - "parking_lot 0.12.3", + "parking_lot", "rand 0.8.5", "resolv-conf", + "serde", "smallvec", "thiserror", "tokio", @@ -12398,6 +12551,18 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "uint" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "909988d098b2f738727b161a106cfc7cab00c539c2687a8836f8e565976fb53e" +dependencies = [ + "byteorder", + "crunchy", + "hex 0.4.3", + "static_assertions", +] + [[package]] name = "unarray" version = "0.1.4" @@ -12562,6 +12727,17 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "visibility" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.79", +] + [[package]] name = "wait-timeout" version = "0.2.0" @@ -12688,6 +12864,20 @@ dependencies = [ "web-sys", ] +[[package]] +name = "wasmtimer" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7ed9d8b15c7fb594d72bfb4b5a276f3d2029333cd93a932f376f5937f6f80ee" +dependencies = [ + "futures", + "js-sys", + "parking_lot", + "pin-utils", + "slab", + "wasm-bindgen", +] + [[package]] name = "web-sys" version = "0.3.70" diff --git a/Cargo.toml b/Cargo.toml index 0095c1512..376896195 100644 
--- a/Cargo.toml +++ b/Cargo.toml @@ -553,7 +553,7 @@ zstd = "0.13" metrics = "0.24.0" metrics-derive = "0.1" metrics-exporter-prometheus = { version = "0.16.0", default-features = false } -metrics-process = "2.1.0" +metrics-process = { version = "2.3.1"} metrics-util = { default-features = false, version = "0.18.0" } # proc-macros @@ -625,9 +625,9 @@ tikv-jemallocator = "0.6" tracy-client = "0.17.3" [patch.crates-io] -revm = { git = "https://github.com/bnb-chain/revm", rev = "4b61845b06f4d98df218dacd0416ce736e7b4484" } -revm-interpreter = { git = "https://github.com/bnb-chain/revm", rev = "4b61845b06f4d98df218dacd0416ce736e7b4484" } -revm-primitives = { git = "https://github.com/bnb-chain/revm", rev = "4b61845b06f4d98df218dacd0416ce736e7b4484" } +revm = { git = "https://github.com/bnb-chain/revm", rev = "add28b2bb131aa4a37ab4daf817e46448158dfc8" } +revm-interpreter = { git = "https://github.com/bnb-chain/revm", rev = "add28b2bb131aa4a37ab4daf817e46448158dfc8" } +revm-primitives = { git = "https://github.com/bnb-chain/revm", rev = "add28b2bb131aa4a37ab4daf817e46448158dfc8" } alloy-rpc-types-eth = { git = "https://github.com/bnb-chain/alloy", rev = "b6ca35f241857d220f530b2100581586bac05444" } alloy-consensus = { git = "https://github.com/bnb-chain/alloy", rev = "b6ca35f241857d220f530b2100581586bac05444" } alloy-eips = { git = "https://github.com/bnb-chain/alloy", rev = "b6ca35f241857d220f530b2100581586bac05444" } diff --git a/bin/reth/src/commands/debug_cmd/merkle.rs b/bin/reth/src/commands/debug_cmd/merkle.rs index bda60946d..782d05190 100644 --- a/bin/reth/src/commands/debug_cmd/merkle.rs +++ b/bin/reth/src/commands/debug_cmd/merkle.rs @@ -152,12 +152,13 @@ impl> Command { provider_rw.insert_block(sealed_block.clone())?; td += sealed_block.difficulty; - let mut executor = executor_provider.batch_executor(StateProviderDatabase::new( - LatestStateProviderRef::new( + let mut executor = executor_provider.batch_executor( + StateProviderDatabase::new(LatestStateProviderRef::new( provider_rw.tx_ref(), provider_rw.static_file_provider().clone(), - ), - )); + )), + None, + ); executor.execute_and_verify_one((&sealed_block.clone().unseal(), td, None).into())?; let execution_outcome = executor.finalize(); diff --git a/bsc.Dockerfile b/bsc.Dockerfile index ff5e380b9..659b14304 100644 --- a/bsc.Dockerfile +++ b/bsc.Dockerfile @@ -1,4 +1,4 @@ -FROM lukemathwalker/cargo-chef:latest-rust-1.81 AS chef +FROM lukemathwalker/cargo-chef:latest-rust-1.82 AS chef WORKDIR /app LABEL org.opencontainers.image.source=https://github.com/bnb-chain/reth diff --git a/crates/blockchain-tree/Cargo.toml b/crates/blockchain-tree/Cargo.toml index efc91a571..560c61664 100644 --- a/crates/blockchain-tree/Cargo.toml +++ b/crates/blockchain-tree/Cargo.toml @@ -86,5 +86,6 @@ optimism = [ "reth-provider/optimism", "reth-execution-types/optimism", "reth-db/optimism", - "reth-db-api/optimism" + "reth-db-api/optimism", + "reth-chainspec/optimism" ] diff --git a/crates/blockchain-tree/src/chain.rs b/crates/blockchain-tree/src/chain.rs index 149cdaaa4..6b017eb7d 100644 --- a/crates/blockchain-tree/src/chain.rs +++ b/crates/blockchain-tree/src/chain.rs @@ -326,7 +326,7 @@ impl AppendableChain { let parent_block = self.chain.tip(); let ancestor_blocks = - self.headers().map(|h| return (h.hash() as B256, h.header().clone())).collect(); + self.headers().map(|h| (h.hash() as B256, h.header().clone())).collect(); let bundle_state_data = BundleStateDataRef { execution_outcome: self.execution_outcome(), diff --git a/crates/bsc/bin/src/main.rs 
b/crates/bsc/bin/src/main.rs index 7ac657d04..99844d5bd 100644 --- a/crates/bsc/bin/src/main.rs +++ b/crates/bsc/bin/src/main.rs @@ -7,7 +7,7 @@ static ALLOC: reth_cli_util::allocator::Allocator = reth_cli_util::allocator::ne use clap::{Args, Parser}; use reth_bsc_cli::{BscChainSpecParser, Cli}; -use reth_bsc_node::{node::BSCAddOns, BscNode}; +use reth_bsc_node::{node::BscAddOns, BscNode}; use reth_node_builder::{ engine_tree_config::{ TreeConfig, DEFAULT_MEMORY_BLOCK_BUFFER_TARGET, DEFAULT_PERSISTENCE_THRESHOLD, @@ -76,7 +76,7 @@ fn main() { let handle = builder .with_types_and_provider::>() .with_components(BscNode::components()) - .with_add_ons(BSCAddOns) + .with_add_ons(BscAddOns::default()) .launch_with_fn(|builder| { let launcher = EngineNodeLauncher::new( builder.task_executor().clone(), diff --git a/crates/bsc/chainspec/Cargo.toml b/crates/bsc/chainspec/Cargo.toml index e267fb451..d1c72ac4a 100644 --- a/crates/bsc/chainspec/Cargo.toml +++ b/crates/bsc/chainspec/Cargo.toml @@ -23,6 +23,7 @@ reth-bsc-forks.workspace = true # ethereum alloy-chains.workspace = true +alloy-consensus.workspace = true alloy-genesis.workspace = true alloy-primitives.workspace = true @@ -40,4 +41,10 @@ op-alloy-rpc-types.workspace = true [features] default = ["std"] -std = [] \ No newline at end of file +std = [ + "alloy-consensus/std", + "alloy-genesis/std", + "alloy-primitives/std", + "once_cell/std", + "reth-primitives-traits/std" +] diff --git a/crates/bsc/chainspec/src/dev.rs b/crates/bsc/chainspec/src/dev.rs index e8f2d5a2f..e2da82f1a 100644 --- a/crates/bsc/chainspec/src/dev.rs +++ b/crates/bsc/chainspec/src/dev.rs @@ -6,11 +6,11 @@ use alloc::sync::Arc; use std::sync::Arc; use alloy_chains::Chain; +use alloy_consensus::constants::DEV_GENESIS_HASH; use alloy_primitives::U256; use once_cell::sync::Lazy; use reth_bsc_forks::DEV_HARDFORKS; use reth_chainspec::{once_cell_set, BaseFeeParams, BaseFeeParamsKind, ChainSpec}; -use reth_primitives_traits::constants::DEV_GENESIS_HASH; use crate::BscChainSpec; diff --git a/crates/bsc/chainspec/src/lib.rs b/crates/bsc/chainspec/src/lib.rs index c2f902329..3dd0ad0d3 100644 --- a/crates/bsc/chainspec/src/lib.rs +++ b/crates/bsc/chainspec/src/lib.rs @@ -65,8 +65,8 @@ impl EthChainSpec for BscChainSpec { self.inner.prune_delete_limit() } - fn display_hardforks(&self) -> impl Display { - self.inner.display_hardforks() + fn display_hardforks(&self) -> Box { + Box::new(self.inner.display_hardforks()) } fn genesis_header(&self) -> &Header { diff --git a/crates/bsc/cli/Cargo.toml b/crates/bsc/cli/Cargo.toml index 0bef763f1..7448fa96d 100644 --- a/crates/bsc/cli/Cargo.toml +++ b/crates/bsc/cli/Cargo.toml @@ -84,6 +84,7 @@ asm-keccak = [ # Jemalloc feature for vergen to generate correct env vars jemalloc = [ - "reth-node-core/jemalloc", - "reth-node-metrics/jemalloc" + "reth-node-core/jemalloc", + "reth-node-metrics/jemalloc", + "reth-cli-util/jemalloc" ] diff --git a/crates/bsc/consensus/Cargo.toml b/crates/bsc/consensus/Cargo.toml index 0f35b2554..87530a0da 100644 --- a/crates/bsc/consensus/Cargo.toml +++ b/crates/bsc/consensus/Cargo.toml @@ -31,6 +31,7 @@ reth-bsc-forks.workspace = true reth-bsc-primitives.workspace = true # eth +alloy-eips.workspace = true alloy-rlp.workspace = true alloy-consensus.workspace = true alloy-dyn-abi.workspace = true diff --git a/crates/bsc/consensus/src/constants.rs b/crates/bsc/consensus/src/constants.rs index 4fd2af718..2a5b808a0 100644 --- a/crates/bsc/consensus/src/constants.rs +++ b/crates/bsc/consensus/src/constants.rs @@ -1,5 +1,5 @@ 
+use alloy_consensus::constants::ETH_TO_WEI; use alloy_primitives::U256; -use reth_primitives::constants::ETH_TO_WEI; /// Fixed number of extra-data prefix bytes reserved for signer vanity pub const EXTRA_VANITY_LEN: usize = 32; diff --git a/crates/bsc/consensus/src/lib.rs b/crates/bsc/consensus/src/lib.rs index 184b29bf8..eb8492e18 100644 --- a/crates/bsc/consensus/src/lib.rs +++ b/crates/bsc/consensus/src/lib.rs @@ -13,6 +13,7 @@ use std::{ time::SystemTime, }; +use alloy_consensus::EMPTY_OMMER_ROOT_HASH; use alloy_json_abi::JsonAbi; use alloy_primitives::{Address, B256, U256}; use alloy_rlp::Decodable; @@ -30,7 +31,7 @@ use reth_consensus_common::validation::{ use reth_primitives::{ constants::EMPTY_MIX_HASH, parlia::{ParliaConfig, Snapshot, VoteAddress, VoteAttestation}, - BlockWithSenders, GotExpected, Header, SealedBlock, SealedHeader, EMPTY_OMMER_ROOT_HASH, + BlockWithSenders, GotExpected, Header, SealedBlock, SealedHeader, }; use secp256k1::{ ecdsa::{RecoverableSignature, RecoveryId}, diff --git a/crates/bsc/consensus/src/trace_helper.rs b/crates/bsc/consensus/src/trace_helper.rs index 3411c8598..d5e99be25 100644 --- a/crates/bsc/consensus/src/trace_helper.rs +++ b/crates/bsc/consensus/src/trace_helper.rs @@ -1,13 +1,14 @@ use std::sync::Arc; -use alloy_primitives::{address, Address, U256}; +use alloy_primitives::{address, map::HashMap, Address, U256}; use reth_bsc_forks::BscHardforks; use reth_bsc_primitives::system_contracts::get_upgrade_system_contracts; -use reth_primitives::revm_primitives::{db::DatabaseRef, BlockEnv}; -use reth_revm::db::{ - AccountState::{NotExisting, Touched}, - CacheDB, +use reth_primitives::revm_primitives::{ + db::{Database, DatabaseCommit}, + state::AccountStatus, + BlockEnv, }; +use reth_revm::primitives::Account; use crate::Parlia; @@ -24,9 +25,9 @@ impl BscTraceHelper { Self { parlia } } - pub fn upgrade_system_contracts( + pub fn upgrade_system_contracts( &self, - db: &mut CacheDB, + db: &mut DB, block_env: &BlockEnv, parent_timestamp: u64, before_tx: bool, @@ -43,38 +44,60 @@ impl BscTraceHelper { ) .map_err(|_| BscTraceHelperError::GetUpgradeSystemContractsFailed)?; + let mut changeset: HashMap<_, _> = Default::default(); for (k, v) in contracts { + let mut info = db + .basic(k) + .map_err(|_| BscTraceHelperError::LoadAccountFailed)? + .unwrap_or_default() + .clone(); + + info.code_hash = v.clone().unwrap().hash_slow(); + info.code = v; + let account = - db.load_account(k).map_err(|_| BscTraceHelperError::LoadAccountFailed).unwrap(); - if account.account_state == NotExisting { - account.account_state = Touched; - } - account.info.code_hash = v.clone().unwrap().hash_slow(); - account.info.code = v; + Account { info, status: AccountStatus::Touched, ..Default::default() }; + + changeset.insert(k, account); } + + db.commit(changeset); } Ok(()) } - pub fn add_block_reward( + pub fn add_block_reward( &self, - db: &mut CacheDB, + db: &mut DB, block_env: &BlockEnv, ) -> Result<(), BscTraceHelperError> { - let sys_acc = - db.load_account(SYSTEM_ADDRESS).map_err(|_| BscTraceHelperError::LoadAccountFailed)?; - let balance = sys_acc.info.balance; + let mut sys_info = db + .basic(SYSTEM_ADDRESS) + .map_err(|_| BscTraceHelperError::LoadAccountFailed)? 
+ .unwrap_or_default(); + let balance = sys_info.balance; if balance > U256::ZERO { - sys_acc.info.balance = U256::ZERO; + let mut changeset: HashMap<_, _> = Default::default(); - let val_acc = db - .load_account(block_env.coinbase) - .map_err(|_| BscTraceHelperError::LoadAccountFailed)?; - if val_acc.account_state == NotExisting { - val_acc.account_state = Touched; - } - val_acc.info.balance += balance; + sys_info.balance = U256::ZERO; + + let sys_account = + Account { info: sys_info, status: AccountStatus::Touched, ..Default::default() }; + changeset.insert(SYSTEM_ADDRESS, sys_account); + + let mut val_info = db + .basic(block_env.coinbase) + .map_err(|_| BscTraceHelperError::LoadAccountFailed)? + .unwrap_or_default(); + + val_info.balance += balance; + + let val_account = + Account { info: val_info, status: AccountStatus::Touched, ..Default::default() }; + changeset.insert(block_env.coinbase, val_account); + + db.commit(changeset); } Ok(()) diff --git a/crates/bsc/consensus/src/validation.rs b/crates/bsc/consensus/src/validation.rs index f2ecb1232..af2060a1c 100644 --- a/crates/bsc/consensus/src/validation.rs +++ b/crates/bsc/consensus/src/validation.rs @@ -1,8 +1,8 @@ +use alloy_eips::eip4844::{DATA_GAS_PER_BLOB, MAX_DATA_GAS_PER_BLOCK}; use alloy_primitives::{Bloom, B256}; use reth_chainspec::{EthChainSpec, EthereumHardfork, EthereumHardforks}; use reth_consensus::ConsensusError; use reth_primitives::{ - constants::eip4844::{DATA_GAS_PER_BLOB, MAX_DATA_GAS_PER_BLOCK}, gas_spent_by_transactions, BlockWithSenders, GotExpected, Header, Receipt, SealedHeader, }; diff --git a/crates/bsc/engine/Cargo.toml b/crates/bsc/engine/Cargo.toml index 0de980e56..6ef389a93 100644 --- a/crates/bsc/engine/Cargo.toml +++ b/crates/bsc/engine/Cargo.toml @@ -35,11 +35,13 @@ reth-bsc-evm.workspace = true reth-bsc-payload-builder.workspace = true # eth +alloy-eips.workspace = true alloy-primitives.workspace = true alloy-rlp.workspace = true alloy-rpc-types.workspace = true alloy-dyn-abi.workspace = true alloy-json-abi.workspace = true +alloy-rpc-types-engine.workspace = true # crypto secp256k1.workspace = true diff --git a/crates/bsc/engine/src/lib.rs b/crates/bsc/engine/src/lib.rs index 1ae0ca174..ccaef072a 100644 --- a/crates/bsc/engine/src/lib.rs +++ b/crates/bsc/engine/src/lib.rs @@ -5,16 +5,9 @@ // The `bsc` feature must be enabled to use this crate. 
#![cfg(feature = "bsc")] -use std::{ - clone::Clone, - collections::{HashMap, VecDeque}, - fmt::Debug, - marker::PhantomData, - sync::Arc, -}; - +use alloy_eips::BlockHashOrNumber; use alloy_primitives::{BlockHash, BlockNumber, B256}; -use alloy_rpc_types::engine::{ +pub use alloy_rpc_types_engine::{ ExecutionPayloadEnvelopeV2, ExecutionPayloadEnvelopeV3, ExecutionPayloadEnvelopeV4, ExecutionPayloadV1, PayloadAttributes, }; @@ -29,8 +22,15 @@ use reth_engine_primitives::{ }; use reth_network_api::events::EngineMessage; use reth_network_p2p::BlockClient; -use reth_primitives::{BlockBody, BlockHashOrNumber, SealedHeader}; +use reth_primitives::{BlockBody, SealedHeader}; use reth_provider::{BlockReaderIdExt, CanonChainTracker, ParliaProvider}; +use std::{ + clone::Clone, + collections::{HashMap, VecDeque}, + fmt::Debug, + marker::PhantomData, + sync::Arc, +}; use tokio::sync::{ mpsc::{UnboundedReceiver, UnboundedSender}, Mutex, RwLockReadGuard, RwLockWriteGuard, @@ -65,10 +65,10 @@ where + TryInto + TryInto, { - type ExecutionPayloadV1 = ExecutionPayloadV1; - type ExecutionPayloadV2 = ExecutionPayloadEnvelopeV2; - type ExecutionPayloadV3 = ExecutionPayloadEnvelopeV3; - type ExecutionPayloadV4 = ExecutionPayloadEnvelopeV4; + type ExecutionPayloadEnvelopeV1 = ExecutionPayloadV1; + type ExecutionPayloadEnvelopeV2 = ExecutionPayloadEnvelopeV2; + type ExecutionPayloadEnvelopeV3 = ExecutionPayloadEnvelopeV3; + type ExecutionPayloadEnvelopeV4 = ExecutionPayloadEnvelopeV4; } /// A default payload type for [`BscEngineTypes`] diff --git a/crates/bsc/engine/src/task.rs b/crates/bsc/engine/src/task.rs index da4a0e0a6..83bb0537d 100644 --- a/crates/bsc/engine/src/task.rs +++ b/crates/bsc/engine/src/task.rs @@ -5,6 +5,7 @@ use std::{ time::{SystemTime, UNIX_EPOCH}, }; +use alloy_eips::BlockHashOrNumber; use alloy_primitives::{Sealable, B256}; use alloy_rpc_types::{engine::ForkchoiceState, BlockId, RpcBlockHash}; use reth_beacon_consensus::{ @@ -13,13 +14,14 @@ use reth_beacon_consensus::{ use reth_bsc_consensus::Parlia; use reth_bsc_evm::SnapshotReader; use reth_chainspec::EthChainSpec; +use reth_engine_primitives::EngineApiMessageVersion; use reth_network_api::events::EngineMessage; use reth_network_p2p::{ headers::client::{HeadersClient, HeadersDirection, HeadersRequest}, priority::Priority, BlockClient, }; -use reth_primitives::{Block, BlockBody, BlockHashOrNumber, SealedHeader}; +use reth_primitives::{Block, BlockBody, SealedHeader}; use reth_provider::{BlockReaderIdExt, CanonChainTracker, ParliaProvider}; use tokio::{ signal, @@ -421,21 +423,20 @@ impl< } for header in disconnected_headers { storage.insert_new_header(header.clone()); - let result = - fork_choice_tx.send(ForkChoiceMessage::NewHeader(NewHeaderEvent { - header: header.clone(), - // if the pipeline sync is true, the fork choice will not use the safe - // and finalized hash. - // this can make Block Sync Engine to use pipeline sync mode. - pipeline_sync, - local_header: latest_unsafe_header.clone(), - })); - if result.is_err() { - error!(target: "consensus::parlia", "Failed to send new block event to - fork choice"); - } } drop(storage); + let result = fork_choice_tx.send(ForkChoiceMessage::NewHeader(NewHeaderEvent { + header: sealed_header.clone(), + // if the pipeline sync is true, the fork choice will not use the safe + // and finalized hash. + // this can make Block Sync Engine to use pipeline sync mode. 
+ pipeline_sync, + local_header: latest_unsafe_header.clone(), + })); + if result.is_err() { + error!(target: "consensus::parlia", "Failed to send new block event to + fork choice"); + } let result = chain_tracker_tx.send(ForkChoiceMessage::NewHeader(NewHeaderEvent { header: sealed_header.clone(), @@ -489,6 +490,7 @@ impl< state, payload_attrs: None, tx, + version: EngineApiMessageVersion::default(), }); debug!(target: "consensus::parlia", ?state, "Sent fork choice update"); diff --git a/crates/bsc/evm/Cargo.toml b/crates/bsc/evm/Cargo.toml index e9e357a93..3318c6343 100644 --- a/crates/bsc/evm/Cargo.toml +++ b/crates/bsc/evm/Cargo.toml @@ -27,11 +27,11 @@ reth-bsc-chainspec.workspace = true reth-bsc-forks.workspace = true reth-bsc-primitives.workspace = true -# Revm -revm-primitives.workspace = true - +# ethereum +alloy-consensus.workspace = true alloy-genesis.workspace = true alloy-primitives.workspace = true +revm-primitives.workspace = true # misc thiserror.workspace = true diff --git a/crates/bsc/evm/src/execute.rs b/crates/bsc/evm/src/execute.rs index c5228a1ed..6e2cf5d6a 100644 --- a/crates/bsc/evm/src/execute.rs +++ b/crates/bsc/evm/src/execute.rs @@ -3,6 +3,7 @@ use core::fmt::Display; use std::{collections::HashMap, num::NonZeroUsize, sync::Arc}; +use alloy_consensus::Transaction as _; use alloy_primitives::{Address, BlockNumber, Bytes, B256, U256}; use lazy_static::lazy_static; use lru::LruCache; @@ -141,11 +142,15 @@ where self.bsc_executor(db, prefetch_tx) } - fn batch_executor(&self, db: DB) -> Self::BatchExecutor + fn batch_executor( + &self, + db: DB, + prefetch_tx: Option>, + ) -> Self::BatchExecutor where DB: Database + Display>, { - let executor = self.bsc_executor(db, None); + let executor = self.bsc_executor(db, prefetch_tx); BscBatchExecutor { executor, batch_record: BlockBatchRecord::default(), @@ -244,7 +249,6 @@ where })?; if let Some(tx) = tx.as_ref() { - // let post_state = HashedPostState::from_state(state.clone()); tx.send(state.clone()).unwrap_or_else(|err| { debug!(target: "evm_executor", ?err, "Failed to send post state to prefetch channel") }); @@ -791,7 +795,7 @@ where Ok(BlockExecutionOutput { state: self.state.take_bundle(), receipts, - requests: Vec::default(), + requests: Default::default(), gas_used, snapshot, }) @@ -816,7 +820,7 @@ where Ok(BlockExecutionOutput { state: self.state.take_bundle(), receipts, - requests: Vec::default(), + requests: Default::default(), gas_used, snapshot, }) @@ -845,7 +849,7 @@ where Ok(BlockExecutionOutput { state: self.state.take_bundle(), receipts, - requests: Vec::default(), + requests: Default::default(), gas_used, snapshot, }) diff --git a/crates/bsc/evm/src/lib.rs b/crates/bsc/evm/src/lib.rs index ed2322109..0f81ec2ba 100644 --- a/crates/bsc/evm/src/lib.rs +++ b/crates/bsc/evm/src/lib.rs @@ -6,7 +6,7 @@ // The `bsc` feature must be enabled to use this crate. 
#![cfg(feature = "bsc")] -use std::sync::Arc; +use std::{convert::Infallible, sync::Arc}; use alloy_primitives::{Address, Bytes, U256}; use reth_bsc_chainspec::BscChainSpec; @@ -54,6 +54,7 @@ impl BscEvmConfig { impl ConfigureEvmEnv for BscEvmConfig { type Header = Header; + type Error = Infallible; // TODO: error type fn fill_tx_env(&self, tx_env: &mut TxEnv, transaction: &TransactionSigned, sender: Address) { transaction.fill_tx_env(tx_env, sender); @@ -101,7 +102,7 @@ impl ConfigureEvmEnv for BscEvmConfig { &self, parent: &Self::Header, attributes: NextBlockEnvAttributes, - ) -> (CfgEnvWithHandlerCfg, BlockEnv) { + ) -> Result<(CfgEnvWithHandlerCfg, BlockEnv), Self::Error> { // configure evm env based on parent block let cfg = CfgEnv::default().with_chain_id(self.chain_spec.chain().id()); @@ -112,14 +113,7 @@ impl ConfigureEvmEnv for BscEvmConfig { // cancun now, we need to set the excess blob gas to the default value let blob_excess_gas_and_price = parent .next_block_excess_blob_gas() - .or_else(|| { - if spec_id == SpecId::CANCUN { - // default excess blob gas is zero - Some(0) - } else { - None - } - }) + .or_else(|| (spec_id == SpecId::CANCUN).then_some(0)) .map(BlobExcessGasAndPrice::new); let mut basefee = parent.next_block_base_fee( @@ -156,7 +150,7 @@ impl ConfigureEvmEnv for BscEvmConfig { blob_excess_gas_and_price, }; - (CfgEnvWithHandlerCfg::new_with_spec_id(cfg, spec_id), block_env) + Ok((CfgEnvWithHandlerCfg::new_with_spec_id(cfg, spec_id), block_env)) } } diff --git a/crates/bsc/hardforks/Cargo.toml b/crates/bsc/hardforks/Cargo.toml index 5677e60b9..59ddcfed3 100644 --- a/crates/bsc/hardforks/Cargo.toml +++ b/crates/bsc/hardforks/Cargo.toml @@ -27,5 +27,13 @@ once_cell.workspace = true [features] default = ["std"] -std = [] -serde = ["dep:serde"] \ No newline at end of file +std = [ + "alloy-primitives/std", + "once_cell/std", + "serde?/std" +] +serde = [ + "dep:serde", + "alloy-chains/serde", + "alloy-primitives/serde" +] diff --git a/crates/bsc/node/Cargo.toml b/crates/bsc/node/Cargo.toml index ed883df01..3a3369410 100644 --- a/crates/bsc/node/Cargo.toml +++ b/crates/bsc/node/Cargo.toml @@ -26,6 +26,7 @@ reth-primitives.workspace = true reth-config.workspace = true reth-rpc.workspace = true reth-node-api.workspace = true +reth-trie-db.workspace = true # bsc-reth reth-bsc-chainspec.workspace = true @@ -62,4 +63,8 @@ bsc = [ "reth-bsc-payload-builder/bsc", "reth-bsc-engine/bsc", ] -asm-keccak = ["reth-primitives/asm-keccak"] \ No newline at end of file +asm-keccak = [ + "reth-primitives/asm-keccak", + "reth/asm-keccak", + "reth-node-core/asm-keccak" +] diff --git a/crates/bsc/node/src/node.rs b/crates/bsc/node/src/node.rs index d11cf346b..cf057f0da 100644 --- a/crates/bsc/node/src/node.rs +++ b/crates/bsc/node/src/node.rs @@ -8,18 +8,23 @@ use reth_bsc_consensus::Parlia; use reth_bsc_engine::{BscEngineTypes, BscEngineValidator}; use reth_bsc_evm::{BscEvmConfig, BscExecutorProvider}; use reth_bsc_payload_builder::{BscBuiltPayload, BscPayloadBuilderAttributes}; +use reth_ethereum_engine_primitives::EthPayloadAttributes; use reth_network::NetworkHandle; -use reth_node_api::{ConfigureEvm, EngineValidator, FullNodeComponents, NodeAddOns}; +use reth_node_api::{ + AddOnsContext, ConfigureEvm, EngineValidator, FullNodeComponents, NodePrimitives, + NodeTypesWithDB, +}; use reth_node_builder::{ components::{ - ComponentsBuilder, ConsensusBuilder, EngineValidatorBuilder, ExecutorBuilder, - NetworkBuilder, ParliaBuilder, PayloadServiceBuilder, PoolBuilder, + ComponentsBuilder, 
ConsensusBuilder, ExecutorBuilder, NetworkBuilder, ParliaBuilder, + PayloadServiceBuilder, PoolBuilder, }, node::{FullNodeTypes, NodeTypes, NodeTypesWithEngine}, - BuilderContext, Node, PayloadBuilderConfig, PayloadTypes, + rpc::{EngineValidatorBuilder, RpcAddOns}, + BuilderContext, Node, NodeAdapter, NodeComponentsBuilder, PayloadBuilderConfig, PayloadTypes, }; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; -use reth_primitives::Header; +use reth_primitives::{Block, Header}; use reth_provider::CanonStateSubscriptions; use reth_rpc::EthApi; use reth_tracing::tracing::{debug, info}; @@ -27,6 +32,15 @@ use reth_transaction_pool::{ blobstore::DiskFileBlobStore, EthTransactionPool, TransactionPool, TransactionValidationTaskExecutor, }; +use reth_trie_db::MerklePatriciaTrie; + +/// BSC primitive types. +#[derive(Debug)] +pub struct BscPrimitives; + +impl NodePrimitives for BscPrimitives { + type Block = Block; +} /// Type configuration for a regular BSC node. #[derive(Debug, Default, Clone, Copy)] @@ -42,12 +56,14 @@ impl BscNode { BscNetworkBuilder, BscExecutorBuilder, BscConsensusBuilder, - BscEngineValidatorBuilder, BscParliaBuilder, > where - Node: FullNodeTypes< - Types: NodeTypesWithEngine, + Node: FullNodeTypes>, + ::Engine: PayloadTypes< + BuiltPayload = BscBuiltPayload, + PayloadAttributes = EthPayloadAttributes, + PayloadBuilderAttributes = BscPayloadBuilderAttributes, >, { ComponentsBuilder::default() @@ -57,14 +73,14 @@ impl BscNode { .network(BscNetworkBuilder::default()) .executor(BscExecutorBuilder::default()) .consensus(BscConsensusBuilder::default()) - .engine_validator(BscEngineValidatorBuilder::default()) .parlia(BscParliaBuilder::default()) } } impl NodeTypes for BscNode { - type Primitives = (); + type Primitives = BscPrimitives; type ChainSpec = BscChainSpec; + type StateCommitment = MerklePatriciaTrie; } impl NodeTypesWithEngine for BscNode { @@ -72,16 +88,20 @@ impl NodeTypesWithEngine for BscNode { } /// Add-ons w.r.t. l1 bsc.
-#[derive(Debug, Clone, Default)] -pub struct BSCAddOns; - -impl NodeAddOns for BSCAddOns { - type EthApi = EthApi; -} +pub type BscAddOns = RpcAddOns< + N, + EthApi< + ::Provider, + ::Pool, + NetworkHandle, + ::Evm, + >, + BscEngineValidatorBuilder, +>; impl Node for BscNode where - Types: NodeTypesWithEngine, + Types: NodeTypesWithDB + NodeTypesWithEngine, N: FullNodeTypes, { type ComponentsBuilder = ComponentsBuilder< @@ -91,18 +111,19 @@ where BscNetworkBuilder, BscExecutorBuilder, BscConsensusBuilder, - BscEngineValidatorBuilder, BscParliaBuilder, >; - type AddOns = BSCAddOns; + type AddOns = BscAddOns< + NodeAdapter>::Components>, + >; fn components_builder(&self) -> Self::ComponentsBuilder { Self::components() } fn add_ons(&self) -> Self::AddOns { - BSCAddOns + BscAddOns::default() } } @@ -321,16 +342,15 @@ where #[non_exhaustive] pub struct BscEngineValidatorBuilder; -impl EngineValidatorBuilder for BscEngineValidatorBuilder +impl EngineValidatorBuilder for BscEngineValidatorBuilder where - Node: FullNodeTypes< - Types: NodeTypesWithEngine, - >, - BscEngineValidator: EngineValidator<::Engine>, + Types: NodeTypesWithEngine, + Node: FullNodeComponents, + BscEngineValidator: EngineValidator, { type Validator = BscEngineValidator; - async fn build_validator(self, _ctx: &BuilderContext) -> eyre::Result { + async fn build(self, _ctx: &AddOnsContext<'_, Node>) -> eyre::Result { Ok(BscEngineValidator {}) } } diff --git a/crates/bsc/payload/Cargo.toml b/crates/bsc/payload/Cargo.toml index 0b1712588..853a6724e 100644 --- a/crates/bsc/payload/Cargo.toml +++ b/crates/bsc/payload/Cargo.toml @@ -31,11 +31,12 @@ reth-chain-state.workspace = true reth-bsc-chainspec.workspace = true # ethereum -revm.workspace = true +alloy-consensus.workspace = true alloy-eips.workspace = true alloy-primitives.workspace = true -revm-primitives.workspace = true alloy-rpc-types-engine.workspace = true +revm.workspace = true +revm-primitives.workspace = true # misc tracing.workspace = true diff --git a/crates/bsc/payload/src/builder.rs b/crates/bsc/payload/src/builder.rs index a1b364fb5..a037fd38a 100644 --- a/crates/bsc/payload/src/builder.rs +++ b/crates/bsc/payload/src/builder.rs @@ -2,7 +2,8 @@ use std::sync::Arc; -use alloy_eips::eip4844::MAX_DATA_GAS_PER_BLOCK; +use alloy_consensus::EMPTY_OMMER_ROOT_HASH; +use alloy_eips::{eip4844::MAX_DATA_GAS_PER_BLOCK, merge::BEACON_NONCE}; use alloy_primitives::U256; use reth_basic_payload_builder::*; use reth_bsc_chainspec::BscChainSpec; @@ -12,10 +13,9 @@ use reth_evm::{system_calls::SystemCaller, ConfigureEvm, ConfigureEvmEnv, NextBl use reth_execution_types::ExecutionOutcome; use reth_payload_primitives::{PayloadBuilderAttributes, PayloadBuilderError}; use reth_primitives::{ - constants::BEACON_NONCE, proofs, revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg}, - Block, BlockBody, Header, Receipt, EMPTY_OMMER_ROOT_HASH, + Block, BlockBody, Header, Receipt, }; use reth_provider::StateProviderFactory; use reth_revm::database::StateProviderDatabase; @@ -56,7 +56,7 @@ where &self, config: &PayloadConfig, parent: &Header, - ) -> (CfgEnvWithHandlerCfg, BlockEnv) { + ) -> Result<(CfgEnvWithHandlerCfg, BlockEnv), EvmConfig::Error> { let next_attributes = NextBlockEnvAttributes { timestamp: config.attributes.timestamp(), suggested_fee_recipient: config.attributes.suggested_fee_recipient(), @@ -80,7 +80,9 @@ where &self, args: BuildArguments, ) -> Result, PayloadBuilderError> { - let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_block); + let 
(cfg_env, block_env) = self + .cfg_and_block_env(&args.config, &args.config.parent_header) + .map_err(PayloadBuilderError::other)?; bsc_payload(self.evm_config.clone(), args, cfg_env, block_env) } @@ -100,7 +102,9 @@ where cancel: Default::default(), best_payload: None, }; - let (cfg_env, block_env) = self.cfg_and_block_env(&args.config, &args.config.parent_block); + let (cfg_env, block_env) = self + .cfg_and_block_env(&args.config, &args.config.parent_header) + .map_err(PayloadBuilderError::other)?; bsc_payload(self.evm_config.clone(), args, cfg_env, block_env)? .into_payload() .ok_or_else(|| PayloadBuilderError::MissingPayload) @@ -130,13 +134,13 @@ where let BuildArguments { client, pool, mut cached_reads, config, cancel, best_payload } = args; let chain_spec = client.chain_spec(); - let state_provider = client.state_by_block_hash(config.parent_block.hash())?; + let state_provider = client.state_by_block_hash(config.parent_header.hash())?; let state = StateProviderDatabase::new(state_provider); let mut db = State::builder().with_database_ref(cached_reads.as_db(state)).with_bundle_update().build(); - let PayloadConfig { parent_block, extra_data, attributes } = config; + let PayloadConfig { parent_header, extra_data, attributes } = config; - debug!(target: "payload_builder", id=%attributes.payload_attributes.id, parent_hash = ?parent_block.hash(), parent_number = parent_block.number, "building new payload"); + debug!(target: "payload_builder", id=%attributes.payload_attributes.id, parent_hash = ?parent_header.hash(), parent_number = parent_header.number, "building new payload"); let mut cumulative_gas_used = 0; let mut sum_blob_gas_used = 0; let block_gas_limit: u64 = initialized_block_env.gas_limit.to::(); @@ -154,7 +158,7 @@ where let block_number = initialized_block_env.number.to::(); - let mut system_caller = SystemCaller::new(&evm_config, chain_spec.clone()); + let mut system_caller = SystemCaller::new(evm_config.clone(), chain_spec.clone()); // apply eip-4788 pre block contract call system_caller @@ -166,7 +170,7 @@ where ) .map_err(|err| { warn!(target: "payload_builder", - parent_hash=%parent_block.hash(), + parent_hash=%parent_header.hash(), %err, "failed to apply beacon root contract call for payload" ); @@ -178,10 +182,10 @@ where &mut db, &initialized_cfg, &initialized_block_env, - parent_block.hash(), + parent_header.hash(), ) .map_err(|err| { - warn!(target: "payload_builder", parent_hash=%parent_block.hash(), %err, "failed to update blockhashes for payload"); + warn!(target: "payload_builder", parent_hash=%parent_header.hash(), %err, "failed to update blockhashes for payload"); PayloadBuilderError::Internal(err.into()) })?; @@ -222,7 +226,7 @@ where let env = EnvWithHandlerCfg::new_with_cfg_env( initialized_cfg.clone(), initialized_block_env.clone(), - evm_config.tx_env(&tx), + evm_config.tx_env(tx.as_signed(), tx.signer()), ); // Configure the environment for the block. 
@@ -301,7 +305,7 @@ where } // calculate the requests and the requests root - let (requests, requests_root) = (None, None); + let (requests, requests_hash) = (None, None); let WithdrawalsOutcome { withdrawals_root, withdrawals } = commit_withdrawals( &mut db, @@ -318,7 +322,7 @@ where db.take_bundle(), vec![receipts.clone()].into(), block_number, - vec![requests.clone().unwrap_or_default()], + vec![requests.unwrap_or_default()], ); let receipts_root = execution_outcome.receipts_root_slow(block_number).expect("Number is in range"); @@ -330,7 +334,7 @@ where let state_provider = db.database.0.inner.borrow_mut(); state_provider.db.state_root_with_updates(hashed_state.clone()).inspect_err(|err| { warn!(target: "payload_builder", - parent_hash=%parent_block.hash(), + parent_hash=%parent_header.hash(), %err, "failed to calculate state root for payload" ); @@ -352,9 +356,9 @@ where executed_txs.iter().filter(|tx| tx.is_eip4844()).map(|tx| tx.hash).collect(), )?; - excess_blob_gas = if chain_spec.is_cancun_active_at_timestamp(parent_block.timestamp) { - let parent_excess_blob_gas = parent_block.excess_blob_gas.unwrap_or_default(); - let parent_blob_gas_used = parent_block.blob_gas_used.unwrap_or_default(); + excess_blob_gas = if chain_spec.is_cancun_active_at_timestamp(parent_header.timestamp) { + let parent_excess_blob_gas = parent_header.excess_blob_gas.unwrap_or_default(); + let parent_blob_gas_used = parent_header.blob_gas_used.unwrap_or_default(); Some(calc_excess_blob_gas(parent_excess_blob_gas, parent_blob_gas_used)) } else { // for the first post-fork block, both parent.blob_gas_used and @@ -366,7 +370,7 @@ where } let header = Header { - parent_hash: parent_block.hash(), + parent_hash: parent_header.hash(), ommers_hash: EMPTY_OMMER_ROOT_HASH, beneficiary: initialized_block_env.coinbase, state_root, @@ -378,7 +382,7 @@ where mix_hash: attributes.payload_attributes.prev_randao, nonce: BEACON_NONCE.into(), base_fee_per_gas: Some(base_fee), - number: parent_block.number + 1, + number: parent_header.number + 1, gas_limit: block_gas_limit, difficulty: U256::ZERO, gas_used: cumulative_gas_used, @@ -386,19 +390,13 @@ where parent_beacon_block_root: attributes.payload_attributes.parent_beacon_block_root, blob_gas_used: blob_gas_used.map(Into::into), excess_blob_gas: excess_blob_gas.map(Into::into), - requests_root, + requests_hash, }; // seal the block let block = Block { header, - body: BlockBody { - transactions: executed_txs, - ommers: vec![], - withdrawals, - sidecars: None, - requests, - }, + body: BlockBody { transactions: executed_txs, ommers: vec![], withdrawals, sidecars: None }, }; let sealed_block = block.seal_slow(); diff --git a/crates/bsc/payload/src/payload.rs b/crates/bsc/payload/src/payload.rs index 457d67c4d..103d7d992 100644 --- a/crates/bsc/payload/src/payload.rs +++ b/crates/bsc/payload/src/payload.rs @@ -2,8 +2,7 @@ //! 
Bsc builder support -use std::convert::Infallible; - +use alloy_eips::eip7685::Requests; use alloy_primitives::{Address, B256, U256}; use alloy_rpc_types_engine::{ ExecutionPayloadEnvelopeV2, ExecutionPayloadEnvelopeV3, ExecutionPayloadEnvelopeV4, @@ -14,9 +13,9 @@ use reth_payload_builder::EthPayloadBuilderAttributes; use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes}; use reth_primitives::{BlobTransactionSidecar, SealedBlock, Withdrawals}; use reth_rpc_types_compat::engine::payload::{ - block_to_payload_v1, block_to_payload_v3, block_to_payload_v4, - convert_block_to_payload_field_v2, + block_to_payload_v1, block_to_payload_v3, convert_block_to_payload_field_v2, }; +use std::{convert::Infallible, sync::Arc}; /// Bsc Payload Builder Attributes #[derive(Debug, Clone, PartialEq, Eq)] @@ -32,8 +31,14 @@ impl PayloadBuilderAttributes for BscPayloadBuilderAttributes { /// Creates a new payload builder for the given parent block and the attributes. /// /// Derives the unique [`PayloadId`] for the given parent and attributes - fn try_new(parent: B256, attributes: PayloadAttributes) -> Result { - Ok(Self { payload_attributes: EthPayloadBuilderAttributes::try_new(parent, attributes)? }) + fn try_new( + parent: B256, + attributes: PayloadAttributes, + version: u8, + ) -> Result { + Ok(Self { + payload_attributes: EthPayloadBuilderAttributes::try_new(parent, attributes, version)?, + }) } fn payload_id(&self) -> PayloadId { @@ -110,8 +115,8 @@ impl BscBuiltPayload { } /// Adds sidecars to the payload. - pub fn extend_sidecars(&mut self, sidecars: Vec) { - self.sidecars.extend(sidecars) + pub fn extend_sidecars(&mut self, sidecars: Vec>) { + self.sidecars.extend(sidecars.into_iter().map(|arc| (*arc).clone())); } } @@ -127,6 +132,10 @@ impl BuiltPayload for BscBuiltPayload { fn executed_block(&self) -> Option { self.executed_block.clone() } + + fn requests(&self) -> Option { + None + } } impl BuiltPayload for &BscBuiltPayload { @@ -141,6 +150,10 @@ impl BuiltPayload for &BscBuiltPayload { fn executed_block(&self) -> Option { self.executed_block.clone() } + + fn requests(&self) -> Option { + None + } } // V1 engine_getPayloadV1 response @@ -185,7 +198,7 @@ impl From for ExecutionPayloadEnvelopeV4 { let BscBuiltPayload { block, fees, sidecars, .. 
} = value; Self { - execution_payload: block_to_payload_v4(block), + execution_payload: block_to_payload_v3(block), block_value: fees, // From the engine API spec: // @@ -197,6 +210,7 @@ impl From for ExecutionPayloadEnvelopeV4 { // should_override_builder: false, blobs_bundle: sidecars.into_iter().map(Into::into).collect::>().into(), + execution_requests: Default::default(), } } } diff --git a/crates/bsc/primitives/Cargo.toml b/crates/bsc/primitives/Cargo.toml index c486ecda9..8b5ee0012 100644 --- a/crates/bsc/primitives/Cargo.toml +++ b/crates/bsc/primitives/Cargo.toml @@ -20,6 +20,7 @@ revm-primitives.workspace = true alloy-chains.workspace = true alloy-primitives.workspace = true +alloy-consensus.workspace = true reth-bsc-chainspec.workspace = true reth-bsc-forks.workspace = true diff --git a/crates/bsc/primitives/src/system_contracts/mod.rs b/crates/bsc/primitives/src/system_contracts/mod.rs index 8feb8f103..570dabe4b 100644 --- a/crates/bsc/primitives/src/system_contracts/mod.rs +++ b/crates/bsc/primitives/src/system_contracts/mod.rs @@ -3,6 +3,7 @@ use std::collections::HashMap; use alloy_chains::Chain; +use alloy_consensus::Transaction; use alloy_primitives::{hex, Address, BlockNumber}; use include_dir::{include_dir, Dir}; use lazy_static::lazy_static; diff --git a/crates/chain-state/Cargo.toml b/crates/chain-state/Cargo.toml index 93e8fc3b4..7b90e9a02 100644 --- a/crates/chain-state/Cargo.toml +++ b/crates/chain-state/Cargo.toml @@ -71,5 +71,7 @@ test-utils = [ "reth-chainspec/test-utils", "reth-primitives/test-utils", "reth-trie/test-utils", - "revm?/test-utils" + "revm?/test-utils", + "reth-provider/test-utils", + "reth-revm/test-utils" ] diff --git a/crates/chainspec/src/api.rs b/crates/chainspec/src/api.rs index 9f817b0e2..9de6ace49 100644 --- a/crates/chainspec/src/api.rs +++ b/crates/chainspec/src/api.rs @@ -63,9 +63,7 @@ pub trait EthChainSpec: Send + Sync + Unpin + Debug { } /// Returns `true` if this chain contains Binance Smart Chain configuration. 
- fn is_bsc(&self) -> bool { - self.chain().is_bsc() - } + fn is_bsc(&self) -> bool; } impl EthChainSpec for ChainSpec { diff --git a/crates/config/src/config.rs b/crates/config/src/config.rs index 392eed12c..c53ad6bb9 100644 --- a/crates/config/src/config.rs +++ b/crates/config/src/config.rs @@ -312,7 +312,7 @@ pub struct MerkleConfig { impl Default for MerkleConfig { fn default() -> Self { - Self { clean_threshold: 5_000 } + Self { clean_threshold: 50_000 } } } @@ -393,7 +393,11 @@ pub struct PruneConfig { impl Default for PruneConfig { fn default() -> Self { - Self { block_interval: DEFAULT_BLOCK_INTERVAL, recent_sidecars_kept_blocks: 0, segments: PruneModes::none() } + Self { + block_interval: DEFAULT_BLOCK_INTERVAL, + recent_sidecars_kept_blocks: 0, + segments: PruneModes::none(), + } } } @@ -409,6 +413,7 @@ impl PruneConfig { let Some(other) = other else { return }; let Self { block_interval, + recent_sidecars_kept_blocks, segments: PruneModes { sender_recovery, @@ -425,6 +430,11 @@ impl PruneConfig { self.block_interval = block_interval; } + // Merge recent_sidecars_kept_blocks, only update if it's the default number of blocks + if self.recent_sidecars_kept_blocks == 0 { + self.recent_sidecars_kept_blocks = recent_sidecars_kept_blocks; + } + // Merge the various segment prune modes self.segments.sender_recovery = self.segments.sender_recovery.or(sender_recovery); self.segments.transaction_lookup = self.segments.transaction_lookup.or(transaction_lookup); @@ -1019,9 +1029,9 @@ connect_trusted_nodes_only = true assert_eq!(conf.peers.trusted_nodes.len(), 2); let expected_enodes = vec![ - "enode://0401e494dbd0c84c5c0f72adac5985d2f2525e08b68d448958aae218f5ac8198a80d1498e0ebec2ce38b1b18d6750f6e61a56b4614c5a6c6cf0981c39aed47dc@34.159.32.127:30303", - "enode://e9675164b5e17b9d9edf0cc2bd79e6b6f487200c74d1331c220abb5b8ee80c2eefbf18213989585e9d0960683e819542e11d4eefb5f2b4019e1e49f9fd8fff18@berav2-bootnode.staketab.org:30303", - ]; + "enode://0401e494dbd0c84c5c0f72adac5985d2f2525e08b68d448958aae218f5ac8198a80d1498e0ebec2ce38b1b18d6750f6e61a56b4614c5a6c6cf0981c39aed47dc@34.159.32.127:30303", + "enode://e9675164b5e17b9d9edf0cc2bd79e6b6f487200c74d1331c220abb5b8ee80c2eefbf18213989585e9d0960683e819542e11d4eefb5f2b4019e1e49f9fd8fff18@berav2-bootnode.staketab.org:30303", + ]; for enode in expected_enodes { let node = TrustedPeer::from_str(enode).unwrap(); diff --git a/crates/consensus/auto-seal/Cargo.toml b/crates/consensus/auto-seal/Cargo.toml index f2bfb43bc..0227f9365 100644 --- a/crates/consensus/auto-seal/Cargo.toml +++ b/crates/consensus/auto-seal/Cargo.toml @@ -53,5 +53,6 @@ optimism = [ "reth-execution-types/optimism", "reth-optimism-consensus?/optimism", "reth-primitives/optimism", - "revm-primitives/optimism" + "revm-primitives/optimism", + "reth-chainspec/optimism" ] diff --git a/crates/consensus/auto-seal/src/lib.rs b/crates/consensus/auto-seal/src/lib.rs index d2bf7f049..ce9204565 100644 --- a/crates/consensus/auto-seal/src/lib.rs +++ b/crates/consensus/auto-seal/src/lib.rs @@ -368,7 +368,6 @@ impl StorageInner { ommers: ommers.clone(), withdrawals: withdrawals.clone(), sidecars: None, - requests: requests.clone(), }, } .with_recovered_senders() @@ -392,13 +391,8 @@ impl StorageInner { // root here let Block { mut header, body, .. 
} = block.block; - let body = BlockBody { - transactions: body.transactions, - ommers, - withdrawals, - sidecars: None, - requests, - }; + let body = + BlockBody { transactions: body.transactions, ommers, withdrawals, sidecars: None }; trace!(target: "consensus::auto", ?execution_outcome, ?header, ?body, "executed block, calculating state root and completing header"); // now we need to update certain header fields with the results of the execution diff --git a/crates/consensus/beacon/Cargo.toml b/crates/consensus/beacon/Cargo.toml index 4d7c6bb4c..54e801006 100644 --- a/crates/consensus/beacon/Cargo.toml +++ b/crates/consensus/beacon/Cargo.toml @@ -78,11 +78,12 @@ assert_matches.workspace = true [features] optimism = [ - "reth-chainspec", - "reth-primitives/optimism", - "reth-provider/optimism", - "reth-blockchain-tree/optimism", - "reth-db/optimism", - "reth-db-api/optimism", + "reth-chainspec", + "reth-primitives/optimism", + "reth-provider/optimism", + "reth-blockchain-tree/optimism", + "reth-db/optimism", + "reth-db-api/optimism", + "reth-chainspec?/optimism" ] -bsc = [] \ No newline at end of file +bsc = [] diff --git a/crates/consensus/common/src/validation.rs b/crates/consensus/common/src/validation.rs index f28f1c54c..bba40df7e 100644 --- a/crates/consensus/common/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -454,12 +454,7 @@ mod tests { ( SealedBlock { header: SealedHeader::new(header, seal), - body: BlockBody { - transactions, - ommers, - withdrawals: None, - sidecars: None, - }, + body: BlockBody { transactions, ommers, withdrawals: None, sidecars: None }, }, parent, ) diff --git a/crates/e2e-test-utils/Cargo.toml b/crates/e2e-test-utils/Cargo.toml index 80f5e5cba..9010c7990 100644 --- a/crates/e2e-test-utils/Cargo.toml +++ b/crates/e2e-test-utils/Cargo.toml @@ -47,5 +47,6 @@ alloy-consensus = { workspace = true, features = ["kzg"] } tracing.workspace = true derive_more.workspace = true -[features] -bsc = ["reth-rpc/bsc"] +# TODO: fix this +#[features] +#bsc = ["reth-rpc/bsc"] diff --git a/crates/engine/local/Cargo.toml b/crates/engine/local/Cargo.toml index 2ab448e3b..fcfeabac1 100644 --- a/crates/engine/local/Cargo.toml +++ b/crates/engine/local/Cargo.toml @@ -47,7 +47,8 @@ workspace = true [features] optimism = [ - "op-alloy-rpc-types-engine", - "reth-beacon-consensus/optimism", - "reth-provider/optimism", + "op-alloy-rpc-types-engine", + "reth-beacon-consensus/optimism", + "reth-provider/optimism", + "reth-chainspec/optimism" ] diff --git a/crates/engine/local/src/service.rs b/crates/engine/local/src/service.rs index 2d39b5907..02599e227 100644 --- a/crates/engine/local/src/service.rs +++ b/crates/engine/local/src/service.rs @@ -76,6 +76,9 @@ where from_engine: EngineMessageStream, mode: MiningMode, payload_attributes_builder: B, + skip_state_root_validation: bool, + enable_prefetch: bool, + enable_execution_cache: bool, ) -> Self where B: PayloadAttributesBuilder<::PayloadAttributes>, @@ -85,7 +88,7 @@ where if chain_spec.is_optimism() { EngineApiKind::OpStack } else { EngineApiKind::Ethereum }; let persistence_handle = - PersistenceHandle::spawn_service(provider, pruner, sync_metrics_tx, false); + PersistenceHandle::spawn_service(provider, pruner, sync_metrics_tx, false); let payload_validator = ExecutionPayloadValidator::new(chain_spec); let canonical_in_memory_state = blockchain_db.canonical_in_memory_state(); @@ -101,6 +104,9 @@ where tree_config, invalid_block_hook, engine_kind, + skip_state_root_validation, + enable_prefetch, + 
enable_execution_cache, ); let handler = EngineApiRequestHandler::new(to_tree_tx, from_tree); diff --git a/crates/engine/service/src/service.rs b/crates/engine/service/src/service.rs index 4eeab04a1..0d5c1848d 100644 --- a/crates/engine/service/src/service.rs +++ b/crates/engine/service/src/service.rs @@ -163,6 +163,7 @@ mod tests { use reth_primitives::SealedHeader; use reth_provider::{ providers::BlockchainProvider2, test_utils::create_test_provider_factory_with_chain_spec, + StaticFileProviderFactory, }; use reth_prune::Pruner; use reth_tasks::TokioTaskExecutor; diff --git a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs index 23e262bad..bf7793537 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -154,6 +154,7 @@ where &mut self, block: &BlockWithSenders, total_difficulty: U256, + tx: Option>, ) -> Result { let env = self.evm_env_for_block(&block.header, total_difficulty); let mut evm = self.evm_config.evm_with_env(&mut self.state, env); @@ -186,9 +187,10 @@ where self.system_caller.on_state(&result_and_state); let ResultAndState { result, state } = result_and_state; + // Send post state to prefetch channel. if let Some(tx) = tx.as_ref() { tx.send(state.clone()).unwrap_or_else(|err| { - debug!(target: "evm_executor", ?err, "Failed to send post state to prefetch channel") + debug!(target: "evm_executor", ?err, "Failed to send post state to prefetch channel") }); } @@ -390,7 +393,7 @@ mod tests { let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db), None); // attempt to execute a block without parent beacon block root, expect err let err = executor @@ -442,6 +445,7 @@ mod tests { senders: vec![], }, U256::ZERO, + None, ) .into(), ) @@ -497,7 +501,7 @@ mod tests { // attempt to execute an empty block with parent beacon block root, this should not fail provider - .batch_executor(StateProviderDatabase::new(&db)) + .batch_executor(StateProviderDatabase::new(&db), None) .execute_and_verify_one( ( &BlockWithSenders { @@ -550,7 +554,7 @@ mod tests { ..Header::default() }; - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db), None); // attempt to execute an empty block with parent beacon block root, this should not fail executor @@ -597,7 +601,7 @@ mod tests { let mut header = chain_spec.genesis_header().clone(); let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db), None); // attempt to execute the genesis block with non-zero parent beacon block root, expect err header.parent_beacon_block_root = Some(B256::with_last_byte(0x69)); @@ -675,7 +679,7 @@ mod tests { let provider = executor_provider(chain_spec); // execute header - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db), None); // Now execute a block with the fixed header, ensure that it does not fail executor @@ -749,7 +753,7 @@ mod tests { ); let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db), None); // construct the header for
block one let header = Header { timestamp: 1, number: 1, ..Header::default() }; @@ -796,7 +800,7 @@ mod tests { let header = chain_spec.genesis_header().clone(); let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db), None); // attempt to execute genesis block, this should not fail executor @@ -847,7 +851,7 @@ mod tests { ..Header::default() }; let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db), None); // attempt to execute the fork activation block, this should not fail executor @@ -896,7 +900,7 @@ mod tests { ); let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db), None); let header = Header { parent_hash: B256::random(), @@ -953,7 +957,7 @@ mod tests { let header_hash = header.hash_slow(); let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db), None); // attempt to execute the genesis block, this should not fail executor diff --git a/crates/ethereum/node/src/node.rs b/crates/ethereum/node/src/node.rs index 5ee274e0f..05a4fcf6b 100644 --- a/crates/ethereum/node/src/node.rs +++ b/crates/ethereum/node/src/node.rs @@ -19,8 +19,8 @@ use reth_node_api::{ }; use reth_node_builder::{ components::{ - ComponentsBuilder, ConsensusBuilder, ExecutorBuilder, NetworkBuilder, - PayloadServiceBuilder, PoolBuilder, ParliaBuilder, + ComponentsBuilder, ConsensusBuilder, ExecutorBuilder, NetworkBuilder, ParliaBuilder, + PayloadServiceBuilder, PoolBuilder, }, node::{FullNodeTypes, NodeTypes, NodeTypesWithEngine}, rpc::{EngineValidatorBuilder, RpcAddOns}, diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 304790cf2..65106b00d 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -436,12 +436,7 @@ where // seal the block let block = Block { header, - body: BlockBody { - transactions: executed_txs, - ommers: vec![], - withdrawals, - sidecars: None, - }, + body: BlockBody { transactions: executed_txs, ommers: vec![], withdrawals, sidecars: None }, }; let sealed_block = block.seal_slow(); diff --git a/crates/evm/execution-types/src/execution_outcome.rs b/crates/evm/execution-types/src/execution_outcome.rs index 550dfd332..e73fe6aa0 100644 --- a/crates/evm/execution-types/src/execution_outcome.rs +++ b/crates/evm/execution-types/src/execution_outcome.rs @@ -1,7 +1,9 @@ use crate::BlockExecutionOutput; use alloy_eips::eip7685::Requests; use alloy_primitives::{Address, BlockNumber, Bloom, Log, B256, U256}; -use reth_primitives::{logs_bloom, parlia::Snapshot, Account, Bytecode, Receipt, Receipts, StorageEntry}; +use reth_primitives::{ + logs_bloom, parlia::Snapshot, Account, Bytecode, Receipt, Receipts, StorageEntry, +}; use reth_trie::HashedPostState; use revm::{ db::{states::BundleState, BundleAccount}, diff --git a/crates/evm/src/either.rs b/crates/evm/src/either.rs index 493aa760a..0f19059ff 100644 --- a/crates/evm/src/either.rs +++ b/crates/evm/src/either.rs @@ -44,13 +44,17 @@ where } } - fn batch_executor(&self, db: DB) -> Self::BatchExecutor + fn 
batch_executor( + &self, + db: DB, + prefetch_tx: Option>, + ) -> Self::BatchExecutor where DB: Database + Display>, { match self { - Self::Left(a) => Either::Left(a.batch_executor(db)), - Self::Right(b) => Either::Right(b.batch_executor(db)), + Self::Left(a) => Either::Left(a.batch_executor(db, prefetch_tx)), + Self::Right(b) => Either::Right(b.batch_executor(db, prefetch_tx)), } } } diff --git a/crates/evm/src/execute.rs b/crates/evm/src/execute.rs index 12fda304a..ffda21e58 100644 --- a/crates/evm/src/execute.rs +++ b/crates/evm/src/execute.rs @@ -1,23 +1,25 @@ //! Traits for execution. // Re-export execution types -pub use reth_execution_errors::{ - BlockExecutionError, BlockValidationError, InternalBlockExecutionError, -}; -pub use reth_execution_types::{BlockExecutionInput, BlockExecutionOutput, ExecutionOutcome}; -pub use reth_storage_errors::provider::ProviderError; -use revm::db::states::bundle_state::BundleRetention; use crate::system_calls::OnStateHook; use alloc::{boxed::Box, vec::Vec}; use alloy_eips::eip7685::Requests; use alloy_primitives::BlockNumber; use core::{fmt::Display, marker::PhantomData}; use reth_consensus::ConsensusError; +pub use reth_execution_errors::{ + BlockExecutionError, BlockValidationError, InternalBlockExecutionError, +}; +pub use reth_execution_types::{BlockExecutionInput, BlockExecutionOutput, ExecutionOutcome}; use reth_primitives::{BlockWithSenders, Header, Receipt}; use reth_prune_types::PruneModes; use reth_revm::batch::BlockBatchRecord; -use revm::{db::BundleState, State}; -use revm_primitives::{db::Database, U256, EvmState}; +pub use reth_storage_errors::provider::ProviderError; +use revm::{ + db::{states::bundle_state::BundleRetention, BundleState}, + State, +}; +use revm_primitives::{db::Database, EvmState, U256}; use tokio::sync::mpsc::UnboundedSender; /// A general purpose executor trait that executes an input (e.g. block) and produces an output @@ -157,7 +159,7 @@ pub trait BlockExecutorProvider: Send + Sync + Clone + Unpin + 'static { fn executor( &self, db: DB, - prefetch_rx: Option>, + prefetch_tx: Option>, ) -> Self::Executor where DB: Database + Display>; @@ -166,7 +168,11 @@ pub trait BlockExecutorProvider: Send + Sync + Clone + Unpin + 'static { /// /// Batch executor is used to execute multiple blocks in sequence and keep track of the state /// during historical sync which involves executing multiple blocks in sequence. - fn batch_executor(&self, db: DB) -> Self::BatchExecutor + fn batch_executor( + &self, + db: DB, + prefetch_tx: Option>, + ) -> Self::BatchExecutor where DB: Database + Display>; } @@ -200,6 +206,7 @@ where &mut self, block: &BlockWithSenders, total_difficulty: U256, + tx: Option>, ) -> Result; /// Applies any necessary changes after executing the block's transactions. 
@@ -282,21 +289,29 @@ where type BatchExecutor + Display>> = BasicBatchExecutor, DB>; - fn executor(&self, db: DB) -> Self::Executor + fn executor( + &self, + db: DB, + prefetch_tx: Option>, + ) -> Self::Executor where DB: Database + Display>, { let strategy = self.strategy_factory.create_strategy(db); - BasicBlockExecutor::new(strategy) + BasicBlockExecutor::new(strategy, prefetch_tx) } - fn batch_executor(&self, db: DB) -> Self::BatchExecutor + fn batch_executor( + &self, + db: DB, + prefetch_tx: Option>, + ) -> Self::BatchExecutor where DB: Database + Display>, { let strategy = self.strategy_factory.create_strategy(db); let batch_record = BlockBatchRecord::default(); - BasicBatchExecutor::new(strategy, batch_record) + BasicBatchExecutor::new(strategy, batch_record, prefetch_tx) } } @@ -310,6 +325,8 @@ where { /// Block execution strategy. pub(crate) strategy: S, + /// Channel to send prefetch requests. + pub(crate) prefetch_tx: Option>, _phantom: PhantomData, } @@ -319,31 +336,32 @@ where DB: Database, { /// Creates a new `BasicBlockExecutor` with the given strategy. - pub const fn new(strategy: S) -> Self { - Self { strategy, _phantom: PhantomData } + pub const fn new(strategy: S, prefetch_tx: Option>) -> Self { + Self { strategy, prefetch_tx, _phantom: PhantomData } } } +// TODO: FIX BasicBlockExecutor and remove this comment impl Executor for BasicBlockExecutor where S: BlockExecutionStrategy, DB: Database + Display>, { - type Input<'a> = BlockExecutionInput<'a, BlockWithSenders>; + type Input<'a> = BlockExecutionInput<'a, BlockWithSenders, Header>; type Output = BlockExecutionOutput; type Error = S::Error; fn execute(mut self, input: Self::Input<'_>) -> Result { - let BlockExecutionInput { block, total_difficulty } = input; + let BlockExecutionInput { block, total_difficulty, .. } = input; self.strategy.apply_pre_execution_changes(block, total_difficulty)?; let ExecuteOutput { receipts, gas_used } = - self.strategy.execute_transactions(block, total_difficulty)?; + self.strategy.execute_transactions(block, total_difficulty, self.prefetch_tx)?; let requests = self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; let state = self.strategy.finish(); - Ok(BlockExecutionOutput { state, receipts, requests, gas_used }) + Ok(BlockExecutionOutput { state, receipts, requests, gas_used, snapshot: None }) } fn execute_with_state_closure( @@ -354,11 +372,11 @@ where where F: FnMut(&State), { - let BlockExecutionInput { block, total_difficulty } = input; + let BlockExecutionInput { block, total_difficulty, .. } = input; self.strategy.apply_pre_execution_changes(block, total_difficulty)?; let ExecuteOutput { receipts, gas_used } = - self.strategy.execute_transactions(block, total_difficulty)?; + self.strategy.execute_transactions(block, total_difficulty, self.prefetch_tx)?; let requests = self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; @@ -366,7 +384,7 @@ where let state = self.strategy.finish(); - Ok(BlockExecutionOutput { state, receipts, requests, gas_used }) + Ok(BlockExecutionOutput { state, receipts, requests, gas_used, snapshot: None }) } fn execute_with_state_hook( @@ -377,19 +395,19 @@ where where H: OnStateHook + 'static, { - let BlockExecutionInput { block, total_difficulty } = input; + let BlockExecutionInput { block, total_difficulty, .. 
} = input; self.strategy.with_state_hook(Some(Box::new(state_hook))); self.strategy.apply_pre_execution_changes(block, total_difficulty)?; let ExecuteOutput { receipts, gas_used } = - self.strategy.execute_transactions(block, total_difficulty)?; + self.strategy.execute_transactions(block, total_difficulty, self.prefetch_tx)?; let requests = self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; let state = self.strategy.finish(); - Ok(BlockExecutionOutput { state, receipts, requests, gas_used }) + Ok(BlockExecutionOutput { state, receipts, requests, gas_used, snapshot: None }) } } @@ -405,6 +423,8 @@ where pub(crate) strategy: S, /// Keeps track of batch execution receipts and requests. pub(crate) batch_record: BlockBatchRecord, + /// Channel to send prefetch requests. + pub(crate) prefetch_tx: Option>, _phantom: PhantomData, } @@ -414,8 +434,12 @@ where DB: Database, { /// Creates a new `BasicBatchExecutor` with the given strategy. - pub const fn new(strategy: S, batch_record: BlockBatchRecord) -> Self { - Self { strategy, batch_record, _phantom: PhantomData } + pub const fn new( + strategy: S, + batch_record: BlockBatchRecord, + prefetch_tx: Option>, + ) -> Self { + Self { strategy, batch_record, prefetch_tx, _phantom: PhantomData } } } @@ -424,20 +448,23 @@ where S: BlockExecutionStrategy, DB: Database + Display>, { - type Input<'a> = BlockExecutionInput<'a, BlockWithSenders>; + type Input<'a> = BlockExecutionInput<'a, BlockWithSenders, Header>; type Output = ExecutionOutcome; type Error = BlockExecutionError; fn execute_and_verify_one(&mut self, input: Self::Input<'_>) -> Result<(), Self::Error> { - let BlockExecutionInput { block, total_difficulty } = input; + let BlockExecutionInput { block, total_difficulty, .. } = input; if self.batch_record.first_block().is_none() { self.batch_record.set_first_block(block.number); } self.strategy.apply_pre_execution_changes(block, total_difficulty)?; - let ExecuteOutput { receipts, .. } = - self.strategy.execute_transactions(block, total_difficulty)?; + let ExecuteOutput { receipts, .. 
} = self.strategy.execute_transactions( + block, + total_difficulty, + self.prefetch_tx.clone(), + )?; let requests = self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; @@ -505,7 +532,11 @@ mod tests { TestExecutor(PhantomData) } - fn batch_executor(&self, _db: DB) -> Self::BatchExecutor + fn batch_executor( + &self, + _db: DB, + _: Option>, + ) -> Self::BatchExecutor where DB: Database + Display>, { @@ -636,6 +667,7 @@ mod tests { &mut self, _block: &BlockWithSenders, _total_difficulty: U256, + _tx: Option>, ) -> Result { Ok(self.execute_transactions_result.clone()) } @@ -701,8 +733,9 @@ mod tests { }; let provider = BasicBlockExecutorProvider::new(strategy_factory); let db = CacheDB::>::default(); - let executor = provider.executor(db); - let result = executor.execute(BlockExecutionInput::new(&Default::default(), U256::ZERO)); + let executor = provider.executor(db, None); + let result = + executor.execute(BlockExecutionInput::new(&Default::default(), U256::ZERO, None)); assert!(result.is_ok()); let block_execution_output = result.unwrap(); diff --git a/crates/evm/src/noop.rs b/crates/evm/src/noop.rs index df3aae0d5..946f7230a 100644 --- a/crates/evm/src/noop.rs +++ b/crates/evm/src/noop.rs @@ -35,7 +35,11 @@ impl BlockExecutorProvider for NoopBlockExecutorProvider { Self } - fn batch_executor(&self, _: DB) -> Self::BatchExecutor + fn batch_executor( + &self, + _: DB, + _: Option>, + ) -> Self::BatchExecutor where DB: Database + Display>, { diff --git a/crates/evm/src/test_utils.rs b/crates/evm/src/test_utils.rs index e2e830e46..78e689350 100644 --- a/crates/evm/src/test_utils.rs +++ b/crates/evm/src/test_utils.rs @@ -45,7 +45,11 @@ impl BlockExecutorProvider for MockExecutorProvider { self.clone() } - fn batch_executor(&self, _: DB) -> Self::BatchExecutor + fn batch_executor( + &self, + _: DB, + _: Option>, + ) -> Self::BatchExecutor where DB: Database + Display>, { diff --git a/crates/exex/exex/src/backfill/job.rs b/crates/exex/exex/src/backfill/job.rs index 18bd3f802..013bee9fa 100644 --- a/crates/exex/exex/src/backfill/job.rs +++ b/crates/exex/exex/src/backfill/job.rs @@ -72,9 +72,12 @@ where "Executing block range" ); - let mut executor = self.executor.batch_executor(StateProviderDatabase::new( - self.provider.history_by_block_number(self.range.start().saturating_sub(1))?, - )); + let mut executor = self.executor.batch_executor( + StateProviderDatabase::new( + self.provider.history_by_block_number(self.range.start().saturating_sub(1))?, + ), + None, + ); executor.set_prune_modes(self.prune_modes.clone()); let mut fetch_block_duration = Duration::default(); diff --git a/crates/exex/exex/src/backfill/test_utils.rs b/crates/exex/exex/src/backfill/test_utils.rs index 232e86a52..6b0ad366b 100644 --- a/crates/exex/exex/src/backfill/test_utils.rs +++ b/crates/exex/exex/src/backfill/test_utils.rs @@ -201,10 +201,13 @@ where let provider = provider_factory.provider()?; - let executor = - EthExecutorProvider::ethereum(chain_spec).batch_executor(StateProviderDatabase::new( - LatestStateProviderRef::new(provider.tx_ref(), provider.static_file_provider()), - )); + let executor = EthExecutorProvider::ethereum(chain_spec).batch_executor( + StateProviderDatabase::new(LatestStateProviderRef::new( + provider.tx_ref(), + provider.static_file_provider(), + )), + None, + ); let mut execution_outcome = executor.execute_and_verify_batch(vec![ (&block1, U256::ZERO, None).into(), diff --git a/crates/exex/test-utils/src/lib.rs b/crates/exex/test-utils/src/lib.rs index 
cff82a95d..a0b11575e 100644 --- a/crates/exex/test-utils/src/lib.rs +++ b/crates/exex/test-utils/src/lib.rs @@ -41,7 +41,7 @@ use reth_node_builder::{ }; use reth_node_core::node_config::NodeConfig; use reth_node_ethereum::{ - node::{EthereumAddOns, EthereumNetworkBuilder, EthereumPayloadBuilder, EthereumParliaBuilder}, + node::{EthereumAddOns, EthereumNetworkBuilder, EthereumParliaBuilder, EthereumPayloadBuilder}, EthEngineTypes, EthEvmConfig, }; use reth_payload_builder::noop::NoopPayloadBuilderService; diff --git a/crates/node/api/src/node.rs b/crates/node/api/src/node.rs index e9d7c23a4..13577bab0 100644 --- a/crates/node/api/src/node.rs +++ b/crates/node/api/src/node.rs @@ -4,6 +4,7 @@ use std::{future::Future, marker::PhantomData}; use alloy_rpc_types_engine::JwtSecret; use reth_beacon_consensus::BeaconConsensusEngineHandle; +use reth_bsc_consensus::BscTraceHelper; use reth_consensus::Consensus; use reth_evm::execute::BlockExecutorProvider; use reth_network_api::FullNetwork; @@ -14,8 +15,6 @@ use reth_primitives::Header; use reth_provider::FullProvider; use reth_tasks::TaskExecutor; use reth_transaction_pool::TransactionPool; -#[cfg(feature = "bsc")] -use reth_bsc_consensus::BscTraceHelper; use crate::ConfigureEvm; @@ -106,7 +105,6 @@ pub struct AddOnsContext<'a, N: FullNodeComponents> { pub jwt_secret: JwtSecret, /// Handle bsc trace rpc calls. - #[cfg(feature = "bsc")] pub bsc_trace_helper: Option, } diff --git a/crates/node/builder/src/components/builder.rs b/crates/node/builder/src/components/builder.rs index 6d87f26c6..ea9aa03b7 100644 --- a/crates/node/builder/src/components/builder.rs +++ b/crates/node/builder/src/components/builder.rs @@ -10,13 +10,11 @@ use reth_transaction_pool::TransactionPool; use crate::{ components::{ Components, ConsensusBuilder, ExecutorBuilder, NetworkBuilder, NodeComponents, - PayloadServiceBuilder, PoolBuilder, + ParliaBuilder, PayloadServiceBuilder, PoolBuilder, }, BuilderContext, ConfigureEvm, FullNodeTypes, }; -use super::ParliaBuilder; - /// A generic, general purpose and customizable [`NodeComponentsBuilder`] implementation. /// /// This type is stateful and captures the configuration of the node's components. @@ -141,6 +139,19 @@ impl _marker: self._marker, } } + + /// Apply a function to the parlia builder. + pub fn map_parlia(self, f: impl FnOnce(ParliaB) -> ParliaB) -> Self { + Self { + pool_builder: self.pool_builder, + payload_builder: self.payload_builder, + network_builder: self.network_builder, + executor_builder: self.executor_builder, + consensus_builder: self.consensus_builder, + parlia_builder: f(self.parlia_builder), + _marker: self._marker, + } + } } impl @@ -309,6 +320,37 @@ where _marker, } } + + /// Configures the parlia builder. + /// + /// This accepts a [`ParliaBuilder`] instance that will be used to create the node's components + /// for parlia. 
+ pub fn parlia( + self, + parlia_builder: PB, + ) -> ComponentsBuilder + where + PB: ParliaBuilder, + { + let Self { + pool_builder, + payload_builder, + network_builder, + executor_builder, + consensus_builder, + parlia_builder: _, + _marker, + } = self; + ComponentsBuilder { + pool_builder, + payload_builder, + network_builder, + executor_builder, + consensus_builder, + parlia_builder, + _marker, + } + } } impl NodeComponentsBuilder diff --git a/crates/node/builder/src/components/mod.rs b/crates/node/builder/src/components/mod.rs index e536be9f4..50f96f7be 100644 --- a/crates/node/builder/src/components/mod.rs +++ b/crates/node/builder/src/components/mod.rs @@ -11,6 +11,7 @@ mod builder; mod consensus; mod execute; mod network; +mod parlia; mod payload; mod pool; @@ -18,6 +19,7 @@ pub use builder::*; pub use consensus::*; pub use execute::*; pub use network::*; +pub use parlia::*; pub use payload::*; pub use pool::*; #[cfg(feature = "bsc")] diff --git a/crates/node/builder/src/components/parlia.rs b/crates/node/builder/src/components/parlia.rs new file mode 100644 index 000000000..5c0102169 --- /dev/null +++ b/crates/node/builder/src/components/parlia.rs @@ -0,0 +1,27 @@ +//! Parlia component for the node builder. +use crate::{BuilderContext, FullNodeTypes}; +use reth_bsc_consensus::Parlia; +use std::future::Future; + +/// Needed for bsc parlia consensus. +pub trait ParliaBuilder: Send { + /// Creates the parlia. + fn build_parlia( + self, + ctx: &BuilderContext, + ) -> impl Future> + Send; +} + +impl ParliaBuilder for F +where + Node: FullNodeTypes, + F: FnOnce(&BuilderContext) -> Fut + Send, + Fut: Future> + Send, +{ + fn build_parlia( + self, + ctx: &BuilderContext, + ) -> impl Future> { + self(ctx) + } +} diff --git a/crates/node/builder/src/launch/engine.rs b/crates/node/builder/src/launch/engine.rs index f870970dc..bae8ce482 100644 --- a/crates/node/builder/src/launch/engine.rs +++ b/crates/node/builder/src/launch/engine.rs @@ -236,6 +236,9 @@ where Box::pin(consensus_engine_stream), mining_mode, LocalPayloadAttributesBuilder::new(ctx.chain_spec()), + ctx.node_config().skip_state_root_validation, + ctx.node_config().enable_prefetch, + ctx.node_config().enable_execution_cache, ); Either::Left(eth_service) @@ -261,12 +264,12 @@ where ctx.node_config().enable_prefetch, ctx.node_config().enable_execution_cache, ); - + Either::Right(eth_service) } #[cfg(feature = "bsc")] { - let engine_rx = ctx.node_adapter().components.network().get_to_engine_rx(); + let engine_rx = ctx.components().network().get_to_engine_rx(); let client = ParliaEngineBuilder::new( ctx.chain_spec(), ctx.blockchain_db().clone(), @@ -343,7 +346,6 @@ where let bsc_trace_helper = Some(BscTraceHelper::new(Arc::new(ctx.components().parlia().clone()))); - let add_ons_ctx = AddOnsContext { node: ctx.node_adapter().clone(), config: ctx.node_config(), diff --git a/crates/node/builder/src/launch/mod.rs b/crates/node/builder/src/launch/mod.rs index 64093d1e7..97dd0bbb8 100644 --- a/crates/node/builder/src/launch/mod.rs +++ b/crates/node/builder/src/launch/mod.rs @@ -25,6 +25,8 @@ use reth_consensus_debug_client::{DebugConsensusClient, EtherscanBlockProvider, use reth_engine_util::EngineMessageStreamExt; use reth_exex::ExExManagerHandle; use reth_network::{BlockDownloaderProvider, NetworkEventListenerProvider}; +#[cfg(feature = "bsc")] +use reth_network_api::EngineRxProvider; use reth_node_api::{AddOnsContext, FullNodeTypes, NodeTypesWithDB, NodeTypesWithEngine}; use reth_node_core::{ dirs::{ChainPath, DataDirPath}, @@ -274,7 
+276,7 @@ where )?; #[cfg(feature = "bsc")] { - let engine_rx = ctx.node_adapter().components.network().get_to_engine_rx(); + let engine_rx = ctx.components().network().get_to_engine_rx(); let client = ParliaEngineBuilder::new( ctx.chain_spec(), ctx.blockchain_db().clone(), diff --git a/crates/node/builder/src/rpc.rs b/crates/node/builder/src/rpc.rs index cdebe6733..d82d64f05 100644 --- a/crates/node/builder/src/rpc.rs +++ b/crates/node/builder/src/rpc.rs @@ -9,7 +9,6 @@ use std::{ use alloy_rpc_types::engine::ClientVersionV1; use futures::TryFutureExt; -// TODO: use reth_bsc_consensus::BscTraceHelper; use reth_node_api::{ AddOnsContext, EngineValidator, FullNodeComponents, NodeAddOns, NodeTypes, NodeTypesWithEngine, }; @@ -413,7 +412,8 @@ where let Self { eth_api_builder, engine_validator_builder, hooks, _pd: _ } = self; let engine_validator = engine_validator_builder.build(&ctx).await?; - let AddOnsContext { node, config, beacon_engine_handle, jwt_secret } = ctx; + let AddOnsContext { node, config, beacon_engine_handle, jwt_secret, bsc_trace_helper } = + ctx; let client = ClientVersionV1 { code: CLIENT_CODE, @@ -448,7 +448,7 @@ where .with_evm_config(node.evm_config().clone()) .with_block_executor(node.block_executor().clone()) .with_consensus(node.consensus().clone()) - .with_bsc_trace_helper(node.bsc_trace_helper().clone()) + .with_bsc_trace_helper(bsc_trace_helper) .build_with_auth_server(module_config, engine_api, eth_api_builder); // in dev mode we generate 20 random dev-signer accounts diff --git a/crates/node/core/Cargo.toml b/crates/node/core/Cargo.toml index d146c1cb1..a3de10834 100644 --- a/crates/node/core/Cargo.toml +++ b/crates/node/core/Cargo.toml @@ -75,7 +75,11 @@ proptest.workspace = true tokio.workspace = true [features] -optimism = ["reth-primitives/optimism", "reth-db/optimism"] +optimism = [ + "reth-primitives/optimism", + "reth-db/optimism", + "reth-chainspec/optimism" +] opbnb = [ "reth-primitives/opbnb", ] diff --git a/crates/node/core/src/args/payload_builder.rs b/crates/node/core/src/args/payload_builder.rs index 524a93195..cd7ba7dcc 100644 --- a/crates/node/core/src/args/payload_builder.rs +++ b/crates/node/core/src/args/payload_builder.rs @@ -86,7 +86,7 @@ impl TypedValueParser for ExtradataValueParser { ) -> Result { let val = value.to_str().ok_or_else(|| clap::Error::new(clap::error::ErrorKind::InvalidUtf8))?; - if val.as_bytes().len() > MAXIMUM_EXTRA_DATA_SIZE { + if val.len() > MAXIMUM_EXTRA_DATA_SIZE { return Err(clap::Error::raw( clap::error::ErrorKind::InvalidValue, format!( diff --git a/crates/node/core/src/node_config.rs b/crates/node/core/src/node_config.rs index 437536544..b8278c69b 100644 --- a/crates/node/core/src/node_config.rs +++ b/crates/node/core/src/node_config.rs @@ -456,6 +456,9 @@ impl NodeConfig { db: self.db, dev: self.dev, pruning: self.pruning, + enable_prefetch: self.enable_prefetch, + skip_state_root_validation: self.skip_state_root_validation, + enable_execution_cache: self.enable_execution_cache, } } } diff --git a/crates/optimism/cli/Cargo.toml b/crates/optimism/cli/Cargo.toml index a2ba71214..f9847771d 100644 --- a/crates/optimism/cli/Cargo.toml +++ b/crates/optimism/cli/Cargo.toml @@ -87,7 +87,8 @@ optimism = [ "reth-optimism-node/optimism", "reth-execution-types/optimism", "reth-db/optimism", - "reth-db-api/optimism" + "reth-db-api/optimism", + "reth-chainspec/optimism" ] asm-keccak = [ "alloy-primitives/asm-keccak", diff --git a/crates/optimism/consensus/Cargo.toml b/crates/optimism/consensus/Cargo.toml index 
2721b246d..4aef95bc2 100644 --- a/crates/optimism/consensus/Cargo.toml +++ b/crates/optimism/consensus/Cargo.toml @@ -34,6 +34,9 @@ alloy-primitives.workspace = true reth-optimism-chainspec.workspace = true [features] -optimism = ["reth-primitives/optimism"] +optimism = [ + "reth-primitives/optimism", + "reth-chainspec/optimism" +] opbnb = ["reth-primitives/opbnb"] diff --git a/crates/optimism/evm/Cargo.toml b/crates/optimism/evm/Cargo.toml index fc562fd46..bcea96a9b 100644 --- a/crates/optimism/evm/Cargo.toml +++ b/crates/optimism/evm/Cargo.toml @@ -70,7 +70,8 @@ optimism = [ "reth-execution-types/optimism", "reth-optimism-consensus/optimism", "revm/optimism", - "revm-primitives/optimism" + "revm-primitives/optimism", + "reth-chainspec/optimism" ] opbnb = [ "reth-primitives/opbnb", diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index 161c22752..ebefec841 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -1,21 +1,13 @@ //! Optimism block execution strategy. -<<<<<<< HEAD -use crate::{ - l1::ensure_create2_deployer, OpChainSpec, OptimismBlockExecutionError, OptimismEvmConfig, -}; -use alloy_primitives::{Address, BlockNumber, U256}; -use reth_chainspec::{ChainSpec, EthereumHardforks}; -======= use crate::{l1::ensure_create2_deployer, OpEvmConfig, OptimismBlockExecutionError}; use alloc::{boxed::Box, sync::Arc, vec::Vec}; use alloy_consensus::Transaction as _; use alloy_eips::eip7685::Requests; -use core::fmt::Display; +use alloy_primitives::Address; use op_alloy_consensus::DepositTransaction; use reth_chainspec::EthereumHardforks; use reth_consensus::ConsensusError; ->>>>>>> v1.1.1 use reth_evm::{ execute::{ BasicBlockExecutorProvider, BlockExecutionError, BlockExecutionStrategy, @@ -28,125 +20,19 @@ use reth_evm::{ use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::validate_block_post_execution; use reth_optimism_forks::OptimismHardfork; -<<<<<<< HEAD -use reth_primitives::{BlockWithSenders, Header, Receipt, Receipts, TxType}; -use reth_prune_types::PruneModes; -use reth_revm::{ - batch::BlockBatchRecord, - db::states::{bundle_state::BundleRetention, StorageSlot}, - state_change::post_block_balance_increments, - Evm, State, -}; -use revm_primitives::{ - db::{Database, DatabaseCommit}, - BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, EvmState, ResultAndState, -}; -use std::{collections::HashMap, fmt::Display, str::FromStr, sync::Arc}; -use tokio::sync::mpsc::UnboundedSender; -use tracing::{debug, trace}; -======= use reth_primitives::{BlockWithSenders, Header, Receipt, TxType}; -use reth_revm::{Database, State}; +use reth_revm::{db::states::StorageSlot, Database, State}; use revm_primitives::{ - db::DatabaseCommit, BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, U256, + db::DatabaseCommit, BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, EvmState, + ResultAndState, U256, }; +use std::{collections::HashMap, fmt::Display, str::FromStr}; +use tokio::sync::mpsc::UnboundedSender; use tracing::trace; ->>>>>>> v1.1.1 /// Factory for [`OpExecutionStrategy`]. #[derive(Debug, Clone)] -<<<<<<< HEAD -pub struct OpExecutorProvider { - chain_spec: Arc, - evm_config: EvmConfig, -} - -impl OpExecutorProvider { - /// Creates a new default optimism executor provider. - pub fn optimism(chain_spec: Arc) -> Self { - Self::new(chain_spec.clone(), OptimismEvmConfig::new(chain_spec)) - } -} - -impl OpExecutorProvider { - /// Creates a new executor provider. 
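
The conflict resolution above drops the fork's hand-rolled `OpBlockExecutor`/`OpBatchExecutor` in favour of upstream's strategy factory, while keeping the prefetch channel that forwards each transaction's post-state to a cache-warming task. A reduced, dependency-free sketch of that prefetch pattern; the real code sends revm's `EvmState` over a `tokio::sync::mpsc::UnboundedSender`, whereas `PostState`, `Executor`, and `std::sync::mpsc` here are stand-ins so the sketch compiles on its own:

use std::sync::mpsc::{channel, Sender};

#[derive(Clone, Debug)]
struct PostState {
    touched_accounts: Vec<u64>, // stand-in for the state changes of one transaction
}

struct Executor {
    prefetch_tx: Option<Sender<PostState>>, // `None` disables prefetching entirely
}

impl Executor {
    fn execute_transaction(&self, tx_id: u64) -> PostState {
        // ... run the EVM, producing the state diff for this transaction ...
        let state = PostState { touched_accounts: vec![tx_id] };

        // Forward a copy to the prefetcher; a send error only means the receiver is
        // gone, so it is logged and ignored rather than failing block execution.
        if let Some(tx) = self.prefetch_tx.as_ref() {
            if tx.send(state.clone()).is_err() {
                eprintln!("prefetch receiver dropped; continuing without prefetch");
            }
        }

        // ... commit `state` to the local database and continue with the next tx ...
        state
    }
}

fn main() {
    let (tx, rx) = channel();
    let executor = Executor { prefetch_tx: Some(tx) };

    for tx_id in 0..3 {
        executor.execute_transaction(tx_id);
    }
    drop(executor); // closes the channel so the drain below terminates

    // A prefetch task would consume these concurrently; here we just drain them.
    for state in rx {
        println!("prefetching accounts {:?}", state.touched_accounts);
    }
}
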
- pub const fn new(chain_spec: Arc, evm_config: EvmConfig) -> Self { - Self { chain_spec, evm_config } - } -} - -impl OpExecutorProvider -where - EvmConfig: ConfigureEvm
, -{ - fn op_executor( - &self, - db: DB, - prefetch_tx: Option>, - ) -> OpBlockExecutor - where - DB: Database + Display>, - { - if let Some(tx) = prefetch_tx { - OpBlockExecutor::new_with_prefetch_tx( - self.chain_spec.clone(), - self.evm_config.clone(), - State::builder() - .with_database(db) - .with_bundle_update() - .without_state_clear() - .build(), - tx, - ) - } else { - OpBlockExecutor::new( - self.chain_spec.clone(), - self.evm_config.clone(), - State::builder() - .with_database(db) - .with_bundle_update() - .without_state_clear() - .build(), - ) - } - } -} - -impl BlockExecutorProvider for OpExecutorProvider -where - EvmConfig: ConfigureEvm
, -{ - type Executor + Display>> = - OpBlockExecutor; - - type BatchExecutor + Display>> = - OpBatchExecutor; - fn executor( - &self, - db: DB, - prefetch_tx: Option>, - ) -> Self::Executor - where - DB: Database + Display>, - { - self.op_executor(db, prefetch_tx) - } - - fn batch_executor(&self, db: DB) -> Self::BatchExecutor - where - DB: Database + Display>, - { - let executor = self.op_executor(db, None); - OpBatchExecutor { executor, batch_record: BlockBatchRecord::default() } - } -} - -/// Helper container type for EVM with chain spec. -#[derive(Debug, Clone)] -pub struct OpEvmExecutor { -======= pub struct OpExecutionStrategyFactory { ->>>>>>> v1.1.1 /// The chainspec chain_spec: Arc, /// How to create an EVM. @@ -172,29 +58,10 @@ where EvmConfig: Clone + Unpin + Sync + Send + 'static + ConfigureEvm
, { -<<<<<<< HEAD - /// Executes the transactions in the block and returns the receipts. - /// - /// This applies the pre-execution changes, and executes the transactions. - /// - /// The optional `state_hook` will be executed with the state changes if present. - /// - /// # Note - /// - /// It does __not__ apply post-execution changes. - fn execute_pre_and_transactions( - &self, - block: &BlockWithSenders, - mut evm: Evm<'_, Ext, &mut State>, - state_hook: Option, - tx: Option>, - ) -> Result<(Vec, u64), BlockExecutionError> -======= type Strategy + Display>> = OpExecutionStrategy; fn create_strategy(&self, db: DB) -> Self::Strategy ->>>>>>> v1.1.1 where DB: Database + Display>, { @@ -289,6 +156,7 @@ where &mut self, block: &BlockWithSenders, total_difficulty: U256, + _prefetch_rx: Option>, ) -> Result { let env = self.evm_env_for_block(&block.header, total_difficulty); let mut evm = self.evm_config.evm_with_env(&mut self.state, env); @@ -348,20 +216,8 @@ where ?transaction, "Executed transaction" ); -<<<<<<< HEAD - - system_caller.on_state(&result_and_state); -======= self.system_caller.on_state(&result_and_state); ->>>>>>> v1.1.1 let ResultAndState { result, state } = result_and_state; - - if let Some(tx) = tx.as_ref() { - tx.send(state.clone()).unwrap_or_else(|err| { - debug!(target: "evm_executor", ?err, "Failed to send post state to prefetch channel") - }); - } - evm.db_mut().commit(state); // append gas used @@ -386,114 +242,21 @@ where }); } -<<<<<<< HEAD - Ok((receipts, cumulative_gas_used)) - } -} - -/// A basic Optimism block executor. -/// -/// Expected usage: -/// - Create a new instance of the executor. -/// - Execute the block. -#[derive(Debug)] -pub struct OpBlockExecutor { - /// Chain specific evm config that's used to execute a block. - executor: OpEvmExecutor, - /// The state to use for execution - state: State, - /// Prefetch channel - prefetch_tx: Option>, -} - -impl OpBlockExecutor { - /// Creates a new Optimism block executor. - pub const fn new( - chain_spec: Arc, - evm_config: EvmConfig, - state: State, - ) -> Self { - Self { executor: OpEvmExecutor { chain_spec, evm_config }, state, prefetch_tx: None } - } - - /// Creates a new Optimism block executor with a prefetch channel. - pub const fn new_with_prefetch_tx( - chain_spec: Arc, - evm_config: EvmConfig, - state: State, - tx: UnboundedSender, - ) -> Self { - Self { executor: OpEvmExecutor { chain_spec, evm_config }, state, prefetch_tx: Some(tx) } -======= Ok(ExecuteOutput { receipts, gas_used: cumulative_gas_used }) ->>>>>>> v1.1.1 } fn apply_post_execution_changes( &mut self, block: &BlockWithSenders, total_difficulty: U256, -<<<<<<< HEAD - ) -> Result<(Vec, u64), BlockExecutionError> { - self.execute_without_verification_with_state_hook(block, total_difficulty, None::) - } - - /// Execute a single block and apply the state changes to the internal state. - /// - /// Returns the receipts of the transactions in the block and the total gas used. - /// - /// Returns an error if execution fails. - fn execute_without_verification_with_state_hook( - &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - state_hook: Option, - ) -> Result<(Vec, u64), BlockExecutionError> - where - F: OnStateHook, - { - // 1. prepare state on new block - self.on_new_block(&block.header); - - // 2. 
configure the evm and execute - let env = self.evm_env_for_block(&block.header, total_difficulty); - - let (receipts, gas_used) = { - let evm = self.executor.evm_config.evm_with_env(&mut self.state, env); - self.executor.execute_pre_and_transactions( - block, - evm, - state_hook, - self.prefetch_tx.clone(), - ) - }?; - - // 3. apply post execution changes - self.post_execution(block, total_difficulty)?; - - Ok((receipts, gas_used)) - } - - /// Apply settings before a new block is executed. - pub(crate) fn on_new_block(&mut self, header: &Header) { - // Set state clear flag if the block is after the Spurious Dragon hardfork. - let state_clear_flag = self.chain_spec().is_spurious_dragon_active_at_block(header.number); - self.state.set_state_clear_flag(state_clear_flag); - } - - /// Apply post execution state changes, including block rewards, withdrawals, and irregular DAO - /// hardfork state change. - pub fn post_execution( - &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - ) -> Result<(), BlockExecutionError> { + _receipts: &[Receipt], + ) -> Result { let balance_increments = - post_block_balance_increments(self.chain_spec(), block, total_difficulty); + post_block_balance_increments(&self.chain_spec.clone(), block, total_difficulty); #[cfg(all(feature = "optimism", feature = "opbnb"))] if self - .chain_spec() + .chain_spec .fork(OptimismHardfork::PreContractForkBlock) .transitions_at_block(block.number) { @@ -534,12 +297,7 @@ impl OpBlockExecutor { (governance_token_contract_address, governance_token_change), ]); } -======= - _receipts: &[Receipt], - ) -> Result { - let balance_increments = - post_block_balance_increments(&self.chain_spec.clone(), block, total_difficulty); ->>>>>>> v1.1.1 + // increment balances self.state .increment_balances(balance_increments) @@ -570,170 +328,16 @@ impl OpBlockExecutor { } } -<<<<<<< HEAD -impl Executor for OpBlockExecutor -where - EvmConfig: ConfigureEvm
, - DB: Database + Display>, -{ - type Input<'a> = BlockExecutionInput<'a, BlockWithSenders, Header>; - type Output = BlockExecutionOutput; - type Error = BlockExecutionError; - - /// Executes the block and commits the state changes. - /// - /// Returns the receipts of the transactions in the block. - /// - /// Returns an error if the block could not be executed or failed verification. - /// - /// State changes are committed to the database. - fn execute(mut self, input: Self::Input<'_>) -> Result { - let BlockExecutionInput { block, total_difficulty, .. } = input; - let (receipts, gas_used) = self.execute_without_verification(block, total_difficulty)?; - - // NOTE: we need to merge keep the reverts for the bundle retention - self.state.merge_transitions(BundleRetention::Reverts); - - Ok(BlockExecutionOutput { - state: self.state.take_bundle(), - receipts, - requests: vec![], - gas_used, - snapshot: None, - }) - } - - fn execute_with_state_closure( - mut self, - input: Self::Input<'_>, - mut witness: F, - ) -> Result - where - F: FnMut(&State), - { - let BlockExecutionInput { block, total_difficulty, .. } = input; - let (receipts, gas_used) = self.execute_without_verification(block, total_difficulty)?; - - // NOTE: we need to merge keep the reverts for the bundle retention - self.state.merge_transitions(BundleRetention::Reverts); - witness(&self.state); - - Ok(BlockExecutionOutput { - state: self.state.take_bundle(), - receipts, - requests: vec![], - gas_used, - snapshot: None, - }) - } - - fn execute_with_state_hook( - mut self, - input: Self::Input<'_>, - state_hook: F, - ) -> Result - where - F: OnStateHook, - { - let BlockExecutionInput { block, total_difficulty, .. } = input; - let (receipts, gas_used) = self.execute_without_verification_with_state_hook( - block, - total_difficulty, - Some(state_hook), - )?; - - // NOTE: we need to merge keep the reverts for the bundle retention - self.state.merge_transitions(BundleRetention::Reverts); - - Ok(BlockExecutionOutput { - state: self.state.take_bundle(), - receipts, - requests: vec![], - gas_used, - snapshot: None, - }) - } -} - -/// An executor for a batch of blocks. -/// -/// State changes are tracked until the executor is finalized. -======= /// Helper type with backwards compatible methods to obtain executor providers. ->>>>>>> v1.1.1 #[derive(Debug)] pub struct OpExecutorProvider; -<<<<<<< HEAD -impl OpBatchExecutor { - /// Returns the receipts of the executed blocks. - pub const fn receipts(&self) -> &Receipts { - self.batch_record.receipts() - } - - /// Returns mutable reference to the state that wraps the underlying database. - pub fn state_mut(&mut self) -> &mut State { - self.executor.state_mut() - } -} - -impl BatchExecutor for OpBatchExecutor -where - EvmConfig: ConfigureEvm
, - DB: Database + Display>, -{ - type Input<'a> = BlockExecutionInput<'a, BlockWithSenders, Header>; - type Output = ExecutionOutcome; - type Error = BlockExecutionError; - - fn execute_and_verify_one(&mut self, input: Self::Input<'_>) -> Result<(), Self::Error> { - let BlockExecutionInput { block, total_difficulty, .. } = input; - - if self.batch_record.first_block().is_none() { - self.batch_record.set_first_block(block.number); - } - let (receipts, _gas_used) = - self.executor.execute_without_verification(block, total_difficulty)?; - - validate_block_post_execution(block, self.executor.chain_spec(), &receipts)?; - - // prepare the state according to the prune mode - let retention = self.batch_record.bundle_retention(block.number); - self.executor.state.merge_transitions(retention); - - // store receipts in the set - self.batch_record.save_receipts(receipts)?; - - Ok(()) - } - - fn finalize(mut self) -> Self::Output { - ExecutionOutcome::new( - self.executor.state.take_bundle(), - self.batch_record.take_receipts(), - self.batch_record.first_block().unwrap_or_default(), - self.batch_record.take_requests(), - ) - } - - fn set_tip(&mut self, tip: BlockNumber) { - self.batch_record.set_tip(tip); - } - - fn set_prune_modes(&mut self, prune_modes: PruneModes) { - self.batch_record.set_prune_modes(prune_modes); - } - - fn size_hint(&self) -> Option { - Some(self.executor.state.bundle_state.size_hint()) -======= impl OpExecutorProvider { /// Creates a new default optimism executor strategy factory. pub fn optimism( chain_spec: Arc, ) -> BasicBlockExecutorProvider { BasicBlockExecutorProvider::new(OpExecutionStrategyFactory::optimism(chain_spec)) ->>>>>>> v1.1.1 } } @@ -832,7 +436,7 @@ mod tests { ); let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db), None); // make sure the L1 block contract state is preloaded. executor.with_state_mut(|state| { @@ -917,7 +521,7 @@ mod tests { ); let provider = executor_provider(chain_spec); - let mut executor = provider.batch_executor(StateProviderDatabase::new(&db)); + let mut executor = provider.batch_executor(StateProviderDatabase::new(&db), None); // make sure the L1 block contract state is preloaded. 
executor.with_state_mut(|state| { diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index 1adb5db74..c33a4338d 100644 --- a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -599,6 +599,7 @@ mod tests { receipts, requests: vec![], first_block: 10, + snapshots: vec![], }; // Create a Chain object with a BTreeMap of blocks mapped to their block numbers, @@ -614,6 +615,7 @@ mod tests { receipts: Receipts { receipt_vec: vec![vec![Some(receipt1)]] }, requests: vec![], first_block: 10, + snapshots: vec![], }; // Assert that the execution outcome at the first block contains only the first receipt @@ -657,6 +659,7 @@ mod tests { receipts: receipts.clone(), requests: requests.clone(), first_block, + snapshots: vec![], }; // Assert that creating a new ExecutionOutcome using the constructor matches exec_res @@ -717,6 +720,7 @@ mod tests { receipts, requests: vec![], first_block, + snapshots: vec![], }; // Test before the first block @@ -753,6 +757,7 @@ mod tests { receipts, requests: vec![], first_block, + snapshots: vec![], }; // Get logs for block number 123 @@ -786,6 +791,7 @@ mod tests { receipts, // Include the created receipts requests: vec![], // Empty vector for requests first_block, // Set the first block number + snapshots: vec![], }; // Get receipts for block number 123 and convert the result into a vector @@ -832,6 +838,7 @@ mod tests { receipts, // Include the created receipts requests: vec![], // Empty vector for requests first_block, // Set the first block number + snapshots: vec![], }; // Assert that the length of receipts in exec_res is 1 @@ -846,6 +853,7 @@ mod tests { receipts: receipts_empty, // Include the empty receipts requests: vec![], // Empty vector for requests first_block, // Set the first block number + snapshots: vec![], }; // Assert that the length of receipts in exec_res_empty_receipts is 0 @@ -884,8 +892,13 @@ mod tests { // Create a ExecutionOutcome object with the created bundle, receipts, requests, and // first_block - let mut exec_res = - ExecutionOutcome { bundle: Default::default(), receipts, requests, first_block }; + let mut exec_res = ExecutionOutcome { + bundle: Default::default(), + receipts, + requests, + first_block, + snapshots: vec![], + }; // Assert that the revert_to method returns true when reverting to the initial block number. assert!(exec_res.revert_to(123)); @@ -930,8 +943,13 @@ mod tests { let first_block = 123; // Create an ExecutionOutcome object. - let mut exec_res = - ExecutionOutcome { bundle: Default::default(), receipts, requests, first_block }; + let mut exec_res = ExecutionOutcome { + bundle: Default::default(), + receipts, + requests, + first_block, + snapshots: vec![], + }; // Extend the ExecutionOutcome object by itself. 
exec_res.extend(exec_res.clone()); @@ -946,6 +964,7 @@ mod tests { }, requests: vec![Requests::new(vec![request.clone()]), Requests::new(vec![request])], first_block: 123, + snapshots: vec![], } ); } @@ -986,8 +1005,13 @@ mod tests { // Create a ExecutionOutcome object with the created bundle, receipts, requests, and // first_block - let exec_res = - ExecutionOutcome { bundle: Default::default(), receipts, requests, first_block }; + let exec_res = ExecutionOutcome { + bundle: Default::default(), + receipts, + requests, + first_block, + snapshots: vec![], + }; // Split the ExecutionOutcome at block number 124 let result = exec_res.clone().split_at(124); @@ -998,6 +1022,7 @@ mod tests { receipts: Receipts { receipt_vec: vec![vec![Some(receipt.clone())]] }, requests: vec![Requests::new(vec![request.clone()])], first_block, + snapshots: vec![], }; // Define the expected higher ExecutionOutcome after splitting @@ -1008,6 +1033,7 @@ mod tests { }, requests: vec![Requests::new(vec![request.clone()]), Requests::new(vec![request])], first_block: 124, + snapshots: vec![], }; // Assert that the split result matches the expected lower and higher outcomes diff --git a/crates/optimism/hardforks/src/hardfork.rs b/crates/optimism/hardforks/src/hardfork.rs index c06626634..6fd1bad5f 100644 --- a/crates/optimism/hardforks/src/hardfork.rs +++ b/crates/optimism/hardforks/src/hardfork.rs @@ -235,11 +235,7 @@ impl OptimismHardfork { Self::Ecotone => Some(1708534800), Self::Fjord => Some(1716998400), Self::Granite => Some(1723478400), -<<<<<<< HEAD _ => None, -======= - Self::Holocene => None, ->>>>>>> v1.1.1 }, ) } @@ -274,7 +270,6 @@ impl OptimismHardfork { Self::Ecotone => Some(1710374401), Self::Fjord => Some(1720627201), Self::Granite => Some(1726070401), -<<<<<<< HEAD _ => None, }, ) @@ -316,9 +311,6 @@ impl OptimismHardfork { Self::Ecotone => Some(1715754600), Self::Haber => Some(1717048800), _ => None, -======= - Self::Holocene => None, ->>>>>>> v1.1.1 }, ) } diff --git a/crates/optimism/hardforks/src/lib.rs b/crates/optimism/hardforks/src/lib.rs index 38abfd439..154c119ce 100644 --- a/crates/optimism/hardforks/src/lib.rs +++ b/crates/optimism/hardforks/src/lib.rs @@ -46,12 +46,6 @@ pub trait OptimismHardforks: EthereumHardforks { self.fork(OptimismHardfork::Granite).active_at_timestamp(timestamp) } -<<<<<<< HEAD - /// Convenience method to check if [`OptimismHardfork::Wright`] is active at a given block - /// number. - fn is_wright_active_at_timestamp(&self, timestamp: u64) -> bool { - self.fork(OptimismHardfork::Wright).active_at_timestamp(timestamp) -======= /// Returns `true` if [`Holocene`](OptimismHardfork::Holocene) is active at given block /// timestamp. fn is_holocene_active_at_timestamp(&self, timestamp: u64) -> bool { @@ -62,6 +56,11 @@ pub trait OptimismHardforks: EthereumHardforks { /// timestamp. fn is_regolith_active_at_timestamp(&self, timestamp: u64) -> bool { self.fork(OptimismHardfork::Regolith).active_at_timestamp(timestamp) ->>>>>>> v1.1.1 + } + + /// Convenience method to check if [`OptimismHardfork::Wright`] is active at a given block + /// number. 
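
These convenience methods all reduce to the same check: look up a fork's activation timestamp on the chain spec and compare it with the block timestamp. A self-contained sketch of that pattern, with an illustrative fork enum and timestamps rather than the real `OptimismHardfork`/`ChainSpec` types:

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Hardfork {
    Regolith,
    Wright,
    Holocene,
}

struct ChainSpec {
    // (fork, activation timestamp); `None` means not scheduled on this chain.
    forks: Vec<(Hardfork, Option<u64>)>,
}

impl ChainSpec {
    fn is_fork_active_at_timestamp(&self, fork: Hardfork, timestamp: u64) -> bool {
        self.forks
            .iter()
            .find(|(f, _)| *f == fork)
            .and_then(|(_, at)| *at)
            .is_some_and(|at| timestamp >= at)
    }

    /// Convenience wrapper, mirroring `is_wright_active_at_timestamp` above.
    fn is_wright_active_at_timestamp(&self, timestamp: u64) -> bool {
        self.is_fork_active_at_timestamp(Hardfork::Wright, timestamp)
    }
}

fn main() {
    let spec = ChainSpec {
        forks: vec![
            (Hardfork::Regolith, Some(0)),
            (Hardfork::Wright, Some(1_720_000_000)), // illustrative timestamp
            (Hardfork::Holocene, None),
        ],
    };
    assert!(spec.is_wright_active_at_timestamp(1_800_000_000));
    assert!(!spec.is_fork_active_at_timestamp(Hardfork::Holocene, 1_800_000_000));
}
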
+ fn is_wright_active_at_timestamp(&self, timestamp: u64) -> bool { + self.fork(OptimismHardfork::Wright).active_at_timestamp(timestamp) } } diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index 439ed2da3..ac656ea2d 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -38,13 +38,11 @@ reth-optimism-chainspec.workspace = true reth-optimism-consensus.workspace = true reth-optimism-forks.workspace = true -<<<<<<< HEAD -# bsc -reth-bsc-consensus.workspace = true -======= # revm with required optimism features revm = { workspace = true, features = ["secp256k1", "blst", "c-kzg"] } ->>>>>>> v1.1.1 + +# bsc +reth-bsc-consensus.workspace = true # ethereum alloy-eips.workspace = true @@ -85,7 +83,8 @@ optimism = [ "reth-optimism-rpc/optimism", "reth-engine-local/optimism", "reth-optimism-consensus/optimism", - "reth-db/optimism" + "reth-db/optimism", + "reth-chainspec/optimism" ] asm-keccak = [ "reth-primitives/asm-keccak", @@ -108,13 +107,8 @@ test-utils = [ "reth-trie-db/test-utils", "revm/test-utils" ] -<<<<<<< HEAD opbnb = [ - "reth-primitives/opbnb", - "reth-optimism-evm/opbnb", - "reth-optimism-payload-builder/opbnb", + "reth-primitives/opbnb", + "reth-optimism-evm/opbnb", + "reth-optimism-payload-builder/opbnb", ] -asm-keccak = ["reth-primitives/asm-keccak"] -test-utils = ["reth-node-builder/test-utils"] -======= ->>>>>>> v1.1.1 diff --git a/crates/optimism/node/src/args.rs b/crates/optimism/node/src/args.rs index 4a9048b7e..ac6d7ea84 100644 --- a/crates/optimism/node/src/args.rs +++ b/crates/optimism/node/src/args.rs @@ -38,16 +38,11 @@ pub struct RollupArgs { #[arg(long = "rollup.discovery.v4", default_value = "false")] pub discovery_v4: bool, -<<<<<<< HEAD - /// Enable the engine2 experimental features on op-reth binary - #[arg(long = "engine.experimental", default_value = "true")] -======= /// Enable the experimental engine features on reth binary /// /// DEPRECATED: experimental engine is default now, use --engine.legacy to enable the legacy /// functionality - #[arg(long = "engine.experimental", default_value = "false")] ->>>>>>> v1.1.1 + #[arg(long = "engine.experimental", default_value = "true")] pub experimental: bool, /// Enable the legacy engine on reth binary @@ -71,12 +66,8 @@ impl Default for RollupArgs { enable_genesis_walkback: false, compute_pending_block: false, discovery_v4: false, -<<<<<<< HEAD experimental: true, -======= - experimental: false, legacy: false, ->>>>>>> v1.1.1 persistence_threshold: DEFAULT_PERSISTENCE_THRESHOLD, memory_block_buffer_target: DEFAULT_MEMORY_BLOCK_BUFFER_TARGET, } diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index d471b06e5..e4f6f7c2c 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -12,14 +12,8 @@ use reth_node_api::{ }; use reth_node_builder::{ components::{ -<<<<<<< HEAD - ComponentsBuilder, ConsensusBuilder, EngineValidatorBuilder, ExecutorBuilder, - NetworkBuilder, ParliaBuilder, PayloadServiceBuilder, PoolBuilder, - PoolBuilderConfigOverrides, -======= - ComponentsBuilder, ConsensusBuilder, ExecutorBuilder, NetworkBuilder, + ComponentsBuilder, ConsensusBuilder, ExecutorBuilder, NetworkBuilder, ParliaBuilder, PayloadServiceBuilder, PoolBuilder, PoolBuilderConfigOverrides, ->>>>>>> v1.1.1 }, node::{FullNodeTypes, NodeTypes, NodeTypesWithEngine}, rpc::{EngineValidatorBuilder, RethRpcAddOns, RpcAddOns, RpcHandle}, @@ -73,21 +67,12 @@ impl OptimismNode { args: RollupArgs, ) -> ComponentsBuilder< Node, 
-<<<<<<< HEAD - OptimismPoolBuilder, - OptimismPayloadBuilder, - OptimismNetworkBuilder, - OptimismExecutorBuilder, - OptimismConsensusBuilder, - OptimismEngineValidatorBuilder, - OptimismParliaBuilder, -======= OpPoolBuilder, OpPayloadBuilder, OpNetworkBuilder, OpExecutorBuilder, OpConsensusBuilder, ->>>>>>> v1.1.1 + OptimismParliaBuilder, > where Node: FullNodeTypes< @@ -103,15 +88,9 @@ impl OptimismNode { disable_txpool_gossip, disable_discovery_v4: !discovery_v4, }) -<<<<<<< HEAD - .executor(OptimismExecutorBuilder::default()) - .consensus(OptimismConsensusBuilder::default()) - .engine_validator(OptimismEngineValidatorBuilder::default()) - .parlia(OptimismParliaBuilder::default()) -======= .executor(OpExecutorBuilder::default()) .consensus(OpConsensusBuilder::default()) ->>>>>>> v1.1.1 + .parlia(OptimismParliaBuilder::default()) } } @@ -121,21 +100,12 @@ where { type ComponentsBuilder = ComponentsBuilder< N, -<<<<<<< HEAD - OptimismPoolBuilder, - OptimismPayloadBuilder, - OptimismNetworkBuilder, - OptimismExecutorBuilder, - OptimismConsensusBuilder, - OptimismEngineValidatorBuilder, - OptimismParliaBuilder, -======= OpPoolBuilder, OpPayloadBuilder, OpNetworkBuilder, OpExecutorBuilder, OpConsensusBuilder, ->>>>>>> v1.1.1 + OptimismParliaBuilder, >; type AddOns = OptimismAddOns< diff --git a/crates/optimism/node/src/txpool.rs b/crates/optimism/node/src/txpool.rs index ebb737e07..e5d327290 100644 --- a/crates/optimism/node/src/txpool.rs +++ b/crates/optimism/node/src/txpool.rs @@ -4,7 +4,7 @@ use alloy_primitives::U256; use parking_lot::RwLock; use reth_chainspec::ChainSpec; use reth_optimism_evm::RethL1BlockInfo; -use reth_optimism_forks::OptimismHardforks; +use reth_optimism_forks::OptimismHardfork; use reth_primitives::{Block, GotExpected, InvalidTransactionError, SealedBlock}; use reth_provider::{BlockReaderIdExt, StateProviderFactory}; use reth_revm::L1BlockInfo; @@ -146,7 +146,7 @@ where let cost_addition = if self .chain_spec() - .is_wright_active_at_timestamp(self.block_timestamp()) && + .is_fork_active_at_timestamp(OptimismHardfork::Wright, self.block_timestamp()) && valid_tx.transaction().priority_fee_or_price() == 0 { U256::from(0) diff --git a/crates/optimism/payload/Cargo.toml b/crates/optimism/payload/Cargo.toml index ac6b26c5d..6259361a4 100644 --- a/crates/optimism/payload/Cargo.toml +++ b/crates/optimism/payload/Cargo.toml @@ -50,23 +50,15 @@ sha2.workspace = true [features] optimism = [ -<<<<<<< HEAD - "reth-chainspec/optimism", - "reth-primitives/optimism", - "reth-provider/optimism", - "reth-optimism-evm/optimism", - "reth-revm/optimism", -] -opbnb = [ - "reth-primitives/opbnb", - "reth-optimism-evm/opbnb", -] -======= "reth-primitives/optimism", "reth-provider/optimism", "reth-optimism-evm/optimism", "revm/optimism", "reth-execution-types/optimism", - "reth-optimism-consensus/optimism" + "reth-optimism-consensus/optimism", + "reth-chainspec/optimism" +] +opbnb = [ + "reth-primitives/opbnb", + "reth-optimism-evm/opbnb", ] ->>>>>>> v1.1.1 diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 3c43a4414..a472b728e 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -505,17 +505,7 @@ where // seal the block let block = Block { header, -<<<<<<< HEAD - body: BlockBody { - transactions: executed_txs, - ommers: vec![], - withdrawals, - sidecars: None, - requests: None, - }, -======= - body: BlockBody { transactions: executed_txs, ommers: vec![], withdrawals }, ->>>>>>> v1.1.1 + body: 
BlockBody { transactions: executed_txs, ommers: vec![], withdrawals, sidecars: None }, }; let sealed_block = block.seal_slow(); diff --git a/crates/optimism/rpc/Cargo.toml b/crates/optimism/rpc/Cargo.toml index 767163535..d9409c52a 100644 --- a/crates/optimism/rpc/Cargo.toml +++ b/crates/optimism/rpc/Cargo.toml @@ -67,7 +67,8 @@ optimism = [ "reth-primitives/optimism", "reth-provider/optimism", "revm/optimism", - "reth-optimism-consensus/optimism" + "reth-optimism-consensus/optimism", + "reth-chainspec/optimism" ] opbnb = [ diff --git a/crates/optimism/rpc/src/eth/receipt.rs b/crates/optimism/rpc/src/eth/receipt.rs index 67a46bd52..5b188907a 100644 --- a/crates/optimism/rpc/src/eth/receipt.rs +++ b/crates/optimism/rpc/src/eth/receipt.rs @@ -217,17 +217,13 @@ impl OpReceiptBuilder { .deposit_version(receipt.deposit_receipt_version) .build(); -<<<<<<< HEAD if chain_spec.is_wright_active_at_timestamp(meta.timestamp) && transaction.effective_gas_price(meta.base_fee) == 0 { op_receipt_fields.l1_block_info.l1_fee = Some(0); } - Ok(Self { core_receipt, tx_type, op_receipt_fields }) -======= Ok(Self { core_receipt, op_receipt_fields }) ->>>>>>> v1.1.1 } /// Builds [`OpTransactionReceipt`] by combing core (l1) receipt fields and additional OP diff --git a/crates/primitives-traits/src/blob_sidecar.rs b/crates/primitives-traits/src/blob_sidecar.rs index af11850a1..26d82ae14 100644 --- a/crates/primitives-traits/src/blob_sidecar.rs +++ b/crates/primitives-traits/src/blob_sidecar.rs @@ -114,11 +114,7 @@ impl Decodable for BlobSidecar { let _rlp_head_tx_sidecar = alloy_rlp::Header::decode(buf)?; let this = Self { - blob_transaction_sidecar: BlobTransactionSidecar { - blobs: Decodable::decode(buf)?, - commitments: Decodable::decode(buf)?, - proofs: Decodable::decode(buf)?, - }, + blob_transaction_sidecar: Decodable::decode(buf)?, block_number: Decodable::decode(buf)?, block_hash: Decodable::decode(buf)?, tx_index: Decodable::decode(buf)?, diff --git a/crates/primitives-traits/src/constants/mod.rs b/crates/primitives-traits/src/constants/mod.rs index 5f755a982..25bfd1446 100644 --- a/crates/primitives-traits/src/constants/mod.rs +++ b/crates/primitives-traits/src/constants/mod.rs @@ -9,10 +9,6 @@ pub use gas_units::{GIGAGAS, KILOGAS, MEGAGAS}; /// The client version: `reth/v{major}.{minor}.{patch}` pub const RETH_CLIENT_VERSION: &str = concat!("reth/v", env!("CARGO_PKG_VERSION")); -<<<<<<< HEAD -/// The first four bytes of the call data for a function call specifies the function to be called. -pub const SELECTOR_LEN: usize = 4; - /// Maximum extra data size in a block after genesis #[cfg(not(feature = "bsc"))] pub const MAXIMUM_EXTRA_DATA_SIZE: usize = 32; @@ -21,38 +17,6 @@ pub const MAXIMUM_EXTRA_DATA_SIZE: usize = 32; #[cfg(feature = "bsc")] pub const MAXIMUM_EXTRA_DATA_SIZE: usize = 1024 * 1024; -/// An EPOCH is a series of 32 slots. -pub const EPOCH_SLOTS: u64 = 32; - -/// The duration of a slot in seconds. -/// -/// This is the time period of 12 seconds in which a randomly chosen validator has time to propose a -/// block. -pub const SLOT_DURATION: Duration = Duration::from_secs(12); - -/// An EPOCH is a series of 32 slots (~6.4min). -pub const EPOCH_DURATION: Duration = Duration::from_secs(12 * EPOCH_SLOTS); - -/// The default block nonce in the beacon consensus -pub const BEACON_NONCE: u64 = 0u64; - -/// The default Ethereum block gas limit. -pub const ETHEREUM_BLOCK_GAS_LIMIT: u64 = 30_000_000; - -/// The minimum tx fee below which the txpool will reject the transaction. 
-/// -/// Configured to `7` WEI which is the lowest possible value of base fee under mainnet EIP-1559 -/// parameters. `BASE_FEE_MAX_CHANGE_DENOMINATOR` -/// is `8`, or 12.5%. Once the base fee has dropped to `7` WEI it cannot decrease further because -/// 12.5% of 7 is less than 1. -/// -/// Note that min base fee under different 1559 parameterizations may differ, but there's no -/// significant harm in leaving this setting as is. -pub const MIN_PROTOCOL_BASE_FEE: u64 = 7; - -/// Same as [`MIN_PROTOCOL_BASE_FEE`] but as a U256. -pub const MIN_PROTOCOL_BASE_FEE_U256: U256 = U256::from_limbs([7u64, 0, 0, 0]); - /// Initial base fee as defined in [EIP-1559](https://eips.ethereum.org/EIPS/eip-1559) #[cfg(not(feature = "bsc"))] pub const EIP1559_INITIAL_BASE_FEE: u64 = 1_000_000_000; @@ -61,14 +25,6 @@ pub const EIP1559_INITIAL_BASE_FEE: u64 = 1_000_000_000; #[cfg(feature = "bsc")] pub const EIP1559_INITIAL_BASE_FEE: u64 = 0; -/// Base fee max change denominator as defined in [EIP-1559](https://eips.ethereum.org/EIPS/eip-1559) -pub const EIP1559_DEFAULT_BASE_FEE_MAX_CHANGE_DENOMINATOR: u64 = 8; - -/// Elasticity multiplier as defined in [EIP-1559](https://eips.ethereum.org/EIPS/eip-1559) -pub const EIP1559_DEFAULT_ELASTICITY_MULTIPLIER: u64 = 2; - -======= ->>>>>>> v1.1.1 /// Minimum gas limit allowed for transactions. pub const MINIMUM_GAS_LIMIT: u64 = 5000; @@ -76,44 +32,14 @@ pub const MINIMUM_GAS_LIMIT: u64 = 5000; pub const HOLESKY_GENESIS_HASH: B256 = b256!("b5f7f912443c940f21fd611f12828d75b534364ed9e95ca4e307729a4661bde4"); -<<<<<<< HEAD -/// Testnet genesis hash: `0x2f980576711e3617a5e4d83dd539548ec0f7792007d505a3d2e9674833af2d7c` -pub const DEV_GENESIS_HASH: B256 = - b256!("2f980576711e3617a5e4d83dd539548ec0f7792007d505a3d2e9674833af2d7c"); - -/// Keccak256 over empty array: `0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470` -pub const KECCAK_EMPTY: B256 = - b256!("c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"); - /// Ommer root of empty list: `0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347` pub const EMPTY_OMMER_ROOT_HASH: B256 = b256!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"); -/// Root hash of an empty trie: `0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421` -pub const EMPTY_ROOT_HASH: B256 = - b256!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"); - -/// From address from Optimism system txs: `0xdeaddeaddeaddeaddeaddeaddeaddeaddead0001` -pub const OP_SYSTEM_TX_FROM_ADDR: Address = address!("deaddeaddeaddeaddeaddeaddeaddeaddead0001"); - -/// To address from Optimism system txs: `0x4200000000000000000000000000000000000015` -pub const OP_SYSTEM_TX_TO_ADDR: Address = address!("4200000000000000000000000000000000000015"); - -/// Transactions root of empty receipts set. -pub const EMPTY_RECEIPTS: B256 = EMPTY_ROOT_HASH; - -/// Transactions root of empty transactions set. -pub const EMPTY_TRANSACTIONS: B256 = EMPTY_ROOT_HASH; - -/// Withdrawals root of empty withdrawals set. -pub const EMPTY_WITHDRAWALS: B256 = EMPTY_ROOT_HASH; - /// Empty mix hash pub const EMPTY_MIX_HASH: B256 = b256!("0000000000000000000000000000000000000000000000000000000000000000"); -======= ->>>>>>> v1.1.1 /// The number of blocks to unwind during a reorg that already became a part of canonical chain. /// /// In reality, the node can end up in this particular situation very rarely. 
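
The removed comment above explains why 7 wei is the protocol minimum base fee: with `BASE_FEE_MAX_CHANGE_DENOMINATOR = 8`, an empty block lowers the base fee by `base_fee / 8` in integer math, and that delta is already zero at 7. A small sketch of just that empty-block branch of the EIP-1559 update rule (the full rule also scales the delta by how far gas usage is from the target):

const EIP1559_BASE_FEE_MAX_CHANGE_DENOMINATOR: u64 = 8;

/// Next base fee after a completely empty block (gas_used = 0).
fn next_base_fee_empty_block(base_fee: u64) -> u64 {
    base_fee - base_fee / EIP1559_BASE_FEE_MAX_CHANGE_DENOMINATOR
}

fn main() {
    let mut base_fee = 1_000_000_000u64; // start at 1 gwei
    while next_base_fee_empty_block(base_fee) < base_fee {
        base_fee = next_base_fee_empty_block(base_fee);
    }
    // 7 / 8 == 0 in integer math, so the base fee can never fall below 7 wei.
    assert_eq!(base_fee, 7);
    assert_eq!(next_base_fee_empty_block(7), 7);
}
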
It would happen only diff --git a/crates/primitives/Cargo.toml b/crates/primitives/Cargo.toml index 80c9dc1c0..29f859834 100644 --- a/crates/primitives/Cargo.toml +++ b/crates/primitives/Cargo.toml @@ -20,7 +20,6 @@ reth-trie-common.workspace = true revm = { workspace = true, optional = true } revm-primitives = { workspace = true, features = ["serde"] } reth-codecs = { workspace = true, optional = true } -reth-chainspec = { workspace = true, optional = true } # ethereum alloy-consensus.workspace = true @@ -102,9 +101,14 @@ std = [ "revm-primitives/std", "secp256k1?/std", "serde/std", + "revm?/std" ] reth-codec = ["dep:reth-codecs", "dep:zstd", "dep:modular-bitfield", "std"] -asm-keccak = ["alloy-primitives/asm-keccak", "revm-primitives/asm-keccak"] +asm-keccak = [ + "alloy-primitives/asm-keccak", + "revm-primitives/asm-keccak", + "revm?/asm-keccak" +] arbitrary = [ "dep:arbitrary", "alloy-eips/arbitrary", @@ -122,7 +126,8 @@ arbitrary = [ "alloy-serde?/arbitrary", "op-alloy-consensus?/arbitrary", "op-alloy-rpc-types?/arbitrary", - "reth-codecs?/arbitrary" + "reth-codecs?/arbitrary", + "revm?/arbitrary" ] secp256k1 = ["dep:secp256k1"] c-kzg = [ @@ -135,6 +140,8 @@ optimism = [ "dep:op-alloy-consensus", "reth-codecs?/optimism", "revm-primitives/optimism", + "reth-chainspec/optimism", + "revm?/optimism" ] opbnb = [ "revm/opbnb", @@ -154,6 +161,7 @@ test-utils = [ "reth-chainspec/test-utils", "reth-codecs?/test-utils", "reth-trie-common/test-utils", + "revm?/test-utils" ] serde-bincode-compat = [ "alloy-consensus/serde-bincode-compat", diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index 8af66c1f8..b8e1b7c27 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -122,10 +122,8 @@ mod block_rlp { impl<'a> From<&'a Block> for HelperRef<'a, Header> { fn from(block: &'a Block) -> Self { - let Block { - header, - body: BlockBody { transactions, ommers, withdrawals, sidecars }, - } = block; + let Block { header, body: BlockBody { transactions, ommers, withdrawals, sidecars } } = + block; Self { header, transactions, @@ -154,23 +152,15 @@ mod block_rlp { impl Decodable for Block { fn decode(b: &mut &[u8]) -> alloy_rlp::Result { - let Helper { header, transactions, ommers, withdrawals, sidecars } = - Helper::decode(b)?; - Ok(Self { - header, - body: BlockBody { transactions, ommers, withdrawals, sidecars }, - }) + let Helper { header, transactions, ommers, withdrawals, sidecars } = Helper::decode(b)?; + Ok(Self { header, body: BlockBody { transactions, ommers, withdrawals, sidecars } }) } } impl Decodable for SealedBlock { fn decode(b: &mut &[u8]) -> alloy_rlp::Result { - let Helper { header, transactions, ommers, withdrawals, sidecars } = - Helper::decode(b)?; - Ok(Self { - header, - body: BlockBody { transactions, ommers, withdrawals, sidecars }, - }) + let Helper { header, transactions, ommers, withdrawals, sidecars } = Helper::decode(b)?; + Ok(Self { header, body: BlockBody { transactions, ommers, withdrawals, sidecars } }) } } @@ -212,12 +202,7 @@ impl<'a> arbitrary::Arbitrary<'a> for Block { Ok(Self { header: u.arbitrary()?, - body: BlockBody { - transactions, - ommers, - withdrawals: u.arbitrary()?, - sidecars: None, - }, + body: BlockBody { transactions, ommers, withdrawals: u.arbitrary()?, sidecars: None }, }) } } @@ -686,12 +671,7 @@ impl<'a> arbitrary::Arbitrary<'a> for BlockBody { }) .collect::>>()?; - Ok(Self { - transactions, - ommers, - sidecars: None, - withdrawals: u.arbitrary()?, - }) + Ok(Self { transactions, ommers, 
sidecars: None, withdrawals: u.arbitrary()? }) } } @@ -701,9 +681,7 @@ pub(super) mod serde_bincode_compat { use alloc::{borrow::Cow, vec::Vec}; use alloy_consensus::serde_bincode_compat::Header; use alloy_primitives::Address; - use reth_primitives_traits::{ - serde_bincode_compat::SealedHeader, BlobSidecars, Withdrawals, - }; + use reth_primitives_traits::{serde_bincode_compat::SealedHeader, BlobSidecars, Withdrawals}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde_with::{DeserializeAs, SerializeAs}; diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 26c92030c..0d8fdda58 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -42,8 +42,8 @@ pub use receipt::{ gas_spent_by_transactions, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, Receipts, }; pub use reth_primitives_traits::{ - logs_bloom, Account, BlobSidecar, BlobSidecars, Bytecode, GotExpected, GotExpectedBoxed, Header, HeaderError, Log, - LogData, SealedHeader, StorageEntry, Withdrawals, + logs_bloom, Account, BlobSidecar, BlobSidecars, Bytecode, GotExpected, GotExpectedBoxed, + Header, HeaderError, Log, LogData, SealedHeader, StorageEntry, Withdrawals, }; pub use static_file::StaticFileSegment; @@ -84,7 +84,5 @@ pub mod serde_bincode_compat { } // to make lint happy -#[cfg(feature = "bsc")] -use reth_chainspec as _; #[cfg(any(feature = "bsc", feature = "opbnb"))] use revm as _; diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index 39edac951..5cf31b977 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -335,7 +335,17 @@ impl Self { - Self { provider, pool, network, executor, events, evm_config, block_executor, consensus, bsc_trace_helper: None } + Self { + provider, + pool, + network, + executor, + events, + evm_config, + block_executor, + consensus, + bsc_trace_helper: None, + } } /// Configure the provider instance. @@ -346,8 +356,17 @@ impl { let Self { - provider, executor, events, network, evm_config, block_executor, consensus, bsc_trace_helper, .. + provider, + executor, + events, + network, + evm_config, + block_executor, + consensus, + bsc_trace_helper, + .. } = self; RpcModuleBuilder { provider, @@ -427,7 +462,15 @@ impl { let Self { - provider, pool, executor, events, evm_config, block_executor, consensus, bsc_trace_helper, .. + provider, + pool, + executor, + events, + evm_config, + block_executor, + consensus, + bsc_trace_helper, + .. } = self; RpcModuleBuilder { provider, @@ -483,8 +534,17 @@ impl { - let Self { pool, network, provider, events, evm_config, block_executor, consensus, bsc_trace_helper, .. } = - self; + let Self { + pool, + network, + provider, + events, + evm_config, + block_executor, + consensus, + bsc_trace_helper, + .. + } = self; RpcModuleBuilder { provider, network, @@ -538,7 +607,15 @@ impl RpcModuleBuilder { - let Self { provider, network, pool, executor, events, evm_config, block_executor, bsc_trace_helper, .. } = - self; + let Self { + provider, + network, + pool, + executor, + events, + evm_config, + block_executor, + bsc_trace_helper, + .. + } = self; RpcModuleBuilder { provider, network, @@ -614,17 +719,28 @@ impl( + /// Configure the `bsc_trace_helper` implementation. + #[allow(clippy::use_self)] + pub fn with_bsc_trace_helper( self, - bsc_trace_helper: Option - ) -> RpcModuleBuilder { - let Self { provider, network, pool, executor, events, evm_config, block_executor, consensus, .. 
} = - self; + bsc_trace_helper: Option, + ) -> RpcModuleBuilder + { + let Self { + provider, + network, + pool, + executor, + events, + evm_config, + block_executor, + consensus, + .. + } = self; RpcModuleBuilder { provider, network, @@ -634,7 +750,7 @@ impl> + SpawnBlocking { // block the transaction is included in let parent_block = block.parent_hash; let parent_timestamp = self - .block(parent_block.into()) + .block_with_senders(parent_block.into()) .await? .map(|block| block.timestamp) .ok_or(EthApiError::UnknownParentBlock)?; @@ -637,7 +637,7 @@ pub trait Call: LoadState> + SpawnBlocking { parent_timestamp, )?; - let tx_env = Call::evm_config(&this).tx_env(&tx); + let tx_env = RpcNodeCore::evm_config(&this).tx_env(tx.as_signed(), tx.signer()); #[cfg(feature = "bsc")] let tx_env = { let mut tx_env = tx_env; @@ -647,11 +647,7 @@ pub trait Call: LoadState> + SpawnBlocking { tx_env }; - let env = EnvWithHandlerCfg::new_with_cfg_env( - cfg, - block_env, - RpcNodeCore::evm_config(&this).tx_env(tx.as_signed(), tx.signer()), - ); + let env = EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, tx_env); let (res, _) = this.transact(&mut db, env)?; f(tx_info, res, db) diff --git a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs index a6f73e6b9..8e508325e 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs @@ -445,12 +445,7 @@ pub trait LoadPendingBlock: // seal the block let block = Block { header, - body: BlockBody { - transactions: executed_txs, - ommers: vec![], - withdrawals, - sidecars, - }, + body: BlockBody { transactions: executed_txs, ommers: vec![], withdrawals, sidecars }, }; Ok((SealedBlockWithSenders { block: block.seal_slow(), senders }, receipts)) } diff --git a/crates/rpc/rpc-eth-api/src/helpers/trace.rs b/crates/rpc/rpc-eth-api/src/helpers/trace.rs index a15838567..668e5807b 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/trace.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/trace.rs @@ -2,7 +2,7 @@ use std::sync::Arc; -use crate::{FromEvmError, RpcNodeCore}; +use crate::{FromEthApiError, FromEvmError, RpcNodeCore}; use alloy_primitives::B256; use alloy_rpc_types::{BlockId, TransactionInfo}; use futures::Future; @@ -190,8 +190,9 @@ pub trait Trace: LoadState> { // block the transaction is included in let parent_block = block.parent_hash; let parent_beacon_block_root = block.parent_beacon_block_root; - let parent_timestamp = LoadState::cache(self) - .get_block(parent_block) + let parent_timestamp = self + .cache() + .get_sealed_block_with_senders(parent_block) .await .map_err(Self::Error::from_eth_err)? 
.map(|block| block.timestamp) @@ -226,7 +227,7 @@ pub trait Trace: LoadState> { parent_timestamp, )?; - let tx_env = Call::evm_config(&this).tx_env(&tx); + let tx_env = RpcNodeCore::evm_config(&this).tx_env(tx.as_signed(), tx.signer()); #[cfg(feature = "bsc")] let tx_env = { let mut tx_env = tx_env; @@ -236,11 +237,7 @@ pub trait Trace: LoadState> { tx_env }; - let env = EnvWithHandlerCfg::new_with_cfg_env( - cfg, - block_env, - RpcNodeCore::evm_config(&this).tx_env(tx.as_signed(), tx.signer()), - ); + let env = EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env, tx_env); let (res, _) = this.inspect(StateCacheDbRefMutWrapper(&mut db), env, &mut inspector)?; f(tx_info, inspector, res, db) diff --git a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs index a871e2698..15809d95b 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs @@ -463,7 +463,8 @@ pub trait EthTransactions: LoadTransaction { Self: LoadReceipt + 'static, { async move { - let meta = match LoadTransaction::provider(self) + let meta = match self + .provider() .transaction_by_hash_with_meta(hash) .map_err(Self::Error::from_eth_err)? { @@ -472,11 +473,10 @@ pub trait EthTransactions: LoadTransaction { }; // If no block sidecars found, return None - let sidecars = - match LoadTransaction::cache(self).get_sidecars(meta.block_hash).await.unwrap() { - Some(sidecars) => sidecars, - None => return Ok(None), - }; + let sidecars = match self.cache().get_sidecars(meta.block_hash).await.unwrap() { + Some(sidecars) => sidecars, + None => return Ok(None), + }; Ok(sidecars.iter().find(|item| item.tx_hash == hash).map(|sidecar| BlockSidecar { blob_sidecar: sidecar.blob_transaction_sidecar.clone(), diff --git a/crates/rpc/rpc-eth-types/src/builder/ctx.rs b/crates/rpc/rpc-eth-types/src/builder/ctx.rs index 570773ec1..eedb3f458 100644 --- a/crates/rpc/rpc-eth-types/src/builder/ctx.rs +++ b/crates/rpc/rpc-eth-types/src/builder/ctx.rs @@ -5,7 +5,6 @@ use reth_chain_state::CanonStateSubscriptions; use reth_chainspec::ChainSpecProvider; use reth_storage_api::BlockReaderIdExt; use reth_tasks::TaskSpawner; -use std::marker::PhantomData; use crate::{ fee_history::fee_history_cache_new_blocks_task, EthConfig, EthStateCache, FeeHistoryCache, diff --git a/crates/rpc/rpc-eth-types/src/cache/mod.rs b/crates/rpc/rpc-eth-types/src/cache/mod.rs index 25bcb2b95..6ff26797a 100644 --- a/crates/rpc/rpc-eth-types/src/cache/mod.rs +++ b/crates/rpc/rpc-eth-types/src/cache/mod.rs @@ -7,10 +7,7 @@ use reth_chain_state::CanonStateNotification; use reth_errors::{ProviderError, ProviderResult}; use reth_evm::{provider::EvmEnvProvider, ConfigureEvm}; use reth_execution_types::Chain; -use reth_primitives::{ - BlobSidecars, Block, BlockHashOrNumber, BlockWithSenders, Header, Receipt, SealedBlock, - SealedBlockWithSenders, TransactionSigned, TransactionSignedEcRecovered, -}; +use reth_primitives::{BlobSidecars, Header, Receipt, SealedBlockWithSenders, TransactionSigned}; use reth_storage_api::{BlockReader, StateProviderFactory, TransactionVariant}; use reth_tasks::{TaskSpawner, TokioTaskExecutor}; use revm::primitives::{BlockEnv, CfgEnv, CfgEnvWithHandlerCfg, SpecId}; @@ -593,8 +590,14 @@ enum CacheAction { RemoveReorgedChain { chain_change: ChainChange, }, - GetSidecars { block_hash: B256, response_tx: SidecarsResponseSender }, - SidecarsResult { block_hash: B256, res: ProviderResult> }, + GetSidecars { + block_hash: B256, + response_tx: 
SidecarsResponseSender, + }, + SidecarsResult { + block_hash: B256, + res: ProviderResult>, + }, } struct BlockReceipts { diff --git a/crates/rpc/rpc/Cargo.toml b/crates/rpc/rpc/Cargo.toml index b02a2c424..a1d084363 100644 --- a/crates/rpc/rpc/Cargo.toml +++ b/crates/rpc/rpc/Cargo.toml @@ -107,9 +107,4 @@ jsonrpsee = { workspace = true, features = ["client"] } js-tracer = ["revm-inspectors/js-tracer", "reth-rpc-eth-types/js-tracer"] bsc = [ "reth-rpc-eth-api/bsc", -] -optimism = [ - "reth-primitives/optimism", - "reth-provider/optimism", - "reth-revm/optimism", -] +] \ No newline at end of file diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index 1f41fe5d5..02b9c7953 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -95,6 +95,7 @@ where } /// Trace the entire block asynchronously + #[allow(clippy::too_many_arguments)] async fn trace_block( &self, at: BlockId, @@ -178,7 +179,8 @@ where before_system_tx = false; } - let tx_env = Call::evm_config(this.eth_api()).tx_env(&tx); + let tx_env = + RpcNodeCore::evm_config(this.eth_api()).tx_env(tx.as_signed(), tx.signer()); #[cfg(feature = "bsc")] let tx_env = { let mut tx_env = tx_env; @@ -189,13 +191,7 @@ where }; let env = EnvWithHandlerCfg { - env: Env::boxed( - cfg.cfg_env.clone(), - block_env.clone(), - tx_env, - RpcNodeCore::evm_config(this.eth_api()) - .tx_env(tx.as_signed(), tx.signer()), - ), + env: Env::boxed(cfg.cfg_env.clone(), block_env.clone(), tx_env), handler_cfg: cfg.handler_cfg, }; let (result, state_changes) = this.trace_transaction( @@ -244,7 +240,7 @@ where let parent = block.parent_hash; let parent_timestamp = self .eth_api() - .block(parent.into()) + .block_with_senders(parent.into()) .await? .map(|block| block.timestamp) .ok_or(EthApiError::UnknownParentBlock)?; @@ -314,7 +310,7 @@ where let state_at = block.parent_hash; let parent_timestamp = self .eth_api() - .block(state_at.into()) + .block_with_senders(state_at.into()) .await? .map(|block| block.timestamp) .ok_or(EthApiError::UnknownParentBlock)?; @@ -351,7 +347,7 @@ where let block_hash = block.hash(); let parent_timestamp = self .eth_api() - .block(state_at) + .block_with_senders(block.parent_hash.into()) .await? 
.map(|block| block.timestamp) .ok_or(EthApiError::UnknownParentBlock)?; @@ -394,7 +390,8 @@ where parent_timestamp, )?; - let tx_env = Call::evm_config(this.eth_api()).tx_env(&tx); + let tx_env = + RpcNodeCore::evm_config(this.eth_api()).tx_env(tx.as_signed(), tx.signer()); #[cfg(feature = "bsc")] let tx_env = { let mut tx_env = tx_env; @@ -405,12 +402,7 @@ where }; let env = EnvWithHandlerCfg { - env: Env::boxed( - cfg.cfg_env.clone(), - block_env, - tx_env, - RpcNodeCore::evm_config(this.eth_api()).tx_env(tx.as_signed(), tx.signer()), - ), + env: Env::boxed(cfg.cfg_env.clone(), block_env, tx_env), handler_cfg: cfg.handler_cfg, }; diff --git a/crates/rpc/rpc/src/eth/core.rs b/crates/rpc/rpc/src/eth/core.rs index df1538945..1b8c0e4b9 100644 --- a/crates/rpc/rpc/src/eth/core.rs +++ b/crates/rpc/rpc/src/eth/core.rs @@ -4,11 +4,11 @@ use std::sync::Arc; use crate::eth::EthTxBuilder; +use alloy_eips::BlockNumberOrTag; use alloy_network::Ethereum; use alloy_primitives::U256; use derive_more::Deref; use reth_bsc_consensus::BscTraceHelper; -use alloy_eips::BlockNumberOrTag; use reth_provider::{BlockReaderIdExt, CanonStateSubscriptions, ChainSpecProvider}; use reth_rpc_eth_api::{ helpers::{EthSigner, SpawnBlocking}, @@ -46,7 +46,11 @@ pub struct EthApi { impl Clone for EthApi { fn clone(&self) -> Self { - Self { inner: self.inner.clone(), bsc_trace_helper: self.bsc_trace_helper.clone(), tx_resp_builder: EthTxBuilder } + Self { + inner: self.inner.clone(), + bsc_trace_helper: self.bsc_trace_helper.clone(), + tx_resp_builder: EthTxBuilder, + } } } @@ -125,7 +129,11 @@ where ctx.config.proof_permits, ); - Self { inner: Arc::new(inner), bsc_trace_helper: ctx.bsc_trace_helper.clone(), tx_resp_builder: EthTxBuilder } + Self { + inner: Arc::new(inner), + bsc_trace_helper: ctx.bsc_trace_helper.clone(), + tx_resp_builder: EthTxBuilder, + } } } diff --git a/crates/rpc/rpc/src/eth/helpers/call.rs b/crates/rpc/rpc/src/eth/helpers/call.rs index e513f06ff..967854b76 100644 --- a/crates/rpc/rpc/src/eth/helpers/call.rs +++ b/crates/rpc/rpc/src/eth/helpers/call.rs @@ -1,17 +1,19 @@ //! Contains RPC handler implementations specific to endpoints that call/execute within evm. use crate::EthApi; -use alloy_primitives::B256; +use alloy_primitives::{Address, B256}; use reth_bsc_primitives::system_contracts::is_system_transaction; -use reth_evm::{ConfigureEvm, ConfigureEvmEnv}; -use reth_primitives::{Header, TransactionSignedEcRecovered}; +use reth_evm::ConfigureEvm; +use reth_primitives::{Header, TransactionSigned}; use reth_rpc_eth_api::{ helpers::{Call, EthCall, LoadPendingBlock, LoadState, SpawnBlocking}, FromEvmError, }; use reth_rpc_eth_types::EthApiError; -use revm::db::CacheDB; -use revm_primitives::{db::DatabaseRef, BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg}; +use revm_primitives::{ + db::{Database, DatabaseCommit}, + BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, +}; impl EthCall for EthApi where Self: Call + LoadPendingBlock @@ -34,18 +36,19 @@ where } /// Replays all the transactions until the target transaction is found. 
- fn replay_transactions_until( + fn replay_transactions_until<'a, DB, I>( &self, - db: &mut CacheDB, + db: &mut DB, cfg: CfgEnvWithHandlerCfg, block_env: BlockEnv, - transactions: impl IntoIterator, + transactions: I, target_tx_hash: B256, parent_timestamp: u64, ) -> Result where - DB: DatabaseRef, + DB: Database + DatabaseCommit, EthApiError: From, + I: IntoIterator, { #[allow(clippy::redundant_clone)] let env = EnvWithHandlerCfg::new_with_cfg_env(cfg, block_env.clone(), Default::default()); @@ -65,12 +68,10 @@ where } } - for tx in transactions { + for (sender, tx) in transactions { // check if the transaction is a system transaction // this should be done before return - if is_bsc && - before_system_tx && - is_system_transaction(&tx, tx.signer(), block_env.coinbase) + if is_bsc && before_system_tx && is_system_transaction(tx, *sender, block_env.coinbase) { if let Some(trace_helper) = self.bsc_trace_helper.as_ref() { // move block reward from the system address to the coinbase @@ -93,8 +94,7 @@ where break } - let sender = tx.signer(); - self.evm_config().fill_tx_env(evm.tx_mut(), &tx.into_signed(), sender); + self.evm_config().fill_tx_env(evm.tx_mut(), tx, *sender); #[cfg(feature = "bsc")] if !before_system_tx { diff --git a/crates/rpc/rpc/src/validation.rs b/crates/rpc/rpc/src/validation.rs index 1476180d4..bae0a3695 100644 --- a/crates/rpc/rpc/src/validation.rs +++ b/crates/rpc/rpc/src/validation.rs @@ -171,12 +171,13 @@ where self.validate_gas_limit(registered_gas_limit, &latest_header, &block.header)?; let state_provider = self.provider.state_by_block_hash(latest_header.hash())?; - let executor = self.executor_provider.executor(StateProviderDatabase::new(&state_provider)); + let executor = + self.executor_provider.executor(StateProviderDatabase::new(&state_provider), None); let block = block.unseal(); let mut accessed_blacklisted = None; let output = executor.execute_with_state_closure( - BlockExecutionInput::new(&block, U256::MAX), + BlockExecutionInput::new(&block, U256::MAX, None), |state| { if !self.disallow.is_empty() { for account in state.cache.accounts.keys() { diff --git a/crates/stages/stages/src/stages/execution.rs b/crates/stages/stages/src/stages/execution.rs index 71f8c420e..04866c189 100644 --- a/crates/stages/stages/src/stages/execution.rs +++ b/crates/stages/stages/src/stages/execution.rs @@ -223,7 +223,7 @@ where provider.tx_ref(), provider.static_file_provider(), )); - let mut executor = self.executor_provider.batch_executor(db); + let mut executor = self.executor_provider.batch_executor(db, None); executor.set_tip(max_block); executor.set_prune_modes(prune_modes); diff --git a/crates/static-file/types/src/lib.rs b/crates/static-file/types/src/lib.rs index 486c94090..dc64be74c 100644 --- a/crates/static-file/types/src/lib.rs +++ b/crates/static-file/types/src/lib.rs @@ -83,8 +83,12 @@ mod tests { #[test] fn test_highest_static_files_highest() { - let files = - HighestStaticFiles { headers: Some(100), receipts: Some(200), transactions: None }; + let files = HighestStaticFiles { + headers: Some(100), + receipts: Some(200), + transactions: None, + sidecars: None, + }; // Test for headers segment assert_eq!(files.highest(StaticFileSegment::Headers), Some(100)); @@ -115,8 +119,12 @@ mod tests { #[test] fn test_highest_static_files_min() { - let files = - HighestStaticFiles { headers: Some(300), receipts: Some(100), transactions: None }; + let files = HighestStaticFiles { + headers: Some(300), + receipts: Some(100), + transactions: None, + sidecars: None, + }; // 
Minimum value among the available segments assert_eq!(files.min(), Some(100)); @@ -128,8 +136,12 @@ mod tests { #[test] fn test_highest_static_files_max() { - let files = - HighestStaticFiles { headers: Some(300), receipts: Some(100), transactions: Some(500) }; + let files = HighestStaticFiles { + headers: Some(300), + receipts: Some(100), + transactions: Some(500), + sidecars: None, + }; // Maximum value among the available segments assert_eq!(files.max(), Some(500)); diff --git a/crates/storage/db-api/src/models/mod.rs b/crates/storage/db-api/src/models/mod.rs index 0c0a42468..6858725a4 100644 --- a/crates/storage/db-api/src/models/mod.rs +++ b/crates/storage/db-api/src/models/mod.rs @@ -8,7 +8,8 @@ use alloy_genesis::GenesisAccount; use alloy_primitives::{bytes::BufMut, Address, Bytes, Log, B256, U256}; use reth_codecs::{add_arbitrary_tests, Compact}; use reth_primitives::{ - parlia::Snapshot, Account, BlobSidecar, Bytecode, Header, Receipt, StorageEntry, TransactionSignedNoHash, TxType, + parlia::Snapshot, Account, BlobSidecar, BlobSidecars, Bytecode, Header, Receipt, StorageEntry, + TransactionSignedNoHash, TxType, }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::StageCheckpoint; diff --git a/crates/storage/db/src/tables/mod.rs b/crates/storage/db/src/tables/mod.rs index 7e9ef5f9c..36e5b146d 100644 --- a/crates/storage/db/src/tables/mod.rs +++ b/crates/storage/db/src/tables/mod.rs @@ -30,7 +30,9 @@ use reth_db_api::{ }, table::{Decode, DupSort, Encode, Table}, }; -use reth_primitives::{parlia::Snapshot, Account, Bytecode, Header, Receipt, StorageEntry, TransactionSignedNoHash}; +use reth_primitives::{ + parlia::Snapshot, Account, Bytecode, Header, Receipt, StorageEntry, TransactionSignedNoHash, +}; use reth_primitives_traits::{BlobSidecars, IntegerList}; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::StageCheckpoint; diff --git a/crates/storage/provider/Cargo.toml b/crates/storage/provider/Cargo.toml index 04a0bf429..4b71d82e2 100644 --- a/crates/storage/provider/Cargo.toml +++ b/crates/storage/provider/Cargo.toml @@ -94,7 +94,8 @@ optimism = [ "reth-codecs/optimism", "reth-db/optimism", "reth-db-api/optimism", - "revm/optimism" + "revm/optimism", + "reth-chainspec/optimism" ] serde = [ "reth-execution-types/serde", diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs b/crates/storage/provider/src/providers/blockchain_provider.rs index 5062e7edc..11ecb4039 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -4,10 +4,10 @@ use crate::{ AccountReader, BlockHashReader, BlockIdReader, BlockNumReader, BlockReader, BlockReaderIdExt, BlockSource, CanonChainTracker, CanonStateNotifications, CanonStateSubscriptions, ChainSpecProvider, ChainStateBlockReader, ChangeSetReader, DatabaseProvider, - DatabaseProviderFactory, EvmEnvProvider, FullProvider, HeaderProvider, ProviderError, - ProviderFactory, PruneCheckpointReader, ReceiptProvider, ReceiptProviderIdExt, + DatabaseProviderFactory, EvmEnvProvider, FullProvider, HeaderProvider, ParliaSnapshotReader, + ProviderError, ProviderFactory, PruneCheckpointReader, ReceiptProvider, ReceiptProviderIdExt, StageCheckpointReader, StateProviderBox, StateProviderFactory, StateReader, - StaticFileProviderFactory, TransactionVariant, TransactionsProvider, WithdrawalsProvider, ParliaSnapshotReader + StaticFileProviderFactory, TransactionVariant, TransactionsProvider, WithdrawalsProvider, 
}; use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag}; use alloy_primitives::{Address, BlockHash, BlockNumber, Sealable, TxHash, TxNumber, B256, U256}; @@ -23,13 +23,13 @@ use reth_evm::ConfigureEvmEnv; use reth_execution_types::ExecutionOutcome; use reth_node_types::NodeTypesWithDB; use reth_primitives::{ - parlia::Snapshot, Account, BlobSidecars, Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders, - SealedHeader, StorageEntry, TransactionMeta, TransactionSigned, TransactionSignedNoHash, - Withdrawals, + parlia::Snapshot, Account, BlobSidecars, Block, BlockWithSenders, Header, Receipt, SealedBlock, + SealedBlockWithSenders, SealedHeader, StorageEntry, TransactionMeta, TransactionSigned, + TransactionSignedNoHash, Withdrawals, }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; -use reth_storage_api::{DBProvider, StorageChangeSetReader, SidecarsProvider}; +use reth_storage_api::{DBProvider, SidecarsProvider, StorageChangeSetReader}; use reth_storage_errors::provider::ProviderResult; use revm::primitives::{BlockEnv, CfgEnvWithHandlerCfg}; use std::{ @@ -440,7 +440,6 @@ impl SidecarsProvider for BlockchainProvider2 { } } - impl StageCheckpointReader for BlockchainProvider2 { fn get_stage_checkpoint(&self, id: StageId) -> ProviderResult> { self.consistent_provider()?.get_stage_checkpoint(id) diff --git a/crates/storage/provider/src/providers/consistent.rs b/crates/storage/provider/src/providers/consistent.rs index 0784bdb63..6d6738ca4 100644 --- a/crates/storage/provider/src/providers/consistent.rs +++ b/crates/storage/provider/src/providers/consistent.rs @@ -17,13 +17,15 @@ use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices}; use reth_evm::ConfigureEvmEnv; use reth_execution_types::{BundleStateInit, ExecutionOutcome, RevertsInit}; use reth_primitives::{ - Account, Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders, - SealedHeader, StorageEntry, TransactionMeta, TransactionSigned, TransactionSignedNoHash, - Withdrawals, + Account, BlobSidecars, Block, BlockWithSenders, Header, Receipt, SealedBlock, + SealedBlockWithSenders, SealedHeader, StorageEntry, TransactionMeta, TransactionSigned, + TransactionSignedNoHash, Withdrawals, }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; -use reth_storage_api::{DatabaseProviderFactory, StateProvider, StorageChangeSetReader}; +use reth_storage_api::{ + DatabaseProviderFactory, SidecarsProvider, StateProvider, StorageChangeSetReader, +}; use reth_storage_errors::provider::ProviderResult; use revm::{ db::states::PlainStorageRevert, @@ -773,6 +775,16 @@ impl BlockIdReader for ConsistentProvider { } } +impl SidecarsProvider for ConsistentProvider { + fn sidecars(&self, block_hash: &BlockHash) -> ProviderResult> { + self.storage_provider.sidecars(block_hash) + } + + fn sidecars_by_number(&self, num: BlockNumber) -> ProviderResult> { + self.storage_provider.sidecars_by_number(num) + } +} + impl BlockReader for ConsistentProvider { fn find_block_by_hash(&self, hash: B256, source: BlockSource) -> ProviderResult> { match source { diff --git a/crates/storage/provider/src/providers/database/mod.rs b/crates/storage/provider/src/providers/database/mod.rs index 1a98886ae..4e3a651ec 100644 --- a/crates/storage/provider/src/providers/database/mod.rs +++ b/crates/storage/provider/src/providers/database/mod.rs @@ -3,9 +3,9 @@ use crate::{ 
to_range, traits::{BlockSource, ReceiptProvider}, BlockHashReader, BlockNumReader, BlockReader, ChainSpecProvider, DatabaseProviderFactory, - EvmEnvProvider, HeaderProvider, HeaderSyncGap, HeaderSyncGapProvider, ProviderError, - PruneCheckpointReader, StageCheckpointReader, StateProviderBox, StaticFileProviderFactory, - TransactionVariant, TransactionsProvider, WithdrawalsProvider, ParliaSnapshotReader, + EvmEnvProvider, HeaderProvider, HeaderSyncGap, HeaderSyncGapProvider, ParliaSnapshotReader, + ProviderError, PruneCheckpointReader, StageCheckpointReader, StateProviderBox, + StaticFileProviderFactory, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber}; use alloy_primitives::{Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; @@ -17,8 +17,9 @@ use reth_errors::{RethError, RethResult}; use reth_evm::ConfigureEvmEnv; use reth_node_types::NodeTypesWithDB; use reth_primitives::{ - parlia::Snapshot, BlobSidecars, Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, - StaticFileSegment, TransactionMeta, TransactionSigned, TransactionSignedNoHash, Withdrawals, + parlia::Snapshot, BlobSidecars, Block, BlockWithSenders, Header, Receipt, SealedBlock, + SealedBlockWithSenders, SealedHeader, StaticFileSegment, TransactionMeta, TransactionSigned, + TransactionSignedNoHash, Withdrawals, }; use reth_prune_types::{PruneCheckpoint, PruneModes, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 3717ffb48..4d35474f7 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -10,10 +10,11 @@ use crate::{ BundleStateInit, ChainStateBlockReader, ChainStateBlockWriter, DBProvider, EvmEnvProvider, HashingWriter, HeaderProvider, HeaderSyncGap, HeaderSyncGapProvider, HistoricalStateProvider, HistoricalStateProviderRef, HistoryWriter, LatestStateProvider, LatestStateProviderRef, - OriginalValuesKnown, ProviderError, PruneCheckpointReader, PruneCheckpointWriter, RevertsInit, - StageCheckpointReader, StateChangeWriter, StateProviderBox, StateReader, StateWriter, - StaticFileProviderFactory, StatsReader, StorageReader, StorageTrieWriter, TransactionVariant, - TransactionsProvider, TransactionsProviderExt, TrieWriter, WithdrawalsProvider, ParliaSnapshotReader, + OriginalValuesKnown, ParliaSnapshotReader, ProviderError, PruneCheckpointReader, + PruneCheckpointWriter, RevertsInit, SidecarsProvider, StageCheckpointReader, StateChangeWriter, + StateProviderBox, StateReader, StateWriter, StaticFileProviderFactory, StatsReader, + StorageReader, StorageTrieWriter, TransactionVariant, TransactionsProvider, + TransactionsProviderExt, TrieWriter, WithdrawalsProvider, }; use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber}; use alloy_primitives::{keccak256, Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; @@ -40,10 +41,10 @@ use reth_execution_types::{Chain, ExecutionOutcome}; use reth_network_p2p::headers::downloader::SyncTarget; use reth_node_types::NodeTypes; use reth_primitives::{ - parlia::Snapshot, Account, BlobSidecars, Block, BlockBody, BlockWithSenders, Bytecode, GotExpected, Header, Receipt, - SealedBlock, SealedBlockWithSenders, SealedHeader, StaticFileSegment, StorageEntry, - TransactionMeta, TransactionSigned, TransactionSignedEcRecovered, 
TransactionSignedNoHash, - Withdrawals, + parlia::Snapshot, Account, BlobSidecars, Block, BlockBody, BlockWithSenders, Bytecode, + GotExpected, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, + StaticFileSegment, StorageEntry, TransactionMeta, TransactionSigned, + TransactionSignedEcRecovered, TransactionSignedNoHash, Withdrawals, }; use reth_prune_types::{PruneCheckpoint, PruneModes, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; @@ -536,6 +537,7 @@ impl DatabaseProvider { }) .collect(); + // the sidecars will always be None as this is not needed construct_block(header, body, senders, ommers, withdrawals) } @@ -612,10 +614,8 @@ impl DatabaseProvider { .unwrap_or_default() }; - let sidecars = Some(Default::default()); // no need to read sidecars - if let Ok(b) = - assemble_block(header, tx_range, ommers, withdrawals, sidecars) + assemble_block(header, tx_range, ommers, withdrawals, Some(Default::default())) { blocks.push(b); } @@ -672,24 +672,24 @@ impl DatabaseProvider { .walk_range(tx_range.clone())? .collect::, _>>()?; - let mut senders = Vec::with_capacity(body.len()); - for (tx_num, tx) in tx_range.zip(body.iter()) { - match known_senders.get(&tx_num) { - None => { - // recover the sender from the transaction if not found - let sender = tx - .recover_signer_unchecked() - .ok_or(ProviderError::SenderRecoveryError)?; - senders.push(sender); - } - Some(sender) => senders.push(*sender), + let mut senders = Vec::with_capacity(body.len()); + for (tx_num, tx) in tx_range.zip(body.iter()) { + match known_senders.get(&tx_num) { + None => { + // recover the sender from the transaction if not found + let sender = tx + .recover_signer_unchecked() + .ok_or(ProviderError::SenderRecoveryError)?; + senders.push(sender); } + Some(sender) => senders.push(*sender), } + } - (body, senders) - }; + (body, senders) + }; - assemble_block(header, body, ommers, withdrawals, senders) + assemble_block(header, body, ommers, withdrawals, sidecars, senders) }) } @@ -1127,12 +1127,10 @@ impl DatabaseProvider { let mut block_ommers_iter = block_ommers.into_iter(); let mut block_withdrawals_iter = block_withdrawals.into_iter(); let mut block_sidecars_iter = block_sidecars.into_iter(); - let mut block_ommers = block_ommers_iter.next(); let mut block_withdrawals = block_withdrawals_iter.next(); let mut block_sidecars = block_sidecars_iter.next(); - for ((main_block_number, header), (_, header_hash), (_, tx)) in izip!(block_header_iter, block_header_hashes_iter, block_tx_iter) { @@ -1181,7 +1179,7 @@ impl DatabaseProvider { blocks.push(SealedBlockWithSenders { block: SealedBlock { header, - body: BlockBody { transactions, ommers, withdrawals, sidecars, requests }, + body: BlockBody { transactions, ommers, withdrawals, sidecars }, }, senders, }) @@ -1624,13 +1622,21 @@ impl> BlockReader for Datab transaction_kind, |block_number| self.header_by_number(block_number), |header, transactions, senders, ommers, withdrawals| { - Block { header, body: BlockBody { transactions, ommers, withdrawals, sidecars: Some(Default::default()) } } - // Note: we're using unchecked here because we know the block contains valid txs - // wrt to its height and can ignore the s value check so pre - // EIP-2 txs are allowed - .try_with_senders_unchecked(senders) - .map(Some) - .map_err(|_| ProviderError::SenderRecoveryError) + Block { + header, + body: BlockBody { + transactions, + ommers, + withdrawals, + sidecars: Some(Default::default()), + }, + } + // Note: we're using unchecked here because we know the 
block contains valid txs + // wrt to its height and can ignore the s value check so pre + // EIP-2 txs are allowed + .try_with_senders_unchecked(senders) + .map(Some) + .map_err(|_| ProviderError::SenderRecoveryError) }, ) } @@ -1645,13 +1651,21 @@ impl> BlockReader for Datab transaction_kind, |block_number| self.sealed_header(block_number), |header, transactions, senders, ommers, withdrawals| { - SealedBlock { header, body: BlockBody { transactions, ommers, withdrawals, sidecars: Some(Default::default()) } } - // Note: we're using unchecked here because we know the block contains valid txs - // wrt to its height and can ignore the s value check so pre - // EIP-2 txs are allowed - .try_with_senders_unchecked(senders) - .map(Some) - .map_err(|_| ProviderError::SenderRecoveryError) + SealedBlock { + header, + body: BlockBody { + transactions, + ommers, + withdrawals, + sidecars: Some(Default::default()), + }, + } + // Note: we're using unchecked here because we know the block contains valid txs + // wrt to its height and can ignore the s value check so pre + // EIP-2 txs are allowed + .try_with_senders_unchecked(senders) + .map(Some) + .map_err(|_| ProviderError::SenderRecoveryError) }, ) } @@ -1670,7 +1684,10 @@ impl> BlockReader for Datab .map(Into::into) .collect() }; - Ok(Block { header, body: BlockBody { transactions, ommers, withdrawals, sidecars } }) + Ok(Block { + header, + body: BlockBody { transactions, ommers, withdrawals, sidecars }, + }) }, ) } @@ -1699,7 +1716,10 @@ impl> BlockReader for Datab |range| self.sealed_headers_range(range), |header, transactions, ommers, withdrawals, sidecars, senders| { SealedBlockWithSenders::new( - SealedBlock { header, body: BlockBody { transactions, ommers, withdrawals, sidecars } }, + SealedBlock { + header, + body: BlockBody { transactions, ommers, withdrawals, sidecars }, + }, senders, ) .ok_or(ProviderError::SenderRecoveryError) @@ -3554,7 +3574,7 @@ impl ChainStateBlockWriter for DatabaseProvider ParliaSnapshotReader for DatabaseProvider { +impl ParliaSnapshotReader for DatabaseProvider { fn get_parlia_snapshot(&self, block_hash: B256) -> ProviderResult> { Ok(self.tx.get::(block_hash)?) 
 }

diff --git a/crates/storage/provider/src/providers/mod.rs b/crates/storage/provider/src/providers/mod.rs
index c45845989..22559ef15 100644
--- a/crates/storage/provider/src/providers/mod.rs
+++ b/crates/storage/provider/src/providers/mod.rs
@@ -3,9 +3,10 @@ use crate::{
     BlockSource, BlockchainTreePendingStateProvider, CanonChainTracker, CanonStateNotifications,
     CanonStateSubscriptions, ChainSpecProvider, ChainStateBlockReader, ChangeSetReader,
     DatabaseProviderFactory, EvmEnvProvider, FullExecutionDataProvider, HeaderProvider,
-    ParliaSnapshotReader, ProviderError, PruneCheckpointReader, ReceiptProvider, ReceiptProviderIdExt,
-    StageCheckpointReader, StateProviderBox, StateProviderFactory, StaticFileProviderFactory,
-    TransactionVariant, TransactionsProvider, TreeViewer, WithdrawalsProvider,
+    ParliaSnapshotReader, ProviderError, PruneCheckpointReader, ReceiptProvider,
+    ReceiptProviderIdExt, StageCheckpointReader, StateProviderBox, StateProviderFactory,
+    StaticFileProviderFactory, TransactionVariant, TransactionsProvider, TreeViewer,
+    WithdrawalsProvider,
 };
 use alloy_eips::{eip4895::Withdrawal, BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag};
 use alloy_primitives::{Address, BlockHash, BlockNumber, Sealable, TxHash, TxNumber, B256, U256};
@@ -20,8 +21,9 @@ use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices};
 use reth_evm::ConfigureEvmEnv;
 use reth_node_types::NodeTypesWithDB;
 use reth_primitives::{
-    parlia::Snapshot, Account, BlobSidecars, Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders,
-    SealedHeader, TransactionMeta, TransactionSigned, TransactionSignedNoHash, Withdrawals,
+    parlia::Snapshot, Account, BlobSidecars, Block, BlockWithSenders, Header, Receipt, SealedBlock,
+    SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned,
+    TransactionSignedNoHash, Withdrawals,
 };
 use reth_prune_types::{PruneCheckpoint, PruneSegment};
 use reth_stages_types::{StageCheckpoint, StageId};
diff --git a/crates/storage/provider/src/providers/static_file/manager.rs b/crates/storage/provider/src/providers/static_file/manager.rs
index 10ffc3092..2cabc9f02 100644
--- a/crates/storage/provider/src/providers/static_file/manager.rs
+++ b/crates/storage/provider/src/providers/static_file/manager.rs
@@ -32,8 +32,9 @@ use reth_primitives::{
         find_fixed_range, HighestStaticFiles, SegmentHeader, SegmentRangeInclusive,
         DEFAULT_BLOCKS_PER_STATIC_FILE,
     },
-    BlobSidecars, Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader,
-    StaticFileSegment, TransactionMeta, TransactionSigned, TransactionSignedNoHash, Withdrawals,
+    BlobSidecars, Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders,
+    SealedHeader, StaticFileSegment, TransactionMeta, TransactionSigned, TransactionSignedNoHash,
+    Withdrawals,
 };
 use reth_stages_types::{PipelineTarget, StageId};
 use reth_storage_api::{DBProvider, SidecarsProvider};
diff --git a/crates/storage/provider/src/providers/static_file/mod.rs b/crates/storage/provider/src/providers/static_file/mod.rs
index 08c13afcd..049ce0a0e 100644
--- a/crates/storage/provider/src/providers/static_file/mod.rs
+++ b/crates/storage/provider/src/providers/static_file/mod.rs
@@ -56,8 +56,8 @@ impl Deref for LoadedJar {
 mod tests {
     use super::*;
     use crate::{test_utils::create_test_provider_factory, HeaderProvider};
-    use alloy_eips::eip4844::BlobTransactionSidecar;
     use alloy_consensus::Transaction;
+    use alloy_eips::eip4844::BlobTransactionSidecar;
     use alloy_primitives::{BlockHash, TxNumber, B256, U256};
     use rand::seq::SliceRandom;
     use reth_db::{
diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs
index 536392561..bf9e9a407 100644
--- a/crates/storage/provider/src/test_utils/mock.rs
+++ b/crates/storage/provider/src/test_utils/mock.rs
@@ -21,8 +21,8 @@ use reth_evm::ConfigureEvmEnv;
 use reth_execution_types::ExecutionOutcome;
 use reth_node_types::NodeTypes;
 use reth_primitives::{
-    Account, BlobSidecars, Block, BlockWithSenders, Bytecode, GotExpected, Header, Receipt, SealedBlock,
-    SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned,
+    Account, BlobSidecars, Block, BlockWithSenders, Bytecode, GotExpected, Header, Receipt,
+    SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned,
     TransactionSignedNoHash, Withdrawals,
 };
 use reth_stages_types::{StageCheckpoint, StageId};
diff --git a/crates/storage/provider/src/test_utils/noop.rs b/crates/storage/provider/src/test_utils/noop.rs
index 49f7b79f9..a589fecd3 100644
--- a/crates/storage/provider/src/test_utils/noop.rs
+++ b/crates/storage/provider/src/test_utils/noop.rs
@@ -18,8 +18,8 @@ use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices};
 use reth_errors::ProviderError;
 use reth_evm::ConfigureEvmEnv;
 use reth_primitives::{
-    parlia::Snapshot, Account, BlobSidecars, Block, BlockWithSenders, Bytecode, Header, Receipt, SealedBlock,
-    SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned,
+    parlia::Snapshot, Account, BlobSidecars, Block, BlockWithSenders, Bytecode, Header, Receipt,
+    SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned,
     TransactionSignedNoHash, Withdrawals,
 };
 use reth_prune_types::{PruneCheckpoint, PruneSegment};
@@ -36,10 +36,10 @@ use crate::{
     providers::StaticFileProvider,
     traits::{BlockSource, ReceiptProvider},
     AccountReader, BlockHashReader, BlockIdReader, BlockNumReader, BlockReader, BlockReaderIdExt,
-    ChainSpecProvider, ChangeSetReader, EvmEnvProvider, HeaderProvider, PruneCheckpointReader,
-    ReceiptProviderIdExt, StageCheckpointReader, StateProvider, StateProviderBox,
-    StateProviderFactory, StateRootProvider, StaticFileProviderFactory, TransactionVariant,
-    TransactionsProvider, WithdrawalsProvider, ParliaSnapshotReader
+    ChainSpecProvider, ChangeSetReader, EvmEnvProvider, HeaderProvider, ParliaSnapshotReader,
+    PruneCheckpointReader, ReceiptProviderIdExt, StageCheckpointReader, StateProvider,
+    StateProviderBox, StateProviderFactory, StateRootProvider, StaticFileProviderFactory,
+    TransactionVariant, TransactionsProvider, WithdrawalsProvider,
 };

 /// Supports various api interfaces for testing purposes.
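The storage changes above expose blob sidecars through the `SidecarsProvider` trait on `ConsistentProvider`, `BlockchainProvider2` and the database-backed providers. As a rough usage sketch (illustrative only, not part of the patch; crate paths are taken from the imports in the diffs, everything else is assumed):

use alloy_primitives::{BlockHash, BlockNumber};
use reth_primitives::BlobSidecars;
use reth_storage_api::SidecarsProvider;
use reth_storage_errors::provider::ProviderResult;

/// Look up sidecars either by block hash or by block number through any
/// provider that implements `SidecarsProvider`.
fn load_sidecars<P: SidecarsProvider>(
    provider: &P,
    hash: BlockHash,
    number: BlockNumber,
) -> ProviderResult<(Option<BlobSidecars>, Option<BlobSidecars>)> {
    // Either lookup yields `Ok(None)` when no sidecars are stored for the block.
    let by_hash = provider.sidecars(&hash)?;
    let by_number = provider.sidecars_by_number(number)?;
    Ok((by_hash, by_number))
}

Both calls mirror the trait methods shown in the `consistent.rs` hunk earlier in this patch.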
diff --git a/crates/storage/storage-api/src/block.rs b/crates/storage/storage-api/src/block.rs
index 0ff5e91e7..18afd3ebc 100644
--- a/crates/storage/storage-api/src/block.rs
+++ b/crates/storage/storage-api/src/block.rs
@@ -1,6 +1,6 @@
 use crate::{
-    BlockNumReader, HeaderProvider, ReceiptProvider, ReceiptProviderIdExt, TransactionVariant,
-    TransactionsProvider, WithdrawalsProvider, SidecarsProvider
+    BlockNumReader, HeaderProvider, ReceiptProvider, ReceiptProviderIdExt, SidecarsProvider,
+    TransactionVariant, TransactionsProvider, WithdrawalsProvider,
 };
 use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumberOrTag};
 use alloy_primitives::{BlockNumber, Sealable, B256};
diff --git a/examples/custom-beacon-withdrawals/Cargo.toml b/examples/custom-beacon-withdrawals/Cargo.toml
index c396ca11d..9c3341e6b 100644
--- a/examples/custom-beacon-withdrawals/Cargo.toml
+++ b/examples/custom-beacon-withdrawals/Cargo.toml
@@ -20,7 +20,13 @@ alloy-consensus.workspace = true
 eyre.workspace = true
 
+tokio = { workspace = true, features = ["sync", "time"] }
+
 [features]
 optimism = [
-    "reth-primitives/optimism"
-]
\ No newline at end of file
+    "reth-primitives/optimism",
+    "reth-chainspec/optimism"
+]
+bsc = [
+    "reth-primitives/bsc"
+]
diff --git a/examples/custom-beacon-withdrawals/src/main.rs b/examples/custom-beacon-withdrawals/src/main.rs
index 43e5f7428..1d2b3dec0 100644
--- a/examples/custom-beacon-withdrawals/src/main.rs
+++ b/examples/custom-beacon-withdrawals/src/main.rs
@@ -15,7 +15,7 @@ use reth::{
     providers::ProviderError,
     revm::{
         interpreter::Host,
-        primitives::{Env, TransactTo, TxEnv},
+        primitives::{Env, EvmState, TransactTo, TxEnv},
         Database, DatabaseCommit, Evm, State,
     },
 };
@@ -33,6 +33,7 @@ use reth_primitives::{
     BlockWithSenders, Receipt,
 };
 use std::{fmt::Display, sync::Arc};
+use tokio::sync::mpsc::UnboundedSender;
 
 pub const SYSTEM_ADDRESS: Address = address!("fffffffffffffffffffffffffffffffffffffffe");
 pub const WITHDRAWALS_ADDRESS: Address = address!("4200000000000000000000000000000000000000");
@@ -166,6 +167,7 @@ where
         &mut self,
         _block: &BlockWithSenders,
         _total_difficulty: U256,
+        _tx: Option>,
     ) -> Result {
         Ok(ExecuteOutput { receipts: vec![], gas_used: 0 })
     }
@@ -276,6 +278,8 @@ fn fill_tx_env_with_system_contract_call(
         authorization_list: None,
         #[cfg(feature = "optimism")]
         optimism: OptimismFields::default(),
+        #[cfg(feature = "bsc")]
+        bsc: Default::default(),
     };
 
     // ensure the block gas limit is >= the tx
diff --git a/examples/custom-engine-types/src/main.rs b/examples/custom-engine-types/src/main.rs
index d4f5b82c2..049880275 100644
--- a/examples/custom-engine-types/src/main.rs
+++ b/examples/custom-engine-types/src/main.rs
@@ -60,7 +60,7 @@ use reth_node_core::{args::RpcServerArgs, node_config::NodeConfig};
 use reth_node_ethereum::{
     node::{
         EthereumConsensusBuilder, EthereumExecutorBuilder, EthereumNetworkBuilder,
-        EthereumPoolBuilder, EthereumParliaBuilder
+        EthereumParliaBuilder, EthereumPoolBuilder,
     },
     EthEvmConfig,
 };
diff --git a/op.Dockerfile b/op.Dockerfile
index b7c7e7f3f..b3f32086a 100644
--- a/op.Dockerfile
+++ b/op.Dockerfile
@@ -1,4 +1,4 @@
-FROM lukemathwalker/cargo-chef:latest-rust-1.81 AS chef
+FROM lukemathwalker/cargo-chef:latest-rust-1.82 AS chef
 WORKDIR /app
 LABEL org.opencontainers.image.source=https://github.com/bnb-chain/reth

From 9aaf5ad2b3ff13be72f18e01f916ff1c927b6739 Mon Sep 17 00:00:00 2001
From: dylanhuang
Date: Wed, 20 Nov 2024 10:29:51 +0800
Subject: [PATCH 423/425] chore: fix lint ci job

---
 .github/workflows/lint.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 1ea14e8f4..3d129eb66 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -32,6 +32,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: dtolnay/rust-toolchain@clippy
         with:
+          toolchain: ${{ env.TOOL_CHAIN }}
          components: clippy
       - uses: Swatinem/rust-cache@v2
         with:

From 955fee57adb937c616003e5f381dbff265bb4e07 Mon Sep 17 00:00:00 2001
From: dylanhuang
Date: Wed, 20 Nov 2024 19:38:24 +0800
Subject: [PATCH 424/425] fix: rlp issue

---
 crates/chainspec/src/spec.rs                 |  2 +-
 crates/primitives-traits/Cargo.toml          |  2 +-
 crates/primitives-traits/src/blob_sidecar.rs | 44 ++++++++++++++++----
 3 files changed, 38 insertions(+), 10 deletions(-)

diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs
index 1c2f8a566..8f88f73ec 100644
--- a/crates/chainspec/src/spec.rs
+++ b/crates/chainspec/src/spec.rs
@@ -993,7 +993,7 @@ mod tests {
     use alloy_chains::Chain;
     use alloy_genesis::{ChainConfig, GenesisAccount};
-    use alloy_primitives::{b256, hex};
+    use alloy_primitives::hex;
     use alloy_trie::EMPTY_ROOT_HASH;
     use reth_ethereum_forks::{ForkCondition, ForkHash, ForkId, Head};
     use reth_trie_common::TrieAccount;
diff --git a/crates/primitives-traits/Cargo.toml b/crates/primitives-traits/Cargo.toml
index 96a96fcce..38728cc2a 100644
--- a/crates/primitives-traits/Cargo.toml
+++ b/crates/primitives-traits/Cargo.toml
@@ -18,7 +18,7 @@ alloy-consensus = { workspace = true, features = ["serde"] }
 alloy-eips.workspace = true
 alloy-genesis.workspace = true
 alloy-primitives.workspace = true
-alloy-rlp.workspace = true
+alloy-rlp = { workspace = true, features = ["derive"] }
 
 revm-primitives = { workspace = true, features = ["serde"] }
diff --git a/crates/primitives-traits/src/blob_sidecar.rs b/crates/primitives-traits/src/blob_sidecar.rs
index 26d82ae14..a3bfe1f55 100644
--- a/crates/primitives-traits/src/blob_sidecar.rs
+++ b/crates/primitives-traits/src/blob_sidecar.rs
@@ -74,7 +74,7 @@ impl BlobSidecars {
     /// check for errors because we assume that `BlobSidecars` will only ever contain valid
     /// sidecars
     pub fn encode_index(&self, out: &mut dyn BufMut, index: usize) {
-        let header = alloy_rlp::Header { list: true, payload_length: self.0[index].length() };
+        let header = alloy_rlp::Header { list: false, payload_length: self.0[index].length() };
         header.encode(out);
         self.0[index].encode(out);
     }
@@ -82,16 +82,24 @@ impl BlobSidecars {
 impl Encodable for BlobSidecar {
     fn encode(&self, out: &mut dyn BufMut) {
-        let list_header_self = alloy_rlp::Header { list: true, payload_length: self.length() };
+        let payload_length = self.blob_transaction_sidecar.length() +
+            self.block_number.length() +
+            self.block_hash.length() +
+            self.tx_index.length() +
+            self.tx_hash.length();
+
+        let list_header_self = alloy_rlp::Header { list: false, payload_length };
         list_header_self.encode(out);
 
         let list_header_tx_sidecar = alloy_rlp::Header {
-            list: true,
+            list: false,
             payload_length: self.blob_transaction_sidecar.length(),
         };
         list_header_tx_sidecar.encode(out);
 
-        self.blob_transaction_sidecar.encode(out);
+        self.blob_transaction_sidecar.blobs.encode(out);
+        self.blob_transaction_sidecar.commitments.encode(out);
+        self.blob_transaction_sidecar.proofs.encode(out);
         self.block_number.encode(out);
         self.block_hash.encode(out);
         self.tx_index.encode(out);
@@ -99,8 +107,24 @@ impl Encodable for BlobSidecar {
     }
 
     fn length(&self) -> usize {
-        self.blob_transaction_sidecar.length() +
-            self.blob_transaction_sidecar.length().length() +
+        let payload_length = self.blob_transaction_sidecar.length() +
+            self.block_number.length() +
+            self.block_hash.length() +
+            self.tx_index.length() +
+            self.tx_hash.length();
+
+        let list_header_self = alloy_rlp::Header { list: false, payload_length };
+        let list_header_self_length = list_header_self.length();
+
+        let list_header_tx_sidecar = alloy_rlp::Header {
+            list: false,
+            payload_length: self.blob_transaction_sidecar.length(),
+        };
+        let header_length = list_header_tx_sidecar.length();
+
+        list_header_self_length +
+            header_length +
+            self.blob_transaction_sidecar.length() +
             self.block_number.length() +
             self.block_hash.length() +
             self.tx_index.length() +
@@ -114,7 +138,11 @@ impl Decodable for BlobSidecar {
         let _rlp_head_tx_sidecar = alloy_rlp::Header::decode(buf)?;
 
         let this = Self {
-            blob_transaction_sidecar: Decodable::decode(buf)?,
+            blob_transaction_sidecar: BlobTransactionSidecar {
+                blobs: Decodable::decode(buf)?,
+                commitments: Decodable::decode(buf)?,
+                proofs: Decodable::decode(buf)?,
+            },
             block_number: Decodable::decode(buf)?,
             block_hash: Decodable::decode(buf)?,
             tx_index: Decodable::decode(buf)?,
@@ -222,7 +250,7 @@ mod tests {
     fn test_blob_sidecar_rlp() {
         let blob_sidecar = BlobSidecar {
             blob_transaction_sidecar: BlobTransactionSidecar {
-                blobs: vec![],
+                blobs: vec![Default::default()],
                 commitments: vec![Default::default()],
                 proofs: vec![Default::default()],
             },

From 4f510ba04463e7a777ed3077dcdfa90690ca277e Mon Sep 17 00:00:00 2001
From: dylanhuang
Date: Fri, 22 Nov 2024 16:26:54 +0800
Subject: [PATCH 425/425] fix: change default db size to 8TB

---
 crates/storage/db/src/implementation/mdbx/mod.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/crates/storage/db/src/implementation/mdbx/mod.rs b/crates/storage/db/src/implementation/mdbx/mod.rs
index 78a3f7971..8e3dad465 100644
--- a/crates/storage/db/src/implementation/mdbx/mod.rs
+++ b/crates/storage/db/src/implementation/mdbx/mod.rs
@@ -105,7 +105,7 @@ impl DatabaseArguments {
         Self {
             client_version,
             geometry: Geometry {
-                size: Some(0..(4 * TERABYTE)),
+                size: Some(0..(8 * TERABYTE)),
                 growth_step: Some(4 * GIGABYTE as isize),
                 shrink_threshold: Some(0),
                 page_size: Some(PageSize::Set(default_page_size())),
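The RLP change in [PATCH 424/425] encodes a `BlobSidecar` as two non-list headers followed by the individual fields. A minimal roundtrip in the spirit of the in-tree `test_blob_sidecar_rlp` test could look like this (a sketch only; the `Default::default()` field values and the derived `PartialEq`/`Debug` impls are assumptions, not taken from the patch):

use alloy_eips::eip4844::BlobTransactionSidecar;
use alloy_rlp::{Decodable, Encodable};
use reth_primitives::BlobSidecar;

#[test]
fn blob_sidecar_rlp_roundtrip() {
    let sidecar = BlobSidecar {
        blob_transaction_sidecar: BlobTransactionSidecar {
            blobs: vec![Default::default()],
            commitments: vec![Default::default()],
            proofs: vec![Default::default()],
        },
        // Remaining fields are left at their defaults purely for illustration.
        block_number: Default::default(),
        block_hash: Default::default(),
        tx_index: Default::default(),
        tx_hash: Default::default(),
    };

    // Encode with the `Encodable` impl updated in the patch ...
    let mut encoded = Vec::new();
    sidecar.encode(&mut encoded);

    // ... and decode it back with the matching `Decodable` impl.
    let decoded = BlobSidecar::decode(&mut encoded.as_slice()).expect("valid RLP");
    assert_eq!(decoded, sidecar);
}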