diff --git a/Cargo.lock b/Cargo.lock
index 38857f568fff3..200a056f28251 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -14894,6 +14894,7 @@ dependencies = [
  "pallet-revive 0.1.0",
  "pallet-revive-fixtures 0.1.0",
  "parity-scale-codec",
+ "pretty_assertions",
  "rlp 0.6.1",
  "sc-cli",
  "sc-rpc",
diff --git a/prdoc/pr_7506.prdoc b/prdoc/pr_7506.prdoc
new file mode 100644
index 0000000000000..7e41afbb3a5e8
--- /dev/null
+++ b/prdoc/pr_7506.prdoc
@@ -0,0 +1,9 @@
+title: '[pallet-revive] Add eth_get_logs'
+doc:
+- audience: Runtime Dev
+  description: "Add support for eth_get_logs rpc method"
+crates:
+- name: pallet-revive-eth-rpc
+  bump: minor
+- name: pallet-revive
+  bump: minor
diff --git a/substrate/frame/revive/rpc/.sqlx/query-76dd0f2460cfc0ffa93dda7a42893cbf05b3451cb8e4c4cb6cf86ec70930a11e.json b/substrate/frame/revive/rpc/.sqlx/query-76dd0f2460cfc0ffa93dda7a42893cbf05b3451cb8e4c4cb6cf86ec70930a11e.json
new file mode 100644
index 0000000000000..e8c40966e8395
--- /dev/null
+++ b/substrate/frame/revive/rpc/.sqlx/query-76dd0f2460cfc0ffa93dda7a42893cbf05b3451cb8e4c4cb6cf86ec70930a11e.json
@@ -0,0 +1,12 @@
+{
+  "db_name": "SQLite",
+  "query": "\n\t\t\t\t\tINSERT OR REPLACE INTO logs(\n\t\t\t\t\t\tblock_hash,\n\t\t\t\t\t\ttransaction_index,\n\t\t\t\t\t\tlog_index,\n\t\t\t\t\t\taddress,\n\t\t\t\t\t\tblock_number,\n\t\t\t\t\t\ttransaction_hash,\n\t\t\t\t\t\ttopic_0, topic_1, topic_2, topic_3,\n\t\t\t\t\t\tdata)\n\t\t\t\t\tVALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)\n\t\t\t\t\t",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Right": 11
+    },
+    "nullable": []
+  },
+  "hash": "76dd0f2460cfc0ffa93dda7a42893cbf05b3451cb8e4c4cb6cf86ec70930a11e"
+}
diff --git a/substrate/frame/revive/rpc/Cargo.toml b/substrate/frame/revive/rpc/Cargo.toml
index c333c9816e579..76f9be56eb40d 100644
--- a/substrate/frame/revive/rpc/Cargo.toml
+++ b/substrate/frame/revive/rpc/Cargo.toml
@@ -70,6 +70,7 @@ tokio = { workspace = true, features = ["full"] }

 [dev-dependencies]
 env_logger = { workspace = true }
+pretty_assertions = { workspace = true }
 static_init = { workspace = true }
 substrate-cli-test-utils = { workspace = true }
 subxt-signer = { workspace = true, features = ["unstable-eth"] }
diff --git a/substrate/frame/revive/rpc/examples/js/src/build-contracts.ts b/substrate/frame/revive/rpc/examples/js/src/build-contracts.ts
index b162b8be0adfe..17f5dffd9973e 100644
--- a/substrate/frame/revive/rpc/examples/js/src/build-contracts.ts
+++ b/substrate/frame/revive/rpc/examples/js/src/build-contracts.ts
@@ -1,4 +1,4 @@
-import { compile } from '@parity/revive'
+import { compile, SolcOutput } from '@parity/revive'
 import { format } from 'prettier'
 import { parseArgs } from 'node:util'
 import solc from 'solc'
@@ -8,7 +8,7 @@ import { basename, join } from 'path'
 type CompileInput = Parameters<typeof compile>[0]

 const {
-	values: { filter },
+	values: { filter, solcOnly },
 } = parseArgs({
 	args: process.argv.slice(2),
 	options: {
@@ -16,6 +16,10 @@ const {
 			type: 'string',
 			short: 'f',
 		},
+		solcOnly: {
+			type: 'boolean',
+			short: 's',
+		},
 	},
 })
@@ -54,32 +58,23 @@ for (const file of input) {
 		[name]: { content: readFileSync(join(contractsDir, file), 'utf8') },
 	}

-	console.log('Compiling with revive...')
-	const reviveOut = await compile(input)
-
-	for (const contracts of Object.values(reviveOut.contracts)) {
-		for (const [name, contract] of Object.entries(contracts)) {
-			console.log(`📜 Add PVM contract ${name}`)
-			const abi = contract.abi
-			const abiName = `${name}Abi`
-			writeFileSync(join(abiDir, `${name}.json`), JSON.stringify(abi, null, 2))
-
-			writeFileSync(
-				join(abiDir, `${name}.ts`),
-				await format(`export const ${abiName} = ${JSON.stringify(abi, null, 2)} as const`, {
-					parser: 'typescript',
-				})
-			)
+	if (!solcOnly) {
+		console.log('Compiling with revive...')
+		const reviveOut = await compile(input, { bin: 'resolc' })

-			writeFileSync(
-				join(pvmDir, `${name}.polkavm`),
-				Buffer.from(contract.evm.bytecode.object, 'hex')
-			)
+		for (const contracts of Object.values(reviveOut.contracts)) {
+			for (const [name, contract] of Object.entries(contracts)) {
+				console.log(`📜 Add PVM contract ${name}`)
+				writeFileSync(
+					join(pvmDir, `${name}.polkavm`),
+					Buffer.from(contract.evm.bytecode.object, 'hex')
+				)
+			}
 		}
 	}

 	console.log(`Compile with solc ${file}`)
-	const evmOut = JSON.parse(evmCompile(input)) as typeof reviveOut
+	const evmOut = JSON.parse(evmCompile(input)) as SolcOutput

 	for (const contracts of Object.values(evmOut.contracts)) {
 		for (const [name, contract] of Object.entries(contracts)) {
@@ -88,6 +83,17 @@ for (const file of input) {
 				join(evmDir, `${name}.bin`),
 				Buffer.from(contract.evm.bytecode.object, 'hex')
 			)
+
+			const abi = contract.abi
+			const abiName = `${name}Abi`
+			writeFileSync(join(abiDir, `${name}.json`), JSON.stringify(abi, null, 2))
+
+			writeFileSync(
+				join(abiDir, `${name}.ts`),
+				await format(`export const ${abiName} = ${JSON.stringify(abi, null, 2)} as const`, {
+					parser: 'typescript',
+				})
+			)
 		}
 	}
 }
diff --git a/substrate/frame/revive/rpc/examples/js/src/geth-diff.test.ts b/substrate/frame/revive/rpc/examples/js/src/geth-diff.test.ts
index 51bbad3c17966..c6417e3198bdc 100644
--- a/substrate/frame/revive/rpc/examples/js/src/geth-diff.test.ts
+++ b/substrate/frame/revive/rpc/examples/js/src/geth-diff.test.ts
@@ -7,7 +7,7 @@ import {
 	polkadotSdkPath,
 } from './util.ts'
 import { afterAll, afterEach, describe, expect, test } from 'bun:test'
-import { encodeFunctionData, Hex, parseEther, decodeEventLog, keccak256, toHex } from 'viem'
+import { encodeFunctionData, Hex, parseEther, decodeEventLog } from 'viem'
 import { ErrorsAbi } from '../abi/Errors'
 import { EventExampleAbi } from '../abi/EventExample'
 import { Subprocess, spawn } from 'bun'
@@ -15,7 +15,7 @@ import { fail } from 'node:assert'

 const procs: Subprocess[] = []
 if (process.env.START_GETH) {
-	process.env.USE_ETH_RPC = 'true'
+	process.env.USE_GETH = 'true'
 	procs.push(
 		// Run geth on port 8546
 		await (async () => {
@@ -364,5 +364,42 @@ for (const env of envs) {
 				],
 			})
 		})
+
+		test('logs', async () => {
+			let address = await getEventExampleAddr()
+			let { request } = await env.serverWallet.simulateContract({
+				address,
+				abi: EventExampleAbi,
+				functionName: 'triggerEvent',
+			})
+
+			let hash = await env.serverWallet.writeContract(request)
+			let receipt = await env.serverWallet.waitForTransactionReceipt({ hash })
+			const logs = await env.serverWallet.getLogs({
+				address,
+				blockHash: receipt.blockHash,
+			})
+			expect(logs).toHaveLength(1)
+			expect(logs[0]).toMatchObject({
+				address,
+				data: '0x00000000000000000000000000000000000000000000000000000000000030390000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000b48656c6c6f20776f726c64000000000000000000000000000000000000000000',
+				transactionHash: hash,
+			})
+
+			expect(
+				decodeEventLog({
+					abi: EventExampleAbi,
+					data: logs[0].data,
+					topics: logs[0].topics,
+				})
+			).toEqual({
+				eventName: 'ExampleEvent',
+				args: {
+					sender: env.serverWallet.account.address,
+					value: 12345n,
+					message: 'Hello world',
+				},
+			})
+		})
 	})
 }
diff --git a/substrate/frame/revive/rpc/examples/js/src/spammer.ts b/substrate/frame/revive/rpc/examples/js/src/spammer.ts
index 29bdf20d935c7..7ebee80278512 100644
--- a/substrate/frame/revive/rpc/examples/js/src/spammer.ts
+++ b/substrate/frame/revive/rpc/examples/js/src/spammer.ts
@@ -4,7 +4,6 @@ import {
 	getByteCode,
 	killProcessOnPort,
 	polkadotSdkPath,
-	timeout,
 	wait,
 	waitForHealth,
 } from './util'
@@ -26,39 +25,26 @@ spawn(
 	}
 )

-// Run eth-indexer
-console.log('🔍 Start indexer...')
-spawn(
-	[
-		'./target/debug/eth-indexer',
-		'--node-rpc-url=ws://localhost:9944',
-		'-l=eth-rpc=debug',
-		'--database-url ${polkadotSdkPath}/substrate/frame/revive/rpc/tx_hashes.db',
-	],
-	{
-		stdout: Bun.file('/tmp/eth-indexer.out.log'),
-		stderr: Bun.file('/tmp/eth-indexer.err.log'),
-		cwd: polkadotSdkPath,
-	}
-)
-
 // Run eth-rpc on 8545
-console.log('💻 Start eth-rpc...')
-killProcessOnPort(8545)
-spawn(
-	[
-		'./target/debug/eth-rpc',
-		'--dev',
-		'--node-rpc-url=ws://localhost:9944',
-		'-l=rpc-metrics=debug,eth-rpc=debug',
-	],
-	{
-		stdout: Bun.file('/tmp/eth-rpc.out.log'),
-		stderr: Bun.file('/tmp/eth-rpc.err.log'),
-		cwd: polkadotSdkPath,
-	}
-)
-await waitForHealth('http://localhost:8545').catch()
+console.log('🚀 Start eth-rpc...')
+if (process.env.START_ETH_RPC) {
+	console.log('🔍 Start eth-rpc...')
+	killProcessOnPort(8545)
+	spawn(
+		[
+			'./target/debug/eth-rpc',
+			'--dev',
+			'--node-rpc-url=ws://localhost:9944',
+			'-l=rpc-metrics=debug,eth-rpc=debug',
+		],
+		{
+			stdout: Bun.file('/tmp/eth-rpc.out.log'),
+			stderr: Bun.file('/tmp/eth-rpc.err.log'),
+			cwd: polkadotSdkPath,
+		}
+	)
+	await waitForHealth('http://localhost:8545').catch()
+}

 const env = await createEnv('eth-rpc')
 const wallet = env.accountWallet
@@ -74,30 +60,32 @@ if (!deployReceipt.contractAddress) throw new Error('Contract address should be
 const flipperAddr = deployReceipt.contractAddress

 let nonce = await wallet.getTransactionCount(wallet.account)
-let callCount = 0
-console.log('🔄 Starting nonce:', nonce)
 console.log('🔄 Starting loop...')
+console.log('Starting nonce:', nonce)
 try {
 	while (true) {
-		callCount++
-		console.log(`🔄 Call flip (${callCount})...`)
+		console.log(`Call flip (nonce: ${nonce})...`)
 		const { request } = await wallet.simulateContract({
 			account: wallet.account,
 			address: flipperAddr,
 			abi: FlipperAbi,
 			functionName: 'flip',
+			nonce,
 		})

-		console.log(`🔄 Submit flip (call ${callCount}...`)
-
-		await Promise.race([
-			(async () => {
-				const hash = await wallet.writeContract(request)
-				await wallet.waitForTransactionReceipt({ hash })
-			})(),
-			timeout(15_000),
-		])
+		const hash = await wallet.writeContract(request)
+		console.time(hash)
+		wallet.waitForTransactionReceipt({ hash }).then((receipt) => {
+			console.timeEnd(hash)
+			console.log('-----------------------------------')
+			console.log(`status: ${receipt.status ? '✅' : '❌'}`)
+			console.log(`block: ${receipt.blockNumber} - hash: ${receipt.blockHash}`)
+			console.log(`tx: ${hash}`)
+			console.log('-----------------------------------')
+		})
+
+		await wait(1_000)
+		nonce++
 	}
 } catch (err) {
 	console.error('Failed with error:', err)
diff --git a/substrate/frame/revive/rpc/examples/js/src/transfer.ts b/substrate/frame/revive/rpc/examples/js/src/transfer.ts
index aef9a487b0c01..711e4eb893796 100644
--- a/substrate/frame/revive/rpc/examples/js/src/transfer.ts
+++ b/substrate/frame/revive/rpc/examples/js/src/transfer.ts
@@ -6,10 +6,11 @@ try {
 	console.log(`Signer balance: ${await walletClient.getBalance(walletClient.account)}`)
 	console.log(`Recipient balance: ${await walletClient.getBalance({ address: recipient })}`)

-	await walletClient.sendTransaction({
+	let resp = await walletClient.sendTransaction({
 		to: recipient,
 		value: parseEther('1.0'),
 	})
+	console.log(`Transaction hash: ${resp}`)
 	console.log(`Sent: ${parseEther('1.0')}`)
 	console.log(`Signer balance: ${await walletClient.getBalance(walletClient.account)}`)
 	console.log(`Recipient balance: ${await walletClient.getBalance({ address: recipient })}`)
diff --git a/substrate/frame/revive/rpc/migrations/0002_create_log_table.sql b/substrate/frame/revive/rpc/migrations/0002_create_log_table.sql
new file mode 100644
index 0000000000000..4b012e17a38c5
--- /dev/null
+++ b/substrate/frame/revive/rpc/migrations/0002_create_log_table.sql
@@ -0,0 +1,28 @@
+CREATE TABLE IF NOT EXISTS logs (
+    block_hash BLOB NOT NULL,
+    transaction_index INTEGER NOT NULL,
+    log_index INTEGER NOT NULL,
+    address BLOB NOT NULL,
+    block_number INTEGER NOT NULL,
+    transaction_hash BLOB NOT NULL,
+    topic_0 BLOB,
+    topic_1 BLOB,
+    topic_2 BLOB,
+    topic_3 BLOB,
+    data BLOB,
+    PRIMARY KEY (block_hash, transaction_index, log_index)
+);
+
+CREATE INDEX IF NOT EXISTS idx_block_number_address_topics ON logs (
+    block_number,
+    address,
+    topic_0,
+    topic_1,
+    topic_2,
+    topic_3
+);
+
+CREATE INDEX IF NOT EXISTS idx_block_hash ON logs (
+    block_hash
+);
+
diff --git a/substrate/frame/revive/rpc/src/block_info_provider.rs b/substrate/frame/revive/rpc/src/block_info_provider.rs
index 675a83ed6558b..a8b54907ca37c 100644
--- a/substrate/frame/revive/rpc/src/block_info_provider.rs
+++ b/substrate/frame/revive/rpc/src/block_info_provider.rs
@@ -38,6 +38,11 @@ pub trait BlockInfoProvider: Send + Sync {
 	/// Return the latest ingested block.
 	async fn latest_block(&self) -> Option<Arc<SubstrateBlock>>;

+	/// Return the latest block number
+	async fn latest_block_number(&self) -> Option<SubstrateBlockNumber> {
+		return self.latest_block().await.map(|block| block.number());
+	}
+
 	/// Get block by block_number.
 	async fn block_by_number(
 		&self,
@@ -225,6 +230,10 @@ pub mod test {
 			None
 		}

+		async fn latest_block_number(&self) -> Option<SubstrateBlockNumber> {
+			Some(2u32)
+		}
+
 		async fn block_by_number(
 			&self,
 			_block_number: SubstrateBlockNumber,
diff --git a/substrate/frame/revive/rpc/src/client.rs b/substrate/frame/revive/rpc/src/client.rs
index e538259ca8b74..ae5311deb8e02 100644
--- a/substrate/frame/revive/rpc/src/client.rs
+++ b/substrate/frame/revive/rpc/src/client.rs
@@ -25,9 +25,9 @@ use crate::{
 use jsonrpsee::types::{error::CALL_EXECUTION_FAILED_CODE, ErrorObjectOwned};
 use pallet_revive::{
 	evm::{
-		extract_revert_message, Block, BlockNumberOrTag, BlockNumberOrTagOrHash,
-		GenericTransaction, ReceiptInfo, SyncingProgress, SyncingStatus, TransactionSigned, H160,
-		H256, U256,
+		extract_revert_message, Block, BlockNumberOrTag, BlockNumberOrTagOrHash, Filter,
+		GenericTransaction, Log, ReceiptInfo, SyncingProgress, SyncingStatus, TransactionSigned,
+		H160, H256, U256,
 	},
 	EthTransactError, EthTransactInfo,
 };
@@ -132,6 +132,9 @@ pub enum ClientError {
 	/// The cache is empty.
 	#[error("cache is empty")]
 	CacheEmpty,
+	/// Failed to filter logs.
+	#[error("Failed to filter logs")]
+	LogFilterFailed(#[from] anyhow::Error),
 }

 const REVERT_CODE: i32 = 3;
@@ -712,4 +715,11 @@ impl Client {
 	pub fn max_block_weight(&self) -> Weight {
 		self.max_block_weight
 	}
+
+	/// Get the logs matching the given filter.
+	pub async fn logs(&self, filter: Option<Filter>) -> Result<Vec<Log>, ClientError> {
+		let logs =
+			self.receipt_provider.logs(filter).await.map_err(ClientError::LogFilterFailed)?;
+		Ok(logs)
+	}
 }
diff --git a/substrate/frame/revive/rpc/src/lib.rs b/substrate/frame/revive/rpc/src/lib.rs
index 536678a97ac63..0a153afa6ddf2 100644
--- a/substrate/frame/revive/rpc/src/lib.rs
+++ b/substrate/frame/revive/rpc/src/lib.rs
@@ -287,6 +287,11 @@ impl EthRpcServer for EthRpcServerImpl {
 		Ok(self.client.receipts_count_per_block(&block.hash).await.map(U256::from))
 	}

+	async fn get_logs(&self, filter: Option<Filter>) -> RpcResult<FilterResults> {
+		let logs = self.client.logs(filter).await?;
+		Ok(FilterResults::Logs(logs))
+	}
+
 	async fn get_storage_at(
 		&self,
 		address: H160,
diff --git a/substrate/frame/revive/rpc/src/receipt_provider.rs b/substrate/frame/revive/rpc/src/receipt_provider.rs
index bbed54a94b7dc..8f1c20005ef4a 100644
--- a/substrate/frame/revive/rpc/src/receipt_provider.rs
+++ b/substrate/frame/revive/rpc/src/receipt_provider.rs
@@ -16,7 +16,7 @@
 // limitations under the License.

 use jsonrpsee::core::async_trait;
-use pallet_revive::evm::{ReceiptInfo, TransactionSigned, H256};
+use pallet_revive::evm::{Filter, Log, ReceiptInfo, TransactionSigned, H256};
 use tokio::join;

 mod cache;
@@ -34,6 +34,9 @@ pub trait ReceiptProvider: Send + Sync {
 	/// Similar to `insert`, but intended for archiving receipts from historical blocks.
 	async fn archive(&self, block_hash: &H256, receipts: &[(TransactionSigned, ReceiptInfo)]);

+	/// Get logs that match the given filter.
+	async fn logs(&self, filter: Option<Filter>) -> anyhow::Result<Vec<Log>>;
+
 	/// Deletes receipts associated with the specified block hash.
 	async fn remove(&self, block_hash: &H256);
@@ -102,4 +105,8 @@ impl ReceiptProvider for (Cach
 		}
 		self.1.signed_tx_by_hash(hash).await
 	}
+
+	async fn logs(&self, filter: Option<Filter>) -> anyhow::Result<Vec<Log>> {
+		self.1.logs(filter).await
+	}
 }
diff --git a/substrate/frame/revive/rpc/src/receipt_provider/cache.rs b/substrate/frame/revive/rpc/src/receipt_provider/cache.rs
index 765c12f890106..87947be7c7227 100644
--- a/substrate/frame/revive/rpc/src/receipt_provider/cache.rs
+++ b/substrate/frame/revive/rpc/src/receipt_provider/cache.rs
@@ -16,7 +16,7 @@
 // limitations under the License.
 use super::ReceiptProvider;
 use jsonrpsee::core::async_trait;
-use pallet_revive::evm::{ReceiptInfo, TransactionSigned, H256};
+use pallet_revive::evm::{Filter, Log, ReceiptInfo, TransactionSigned, H256};
 use std::{collections::HashMap, sync::Arc};
 use tokio::sync::RwLock;
@@ -37,6 +37,10 @@ impl CacheReceiptProvider {
 impl ReceiptProvider for CacheReceiptProvider {
 	async fn archive(&self, _block_hash: &H256, _receipts: &[(TransactionSigned, ReceiptInfo)]) {}

+	async fn logs(&self, _filter: Option<Filter>) -> anyhow::Result<Vec<Log>> {
+		anyhow::bail!("Not implemented")
+	}
+
 	async fn insert(&self, block_hash: &H256, receipts: &[(TransactionSigned, ReceiptInfo)]) {
 		let mut cache = self.cache.write().await;
 		cache.insert(block_hash, receipts);
diff --git a/substrate/frame/revive/rpc/src/receipt_provider/db.rs b/substrate/frame/revive/rpc/src/receipt_provider/db.rs
index 42ffe93a9f8b6..0f82f5df1ba7d 100644
--- a/substrate/frame/revive/rpc/src/receipt_provider/db.rs
+++ b/substrate/frame/revive/rpc/src/receipt_provider/db.rs
@@ -16,11 +16,11 @@
 // limitations under the License.

 use super::*;
-use crate::{BlockInfoProvider, ReceiptExtractor};
+use crate::{Address, AddressOrAddresses, BlockInfoProvider, Bytes, FilterTopic, ReceiptExtractor};
 use jsonrpsee::core::async_trait;
-use pallet_revive::evm::{ReceiptInfo, TransactionSigned};
-use sp_core::H256;
-use sqlx::{query, SqlitePool};
+use pallet_revive::evm::{Filter, Log, ReceiptInfo, TransactionSigned};
+use sp_core::{H256, U256};
+use sqlx::{query, QueryBuilder, Row, Sqlite, SqlitePool};
 use std::sync::Arc;

 /// A `[ReceiptProvider]` that stores receipts in a SQLite database.
@@ -95,9 +95,175 @@ impl ReceiptProvider for DBReceiptProvider {
 			if let Err(err) = result {
 				log::error!("Error inserting transaction for block hash {block_hash:?}: {err:?}");
 			}
+
+			for log in &receipt.logs {
+				let block_hash = log.block_hash.as_ref();
+				let transaction_index = log.transaction_index.as_u64() as i64;
+				let log_index = log.log_index.as_u32() as i32;
+				let address = log.address.as_ref();
+				let block_number = log.block_number.as_u64() as i64;
+				let transaction_hash = log.transaction_hash.as_ref();
+
+				let topic_0 = log.topics.first().as_ref().map(|v| &v[..]);
+				let topic_1 = log.topics.get(1).as_ref().map(|v| &v[..]);
+				let topic_2 = log.topics.get(2).as_ref().map(|v| &v[..]);
+				let topic_3 = log.topics.get(3).as_ref().map(|v| &v[..]);
+				let data = log.data.as_ref().map(|v| &v.0[..]);
+
+				let result = query!(
+					r#"
+					INSERT OR REPLACE INTO logs(
+						block_hash,
+						transaction_index,
+						log_index,
+						address,
+						block_number,
+						transaction_hash,
+						topic_0, topic_1, topic_2, topic_3,
+						data)
+					VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
+					"#,
+					block_hash,
+					transaction_index,
+					log_index,
+					address,
+					block_number,
+					transaction_hash,
+					topic_0,
+					topic_1,
+					topic_2,
+					topic_3,
+					data
+				)
+				.execute(&self.pool)
+				.await;
+
+				if let Err(err) = result {
+					log::error!("Error inserting log {log:?}: {err:?}");
+				}
+			}
 		}
 	}

+	async fn logs(&self, filter: Option<Filter>) -> anyhow::Result<Vec<Log>> {
+		let mut qb = QueryBuilder::<Sqlite>::new("SELECT logs.* FROM logs WHERE 1=1");
+		let filter = filter.unwrap_or_default();
+
+		let latest_block =
+			U256::from(self.block_provider.latest_block_number().await.unwrap_or_default());
+
+		match (filter.from_block, filter.to_block, filter.block_hash) {
+			(Some(_), _, Some(_)) | (_, Some(_), Some(_)) => {
+				anyhow::bail!("block number and block hash cannot be used together");
+			},
+			(Some(block), _, _) | (_, Some(block), _) if block > latest_block => {
+				anyhow::bail!("block number exceeds latest block");
+			},
+			(Some(from_block), Some(to_block), None) if from_block > to_block => {
+				anyhow::bail!("invalid block range params");
+			},
+			(Some(from_block), Some(to_block), None) if from_block == to_block => {
+				qb.push(" AND block_number = ").push_bind(from_block.as_u64() as i64);
+			},
+			(Some(from_block), Some(to_block), None) => {
+				qb.push(" AND block_number BETWEEN ")
+					.push_bind(from_block.as_u64() as i64)
+					.push(" AND ")
+					.push_bind(to_block.as_u64() as i64);
+			},
+			(Some(from_block), None, None) => {
+				qb.push(" AND block_number >= ").push_bind(from_block.as_u64() as i64);
+			},
+			(None, Some(to_block), None) => {
+				qb.push(" AND block_number <= ").push_bind(to_block.as_u64() as i64);
+			},
+			(None, None, Some(hash)) => {
+				qb.push(" AND block_hash = ").push_bind(hash.0.to_vec());
+			},
+			(None, None, None) => {
+				qb.push(" AND block_number = ").push_bind(latest_block.as_u64() as i64);
+			},
+		}
+
+		if let Some(addresses) = filter.address {
+			match addresses {
+				AddressOrAddresses::Address(addr) => {
+					qb.push(" AND address = ").push_bind(addr.0.to_vec());
+				},
+				AddressOrAddresses::Addresses(addrs) => {
+					qb.push(" AND address IN (");
+					let mut separated = qb.separated(", ");
+					for addr in addrs {
+						separated.push_bind(addr.0.to_vec());
+					}
+					separated.push_unseparated(")");
+				},
+			}
+		}
+
+		if let Some(topics) = filter.topics {
+			if topics.len() > 4 {
+				return Err(anyhow::anyhow!("exceed max topics"));
+			}
+
+			for (i, topic) in topics.into_iter().enumerate() {
+				match topic {
+					FilterTopic::Single(hash) => {
+						qb.push(format_args!(" AND topic_{i} = ")).push_bind(hash.0.to_vec());
+					},
+					FilterTopic::Multiple(hashes) => {
+						qb.push(format_args!(" AND topic_{i} IN ("));
+						let mut separated = qb.separated(", ");
+						for hash in hashes {
+							separated.push_bind(hash.0.to_vec());
+						}
+						separated.push_unseparated(")");
+					},
+				}
+			}
+		}
+
+		qb.push(" LIMIT 10000");
+
+		let logs = qb
+			.build()
+			.try_map(|row| {
+				let block_hash: Vec<u8> = row.try_get("block_hash")?;
+				let transaction_index: i64 = row.try_get("transaction_index")?;
+				let log_index: i64 = row.try_get("log_index")?;
+				let address: Vec<u8> = row.try_get("address")?;
+				let block_number: i64 = row.try_get("block_number")?;
+				let transaction_hash: Vec<u8> = row.try_get("transaction_hash")?;
+				let topic_0: Option<Vec<u8>> = row.try_get("topic_0")?;
+				let topic_1: Option<Vec<u8>> = row.try_get("topic_1")?;
+				let topic_2: Option<Vec<u8>> = row.try_get("topic_2")?;
+				let topic_3: Option<Vec<u8>> = row.try_get("topic_3")?;
+				let data: Option<Vec<u8>> = row.try_get("data")?;
+
+				let topics = [topic_0, topic_1, topic_2, topic_3]
+					.iter()
+					.filter_map(|t| t.as_ref().map(|t| H256::from_slice(t)))
+					.collect::<Vec<_>>();
+
+				Ok(Log {
+					address: Address::from_slice(&address),
+					block_hash: H256::from_slice(&block_hash),
+					block_number: U256::from(block_number as u64),
+					data: data.map(Bytes::from),
+					log_index: U256::from(log_index as u64),
+					topics,
+					transaction_hash: H256::from_slice(&transaction_hash),
+					transaction_index: U256::from(transaction_index as u64),
+					removed: None,
+				})
+			})
+			.fetch_all(&self.pool)
+			.await?;
+
+		Ok(logs)
+	}
+
 	async fn receipts_count_per_block(&self, block_hash: &H256) -> Option<usize> {
 		let block_hash = block_hash.as_ref();
 		let row = query!(
@@ -174,7 +340,8 @@ mod tests {
 	use super::*;
 	use crate::test::MockBlockInfoProvider;
 	use pallet_revive::evm::{ReceiptInfo, TransactionSigned};
-	use sp_core::H256;
+	use pretty_assertions::assert_eq;
+	use sp_core::{H160, H256};
 	use sqlx::SqlitePool;

 	async fn setup_sqlite_provider(pool: SqlitePool) -> DBReceiptProvider {
@@ -215,4 +382,137 @@ mod tests {
 		let count = provider.receipts_count_per_block(&block_hash).await;
 		assert_eq!(count, Some(2));
 	}
+
+	#[sqlx::test]
+	async fn test_query_logs(pool: SqlitePool) -> anyhow::Result<()> {
+		let provider = setup_sqlite_provider(pool).await;
+		let log1 = Log {
+			block_hash: H256::from([1u8; 32]),
+			block_number: U256::from(1),
+			address: H160::from([1u8; 20]),
+			topics: vec![H256::from([1u8; 32]), H256::from([2u8; 32])],
+			data: Some(vec![0u8; 32].into()),
+			transaction_hash: H256::default(),
+			transaction_index: U256::from(1),
+			log_index: U256::from(1),
+			..Default::default()
+		};
+		let log2 = Log {
+			block_hash: H256::from([2u8; 32]),
+			block_number: U256::from(2),
+			address: H160::from([2u8; 20]),
+			topics: vec![H256::from([2u8; 32]), H256::from([3u8; 32])],
+			transaction_hash: H256::from([1u8; 32]),
+			transaction_index: U256::from(2),
+			log_index: U256::from(1),
+			..Default::default()
+		};
+
+		provider
+			.insert(
+				&log1.block_hash,
+				&vec![(
+					TransactionSigned::default(),
+					ReceiptInfo { logs: vec![log1.clone()], ..Default::default() },
+				)],
+			)
+			.await;
+		provider
+			.insert(
+				&log2.block_hash,
+				&vec![(
+					TransactionSigned::default(),
+					ReceiptInfo { logs: vec![log2.clone()], ..Default::default() },
+				)],
+			)
+			.await;
+
+		// Empty filter
+		let logs = provider.logs(None).await?;
+		assert_eq!(logs, vec![log2.clone()]);
+
+		// from_block filter
+		let logs = provider
+			.logs(Some(Filter { from_block: Some(log2.block_number), ..Default::default() }))
+			.await?;
+		assert_eq!(logs, vec![log2.clone()]);
+
+		// to_block filter
+		let logs = provider
+			.logs(Some(Filter { to_block: Some(log1.block_number), ..Default::default() }))
+			.await?;
+		assert_eq!(logs, vec![log1.clone()]);
+
+		// block_hash filter
+		let logs = provider
+			.logs(Some(Filter { block_hash: Some(log1.block_hash), ..Default::default() }))
+			.await?;
+		assert_eq!(logs, vec![log1.clone()]);
+
+		// single address
+		let logs = provider
+			.logs(Some(Filter {
+				from_block: Some(U256::from(0)),
+				address: Some(log1.address.into()),
+				..Default::default()
+			}))
+			.await?;
+		assert_eq!(logs, vec![log1.clone()]);
+
+		// multiple addresses
+		let logs = provider
+			.logs(Some(Filter {
+				from_block: Some(U256::from(0)),
+				address: Some(vec![log1.address, log2.address].into()),
+				..Default::default()
+			}))
+			.await?;
+		assert_eq!(logs, vec![log1.clone(), log2.clone()]);
+
+		// single topic
+		let logs = provider
+			.logs(Some(Filter {
+				from_block: Some(U256::from(0)),
+				topics: Some(vec![FilterTopic::Single(log1.topics[0])]),
+				..Default::default()
+			}))
+			.await?;
+		assert_eq!(logs, vec![log1.clone()]);
+
+		// multiple topic
+		let logs = provider
+			.logs(Some(Filter {
+				from_block: Some(U256::from(0)),
+				topics: Some(vec![
+					FilterTopic::Single(log1.topics[0]),
+					FilterTopic::Single(log1.topics[1]),
+				]),
+				..Default::default()
+			}))
+			.await?;
+		assert_eq!(logs, vec![log1.clone()]);
+
+		// multiple topic for topic_0
+		let logs = provider
+			.logs(Some(Filter {
+				from_block: Some(U256::from(0)),
+				topics: Some(vec![FilterTopic::Multiple(vec![log1.topics[0], log2.topics[0]])]),
+				..Default::default()
+			}))
+			.await?;
+		assert_eq!(logs, vec![log1.clone(), log2.clone()]);
+
+		// Altogether
+		let logs = provider
+			.logs(Some(Filter {
+				from_block: Some(log1.block_number),
+				to_block: Some(log2.block_number),
+				block_hash: None,
+				address: Some(vec![log1.address, log2.address].into()),
+				topics: Some(vec![FilterTopic::Multiple(vec![log1.topics[0], log2.topics[0]])]),
+			}))
+			.await?;
+		assert_eq!(logs, vec![log1.clone(), log2.clone()]);
+		Ok(())
+	}
 }
diff --git a/substrate/frame/revive/rpc/src/rpc_methods_gen.rs b/substrate/frame/revive/rpc/src/rpc_methods_gen.rs
index da60360d9e61b..2df644f5692bd 100644
--- a/substrate/frame/revive/rpc/src/rpc_methods_gen.rs
+++ b/substrate/frame/revive/rpc/src/rpc_methods_gen.rs
@@ -95,6 +95,10 @@ pub trait EthRpc {
 	#[method(name = "eth_getCode")]
 	async fn get_code(&self, address: Address, block: BlockNumberOrTagOrHash) -> RpcResult<Bytes>;

+	/// Returns an array of all logs matching filter with given id.
+	#[method(name = "eth_getLogs")]
+	async fn get_logs(&self, filter: Option<Filter>) -> RpcResult<FilterResults>;
+
 	/// Returns the value from a storage position at a given address.
 	#[method(name = "eth_getStorageAt")]
 	async fn get_storage_at(
diff --git a/substrate/frame/revive/src/evm/api/rpc_types_gen.rs b/substrate/frame/revive/src/evm/api/rpc_types_gen.rs
index e7003ee7c1891..8fd4c1072a931 100644
--- a/substrate/frame/revive/src/evm/api/rpc_types_gen.rs
+++ b/substrate/frame/revive/src/evm/api/rpc_types_gen.rs
@@ -137,6 +137,44 @@ impl Default for BlockNumberOrTagOrHash {
 	}
 }

+/// filter
+#[derive(
+	Debug, Default, Clone, Encode, Decode, TypeInfo, Serialize, Deserialize, Eq, PartialEq,
+)]
+pub struct Filter {
+	/// Address(es)
+	pub address: Option<AddressOrAddresses>,
+	/// from block
+	#[serde(rename = "fromBlock", skip_serializing_if = "Option::is_none")]
+	pub from_block: Option<U256>,
+	/// to block
+	#[serde(rename = "toBlock", skip_serializing_if = "Option::is_none")]
+	pub to_block: Option<U256>,
+	/// Restricts the logs returned to the single block
+	#[serde(rename = "blockHash", skip_serializing_if = "Option::is_none")]
+	pub block_hash: Option<H256>,
+	/// Topics
+	#[serde(skip_serializing_if = "Option::is_none")]
+	pub topics: Option<FilterTopics>,
+}
+
+/// Filter results
+#[derive(
+	Debug, Clone, Encode, Decode, TypeInfo, Serialize, Deserialize, From, TryInto, Eq, PartialEq,
+)]
+#[serde(untagged)]
+pub enum FilterResults {
+	/// new block or transaction hashes
+	Hashes(Vec<H256>),
+	/// new logs
+	Logs(Vec<Log>),
+}
+impl Default for FilterResults {
+	fn default() -> Self {
+		FilterResults::Hashes(Default::default())
+	}
+}
+
 /// Transaction object generic to all types
 #[derive(
 	Debug, Default, Clone, Encode, Decode, TypeInfo, Serialize, Deserialize, Eq, PartialEq,
 )]
@@ -326,6 +364,26 @@ impl Default for TransactionUnsigned {
 /// Access list
 pub type AccessList = Vec<AccessListEntry>;

+/// Address(es)
+#[derive(
+	Debug, Clone, Encode, Decode, TypeInfo, Serialize, Deserialize, From, TryInto, Eq, PartialEq,
+)]
+#[serde(untagged)]
+pub enum AddressOrAddresses {
+	/// Address
+	Address(Address),
+	/// Addresses
+	Addresses(Addresses),
+}
+impl Default for AddressOrAddresses {
+	fn default() -> Self {
+		AddressOrAddresses::Address(Default::default())
+	}
+}
+
+/// hex encoded address
+pub type Addresses = Vec<Address>;
; + /// Block tag /// `earliest`: The lowest numbered block the client has available; `finalized`: The most recent /// crypto-economically secure block, cannot be re-orged outside of manual intervention driven by @@ -353,6 +411,9 @@ pub enum BlockTag { Pending, } +/// Filter Topics +pub type FilterTopics = Vec; + #[derive( Debug, Clone, Encode, Decode, TypeInfo, Serialize, Deserialize, From, TryInto, Eq, PartialEq, )] @@ -604,6 +665,23 @@ pub struct AccessListEntry { pub storage_keys: Vec, } +/// Filter Topic List Entry +#[derive( + Debug, Clone, Encode, Decode, TypeInfo, Serialize, Deserialize, From, TryInto, Eq, PartialEq, +)] +#[serde(untagged)] +pub enum FilterTopic { + /// Single Topic Match + Single(H256), + /// Multiple Topic Match + Multiple(Vec), +} +impl Default for FilterTopic { + fn default() -> Self { + FilterTopic::Single(Default::default()) + } +} + /// Signed 1559 Transaction #[derive( Debug, Default, Clone, Encode, Decode, TypeInfo, Serialize, Deserialize, Eq, PartialEq, diff --git a/substrate/frame/revive/src/lib.rs b/substrate/frame/revive/src/lib.rs index 69d60c5d6a952..def5323767d1b 100644 --- a/substrate/frame/revive/src/lib.rs +++ b/substrate/frame/revive/src/lib.rs @@ -723,7 +723,6 @@ pub mod pallet { #[pallet::compact] storage_deposit_limit: BalanceOf, data: Vec, ) -> DispatchResultWithPostInfo { - log::info!(target: LOG_TARGET, "Call: {:?} {:?} {:?}", dest, value, data); let mut output = Self::bare_call( origin, dest,