Skip to content

Commit

Permalink
chore: add test to check state init after processing batch transacti…
Browse files Browse the repository at this point in the history
…on (#179)
  • Loading branch information
aleksuss authored Nov 5, 2024
1 parent 6ee3ecf commit 5c29025
Show file tree
Hide file tree
Showing 4 changed files with 76,971 additions and 14 deletions.
4 changes: 2 additions & 2 deletions refiner-app/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -68,8 +68,8 @@ async fn main() -> anyhow::Result<()> {
})?;

aurora_refiner_lib::storage::init_storage(
engine_path.to_path_buf(),
config.refiner.engine_account_id.clone(),
engine_path,
&config.refiner.engine_account_id,
config.refiner.chain_id,
);

Expand Down
83 changes: 79 additions & 4 deletions refiner-lib/src/near_stream.rs
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,9 @@ impl NearStream {

#[cfg(test)]
pub mod tests {
use aurora_engine::state::EngineStateError;
use aurora_engine_types::account_id::AccountId;
use aurora_engine_types::U256;
use aurora_refiner_types::aurora_block::NearBlock;
use engine_standalone_storage::json_snapshot::{initialize_engine_state, types::JsonSnapshot};
use std::{collections::HashSet, matches};
Expand All @@ -137,6 +139,38 @@ pub mod tests {
assert!(aurora_block.transactions.is_empty());
}

#[tokio::test]
async fn test_block_131407300() {
    // Block 131407300 carries a receipt whose batch of actions includes one
    // that inits the silo contract. Verify that processing the batch
    // initializes the borealis engine's state correctly.
    let db_dir = tempfile::tempdir().unwrap();
    let chain_id = 1313161566;
    let ctx = TestContextBuilder::new()
        .with_account_id("0x4e45415e.c.aurora")
        .with_chain_id(chain_id)
        .build(&db_dir);
    let mut stream = ctx.create_stream();

    // Before the block is processed, no engine state may exist yet.
    {
        let storage = stream.context.storage.read().await;
        let before = storage
            .with_engine_access(131407300, 1, &[], |io| aurora_engine::state::get_state(&io))
            .result;
        assert!(matches!(before, Err(EngineStateError::NotFound)));
    }

    // Process the block; afterwards the state must exist and hold our chain id.
    let block = read_block("tests/res/block_131407300.json");
    let _ = stream.next_block(&block).await;

    let storage = stream.context.storage.read().await;
    let state = storage
        .with_engine_access(131407300, 1, &[], |io| aurora_engine::state::get_state(&io))
        .result
        .unwrap();
    assert_eq!(U256::from_big_endian(&state.chain_id).as_u64(), chain_id);
}

#[tokio::test]
async fn test_block_89402026() {
let db_dir = tempfile::tempdir().unwrap();
Expand All @@ -155,7 +189,7 @@ pub mod tests {

#[tokio::test]
async fn test_block_84423722() {
// The block at hight 84423722 contains a transaction with zero actions.
// The block at height 84423722 contains a transaction with zero actions.
// The refiner should be able to process such a block without crashing.

let db_dir = tempfile::tempdir().unwrap();
Expand Down Expand Up @@ -365,13 +399,20 @@ pub mod tests {

impl TestContext {
/// Convenience constructor: builds a `TestContext` with the builder's default
/// settings (account id "aurora", chain id 1313161554) backed by `db_dir`.
pub fn new(db_dir: &tempfile::TempDir) -> Self {
    TestContextBuilder::default().build(db_dir)
}

pub fn new_with_args(
db_dir: &tempfile::TempDir,
account_id: AccountId,
chain_id: u64,
) -> Self {
let engine_path = db_dir.path().join("engine");
let tracker_path = db_dir.path().join("tracker");
let chain_id = 1313161554_u64;
let account_id: AccountId = "aurora".parse().unwrap();
crate::storage::init_storage(engine_path.clone(), account_id.clone(), chain_id);
crate::storage::init_storage(&engine_path, &account_id, chain_id);
let engine_context = EngineContext::new(&engine_path, account_id, chain_id).unwrap();
let tx_tracker = TxHashTracker::new(tracker_path, 0).unwrap();

Self {
chain_id,
engine_context,
Expand All @@ -392,4 +433,38 @@ pub mod tests {
NearStream::new(self.chain_id, None, self.engine_context, self.tx_tracker)
}
}

/// Builder for `TestContext`, letting tests override the engine account id
/// and chain id before the underlying storage is initialized.
pub struct TestContextBuilder {
    // EVM chain id written into the engine state (default: 1313161554).
    chain_id: u64,
    // NEAR account id of the engine contract (default: "aurora").
    account_id: AccountId,
}

impl TestContextBuilder {
    /// Creates a builder with the defaults used by most tests:
    /// account id "aurora" and chain id 1313161554.
    pub fn new() -> Self {
        Self {
            chain_id: 1313161554,
            // "aurora" is a known-valid account id, so parsing cannot fail.
            account_id: "aurora".parse().expect("default account id is valid"),
        }
    }

    /// Overrides the engine account id.
    ///
    /// # Panics
    /// Panics if `account_id` is not a valid NEAR account id.
    pub fn with_account_id(mut self, account_id: &str) -> Self {
        self.account_id = account_id
            .parse()
            .expect("account id passed to the builder must be valid");
        self
    }

    /// Overrides the chain id written into the engine state.
    pub fn with_chain_id(mut self, chain_id: u64) -> Self {
        self.chain_id = chain_id;
        self
    }

    /// Consumes the builder and constructs a `TestContext` backed by `db_dir`.
    pub fn build(self, db_dir: &tempfile::TempDir) -> TestContext {
        TestContext::new_with_args(db_dir, self.account_id, self.chain_id)
    }
}

impl Default for TestContextBuilder {
fn default() -> Self {
Self::new()
}
}
}
20 changes: 12 additions & 8 deletions refiner-lib/src/storage.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use std::path::PathBuf;
use std::path::Path;

use aurora_engine_types::{account_id::AccountId, H256, U256};
use engine_standalone_storage::{Storage, StoragePrefix};
Expand All @@ -8,12 +8,12 @@ const VERSION: u8 = 0;
/// Write to the DB in batches of 100k heights at a time
const BATCH_SIZE: usize = 100_000;

pub fn init_storage(storage_path: PathBuf, account_id: AccountId, chain_id: u64) {
let mut storage = migrate_block_hash(storage_path, &account_id, chain_id);
pub fn init_storage<P: AsRef<Path>>(storage_path: P, account_id: &AccountId, chain_id: u64) {
let mut storage = migrate_block_hash(storage_path, account_id, chain_id);

match storage.get_engine_account_id() {
Ok(stored_id) => {
if stored_id != account_id {
if &stored_id != account_id {
panic!(
"Provided engine_account_id={} is not equal to account_id_stored={}",
account_id, stored_id
Expand All @@ -25,7 +25,7 @@ pub fn init_storage(storage_path: PathBuf, account_id: AccountId, chain_id: u64)
"No engine_account_id set in DB. Setting to configured engine_account_id={}",
account_id
);
storage.set_engine_account_id(&account_id).unwrap();
storage.set_engine_account_id(account_id).unwrap();
}
Err(engine_standalone_storage::Error::EngineAccountIdCorrupted) => {
panic!("Fatal error, cannot read engine_account_id from DB. The DB may be corrupted.");
Expand All @@ -36,9 +36,13 @@ pub fn init_storage(storage_path: PathBuf, account_id: AccountId, chain_id: u64)
};
}

fn migrate_block_hash(storage_path: PathBuf, account_id: &AccountId, chain_id: u64) -> Storage {
fn migrate_block_hash<P: AsRef<Path>>(
storage_path: P,
account_id: &AccountId,
chain_id: u64,
) -> Storage {
let chain_id = aurora_engine_types::types::u256_to_arr(&U256::from(chain_id));
let mut storage = Storage::open(storage_path.clone()).unwrap();
let mut storage = Storage::open(&storage_path).unwrap();
let (block_hash, block_height) = match storage.get_latest_block() {
Ok(x) => x,
// If there are no blocks then there is nothing to migrate
Expand All @@ -53,7 +57,7 @@ fn migrate_block_hash(storage_path: PathBuf, account_id: &AccountId, chain_id: u
// Close the current storage instance because we're going to need low-level access to the DB.
let (_, mut block_height) = storage.get_earliest_block().unwrap();
drop(storage);
let db = rocksdb::DB::open_default(storage_path.clone()).unwrap();
let db = rocksdb::DB::open_default(&storage_path).unwrap();

while let MigrationStatus::Continue(height) =
block_hash_migration_batch(&db, block_height, account_id.as_bytes(), chain_id)
Expand Down
Loading

0 comments on commit 5c29025

Please sign in to comment.