From 274892705b4f518e72de8a6299ef103729340c7f Mon Sep 17 00:00:00 2001 From: Kyle Espinola Date: Sat, 14 Sep 2024 15:24:20 +0200 Subject: [PATCH 1/2] feat: move backfill to bubblegum crate and verfiy to it --- Cargo.lock | 38 ++-- Cargo.toml | 9 +- {backfill => bubblegum}/Cargo.toml | 3 +- {backfill => bubblegum}/README.md | 0 .../src => bubblegum/src/backfill}/gap.rs | 3 +- bubblegum/src/backfill/mod.rs | 2 + .../src/backfill}/worker/gap.rs | 5 +- .../src/backfill}/worker/mod.rs | 0 .../backfill}/worker/program_transformer.rs | 4 +- .../src/backfill}/worker/transaction.rs | 8 +- .../src/backfill}/worker/tree.rs | 10 +- {backfill => bubblegum}/src/error.rs | 0 {backfill => bubblegum}/src/lib.rs | 55 ++++-- {backfill => bubblegum}/src/tree.rs | 5 +- bubblegum/src/verify.rs | 174 ++++++++++++++++++ grpc-ingest/src/config.rs | 3 + grpc-ingest/src/main.rs | 11 ++ grpc-ingest/src/monitor.rs | 5 + ops/Cargo.toml | 2 +- ops/src/bubblegum/backfiller.rs | 8 +- ops/src/bubblegum/cmd.rs | 7 +- ops/src/bubblegum/mod.rs | 1 + ops/src/bubblegum/replay.rs | 4 +- ops/src/bubblegum/verify.rs | 30 +++ 24 files changed, 329 insertions(+), 58 deletions(-) rename {backfill => bubblegum}/Cargo.toml (95%) rename {backfill => bubblegum}/README.md (100%) rename {backfill/src => bubblegum/src/backfill}/gap.rs (98%) create mode 100644 bubblegum/src/backfill/mod.rs rename {backfill/src => bubblegum/src/backfill}/worker/gap.rs (94%) rename {backfill/src => bubblegum/src/backfill}/worker/mod.rs (100%) rename {backfill/src => bubblegum/src/backfill}/worker/program_transformer.rs (96%) rename {backfill/src => bubblegum/src/backfill}/worker/transaction.rs (97%) rename {backfill/src => bubblegum/src/backfill}/worker/tree.rs (95%) rename {backfill => bubblegum}/src/error.rs (100%) rename {backfill => bubblegum}/src/lib.rs (75%) rename {backfill => bubblegum}/src/tree.rs (98%) create mode 100644 bubblegum/src/verify.rs create mode 100644 grpc-ingest/src/monitor.rs create mode 100644 ops/src/bubblegum/verify.rs diff --git a/Cargo.lock b/Cargo.lock index 856a5c9a2..d47aef9b4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -290,6 +290,7 @@ dependencies = [ "anchor-derive-accounts", "anchor-derive-serde", "anchor-derive-space", + "anchor-syn", "arrayref", "base64 0.13.1", "bincode", @@ -1097,7 +1098,7 @@ dependencies = [ "solana-transaction-status", "solana-zk-token-sdk", "spl-account-compression", - "spl-concurrent-merkle-tree", + "spl-concurrent-merkle-tree 0.2.0", "spl-noop", "spl-pod", "spl-token", @@ -1323,9 +1324,9 @@ dependencies = [ [[package]] name = "bytemuck" -version = "1.15.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15" +checksum = "8334215b81e418a0a7bdb8ef0849474f40bb10c8b71f1c4ed315cff49f32494d" dependencies = [ "bytemuck_derive", ] @@ -1816,7 +1817,7 @@ dependencies = [ ] [[package]] -name = "das-backfill" +name = "das-bubblegum" version = "0.7.2" dependencies = [ "anchor-client", @@ -1835,6 +1836,7 @@ dependencies = [ "program_transformers", "sea-orm", "serde_json", + "sha3 0.10.8", "solana-client", "solana-program", "solana-sdk", @@ -1936,7 +1938,7 @@ dependencies = [ "cadence", "cadence-macros", "clap 4.4.8", - "das-backfill", + "das-bubblegum", "das-core", "digital_asset_types", "env_logger 0.10.0", @@ -2138,7 +2140,7 @@ dependencies = [ "serde", "serde_json", "solana-sdk", - "spl-concurrent-merkle-tree", + "spl-concurrent-merkle-tree 0.2.0", "thiserror", "tokio", "url", @@ -3766,9 +3768,9 @@ 
dependencies = [ [[package]] name = "mpl-bubblegum" -version = "1.2.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3cbca5deb859e66a1a21ada94f2eaab3eb5caa4584c0c8ade0efac29a5414b8" +checksum = "a9eff5ae5cafd1acdf7e7c93359da1eec91dcaede318470d9f68b78e8b7469f4" dependencies = [ "borsh 0.10.3", "kaigan", @@ -4066,7 +4068,7 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" dependencies = [ - "proc-macro-crate 1.3.1", + "proc-macro-crate 3.1.0", "proc-macro2", "quote", "syn 2.0.66", @@ -6965,13 +6967,14 @@ dependencies = [ [[package]] name = "spl-account-compression" -version = "0.3.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85c43bd4455d9fb29b9e4f83c087ccffa2f6f41fecfc0549932ae391d00f3378" +checksum = "8ce8314ec6ae26084ec7c6c0802c3dc173ee86aee5f5d5026a3f82c52cfe1c07" dependencies = [ "anchor-lang", "bytemuck", - "spl-concurrent-merkle-tree", + "solana-program", + "spl-concurrent-merkle-tree 0.4.0", "spl-noop", ] @@ -7002,6 +7005,17 @@ dependencies = [ "thiserror", ] +[[package]] +name = "spl-concurrent-merkle-tree" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85d1bbb97252d8a1b90d3d56425038928382a306b71dbba4c836973c94b33f96" +dependencies = [ + "bytemuck", + "solana-program", + "thiserror", +] + [[package]] name = "spl-discriminator" version = "0.1.0" diff --git a/Cargo.toml b/Cargo.toml index 8a77a8473..b0b42139d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace] members = [ - "backfill", + "bubblegum", "blockbuster", "core", "das_api", @@ -48,7 +48,7 @@ cargo-lock = "9.0.0" chrono = "0.4.19" clap = "4.2.2" das-core = { path = "core" } -das-backfill = { path = "backfill" } +das-bubblegum = { path = "bubblegum" } das_api = { path = "das_api" } derive_more = { version = "0.99.17" } digital_asset_types = { path = "digital_asset_types" } @@ -78,7 +78,7 @@ lru = "0.12.3" metrics = "0.20.1" migration = { path = "migration" } mime_guess = "2.0.4" -mpl-bubblegum = "1.2.0" +mpl-bubblegum = "1.4.0" mpl-core = { version = "0.8.0-beta.1", features = ["serde"] } mpl-token-metadata = "4.1.1" nft_ingester = { path = "nft_ingester" } @@ -116,7 +116,7 @@ solana-program = "~1.18" solana-sdk = "~1.18" solana-transaction-status = "~1.18" solana-zk-token-sdk = "1.17.16" -spl-account-compression = "0.3.0" +spl-account-compression = "0.4.0" spl-associated-token-account = ">= 1.1.3, < 3.0" spl-concurrent-merkle-tree = "0.2.0" spl-noop = "0.2.0" @@ -125,6 +125,7 @@ spl-token = ">= 3.5.0, < 5.0" spl-token-2022 = { version = "1.0", features = ["no-entrypoint"] } spl-token-group-interface = "0.1.0" spl-token-metadata-interface = "0.2.0" +sha3 = "0.10.8" sqlx = "0.6.2" stretto = "0.8.4" thiserror = "1.0.31" diff --git a/backfill/Cargo.toml b/bubblegum/Cargo.toml similarity index 95% rename from backfill/Cargo.toml rename to bubblegum/Cargo.toml index ce082f81c..0ecd4704a 100644 --- a/backfill/Cargo.toml +++ b/bubblegum/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "das-backfill" +name = "das-bubblegum" version = { workspace = true } edition = { workspace = true } repository = { workspace = true } @@ -28,6 +28,7 @@ num-traits = { workspace = true } sea-orm = { workspace = true } serde_json = { workspace = true } solana-sdk = { workspace = true } +sha3 = { workspace = true } solana-transaction-status = { workspace = true } 
spl-account-compression = { workspace = true, features = ["no-entrypoint"] } spl-token = { workspace = true, features = ["no-entrypoint"] } diff --git a/backfill/README.md b/bubblegum/README.md similarity index 100% rename from backfill/README.md rename to bubblegum/README.md diff --git a/backfill/src/gap.rs b/bubblegum/src/backfill/gap.rs similarity index 98% rename from backfill/src/gap.rs rename to bubblegum/src/backfill/gap.rs index feb523b98..e167e888d 100644 --- a/backfill/src/gap.rs +++ b/bubblegum/src/backfill/gap.rs @@ -1,5 +1,4 @@ -use super::ErrorKind; -use crate::Rpc; +use crate::{error::ErrorKind, Rpc}; use anyhow::Result; use clap::Args; use sea_orm::{DatabaseConnection, DbBackend, FromQueryResult, Statement, Value}; diff --git a/bubblegum/src/backfill/mod.rs b/bubblegum/src/backfill/mod.rs new file mode 100644 index 000000000..5a4a874d6 --- /dev/null +++ b/bubblegum/src/backfill/mod.rs @@ -0,0 +1,2 @@ +pub mod gap; +pub mod worker; diff --git a/backfill/src/worker/gap.rs b/bubblegum/src/backfill/worker/gap.rs similarity index 94% rename from backfill/src/worker/gap.rs rename to bubblegum/src/backfill/worker/gap.rs index 07b88f4f5..68523fdbe 100644 --- a/backfill/src/worker/gap.rs +++ b/bubblegum/src/backfill/worker/gap.rs @@ -9,8 +9,7 @@ use tokio::{ task::JoinHandle, }; -use crate::gap::TreeGapFill; -use crate::BubblegumBackfillContext; +use crate::{backfill::gap::TreeGapFill, BubblegumContext}; #[derive(Parser, Debug, Clone)] pub struct GapWorkerArgs { @@ -26,7 +25,7 @@ pub struct GapWorkerArgs { impl GapWorkerArgs { pub fn start( &self, - context: BubblegumBackfillContext, + context: BubblegumContext, forward: Sender, ) -> Result<(JoinHandle<()>, Sender)> { let (gap_sender, mut gap_receiver) = channel::(self.gap_channel_size); diff --git a/backfill/src/worker/mod.rs b/bubblegum/src/backfill/worker/mod.rs similarity index 100% rename from backfill/src/worker/mod.rs rename to bubblegum/src/backfill/worker/mod.rs diff --git a/backfill/src/worker/program_transformer.rs b/bubblegum/src/backfill/worker/program_transformer.rs similarity index 96% rename from backfill/src/worker/program_transformer.rs rename to bubblegum/src/backfill/worker/program_transformer.rs index 6e7c20646..75f75c0a7 100644 --- a/backfill/src/worker/program_transformer.rs +++ b/bubblegum/src/backfill/worker/program_transformer.rs @@ -9,7 +9,7 @@ use std::sync::Arc; use tokio::sync::mpsc::{channel, Sender, UnboundedSender}; use tokio::task::JoinHandle; -use crate::BubblegumBackfillContext; +use crate::BubblegumContext; #[derive(Parser, Debug, Clone)] pub struct ProgramTransformerWorkerArgs { @@ -22,7 +22,7 @@ pub struct ProgramTransformerWorkerArgs { impl ProgramTransformerWorkerArgs { pub fn start( &self, - context: BubblegumBackfillContext, + context: BubblegumContext, forwarder: UnboundedSender, ) -> Result<(JoinHandle<()>, Sender)> { let (sender, mut receiver) = diff --git a/backfill/src/worker/transaction.rs b/bubblegum/src/backfill/worker/transaction.rs similarity index 97% rename from backfill/src/worker/transaction.rs rename to bubblegum/src/backfill/worker/transaction.rs index 6a25f6f1b..910b79a92 100644 --- a/backfill/src/worker/transaction.rs +++ b/bubblegum/src/backfill/worker/transaction.rs @@ -1,4 +1,4 @@ -use crate::error::ErrorKind; +use crate::{error::ErrorKind, BubblegumContext}; use anyhow::Result; use clap::Parser; use das_core::Rpc; @@ -143,11 +143,13 @@ pub struct SignatureWorkerArgs { pub signature_worker_count: usize, } +type TransactionSender = Sender; + impl SignatureWorkerArgs { 
pub fn start( &self, - context: crate::BubblegumBackfillContext, - forwarder: Sender, + context: BubblegumContext, + forwarder: TransactionSender, ) -> Result<(JoinHandle<()>, Sender)> { let (sig_sender, mut sig_receiver) = channel::(self.signature_channel_size); let worker_count = self.signature_worker_count; diff --git a/backfill/src/worker/tree.rs b/bubblegum/src/backfill/worker/tree.rs similarity index 95% rename from backfill/src/worker/tree.rs rename to bubblegum/src/backfill/worker/tree.rs index e6db2b8d9..e02c21305 100644 --- a/backfill/src/worker/tree.rs +++ b/bubblegum/src/backfill/worker/tree.rs @@ -1,7 +1,7 @@ use crate::{ - gap::{TreeGapFill, TreeGapModel}, + backfill::gap::{TreeGapFill, TreeGapModel}, tree::TreeResponse, - BubblegumBackfillContext, + BubblegumContext, }; use anyhow::Result; use clap::Parser; @@ -32,11 +32,7 @@ pub struct TreeWorkerArgs { pub force: bool, } impl TreeWorkerArgs { - pub fn start( - &self, - context: BubblegumBackfillContext, - tree: TreeResponse, - ) -> JoinHandle> { + pub fn start(&self, context: BubblegumContext, tree: TreeResponse) -> JoinHandle> { let db_pool = context.database_pool.clone(); let metadata_json_download_db_pool = context.database_pool.clone(); diff --git a/backfill/src/error.rs b/bubblegum/src/error.rs similarity index 100% rename from backfill/src/error.rs rename to bubblegum/src/error.rs diff --git a/backfill/src/lib.rs b/bubblegum/src/lib.rs similarity index 75% rename from backfill/src/lib.rs rename to bubblegum/src/lib.rs index dac3aba44..09aa52647 100644 --- a/backfill/src/lib.rs +++ b/bubblegum/src/lib.rs @@ -1,12 +1,13 @@ +mod backfill; mod error; -mod gap; mod tree; -mod worker; use das_core::{MetadataJsonDownloadWorkerArgs, Rpc}; pub use error::ErrorKind; +mod verify; use anyhow::Result; +use backfill::worker::{ProgramTransformerWorkerArgs, SignatureWorkerArgs, TreeWorkerArgs}; use clap::Parser; use digital_asset_types::dao::cl_audits_v2; use futures::{stream::FuturesUnordered, StreamExt}; @@ -17,18 +18,16 @@ use sea_orm::{EntityTrait, QueryFilter}; use solana_sdk::pubkey::Pubkey; use solana_sdk::signature::Signature; use std::str::FromStr; -use tracing::error; +use tracing::{error, info}; use tree::TreeResponse; -use worker::ProgramTransformerWorkerArgs; -use worker::{SignatureWorkerArgs, TreeWorkerArgs}; #[derive(Clone)] -pub struct BubblegumBackfillContext { +pub struct BubblegumContext { pub database_pool: sqlx::PgPool, pub solana_rpc: Rpc, } -impl BubblegumBackfillContext { +impl BubblegumContext { pub const fn new(database_pool: sqlx::PgPool, solana_rpc: Rpc) -> Self { Self { database_pool, @@ -38,7 +37,7 @@ impl BubblegumBackfillContext { } #[derive(Debug, Parser, Clone)] -pub struct BubblegumBackfillArgs { +pub struct BackfillArgs { /// Number of tree crawler workers #[arg(long, env, default_value = "20")] pub tree_crawler_count: usize, @@ -51,10 +50,17 @@ pub struct BubblegumBackfillArgs { pub tree_worker: TreeWorkerArgs, } -pub async fn start_bubblegum_backfill( - context: BubblegumBackfillContext, - args: BubblegumBackfillArgs, -) -> Result<()> { +#[derive(Debug, Parser, Clone)] +pub struct VerifyArgs { + /// The list of trees to verify. If not specified, all trees will be crawled. 
+ #[arg(long, env, use_value_delimiter = true)] + pub only_trees: Option>, + + #[arg(long, env, default_value = "20")] + pub max_concurrency: usize, +} + +pub async fn start_backfill(context: BubblegumContext, args: BackfillArgs) -> Result<()> { let trees = if let Some(ref only_trees) = args.only_trees { TreeResponse::find(&context.solana_rpc, only_trees.clone()).await? } else { @@ -99,7 +105,7 @@ pub struct BubblegumReplayArgs { } pub async fn start_bubblegum_replay( - context: BubblegumBackfillContext, + context: BubblegumContext, args: BubblegumReplayArgs, ) -> Result<()> { let pubkey = Pubkey::from_str(&args.tree) @@ -154,3 +160,26 @@ pub async fn start_bubblegum_replay( Ok(()) } + +pub async fn verify_bubblegum(context: BubblegumContext, args: VerifyArgs) -> Result<()> { + let trees = if let Some(ref only_trees) = args.only_trees { + TreeResponse::find(&context.solana_rpc, only_trees.clone()).await? + } else { + TreeResponse::all(&context.solana_rpc).await? + }; + + for tree in trees { + let report = verify::check(context.clone(), tree, args.max_concurrency).await?; + + info!( + "Tree: {}, Total Leaves: {}, Incorrect Proofs: {}, Not Found Proofs: {}, Correct Proofs: {}", + report.tree_pubkey, + report.total_leaves, + report.incorrect_proofs, + report.not_found_proofs, + report.correct_proofs + ); + } + + Ok(()) +} diff --git a/backfill/src/tree.rs b/bubblegum/src/tree.rs similarity index 98% rename from backfill/src/tree.rs rename to bubblegum/src/tree.rs index efb5c1cbf..405c90037 100644 --- a/backfill/src/tree.rs +++ b/bubblegum/src/tree.rs @@ -1,4 +1,4 @@ -use super::ErrorKind; +use super::error::ErrorKind; use anyhow::Result; use borsh::BorshDeserialize; use das_core::Rpc; @@ -10,7 +10,7 @@ use spl_account_compression::state::{ }; use std::str::FromStr; -#[derive(Debug, Clone)] +#[derive(Clone)] pub struct TreeHeaderResponse { pub max_depth: u32, pub max_buffer_size: u32, @@ -33,7 +33,6 @@ impl TryFrom for TreeHeaderResponse { } } -#[derive(Debug, Clone)] pub struct TreeResponse { pub pubkey: Pubkey, pub tree_header: TreeHeaderResponse, diff --git a/bubblegum/src/verify.rs b/bubblegum/src/verify.rs new file mode 100644 index 000000000..e83297ee9 --- /dev/null +++ b/bubblegum/src/verify.rs @@ -0,0 +1,174 @@ +use super::BubblegumContext; +use crate::error::ErrorKind; +use crate::tree::TreeResponse; +use anyhow::{anyhow, Result}; +use borsh::BorshDeserialize; +use digital_asset_types::dapi::get_proof_for_asset; +use digital_asset_types::rpc::AssetProof; +use futures::stream::{FuturesUnordered, StreamExt}; +use mpl_bubblegum::accounts::TreeConfig; +use sea_orm::SqlxPostgresConnector; +use sha3::{Digest, Keccak256}; +use solana_sdk::{pubkey::Pubkey, syscalls::MAX_CPI_INSTRUCTION_ACCOUNTS}; +use spl_account_compression::{ + canopy::fill_in_proof_from_canopy, + concurrent_tree_wrapper::ProveLeafArgs, + state::{ + merkle_tree_get_size, ConcurrentMerkleTreeHeader, CONCURRENT_MERKLE_TREE_HEADER_SIZE_V1, + }, +}; +use std::sync::Arc; +use tokio::sync::Mutex; +use tracing::error; + +trait TryFromAssetProof { + fn try_from_asset_proof(proof: AssetProof) -> Result + where + Self: Sized; +} + +impl TryFromAssetProof for ProveLeafArgs { + fn try_from_asset_proof(proof: AssetProof) -> Result { + Ok(ProveLeafArgs { + current_root: bs58::decode(&proof.root) + .into_vec() + .map_err(|e| anyhow!(e))? + .try_into() + .map_err(|_| anyhow!("Invalid root length"))?, + leaf: bs58::decode(&proof.leaf) + .into_vec() + .map_err(|e| anyhow!(e))? 
+ .try_into() + .map_err(|_| anyhow!("Invalid leaf length"))?, + proof_vec: proof + .proof + .iter() + .map(|p| { + bs58::decode(p) + .into_vec() + .map_err(|e| anyhow!(e)) + .and_then(|v| v.try_into().map_err(|_| anyhow!("Invalid proof length"))) + }) + .collect::>>()?, + index: proof.node_index as u32, + }) + } +} + +fn hash(left: &[u8], right: &[u8]) -> [u8; 32] { + let mut hasher = Keccak256::new(); + hasher.update(left); + hasher.update(right); + let result = hasher.finalize(); + let mut hash = [0u8; 32]; + hash.copy_from_slice(&result); + hash +} + +fn verify_merkle_proof(root: [u8; 32], proof: &ProveLeafArgs) -> bool { + let mut node = proof.leaf; + for (i, sibling) in proof.proof_vec.iter().enumerate() { + if (proof.index >> i) & 1 == 0 { + node = hash(&node, sibling); + } else { + node = hash(sibling, &node); + } + } + node == root +} + +#[derive(Debug)] +pub struct ProofReport { + pub tree_pubkey: Pubkey, + pub total_leaves: usize, + pub incorrect_proofs: usize, + pub not_found_proofs: usize, + pub correct_proofs: usize, +} + +enum ProofResult { + Correct, + Incorrect, + NotFound, +} + +pub async fn check( + context: BubblegumContext, + tree: TreeResponse, + max_concurrency: usize, +) -> Result { + let (tree_config_pubkey, _) = TreeConfig::find_pda(&tree.pubkey); + + let pool = context.database_pool.clone(); + + let account = context.solana_rpc.get_account(&tree_config_pubkey).await?; + let account = account + .value + .ok_or_else(|| ErrorKind::Generic("Account not found".to_string()))?; + + let tree_config = TreeConfig::from_bytes(account.data.as_slice())?; + + let report = Arc::new(Mutex::new(ProofReport { + tree_pubkey: tree.pubkey, + total_leaves: tree_config.num_minted as usize, + incorrect_proofs: 0, + not_found_proofs: 0, + correct_proofs: 0, + })); + + let mut tasks = FuturesUnordered::new(); + + for i in 0..tree_config.num_minted { + if tasks.len() >= max_concurrency { + tasks.next().await; + } + + let db = SqlxPostgresConnector::from_sqlx_postgres_pool(pool.clone()); + let tree_pubkey = tree.pubkey.clone(); + let report = Arc::clone(&report); + + tasks.push(tokio::spawn(async move { + let (asset, _) = Pubkey::find_program_address( + &[b"asset", &tree_pubkey.to_bytes(), &i.to_le_bytes()], + &mpl_bubblegum::ID, + ); + let result: Result = + match get_proof_for_asset(&db, asset.to_bytes().to_vec()).await { + Ok(proof) => match ProveLeafArgs::try_from_asset_proof(proof) { + Ok(prove_leaf_args) => { + if verify_merkle_proof(prove_leaf_args.current_root, &prove_leaf_args) { + Ok(ProofResult::Correct) + } else { + Ok(ProofResult::Incorrect) + } + } + Err(_) => Ok(ProofResult::Incorrect), + }, + Err(_) => Ok(ProofResult::NotFound), + }; + + if let Ok(proof_result) = result { + let mut report = report.lock().await; + match proof_result { + ProofResult::Correct => report.correct_proofs += 1, + ProofResult::Incorrect => { + report.incorrect_proofs += 1; + error!(tree = %tree_pubkey, leaf_index = i, asset = %asset, "Incorrect proof found"); + } + ProofResult::NotFound => { + report.not_found_proofs += 1; + error!(tree = %tree_pubkey, leaf_index = i, asset = %asset, "Proof not found"); + } + } + } + })); + } + + while tasks.next().await.is_some() {} + + let final_report = Arc::try_unwrap(report) + .expect("Failed to unwrap Arc") + .into_inner(); + + Ok(final_report) +} diff --git a/grpc-ingest/src/config.rs b/grpc-ingest/src/config.rs index 64ef42953..ecad0d46b 100644 --- a/grpc-ingest/src/config.rs +++ b/grpc-ingest/src/config.rs @@ -335,3 +335,6 @@ impl 
ConfigIngesterDownloadMetadata { Duration::from_millis(3_000) } } + +#[derive(Debug, Clone, Deserialize)] +pub struct ConfigMonitor {} diff --git a/grpc-ingest/src/main.rs b/grpc-ingest/src/main.rs index 907e1e00c..99ad667be 100644 --- a/grpc-ingest/src/main.rs +++ b/grpc-ingest/src/main.rs @@ -6,12 +6,14 @@ use { }, anyhow::Context, clap::{Parser, Subcommand}, + config::ConfigMonitor, std::net::SocketAddr, }; mod config; mod grpc; mod ingester; +mod monitor; mod postgres; mod prom; mod redis; @@ -42,6 +44,8 @@ enum ArgsAction { /// Run ingester process (process events from Redis) #[command(name = "ingester")] Ingester, + #[command(name = "monitor")] + Monitor, } #[tokio::main] @@ -72,5 +76,12 @@ async fn main() -> anyhow::Result<()> { .with_context(|| format!("failed to parse config from: {}", args.config))?; ingester::run(config).await } + ArgsAction::Monitor => { + let config = config_load::(&args.config) + .await + .with_context(|| format!("failed to parse config from: {}", args.config))?; + + monitor::run(config).await + } } } diff --git a/grpc-ingest/src/monitor.rs b/grpc-ingest/src/monitor.rs new file mode 100644 index 000000000..88e0bf308 --- /dev/null +++ b/grpc-ingest/src/monitor.rs @@ -0,0 +1,5 @@ +use crate::config::ConfigMonitor; + +pub async fn run(config: ConfigMonitor) -> anyhow::Result<()> { + Ok(()) +} diff --git a/ops/Cargo.toml b/ops/Cargo.toml index 1f37301b5..d0a3077ea 100644 --- a/ops/Cargo.toml +++ b/ops/Cargo.toml @@ -16,7 +16,7 @@ bs58 = { workspace = true } cadence = { workspace = true } cadence-macros = { workspace = true } clap = { workspace = true, features = ["derive", "cargo", "env"] } -das-backfill = { workspace = true } +das-bubblegum = { workspace = true } das-core = { workspace = true } digital_asset_types = { workspace = true } env_logger = { workspace = true } diff --git a/ops/src/bubblegum/backfiller.rs b/ops/src/bubblegum/backfiller.rs index a53996e64..c13315aaa 100644 --- a/ops/src/bubblegum/backfiller.rs +++ b/ops/src/bubblegum/backfiller.rs @@ -1,13 +1,13 @@ use anyhow::Result; use clap::Parser; -use das_backfill::{start_bubblegum_backfill, BubblegumBackfillArgs, BubblegumBackfillContext}; +use das_bubblegum::{start_backfill, BackfillArgs, BubblegumContext}; use das_core::{connect_db, PoolArgs, Rpc, SolanaRpcArgs}; #[derive(Debug, Parser, Clone)] pub struct Args { /// Backfill Bubblegum Args #[clap(flatten)] - pub backfill_bubblegum: BubblegumBackfillArgs, + pub backfill_bubblegum: BackfillArgs, /// Database configuration #[clap(flatten)] @@ -47,7 +47,7 @@ pub async fn run(config: Args) -> Result<()> { let database_pool = connect_db(&config.database).await?; let solana_rpc = Rpc::from_config(&config.solana); - let context = BubblegumBackfillContext::new(database_pool, solana_rpc); + let context = BubblegumContext::new(database_pool, solana_rpc); - start_bubblegum_backfill(context, config.backfill_bubblegum).await + start_backfill(context, config.backfill_bubblegum).await } diff --git a/ops/src/bubblegum/cmd.rs b/ops/src/bubblegum/cmd.rs index 517c80835..dd19c3246 100644 --- a/ops/src/bubblegum/cmd.rs +++ b/ops/src/bubblegum/cmd.rs @@ -1,4 +1,4 @@ -use super::{backfiller, replay}; +use super::{backfiller, replay, verify}; use anyhow::Result; use clap::{Args, Subcommand}; @@ -10,6 +10,8 @@ pub enum Commands { Backfill(backfiller::Args), #[clap(name = "replay")] Replay(replay::Args), + /// The 'verify' command is used to verify the integrity of the bubblegum index. 
+ Verify(verify::Args), } #[derive(Debug, Clone, Args)] @@ -26,6 +28,9 @@ pub async fn subcommand(subcommand: BubblegumCommand) -> Result<()> { Commands::Replay(args) => { replay::run(args).await?; } + Commands::Verify(args) => { + verify::run(args).await?; + } } Ok(()) diff --git a/ops/src/bubblegum/mod.rs b/ops/src/bubblegum/mod.rs index 6fd6d0efb..0f683a332 100644 --- a/ops/src/bubblegum/mod.rs +++ b/ops/src/bubblegum/mod.rs @@ -1,5 +1,6 @@ mod backfiller; mod cmd; mod replay; +mod verify; pub use cmd::*; diff --git a/ops/src/bubblegum/replay.rs b/ops/src/bubblegum/replay.rs index 23647cbbe..c777b913c 100644 --- a/ops/src/bubblegum/replay.rs +++ b/ops/src/bubblegum/replay.rs @@ -1,6 +1,6 @@ use anyhow::Result; use clap::Parser; -use das_backfill::{start_bubblegum_replay, BubblegumBackfillContext, BubblegumReplayArgs}; +use das_bubblegum::{start_bubblegum_replay, BubblegumContext, BubblegumReplayArgs}; use das_core::{connect_db, PoolArgs, Rpc, SolanaRpcArgs}; #[derive(Debug, Parser, Clone)] @@ -21,7 +21,7 @@ pub async fn run(config: Args) -> Result<()> { let database_pool = connect_db(&config.database).await?; let solana_rpc = Rpc::from_config(&config.solana); - let context = BubblegumBackfillContext::new(database_pool, solana_rpc); + let context = BubblegumContext::new(database_pool, solana_rpc); start_bubblegum_replay(context, config.replay_bubblegum).await } diff --git a/ops/src/bubblegum/verify.rs b/ops/src/bubblegum/verify.rs new file mode 100644 index 000000000..aa9a362b4 --- /dev/null +++ b/ops/src/bubblegum/verify.rs @@ -0,0 +1,30 @@ +use anyhow::Result; +use clap::Parser; +use das_bubblegum::{verify_bubblegum, BubblegumContext, VerifyArgs}; +use das_core::{connect_db, PoolArgs, Rpc, SolanaRpcArgs}; + +#[derive(Debug, Parser, Clone)] +pub struct Args { + /// Verify Bubblegum Args + #[clap(flatten)] + pub verify_bubblegum: VerifyArgs, + + /// Database configuration + #[clap(flatten)] + pub database: PoolArgs, + + /// Solana configuration + #[clap(flatten)] + pub solana: SolanaRpcArgs, +} + +pub async fn run(config: Args) -> Result<()> { + let database_pool = connect_db(&config.database).await?; + + let solana_rpc = Rpc::from_config(&config.solana); + let context = BubblegumContext::new(database_pool, solana_rpc); + + verify_bubblegum(context, config.verify_bubblegum).await?; + + Ok(()) +} From 5127a2b52efd0599f1aeed8437acc435f74f4fea Mon Sep 17 00:00:00 2001 From: Kyle Espinola Date: Tue, 29 Oct 2024 08:04:55 +0100 Subject: [PATCH 2/2] feat: grpc ingest monitor --- Cargo.lock | 2 + bubblegum/src/lib.rs | 41 ++++++++--------- bubblegum/src/verify.rs | 83 ++++++++++++++++++---------------- grpc-ingest/Cargo.toml | 1 + grpc-ingest/config-monitor.yml | 8 ++++ grpc-ingest/src/config.rs | 45 ++++++++++++++---- grpc-ingest/src/monitor.rs | 44 +++++++++++++++++- grpc-ingest/src/postgres.rs | 4 +- grpc-ingest/src/prom.rs | 53 ++++++++++++++++++++++ ops/Cargo.toml | 3 +- ops/src/bubblegum/verify.rs | 14 +++++- 11 files changed, 224 insertions(+), 74 deletions(-) create mode 100644 grpc-ingest/config-monitor.yml diff --git a/Cargo.lock b/Cargo.lock index d47aef9b4..d3563d3b0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1892,6 +1892,7 @@ dependencies = [ "cargo-lock", "chrono", "clap 4.4.8", + "das-bubblegum", "das-core", "digital_asset_types", "futures", @@ -1960,6 +1961,7 @@ dependencies = [ "sqlx", "thiserror", "tokio", + "tracing", ] [[package]] diff --git a/bubblegum/src/lib.rs b/bubblegum/src/lib.rs index 09aa52647..f1dc031c6 100644 --- a/bubblegum/src/lib.rs +++ b/bubblegum/src/lib.rs 
@@ -5,6 +5,7 @@ mod tree; use das_core::{MetadataJsonDownloadWorkerArgs, Rpc}; pub use error::ErrorKind; mod verify; +pub use verify::ProofReport; use anyhow::Result; use backfill::worker::{ProgramTransformerWorkerArgs, SignatureWorkerArgs, TreeWorkerArgs}; @@ -18,7 +19,7 @@ use sea_orm::{EntityTrait, QueryFilter}; use solana_sdk::pubkey::Pubkey; use solana_sdk::signature::Signature; use std::str::FromStr; -use tracing::{error, info}; +use tracing::error; use tree::TreeResponse; #[derive(Clone)] @@ -50,16 +51,6 @@ pub struct BackfillArgs { pub tree_worker: TreeWorkerArgs, } -#[derive(Debug, Parser, Clone)] -pub struct VerifyArgs { - /// The list of trees to verify. If not specified, all trees will be crawled. - #[arg(long, env, use_value_delimiter = true)] - pub only_trees: Option>, - - #[arg(long, env, default_value = "20")] - pub max_concurrency: usize, -} - pub async fn start_backfill(context: BubblegumContext, args: BackfillArgs) -> Result<()> { let trees = if let Some(ref only_trees) = args.only_trees { TreeResponse::find(&context.solana_rpc, only_trees.clone()).await? @@ -161,25 +152,33 @@ pub async fn start_bubblegum_replay( Ok(()) } -pub async fn verify_bubblegum(context: BubblegumContext, args: VerifyArgs) -> Result<()> { +#[derive(Debug, Parser, Clone)] +pub struct VerifyArgs { + /// The list of trees to verify. If not specified, all trees will be crawled. + #[arg(long, env, use_value_delimiter = true)] + pub only_trees: Option>, + + #[arg(long, env, default_value = "20")] + pub max_concurrency: usize, +} + +pub async fn verify_bubblegum( + context: BubblegumContext, + args: VerifyArgs, +) -> Result> { let trees = if let Some(ref only_trees) = args.only_trees { TreeResponse::find(&context.solana_rpc, only_trees.clone()).await? } else { TreeResponse::all(&context.solana_rpc).await? 
}; + let mut reports = Vec::new(); + for tree in trees { let report = verify::check(context.clone(), tree, args.max_concurrency).await?; - info!( - "Tree: {}, Total Leaves: {}, Incorrect Proofs: {}, Not Found Proofs: {}, Correct Proofs: {}", - report.tree_pubkey, - report.total_leaves, - report.incorrect_proofs, - report.not_found_proofs, - report.correct_proofs - ); + reports.push(report); } - Ok(()) + Ok(reports) } diff --git a/bubblegum/src/verify.rs b/bubblegum/src/verify.rs index e83297ee9..fae6051d6 100644 --- a/bubblegum/src/verify.rs +++ b/bubblegum/src/verify.rs @@ -2,21 +2,15 @@ use super::BubblegumContext; use crate::error::ErrorKind; use crate::tree::TreeResponse; use anyhow::{anyhow, Result}; -use borsh::BorshDeserialize; use digital_asset_types::dapi::get_proof_for_asset; use digital_asset_types::rpc::AssetProof; use futures::stream::{FuturesUnordered, StreamExt}; use mpl_bubblegum::accounts::TreeConfig; use sea_orm::SqlxPostgresConnector; use sha3::{Digest, Keccak256}; -use solana_sdk::{pubkey::Pubkey, syscalls::MAX_CPI_INSTRUCTION_ACCOUNTS}; -use spl_account_compression::{ - canopy::fill_in_proof_from_canopy, - concurrent_tree_wrapper::ProveLeafArgs, - state::{ - merkle_tree_get_size, ConcurrentMerkleTreeHeader, CONCURRENT_MERKLE_TREE_HEADER_SIZE_V1, - }, -}; +use solana_sdk::pubkey::Pubkey; +use spl_account_compression::concurrent_tree_wrapper::ProveLeafArgs; +use std::fmt; use std::sync::Arc; use tokio::sync::Mutex; use tracing::error; @@ -65,7 +59,7 @@ fn hash(left: &[u8], right: &[u8]) -> [u8; 32] { hash } -fn verify_merkle_proof(root: [u8; 32], proof: &ProveLeafArgs) -> bool { +fn verify_merkle_proof(proof: &ProveLeafArgs) -> bool { let mut node = proof.leaf; for (i, sibling) in proof.proof_vec.iter().enumerate() { if (proof.index >> i) & 1 == 0 { @@ -74,22 +68,43 @@ fn verify_merkle_proof(root: [u8; 32], proof: &ProveLeafArgs) -> bool { node = hash(sibling, &node); } } - node == root + node == proof.current_root } -#[derive(Debug)] +fn leaf_proof_result(proof: AssetProof) -> Result { + match ProveLeafArgs::try_from_asset_proof(proof) { + Ok(proof) if verify_merkle_proof(&proof) => Ok(ProofResult::Correct), + Ok(_) => Ok(ProofResult::Incorrect), + Err(_) => Ok(ProofResult::Corrupt), + } +} + +#[derive(Debug, Default)] pub struct ProofReport { pub tree_pubkey: Pubkey, pub total_leaves: usize, pub incorrect_proofs: usize, pub not_found_proofs: usize, pub correct_proofs: usize, + pub corrupt_proofs: usize, } enum ProofResult { Correct, Incorrect, NotFound, + Corrupt, +} + +impl fmt::Display for ProofResult { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ProofResult::Correct => write!(f, "Correct proof found"), + ProofResult::Incorrect => write!(f, "Incorrect proof found"), + ProofResult::NotFound => write!(f, "Proof not found"), + ProofResult::Corrupt => write!(f, "Corrupt proof found"), + } + } } pub async fn check( @@ -111,9 +126,7 @@ pub async fn check( let report = Arc::new(Mutex::new(ProofReport { tree_pubkey: tree.pubkey, total_leaves: tree_config.num_minted as usize, - incorrect_proofs: 0, - not_found_proofs: 0, - correct_proofs: 0, + ..ProofReport::default() })); let mut tasks = FuturesUnordered::new(); @@ -124,7 +137,7 @@ pub async fn check( } let db = SqlxPostgresConnector::from_sqlx_postgres_pool(pool.clone()); - let tree_pubkey = tree.pubkey.clone(); + let tree_pubkey = tree.pubkey; let report = Arc::clone(&report); tasks.push(tokio::spawn(async move { @@ -132,33 +145,23 @@ pub async fn check( &[b"asset", 
&tree_pubkey.to_bytes(), &i.to_le_bytes()], &mpl_bubblegum::ID, ); - let result: Result = - match get_proof_for_asset(&db, asset.to_bytes().to_vec()).await { - Ok(proof) => match ProveLeafArgs::try_from_asset_proof(proof) { - Ok(prove_leaf_args) => { - if verify_merkle_proof(prove_leaf_args.current_root, &prove_leaf_args) { - Ok(ProofResult::Correct) - } else { - Ok(ProofResult::Incorrect) - } - } - Err(_) => Ok(ProofResult::Incorrect), - }, - Err(_) => Ok(ProofResult::NotFound), - }; - - if let Ok(proof_result) = result { + let proof_lookup: Result = + get_proof_for_asset(&db, asset.to_bytes().to_vec()) + .await + .map_or_else(|_| Ok(ProofResult::NotFound), leaf_proof_result); + + if let Ok(proof_result) = proof_lookup { let mut report = report.lock().await; match proof_result { ProofResult::Correct => report.correct_proofs += 1, - ProofResult::Incorrect => { - report.incorrect_proofs += 1; - error!(tree = %tree_pubkey, leaf_index = i, asset = %asset, "Incorrect proof found"); - } - ProofResult::NotFound => { - report.not_found_proofs += 1; - error!(tree = %tree_pubkey, leaf_index = i, asset = %asset, "Proof not found"); - } + ProofResult::Incorrect => report.incorrect_proofs += 1, + ProofResult::NotFound => report.not_found_proofs += 1, + ProofResult::Corrupt => report.corrupt_proofs += 1, + } + if let ProofResult::Incorrect | ProofResult::NotFound | ProofResult::Corrupt = + proof_result + { + error!(tree = %tree_pubkey, leaf_index = i, asset = %asset, "{}", proof_result); } } })); diff --git a/grpc-ingest/Cargo.toml b/grpc-ingest/Cargo.toml index b4237fdc2..4036be057 100644 --- a/grpc-ingest/Cargo.toml +++ b/grpc-ingest/Cargo.toml @@ -9,6 +9,7 @@ publish = { workspace = true } anyhow = { workspace = true } async-stream = { workspace = true } atty = { workspace = true } +das-bubblegum = { workspace = true } sqlx = { workspace = true, features = [ "macros", "runtime-tokio-rustls", diff --git a/grpc-ingest/config-monitor.yml b/grpc-ingest/config-monitor.yml new file mode 100644 index 000000000..8b3278864 --- /dev/null +++ b/grpc-ingest/config-monitor.yml @@ -0,0 +1,8 @@ +prometheus: 127.0.0.1:8876 +rpc: http://127.0.0.1:8899 +postgres: + url: postgres://solana:solana@localhost/solana + min_connections: 10 + max_connections: 50 +bubblegum: + only_trees: null diff --git a/grpc-ingest/src/config.rs b/grpc-ingest/src/config.rs index ecad0d46b..cab28ad6c 100644 --- a/grpc-ingest/src/config.rs +++ b/grpc-ingest/src/config.rs @@ -214,7 +214,7 @@ where #[derive(Debug, Clone, Deserialize)] pub struct ConfigIngester { pub redis: String, - pub postgres: ConfigIngesterPostgres, + pub postgres: ConfigPostgres, pub download_metadata: ConfigIngesterDownloadMetadata, pub snapshots: ConfigIngestStream, pub accounts: ConfigIngestStream, @@ -231,31 +231,31 @@ pub enum ConfigIngesterRedisStreamType { } #[derive(Debug, Clone, Deserialize)] -pub struct ConfigIngesterPostgres { +pub struct ConfigPostgres { pub url: String, #[serde( - default = "ConfigIngesterPostgres::default_min_connections", + default = "ConfigPostgres::default_min_connections", deserialize_with = "deserialize_usize_str" )] pub min_connections: usize, #[serde( - default = "ConfigIngesterPostgres::default_max_connections", + default = "ConfigPostgres::default_max_connections", deserialize_with = "deserialize_usize_str" )] pub max_connections: usize, #[serde( - default = "ConfigIngesterPostgres::default_idle_timeout", + default = "ConfigPostgres::default_idle_timeout", deserialize_with = "deserialize_duration_str" )] pub idle_timeout: Duration, 
#[serde( - default = "ConfigIngesterPostgres::default_max_lifetime", + default = "ConfigPostgres::default_max_lifetime", deserialize_with = "deserialize_duration_str" )] pub max_lifetime: Duration, } -impl ConfigIngesterPostgres { +impl ConfigPostgres { pub const fn default_min_connections() -> usize { 10 } @@ -337,4 +337,33 @@ impl ConfigIngesterDownloadMetadata { } #[derive(Debug, Clone, Deserialize)] -pub struct ConfigMonitor {} +pub struct ConfigMonitor { + pub postgres: ConfigPostgres, + pub rpc: String, + pub bubblegum: ConfigBubblegumVerify, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct ConfigBubblegumVerify { + #[serde( + default = "ConfigBubblegumVerify::default_report_interval", + deserialize_with = "deserialize_duration_str" + )] + pub report_interval: Duration, + #[serde(default)] + pub only_trees: Option>, + #[serde( + default = "ConfigBubblegumVerify::default_max_concurrency", + deserialize_with = "deserialize_usize_str" + )] + pub max_concurrency: usize, +} + +impl ConfigBubblegumVerify { + pub const fn default_report_interval() -> Duration { + Duration::from_millis(5 * 60 * 1000) + } + pub const fn default_max_concurrency() -> usize { + 20 + } +} diff --git a/grpc-ingest/src/monitor.rs b/grpc-ingest/src/monitor.rs index 88e0bf308..ac0c687ec 100644 --- a/grpc-ingest/src/monitor.rs +++ b/grpc-ingest/src/monitor.rs @@ -1,5 +1,47 @@ -use crate::config::ConfigMonitor; +use crate::postgres::create_pool; +use crate::util::create_shutdown; +use crate::{config::ConfigMonitor, prom::update_tree_proof_report}; +use das_bubblegum::{verify_bubblegum, BubblegumContext, VerifyArgs}; +use das_core::{Rpc, SolanaRpcArgs}; +use futures::stream::StreamExt; +use tracing::error; pub async fn run(config: ConfigMonitor) -> anyhow::Result<()> { + let mut shutdown = create_shutdown()?; + let database_pool = create_pool(config.postgres).await?; + let rpc = Rpc::from_config(&SolanaRpcArgs { + solana_rpc_url: config.rpc, + }); + + let bubblegum_verify = tokio::spawn(async move { + loop { + let bubblegum_context = BubblegumContext::new(database_pool.clone(), rpc.clone()); + let verify_args = VerifyArgs { + only_trees: config.bubblegum.only_trees.clone(), + max_concurrency: config.bubblegum.max_concurrency, + }; + + match verify_bubblegum(bubblegum_context, verify_args).await { + Ok(reports) => { + for report in reports { + update_tree_proof_report(&report); + } + } + Err(e) => { + error!( + message = "Error verifying bubblegum", + error = ?e + ); + } + } + + tokio::time::sleep(tokio::time::Duration::from_millis(150)).await; + } + }); + + if let Some(_signal) = shutdown.next().await {} + + bubblegum_verify.abort(); + Ok(()) } diff --git a/grpc-ingest/src/postgres.rs b/grpc-ingest/src/postgres.rs index 1077bc49b..d38c5250c 100644 --- a/grpc-ingest/src/postgres.rs +++ b/grpc-ingest/src/postgres.rs @@ -1,6 +1,6 @@ use { crate::{ - config::ConfigIngesterPostgres, + config::ConfigPostgres, prom::{pgpool_connections_set, PgpoolConnectionsKind}, }, sqlx::{ @@ -9,7 +9,7 @@ use { }, }; -pub async fn create_pool(config: ConfigIngesterPostgres) -> anyhow::Result { +pub async fn create_pool(config: ConfigPostgres) -> anyhow::Result { let options: PgConnectOptions = config.url.parse()?; PgPoolOptions::new() .min_connections(config.min_connections.try_into()?) 
diff --git a/grpc-ingest/src/prom.rs b/grpc-ingest/src/prom.rs index e70e14671..a82022796 100644 --- a/grpc-ingest/src/prom.rs +++ b/grpc-ingest/src/prom.rs @@ -1,5 +1,6 @@ use { crate::{redis::RedisStreamMessageError, version::VERSION as VERSION_INFO}, + das_bubblegum::ProofReport, das_core::MetadataJsonTaskError, hyper::{ server::conn::AddrStream, @@ -72,6 +73,31 @@ lazy_static::lazy_static! { Opts::new("grpc_tasks", "Number of tasks spawned for writing grpc messages to redis "), &[] ).unwrap(); + + static ref BUBBLEGUM_TREE_TOTAL_LEAVES: IntGaugeVec = IntGaugeVec::new( + Opts::new("bubblegum_tree_total_leaves", "Total number of leaves in the bubblegum tree"), + &["tree"] + ).unwrap(); + + static ref BUBBLEGUM_TREE_INCORRECT_PROOFS: IntGaugeVec = IntGaugeVec::new( + Opts::new("bubblegum_tree_incorrect_proofs", "Number of incorrect proofs in the bubblegum tree"), + &["tree"] + ).unwrap(); + + static ref BUBBLEGUM_TREE_NOT_FOUND_PROOFS: IntGaugeVec = IntGaugeVec::new( + Opts::new("bubblegum_tree_not_found_proofs", "Number of not found proofs in the bubblegum tree"), + &["tree"] + ).unwrap(); + + static ref BUBBLEGUM_TREE_CORRECT_PROOFS: IntGaugeVec = IntGaugeVec::new( + Opts::new("bubblegum_tree_correct_proofs", "Number of correct proofs in the bubblegum tree"), + &["tree"] + ).unwrap(); + + static ref BUBBLEGUM_TREE_CORRUPT_PROOFS: IntGaugeVec = IntGaugeVec::new( + Opts::new("bubblegum_tree_corrupt_proofs", "Number of corrupt proofs in the bubblegum tree"), + &["tree"] + ).unwrap(); } pub fn run_server(address: SocketAddr) -> anyhow::Result<()> { @@ -96,6 +122,11 @@ pub fn run_server(address: SocketAddr) -> anyhow::Result<()> { register!(INGEST_TASKS); register!(ACK_TASKS); register!(GRPC_TASKS); + register!(BUBBLEGUM_TREE_TOTAL_LEAVES); + register!(BUBBLEGUM_TREE_INCORRECT_PROOFS); + register!(BUBBLEGUM_TREE_NOT_FOUND_PROOFS); + register!(BUBBLEGUM_TREE_CORRECT_PROOFS); + register!(BUBBLEGUM_TREE_CORRUPT_PROOFS); VERSION_INFO_METRIC .with_label_values(&[ @@ -319,3 +350,25 @@ pub fn program_transformer_task_status_inc(kind: ProgramTransformerTaskStatusKin .with_label_values(&[kind.to_str()]) .inc() } + +pub fn update_tree_proof_report(report: &ProofReport) { + BUBBLEGUM_TREE_TOTAL_LEAVES + .with_label_values(&[&report.tree_pubkey.to_string()]) + .set(report.total_leaves as i64); + + BUBBLEGUM_TREE_INCORRECT_PROOFS + .with_label_values(&[&report.tree_pubkey.to_string()]) + .set(report.incorrect_proofs as i64); + + BUBBLEGUM_TREE_NOT_FOUND_PROOFS + .with_label_values(&[&report.tree_pubkey.to_string()]) + .set(report.not_found_proofs as i64); + + BUBBLEGUM_TREE_CORRECT_PROOFS + .with_label_values(&[&report.tree_pubkey.to_string()]) + .set(report.correct_proofs as i64); + + BUBBLEGUM_TREE_CORRUPT_PROOFS + .with_label_values(&[&report.tree_pubkey.to_string()]) + .set(report.corrupt_proofs as i64); +} diff --git a/ops/Cargo.toml b/ops/Cargo.toml index d0a3077ea..14f741870 100644 --- a/ops/Cargo.toml +++ b/ops/Cargo.toml @@ -36,5 +36,6 @@ spl-account-compression = { workspace = true } sqlx = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true } +tracing = { workspace = true } mpl-token-metadata = { workspace = true } -serde_json = { workspace = true } \ No newline at end of file +serde_json = { workspace = true } diff --git a/ops/src/bubblegum/verify.rs b/ops/src/bubblegum/verify.rs index aa9a362b4..947d2b3ad 100644 --- a/ops/src/bubblegum/verify.rs +++ b/ops/src/bubblegum/verify.rs @@ -2,6 +2,7 @@ use anyhow::Result; use clap::Parser; use 
das_bubblegum::{verify_bubblegum, BubblegumContext, VerifyArgs}; use das_core::{connect_db, PoolArgs, Rpc, SolanaRpcArgs}; +use tracing::info; #[derive(Debug, Parser, Clone)] pub struct Args { @@ -24,7 +25,18 @@ pub async fn run(config: Args) -> Result<()> { let solana_rpc = Rpc::from_config(&config.solana); let context = BubblegumContext::new(database_pool, solana_rpc); - verify_bubblegum(context, config.verify_bubblegum).await?; + let reports = verify_bubblegum(context, config.verify_bubblegum).await?; + + for report in reports { + info!( + "Tree: {}, Total Leaves: {}, Incorrect Proofs: {}, Not Found Proofs: {}, Correct Proofs: {}", + report.tree_pubkey, + report.total_leaves, + report.incorrect_proofs, + report.not_found_proofs, + report.correct_proofs + ); + } Ok(()) }
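
Note on the proof check introduced above: verify::check walks every minted leaf of a tree, fetches its proof from the index via get_proof_for_asset, and recomputes the Merkle root by Keccak256-hashing the running node with each entry of proof_vec, using bit i of the leaf index to decide whether the node is the left or the right operand at level i; the proof counts as correct only when the result equals current_root. Below is a minimal standalone sketch of that recomputation, assuming only the sha3 crate the patch adds to the workspace; the Proof struct and main function here are illustrative stand-ins, not the patch's ProveLeafArgs.

// Standalone sketch of the root recomputation used by verify_merkle_proof
// in bubblegum/src/verify.rs. Requires sha3 = "0.10" in Cargo.toml.
use sha3::{Digest, Keccak256};

// Keccak256 over the concatenation of two nodes, as in the patch's hash().
fn hash(left: &[u8], right: &[u8]) -> [u8; 32] {
    let mut hasher = Keccak256::new();
    hasher.update(left);
    hasher.update(right);
    let digest = hasher.finalize();
    let mut out = [0u8; 32];
    out.copy_from_slice(&digest);
    out
}

// Illustrative stand-in for ProveLeafArgs: root, leaf, sibling path, leaf index.
struct Proof {
    root: [u8; 32],
    leaf: [u8; 32],
    siblings: Vec<[u8; 32]>,
    index: u32,
}

fn verify(proof: &Proof) -> bool {
    let mut node = proof.leaf;
    for (level, sibling) in proof.siblings.iter().enumerate() {
        // Bit `level` of the leaf index says whether the running node is a
        // left child (0) or a right child (1) at that level of the tree.
        node = if (proof.index >> level) & 1 == 0 {
            hash(&node, sibling)
        } else {
            hash(sibling, &node)
        };
    }
    node == proof.root
}

fn main() {
    // Depth-1 tree: the root is simply hash(leaf, sibling) for leaf index 0.
    let leaf = [1u8; 32];
    let sibling = [2u8; 32];
    let proof = Proof {
        root: hash(&leaf, &sibling),
        leaf,
        siblings: vec![sibling],
        index: 0,
    };
    assert!(verify(&proof));
    println!("proof verified");
}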