diff --git a/.github/workflows/go.yaml b/.github/workflows/go.yaml index 3fa5a8cf0..061c6d3a8 100644 --- a/.github/workflows/go.yaml +++ b/.github/workflows/go.yaml @@ -7,7 +7,7 @@ on: push: branches: ["main"] pull_request: - branches: ["main"] + branches: ["main", "reorg-support"] # TODO: to be removed before merge to main jobs: build: diff --git a/CHANGELOG.md b/CHANGELOG.md index 1b725bce6..3eef40867 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ All notable changes to this project will be documented in this file. The format ## Table of Contents - [Unreleased](#unreleased) +- [1.3.13](#1313---2024-12-05) - [1.3.12](#1312---2024-12-05) - [1.3.2](#132---2024-10-30) - [1.3.0](#130---2024-08-21) @@ -20,6 +21,11 @@ All notable changes to this project will be documented in this file. The format ## [Unreleased] +## [1.3.13] - 2024-12-05 + +### Added +- [Reorg Support](https://bitcoin-sv.github.io/arc/#/?id=chain-reorg) - adapting ARC to handle chain reorganisations. Whenever a reorg happens, ARC will update the block info for each affected transaction and rebroadcast those that are not seen in the new longest chain. + ## [1.3.12] - 2024-12-05 ### Changed diff --git a/README.md b/README.md index 1ee820436..0075a89b9 100644 --- a/README.md +++ b/README.md @@ -185,6 +185,7 @@ The following statuses are available: | `SEEN_IN_ORPHAN_MEMPOOL` | The transaction has been sent to at least 1 Bitcoin node but parent transaction was not found. | | `SEEN_ON_NETWORK` | The transaction has been seen on the Bitcoin network and propagated to other nodes. This status is set when metamorph receives an INV message for the transaction from another node than it was sent to. | | `DOUBLE_SPEND_ATTEMPTED` | The transaction is a double spend attempt. Competing transaction(s) will be returned with this status. | +| `MINED_IN_STALE_BLOCK` | The transaction has been mined into a block that became stale after a chain reorganisation (reorg). | | `REJECTED` | The transaction has been rejected by the Bitcoin network. | | `MINED` | The transaction has been mined into a block by a mining node. | diff --git a/cmd/arc/services/metamorph.go b/cmd/arc/services/metamorph.go index 1e7831e23..d14f2dfea 100644 --- a/cmd/arc/services/metamorph.go +++ b/cmd/arc/services/metamorph.go @@ -47,7 +47,7 @@ func StartMetamorph(logger *slog.Logger, arcConfig *config.ArcConfig, cacheStore metamorphStore store.MetamorphStore peerHandler *metamorph.PeerHandler pm metamorph.PeerManager - statusMessageCh chan *metamorph.PeerTxMessage + statusMessageCh chan *metamorph.TxStatusMessage mqClient metamorph.MessageQueueClient processor *metamorph.Processor server *metamorph.Server @@ -196,7 +196,6 @@ func StartMetamorph(logger *slog.Logger, arcConfig *config.ArcConfig, cacheStore server, err = metamorph.NewServer(arcConfig.PrometheusEndpoint, arcConfig.GrpcMessageSize, logger, metamorphStore, processor, arcConfig.Tracing, optsServer...)
- if err != nil { stopFn() return nil, fmt.Errorf("create GRPCServer failed: %v", err) @@ -273,7 +272,7 @@ func NewMetamorphStore(dbConfig *config.DbConfig, tracingConfig *config.TracingC return s, err } -func initPeerManager(logger *slog.Logger, s store.MetamorphStore, arcConfig *config.ArcConfig) (p2p.PeerManagerI, *metamorph.PeerHandler, chan *metamorph.PeerTxMessage, error) { +func initPeerManager(logger *slog.Logger, s store.MetamorphStore, arcConfig *config.ArcConfig) (p2p.PeerManagerI, *metamorph.PeerHandler, chan *metamorph.TxStatusMessage, error) { network, err := config.GetNetwork(arcConfig.Network) if err != nil { return nil, nil, nil, fmt.Errorf("failed to get network: %v", err) @@ -281,7 +280,7 @@ func initPeerManager(logger *slog.Logger, s store.MetamorphStore, arcConfig *con logger.Info("Assuming bitcoin network", "network", network) - messageCh := make(chan *metamorph.PeerTxMessage, 10000) + messageCh := make(chan *metamorph.TxStatusMessage, 10000) var pmOpts []p2p.PeerManagerOptions if arcConfig.Metamorph.MonitorPeers { pmOpts = append(pmOpts, p2p.WithRestartUnhealthyPeers()) diff --git a/doc/README.md b/doc/README.md index 002cb94dc..125b1541e 100644 --- a/doc/README.md +++ b/doc/README.md @@ -41,6 +41,7 @@ stateDiagram-v2 state SEEN_ON_NETWORK state REJECTED state MINED + state MINED_IN_STALE_BLOCK [*] --> UNKNOWN UNKNOWN --> ERROR: Transaction validation failed @@ -61,6 +62,7 @@ stateDiagram-v2 SEEN_ON_NETWORK --> DOUBLE_SPEND_ATTEMPTED: A competing transactions entered the mempool DOUBLE_SPEND_ATTEMPTED --> MINED: This transaction was accepted and mined DOUBLE_SPEND_ATTEMPTED --> REJECTED: This transaction was rejected in favor\n of one of the competing transactions + MINED --> MINED_IN_STALE_BLOCK: This transaction was mined in a block that became stale after reorg MINED --> [*] ``` @@ -113,7 +115,7 @@ The Callbacker handles request retries and treats any HTTP status code outside t ### BlockTx -BlockTx is a microservice that is responsible for processing blocks mined on the Bitcoin network, and for propagating +BlockTx is a microservice that is responsible for processing blocks mined on the Bitcoin network, handling chain reorganisations ([reorgs](#chain-reorg)) and for propagating the status of transactions to each Metamorph that has subscribed to this service. The main purpose of BlockTx is to de-duplicate processing of (large) blocks. As an incoming block is processed by BlockTx, Metamorph is notified about mined transactions by means of a message queue. BlockTx does not store the transaction data, but instead stores only the transaction IDs and the block height in which they were mined. Metamorph is responsible for storing the transaction data. @@ -514,15 +516,32 @@ Expected outcome * ARC responds with status `ANNOUNCED_TO_NETWORK` or `SEEN_ON_NETWORK` * At latest a couple of minutes later the status will switch to `SEEN_ON_NETWORK` -### Block reorg +### Chain reorg -A block reorg happens and two different blocks get announced at the same block height +A chain reorganization (chain reorg) occurs when the blockchain switches its primary chain of blocks to a different branch, typically because a previously stale chain (fork) has accumulated more chainwork than the current longest chain. Chainwork, which represents the cumulative computational effort used to build a chain, is the deciding factor in determining the longest and most valid chain. This process can impact transaction statuses based on their presence in the affected chains. 
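To make the chainwork rule above concrete, the sketch below is a minimal illustration, not ARC's implementation, and the function name is hypothetical. It compares two cumulative chainwork values kept as decimal strings (as in the test fixtures later in this diff) and reports whether the competing chain should take over as the longest chain.

```go
package main

import (
	"fmt"
	"math/big"
)

// isNewChainLongest reports whether a competing chain should replace the
// current longest chain, i.e. whether its cumulative chainwork is strictly
// greater. Chainwork values are decimal strings, as in the fixtures below.
func isNewChainLongest(currentChainwork, competingChainwork string) (bool, error) {
	current, ok := new(big.Int).SetString(currentChainwork, 10)
	if !ok {
		return false, fmt.Errorf("invalid chainwork %q", currentChainwork)
	}
	competing, ok := new(big.Int).SetString(competingChainwork, 10)
	if !ok {
		return false, fmt.Errorf("invalid chainwork %q", competingChainwork)
	}
	// Cmp returns +1 only when competing > current; equal chainwork does not trigger a reorg.
	return competing.Cmp(current) > 0, nil
}

func main() {
	// Example values taken from the integration-test fixtures further down in this diff.
	reorg, err := isNewChainLongest("62209952899966", "12301577519373468")
	if err != nil {
		panic(err)
	}
	fmt.Println("competing chain becomes the longest:", reorg) // true
}
```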
-Expected outcome: -* ARC does not update the transaction statuses according to the block in the longest chain. -* Information and Merkle path of the block received first will be persisted in the transaction record and not overwritten +#### Transaction in both chains +This is the most common scenario. If a transaction is included in both the original longest chain and the stale chain (which becomes the longest), the transition to the new longest chain does not disrupt the transaction's confirmation status. + +A new `MINED` callback will be sent for that transaction with updated block data (block hash, block height, Merkle path). + +#### Transaction in the previously longest chain, but not in the stale chain that becomes longest +When a transaction exists in the original longest chain but not in the stale chain that subsequently becomes the longest, the transaction is effectively removed from the confirmed state of the blockchain. + +A callback with status `MINED_IN_STALE_BLOCK` will be sent for that transaction and ARC will rebroadcast it. Because of that process, the transaction may cycle through statuses, such as `SEEN_ON_NETWORK`, again. Users should keep in mind that the status `MINED_IN_STALE_BLOCK` **is not final**. + +#### Transaction in the stale chain only (without reorg) +When a transaction is present only in the stale chain and not in the original longest chain, it remains unconfirmed, in the `MINED_IN_STALE_BLOCK` state, until the stale chain becomes the longest or the transaction is found in the longest chain. + +A callback with status `MINED_IN_STALE_BLOCK` will be sent for that transaction and ARC will rebroadcast it. Because of that process, the transaction may cycle through statuses, such as `SEEN_ON_NETWORK`, again. Users should keep in mind that the status `MINED_IN_STALE_BLOCK` **is not final**. + +#### Summary table +| Transaction Scenario | Callback status after reorg | Extra info / Action | +|--------------------------------------|-----------------------------|-------------------------------------------------------------------------------------------------------------| +| In both chains | `MINED` | Block data (hash, height, Merkle path) will be updated in the callback | +| In longest chain, not in stale chain | `MINED_IN_STALE_BLOCK` | Transaction will be rebroadcast and cycle through statuses again until it is found in the longest chain | +| In stale chain only (no reorg) | `MINED_IN_STALE_BLOCK` | Transaction will be rebroadcast and cycle through statuses again until it is found in the longest chain | -The planned feature [Update of transactions in case of block reorgs](https://github.com/bitcoin-sv/arc/blob/main/ROADMAP.md#update-of-transactions-in-case-of-block-reorgs) will ensure that ARC updates the statuses of transactions. Transactions which are not in the block of the longest chain will be updated to `REJECTED` status and transactions which are included in the block of the longest chain are updated to `MINED` status.
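From a callback consumer's point of view, the scenarios above reduce to handling two statuses. The following sketch is a hypothetical receiver, not part of ARC; the struct fields are illustrative stand-ins for the block data mentioned above (block hash, block height, Merkle path), and a real receiver would unmarshal the JSON body that ARC posts to the registered callback URL.

```go
package main

import "log"

// txCallback is an illustrative subset of the data a callback receiver cares about.
type txCallback struct {
	TxID        string
	TxStatus    string // e.g. MINED, MINED_IN_STALE_BLOCK, SEEN_ON_NETWORK
	BlockHash   string
	BlockHeight uint64
	MerklePath  string
}

func handleCallback(cb txCallback) {
	switch cb.TxStatus {
	case "MINED":
		// After a reorg a second MINED callback can arrive for the same
		// transaction with different block data, so overwrite the stored
		// block hash, height and Merkle path rather than ignoring duplicates.
		log.Printf("tx %s mined in block %s at height %d", cb.TxID, cb.BlockHash, cb.BlockHeight)
	case "MINED_IN_STALE_BLOCK":
		// Not a final status: ARC rebroadcasts the transaction, so further
		// callbacks (e.g. SEEN_ON_NETWORK and eventually MINED) should follow.
		log.Printf("tx %s is in a stale block; awaiting rebroadcast result", cb.TxID)
	default:
		log.Printf("tx %s reported status %s", cb.TxID, cb.TxStatus)
	}
}

func main() {
	handleCallback(txCallback{TxID: "example-txid", TxStatus: "MINED_IN_STALE_BLOCK"})
}
```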
## Cumulative fees validation diff --git a/internal/blocktx/blockchain_communication/block_message.go b/internal/blocktx/blockchain_communication/block_message.go index 70902f4b8..1df43f095 100644 --- a/internal/blocktx/blockchain_communication/block_message.go +++ b/internal/blocktx/blockchain_communication/block_message.go @@ -9,6 +9,7 @@ import ( // BlockMessage only stores the transaction IDs of the block, not the full transactions type BlockMessage struct { + Hash *chainhash.Hash Header *wire.BlockHeader Height uint64 TransactionHashes []*chainhash.Hash diff --git a/internal/blocktx/blockchain_communication/p2p/init.go b/internal/blocktx/blockchain_communication/p2p/init.go index f7741b0fd..11c7f8490 100644 --- a/internal/blocktx/blockchain_communication/p2p/init.go +++ b/internal/blocktx/blockchain_communication/p2p/init.go @@ -60,6 +60,8 @@ func init() { } blockMessage.Size = uint64(bytesRead) + blockHash := blockMessage.Header.BlockHash() + blockMessage.Hash = &blockHash return bytesRead, blockMessage, nil, nil }) diff --git a/internal/blocktx/blocktx_api/blocktx_api.pb.go b/internal/blocktx/blocktx_api/blocktx_api.pb.go index adf251b4b..553aef540 100644 --- a/internal/blocktx/blocktx_api/blocktx_api.pb.go +++ b/internal/blocktx/blocktx_api/blocktx_api.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.34.2 -// protoc v5.28.2 +// protoc v5.28.3 // source: internal/blocktx/blocktx_api/blocktx_api.proto package blocktx_api @@ -150,10 +150,9 @@ type Block struct { PreviousHash []byte `protobuf:"bytes,2,opt,name=previous_hash,json=previousHash,proto3" json:"previous_hash,omitempty"` // Little endian MerkleRoot []byte `protobuf:"bytes,3,opt,name=merkle_root,json=merkleRoot,proto3" json:"merkle_root,omitempty"` // Little endian Height uint64 `protobuf:"varint,4,opt,name=height,proto3" json:"height,omitempty"` - Orphaned bool `protobuf:"varint,5,opt,name=orphaned,proto3" json:"orphaned,omitempty"` - Processed bool `protobuf:"varint,6,opt,name=processed,proto3" json:"processed,omitempty"` - Status Status `protobuf:"varint,7,opt,name=status,proto3,enum=blocktx_api.Status" json:"status,omitempty"` - Chainwork string `protobuf:"bytes,8,opt,name=chainwork,proto3" json:"chainwork,omitempty"` + Processed bool `protobuf:"varint,5,opt,name=processed,proto3" json:"processed,omitempty"` + Status Status `protobuf:"varint,6,opt,name=status,proto3,enum=blocktx_api.Status" json:"status,omitempty"` + Chainwork string `protobuf:"bytes,7,opt,name=chainwork,proto3" json:"chainwork,omitempty"` } func (x *Block) Reset() { @@ -216,13 +215,6 @@ func (x *Block) GetHeight() uint64 { return 0 } -func (x *Block) GetOrphaned() bool { - if x != nil { - return x.Orphaned - } - return false -} - func (x *Block) GetProcessed() bool { if x != nil { return x.Processed @@ -301,6 +293,7 @@ type TransactionBlock struct { BlockHeight uint64 `protobuf:"varint,2,opt,name=block_height,json=blockHeight,proto3" json:"block_height,omitempty"` TransactionHash []byte `protobuf:"bytes,3,opt,name=transaction_hash,json=transactionHash,proto3" json:"transaction_hash,omitempty"` // Little endian MerklePath string `protobuf:"bytes,4,opt,name=merklePath,proto3" json:"merklePath,omitempty"` + BlockStatus Status `protobuf:"varint,5,opt,name=block_status,json=blockStatus,proto3,enum=blocktx_api.Status" json:"block_status,omitempty"` } func (x *TransactionBlock) Reset() { @@ -363,6 +356,13 @@ func (x *TransactionBlock) GetMerklePath() string { return "" } +func (x *TransactionBlock) 
GetBlockStatus() Status { + if x != nil { + return x.BlockStatus + } + return Status_UNKNOWN +} + type TransactionBlocks struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -776,118 +776,119 @@ var file_internal_blocktx_blocktx_api_blocktx_api_proto_rawDesc = []byte{ 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, - 0x70, 0x22, 0xfe, 0x01, 0x0a, 0x05, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x12, 0x0a, 0x04, 0x68, + 0x70, 0x22, 0xe2, 0x01, 0x0a, 0x05, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x48, 0x61, 0x73, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x1a, 0x0a, - 0x08, 0x6f, 0x72, 0x70, 0x68, 0x61, 0x6e, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, - 0x08, 0x6f, 0x72, 0x70, 0x68, 0x61, 0x6e, 0x65, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, 0x6f, - 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x70, 0x72, - 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x12, 0x2b, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x13, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, - 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x77, 0x6f, 0x72, - 0x6b, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x77, 0x6f, - 0x72, 0x6b, 0x22, 0x4c, 0x0a, 0x0c, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x12, 0x3c, 0x0a, 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, - 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x52, 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x22, 0x9f, 0x01, 0x0a, 0x10, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, - 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x62, 0x6c, 0x6f, 0x63, 0x6b, - 0x48, 0x61, 0x73, 0x68, 0x12, 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, - 0x69, 0x67, 0x68, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, - 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x29, 0x0a, 0x10, 0x74, 0x72, 0x61, 0x6e, 0x73, - 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x0f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x61, - 0x73, 0x68, 0x12, 0x1e, 0x0a, 0x0a, 0x6d, 0x65, 
0x72, 0x6b, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x50, 0x61, - 0x74, 0x68, 0x22, 0x61, 0x0a, 0x11, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x12, 0x4c, 0x0a, 0x12, 0x74, 0x72, 0x61, 0x6e, 0x73, - 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, - 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, - 0x63, 0x6b, 0x52, 0x11, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, - 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x22, 0x39, 0x0a, 0x0b, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x22, 0x31, 0x0a, 0x09, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, - 0x0d, 0x72, 0x65, 0x74, 0x65, 0x6e, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x79, 0x73, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x72, 0x65, 0x74, 0x65, 0x6e, 0x74, 0x69, 0x6f, 0x6e, 0x44, - 0x61, 0x79, 0x73, 0x22, 0x2a, 0x0a, 0x14, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, - 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x72, - 0x6f, 0x77, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x72, 0x6f, 0x77, 0x73, 0x22, - 0x48, 0x0a, 0x23, 0x44, 0x65, 0x6c, 0x55, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, - 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, - 0x73, 0x65, 0x64, 0x5f, 0x62, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x72, - 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x42, 0x79, 0x22, 0x63, 0x0a, 0x1d, 0x4d, 0x65, 0x72, - 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, - 0x72, 0x6b, 0x6c, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x62, - 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x04, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x22, 0x6f, - 0x0a, 0x1e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x56, 0x65, 0x72, - 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x12, 0x4d, 0x0a, 0x0c, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, - 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, - 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x52, 0x0b, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x22, - 0x5a, 0x0a, 0x1e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 
0x74, 0x56, 0x65, 0x72, - 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x12, 0x38, 0x0a, 0x18, 0x75, 0x6e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, - 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x04, 0x52, 0x16, 0x75, 0x6e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x42, - 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x73, 0x2a, 0x3b, 0x0a, 0x06, 0x53, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, - 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x4c, 0x4f, 0x4e, 0x47, 0x45, 0x53, 0x54, 0x10, 0x0a, 0x12, - 0x09, 0x0a, 0x05, 0x53, 0x54, 0x41, 0x4c, 0x45, 0x10, 0x14, 0x12, 0x0c, 0x0a, 0x08, 0x4f, 0x52, - 0x50, 0x48, 0x41, 0x4e, 0x45, 0x44, 0x10, 0x1e, 0x32, 0xad, 0x04, 0x0a, 0x0a, 0x42, 0x6c, 0x6f, - 0x63, 0x6b, 0x54, 0x78, 0x41, 0x50, 0x49, 0x12, 0x3f, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, 0x74, - 0x68, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, - 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x50, 0x0a, 0x11, 0x43, 0x6c, 0x65, 0x61, - 0x72, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x16, 0x2e, - 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, - 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, - 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4a, 0x0a, 0x0b, 0x43, 0x6c, - 0x65, 0x61, 0x72, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, + 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x1c, 0x0a, + 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x12, 0x2b, 0x0a, 0x06, 0x73, + 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x13, 0x2e, 0x62, 0x6c, + 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x68, 0x61, 0x69, + 0x6e, 0x77, 0x6f, 0x72, 0x6b, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x68, 0x61, + 0x69, 0x6e, 0x77, 0x6f, 0x72, 0x6b, 0x22, 0x4c, 0x0a, 0x0c, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x3c, 0x0a, 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, + 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xd7, 0x01, 0x0a, 0x10, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x6c, 0x6f, + 0x63, 0x6b, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x61, 0x73, 0x68, 0x12, 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, + 
0x6b, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, + 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x29, 0x0a, 0x10, 0x74, + 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x48, 0x61, 0x73, 0x68, 0x12, 0x1e, 0x0a, 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, + 0x50, 0x61, 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x72, 0x6b, + 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x36, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, + 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x13, 0x2e, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x61, + 0x0a, 0x11, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, + 0x63, 0x6b, 0x73, 0x12, 0x4c, 0x0a, 0x12, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x1d, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x11, + 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, + 0x73, 0x22, 0x39, 0x0a, 0x0b, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, + 0x68, 0x61, 0x73, 0x68, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0x31, 0x0a, 0x09, + 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x0d, 0x72, 0x65, 0x74, + 0x65, 0x6e, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x79, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, + 0x52, 0x0d, 0x72, 0x65, 0x74, 0x65, 0x6e, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x79, 0x73, 0x22, + 0x2a, 0x0a, 0x14, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x72, 0x6f, 0x77, 0x73, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x72, 0x6f, 0x77, 0x73, 0x22, 0x48, 0x0a, 0x23, 0x44, + 0x65, 0x6c, 0x55, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, + 0x6b, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x5f, + 0x62, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, + 0x73, 0x65, 0x64, 0x42, 0x79, 0x22, 0x63, 0x0a, 0x1d, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, + 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, + 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x72, + 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 
0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x22, 0x6f, 0x0a, 0x1e, 0x4d, 0x65, + 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4d, 0x0a, 0x0c, + 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, + 0x2e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, + 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x0b, + 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x22, 0x5a, 0x0a, 0x1e, 0x4d, + 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x38, 0x0a, + 0x18, 0x75, 0x6e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x04, 0x52, + 0x16, 0x75, 0x6e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, + 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x73, 0x2a, 0x3b, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0b, + 0x0a, 0x07, 0x4c, 0x4f, 0x4e, 0x47, 0x45, 0x53, 0x54, 0x10, 0x0a, 0x12, 0x09, 0x0a, 0x05, 0x53, + 0x54, 0x41, 0x4c, 0x45, 0x10, 0x14, 0x12, 0x0c, 0x0a, 0x08, 0x4f, 0x52, 0x50, 0x48, 0x41, 0x4e, + 0x45, 0x44, 0x10, 0x1e, 0x32, 0xad, 0x04, 0x0a, 0x0a, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x54, 0x78, + 0x41, 0x50, 0x49, 0x12, 0x3f, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x12, 0x16, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, + 0x61, 0x70, 0x69, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x00, 0x12, 0x50, 0x0a, 0x11, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x54, 0x72, 0x61, + 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x58, 0x0a, 0x19, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x42, - 0x6c, 0x6f, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x4d, 0x61, 0x70, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, - 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, - 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, - 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, - 0x12, 0x75, 0x0a, 0x1c, 0x44, 0x65, 0x6c, 0x55, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, - 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, - 0x12, 0x30, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x44, - 0x65, 0x6c, 0x55, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x73, 
0x68, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, - 0x6b, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, - 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x6f, 0x0a, 0x11, 0x56, 0x65, 0x72, 0x69, 0x66, - 0x79, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2b, 0x2e, 0x62, - 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, - 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, - 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, - 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x0f, 0x5a, 0x0d, 0x2e, 0x3b, 0x62, 0x6c, - 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x33, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4a, 0x0a, 0x0b, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, + 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, + 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, + 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x22, 0x00, 0x12, 0x58, 0x0a, 0x19, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x42, 0x6c, 0x6f, 0x63, 0x6b, + 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x4d, 0x61, 0x70, 0x12, + 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, + 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, + 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, + 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x75, 0x0a, 0x1c, + 0x44, 0x65, 0x6c, 0x55, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x6c, 0x6f, + 0x63, 0x6b, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x12, 0x30, 0x2e, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x44, 0x65, 0x6c, 0x55, 0x6e, + 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, + 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, + 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, + 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x00, 0x12, 0x6f, 0x0a, 0x11, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x4d, 0x65, 0x72, + 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, + 0x74, 0x73, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, + 0x61, 0x70, 0x69, 0x2e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 
0x6f, 0x74, 0x56, 0x65, + 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x00, 0x42, 0x0f, 0x5a, 0x0d, 0x2e, 0x3b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, + 0x78, 0x5f, 0x61, 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -925,25 +926,26 @@ var file_internal_blocktx_blocktx_api_blocktx_api_proto_depIdxs = []int32{ 13, // 0: blocktx_api.HealthResponse.timestamp:type_name -> google.protobuf.Timestamp 0, // 1: blocktx_api.Block.status:type_name -> blocktx_api.Status 6, // 2: blocktx_api.Transactions.transactions:type_name -> blocktx_api.Transaction - 4, // 3: blocktx_api.TransactionBlocks.transaction_blocks:type_name -> blocktx_api.TransactionBlock - 10, // 4: blocktx_api.MerkleRootsVerificationRequest.merkle_roots:type_name -> blocktx_api.MerkleRootVerificationRequest - 14, // 5: blocktx_api.BlockTxAPI.Health:input_type -> google.protobuf.Empty - 7, // 6: blocktx_api.BlockTxAPI.ClearTransactions:input_type -> blocktx_api.ClearData - 7, // 7: blocktx_api.BlockTxAPI.ClearBlocks:input_type -> blocktx_api.ClearData - 7, // 8: blocktx_api.BlockTxAPI.ClearBlockTransactionsMap:input_type -> blocktx_api.ClearData - 9, // 9: blocktx_api.BlockTxAPI.DelUnfinishedBlockProcessing:input_type -> blocktx_api.DelUnfinishedBlockProcessingRequest - 11, // 10: blocktx_api.BlockTxAPI.VerifyMerkleRoots:input_type -> blocktx_api.MerkleRootsVerificationRequest - 1, // 11: blocktx_api.BlockTxAPI.Health:output_type -> blocktx_api.HealthResponse - 8, // 12: blocktx_api.BlockTxAPI.ClearTransactions:output_type -> blocktx_api.RowsAffectedResponse - 8, // 13: blocktx_api.BlockTxAPI.ClearBlocks:output_type -> blocktx_api.RowsAffectedResponse - 8, // 14: blocktx_api.BlockTxAPI.ClearBlockTransactionsMap:output_type -> blocktx_api.RowsAffectedResponse - 8, // 15: blocktx_api.BlockTxAPI.DelUnfinishedBlockProcessing:output_type -> blocktx_api.RowsAffectedResponse - 12, // 16: blocktx_api.BlockTxAPI.VerifyMerkleRoots:output_type -> blocktx_api.MerkleRootVerificationResponse - 11, // [11:17] is the sub-list for method output_type - 5, // [5:11] is the sub-list for method input_type - 5, // [5:5] is the sub-list for extension type_name - 5, // [5:5] is the sub-list for extension extendee - 0, // [0:5] is the sub-list for field type_name + 0, // 3: blocktx_api.TransactionBlock.block_status:type_name -> blocktx_api.Status + 4, // 4: blocktx_api.TransactionBlocks.transaction_blocks:type_name -> blocktx_api.TransactionBlock + 10, // 5: blocktx_api.MerkleRootsVerificationRequest.merkle_roots:type_name -> blocktx_api.MerkleRootVerificationRequest + 14, // 6: blocktx_api.BlockTxAPI.Health:input_type -> google.protobuf.Empty + 7, // 7: blocktx_api.BlockTxAPI.ClearTransactions:input_type -> blocktx_api.ClearData + 7, // 8: blocktx_api.BlockTxAPI.ClearBlocks:input_type -> blocktx_api.ClearData + 7, // 9: blocktx_api.BlockTxAPI.ClearBlockTransactionsMap:input_type -> blocktx_api.ClearData + 9, // 10: blocktx_api.BlockTxAPI.DelUnfinishedBlockProcessing:input_type -> blocktx_api.DelUnfinishedBlockProcessingRequest + 11, // 11: blocktx_api.BlockTxAPI.VerifyMerkleRoots:input_type -> blocktx_api.MerkleRootsVerificationRequest + 1, // 12: blocktx_api.BlockTxAPI.Health:output_type -> blocktx_api.HealthResponse + 8, // 13: blocktx_api.BlockTxAPI.ClearTransactions:output_type -> blocktx_api.RowsAffectedResponse + 8, // 14: blocktx_api.BlockTxAPI.ClearBlocks:output_type -> blocktx_api.RowsAffectedResponse + 8, // 15: 
blocktx_api.BlockTxAPI.ClearBlockTransactionsMap:output_type -> blocktx_api.RowsAffectedResponse + 8, // 16: blocktx_api.BlockTxAPI.DelUnfinishedBlockProcessing:output_type -> blocktx_api.RowsAffectedResponse + 12, // 17: blocktx_api.BlockTxAPI.VerifyMerkleRoots:output_type -> blocktx_api.MerkleRootVerificationResponse + 12, // [12:18] is the sub-list for method output_type + 6, // [6:12] is the sub-list for method input_type + 6, // [6:6] is the sub-list for extension type_name + 6, // [6:6] is the sub-list for extension extendee + 0, // [0:6] is the sub-list for field type_name } func init() { file_internal_blocktx_blocktx_api_blocktx_api_proto_init() } diff --git a/internal/blocktx/blocktx_api/blocktx_api.proto b/internal/blocktx/blocktx_api/blocktx_api.proto index a8ccdce96..ab7ca5364 100644 --- a/internal/blocktx/blocktx_api/blocktx_api.proto +++ b/internal/blocktx/blocktx_api/blocktx_api.proto @@ -49,10 +49,9 @@ message Block { bytes previous_hash = 2; // Little endian bytes merkle_root = 3; // Little endian uint64 height = 4; - bool orphaned = 5; - bool processed = 6; - Status status = 7; - string chainwork = 8; + bool processed = 5; + Status status = 6; + string chainwork = 7; } // swagger:model Transactions @@ -65,6 +64,7 @@ message TransactionBlock { uint64 block_height = 2; bytes transaction_hash = 3; // Little endian string merklePath = 4; + Status block_status = 5; } message TransactionBlocks { diff --git a/internal/blocktx/blocktx_api/blocktx_api_grpc.pb.go b/internal/blocktx/blocktx_api/blocktx_api_grpc.pb.go index 8efd20a68..fdc58f99e 100644 --- a/internal/blocktx/blocktx_api/blocktx_api_grpc.pb.go +++ b/internal/blocktx/blocktx_api/blocktx_api_grpc.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. // versions: // - protoc-gen-go-grpc v1.5.1 -// - protoc v5.28.2 +// - protoc v5.28.3 // source: internal/blocktx/blocktx_api/blocktx_api.proto package blocktx_api diff --git a/internal/blocktx/integration_test/fixtures/reorg/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/reorg/blocktx.block_transactions_map.yaml new file mode 100644 index 000000000..54294aa74 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/reorg/blocktx.block_transactions_map.yaml @@ -0,0 +1,16 @@ +- blockid: 1002 + txid: 1000003 + merkle_path: merkle-path-3 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1999 # the same tx also in stale block + txid: 1000003 + merkle_path: merkle-path-999 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1999 # the same tx also in stale block + txid: 1000001 + merkle_path: merkle-path-999 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1004 + txid: 1000005 + merkle_path: merkle-path-5 + inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/blocktx.blocks.yaml b/internal/blocktx/integration_test/fixtures/reorg/blocktx.blocks.yaml similarity index 57% rename from internal/blocktx/integration_test/fixtures/blocktx.blocks.yaml rename to internal/blocktx/integration_test/fixtures/reorg/blocktx.blocks.yaml index 6c256dd42..58b3fddeb 100644 --- a/internal/blocktx/integration_test/fixtures/blocktx.blocks.yaml +++ b/internal/blocktx/integration_test/fixtures/reorg/blocktx.blocks.yaml @@ -1,5 +1,5 @@ - inserted_at: 2023-12-15 14:00:00 - id: 0 + id: 1001 hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000 prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 @@ 
-7,11 +7,11 @@ processed_at: 2023-12-15 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false status: 10 + is_longest: true chainwork: '62209952899966' - inserted_at: 2023-12-15 14:30:00 - id: 1 + id: 1002 hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 @@ -19,11 +19,23 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 10 + is_longest: true + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:30:00 + id: 1999 + hash: 0x82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3 + prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 20 # STALE - competing block + is_longest: false chainwork: '62209952899966' - inserted_at: 2023-12-15 14:40:00 - id: 2 + id: 1003 hash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 prevhash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 merkleroot: 0x4b58b0402a84012269b124f78c91a78a814eb3c9caa03f1df1d33172b23082d1 @@ -31,17 +43,30 @@ processed_at: 2023-12-15 14:40:00 size: 299650000 tx_count: 62162 - orphanedyn: false status: 10 + is_longest: true chainwork: '62209952899966' - inserted_at: 2023-12-15 14:50:00 - id: 3 + id: 1004 hash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 - prevhash: 0x5696fc6e504b6aa2ae5d9c46b9418192dc61bd1b2e3364030000000000000000 + prevhash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 height: 822017 size: 8630000 + processed_at: 2023-12-15 14:40:00 tx_count: 36724 - orphanedyn: false status: 10 + is_longest: true + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 10052 + hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + prevhash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 + merkleroot: 0xde0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9 + height: 822018 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false chainwork: '62209952899966' diff --git a/internal/blocktx/integration_test/fixtures/reorg/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/reorg/blocktx.transactions.yaml new file mode 100644 index 000000000..0b42d6d15 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/reorg/blocktx.transactions.yaml @@ -0,0 +1,20 @@ +- id: 1000001 + hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000002 + hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e + inserted_at: 2023-12-10 14:00:00 + is_registered: false +- id: 1000003 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000004 + hash: 0x2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000005 + hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 + inserted_at: 2023-12-10 14:00:00 + is_registered: true diff --git 
a/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.block_transactions_map.yaml new file mode 100644 index 000000000..e02912cfd --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.block_transactions_map.yaml @@ -0,0 +1,20 @@ +- blockid: 1002 + txid: 1000001 + merkle_path: merkle-path-1 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1002 + txid: 1000003 + merkle_path: merkle-path-3 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1004 + txid: 1000003 + merkle_path: merkle-path-3-competing + inserted_at: 2023-12-10 14:00:00 +- blockid: 1003 + txid: 1000004 + merkle_path: merkle-path-4 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1006 + txid: 1000005 + merkle_path: merkle-path-5 + inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.blocks.yaml b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.blocks.yaml new file mode 100644 index 000000000..3e18f16d3 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.blocks.yaml @@ -0,0 +1,138 @@ +- inserted_at: 2023-12-15 14:00:00 + id: 1001 + hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000 + prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 + merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 + height: 822014 + processed_at: 2023-12-15 14:10:00 + size: 86840000 + tx_count: 23477 + status: 10 + is_longest: true + chainwork: '62209952899966' + +- inserted_at: 2023-12-15 14:30:00 + id: 1002 + hash: 0x82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3 + prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 10 + is_longest: true + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:30:00 + id: 1003 + hash: 0x032c3688bc7536b2d787f3a196b1145a09bf33183cd1448ff6b1a9dfbb022db8 + prevhash: 0x82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822016 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 10 + is_longest: true + chainwork: '12301577519373468' + +- inserted_at: 2023-12-15 14:30:00 + id: 1004 + hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 + prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 20 # STALE + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:40:00 + id: 1005 + hash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 + prevhash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 + merkleroot: 0x4b58b0402a84012269b124f78c91a78a814eb3c9caa03f1df1d33172b23082d1 + height: 822016 + processed_at: 2023-12-15 14:40:00 + size: 299650000 + tx_count: 62162 + status: 20 # STALE + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1006 + hash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 + prevhash: 
0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822017 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 20 # STALE + is_longest: false + chainwork: '62209952899966' + +- inserted_at: 2023-12-15 14:50:00 + id: 1007 + hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + prevhash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 + merkleroot: 0xde0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9 + height: 822018 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1008 + hash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 + prevhash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + merkleroot: 0x51f33784f6d54f1d6414fa4d8b8d6904215cb16a3fa0a8b1fe02e456a90544d4 + height: 822019 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1009 + hash: 0x00000000000000000a5c4d27edc0178e953a5bb0ab0081e66cb30c8890484076 + prevhash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822020 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '12301481384759134' + +# gap + +- inserted_at: 2023-12-15 14:50:00 + id: 1010 + hash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c + prevhash: 0xd46bf0a189927b62c8ff785d393a545093ca01af159aed771a8d94749f06c060 + merkleroot: 0xda71199f8ed9203d8a765595e6c030a22e5ed8330b1abb467a82c97d7d21d512 + height: 822022 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1011 + hash: 0x0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd + prevhash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c + merkleroot: 0x8e3177a33d6a87785b7104f20ca345e1713ae11ec2723a41028efddabebb861b + height: 822023 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '12301481384759134' diff --git a/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.transactions.yaml new file mode 100644 index 000000000..0b42d6d15 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.transactions.yaml @@ -0,0 +1,20 @@ +- id: 1000001 + hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000002 + hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e + inserted_at: 2023-12-10 14:00:00 + is_registered: false +- id: 1000003 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000004 + hash: 0x2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000005 + hash: 
0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 + inserted_at: 2023-12-10 14:00:00 + is_registered: true diff --git a/internal/blocktx/integration_test/fixtures/stale_block/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.block_transactions_map.yaml new file mode 100644 index 000000000..317aaf634 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.block_transactions_map.yaml @@ -0,0 +1,4 @@ +- blockid: 1002 + txid: 1000003 + merkle_path: merkle-path-3 + inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/stale_block/blocktx.blocks.yaml b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.blocks.yaml new file mode 100644 index 000000000..4336826d4 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.blocks.yaml @@ -0,0 +1,24 @@ +- inserted_at: 2023-12-15 14:00:00 + id: 1001 + hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000 + prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 + merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 + height: 822014 + processed_at: 2023-12-15 14:10:00 + size: 86840000 + tx_count: 23477 + status: 10 + is_longest: true + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:30:00 + id: 1002 + hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 + prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 10 + is_longest: true + chainwork: '62209952899966' diff --git a/internal/blocktx/integration_test/fixtures/stale_block/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.transactions.yaml new file mode 100644 index 000000000..423b9b5b2 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.transactions.yaml @@ -0,0 +1,12 @@ +- id: 1000001 + hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000002 + hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e + inserted_at: 2023-12-10 14:00:00 + is_registered: false +- id: 1000003 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + inserted_at: 2023-12-10 14:00:00 + is_registered: true diff --git a/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.block_transactions_map.yaml new file mode 100644 index 000000000..5ed5b4e2e --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.block_transactions_map.yaml @@ -0,0 +1,8 @@ +- blockid: 1002 + txid: 1000003 + merkle_path: merkle-path-3 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1005 + txid: 1000005 + merkle_path: merkle-path-5 + inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.blocks.yaml b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.blocks.yaml new file mode 100644 index 000000000..f3021dd39 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.blocks.yaml @@ -0,0 +1,111 @@ +- inserted_at: 2023-12-15 14:30:00 + id: 1002 + hash: 
0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 + prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 10 + is_longest: true + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:40:00 + id: 1003 + hash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 + prevhash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 + merkleroot: 0x4b58b0402a84012269b124f78c91a78a814eb3c9caa03f1df1d33172b23082d1 + height: 822016 + processed_at: 2023-12-15 14:40:00 + size: 299650000 + tx_count: 62162 + status: 10 + is_longest: true + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:40:00 + id: 1004 + hash: 0x00000000000000000643d48201cf609b8cc50befe804194f19a7ec61cf046239 + prevhash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 + merkleroot: 0x4b58b0402a84012269b124f78c91a78a814eb3c9caa03f1df1d33172b23082d1 + height: 822017 + processed_at: 2023-12-15 14:40:00 + size: 299650000 + tx_count: 62162 + status: 10 + is_longest: true + chainwork: '12301577519373468' # Higher chainwork +- inserted_at: 2023-12-15 14:50:00 + id: 1005 + hash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 + prevhash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822017 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 20 # STALE + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1006 + hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + prevhash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 + merkleroot: 0xde0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9 + height: 822018 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1007 + hash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 + prevhash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + merkleroot: 0x51f33784f6d54f1d6414fa4d8b8d6904215cb16a3fa0a8b1fe02e456a90544d4 + height: 822019 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1008 + hash: 0x00000000000000000a5c4d27edc0178e953a5bb0ab0081e66cb30c8890484076 + prevhash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822020 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' + +# gap + +- inserted_at: 2023-12-15 14:50:00 + id: 1009 + hash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c + prevhash: 0xd46bf0a189927b62c8ff785d393a545093ca01af159aed771a8d94749f06c060 + merkleroot: 0xda71199f8ed9203d8a765595e6c030a22e5ed8330b1abb467a82c97d7d21d512 + height: 822022 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1010 + hash: 
0x0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd + prevhash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c + merkleroot: 0x8e3177a33d6a87785b7104f20ca345e1713ae11ec2723a41028efddabebb861b + height: 822023 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '12301481384759134' diff --git a/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.transactions.yaml new file mode 100644 index 000000000..9989bb806 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.transactions.yaml @@ -0,0 +1,12 @@ +- id: 1000001 + hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000003 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000005 + hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 + inserted_at: 2023-12-10 14:00:00 + is_registered: true diff --git a/internal/blocktx/integration_test/helpers.go b/internal/blocktx/integration_test/helpers.go index c151f09af..9ce3fff08 100644 --- a/internal/blocktx/integration_test/helpers.go +++ b/internal/blocktx/integration_test/helpers.go @@ -1,12 +1,102 @@ package integrationtest import ( + "context" "database/sql" + "log/slog" + "os" "testing" + "github.com/bitcoin-sv/arc/internal/blocktx" + blockchain "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication" + blocktx_p2p "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication/p2p" + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/bitcoin-sv/arc/internal/blocktx/store/postgresql" + "github.com/bitcoin-sv/arc/internal/message_queue/nats/client/nats_core" + nats_mock "github.com/bitcoin-sv/arc/internal/message_queue/nats/client/nats_core/mocks" testutils "github.com/bitcoin-sv/arc/internal/test_utils" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/proto" ) +func setupSut(t *testing.T, dbInfo string) (*blocktx.Processor, *blocktx_p2p.MsgHandler, *postgresql.PostgreSQL, chan *blocktx_api.TransactionBlock) { + t.Helper() + + logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug})) + + blockProcessCh := make(chan *blockchain.BlockMessage, 10) + + publishedTxsCh := make(chan *blocktx_api.TransactionBlock, 10) + + store, err := postgresql.New(dbInfo, 10, 80) + require.NoError(t, err) + + mockNatsConn := &nats_mock.NatsConnectionMock{ + PublishFunc: func(_ string, data []byte) error { + serialized := &blocktx_api.TransactionBlock{} + err := proto.Unmarshal(data, serialized) + require.NoError(t, err) + + publishedTxsCh <- serialized + return nil + }, + } + mqClient := nats_core.New(mockNatsConn, nats_core.WithLogger(logger)) + + p2pMsgHandler := blocktx_p2p.NewMsgHandler(logger, nil, blockProcessCh) + processor, err := blocktx.NewProcessor( + logger, + store, + nil, + blockProcessCh, + blocktx.WithMessageQueueClient(mqClient), + ) + require.NoError(t, err) + + return processor, p2pMsgHandler, store, publishedTxsCh +} + +func getPublishedTxs(publishedTxsCh chan *blocktx_api.TransactionBlock) []*blocktx_api.TransactionBlock { + publishedTxs := make([]*blocktx_api.TransactionBlock, 0) + + for { + select { + case tx := <-publishedTxsCh: + publishedTxs = append(publishedTxs, tx) + 
default: + return publishedTxs + } + } +} + func pruneTables(t *testing.T, db *sql.DB) { + t.Helper() testutils.PruneTables(t, db, "blocktx.blocks") + testutils.PruneTables(t, db, "blocktx.transactions") + testutils.PruneTables(t, db, "blocktx.block_transactions_map") +} + +func verifyBlock(t *testing.T, store *postgresql.PostgreSQL, hashStr string, height uint64, status blocktx_api.Status) { + t.Helper() + hash := testutils.RevChainhash(t, hashStr) + block, err := store.GetBlock(context.Background(), hash) + require.NoError(t, err) + require.Equal(t, height, block.Height) + require.Equal(t, status, block.Status) +} + +func verifyTxs(t *testing.T, expectedTxs []*blocktx_api.TransactionBlock, publishedTxs []*blocktx_api.TransactionBlock) { + t.Helper() + + strippedTxs := make([]*blocktx_api.TransactionBlock, len(publishedTxs)) + for i, tx := range publishedTxs { + strippedTxs[i] = &blocktx_api.TransactionBlock{ + BlockHash: tx.BlockHash, + BlockHeight: tx.BlockHeight, + TransactionHash: tx.TransactionHash, + BlockStatus: tx.BlockStatus, + } + } + + require.ElementsMatch(t, expectedTxs, strippedTxs) } diff --git a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go index 073014ed7..425547e8a 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ b/internal/blocktx/integration_test/reorg_integration_test.go @@ -1,52 +1,53 @@ package integrationtest // Components of this test: -// Postgresql Store - running on docker // Blocktx Processor +// Postgresql Store - running on docker // PeerHandler - mocked +// Message queue sending txs to metamorph - mocked // // Flow of this test: -// 1. A list of blocks from height 822014 to 822017 is added to db from fixtures +// 1. Blocks at heights 822014-822017 (LONGEST), 822018-822020 (ORPHANED) and 822022-822023 (ORPHANED) are added to db from fixtures // 2. A hardcoded msg with competing block at height 822015 is being sent through the mocked PeerHandler // 3. This block has a chainwork lower than the current tip of chain - becomes STALE -// 4. Next competing block, at height 822016 is being send through the mocked PeerHandler -// 5. This block has a greater chainwork than the current tip of longest chain - it becomes LONGEST despite not being the highest -// -// Todo: Next tasks: -// - Verify if reorg was performed correctly, if previous blocks have updated statuses -// - Include metamorph in this test and verify that transactions statuses are properly updated +// 4. Registered transactions from this block are ignored +// 5. Next competing block, at height 822016 is being sent through the mocked PeerHandler +// 6. This block has a greater chainwork than the current tip of longest chain - it becomes LONGEST despite not being the highest +// 7. Verification of reorg - checking if statuses are correctly switched +// 8. Verification of transactions +// - transactions from the stale chain becoming the longest are published +// - transactions that were previously in the longest chain are published with updated block data +// - transactions that were previously in the longest chain, but are not in the stale chain are published with blockstatus = STALE +// 9. A new block at height 822021 is being sent through the mocked PeerHandler +// 10. This block is extending the orphaned chain and finds that it's connected to the stale chain - orphans get updated to STALE +// 11. 
The new stale chain does not have a greater chainwork than the current longest chain - entire orphaned chain becomes STALE +// 12. A new block at height 822024 is being sent through the mocked PeerHandler +// 13. This block extends the orphaned chain and finds that it's connected to the stale chain - orphans get updated to STALE +// 14. The new stale chain has a greater chainwork than the current longest chain +// - entire STALE chain at heights 822015 - 822024 becomes LONGEST +// - entire LONGEST chain at height 822015 - 822016 becomes STALE +// 15. Verification of reorg - checking if statuses are correctly switched (for blocks and for transactions) import ( - "context" "database/sql" "log" - "log/slog" "os" "testing" "time" + blockchain "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication" + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + testutils "github.com/bitcoin-sv/arc/internal/test_utils" _ "github.com/golang-migrate/migrate/v4/source/file" _ "github.com/lib/pq" + "github.com/libsv/go-bc" "github.com/libsv/go-p2p/chaincfg/chainhash" "github.com/libsv/go-p2p/wire" "github.com/ory/dockertest/v3" "github.com/stretchr/testify/require" - - "github.com/bitcoin-sv/arc/internal/blocktx" - blockchain "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication" - blocktx_p2p "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication/p2p" - "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" - "github.com/bitcoin-sv/arc/internal/blocktx/store/postgresql" - testutils "github.com/bitcoin-sv/arc/internal/test_utils" ) -const ( - postgresPort = "5432" - migrationsPath = "file://../store/postgresql/migrations" - dbName = "main_test" - dbUsername = "arcuser" - dbPassword = "arcpass" -) +const migrationsPath = "file://../store/postgresql/migrations" var ( dbInfo string @@ -88,143 +89,353 @@ func testmain(m *testing.M) int { return m.Run() } -func TestBlockStatus(t *testing.T) { +func TestReorg(t *testing.T) { if testing.Short() { t.Skip("skipping integration test") } - defer pruneTables(t, dbConn) - - logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug})) - - var blockRequestCh chan blocktx_p2p.BlockRequest = nil // nolint: revive - blockProcessCh := make(chan *blockchain.BlockMessage, 10) + t.Run("block on empty database", func(t *testing.T) { + defer pruneTables(t, dbConn) + + processor, p2pMsgHandler, store, _ := setupSut(t, dbInfo) + + const blockHash822011 = "bf9be09b345cc2d904b59951cc8a2ed452d8d143e2e25cde64058270fb3a667a" + + blockHash := testutils.RevChainhash(t, blockHash822011) + prevBlockHash := testutils.RevChainhash(t, "00000000000000000a00c377b260a3219b0c314763f486bc363df7aa7e22ad72") + txHash, err := chainhash.NewHashFromStr("be181e91217d5f802f695e52144078f8dfbe51b8a815c3d6fb48c0d853ec683b") + require.NoError(t, err) + merkleRoot, err := chainhash.NewHashFromStr("be181e91217d5f802f695e52144078f8dfbe51b8a815c3d6fb48c0d853ec683b") + require.NoError(t, err) + + // should become LONGEST + blockMessage := &blockchain.BlockMessage{ + Hash: blockHash, + Header: &wire.BlockHeader{ + Version: 541065216, + PrevBlock: *prevBlockHash, // NON-existent in the db + MerkleRoot: *merkleRoot, + Bits: 0x1d00ffff, + }, + Height: uint64(822011), + TransactionHashes: []*chainhash.Hash{txHash}, + } - blocktxStore, err := postgresql.New(dbInfo, 10, 80) - require.NoError(t, err) + processor.StartBlockProcessing() + p2pMsgHandler.OnReceive(blockMessage, nil) + + // Allow DB to process the block + time.Sleep(200 * 
time.Millisecond) + + verifyBlock(t, store, blockHash822011, 822011, blocktx_api.Status_LONGEST) + }) + + t.Run("stale block", func(t *testing.T) { + defer pruneTables(t, dbConn) + testutils.LoadFixtures(t, dbConn, "fixtures/stale_block") + + processor, p2pMsgHandler, store, publishedTxsCh := setupSut(t, dbInfo) + + const ( + blockHash822014StartOfChain = "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000" + blockHash822015 = "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000" + blockHash822015Fork = "82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3" + txhash822015 = "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853" + txhash822015Competing = "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430" + ) + + blockHash := testutils.RevChainhash(t, blockHash822015Fork) + prevBlockHash := testutils.RevChainhash(t, blockHash822014StartOfChain) + txHash := testutils.RevChainhash(t, txhash822015) + txHash2 := testutils.RevChainhash(t, txhash822015Competing) // should not be published - is already in the longest chain + treeStore := bc.BuildMerkleTreeStoreChainHash([]*chainhash.Hash{txHash, txHash2}) + merkleRoot := treeStore[len(treeStore)-1] + + // should become STALE + blockMessage := &blockchain.BlockMessage{ + Hash: blockHash, + Header: &wire.BlockHeader{ + Version: 541065216, + PrevBlock: *prevBlockHash, // block with status LONGEST at height 822014 + MerkleRoot: *merkleRoot, + Bits: 0x1d00ffff, // chainwork: "4295032833" lower than the competing block + }, + Height: uint64(822015), // competing block already exists at this height + TransactionHashes: []*chainhash.Hash{txHash, txHash2}, + } - p2pMsgHandler := blocktx_p2p.NewMsgHandler(logger, blockRequestCh, blockProcessCh) - processor, err := blocktx.NewProcessor(logger, blocktxStore, blockRequestCh, blockProcessCh) - require.NoError(t, err) + processor.StartBlockProcessing() + p2pMsgHandler.OnReceive(blockMessage, nil) + + // Allow DB to process the block + time.Sleep(200 * time.Millisecond) + + verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822015, 822015, blocktx_api.Status_LONGEST) + + publishedTxs := getPublishedTxs(publishedTxsCh) + + // verify the no transaction was published to metamorph + require.Len(t, publishedTxs, 0) + }) + + t.Run("reorg", func(t *testing.T) { + defer pruneTables(t, dbConn) + testutils.LoadFixtures(t, dbConn, "fixtures/reorg") + + processor, p2pMsgHandler, store, publishedTxsCh := setupSut(t, dbInfo) + + const ( + blockHash822015Fork = "82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3" + blockHash822016Fork = "032c3688bc7536b2d787f3a196b1145a09bf33183cd1448ff6b1a9dfbb022db8" + + blockHash822014StartOfChain = "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000" + blockHash822015 = "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000" + blockHash822016 = "e1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000" + blockHash822017 = "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000" + blockHash822018Orphan = "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde" + + txhash822015 = "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853" + txhash822015Competing = "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430" + txhash822016 = "2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f" + txhash822017 = "ece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6" + 
) + + blockHash := testutils.RevChainhash(t, blockHash822016Fork) + txHash := testutils.RevChainhash(t, txhash822016) + txHash2 := testutils.RevChainhash(t, "ee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa") + treeStore := bc.BuildMerkleTreeStoreChainHash([]*chainhash.Hash{txHash, txHash2}) + merkleRoot := treeStore[len(treeStore)-1] + prevhash := testutils.RevChainhash(t, blockHash822015Fork) + + // should become LONGEST + // reorg should happen + blockMessage := &blockchain.BlockMessage{ + Hash: blockHash, + Header: &wire.BlockHeader{ + Version: 541065216, + PrevBlock: *prevhash, // block with status STALE at height 822015 + MerkleRoot: *merkleRoot, + Bits: 0x1a05db8b, // chainwork: "12301577519373468" higher than the competing chain + }, + Height: uint64(822016), // competing block already exists at this height + TransactionHashes: []*chainhash.Hash{txHash, txHash2}, + } - processor.StartBlockProcessing() + processor.StartBlockProcessing() + p2pMsgHandler.OnReceive(blockMessage, nil) + + // Allow DB to process the block and perform reorg + time.Sleep(1 * time.Second) + + // verify that reorg happened + verifyBlock(t, store, blockHash822016Fork, 822016, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_LONGEST) + + verifyBlock(t, store, blockHash822014StartOfChain, 822014, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822015, 822015, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822016, 822016, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822017, 822017, blocktx_api.Status_STALE) + + verifyBlock(t, store, blockHash822018Orphan, 822018, blocktx_api.Status_ORPHANED) + + previouslyLongestBlockHash := testutils.RevChainhash(t, blockHash822017) + + expectedTxs := []*blocktx_api.TransactionBlock{ + { // previously in stale chain + BlockHash: prevhash[:], + BlockHeight: 822015, + TransactionHash: testutils.RevChainhash(t, txhash822015)[:], + BlockStatus: blocktx_api.Status_LONGEST, + }, + { // previously in longest chain - also in stale - should have blockdata updated + BlockHash: prevhash[:], + BlockHeight: 822015, + TransactionHash: testutils.RevChainhash(t, txhash822015Competing)[:], + BlockStatus: blocktx_api.Status_LONGEST, + }, + { // newly mined from stale block that became longest after reorg + BlockHash: blockHash[:], + BlockHeight: 822016, + TransactionHash: txHash[:], + BlockStatus: blocktx_api.Status_LONGEST, + }, + { // previously longest chain - not found in the new longest chain + BlockHash: previouslyLongestBlockHash[:], + BlockHeight: 822017, + TransactionHash: testutils.RevChainhash(t, txhash822017)[:], + BlockStatus: blocktx_api.Status_STALE, + }, + } - // test for empty database edge case before inserting fixtures - prevBlockHash := testutils.RevChainhash(t, "00000000000000000a00c377b260a3219b0c314763f486bc363df7aa7e22ad72") - txHash, err := chainhash.NewHashFromStr("be181e91217d5f802f695e52144078f8dfbe51b8a815c3d6fb48c0d853ec683b") - require.NoError(t, err) - merkleRoot, err := chainhash.NewHashFromStr("be181e91217d5f802f695e52144078f8dfbe51b8a815c3d6fb48c0d853ec683b") - require.NoError(t, err) + publishedTxs := getPublishedTxs(publishedTxsCh) + + verifyTxs(t, expectedTxs, publishedTxs) + }) + + t.Run("stale orphans", func(t *testing.T) { + defer pruneTables(t, dbConn) + testutils.LoadFixtures(t, dbConn, "fixtures/stale_orphans") + + processor, p2pMsgHandler, store, publishedTxsCh := setupSut(t, dbInfo) + + const ( + blockHash822017Longest = 
"00000000000000000643d48201cf609b8cc50befe804194f19a7ec61cf046239" + blockHash822017Stale = "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000" + blockHash822018Orphan = "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde" + blockHash822019Orphan = "00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656" + blockHash822020Orphan = "00000000000000000a5c4d27edc0178e953a5bb0ab0081e66cb30c8890484076" + blockHash822021 = "d46bf0a189927b62c8ff785d393a545093ca01af159aed771a8d94749f06c060" + blockHash822022Orphan = "0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c" + blockHash822023Orphan = "0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd" + ) + + blockHash := testutils.RevChainhash(t, blockHash822021) + txHash := testutils.RevChainhash(t, "de0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9") + merkleRoot := testutils.RevChainhash(t, "de0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9") + prevhash := testutils.RevChainhash(t, blockHash822020Orphan) + + // should become STALE + blockMessage := &blockchain.BlockMessage{ + Hash: blockHash, + Header: &wire.BlockHeader{ + Version: 541065216, + PrevBlock: *prevhash, // block with status ORPHANED at height 822020 - connected to STALE chain + MerkleRoot: *merkleRoot, + Bits: 0x1d00ffff, // chainwork: "4295032833" lower than the competing chain + }, + Height: uint64(822021), + TransactionHashes: []*chainhash.Hash{txHash}, + } - // should become LONGEST - blockMessage := &blockchain.BlockMessage{ - Header: &wire.BlockHeader{ - Version: 541065216, - PrevBlock: *prevBlockHash, // NON-existent in the db - MerkleRoot: *merkleRoot, - Bits: 0x1d00ffff, - }, - Height: uint64(822011), - TransactionHashes: []*chainhash.Hash{txHash}, - } + processor.StartBlockProcessing() + p2pMsgHandler.OnReceive(blockMessage, nil) + // Allow DB to process the block and find orphans + time.Sleep(1 * time.Second) + + // verify that the block and orphans have STALE status + verifyBlock(t, store, blockHash822017Stale, 822017, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822018Orphan, 822018, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822019Orphan, 822019, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822020Orphan, 822020, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822021, 822021, blocktx_api.Status_STALE) + + // verify that the longest chain is still the same + verifyBlock(t, store, blockHash822017Longest, 822017, blocktx_api.Status_LONGEST) + + // verify that the blocks after the next gap are still orphans + verifyBlock(t, store, blockHash822022Orphan, 822022, blocktx_api.Status_ORPHANED) + verifyBlock(t, store, blockHash822023Orphan, 822023, blocktx_api.Status_ORPHANED) + + publishedTxs := getPublishedTxs(publishedTxsCh) + + // verify no transaction was published + require.Len(t, publishedTxs, 0) + }) + + t.Run("reorg orphans", func(t *testing.T) { + defer pruneTables(t, dbConn) + testutils.LoadFixtures(t, dbConn, "fixtures/reorg_orphans") + + processor, p2pMsgHandler, store, publishedTxsCh := setupSut(t, dbInfo) + + const ( + blockHash822014StartOfChain = "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000" + blockHash822015 = "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000" + blockHash822016 = "e1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000" + blockHash822017 = "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000" + + blockHash822015Fork = 
"82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3" + blockHash822016Fork = "032c3688bc7536b2d787f3a196b1145a09bf33183cd1448ff6b1a9dfbb022db8" + + blockHash822018Orphan = "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde" + blockHash822019Orphan = "00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656" + blockHash822020Orphan = "00000000000000000a5c4d27edc0178e953a5bb0ab0081e66cb30c8890484076" + blockHash822021 = "d46bf0a189927b62c8ff785d393a545093ca01af159aed771a8d94749f06c060" + blockHash822022Orphan = "0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c" + blockHash822023Orphan = "0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd" + + txhash822015 = "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853" + txhash822015Competing = "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430" + txhash822016 = "2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f" + txhash822017 = "ece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6" + ) + + blockHash := testutils.RevChainhash(t, blockHash822021) + prevhash := testutils.RevChainhash(t, blockHash822020Orphan) + txHash := testutils.RevChainhash(t, "3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c") + merkleRoot := testutils.RevChainhash(t, "3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c") + + // should become LONGEST + // reorg should happen + blockMessage := &blockchain.BlockMessage{ + Hash: blockHash, + Header: &wire.BlockHeader{ + Version: 541065216, + PrevBlock: *prevhash, // block with status ORPHANED at height 822020 - connected to STALE chain + MerkleRoot: *merkleRoot, + Bits: 0x1d00ffff, // chainwork: "4295032833" lower than the competing chain + // the sum of orphan chain has a higher chainwork and should cause a reorg + }, + Height: uint64(822021), + TransactionHashes: []*chainhash.Hash{txHash}, + } - p2pMsgHandler.OnReceive(blockMessage, nil) - - // Allow DB to process the block - time.Sleep(200 * time.Millisecond) - - blockHashZero := blockMessage.Header.BlockHash() - - block, err := blocktxStore.GetBlock(context.Background(), &blockHashZero) - require.NoError(t, err) - require.Equal(t, uint64(822011), block.Height) - require.Equal(t, blocktx_api.Status_LONGEST, block.Status) - - // only load fixtures at this point - testutils.LoadFixtures(t, dbConn, "fixtures") - - prevBlockHash = testutils.RevChainhash(t, "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000") - txHash, err = chainhash.NewHashFromStr("be181e91217d5f802f695e52144078f8dfbe51b8a815c3d6fb48c0d853ec683b") - require.NoError(t, err) - merkleRoot, err = chainhash.NewHashFromStr("be181e91217d5f802f695e52144078f8dfbe51b8a815c3d6fb48c0d853ec683b") - require.NoError(t, err) - - // should become STALE - blockMessage = &blockchain.BlockMessage{ - Header: &wire.BlockHeader{ - Version: 541065216, - PrevBlock: *prevBlockHash, // block with status LONGEST at height 822014 - MerkleRoot: *merkleRoot, - Bits: 0x1d00ffff, // chainwork: "4295032833" lower than the competing block - }, - Height: uint64(822015), // competing block already exists at this height - TransactionHashes: []*chainhash.Hash{txHash}, - } + processor.StartBlockProcessing() + p2pMsgHandler.OnReceive(blockMessage, nil) + // Allow DB to process the block, find orphans and perform reorg + time.Sleep(2 * time.Second) + + // verify that the reorg happened + verifyBlock(t, store, blockHash822014StartOfChain, 822014, blocktx_api.Status_LONGEST) + verifyBlock(t, 
store, blockHash822015, 822015, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822016, 822016, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822017, 822017, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822018Orphan, 822018, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822019Orphan, 822019, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822020Orphan, 822020, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822021, 822021, blocktx_api.Status_LONGEST) + + verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822016Fork, 822016, blocktx_api.Status_STALE) + + verifyBlock(t, store, blockHash822022Orphan, 822022, blocktx_api.Status_ORPHANED) + verifyBlock(t, store, blockHash822023Orphan, 822023, blocktx_api.Status_ORPHANED) + + bh822015 := testutils.RevChainhash(t, blockHash822015) + bh822015Fork := testutils.RevChainhash(t, blockHash822015Fork) + bh822016Fork := testutils.RevChainhash(t, blockHash822016Fork) + bh822017 := testutils.RevChainhash(t, blockHash822017) + + expectedTxs := []*blocktx_api.TransactionBlock{ + { // in stale chain + BlockHash: bh822015Fork[:], + BlockHeight: 822015, + TransactionHash: testutils.RevChainhash(t, txhash822015)[:], + BlockStatus: blocktx_api.Status_STALE, + }, + { // in both chains - should have blockdata updated + BlockHash: bh822015[:], + BlockHeight: 822015, + TransactionHash: testutils.RevChainhash(t, txhash822015Competing)[:], + BlockStatus: blocktx_api.Status_LONGEST, + }, + { // in stale chain + BlockHash: bh822016Fork[:], + BlockHeight: 822016, + TransactionHash: testutils.RevChainhash(t, txhash822016)[:], + BlockStatus: blocktx_api.Status_STALE, + }, + { // in now longest chain + BlockHash: bh822017[:], + BlockHeight: 822017, + TransactionHash: testutils.RevChainhash(t, txhash822017)[:], + BlockStatus: blocktx_api.Status_LONGEST, + }, + } - p2pMsgHandler.OnReceive(blockMessage, nil) - require.NoError(t, err) - // Allow DB to process the block - time.Sleep(200 * time.Millisecond) - - blockHashStale := blockMessage.Header.BlockHash() - - block, err = blocktxStore.GetBlock(context.Background(), &blockHashStale) - require.NoError(t, err) - require.Equal(t, uint64(822015), block.Height) - require.Equal(t, blocktx_api.Status_STALE, block.Status) - - // should become LONGEST - // reorg should happen - blockMessage = &blockchain.BlockMessage{ - Header: &wire.BlockHeader{ - Version: 541065216, - PrevBlock: blockHashStale, // block with status STALE at height 822015 - MerkleRoot: *merkleRoot, - Bits: 0x1a05db8b, // chainwork: "12301577519373468" higher than the competing block - }, - Height: uint64(822016), // competing block already exists at this height - TransactionHashes: []*chainhash.Hash{txHash}, - } + publishedTxs := getPublishedTxs(publishedTxsCh) - p2pMsgHandler.OnReceive(blockMessage, nil) - require.NoError(t, err) - // Allow DB to process the block and perform reorg - time.Sleep(1 * time.Second) - - // verify that reorg happened - blockHashLongest := blockMessage.Header.BlockHash() - - block, err = blocktxStore.GetBlock(context.Background(), &blockHashLongest) - require.NoError(t, err) - require.Equal(t, uint64(822016), block.Height) - require.Equal(t, blocktx_api.Status_LONGEST, block.Status) - - block, err = blocktxStore.GetBlock(context.Background(), &blockHashStale) - require.NoError(t, err) - require.Equal(t, uint64(822015), block.Height) - require.Equal(t, blocktx_api.Status_LONGEST, block.Status) - - 
previouslyLongestBlockHash := testutils.RevChainhash(t, "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000") - block, err = blocktxStore.GetBlock(context.Background(), previouslyLongestBlockHash) - require.NoError(t, err) - require.Equal(t, uint64(822015), block.Height) - require.Equal(t, blocktx_api.Status_STALE, block.Status) - - previouslyLongestBlockHash = testutils.RevChainhash(t, "e1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000") - block, err = blocktxStore.GetBlock(context.Background(), previouslyLongestBlockHash) - require.NoError(t, err) - require.Equal(t, uint64(822016), block.Height) - require.Equal(t, blocktx_api.Status_STALE, block.Status) - - previouslyLongestBlockHash = testutils.RevChainhash(t, "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000") - block, err = blocktxStore.GetBlock(context.Background(), previouslyLongestBlockHash) - require.NoError(t, err) - require.Equal(t, uint64(822017), block.Height) - require.Equal(t, blocktx_api.Status_STALE, block.Status) - - beginningOfChain := testutils.RevChainhash(t, "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000") - block, err = blocktxStore.GetBlock(context.Background(), beginningOfChain) - require.NoError(t, err) - require.Equal(t, uint64(822014), block.Height) - require.Equal(t, blocktx_api.Status_LONGEST, block.Status) + verifyTxs(t, expectedTxs, publishedTxs) + }) } diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 217a825dd..5a85ee90c 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -6,7 +6,6 @@ import ( "errors" "fmt" "log/slog" - "math/big" "os" "sync" "time" @@ -29,7 +28,11 @@ var ( ErrFailedToSubscribeToTopic = errors.New("failed to subscribe to register topic") ErrFailedToCreateBUMP = errors.New("failed to create new bump for tx hash from merkle tree and index") ErrFailedToGetStringFromBUMPHex = errors.New("failed to get string from bump for tx hash") + ErrFailedToParseBlockHash = errors.New("failed to parse block hash") ErrFailedToInsertBlockTransactions = errors.New("failed to insert block transactions") + ErrBlockAlreadyExists = errors.New("block already exists in the database") + ErrUnexpectedBlockStatus = errors.New("unexpected block status") + ErrFailedToProcessBlock = errors.New("failed to process block") ) const ( @@ -212,16 +215,27 @@ func (p *Processor) StartBlockProcessing() { case <-p.ctx.Done(): return case blockMsg := <-p.blockProcessCh: - blockhash := blockMsg.Header.BlockHash() + var err error + timeStart := time.Now() - defer p.stopBlockProcessGuard(&blockhash) // release guardian at the end + p.logger.Info("received block", slog.String("hash", blockMsg.Hash.String())) - p.logger.Info("received block", slog.String("hash", blockhash.String())) - err := p.processBlock(blockMsg) + err = p.processBlock(blockMsg) if err != nil { - p.logger.Error("block processing failed", slog.String("hash", blockhash.String()), slog.String("err", err.Error())) - p.unlockBlock(p.ctx, &blockhash) + p.logger.Error("block processing failed", slog.String("hash", blockMsg.Hash.String()), slog.String("err", err.Error())) + p.unlockBlock(p.ctx, blockMsg.Hash) + continue + } + + storeErr := p.store.MarkBlockAsDone(p.ctx, blockMsg.Hash, blockMsg.Size, uint64(len(blockMsg.TransactionHashes))) + if storeErr != nil { + p.logger.Error("unable to mark block as processed", slog.String("hash", blockMsg.Hash.String()), slog.String("err", storeErr.Error())) + p.unlockBlock(p.ctx, blockMsg.Hash) + continue } + + 
// add the total block processing time to the stats + p.logger.Info("Processed block", slog.String("hash", blockMsg.Hash.String()), slog.Int("txs", len(blockMsg.TransactionHashes)), slog.String("duration", time.Since(timeStart).String())) } } }() @@ -231,7 +245,7 @@ func (p *Processor) startBlockProcessGuard(ctx context.Context, hash *chainhash. p.waitGroup.Add(1) execCtx, stopFn := context.WithCancel(ctx) - p.processGuardsMap.Store(hash, stopFn) + p.processGuardsMap.Store(*hash, stopFn) go func() { defer p.waitGroup.Done() @@ -248,7 +262,7 @@ func (p *Processor) startBlockProcessGuard(ctx context.Context, hash *chainhash. // check if block was processed successfully block, _ := p.store.GetBlock(execCtx, hash) - if block != nil && block.Processed { + if block != nil { return // success } @@ -378,7 +392,12 @@ func (p *Processor) StartProcessRequestTxs() { } func (p *Processor) publishMinedTxs(txHashes []*chainhash.Hash) error { - minedTxs, err := p.store.GetMinedTransactions(p.ctx, txHashes) + hashesBytes := make([][]byte, len(txHashes)) + for i, h := range txHashes { + hashesBytes[i] = h[:] + } + + minedTxs, err := p.store.GetMinedTransactions(p.ctx, hashesBytes, false) if err != nil { return fmt.Errorf("failed to get mined transactions: %v", err) } @@ -389,6 +408,7 @@ func (p *Processor) publishMinedTxs(txHashes []*chainhash.Hash) error { BlockHash: minedTx.BlockHash, BlockHeight: minedTx.BlockHeight, MerklePath: minedTx.MerklePath, + BlockStatus: minedTx.BlockStatus, } err = p.mqClient.PublishMarshal(p.ctx, MinedTxsTopic, txBlock) } @@ -421,120 +441,156 @@ func (p *Processor) buildMerkleTreeStoreChainHash(ctx context.Context, txids []* return bc.BuildMerkleTreeStoreChainHash(txids) } -func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { +func (p *Processor) processBlock(blockMsg *blockchain.BlockMessage) (err error) { ctx := p.ctx - var blockHash chainhash.Hash + + var block *blocktx_api.Block + + // release guardian + defer p.stopBlockProcessGuard(blockMsg.Hash) + ctx, span := tracing.StartTracing(ctx, "processBlock", p.tracingEnabled, p.tracingAttributes...) 
defer func() { if span != nil { - span.SetAttributes(attribute.String("hash", blockHash.String())) + span.SetAttributes(attribute.String("hash", blockMsg.Hash.String())) + span.SetAttributes(attribute.String("status", block.Status.String())) } tracing.EndTracing(span, err) }() - timeStart := time.Now() + p.logger.Info("processing incoming block", slog.String("hash", blockMsg.Hash.String()), slog.Uint64("height", blockMsg.Height)) - blockHash = msg.Header.BlockHash() - previousBlockHash := msg.Header.PrevBlock - merkleRoot := msg.Header.MerkleRoot + // check if we've already processed that block + existingBlock, _ := p.store.GetBlock(ctx, blockMsg.Hash) - // don't process block that was already processed - existingBlock, _ := p.store.GetBlock(ctx, &blockHash) - if existingBlock != nil && existingBlock.Processed { + if existingBlock != nil { + p.logger.Warn("ignoring already existing block", slog.String("hash", blockMsg.Hash.String()), slog.Uint64("height", blockMsg.Height)) return nil } - prevBlock, err := p.getPrevBlock(ctx, &previousBlockHash) + block, err = p.verifyAndInsertBlock(ctx, blockMsg) if err != nil { - p.logger.Error("unable to get previous block from db", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("prevHash", previousBlockHash.String()), slog.String("err", err.Error())) return err } - longestTipExists := true - if prevBlock == nil { - // This check is only in case there's a fresh, empty database - // with no blocks, to mark the first block as the LONGEST chain - longestTipExists, err = p.longestTipExists(ctx) - if err != nil { - p.logger.Error("unable to verify the longest tip existence in db", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("err", err.Error())) - return err - } - } + var longestTxs, staleTxs []store.TransactionBlock + var ok bool - incomingBlock := createBlock(msg, prevBlock, longestTipExists) + switch block.Status { + case blocktx_api.Status_LONGEST: + longestTxs, ok = p.getRegisteredTransactions(ctx, []*blocktx_api.Block{block}) + case blocktx_api.Status_STALE: + longestTxs, staleTxs, ok = p.handleStaleBlock(ctx, block) + case blocktx_api.Status_ORPHANED: + longestTxs, staleTxs, ok = p.handleOrphans(ctx, block) + default: + return ErrUnexpectedBlockStatus + } - competing, err := p.competingChainsExist(ctx, incomingBlock) - if err != nil { - p.logger.Error("unable to check for competing chains", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("err", err.Error())) - return err + if !ok { + // error is already logged in each method above + return ErrFailedToProcessBlock } - if competing { - p.logger.Info("Competing blocks found", slog.String("incoming block hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height)) + p.publishTxsToMetamorph(ctx, longestTxs) + p.publishTxsToMetamorph(ctx, staleTxs) - hasGreatestChainwork, err := p.hasGreatestChainwork(ctx, incomingBlock) - if err != nil { - p.logger.Error("unable to get the chain tip to verify chainwork", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) - return err - } + return nil +} - // find competing chains back to the common ancestor - // get all registered transactions - // prepare msg with competing blocks - incomingBlock.Status = blocktx_api.Status_STALE +func (p *Processor) verifyAndInsertBlock(ctx context.Context, blockMsg *blockchain.BlockMessage) (incomingBlock *blocktx_api.Block, err error) { 
+ ctx, span := tracing.StartTracing(ctx, "verifyAndInsertBlock", p.tracingEnabled, p.tracingAttributes...) + defer func() { + tracing.EndTracing(span, err) + }() - if hasGreatestChainwork { - p.logger.Info("reorg detected - updating blocks", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height)) + previousBlockHash := blockMsg.Header.PrevBlock + merkleRoot := blockMsg.Header.MerkleRoot - incomingBlock.Status = blocktx_api.Status_LONGEST + incomingBlock = &blocktx_api.Block{ + Hash: blockMsg.Hash[:], + PreviousHash: previousBlockHash[:], + MerkleRoot: merkleRoot[:], + Height: blockMsg.Height, + Chainwork: calculateChainwork(blockMsg.Header.Bits).String(), + } - err = p.performReorg(ctx, incomingBlock) - if err != nil { - p.logger.Error("unable to perform reorg", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) - return err - } - } + err = p.assignBlockStatus(ctx, incomingBlock, previousBlockHash) + if err != nil { + p.logger.Error("unable to assign block status", slog.String("hash", blockMsg.Hash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + return nil, err } - p.logger.Info("Upserting block", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("status", incomingBlock.Status.String())) + p.logger.Info("Inserting block", slog.String("hash", blockMsg.Hash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("status", incomingBlock.Status.String())) - blockID, err := p.store.UpsertBlock(ctx, incomingBlock) + err = p.insertBlockAndStoreTransactions(ctx, incomingBlock, blockMsg.TransactionHashes, blockMsg.Header.MerkleRoot) if err != nil { - p.logger.Error("unable to upsert block at given height", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("err", err.Error())) - return err + p.logger.Error("unable to insert block and store its transactions", slog.String("hash", blockMsg.Hash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + return nil, err } - calculatedMerkleTree := p.buildMerkleTreeStoreChainHash(ctx, msg.TransactionHashes) + return incomingBlock, nil +} - if !merkleRoot.IsEqual(calculatedMerkleTree[len(calculatedMerkleTree)-1]) { - p.logger.Error("merkle root mismatch", slog.String("hash", blockHash.String())) - return err - } +func (p *Processor) assignBlockStatus(ctx context.Context, block *blocktx_api.Block, prevBlockHash chainhash.Hash) (err error) { + ctx, span := tracing.StartTracing(ctx, "assignBlockStatus", p.tracingEnabled, p.tracingAttributes...) 
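As a reference for the chainwork values that appear in the fixtures and test comments, the sketch below reproduces the standard derivation of chainwork from the header's difficulty bits, 2^256 / (target + 1), using only the standard library. It is an illustrative stand-in for the in-repo calculateChainwork (whose exact implementation may differ) and yields the values quoted above: 0x1d00ffff gives 4295032833 and 0x1a05db8b gives 12301577519373468.

```go
package main

import (
	"fmt"
	"math/big"
)

// compactToBig expands the compact "bits" encoding into the full 256-bit target.
func compactToBig(bits uint32) *big.Int {
	mantissa := int64(bits & 0x007fffff)
	exponent := uint(bits >> 24)

	target := big.NewInt(mantissa)
	if exponent <= 3 {
		return target.Rsh(target, 8*(3-exponent))
	}
	return target.Lsh(target, 8*(exponent-3))
}

// chainwork is the expected number of hashes needed to produce a block at
// this difficulty: 2^256 / (target + 1).
func chainwork(bits uint32) *big.Int {
	target := compactToBig(bits)
	work := new(big.Int).Lsh(big.NewInt(1), 256)
	return work.Div(work, target.Add(target, big.NewInt(1)))
}

func main() {
	fmt.Println(chainwork(0x1d00ffff)) // 4295032833
	fmt.Println(chainwork(0x1a05db8b)) // 12301577519373468
}
```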
+ defer func() { + tracing.EndTracing(span, err) + }() - if err = p.markTransactionsAsMined(ctx, blockID, calculatedMerkleTree, msg.Height, &blockHash); err != nil { - p.logger.Error("unable to mark block as mined", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) - return err - } + prevBlock, _ := p.store.GetBlock(ctx, &prevBlockHash) - if err = p.store.MarkBlockAsDone(ctx, &blockHash, msg.Size, uint64(len(msg.TransactionHashes))); err != nil { - p.logger.Error("unable to mark block as processed", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) - return err + if prevBlock == nil { + // This check is only in case there's a fresh, empty database + // with no blocks, to mark the first block as the LONGEST chain + var longestTipExists bool + longestTipExists, err = p.longestTipExists(ctx) + if err != nil { + p.logger.Error("unable to verify the longest tip existence in db", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return err + } + + // if there's no longest block in the + // database - mark this block as LONGEST + // otherwise - it's an orphan + if !longestTipExists { + block.Status = blocktx_api.Status_LONGEST + } else { + block.Status = blocktx_api.Status_ORPHANED + } + return nil } - // add the total block processing time to the stats - p.logger.Info("Processed block", slog.String("hash", blockHash.String()), slog.Int("txs", len(msg.TransactionHashes)), slog.String("duration", time.Since(timeStart).String())) + if prevBlock.Status == blocktx_api.Status_LONGEST { + var competingBlock *blocktx_api.Block + competingBlock, err = p.store.GetLongestBlockByHeight(ctx, block.Height) + if err != nil && !errors.Is(err, store.ErrBlockNotFound) { + p.logger.Error("unable to get the competing block from db", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return err + } - return nil -} + if competingBlock == nil { + block.Status = blocktx_api.Status_LONGEST + return nil + } -func (p *Processor) getPrevBlock(ctx context.Context, prevHash *chainhash.Hash) (*blocktx_api.Block, error) { - prevBlock, err := p.store.GetBlock(ctx, prevHash) - if err != nil && !errors.Is(err, store.ErrBlockNotFound) { - return nil, err + if bytes.Equal(block.Hash, competingBlock.Hash) { + // this means that another instance is already processing + // or have processed this block that we're processing here + // so we can throw an error and finish processing + err = ErrBlockAlreadyExists + return err + } + + block.Status = blocktx_api.Status_STALE + return nil } - return prevBlock, nil + // ORPHANED or STALE + block.Status = prevBlock.Status + + return nil } func (p *Processor) longestTipExists(ctx context.Context) (bool, error) { @@ -550,82 +606,56 @@ func (p *Processor) longestTipExists(ctx context.Context) (bool, error) { return true, nil } -func (p *Processor) competingChainsExist(ctx context.Context, block *blocktx_api.Block) (bool, error) { - if block.Status == blocktx_api.Status_ORPHANED { - return false, nil - } - - if block.Status == blocktx_api.Status_LONGEST { - competingBlock, err := p.store.GetBlockByHeight(ctx, block.Height, blocktx_api.Status_LONGEST) - if err != nil && !errors.Is(err, store.ErrBlockNotFound) { - return false, err - } - - if competingBlock != nil && !bytes.Equal(competingBlock.Hash, block.Hash) { - return true, nil - } - - return false, nil - } - - // If STALE status - return true, nil -} 
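The branching in assignBlockStatus reduces to a small decision table over the parent block's status and whether a LONGEST block already occupies the same height (plus the ErrBlockAlreadyExists short-circuit when that competitor is the incoming block itself). A condensed, self-contained sketch with plain-string statuses, purely for readability; the function and names are illustrative, not part of the diff:

```go
package main

import "fmt"

// assignStatus condenses the branching of assignBlockStatus into a pure
// decision table. Statuses are plain strings here; the real code uses
// blocktx_api.Status values and additionally returns ErrBlockAlreadyExists
// when the competing block at this height is the incoming block itself.
func assignStatus(parentFound bool, parentStatus string, longestTipExists, competitorAtHeight bool) string {
	switch {
	case !parentFound && !longestTipExists:
		return "LONGEST" // fresh, empty database: the first block starts the longest chain
	case !parentFound:
		return "ORPHANED" // parent unknown: the block cannot be connected yet
	case parentStatus == "LONGEST" && !competitorAtHeight:
		return "LONGEST" // extends the tip of the longest chain
	case parentStatus == "LONGEST":
		return "STALE" // a competing LONGEST block already occupies this height
	default:
		return parentStatus // STALE and ORPHANED chains are simply extended
	}
}

func main() {
	fmt.Println(assignStatus(false, "", false, false))       // LONGEST
	fmt.Println(assignStatus(true, "LONGEST", true, true))   // STALE
	fmt.Println(assignStatus(true, "ORPHANED", true, false)) // ORPHANED
}
```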
+func (p *Processor) getRegisteredTransactions(ctx context.Context, blocks []*blocktx_api.Block) (txsToPublish []store.TransactionBlock, ok bool) { + var err error + ctx, span := tracing.StartTracing(ctx, "getRegisteredTransactions", p.tracingEnabled, p.tracingAttributes...) + defer func() { + tracing.EndTracing(span, err) + }() -func (p *Processor) hasGreatestChainwork(ctx context.Context, incomingBlock *blocktx_api.Block) (bool, error) { - tip, err := p.store.GetChainTip(ctx) - if err != nil && !errors.Is(err, store.ErrBlockNotFound) { - return false, err + blockHashes := make([][]byte, len(blocks)) + for i, b := range blocks { + blockHashes[i] = b.Hash } - // this can happen only in case the blocks table is empty - if tip == nil { - return true, nil + txsToPublish, err = p.store.GetRegisteredTxsByBlockHashes(ctx, blockHashes) + if err != nil { + block := blocks[len(blocks)-1] + p.logger.Error("unable to get registered transactions", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return nil, false } - tipChainWork := new(big.Int) - tipChainWork.SetString(tip.Chainwork, 10) - - incomingBlockChainwork := new(big.Int) - incomingBlockChainwork.SetString(incomingBlock.Chainwork, 10) - - return tipChainWork.Cmp(incomingBlockChainwork) < 0, nil + return txsToPublish, true } -func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api.Block) error { - staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, incomingBlock.PreviousHash) - if err != nil { - return err - } +func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomingBlock *blocktx_api.Block, txHashes []*chainhash.Hash, merkleRoot chainhash.Hash) (err error) { + ctx, span := tracing.StartTracing(ctx, "insertBlockAndStoreTransactions", p.tracingEnabled, p.tracingAttributes...) 
+ defer func() { + tracing.EndTracing(span, err) + }() - lowestHeight := incomingBlock.Height - if len(staleBlocks) > 0 { - lowestHeight = getLowestHeight(staleBlocks) + calculatedMerkleTree := p.buildMerkleTreeStoreChainHash(ctx, txHashes) + if !merkleRoot.IsEqual(calculatedMerkleTree[len(calculatedMerkleTree)-1]) { + p.logger.Error("merkle root mismatch", slog.String("hash", getHashStringNoErr(incomingBlock.Hash))) + return err } - longestBlocks, err := p.store.GetLongestChainFromHeight(ctx, lowestHeight) + blockID, err := p.store.UpsertBlock(ctx, incomingBlock) if err != nil { + p.logger.Error("unable to insert block at given height", slog.String("hash", getHashStringNoErr(incomingBlock.Hash)), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) return err } - blockStatusUpdates := make([]store.BlockStatusUpdate, 0) - - for _, b := range staleBlocks { - update := store.BlockStatusUpdate{Hash: b.Hash, Status: blocktx_api.Status_LONGEST} - blockStatusUpdates = append(blockStatusUpdates, update) - } - - for _, b := range longestBlocks { - update := store.BlockStatusUpdate{Hash: b.Hash, Status: blocktx_api.Status_STALE} - blockStatusUpdates = append(blockStatusUpdates, update) + if err = p.storeTransactions(ctx, blockID, incomingBlock, calculatedMerkleTree); err != nil { + p.logger.Error("unable to store transactions from block", slog.String("hash", getHashStringNoErr(incomingBlock.Hash)), slog.String("err", err.Error())) + return err } - err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) - return err + return nil } -func (p *Processor) markTransactionsAsMined(ctx context.Context, blockID uint64, merkleTree []*chainhash.Hash, blockHeight uint64, blockhash *chainhash.Hash) (err error) { - ctx, span := tracing.StartTracing(ctx, "markTransactionsAsMined", p.tracingEnabled, p.tracingAttributes...) +func (p *Processor) storeTransactions(ctx context.Context, blockID uint64, block *blocktx_api.Block, merkleTree []*chainhash.Hash) (err error) { + ctx, span := tracing.StartTracing(ctx, "storeTransactions", p.tracingEnabled, p.tracingAttributes...) 
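The merkle root comparison in insertBlockAndStoreTransactions guards against storing transactions under a header they do not belong to: the tree rebuilt from the block's transaction hashes must hash up to the header's merkle root. A minimal sketch of the pairwise step that BuildMerkleTreeStoreChainHash is assumed to perform (double SHA-256 over the concatenated child hashes); the inputs here are hypothetical placeholders:

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// merkleParent combines two child hashes (in internal byte order) the way
// a Bitcoin merkle tree does: double SHA-256 over their concatenation.
// For a block with a single transaction the merkle root is simply that
// transaction's hash, which is why the single-tx test cases reuse the tx
// hash as the block's MerkleRoot.
func merkleParent(left, right [32]byte) [32]byte {
	first := sha256.Sum256(append(left[:], right[:]...))
	return sha256.Sum256(first[:])
}

func main() {
	// Hypothetical child hashes; in the two-transaction test blocks these
	// would be the two transaction hashes at the bottom of the tree.
	var left, right [32]byte
	copy(left[:], []byte("left-child-hash-placeholder-0000"))
	copy(right[:], []byte("right-child-hash-placeholder-000"))

	root := merkleParent(left, right)
	fmt.Println(hex.EncodeToString(root[:]))
}
```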
defer func() { tracing.EndTracing(span, err) }() @@ -633,6 +663,11 @@ func (p *Processor) markTransactionsAsMined(ctx context.Context, blockID uint64, txs := make([]store.TxWithMerklePath, 0, p.transactionStorageBatchSize) leaves := merkleTree[:(len(merkleTree)+1)/2] + blockhash, err := chainhash.NewHash(block.Hash) + if err != nil { + return errors.Join(ErrFailedToParseBlockHash, fmt.Errorf("block height: %d", block.Height), err) + } + var totalSize int for totalSize = 1; totalSize < len(leaves); totalSize++ { if leaves[totalSize] == nil { @@ -653,9 +688,9 @@ func (p *Processor) markTransactionsAsMined(ctx context.Context, blockID uint64, break } - bump, err := bc.NewBUMPFromMerkleTreeAndIndex(blockHeight, merkleTree, uint64(txIndex)) + bump, err := bc.NewBUMPFromMerkleTreeAndIndex(block.Height, merkleTree, uint64(txIndex)) // #nosec G115 if err != nil { - return errors.Join(ErrFailedToCreateBUMP, err) + return errors.Join(ErrFailedToCreateBUMP, fmt.Errorf("tx hash %s, block height: %d", hash.String(), block.Height), err) } bumpHex, err := bump.String() @@ -669,30 +704,17 @@ func (p *Processor) markTransactionsAsMined(ctx context.Context, blockID uint64, }) if (txIndex+1)%p.transactionStorageBatchSize == 0 { - updateResp, err := p.store.UpsertBlockTransactions(ctx, blockID, txs) + err := p.store.UpsertBlockTransactions(ctx, blockID, txs) if err != nil { return errors.Join(ErrFailedToInsertBlockTransactions, err) } // free up memory txs = txs[:0] - - for _, updResp := range updateResp { - txBlock := &blocktx_api.TransactionBlock{ - TransactionHash: updResp.Hash[:], - BlockHash: blockhash[:], - BlockHeight: blockHeight, - MerklePath: updResp.MerklePath, - } - err = p.mqClient.PublishMarshal(ctx, MinedTxsTopic, txBlock) - if err != nil { - p.logger.Error("failed to publish mined txs", slog.String("hash", blockhash.String()), slog.Uint64("height", blockHeight), slog.String("err", err.Error())) - } - } } if percentage, found := progress[txIndex+1]; found { if totalSize > 0 { - p.logger.Info(fmt.Sprintf("%d txs out of %d marked as mined", txIndex+1, totalSize), slog.Int("percentage", percentage), slog.String("hash", blockhash.String()), slog.Uint64("height", blockHeight), slog.String("duration", time.Since(now).String())) + p.logger.Info(fmt.Sprintf("%d txs out of %d stored", txIndex+1, totalSize), slog.Int("percentage", percentage), slog.String("hash", blockhash.String()), slog.Uint64("height", block.Height), slog.String("duration", time.Since(now).String())) } } } @@ -700,25 +722,227 @@ func (p *Processor) markTransactionsAsMined(ctx context.Context, blockID uint64, tracing.EndTracing(iterateMerkleTree, nil) // update all remaining transactions - updateResp, err := p.store.UpsertBlockTransactions(ctx, blockID, txs) + err = p.store.UpsertBlockTransactions(ctx, blockID, txs) + if err != nil { + return errors.Join(ErrFailedToInsertBlockTransactions, fmt.Errorf("block height: %d", block.Height), err) + } + + return nil +} + +func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Block) (longestTxs, staleTxs []store.TransactionBlock, ok bool) { + var err error + ctx, span := tracing.StartTracing(ctx, "handleStaleBlock", p.tracingEnabled, p.tracingAttributes...) 
+ defer func() { + tracing.EndTracing(span, err) + }() + + staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, block.Hash) + if err != nil { + p.logger.Error("unable to get STALE blocks to verify chainwork", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return nil, nil, false + } + + lowestHeight := block.Height + if len(staleBlocks) > 0 { + lowestHeight = staleBlocks[0].Height + } + + longestBlocks, err := p.store.GetLongestChainFromHeight(ctx, lowestHeight) if err != nil { - return errors.Join(ErrFailedToInsertBlockTransactions, fmt.Errorf("block height: %d", blockHeight), err) + p.logger.Error("unable to get LONGEST blocks to verify chainwork", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return nil, nil, false } - for _, updResp := range updateResp { + staleChainwork := sumChainwork(staleBlocks) + longestChainwork := sumChainwork(longestBlocks) + + if longestChainwork.Cmp(staleChainwork) < 0 { + p.logger.Info("chain reorg detected", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height)) + + longestTxs, staleTxs, err = p.performReorg(ctx, staleBlocks, longestBlocks) + if err != nil { + p.logger.Error("unable to perform reorg", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return nil, nil, false + } + return longestTxs, staleTxs, true + } + + return nil, nil, true +} + +func (p *Processor) performReorg(ctx context.Context, staleBlocks []*blocktx_api.Block, longestBlocks []*blocktx_api.Block) (longestTxs, staleTxs []store.TransactionBlock, err error) { + ctx, span := tracing.StartTracing(ctx, "performReorg", p.tracingEnabled, p.tracingAttributes...) + defer func() { + tracing.EndTracing(span, err) + }() + + staleHashes := make([][]byte, len(staleBlocks)) + longestHashes := make([][]byte, len(longestBlocks)) + + blockStatusUpdates := make([]store.BlockStatusUpdate, len(longestBlocks)+len(staleBlocks)) + + for i, b := range longestBlocks { + longestHashes[i] = b.Hash + + b.Status = blocktx_api.Status_STALE + update := store.BlockStatusUpdate{Hash: b.Hash, Status: b.Status} + blockStatusUpdates[i] = update + } + + for i, b := range staleBlocks { + staleHashes[i] = b.Hash + + b.Status = blocktx_api.Status_LONGEST + update := store.BlockStatusUpdate{Hash: b.Hash, Status: b.Status} + blockStatusUpdates[i+len(longestBlocks)] = update + } + + err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) + if err != nil { + return nil, nil, err + } + + p.logger.Info("reorg performed successfully") + + // now the previously stale chain is the longest, + // so longestTxs are from previously stale block hashes + longestTxs, err = p.store.GetRegisteredTxsByBlockHashes(ctx, staleHashes) + if err != nil { + return nil, nil, err + } + + // now the previously longest chain is stale, + // so staleTxs are from previously longest block hashes + staleTxs, err = p.store.GetRegisteredTxsByBlockHashes(ctx, longestHashes) + if err != nil { + return nil, nil, err + } + + staleTxs = exclusiveRightTxs(longestTxs, staleTxs) + + return longestTxs, staleTxs, nil +} + +func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) (longestTxs, staleTxs []store.TransactionBlock, ok bool) { + var err error + ctx, span := tracing.StartTracing(ctx, "handleOrphans", p.tracingEnabled, p.tracingAttributes...) 
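handleStaleBlock decides whether to reorg by summing chainwork over both competing chains from the fork height, rather than comparing single tips; that is what lets a shorter fork containing one high-difficulty block win, as exercised by the reorg test above. A self-contained sketch of that comparison; the block type is a stand-in and the particular combination of values (taken from the fixtures and test comments) is illustrative:

```go
package main

import (
	"fmt"
	"math/big"
)

// block is a minimal stand-in for blocktx_api.Block; only the chainwork
// string matters for the decision sketched here.
type block struct{ chainwork string }

// sumChainwork mirrors the helper of the same name in processor_helpers.go:
// chainwork strings are decimal big integers and are simply added up.
func sumChainwork(blocks []block) *big.Int {
	sum := big.NewInt(0)
	for _, b := range blocks {
		work := new(big.Int)
		work.SetString(b.chainwork, 10)
		sum.Add(sum, work)
	}
	return sum
}

func main() {
	// Values as they appear in the fixtures and test comments; the exact
	// combination below is illustrative.
	stale := []block{{"4295032833"}, {"12301577519373468"}}                        // short fork, one high-difficulty block
	longest := []block{{"62209952899966"}, {"62209952899966"}, {"62209952899966"}} // longer, low-difficulty chain

	// Same comparison as handleStaleBlock: reorg only if the stale chain
	// accumulated strictly more work than the competing longest chain.
	if sumChainwork(longest).Cmp(sumChainwork(stale)) < 0 {
		fmt.Println("reorg: STALE chain becomes LONGEST, previous LONGEST becomes STALE")
	} else {
		fmt.Println("no reorg: STALE chain stays STALE")
	}
}
```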
+ defer func() { + tracing.EndTracing(span, err) + }() + + orphans, ancestor, err := p.store.GetOrphansBackToNonOrphanAncestor(ctx, block.Hash) + if err != nil { + p.logger.Error("unable to get ORPHANED blocks", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return nil, nil, false + } + + if ancestor == nil || len(orphans) == 0 { + return nil, nil, true + } + + p.logger.Info("orphaned chain found", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("status", block.Status.String())) + + if ancestor.Status == blocktx_api.Status_STALE { + ok = p.acceptIntoChain(ctx, orphans, ancestor.Status) + if !ok { + return nil, nil, false + } + + block.Status = blocktx_api.Status_STALE + return p.handleStaleBlock(ctx, block) + } + + if ancestor.Status == blocktx_api.Status_LONGEST { + // If there is competing block at the height of + // the first orphan, then we need to mark them + // all as stale and recheck for reorg. + // + // If there's no competing block at the height + // of the first orphan, then we can assume that + // there's no competing chain at all. + + var competingBlock *blocktx_api.Block + competingBlock, err = p.store.GetLongestBlockByHeight(ctx, orphans[0].Height) + if err != nil && !errors.Is(err, store.ErrBlockNotFound) { + p.logger.Error("unable to get competing block when handling orphans", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return nil, nil, false + } + + if competingBlock != nil && !bytes.Equal(competingBlock.Hash, orphans[0].Hash) { + ok = p.acceptIntoChain(ctx, orphans, blocktx_api.Status_STALE) + if !ok { + return nil, nil, false + } + + block.Status = blocktx_api.Status_STALE + return p.handleStaleBlock(ctx, block) + } + + ok = p.acceptIntoChain(ctx, orphans, ancestor.Status) // LONGEST + if !ok { + return nil, nil, false + } + + p.logger.Info("orphaned chain accepted into LONGEST chain", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height)) + longestTxs, ok = p.getRegisteredTransactions(ctx, orphans) + return longestTxs, nil, ok + } + + return nil, nil, true +} + +func (p *Processor) acceptIntoChain(ctx context.Context, blocks []*blocktx_api.Block, chain blocktx_api.Status) (ok bool) { + var err error + ctx, span := tracing.StartTracing(ctx, "acceptIntoChain", p.tracingEnabled, p.tracingAttributes...) + defer func() { + tracing.EndTracing(span, err) + }() + + blockStatusUpdates := make([]store.BlockStatusUpdate, len(blocks)) + + for i, b := range blocks { + b.Status = chain + blockStatusUpdates[i] = store.BlockStatusUpdate{ + Hash: b.Hash, + Status: b.Status, + } + } + + tip := blocks[len(blocks)-1] + + err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) + if err != nil { + p.logger.Error("unable to accept blocks into chain", slog.String("hash", getHashStringNoErr(tip.Hash)), slog.Uint64("height", tip.Height), slog.String("chain", chain.String()), slog.String("err", err.Error())) + return false + } + + p.logger.Info("blocks successfully accepted into chain", slog.String("hash", getHashStringNoErr(tip.Hash)), slog.Uint64("height", tip.Height), slog.String("chain", chain.String())) + return true +} + +func (p *Processor) publishTxsToMetamorph(ctx context.Context, txs []store.TransactionBlock) { + var publishErr error + ctx, span := tracing.StartTracing(ctx, "publish transactions", p.tracingEnabled, p.tracingAttributes...) 
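handleOrphans walks an orphaned chain back to its first non-orphan ancestor and then decides which chain the orphans join; whenever they join as STALE, the result feeds back into the chainwork comparison in handleStaleBlock. A condensed sketch of that branching with simplified inputs; names and statuses are illustrative:

```go
package main

import "fmt"

// resolveOrphanChain condenses the branching of handleOrphans. The real code
// also publishes registered transactions when the orphans join the LONGEST
// chain, and re-runs handleStaleBlock whenever they join as STALE.
func resolveOrphanChain(ancestorStatus string, competitorAtFirstOrphanHeight bool) (joinAs string, recheckChainwork bool) {
	switch {
	case ancestorStatus == "STALE":
		// The orphans extend a stale fork: join it and let the chainwork
		// comparison decide whether a reorg follows.
		return "STALE", true
	case ancestorStatus == "LONGEST" && competitorAtFirstOrphanHeight:
		// A different block already occupies the first orphan's height,
		// so the whole orphaned chain becomes a stale fork.
		return "STALE", true
	case ancestorStatus == "LONGEST":
		// No competition: the orphans simply extend the longest chain.
		return "LONGEST", false
	default:
		// No known ancestor (or still orphaned): nothing to do yet.
		return "ORPHANED", false
	}
}

func main() {
	fmt.Println(resolveOrphanChain("STALE", false))   // STALE true
	fmt.Println(resolveOrphanChain("LONGEST", true))  // STALE true
	fmt.Println(resolveOrphanChain("LONGEST", false)) // LONGEST false
}
```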
+ defer func() { + tracing.EndTracing(span, publishErr) + }() + + for _, tx := range txs { txBlock := &blocktx_api.TransactionBlock{ - TransactionHash: updResp.Hash[:], - BlockHash: blockhash[:], - BlockHeight: blockHeight, - MerklePath: updResp.MerklePath, + BlockHash: tx.BlockHash, + BlockHeight: tx.BlockHeight, + TransactionHash: tx.TxHash, + MerklePath: tx.MerklePath, + BlockStatus: tx.BlockStatus, } - err = p.mqClient.PublishMarshal(ctx, MinedTxsTopic, txBlock) + + err := p.mqClient.PublishMarshal(ctx, MinedTxsTopic, txBlock) if err != nil { - p.logger.Error("failed to publish mined txs", slog.String("hash", blockhash.String()), slog.Uint64("height", blockHeight), slog.String("err", err.Error())) + p.logger.Error("failed to publish mined txs", slog.String("blockHash", getHashStringNoErr(tx.BlockHash)), slog.Uint64("height", tx.BlockHeight), slog.String("txHash", getHashStringNoErr(tx.TxHash)), slog.String("err", err.Error())) + publishErr = err } } - - return nil } func (p *Processor) Shutdown() { diff --git a/internal/blocktx/processor_helpers.go b/internal/blocktx/processor_helpers.go index 078972a04..bb339fd8c 100644 --- a/internal/blocktx/processor_helpers.go +++ b/internal/blocktx/processor_helpers.go @@ -4,50 +4,46 @@ import ( "math" "math/big" - blockchain "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/bitcoin-sv/arc/internal/blocktx/store" + "github.com/libsv/go-p2p/chaincfg/chainhash" ) -func createBlock(msg *blockchain.BlockMessage, prevBlock *blocktx_api.Block, longestTipExists bool) *blocktx_api.Block { - hash := msg.Header.BlockHash() - prevHash := msg.Header.PrevBlock - merkleRoot := msg.Header.MerkleRoot - chainwork := calculateChainwork(msg.Header.Bits) - - var status blocktx_api.Status - if prevBlock == nil { - if longestTipExists { - status = blocktx_api.Status_ORPHANED - } else { - status = blocktx_api.Status_LONGEST - } - } else { - status = prevBlock.Status +func getHashStringNoErr(hash []byte) string { + chash, err := chainhash.NewHash(hash) + if err != nil { + return "" } + return chash.String() +} + +func sumChainwork(blocks []*blocktx_api.Block) *big.Int { + sum := big.NewInt(0) + for _, b := range blocks { + chainwork := new(big.Int) + chainwork.SetString(b.Chainwork, 10) - return &blocktx_api.Block{ - Hash: hash[:], - PreviousHash: prevHash[:], - MerkleRoot: merkleRoot[:], - Height: msg.Height, - Status: status, - Chainwork: chainwork.String(), + sum = sum.Add(sum, chainwork) } + + return sum } -func getLowestHeight(blocks []*blocktx_api.Block) uint64 { - if len(blocks) == 0 { - return 0 +func exclusiveRightTxs(leftTxs, rightTxs []store.TransactionBlock) []store.TransactionBlock { + leftTxsMap := make(map[string]struct{}) + + for _, tx := range leftTxs { + leftTxsMap[string(tx.TxHash)] = struct{}{} } - lowest := blocks[0].Height - for _, b := range blocks { - if b.Height < lowest { - lowest = b.Height + exclusiveRightTxs := make([]store.TransactionBlock, 0) + for _, tx := range rightTxs { + if _, found := leftTxsMap[string(tx.TxHash)]; !found { + exclusiveRightTxs = append(exclusiveRightTxs, tx) } } - return lowest + return exclusiveRightTxs } // calculateChainwork calculates chainwork from the given difficulty bits diff --git a/internal/blocktx/processor_helpers_test.go b/internal/blocktx/processor_helpers_test.go index cf4b1a2e5..794ebef95 100644 --- a/internal/blocktx/processor_helpers_test.go +++ b/internal/blocktx/processor_helpers_test.go @@ -4,35 +4,46 @@ 
import ( "fmt" "testing" - "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/bitcoin-sv/arc/internal/blocktx/store" "github.com/stretchr/testify/require" ) -func TestGetLowestHeight(t *testing.T) { +func TestExclusiveRightTxs(t *testing.T) { // given - blocks := []*blocktx_api.Block{ + leftTxs := []store.TransactionBlock{ { - Height: 123, + TxHash: []byte("1"), }, { - Height: 250, + TxHash: []byte("2"), + }, + } + rightTxs := []store.TransactionBlock{ + { + TxHash: []byte("A"), }, { - Height: 83340, + TxHash: []byte("B"), }, { - Height: 4, + TxHash: []byte("1"), + }, + } + + expectedStaleTxs := []store.TransactionBlock{ + { + TxHash: []byte("A"), }, { - Height: 40, + TxHash: []byte("B"), }, } // when - lowestHeight := getLowestHeight(blocks) + actualStaleTxs := exclusiveRightTxs(leftTxs, rightTxs) // then - require.Equal(t, uint64(4), lowestHeight) + require.Equal(t, expectedStaleTxs, actualStaleTxs) } func TestChainWork(t *testing.T) { diff --git a/internal/blocktx/processor_test.go b/internal/blocktx/processor_test.go index 9c931d683..dc531767b 100644 --- a/internal/blocktx/processor_test.go +++ b/internal/blocktx/processor_test.go @@ -9,20 +9,20 @@ import ( "testing" "time" + "github.com/bitcoin-sv/arc/internal/blocktx" + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/bitcoin-sv/arc/internal/blocktx/mocks" + "github.com/bitcoin-sv/arc/internal/blocktx/store" + storeMocks "github.com/bitcoin-sv/arc/internal/blocktx/store/mocks" + "github.com/bitcoin-sv/arc/internal/testdata" "github.com/libsv/go-p2p/chaincfg/chainhash" "github.com/libsv/go-p2p/wire" "github.com/stretchr/testify/require" "google.golang.org/protobuf/reflect/protoreflect" - "github.com/bitcoin-sv/arc/internal/blocktx" blockchain "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication" blocktx_p2p "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication/p2p" - "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" - "github.com/bitcoin-sv/arc/internal/blocktx/mocks" - "github.com/bitcoin-sv/arc/internal/blocktx/store" - storeMocks "github.com/bitcoin-sv/arc/internal/blocktx/store/mocks" p2p_mocks "github.com/bitcoin-sv/arc/internal/p2p/mocks" - "github.com/bitcoin-sv/arc/internal/testdata" ) func TestHandleBlock(t *testing.T) { @@ -156,7 +156,6 @@ func TestHandleBlock(t *testing.T) { } var actualInsertedBlockTransactions [][]byte - storeMock := &storeMocks.BlocktxStoreMock{ GetBlockFunc: func(_ context.Context, _ *chainhash.Hash) (*blocktx_api.Block, error) { if tc.blockAlreadyProcessed { @@ -164,7 +163,7 @@ func TestHandleBlock(t *testing.T) { } return nil, store.ErrBlockNotFound }, - GetBlockByHeightFunc: func(_ context.Context, _ uint64, _ blocktx_api.Status) (*blocktx_api.Block, error) { + GetLongestBlockByHeightFunc: func(_ context.Context, _ uint64) (*blocktx_api.Block, error) { return nil, store.ErrBlockNotFound }, GetChainTipFunc: func(_ context.Context) (*blocktx_api.Block, error) { @@ -173,11 +172,17 @@ func TestHandleBlock(t *testing.T) { UpsertBlockFunc: func(_ context.Context, _ *blocktx_api.Block) (uint64, error) { return 0, nil }, + GetMinedTransactionsFunc: func(_ context.Context, _ [][]byte, _ bool) ([]store.TransactionBlock, error) { + return nil, nil + }, + GetRegisteredTxsByBlockHashesFunc: func(_ context.Context, _ [][]byte) ([]store.TransactionBlock, error) { + return nil, nil + }, MarkBlockAsDoneFunc: func(_ context.Context, _ *chainhash.Hash, _ uint64, _ uint64) error { return nil }, GetBlockHashesProcessingInProgressFunc: func(_
context.Context, _ string) ([]*chainhash.Hash, error) { return nil, nil }, } - storeMock.UpsertBlockTransactionsFunc = func(_ context.Context, _ uint64, txsWithMerklePaths []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) { + storeMock.UpsertBlockTransactionsFunc = func(_ context.Context, _ uint64, txsWithMerklePaths []store.TxWithMerklePath) error { require.LessOrEqual(t, len(txsWithMerklePaths), batchSize) for _, txWithMr := range txsWithMerklePaths { @@ -187,7 +192,7 @@ func TestHandleBlock(t *testing.T) { actualInsertedBlockTransactions = append(actualInsertedBlockTransactions, tx[:]) } - return txsWithMerklePaths, nil + return nil } mq := &mocks.MessageQueueClientMock{ @@ -202,6 +207,7 @@ func TestHandleBlock(t *testing.T) { require.NoError(t, err) blockMessage := &blockchain.BlockMessage{ + Hash: testdata.Block1Hash, Header: &wire.BlockHeader{ Version: 541065216, PrevBlock: tc.prevBlockHash, @@ -229,14 +235,22 @@ func TestHandleBlock(t *testing.T) { } } -func TestHandleBlockReorg(t *testing.T) { +func TestHandleBlockReorgAndOrphans(t *testing.T) { testCases := []struct { - name string - prevBlockStatus blocktx_api.Status - hasCompetingBlock bool - hasGreaterChainwork bool - expectedStatus blocktx_api.Status + name string + blockAlreadyExists bool + prevBlockStatus blocktx_api.Status + hasCompetingBlock bool + hasGreaterChainwork bool + shouldFindOrphanAncestor bool + ancestorStatus blocktx_api.Status + expectedStatus blocktx_api.Status }{ + { + name: "block already exists - should be ignored", + blockAlreadyExists: true, + expectedStatus: blocktx_api.Status_UNKNOWN, + }, { name: "previous block longest - no competing - no reorg", prevBlockStatus: blocktx_api.Status_LONGEST, @@ -258,32 +272,55 @@ func TestHandleBlock(t *testing.T) { expectedStatus: blocktx_api.Status_LONGEST, }, { - name: "previous block stale - competing - no reorg", - prevBlockStatus: blocktx_api.Status_STALE, - hasCompetingBlock: true, - hasGreaterChainwork: false, - expectedStatus: blocktx_api.Status_STALE, - }, - { - name: "previous block stale - no competing - no reorg", + name: "previous block stale - no reorg", prevBlockStatus: blocktx_api.Status_STALE, - hasCompetingBlock: false, hasGreaterChainwork: false, expectedStatus: blocktx_api.Status_STALE, }, { - name: "previous block stale - no competing - reorg", + name: "previous block stale - reorg", prevBlockStatus: blocktx_api.Status_STALE, - hasCompetingBlock: false, hasGreaterChainwork: true, expectedStatus: blocktx_api.Status_LONGEST, }, { - name: "previous block orphaned - no competing - no reorg", - prevBlockStatus: blocktx_api.Status_ORPHANED, - hasCompetingBlock: false, - hasGreaterChainwork: false, - expectedStatus: blocktx_api.Status_ORPHANED, + name: "previous block orphaned - no ancestor", + prevBlockStatus: blocktx_api.Status_ORPHANED, + shouldFindOrphanAncestor: false, + expectedStatus: blocktx_api.Status_ORPHANED, + }, + { + name: "previous block orphaned - stale ancestor", + prevBlockStatus: blocktx_api.Status_ORPHANED, + shouldFindOrphanAncestor: true, + ancestorStatus: blocktx_api.Status_STALE, + expectedStatus: blocktx_api.Status_STALE, + }, + { + name: "previous block orphaned - longest ancestor - no competing", + prevBlockStatus: blocktx_api.Status_ORPHANED, + shouldFindOrphanAncestor: true, + ancestorStatus: blocktx_api.Status_LONGEST, + hasCompetingBlock: false, + expectedStatus: blocktx_api.Status_LONGEST, + }, + { + name: "previous block orphaned - longest ancestor - competing - no reorg", + prevBlockStatus:
blocktx_api.Status_ORPHANED, + shouldFindOrphanAncestor: true, + ancestorStatus: blocktx_api.Status_LONGEST, + hasCompetingBlock: true, + hasGreaterChainwork: false, + expectedStatus: blocktx_api.Status_STALE, + }, + { + name: "previous block orphaned - longest ancestor - competing - reorg", + prevBlockStatus: blocktx_api.Status_ORPHANED, + shouldFindOrphanAncestor: true, + ancestorStatus: blocktx_api.Status_LONGEST, + hasCompetingBlock: true, + hasGreaterChainwork: true, + expectedStatus: blocktx_api.Status_LONGEST, }, } @@ -291,9 +328,8 @@ func TestHandleBlockReorg(t *testing.T) { t.Run(tc.name, func(t *testing.T) { // given var mtx sync.Mutex - var insertedBlock *blocktx_api.Block - - shouldReturnNoBlock := true + insertedBlockStatus := blocktx_api.Status_UNKNOWN + shouldReturnNoBlock := !tc.blockAlreadyExists storeMock := &storeMocks.BlocktxStoreMock{ GetBlockFunc: func(_ context.Context, _ *chainhash.Hash) (*blocktx_api.Block, error) { @@ -303,10 +339,11 @@ func TestHandleBlockReorg(t *testing.T) { } return &blocktx_api.Block{ - Status: tc.prevBlockStatus, + Status: tc.prevBlockStatus, + Processed: true, }, nil }, - GetBlockByHeightFunc: func(_ context.Context, _ uint64, _ blocktx_api.Status) (*blocktx_api.Block, error) { + GetLongestBlockByHeightFunc: func(_ context.Context, _ uint64) (*blocktx_api.Block, error) { if tc.hasCompetingBlock { blockHash, err := chainhash.NewHashFromStr("0000000000000000087590e1ad6360c0c491556c9af75c0d22ce9324cb5713cf") require.NoError(t, err) @@ -318,36 +355,73 @@ func TestHandleBlockReorg(t *testing.T) { return nil, store.ErrBlockNotFound }, GetChainTipFunc: func(_ context.Context) (*blocktx_api.Block, error) { + return &blocktx_api.Block{}, nil + }, + UpsertBlockFunc: func(_ context.Context, block *blocktx_api.Block) (uint64, error) { + mtx.Lock() + insertedBlockStatus = block.Status + mtx.Unlock() + return 1, nil + }, + GetStaleChainBackFromHashFunc: func(_ context.Context, _ []byte) ([]*blocktx_api.Block, error) { if tc.hasGreaterChainwork { - return &blocktx_api.Block{ - Chainwork: "42069", + return []*blocktx_api.Block{ + { + Chainwork: "62209952899966", + }, + { + Chainwork: "42069", + }, + { + Chainwork: "42069", + }, }, nil } - - return &blocktx_api.Block{ - Chainwork: "62209952899966", + return []*blocktx_api.Block{ + { + Chainwork: "62209952899966", + }, }, nil }, - GetStaleChainBackFromHashFunc: func(_ context.Context, _ []byte) ([]*blocktx_api.Block, error) { - return nil, nil - }, GetLongestChainFromHeightFunc: func(_ context.Context, _ uint64) ([]*blocktx_api.Block, error) { - return nil, nil - }, - UpdateBlocksStatusesFunc: func(_ context.Context, _ []store.BlockStatusUpdate) error { - return nil + return []*blocktx_api.Block{ + { + Chainwork: "62209952899966", + }, + { + Chainwork: "42069", + }, + }, nil }, - UpsertBlockFunc: func(_ context.Context, block *blocktx_api.Block) (uint64, error) { + UpdateBlocksStatusesFunc: func(_ context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { mtx.Lock() - insertedBlock = block + tipStatusUpdate := blockStatusUpdates[len(blockStatusUpdates)-1] + insertedBlockStatus = tipStatusUpdate.Status mtx.Unlock() - return 1, nil + return nil + }, + GetOrphansBackToNonOrphanAncestorFunc: func(_ context.Context, hash []byte) ([]*blocktx_api.Block, *blocktx_api.Block, error) { + if tc.shouldFindOrphanAncestor { + orphans := []*blocktx_api.Block{{Hash: hash}} + ancestor := &blocktx_api.Block{Hash: []byte("123"), Status: tc.ancestorStatus, Processed: true} + return orphans, ancestor, nil + } + return 
nil, nil, nil + }, + UpsertBlockTransactionsFunc: func(_ context.Context, _ uint64, _ []store.TxWithMerklePath) error { + return nil + }, + GetRegisteredTxsByBlockHashesFunc: func(_ context.Context, _ [][]byte) ([]store.TransactionBlock, error) { + return nil, nil + }, + GetMinedTransactionsFunc: func(_ context.Context, _ [][]byte, _ bool) ([]store.TransactionBlock, error) { + return nil, nil }, - MarkBlockAsDoneFunc: func(_ context.Context, _ *chainhash.Hash, _ uint64, _ uint64) error { + MarkBlockAsDoneFunc: func(_ context.Context, _ *chainhash.Hash, _, _ uint64) error { return nil }, - UpsertBlockTransactionsFunc: func(_ context.Context, _ uint64, _ []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) { - return []store.TxWithMerklePath{}, nil + DelBlockProcessingFunc: func(_ context.Context, _ *chainhash.Hash, _ string) (int64, error) { + return 0, nil }, } @@ -366,6 +440,7 @@ func TestHandleBlockReorg(t *testing.T) { require.NoError(t, err) blockMessage := &blockchain.BlockMessage{ + Hash: testdata.Block1Hash, Header: &wire.BlockHeader{ Version: 541065216, MerkleRoot: *merkleRoot, @@ -384,7 +459,7 @@ func TestHandleBlockReorg(t *testing.T) { // then time.Sleep(20 * time.Millisecond) mtx.Lock() - require.Equal(t, tc.expectedStatus, insertedBlock.Status) + require.Equal(t, tc.expectedStatus, insertedBlockStatus) mtx.Unlock() }) } @@ -635,12 +710,12 @@ func TestStartProcessRequestTxs(t *testing.T) { t.Run(tc.name, func(t *testing.T) { // given storeMock := &storeMocks.BlocktxStoreMock{ - GetMinedTransactionsFunc: func(_ context.Context, hashes []*chainhash.Hash) ([]store.GetMinedTransactionResult, error) { + GetMinedTransactionsFunc: func(_ context.Context, hashes [][]byte, _ bool) ([]store.TransactionBlock, error) { for _, hash := range hashes { - require.Equal(t, testdata.TX1Hash, hash) + require.Equal(t, testdata.TX1Hash[:], hash) } - return []store.GetMinedTransactionResult{{ + return []store.TransactionBlock{{ TxHash: testdata.TX1Hash[:], BlockHash: testdata.Block1Hash[:], BlockHeight: 1, diff --git a/internal/blocktx/store/mocks/blocktx_store_mock.go b/internal/blocktx/store/mocks/blocktx_store_mock.go index 8a9451d33..d7acfb51d 100644 --- a/internal/blocktx/store/mocks/blocktx_store_mock.go +++ b/internal/blocktx/store/mocks/blocktx_store_mock.go @@ -33,9 +33,6 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // GetBlockFunc: func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) { // panic("mock out the GetBlock method") // }, -// GetBlockByHeightFunc: func(ctx context.Context, height uint64, status blocktx_api.Status) (*blocktx_api.Block, error) { -// panic("mock out the GetBlockByHeight method") -// }, // GetBlockGapsFunc: func(ctx context.Context, heightRange int) ([]*store.BlockGap, error) { // panic("mock out the GetBlockGaps method") // }, @@ -45,12 +42,21 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // GetChainTipFunc: func(ctx context.Context) (*blocktx_api.Block, error) { // panic("mock out the GetChainTip method") // }, +// GetLongestBlockByHeightFunc: func(ctx context.Context, height uint64) (*blocktx_api.Block, error) { +// panic("mock out the GetLongestBlockByHeight method") +// }, // GetLongestChainFromHeightFunc: func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) { // panic("mock out the GetLongestChainFromHeight method") // }, -// GetMinedTransactionsFunc: func(ctx context.Context, hashes []*chainhash.Hash) ([]store.GetMinedTransactionResult, error) { +// GetMinedTransactionsFunc: func(ctx context.Context, 
hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { // panic("mock out the GetMinedTransactions method") // }, +// GetOrphansBackToNonOrphanAncestorFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, *blocktx_api.Block, error) { +// panic("mock out the GetOrphansBackToNonOrphanAncestor method") +// }, +// GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { +// panic("mock out the GetRegisteredTxsByBlockHashes method") +// }, // GetStaleChainBackFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { // panic("mock out the GetStaleChainBackFromHash method") // }, @@ -75,7 +81,7 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // UpsertBlockFunc: func(ctx context.Context, block *blocktx_api.Block) (uint64, error) { // panic("mock out the UpsertBlock method") // }, -// UpsertBlockTransactionsFunc: func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) { +// UpsertBlockTransactionsFunc: func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) error { // panic("mock out the UpsertBlockTransactions method") // }, // VerifyMerkleRootsFunc: func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) { @@ -100,9 +106,6 @@ type BlocktxStoreMock struct { // GetBlockFunc mocks the GetBlock method. GetBlockFunc func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) - // GetBlockByHeightFunc mocks the GetBlockByHeight method. - GetBlockByHeightFunc func(ctx context.Context, height uint64, status blocktx_api.Status) (*blocktx_api.Block, error) - // GetBlockGapsFunc mocks the GetBlockGaps method. GetBlockGapsFunc func(ctx context.Context, heightRange int) ([]*store.BlockGap, error) @@ -112,11 +115,20 @@ type BlocktxStoreMock struct { // GetChainTipFunc mocks the GetChainTip method. GetChainTipFunc func(ctx context.Context) (*blocktx_api.Block, error) + // GetLongestBlockByHeightFunc mocks the GetLongestBlockByHeight method. + GetLongestBlockByHeightFunc func(ctx context.Context, height uint64) (*blocktx_api.Block, error) + // GetLongestChainFromHeightFunc mocks the GetLongestChainFromHeight method. GetLongestChainFromHeightFunc func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) // GetMinedTransactionsFunc mocks the GetMinedTransactions method. - GetMinedTransactionsFunc func(ctx context.Context, hashes []*chainhash.Hash) ([]store.GetMinedTransactionResult, error) + GetMinedTransactionsFunc func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) + + // GetOrphansBackToNonOrphanAncestorFunc mocks the GetOrphansBackToNonOrphanAncestor method. + GetOrphansBackToNonOrphanAncestorFunc func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, *blocktx_api.Block, error) + + // GetRegisteredTxsByBlockHashesFunc mocks the GetRegisteredTxsByBlockHashes method. + GetRegisteredTxsByBlockHashesFunc func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) // GetStaleChainBackFromHashFunc mocks the GetStaleChainBackFromHash method. 
GetStaleChainBackFromHashFunc func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) @@ -143,7 +155,7 @@ type BlocktxStoreMock struct { UpsertBlockFunc func(ctx context.Context, block *blocktx_api.Block) (uint64, error) // UpsertBlockTransactionsFunc mocks the UpsertBlockTransactions method. - UpsertBlockTransactionsFunc func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) + UpsertBlockTransactionsFunc func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) error // VerifyMerkleRootsFunc mocks the VerifyMerkleRoots method. VerifyMerkleRootsFunc func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) @@ -178,15 +190,6 @@ type BlocktxStoreMock struct { // Hash is the hash argument value. Hash *chainhash.Hash } - // GetBlockByHeight holds details about calls to the GetBlockByHeight method. - GetBlockByHeight []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // Height is the height argument value. - Height uint64 - // Status is the status argument value. - Status blocktx_api.Status - } // GetBlockGaps holds details about calls to the GetBlockGaps method. GetBlockGaps []struct { // Ctx is the ctx argument value. @@ -206,6 +209,13 @@ type BlocktxStoreMock struct { // Ctx is the ctx argument value. Ctx context.Context } + // GetLongestBlockByHeight holds details about calls to the GetLongestBlockByHeight method. + GetLongestBlockByHeight []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // Height is the height argument value. + Height uint64 + } // GetLongestChainFromHeight holds details about calls to the GetLongestChainFromHeight method. GetLongestChainFromHeight []struct { // Ctx is the ctx argument value. @@ -218,7 +228,23 @@ type BlocktxStoreMock struct { // Ctx is the ctx argument value. Ctx context.Context // Hashes is the hashes argument value. - Hashes []*chainhash.Hash + Hashes [][]byte + // OnlyLongestChain is the onlyLongestChain argument value. + OnlyLongestChain bool + } + // GetOrphansBackToNonOrphanAncestor holds details about calls to the GetOrphansBackToNonOrphanAncestor method. + GetOrphansBackToNonOrphanAncestor []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // Hash is the hash argument value. + Hash []byte + } + // GetRegisteredTxsByBlockHashes holds details about calls to the GetRegisteredTxsByBlockHashes method. + GetRegisteredTxsByBlockHashes []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // BlockHashes is the blockHashes argument value. + BlockHashes [][]byte } // GetStaleChainBackFromHash holds details about calls to the GetStaleChainBackFromHash method. 
GetStaleChainBackFromHash []struct { @@ -301,12 +327,14 @@ type BlocktxStoreMock struct { lockClose sync.RWMutex lockDelBlockProcessing sync.RWMutex lockGetBlock sync.RWMutex - lockGetBlockByHeight sync.RWMutex lockGetBlockGaps sync.RWMutex lockGetBlockHashesProcessingInProgress sync.RWMutex lockGetChainTip sync.RWMutex + lockGetLongestBlockByHeight sync.RWMutex lockGetLongestChainFromHeight sync.RWMutex lockGetMinedTransactions sync.RWMutex + lockGetOrphansBackToNonOrphanAncestor sync.RWMutex + lockGetRegisteredTxsByBlockHashes sync.RWMutex lockGetStaleChainBackFromHash sync.RWMutex lockGetStats sync.RWMutex lockMarkBlockAsDone sync.RWMutex @@ -462,46 +490,6 @@ func (mock *BlocktxStoreMock) GetBlockCalls() []struct { return calls } -// GetBlockByHeight calls GetBlockByHeightFunc. -func (mock *BlocktxStoreMock) GetBlockByHeight(ctx context.Context, height uint64, status blocktx_api.Status) (*blocktx_api.Block, error) { - if mock.GetBlockByHeightFunc == nil { - panic("BlocktxStoreMock.GetBlockByHeightFunc: method is nil but BlocktxStore.GetBlockByHeight was just called") - } - callInfo := struct { - Ctx context.Context - Height uint64 - Status blocktx_api.Status - }{ - Ctx: ctx, - Height: height, - Status: status, - } - mock.lockGetBlockByHeight.Lock() - mock.calls.GetBlockByHeight = append(mock.calls.GetBlockByHeight, callInfo) - mock.lockGetBlockByHeight.Unlock() - return mock.GetBlockByHeightFunc(ctx, height, status) -} - -// GetBlockByHeightCalls gets all the calls that were made to GetBlockByHeight. -// Check the length with: -// -// len(mockedBlocktxStore.GetBlockByHeightCalls()) -func (mock *BlocktxStoreMock) GetBlockByHeightCalls() []struct { - Ctx context.Context - Height uint64 - Status blocktx_api.Status -} { - var calls []struct { - Ctx context.Context - Height uint64 - Status blocktx_api.Status - } - mock.lockGetBlockByHeight.RLock() - calls = mock.calls.GetBlockByHeight - mock.lockGetBlockByHeight.RUnlock() - return calls -} - // GetBlockGaps calls GetBlockGapsFunc. func (mock *BlocktxStoreMock) GetBlockGaps(ctx context.Context, heightRange int) ([]*store.BlockGap, error) { if mock.GetBlockGapsFunc == nil { @@ -606,6 +594,42 @@ func (mock *BlocktxStoreMock) GetChainTipCalls() []struct { return calls } +// GetLongestBlockByHeight calls GetLongestBlockByHeightFunc. +func (mock *BlocktxStoreMock) GetLongestBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) { + if mock.GetLongestBlockByHeightFunc == nil { + panic("BlocktxStoreMock.GetLongestBlockByHeightFunc: method is nil but BlocktxStore.GetLongestBlockByHeight was just called") + } + callInfo := struct { + Ctx context.Context + Height uint64 + }{ + Ctx: ctx, + Height: height, + } + mock.lockGetLongestBlockByHeight.Lock() + mock.calls.GetLongestBlockByHeight = append(mock.calls.GetLongestBlockByHeight, callInfo) + mock.lockGetLongestBlockByHeight.Unlock() + return mock.GetLongestBlockByHeightFunc(ctx, height) +} + +// GetLongestBlockByHeightCalls gets all the calls that were made to GetLongestBlockByHeight. +// Check the length with: +// +// len(mockedBlocktxStore.GetLongestBlockByHeightCalls()) +func (mock *BlocktxStoreMock) GetLongestBlockByHeightCalls() []struct { + Ctx context.Context + Height uint64 +} { + var calls []struct { + Ctx context.Context + Height uint64 + } + mock.lockGetLongestBlockByHeight.RLock() + calls = mock.calls.GetLongestBlockByHeight + mock.lockGetLongestBlockByHeight.RUnlock() + return calls +} + // GetLongestChainFromHeight calls GetLongestChainFromHeightFunc. 
func (mock *BlocktxStoreMock) GetLongestChainFromHeight(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) { if mock.GetLongestChainFromHeightFunc == nil { @@ -643,21 +667,23 @@ func (mock *BlocktxStoreMock) GetLongestChainFromHeightCalls() []struct { } // GetMinedTransactions calls GetMinedTransactionsFunc. -func (mock *BlocktxStoreMock) GetMinedTransactions(ctx context.Context, hashes []*chainhash.Hash) ([]store.GetMinedTransactionResult, error) { +func (mock *BlocktxStoreMock) GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { if mock.GetMinedTransactionsFunc == nil { panic("BlocktxStoreMock.GetMinedTransactionsFunc: method is nil but BlocktxStore.GetMinedTransactions was just called") } callInfo := struct { - Ctx context.Context - Hashes []*chainhash.Hash + Ctx context.Context + Hashes [][]byte + OnlyLongestChain bool }{ - Ctx: ctx, - Hashes: hashes, + Ctx: ctx, + Hashes: hashes, + OnlyLongestChain: onlyLongestChain, } mock.lockGetMinedTransactions.Lock() mock.calls.GetMinedTransactions = append(mock.calls.GetMinedTransactions, callInfo) mock.lockGetMinedTransactions.Unlock() - return mock.GetMinedTransactionsFunc(ctx, hashes) + return mock.GetMinedTransactionsFunc(ctx, hashes, onlyLongestChain) } // GetMinedTransactionsCalls gets all the calls that were made to GetMinedTransactions. @@ -665,12 +691,14 @@ func (mock *BlocktxStoreMock) GetMinedTransactions(ctx context.Context, hashes [ // // len(mockedBlocktxStore.GetMinedTransactionsCalls()) func (mock *BlocktxStoreMock) GetMinedTransactionsCalls() []struct { - Ctx context.Context - Hashes []*chainhash.Hash + Ctx context.Context + Hashes [][]byte + OnlyLongestChain bool } { var calls []struct { - Ctx context.Context - Hashes []*chainhash.Hash + Ctx context.Context + Hashes [][]byte + OnlyLongestChain bool } mock.lockGetMinedTransactions.RLock() calls = mock.calls.GetMinedTransactions @@ -678,6 +706,78 @@ func (mock *BlocktxStoreMock) GetMinedTransactionsCalls() []struct { return calls } +// GetOrphansBackToNonOrphanAncestor calls GetOrphansBackToNonOrphanAncestorFunc. +func (mock *BlocktxStoreMock) GetOrphansBackToNonOrphanAncestor(ctx context.Context, hash []byte) ([]*blocktx_api.Block, *blocktx_api.Block, error) { + if mock.GetOrphansBackToNonOrphanAncestorFunc == nil { + panic("BlocktxStoreMock.GetOrphansBackToNonOrphanAncestorFunc: method is nil but BlocktxStore.GetOrphansBackToNonOrphanAncestor was just called") + } + callInfo := struct { + Ctx context.Context + Hash []byte + }{ + Ctx: ctx, + Hash: hash, + } + mock.lockGetOrphansBackToNonOrphanAncestor.Lock() + mock.calls.GetOrphansBackToNonOrphanAncestor = append(mock.calls.GetOrphansBackToNonOrphanAncestor, callInfo) + mock.lockGetOrphansBackToNonOrphanAncestor.Unlock() + return mock.GetOrphansBackToNonOrphanAncestorFunc(ctx, hash) +} + +// GetOrphansBackToNonOrphanAncestorCalls gets all the calls that were made to GetOrphansBackToNonOrphanAncestor. +// Check the length with: +// +// len(mockedBlocktxStore.GetOrphansBackToNonOrphanAncestorCalls()) +func (mock *BlocktxStoreMock) GetOrphansBackToNonOrphanAncestorCalls() []struct { + Ctx context.Context + Hash []byte +} { + var calls []struct { + Ctx context.Context + Hash []byte + } + mock.lockGetOrphansBackToNonOrphanAncestor.RLock() + calls = mock.calls.GetOrphansBackToNonOrphanAncestor + mock.lockGetOrphansBackToNonOrphanAncestor.RUnlock() + return calls +} + +// GetRegisteredTxsByBlockHashes calls GetRegisteredTxsByBlockHashesFunc. 
+func (mock *BlocktxStoreMock) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { + if mock.GetRegisteredTxsByBlockHashesFunc == nil { + panic("BlocktxStoreMock.GetRegisteredTxsByBlockHashesFunc: method is nil but BlocktxStore.GetRegisteredTxsByBlockHashes was just called") + } + callInfo := struct { + Ctx context.Context + BlockHashes [][]byte + }{ + Ctx: ctx, + BlockHashes: blockHashes, + } + mock.lockGetRegisteredTxsByBlockHashes.Lock() + mock.calls.GetRegisteredTxsByBlockHashes = append(mock.calls.GetRegisteredTxsByBlockHashes, callInfo) + mock.lockGetRegisteredTxsByBlockHashes.Unlock() + return mock.GetRegisteredTxsByBlockHashesFunc(ctx, blockHashes) +} + +// GetRegisteredTxsByBlockHashesCalls gets all the calls that were made to GetRegisteredTxsByBlockHashes. +// Check the length with: +// +// len(mockedBlocktxStore.GetRegisteredTxsByBlockHashesCalls()) +func (mock *BlocktxStoreMock) GetRegisteredTxsByBlockHashesCalls() []struct { + Ctx context.Context + BlockHashes [][]byte +} { + var calls []struct { + Ctx context.Context + BlockHashes [][]byte + } + mock.lockGetRegisteredTxsByBlockHashes.RLock() + calls = mock.calls.GetRegisteredTxsByBlockHashes + mock.lockGetRegisteredTxsByBlockHashes.RUnlock() + return calls +} + // GetStaleChainBackFromHash calls GetStaleChainBackFromHashFunc. func (mock *BlocktxStoreMock) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { if mock.GetStaleChainBackFromHashFunc == nil { @@ -971,7 +1071,7 @@ func (mock *BlocktxStoreMock) UpsertBlockCalls() []struct { } // UpsertBlockTransactions calls UpsertBlockTransactionsFunc. -func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) { +func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) error { if mock.UpsertBlockTransactionsFunc == nil { panic("BlocktxStoreMock.UpsertBlockTransactionsFunc: method is nil but BlocktxStore.UpsertBlockTransactions was just called") } diff --git a/internal/blocktx/store/model.go b/internal/blocktx/store/model.go index 260df433c..e4aaaab57 100644 --- a/internal/blocktx/store/model.go +++ b/internal/blocktx/store/model.go @@ -15,11 +15,12 @@ type TxWithMerklePath struct { MerklePath string } -type GetMinedTransactionResult struct { +type TransactionBlock struct { TxHash []byte BlockHash []byte BlockHeight uint64 MerklePath string + BlockStatus blocktx_api.Status } type BlockStatusUpdate struct { diff --git a/internal/blocktx/store/postgresql/fixtures/block_processing/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/block_processing/blocktx.blocks.yaml index b26a8f7e9..4e04686ca 100644 --- a/internal/blocktx/store/postgresql/fixtures/block_processing/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/block_processing/blocktx.blocks.yaml @@ -6,7 +6,6 @@ height: 822013 size: 86840000 tx_count: 23477 - orphanedyn: false - inserted_at: 2023-12-15 14:00:00 id: 2 hash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 @@ -16,4 +15,3 @@ processed_at: 2023-12-15 14:10:00 size: 3030000 tx_count: 856 - orphanedyn: false diff --git a/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.blocks.yaml index 9e00aa7da..976e16a18 100644 --- 
a/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.blocks.yaml @@ -7,7 +7,6 @@ processed_at: 2023-12-10 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false - inserted_at: 2023-12-15 14:00:00 id: 2 hash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7 @@ -17,4 +16,3 @@ processed_at: 2023-12-15 14:10:00 size: 3030000 tx_count: 856 - orphanedyn: false diff --git a/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml index 44928c5d8..c1a8f0b30 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml @@ -7,7 +7,6 @@ processed_at: 2023-12-15 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false status: 10 chainwork: '123456' - inserted_at: 2023-12-15 14:30:00 @@ -19,7 +18,6 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 10 chainwork: '123456' - inserted_at: 2023-12-15 14:30:00 @@ -31,9 +29,9 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 20 # STALE - competing block chainwork: '123456' + is_longest: false - inserted_at: 2023-12-15 14:40:00 id: 3 hash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 @@ -43,7 +41,6 @@ processed_at: 2023-12-15 14:40:00 size: 299650000 tx_count: 62162 - orphanedyn: false status: 10 chainwork: '123456' - inserted_at: 2023-12-15 14:50:00 @@ -52,8 +49,8 @@ prevhash: 0x5696fc6e504b6aa2ae5d9c46b9418192dc61bd1b2e3364030000000000000000 merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 height: 822020 + processed_at: 2023-12-15 14:40:00 size: 8630000 tx_count: 36724 - orphanedyn: false status: 10 chainwork: '123456' diff --git a/internal/blocktx/store/postgresql/fixtures/get_block_gaps/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_block_gaps/blocktx.blocks.yaml index 7951c92d8..d3641f5a3 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_block_gaps/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_block_gaps/blocktx.blocks.yaml @@ -3,76 +3,132 @@ hash: 0xca415274325bac4ea3a39b0cef9356e75469d758ba4a57030000000000000000 prevhash: 0x3dc8a25238765c58a9fccbcf83fba59bf93894bcaaee8e040000000000000000 merkleroot: 0x4f3d1594808f485303ba8a1ed4f7c15f8087e5da1b7ebd25fa266630a8a6d880 - height: 812010 + height: 822008 # Out of range + status: 10 # LONGEST + is_longest: true size: 244000000 tx_count: 4437 - orphanedyn: false + +# Gap 822009 + - inserted_at: 2023-12-10 14:00:00 id: 1 hash: 0x72ad227eaaf73d36bc86f46347310c9b21a360b277c3000a0000000000000000 prevhash: 0x4ad773b1a464129a0ed8c7a8c71bb98175f0f01da1793f0e0000000000000000 merkleroot: 0x145b33264b4440278446f4cb5008dcf87e54e7827a215da9621b652eb17eef88 height: 822010 + status: 30 # ORPHANED + is_longest: false processed_at: 2023-12-10 14:10:00 size: 244000000 tx_count: 4437 - orphanedyn: false + +# block 822011 is being processed (in table block_processing) + - inserted_at: 2023-12-10 14:00:00 id: 2 hash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 prevhash: 0x3a03313b727fa08c170fab2660c225d52b4d85516c92a0020000000000000000 merkleroot: 0x3eeee879a8a08fc537a04682178687bb0e58a5103938eafc349705a2acb06410 height: 822012 + status: 30 # ORPHANED + is_longest: 
false processed_at: 2023-12-10 14:10:00 size: 3030000 tx_count: 856 - orphanedyn: false - inserted_at: 2023-12-15 14:00:00 id: 3 hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000 prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 height: 822013 + status: 30 # ORPHANED + is_longest: false processed_at: 2023-12-15 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false + +# Gap 822014 + - inserted_at: 2023-12-15 14:30:00 id: 4 hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 height: 822015 + status: 30 # ORPHANED + is_longest: false processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false - inserted_at: 2023-12-15 14:40:00 id: 5 hash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 prevhash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 merkleroot: 0x4b58b0402a84012269b124f78c91a78a814eb3c9caa03f1df1d33172b23082d1 height: 822016 + status: 30 # ORPHANED + is_longest: false processed_at: 2023-12-15 14:40:00 size: 299650000 tx_count: 62162 - orphanedyn: false + +# Gap 822017 - 822019 + - inserted_at: 2023-12-15 14:50:00 id: 6 hash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 prevhash: 0x5696fc6e504b6aa2ae5d9c46b9418192dc61bd1b2e3364030000000000000000 merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 height: 822020 + status: 30 # ORPHANED + is_longest: false size: 8630000 tx_count: 36724 - orphanedyn: false - inserted_at: 2023-12-15 15:00:00 id: 7 hash: 0x1d5fc8020fd68baea5c0cad654f04eb791a81100c51045090000000000000000 prevhash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 merkleroot: 0x3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c height: 822021 + status: 30 # ORPHANED + is_longest: false processed_at: 2023-12-15 15:00:00 size: 1620000 tx_count: 5578 - orphanedyn: false + +# competing chain at height 822018 - 822020 +- inserted_at: 2023-12-15 14:50:00 + id: 8 + hash: 0x0000000000000000092497212ff3447c8b53c8777d83082049aa64998d1f04f0 + prevhash: 0x00000000000000000d840fb91c0df3b057db04a0250c6d88b2f25aadcfc8410b + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822018 + status: 30 # ORPHANED + is_longest: false + processed_at: 2023-12-15 15:00:00 + size: 8630000 + tx_count: 36724 +- inserted_at: 2023-12-15 14:50:00 + id: 9 + hash: 0x00000000000000000911795efb7fbbd73ecb589a63d6f6f658bab67b57ce9caa + prevhash: 0x0000000000000000092497212ff3447c8b53c8777d83082049aa64998d1f04f0 + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822019 + status: 30 # ORPHANED + is_longest: false + processed_at: 2023-12-15 15:00:00 + size: 8630000 + tx_count: 36724 +- inserted_at: 2023-12-15 14:50:00 + id: 10 + hash: 0x0000000000000000082e0b0d5924f2f190cb4a53eac364524d9388f0c39cc7bd + prevhash: 0x00000000000000000911795efb7fbbd73ecb589a63d6f6f658bab67b57ce9caa + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822020 + status: 30 # ORPHANED + is_longest: false + processed_at: 2023-12-15 15:00:00 + size: 8630000 + tx_count: 36724 + diff --git a/internal/blocktx/store/postgresql/fixtures/get_longest_chain/blocktx.blocks.yaml 
b/internal/blocktx/store/postgresql/fixtures/get_longest_chain/blocktx.blocks.yaml index ce21cb387..ec6976daf 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_longest_chain/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_longest_chain/blocktx.blocks.yaml @@ -7,9 +7,9 @@ processed_at: 2023-12-15 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false status: 10 # LONGEST chainwork: '123456' + is_longest: true - inserted_at: 2023-12-15 14:30:00 id: 1 hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde @@ -19,9 +19,9 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 10 # LONGEST chainwork: '123456' + is_longest: true - inserted_at: 2023-12-15 14:30:00 id: 2 hash: 0x00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9 @@ -31,9 +31,9 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 20 # STALE - competing block chainwork: '123456' + is_longest: false - inserted_at: 2023-12-15 14:40:00 id: 3 hash: 0x0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1 @@ -43,9 +43,9 @@ processed_at: 2023-12-15 14:40:00 size: 299650000 tx_count: 62162 - orphanedyn: false status: 20 # STALE chainwork: '123456' + is_longest: false - inserted_at: 2023-12-15 14:50:00 id: 4 hash: 0x000000000000000004bf3e68405b31650559ff28d38a42b5e4f1440a865611ca @@ -54,6 +54,6 @@ height: 822017 size: 8630000 tx_count: 36724 - orphanedyn: false status: 20 # STALE chainwork: '123456' + is_longest: false diff --git a/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.block_transactions_map.yaml b/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.block_transactions_map.yaml deleted file mode 100644 index f87903b98..000000000 --- a/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.block_transactions_map.yaml +++ /dev/null @@ -1,12 +0,0 @@ -- blockid: 9736 - txid: 115361489 - merkle_path: merkle-path-1 - inserted_at: 2024-01-10 12:00:00 -- blockid: 9736 - txid: 115361490 - merkle_path: merkle-path-2 - inserted_at: 2024-01-10 12:00:00 -- blockid: 9736 - txid: 115361491 - merkle_path: merkle-path-3 - inserted_at: 2024-01-10 12:00:00 diff --git a/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.blocks.yaml deleted file mode 100644 index 2cf6008e8..000000000 --- a/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.blocks.yaml +++ /dev/null @@ -1,11 +0,0 @@ -- inserted_at: 2024-01-10 13:06:03.375 - id: 9736 - hash: 0x6258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000 - prevhash: 0x000000000000000001a7aa3999410ca53fb645851531ec0a7a5cb9ce2d4ae313 - merkleroot: 0x0d72bf92e7862df18d1935c171ca4dbb70d268b0f025e46716e913bc7e4f2bdb - height: 826481 - processed_at: 2024-01-10 13:06:06.122 - size: 108689370 - tx_count: 799 - orphanedyn: FALSE - merkle_path: "" diff --git a/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.transactions.yaml b/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.transactions.yaml deleted file mode 100644 index a8bffc9e8..000000000 --- a/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.transactions.yaml +++ /dev/null @@ -1,12 +0,0 @@ -- id: 115361488 - hash: 0x76732b80598326a18d3bf0a86518adbdf95d0ddc6ff6693004440f4776168c3b - inserted_at: 2024-01-09 13:00:00 -- id: 
115361489 - hash: 0x164e85a5d5bc2b2372e8feaa266e5e4b7d0808f8d2b784fb1f7349c4726392b0 - inserted_at: 2024-01-10 12:00:00 -- id: 115361490 - hash: 0xdbbd24251b9bb824566412395bb76a579bca3477c2d0b4cbc210a769d3bb4177 - inserted_at: 2024-01-10 12:00:00 -- id: 115361491 - hash: 0x0d60dd6dc1f2649efb2847f801dfaa61361a438deb526da2de5b6875e0016514 - inserted_at: 2024-01-10 12:00:00 diff --git a/internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml new file mode 100644 index 000000000..0c06f92ae --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml @@ -0,0 +1,99 @@ +- inserted_at: 2023-12-15 14:00:00 + id: 0 + hash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067 + prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 + merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 + height: 822015 + processed_at: 2023-12-15 14:10:00 + size: 86840000 + tx_count: 23477 + status: 10 # LONGEST + chainwork: '123456' + is_longest: true +- inserted_at: 2023-12-15 14:30:00 + id: 1 + hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + prevhash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822016 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false +- inserted_at: 2023-12-15 14:30:00 + id: 2 + hash: 0x00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9 + prevhash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822017 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false +- inserted_at: 2023-12-15 14:40:00 + id: 3 + hash: 0x0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1 + prevhash: 0x00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9 + merkleroot: 0x4b58b0402a84012269b124f78c91a78a814eb3c9caa03f1df1d33172b23082d1 + height: 822018 + processed_at: 2023-12-15 14:40:00 + size: 299650000 + tx_count: 62162 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false +- inserted_at: 2023-12-15 14:40:00 + id: 4 + hash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 + prevhash: 0x0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1 + merkleroot: 0x4b58b0402a84012269b124f78c91a78a814eb3c9caa03f1df1d33172b23082d1 + height: 822019 + processed_at: 2023-12-15 14:40:00 + size: 299650000 + tx_count: 62162 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false +- inserted_at: 2023-12-15 14:50:00 + id: 5 + hash: 0x000000000000000004bf3e68405b31650559ff28d38a42b5e4f1440a865611ca + prevhash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822020 + processed_at: 2023-12-15 14:10:00 + size: 8630000 + tx_count: 36724 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false + +# GAP + +- inserted_at: 2023-12-15 14:50:00 + id: 6 + hash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c + prevhash: 0x0000000000000000094510c50011a891b74ef054d6cac0a5ae8bd60f02c85f1d + merkleroot: 
0xda71199f8ed9203d8a765595e6c030a22e5ed8330b1abb467a82c97d7d21d512 + height: 822022 + processed_at: 2023-12-15 14:10:00 + size: 8630000 + tx_count: 36724 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false +- inserted_at: 2023-12-15 14:50:00 + id: 7 + hash: 0x0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd + prevhash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c + merkleroot: 0xda71199f8ed9203d8a765595e6c030a22e5ed8330b1abb467a82c97d7d21d512 + height: 822023 + processed_at: 2023-12-15 14:10:00 + size: 8630000 + tx_count: 36724 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false diff --git a/internal/blocktx/store/postgresql/fixtures/get_stale_chain/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_stale_chain/blocktx.blocks.yaml index ce21cb387..ec6976daf 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_stale_chain/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_stale_chain/blocktx.blocks.yaml @@ -7,9 +7,9 @@ processed_at: 2023-12-15 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false status: 10 # LONGEST chainwork: '123456' + is_longest: true - inserted_at: 2023-12-15 14:30:00 id: 1 hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde @@ -19,9 +19,9 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 10 # LONGEST chainwork: '123456' + is_longest: true - inserted_at: 2023-12-15 14:30:00 id: 2 hash: 0x00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9 @@ -31,9 +31,9 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 20 # STALE - competing block chainwork: '123456' + is_longest: false - inserted_at: 2023-12-15 14:40:00 id: 3 hash: 0x0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1 @@ -43,9 +43,9 @@ processed_at: 2023-12-15 14:40:00 size: 299650000 tx_count: 62162 - orphanedyn: false status: 20 # STALE chainwork: '123456' + is_longest: false - inserted_at: 2023-12-15 14:50:00 id: 4 hash: 0x000000000000000004bf3e68405b31650559ff28d38a42b5e4f1440a865611ca @@ -54,6 +54,6 @@ height: 822017 size: 8630000 tx_count: 36724 - orphanedyn: false status: 20 # STALE chainwork: '123456' + is_longest: false diff --git a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions_map.yaml b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions_map.yaml new file mode 100644 index 000000000..3bceb9ce5 --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions_map.yaml @@ -0,0 +1,52 @@ +- blockid: 1 + txid: 1 + merkle_path: merkle-path-1 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1 + txid: 2 + merkle_path: merkle-path-2 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1 + txid: 3 + merkle_path: merkle-path-3 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1 + txid: 4 + merkle_path: merkle-path-4 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1 + txid: 5 + merkle_path: merkle-path-5 + inserted_at: 2023-12-10 14:00:00 +- blockid: 2 + txid: 6 + merkle_path: merkle-path-6 + inserted_at: 2023-12-15 14:00:00 +- blockid: 2 + txid: 7 + merkle_path: merkle-path-7 + inserted_at: 2023-12-15 14:00:00 +- blockid: 2 + txid: 8 + merkle_path: merkle-path-8 + inserted_at: 2023-12-15 14:00:00 +- blockid: 2 + txid: 9 + merkle_path: merkle-path-9 + inserted_at: 2023-12-15 14:00:00 +- blockid: 2 + txid: 10 + merkle_path: merkle-path-10 + inserted_at: 2023-12-15 
14:00:00 +- blockid: 3 + txid: 11 + merkle_path: merkle-path-11 + inserted_at: 2023-12-15 14:00:00 +- blockid: 3 + txid: 12 + merkle_path: merkle-path-12 + inserted_at: 2023-12-15 14:00:00 +- blockid: 3 + txid: 13 + merkle_path: merkle-path-13 + inserted_at: 2023-12-15 14:00:00 diff --git a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.blocks.yaml new file mode 100644 index 000000000..06c1dba87 --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.blocks.yaml @@ -0,0 +1,33 @@ +- inserted_at: 2023-12-10 14:00:00 + id: 1 + hash: 0x000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9 + prevhash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7 + merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 + height: 822013 + processed_at: 2023-12-10 14:10:00 + size: 86840000 + tx_count: 23477 + status: 10 # LONGEST + is_longest: true +- inserted_at: 2023-12-15 14:00:00 + id: 2 + hash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7 + prevhash: 0x000000000000000002a0926c51854d2bd525c26026ab0f178ca07f723b31033a + merkleroot: 0x3eeee879a8a08fc537a04682178687bb0e58a5103938eafc349705a2acb06410 + height: 822012 + processed_at: 2023-12-15 14:10:00 + size: 3030000 + tx_count: 856 + status: 20 # STALE + is_longest: false +- inserted_at: 2023-12-10 14:00:00 + id: 3 + hash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067 + prevhash: 0x000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9 + merkleroot: 0x713e7713ebc6414420a418e87bbd42ecaf7e0e38c923c2cf9e72718d3c329acb + height: 822014 + processed_at: 2023-12-10 14:10:00 + size: 86840000 + tx_count: 23477 + status: 10 # LONGEST + is_longest: true diff --git a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.transactions.yaml b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.transactions.yaml new file mode 100644 index 000000000..8ae03913e --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.transactions.yaml @@ -0,0 +1,57 @@ +# txs from block 1 - LONGEST +- id: 1 + hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + inserted_at: 2023-12-10 14:00:00 + is_registered: false +- id: 2 + hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 3 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + inserted_at: 2023-12-10 14:00:00 + is_registered: false +- id: 4 + hash: 0xee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa + inserted_at: 2023-12-10 14:00:00 + is_registered: false +- id: 5 + hash: 0x3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd + inserted_at: 2023-12-10 14:00:00 + is_registered: false + +# txs from block 2 - STALE +- id: 6 + hash: 0x213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0 + inserted_at: 2023-12-15 14:00:00 + is_registered: true +- id: 7 + hash: 0x12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357 + inserted_at: 2023-12-15 14:00:00 + is_registered: true +- id: 8 + hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 + inserted_at: 2023-12-15 14:00:00 + is_registered: false +- id: 9 + hash: 0x5c5b621b81fb63d9df4595ee2e6b3c50cce1f5f0e1b83510aac504931ed22799 + inserted_at: 2023-12-15 14:00:00 + is_registered: false +- id: 10 + hash: 
0xa3d4e78a8e11e97c8faf34880da861412273948edf467f23590601a1057079d8 + inserted_at: 2023-12-15 14:00:00 + is_registered: false + +# txs from block 3 - LONGEST +- id: 11 + hash: 0x2eb9f15adaf9e7d1de19f3ebc6bf95b62871a4e053c30ac0d1b1df85a6163d8e + inserted_at: 2023-12-15 14:00:00 + is_registered: true +- id: 12 + hash: 0x45ad0e3de133e386faeff8ecf12b665875d527031b9aa75ca96d3fc2b7098fa5 + inserted_at: 2023-12-15 14:00:00 + is_registered: false +- id: 13 + hash: 0x4bac520c26dba4e24c3fb73bf4fd0d66e45ec39b976d1d052f8a4f499f4aa004 + inserted_at: 2023-12-15 14:00:00 + is_registered: false diff --git a/internal/blocktx/store/postgresql/fixtures/insert_block/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/insert_block/blocktx.blocks.yaml new file mode 100644 index 000000000..9cb7bc4c1 --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/insert_block/blocktx.blocks.yaml @@ -0,0 +1,36 @@ +- inserted_at: 2023-12-15 14:30:00 + id: 1 + hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + prevhash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 10 # LONGEST + chainwork: '123456' + is_longest: true +- inserted_at: 2023-12-15 14:30:00 + id: 2 + hash: 0x00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9 + prevhash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 20 # STALE + chainwork: '123456' + is_longest: false +- inserted_at: 2023-12-15 14:30:00 + id: 3 + hash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7 + prevhash: 0x000000000000000002a0926c51854d2bd525c26026ab0f178ca07f723b31033a + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false diff --git a/internal/blocktx/store/postgresql/fixtures/mark_block_as_done/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/mark_block_as_done/blocktx.blocks.yaml index 7a9d599f3..e936d86f2 100644 --- a/internal/blocktx/store/postgresql/fixtures/mark_block_as_done/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/mark_block_as_done/blocktx.blocks.yaml @@ -4,6 +4,5 @@ prevhash: 0x3a03313b727fa08c170fab2660c225d52b4d85516c92a0020000000000000000 merkleroot: 0x3eeee879a8a08fc537a04682178687bb0e58a5103938eafc349705a2acb06410 height: 822012 - orphanedyn: false status: 10 chainwork: '1234' diff --git a/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.blocks.yaml index 2cf6008e8..a2f4e567a 100644 --- a/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.blocks.yaml @@ -7,5 +7,4 @@ processed_at: 2024-01-10 13:06:06.122 size: 108689370 tx_count: 799 - orphanedyn: FALSE merkle_path: "" diff --git a/internal/blocktx/store/postgresql/fixtures/update_blocks_statuses/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/update_blocks_statuses/blocktx.blocks.yaml index 
ce21cb387..bb97faed5 100644 --- a/internal/blocktx/store/postgresql/fixtures/update_blocks_statuses/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/update_blocks_statuses/blocktx.blocks.yaml @@ -7,9 +7,9 @@ processed_at: 2023-12-15 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false status: 10 # LONGEST chainwork: '123456' + is_longest: true - inserted_at: 2023-12-15 14:30:00 id: 1 hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde @@ -19,9 +19,9 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 10 # LONGEST chainwork: '123456' + is_longest: true - inserted_at: 2023-12-15 14:30:00 id: 2 hash: 0x00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9 @@ -31,9 +31,9 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 20 # STALE - competing block chainwork: '123456' + is_longest: false - inserted_at: 2023-12-15 14:40:00 id: 3 hash: 0x0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1 @@ -43,17 +43,18 @@ processed_at: 2023-12-15 14:40:00 size: 299650000 tx_count: 62162 - orphanedyn: false status: 20 # STALE chainwork: '123456' + is_longest: false - inserted_at: 2023-12-15 14:50:00 id: 4 hash: 0x000000000000000004bf3e68405b31650559ff28d38a42b5e4f1440a865611ca prevhash: 0x0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1 merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 height: 822017 + processed_at: 2023-12-15 14:10:00 size: 8630000 tx_count: 36724 - orphanedyn: false status: 20 # STALE chainwork: '123456' + is_longest: false diff --git a/internal/blocktx/store/postgresql/fixtures/upsert_block_transactions/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/upsert_block_transactions/blocktx.blocks.yaml index 3e007ea6c..59e5f2fbe 100644 --- a/internal/blocktx/store/postgresql/fixtures/upsert_block_transactions/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/upsert_block_transactions/blocktx.blocks.yaml @@ -4,17 +4,19 @@ prevhash: 0x000000000000000001a7aa3999410ca53fb645851531ec0a7a5cb9ce2d4ae313 merkleroot: 0x0d72bf92e7862df18d1935c171ca4dbb70d268b0f025e46716e913bc7e4f2bdb height: 826481 + status: 10 # STALE + is_longest: true processed_at: 2024-01-10 13:06:06.122 size: 108689370 tx_count: 799 - orphanedyn: FALSE - inserted_at: 2024-01-10 13:06:03.375 id: 9737 hash: 0x7258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000 prevhash: 0x000000000000000001a7aa3999410ca53fb645851531ec0a7a5cb9ce2d4ae313 merkleroot: 0x0d72bf92e7862df18d1935c171ca4dbb70d268b0f025e46716e913bc7e4f2bdb height: 826481 + status: 20 # STALE + is_longest: false processed_at: 2024-01-10 13:06:06.122 size: 108689370 tx_count: 799 - orphanedyn: FALSE diff --git a/internal/blocktx/store/postgresql/fixtures/verify_merkle_roots/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/verify_merkle_roots/blocktx.blocks.yaml index 7951c92d8..9a623765e 100644 --- a/internal/blocktx/store/postgresql/fixtures/verify_merkle_roots/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/verify_merkle_roots/blocktx.blocks.yaml @@ -4,9 +4,9 @@ prevhash: 0x3dc8a25238765c58a9fccbcf83fba59bf93894bcaaee8e040000000000000000 merkleroot: 0x4f3d1594808f485303ba8a1ed4f7c15f8087e5da1b7ebd25fa266630a8a6d880 height: 812010 + processed_at: 2023-12-10 14:10:00 size: 244000000 tx_count: 4437 - orphanedyn: false - inserted_at: 2023-12-10 14:00:00 id: 1 hash: 
0x72ad227eaaf73d36bc86f46347310c9b21a360b277c3000a0000000000000000 @@ -16,7 +16,6 @@ processed_at: 2023-12-10 14:10:00 size: 244000000 tx_count: 4437 - orphanedyn: false - inserted_at: 2023-12-10 14:00:00 id: 2 hash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 @@ -26,7 +25,6 @@ processed_at: 2023-12-10 14:10:00 size: 3030000 tx_count: 856 - orphanedyn: false - inserted_at: 2023-12-15 14:00:00 id: 3 hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000 @@ -36,7 +34,6 @@ processed_at: 2023-12-15 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false - inserted_at: 2023-12-15 14:30:00 id: 4 hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 @@ -46,7 +43,6 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false - inserted_at: 2023-12-15 14:40:00 id: 5 hash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 @@ -56,16 +52,15 @@ processed_at: 2023-12-15 14:40:00 size: 299650000 tx_count: 62162 - orphanedyn: false - inserted_at: 2023-12-15 14:50:00 id: 6 hash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 prevhash: 0x5696fc6e504b6aa2ae5d9c46b9418192dc61bd1b2e3364030000000000000000 merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 height: 822020 + processed_at: 2023-12-10 14:10:00 size: 8630000 tx_count: 36724 - orphanedyn: false - inserted_at: 2023-12-15 15:00:00 id: 7 hash: 0x1d5fc8020fd68baea5c0cad654f04eb791a81100c51045090000000000000000 @@ -75,4 +70,3 @@ processed_at: 2023-12-15 15:00:00 size: 1620000 tx_count: 5578 - orphanedyn: false diff --git a/internal/blocktx/store/postgresql/get_block.go b/internal/blocktx/store/postgresql/get_block.go index 4733ab91c..adfaadb39 100644 --- a/internal/blocktx/store/postgresql/get_block.go +++ b/internal/blocktx/store/postgresql/get_block.go @@ -16,16 +16,16 @@ func (p *PostgreSQL) GetBlock(ctx context.Context, hash *chainhash.Hash) (*block return p.queryBlockByPredicate(ctx, predicate, hash[:]) } -func (p *PostgreSQL) GetBlockByHeight(ctx context.Context, height uint64, status blocktx_api.Status) (*blocktx_api.Block, error) { - predicate := "WHERE height = $1 AND status = $2" +func (p *PostgreSQL) GetLongestBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) { + predicate := "WHERE height = $1 AND is_longest = true" - return p.queryBlockByPredicate(ctx, predicate, height, status) + return p.queryBlockByPredicate(ctx, predicate, height) } func (p *PostgreSQL) GetChainTip(ctx context.Context) (*blocktx_api.Block, error) { - predicate := "WHERE height = (SELECT MAX(height) FROM blocktx.blocks blks WHERE blks.status = $1)" + predicate := "WHERE height = (SELECT MAX(height) FROM blocktx.blocks blks WHERE blks.is_longest = true)" - return p.queryBlockByPredicate(ctx, predicate, blocktx_api.Status_LONGEST) + return p.queryBlockByPredicate(ctx, predicate) } func (p *PostgreSQL) queryBlockByPredicate(ctx context.Context, predicate string, predicateParams ...any) (*blocktx_api.Block, error) { @@ -36,13 +36,12 @@ func (p *PostgreSQL) queryBlockByPredicate(ctx context.Context, predicate string ,merkleroot ,height ,processed_at - ,orphanedyn ,status ,chainwork FROM blocktx.blocks ` - q += " " + predicate + q += " " + predicate + " AND processed_at IS NOT NULL" var block blocktx_api.Block @@ -54,7 +53,6 @@ func (p *PostgreSQL) queryBlockByPredicate(ctx context.Context, predicate string &block.MerkleRoot, &block.Height, &processedAt, - &block.Orphaned, &block.Status, 
&block.Chainwork, ); err != nil { diff --git a/internal/blocktx/store/postgresql/get_block_gaps.go b/internal/blocktx/store/postgresql/get_block_gaps.go index bd0fffd6d..6c4a83a6c 100644 --- a/internal/blocktx/store/postgresql/get_block_gaps.go +++ b/internal/blocktx/store/postgresql/get_block_gaps.go @@ -8,25 +8,34 @@ import ( ) func (p *PostgreSQL) GetBlockGaps(ctx context.Context, blockHeightRange int) ([]*store.BlockGap, error) { + // Flow of this query: + // + // 1. Get height - 1 and prevhash from blocks where there isn't a previous block + // and where height is greater than our height range parameter. + // + // 2. Add to result from 1. all blocks from the blocks table that are unprocessed yet. + // + // 3. Combine the result from 1. and 2. with block_processing table and remove all + // results that are being currently processed. + // + // 4. Sort by height. q := ` - SELECT DISTINCT all_missing.height, all_missing.hash FROM - (SELECT missing_blocks.missing_block_height AS height, blocktx.blocks.prevhash AS hash FROM blocktx.blocks - JOIN ( - SELECT bl.block_heights AS missing_block_height FROM ( - SELECT unnest(ARRAY( - SELECT a.n - FROM generate_series((SELECT max(height) - $1 AS block_height FROM blocktx.blocks b), (SELECT max(height) AS block_height FROM blocktx.blocks b)) AS a(n) - )) AS block_heights) AS bl - LEFT JOIN blocktx.blocks blks ON blks.height = bl.block_heights - WHERE blks.height IS NULL - ) AS missing_blocks ON blocktx.blocks.height = missing_blocks.missing_block_height + 1 - UNION - SELECT height, hash FROM blocktx.blocks WHERE processed_at IS NULL AND height < (SELECT max(height) AS block_height FROM blocktx.blocks b) - AND height > (SELECT max(height) - $1 AS block_height FROM blocktx.blocks b) - ) AS all_missing - LEFT JOIN blocktx.block_processing bp ON bp.block_hash = all_missing.hash - WHERE bp IS NULL ORDER BY all_missing.height DESC; - ` + SELECT DISTINCT all_missing.missing_height, all_missing.missing_hash + FROM ( + SELECT b.height - 1 AS missing_height, b.prevhash AS missing_hash + FROM blocktx.blocks b + WHERE b.height > (SELECT max(height) - $1 FROM blocktx.blocks) + AND NOT EXISTS (SELECT 1 FROM blocktx.blocks missing WHERE missing.hash = b.prevhash) + UNION + SELECT unprocessed.height AS missing_height, unprocessed.hash AS missing_hash + FROM blocktx.blocks unprocessed + WHERE unprocessed.processed_at IS NULL + AND unprocessed.height > (SELECT max(height) - $1 FROM blocktx.blocks) + ) AS all_missing + LEFT JOIN blocktx.block_processing bp ON bp.block_hash = all_missing.missing_hash + WHERE bp.block_hash IS NULL + ORDER BY all_missing.missing_height ASC; + ` rows, err := p.db.QueryContext(ctx, q, blockHeightRange) if err != nil { @@ -43,6 +52,12 @@ func (p *PostgreSQL) GetBlockGaps(ctx context.Context, blockHeightRange int) ([] return nil, err } + // in e2e tests, peers will misbehave if we ask + // for a genesis block, so we need to ignore it + if height == uint64(0) { + continue + } + txHash, err := chainhash.NewHash(hash) if err != nil { return nil, err diff --git a/internal/blocktx/store/postgresql/get_longest_chain.go b/internal/blocktx/store/postgresql/get_longest_chain.go index 9e8da6bc3..10bff0265 100644 --- a/internal/blocktx/store/postgresql/get_longest_chain.go +++ b/internal/blocktx/store/postgresql/get_longest_chain.go @@ -2,7 +2,6 @@ package postgresql import ( "context" - "database/sql" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" ) @@ -15,43 +14,17 @@ func (p *PostgreSQL) GetLongestChainFromHeight(ctx context.Context, 
height uint6 ,merkleroot ,height ,processed_at - ,orphanedyn ,status ,chainwork FROM blocktx.blocks - WHERE height >= $1 AND status = $2 + WHERE height >= $1 AND is_longest = true AND processed_at IS NOT NULL ` - longestBlocks := make([]*blocktx_api.Block, 0) - - rows, err := p.db.QueryContext(ctx, q, height, blocktx_api.Status_LONGEST) + rows, err := p.db.QueryContext(ctx, q, height) if err != nil { return nil, err } defer rows.Close() - for rows.Next() { - var block blocktx_api.Block - var processedAt sql.NullString - - err := rows.Scan( - &block.Hash, - &block.PreviousHash, - &block.MerkleRoot, - &block.Height, - &processedAt, - &block.Orphaned, - &block.Status, - &block.Chainwork, - ) - if err != nil { - return nil, err - } - - block.Processed = processedAt.Valid - - longestBlocks = append(longestBlocks, &block) - } - - return longestBlocks, nil + return p.parseBlocks(rows) } diff --git a/internal/blocktx/store/postgresql/get_mined_transactions.go b/internal/blocktx/store/postgresql/get_mined_transactions.go deleted file mode 100644 index 67032c79a..000000000 --- a/internal/blocktx/store/postgresql/get_mined_transactions.go +++ /dev/null @@ -1,69 +0,0 @@ -package postgresql - -import ( - "context" - - "github.com/lib/pq" - "github.com/libsv/go-p2p/chaincfg/chainhash" - - "github.com/bitcoin-sv/arc/internal/blocktx/store" - "github.com/bitcoin-sv/arc/internal/tracing" -) - -func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes []*chainhash.Hash) (result []store.GetMinedTransactionResult, err error) { - ctx, span := tracing.StartTracing(ctx, "GetMinedTransactions", p.tracingEnabled, p.tracingAttributes...) - defer func() { - tracing.EndTracing(span, err) - }() - - var hashSlice [][]byte - for _, hash := range hashes { - hashSlice = append(hashSlice, hash[:]) - } - - result = make([]store.GetMinedTransactionResult, 0, len(hashSlice)) - - q := ` - SELECT - t.hash, - b.hash, - b.height, - m.merkle_path - FROM blocktx.transactions AS t - JOIN blocktx.block_transactions_map AS m ON t.id = m.txid - JOIN blocktx.blocks AS b ON m.blockid = b.id - WHERE t.hash = ANY($1) - ` - - rows, err := p.db.QueryContext(ctx, q, pq.Array(hashSlice)) - if err != nil { - return nil, err - } - defer rows.Close() - - for rows.Next() { - var txHash []byte - var blockHash []byte - var blockHeight uint64 - var merklePath string - - err = rows.Scan( - &txHash, - &blockHash, - &blockHeight, - &merklePath, - ) - if err != nil { - return nil, err - } - - result = append(result, store.GetMinedTransactionResult{ - TxHash: txHash, - BlockHash: blockHash, - BlockHeight: blockHeight, - MerklePath: merklePath, - }) - } - - return result, nil -} diff --git a/internal/blocktx/store/postgresql/get_orphaned_chain.go b/internal/blocktx/store/postgresql/get_orphaned_chain.go new file mode 100644 index 000000000..79f28b59b --- /dev/null +++ b/internal/blocktx/store/postgresql/get_orphaned_chain.go @@ -0,0 +1,87 @@ +package postgresql + +import ( + "context" + + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/libsv/go-p2p/chaincfg/chainhash" +) + +// GetOrphansBackToNonOrphanAncestor recursively searches for blocks marked +// as ORPHANED from the given hash - back to the first ORPHANED block. Then, it +// tries to get the first non-orphaned ancestor of that orphan chain. +// +// It searches for the block whose hash matches the prevhash of the given block, +// and then repeats that recursively for each newly found orphaned block until +// it has the entire orphaned chain. 
+func (p *PostgreSQL) GetOrphansBackToNonOrphanAncestor(ctx context.Context, hash []byte) (orphans []*blocktx_api.Block, nonOrphanAncestor *blocktx_api.Block, err error) { + // The way this query works is that the result from the first SELECT + // will be stored in the `orphans` variable, which is later used + // for recursion in the second SELECT. + // + // The entire recursion happens in the second SELECT, after UNION ALL, + // and the first SELECT is just to set up the `orphans` variable with + // the first, initial value. Then, the `orphans` variable is recursively + // updated with values returned from the second SELECT. + q := ` + WITH RECURSIVE orphans AS ( + SELECT + hash + ,prevhash + ,merkleroot + ,height + ,processed_at + ,status + ,chainwork + FROM blocktx.blocks WHERE hash = $1 AND status = $2 + UNION ALL + SELECT + b.hash + ,b.prevhash + ,b.merkleroot + ,b.height + ,b.processed_at + ,b.status + ,b.chainwork + FROM blocktx.blocks b JOIN orphans o ON o.prevhash = b.hash AND b.status = $2 + WHERE b.processed_at IS NOT NULL + ) + SELECT + hash + ,prevhash + ,merkleroot + ,height + ,processed_at + ,status + ,chainwork + FROM orphans + ORDER BY height + ` + + rows, err := p.db.QueryContext(ctx, q, hash, blocktx_api.Status_ORPHANED) + if err != nil { + return + } + defer rows.Close() + + orphans, err = p.parseBlocks(rows) + if err != nil { + return + } + + // fewer than 2 results means there is no orphaned chain + // behind the given block that we could trace back + if len(orphans) < 2 { + return + } + + // try to get first non-orphan ancestor + nonOrphanHash, err := chainhash.NewHash(orphans[0].PreviousHash) + if err != nil { + return + } + + nonOrphanAncestor, _ = p.GetBlock(ctx, nonOrphanHash) + + return +} diff --git a/internal/blocktx/store/postgresql/get_stale_chain.go b/internal/blocktx/store/postgresql/get_stale_chain.go index 31b000028..435e70909 100644 --- a/internal/blocktx/store/postgresql/get_stale_chain.go +++ b/internal/blocktx/store/postgresql/get_stale_chain.go @@ -2,7 +2,6 @@ package postgresql import ( "context" - "database/sql" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" ) @@ -36,7 +35,6 @@ func (p *PostgreSQL) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ,merkleroot ,height ,processed_at - ,orphanedyn ,status ,chainwork FROM blocktx.blocks WHERE hash = $1 @@ -47,10 +45,10 @@ func (p *PostgreSQL) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ,b.merkleroot ,b.height ,b.processed_at - ,b.orphanedyn ,b.status ,b.chainwork FROM blocktx.blocks b JOIN prevBlocks p ON b.hash = p.prevhash AND b.status = $2 + WHERE b.processed_at IS NOT NULL ) SELECT hash @@ -58,12 +56,11 @@ func (p *PostgreSQL) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ,merkleroot ,height ,processed_at - ,orphanedyn ,status ,chainwork FROM prevBlocks + ORDER BY height ` - staleBlocks := make([]*blocktx_api.Block, 0) rows, err := p.db.QueryContext(ctx, q, hash, blocktx_api.Status_STALE) if err != nil { @@ -71,28 +68,5 @@ func (p *PostgreSQL) GetStaleChainBackFromHash(ctx context.Context, hash []byte) } defer rows.Close() - for rows.Next() { - var block blocktx_api.Block - var processedAt sql.NullString - - err := rows.Scan( - &block.Hash, - &block.PreviousHash, - &block.MerkleRoot, - &block.Height, - &processedAt, - &block.Orphaned, - &block.Status, - &block.Chainwork, - ) - if err != nil { - return nil, err - } - - block.Processed = processedAt.Valid - - staleBlocks = append(staleBlocks, &block) - } - - return staleBlocks, nil + return p.parseBlocks(rows) } diff --git
a/internal/blocktx/store/postgresql/get_transactions.go b/internal/blocktx/store/postgresql/get_transactions.go new file mode 100644 index 000000000..5900cf308 --- /dev/null +++ b/internal/blocktx/store/postgresql/get_transactions.go @@ -0,0 +1,93 @@ +package postgresql + +import ( + "context" + + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/bitcoin-sv/arc/internal/blocktx/store" + "github.com/bitcoin-sv/arc/internal/tracing" + "github.com/lib/pq" +) + +func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) (minedTransactions []store.TransactionBlock, err error) { + ctx, span := tracing.StartTracing(ctx, "GetMinedTransactions", p.tracingEnabled, p.tracingAttributes...) + defer func() { + tracing.EndTracing(span, err) + }() + + if onlyLongestChain { + predicate := "WHERE t.hash = ANY($1) AND b.is_longest = true" + return p.getTransactionBlocksByPredicate(ctx, predicate, pq.Array(hashes)) + } + + predicate := "WHERE t.hash = ANY($1) AND (b.status = $2 OR b.status = $3) AND b.processed_at IS NOT NULL" + + return p.getTransactionBlocksByPredicate(ctx, predicate, + pq.Array(hashes), + blocktx_api.Status_LONGEST, + blocktx_api.Status_STALE, + ) +} + +func (p *PostgreSQL) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) (registeredTxs []store.TransactionBlock, err error) { + ctx, span := tracing.StartTracing(ctx, "GetRegisteredTxsByBlockHashes", p.tracingEnabled, p.tracingAttributes...) + defer func() { + tracing.EndTracing(span, err) + }() + + predicate := "WHERE b.hash = ANY($1) AND t.is_registered = TRUE" + + return p.getTransactionBlocksByPredicate(ctx, predicate, pq.Array(blockHashes)) +} + +func (p *PostgreSQL) getTransactionBlocksByPredicate(ctx context.Context, predicate string, predicateParams ...any) ([]store.TransactionBlock, error) { + transactionBlocks := make([]store.TransactionBlock, 0) + + q := ` + SELECT + t.hash, + b.hash, + b.height, + m.merkle_path, + b.status + FROM blocktx.transactions AS t + JOIN blocktx.block_transactions_map AS m ON t.id = m.txid + JOIN blocktx.blocks AS b ON m.blockid = b.id + ` + q += " " + predicate + + rows, err := p.db.QueryContext(ctx, q, predicateParams...) + if err != nil { + return nil, err + } + defer rows.Close() + + for rows.Next() { + var txHash []byte + var blockHash []byte + var blockHeight uint64 + var merklePath string + var blockStatus blocktx_api.Status + + err = rows.Scan( + &txHash, + &blockHash, + &blockHeight, + &merklePath, + &blockStatus, + ) + if err != nil { + return nil, err + } + + transactionBlocks = append(transactionBlocks, store.TransactionBlock{ + TxHash: txHash, + BlockHash: blockHash, + BlockHeight: blockHeight, + MerklePath: merklePath, + BlockStatus: blockStatus, + }) + } + + return transactionBlocks, nil +} diff --git a/internal/blocktx/store/postgresql/insert_block.go b/internal/blocktx/store/postgresql/insert_block.go deleted file mode 100644 index 1da99ea92..000000000 --- a/internal/blocktx/store/postgresql/insert_block.go +++ /dev/null @@ -1,40 +0,0 @@ -package postgresql - -import ( - "context" - "errors" - - "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" - "github.com/bitcoin-sv/arc/internal/blocktx/store" - "github.com/bitcoin-sv/arc/internal/tracing" -) - -func (p *PostgreSQL) UpsertBlock(ctx context.Context, block *blocktx_api.Block) (blockID uint64, err error) { - ctx, span := tracing.StartTracing(ctx, "UpsertBlock", p.tracingEnabled, p.tracingAttributes...)
- defer func() { - tracing.EndTracing(span, err) - }() - - qInsert := ` - INSERT INTO blocktx.blocks (hash, prevhash, merkleroot, height, status, chainwork) - VALUES ($1 ,$2 , $3, $4, $5, $6) - ON CONFLICT (hash) DO UPDATE SET orphanedyn = FALSE - RETURNING id - ` - - row := p.db.QueryRowContext(ctx, qInsert, - block.GetHash(), - block.GetPreviousHash(), - block.GetMerkleRoot(), - block.GetHeight(), - block.GetStatus(), - block.GetChainwork(), - ) - - err = row.Scan(&blockID) - if err != nil { - return 0, errors.Join(store.ErrFailedToInsertBlock, err) - } - - return blockID, nil -} diff --git a/internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.down.sql b/internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.down.sql new file mode 100644 index 000000000..575f2acd6 --- /dev/null +++ b/internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE blocktx.blocks +ADD COLUMN orphanedyn BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.up.sql b/internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.up.sql new file mode 100644 index 000000000..d3861e568 --- /dev/null +++ b/internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.up.sql @@ -0,0 +1 @@ +ALTER TABLE blocktx.blocks DROP COLUMN orphanedyn; diff --git a/internal/blocktx/store/postgresql/migrations/000019_add_is_longest.down.sql b/internal/blocktx/store/postgresql/migrations/000019_add_is_longest.down.sql new file mode 100644 index 000000000..8fd5c89c6 --- /dev/null +++ b/internal/blocktx/store/postgresql/migrations/000019_add_is_longest.down.sql @@ -0,0 +1,4 @@ +DROP INDEX blocktx.pux_height_is_longest; +DROP INDEX blocktx.ix_block_is_longest; +ALTER TABLE blocktx.blocks +DROP COLUMN is_longest; diff --git a/internal/blocktx/store/postgresql/migrations/000019_add_is_longest.up.sql b/internal/blocktx/store/postgresql/migrations/000019_add_is_longest.up.sql new file mode 100644 index 000000000..d3527fbea --- /dev/null +++ b/internal/blocktx/store/postgresql/migrations/000019_add_is_longest.up.sql @@ -0,0 +1,13 @@ +-- field `is_longest` is an implementation detail that will help to +-- make sure that there is only one longest chain at any given height +-- and is also used as a helper when querying for longest chain +ALTER TABLE blocktx.blocks +ADD COLUMN is_longest BOOLEAN NOT NULL DEFAULT TRUE; + +-- This will make it faster to search for blocks WHERE is_longest = true +CREATE INDEX ix_block_is_longest ON blocktx.blocks(is_longest); + +-- This will make sure that there can only be ONE block at any +-- given height that is considered part of the LONGEST chain.
+CREATE UNIQUE INDEX pux_height_is_longest ON blocktx.blocks(height) +WHERE is_longest; diff --git a/internal/blocktx/store/postgresql/postgres.go b/internal/blocktx/store/postgresql/postgres.go index 53fbf7b50..f3b6ec321 100644 --- a/internal/blocktx/store/postgresql/postgres.go +++ b/internal/blocktx/store/postgresql/postgres.go @@ -13,6 +13,8 @@ import ( "github.com/bitcoin-sv/arc/internal/blocktx/store" ) +var ErrNoTransaction = errors.New("sql: transaction has already been committed or rolled back") + const ( postgresDriverName = "postgres" maxPostgresBulkInsertRows = 8192 diff --git a/internal/blocktx/store/postgresql/postgres_helpers.go b/internal/blocktx/store/postgresql/postgres_helpers.go new file mode 100644 index 000000000..5bd3e603e --- /dev/null +++ b/internal/blocktx/store/postgresql/postgres_helpers.go @@ -0,0 +1,35 @@ +package postgresql + +import ( + "database/sql" + + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" +) + +func (p *PostgreSQL) parseBlocks(rows *sql.Rows) ([]*blocktx_api.Block, error) { + blocks := make([]*blocktx_api.Block, 0) + + for rows.Next() { + var block blocktx_api.Block + var processedAt sql.NullString + + err := rows.Scan( + &block.Hash, + &block.PreviousHash, + &block.MerkleRoot, + &block.Height, + &processedAt, + &block.Status, + &block.Chainwork, + ) + if err != nil { + return nil, err + } + + block.Processed = processedAt.Valid + + blocks = append(blocks, &block) + } + + return blocks, nil +} diff --git a/internal/blocktx/store/postgresql/postgres_test.go b/internal/blocktx/store/postgresql/postgres_test.go index 18bfd6cbd..13d686959 100644 --- a/internal/blocktx/store/postgresql/postgres_test.go +++ b/internal/blocktx/store/postgresql/postgres_test.go @@ -3,7 +3,6 @@ package postgresql import ( "bytes" "context" - "database/sql" "log" "os" "testing" @@ -31,9 +30,9 @@ type Block struct { MerkleRoot string `db:"merkleroot"` MerklePath *string `db:"merkle_path"` Height int64 `db:"height"` - Orphaned bool `db:"orphanedyn"` Status int `db:"status"` Chainwork string `db:"chainwork"` + IsLongest bool `db:"is_longest"` Size *int64 `db:"size"` TxCount *int64 `db:"tx_count"` Processed bool `db:"processed"` @@ -89,17 +88,17 @@ func testmain(m *testing.M) int { return m.Run() } -func prepareDb(t *testing.T, db *sql.DB, fixture string) { +func prepareDb(t *testing.T, postgres *PostgreSQL, fixture string) { t.Helper() - testutils.PruneTables(t, db, + testutils.PruneTables(t, postgres.db, "blocktx.blocks", "blocktx.transactions", "blocktx.block_transactions_map", ) if fixture != "" { - testutils.LoadFixtures(t, db, fixture) + testutils.LoadFixtures(t, postgres.db, fixture) } } @@ -131,37 +130,74 @@ func TestPostgresDB(t *testing.T) { var err error - t.Run("insert block / get block", func(t *testing.T) { + t.Run("upsert block / get block", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "") + prepareDb(t, postgresDB, "") blockHash1 := testutils.RevChainhash(t, "000000000000000001b8adefc1eb98896c80e30e517b9e2655f1f929d9958a48") blockHash2 := testutils.RevChainhash(t, "00000000000000000a081a539601645abe977946f8f6466a3c9e0c34d50be4a8") + blockHashViolating := testutils.RevChainhash(t, "00000000b69bd8e4dc60580117617a466d5c76ada85fb7b87e9baea01f9d9984") merkleRoot := testutils.RevChainhash(t, "31e25c5ac7c143687f55fc49caf0f552ba6a16d4f785e4c9a9a842179a085f0c") expectedBlock := &blocktx_api.Block{ Hash: blockHash2[:], PreviousHash: blockHash1[:], MerkleRoot: merkleRoot[:], Height: 100, + Status: blocktx_api.Status_LONGEST, + Processed: true, + 
} + expectedBlockViolatingUniqueIndex := &blocktx_api.Block{ + Hash: blockHashViolating[:], + PreviousHash: blockHash1[:], + MerkleRoot: merkleRoot[:], + Height: 100, + Status: blocktx_api.Status_LONGEST, + } + expectedBlockOverrideStatus := &blocktx_api.Block{ + Hash: blockHash2[:], + PreviousHash: blockHash1[:], + MerkleRoot: merkleRoot[:], + Height: 100, + Status: blocktx_api.Status_ORPHANED, + Processed: true, } - // when -> then + // when id, err := postgresDB.UpsertBlock(ctx, expectedBlock) require.NoError(t, err) require.Equal(t, uint64(1), id) + err = postgresDB.MarkBlockAsDone(ctx, blockHash2, uint64(1000), uint64(1)) + require.NoError(t, err) + + // then actualBlockResp, err := postgresDB.GetBlock(ctx, blockHash2) require.NoError(t, err) require.Equal(t, expectedBlock, actualBlockResp) + + // when + _, err = postgresDB.UpsertBlock(ctx, expectedBlockViolatingUniqueIndex) + + // then + require.ErrorIs(t, err, store.ErrFailedToInsertBlock) + + // when + id, err = postgresDB.UpsertBlock(ctx, expectedBlockOverrideStatus) + require.NoError(t, err) + require.Equal(t, uint64(1), id) // this should only update the status and retain the same ID + + // then + actualBlockResp, err = postgresDB.GetBlock(ctx, blockHash2) + require.NoError(t, err) + require.Equal(t, expectedBlockOverrideStatus, actualBlockResp) }) t.Run("get block by height / get chain tip", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_block_by_height") + prepareDb(t, postgresDB, "fixtures/get_block_by_height") height := uint64(822015) expectedHashAtHeightLongest := testutils.RevChainhash(t, "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000") - expectedHashAtHeightStale := testutils.RevChainhash(t, "00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9") heightNotFound := uint64(812222) @@ -169,15 +205,11 @@ func TestPostgresDB(t *testing.T) { hashAtTip := testutils.RevChainhash(t, "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000") // when -> then - actualBlock, err := postgresDB.GetBlockByHeight(context.Background(), height, blocktx_api.Status_LONGEST) + actualBlock, err := postgresDB.GetLongestBlockByHeight(context.Background(), height) require.NoError(t, err) require.Equal(t, expectedHashAtHeightLongest[:], actualBlock.Hash) - actualBlock, err = postgresDB.GetBlockByHeight(context.Background(), height, blocktx_api.Status_STALE) - require.NoError(t, err) - require.Equal(t, expectedHashAtHeightStale[:], actualBlock.Hash) - - actualBlock, err = postgresDB.GetBlockByHeight(context.Background(), heightNotFound, blocktx_api.Status_LONGEST) + actualBlock, err = postgresDB.GetLongestBlockByHeight(context.Background(), heightNotFound) require.Nil(t, actualBlock) require.Equal(t, store.ErrBlockNotFound, err) @@ -189,30 +221,36 @@ func TestPostgresDB(t *testing.T) { t.Run("get block gaps", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_block_gaps") + prepareDb(t, postgresDB, "fixtures/get_block_gaps") hash822014 := testutils.RevChainhash(t, "67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000") hash822019 := testutils.RevChainhash(t, "5696fc6e504b6aa2ae5d9c46b9418192dc61bd1b2e3364030000000000000000") hash822020 := testutils.RevChainhash(t, "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000") hash822009 := testutils.RevChainhash(t, "4ad773b1a464129a0ed8c7a8c71bb98175f0f01da1793f0e0000000000000000") + hash822017competing := testutils.RevChainhash(t, 
"00000000000000000d840fb91c0df3b057db04a0250c6d88b2f25aadcfc8410b") expectedBlockGaps := []*store.BlockGap{ { // gap - Height: 822019, - Hash: hash822019, + Height: 822009, + Hash: hash822009, }, + // block 11 is being processed { // gap Height: 822014, Hash: hash822014, }, + { // gap from competing chain + Height: 822017, + Hash: hash822017competing, + }, + { // gap + Height: 822019, + Hash: hash822019, + }, { // processing not finished Height: 822020, Hash: hash822020, }, - { // gap - Height: 822009, - Hash: hash822009, - }, } // when @@ -220,13 +258,14 @@ func TestPostgresDB(t *testing.T) { // then require.NoError(t, err) - require.Equal(t, 4, len(actualBlockGaps)) + require.Equal(t, len(expectedBlockGaps), len(actualBlockGaps)) require.ElementsMatch(t, expectedBlockGaps, actualBlockGaps) }) t.Run("get stats for block geps", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_block_gaps") + prepareDb(t, postgresDB, "fixtures/get_block_gaps") + const expectedGaps = int64(2007) // range (2016) - already existing blocks (9) = gaps (2007) // when actualBlockGaps, err := postgresDB.GetStats(ctx) @@ -234,12 +273,12 @@ func TestPostgresDB(t *testing.T) { // then require.NoError(t, err) // as we have 6 blocks in the range only it should return remaining 2006 (BlockDistance) - 6 - require.Equal(t, int64(2010), actualBlockGaps.CurrentNumOfBlockGaps) + require.Equal(t, expectedGaps, actualBlockGaps.CurrentNumOfBlockGaps) }) t.Run("get longest chain from height", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_longest_chain") + prepareDb(t, postgresDB, "fixtures/get_longest_chain") startingHeight := uint64(822014) hash0Longest := testutils.RevChainhash(t, "0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067") @@ -263,16 +302,16 @@ func TestPostgresDB(t *testing.T) { t.Run("get stale chain back from hash", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_stale_chain") + prepareDb(t, postgresDB, "fixtures/get_stale_chain") hash2Stale := testutils.RevChainhash(t, "00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9") hash3Stale := testutils.RevChainhash(t, "0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1") hash4Stale := testutils.RevChainhash(t, "000000000000000004bf3e68405b31650559ff28d38a42b5e4f1440a865611ca") expectedStaleHashes := [][]byte{ - hash4Stale[:], - hash3Stale[:], hash2Stale[:], + hash3Stale[:], + hash4Stale[:], } // when @@ -286,9 +325,54 @@ func TestPostgresDB(t *testing.T) { } }) + t.Run("get orphans back to non-orphaned ancestor", func(t *testing.T) { + // given + prepareDb(t, postgresDB, "fixtures/get_orphaned_chain") + + newHash := testutils.RevChainhash(t, "00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656") + hash2Orphaned := testutils.RevChainhash(t, "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde") + hash3Orphaned := testutils.RevChainhash(t, "00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9") + hash4Orphaned := testutils.RevChainhash(t, "0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1") + + noAncestorHash := testutils.RevChainhash(t, "0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd") + hash6Orphaned := testutils.RevChainhash(t, "0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c") + + expectedOrphanedHashes := [][]byte{ + hash2Orphaned[:], + hash3Orphaned[:], + hash4Orphaned[:], + newHash[:], + } + expectedAncestorHash := testutils.RevChainhash(t, 
"0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067") + + expectedNoAncestorOrphanedHashes := [][]byte{ + noAncestorHash[:], + hash6Orphaned[:], + } + + // when + actualOrphanedBlocks, actualAncestor, err := postgresDB.GetOrphansBackToNonOrphanAncestor(ctx, newHash[:]) + require.NoError(t, err) + + // then + require.Equal(t, len(expectedOrphanedHashes), len(actualOrphanedBlocks)) + for i, b := range actualOrphanedBlocks { + require.Equal(t, expectedOrphanedHashes[i], b.Hash) + } + require.Equal(t, expectedAncestorHash[:], actualAncestor.Hash) + + // when + actualOrphanedBlocks, actualAncestor, err = postgresDB.GetOrphansBackToNonOrphanAncestor(ctx, noAncestorHash[:]) + require.NoError(t, err) + + // then + require.Equal(t, len(expectedNoAncestorOrphanedHashes), len(actualOrphanedBlocks)) + require.Nil(t, actualAncestor) + }) + t.Run("update blocks statuses", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/update_blocks_statuses") + prepareDb(t, postgresDB, "fixtures/update_blocks_statuses") hash1Longest := testutils.RevChainhash(t, "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde") hash2Stale := testutils.RevChainhash(t, "00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9") @@ -302,6 +386,11 @@ func TestPostgresDB(t *testing.T) { {Hash: hash4Stale[:], Status: blocktx_api.Status_LONGEST}, } + blockStatusUpdatesViolating := []store.BlockStatusUpdate{ + // there is already a LONGEST block at that height + {Hash: hash1Longest[:], Status: blocktx_api.Status_LONGEST}, + } + // when err := postgresDB.UpdateBlocksStatuses(ctx, blockStatusUpdates) require.NoError(t, err) @@ -322,36 +411,110 @@ func TestPostgresDB(t *testing.T) { stale4, err := postgresDB.GetBlock(ctx, hash4Stale) require.NoError(t, err) require.Equal(t, blocktx_api.Status_LONGEST, stale4.Status) + + // when + err = postgresDB.UpdateBlocksStatuses(ctx, blockStatusUpdatesViolating) + require.ErrorIs(t, err, store.ErrFailedToUpdateBlockStatuses) }) - t.Run("test getting mined txs", func(t *testing.T) { + t.Run("get mined txs", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_mined_transactions") + prepareDb(t, postgresDB, "fixtures/get_transactions") - txHash1 := testutils.RevChainhash(t, "76732b80598326a18d3bf0a86518adbdf95d0ddc6ff6693004440f4776168c3b") - txHash2 := testutils.RevChainhash(t, "164e85a5d5bc2b2372e8feaa266e5e4b7d0808f8d2b784fb1f7349c4726392b0") - txHash3 := testutils.RevChainhash(t, "dbbd24251b9bb824566412395bb76a579bca3477c2d0b4cbc210a769d3bb4177") - txHash4 := testutils.RevChainhash(t, "0d60dd6dc1f2649efb2847f801dfaa61361a438deb526da2de5b6875e0016514") + txHash1 := testutils.RevChainhash(t, "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853") + txHash2 := testutils.RevChainhash(t, "21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e") + txHash3 := testutils.RevChainhash(t, "213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0") // from STALE block - blockHash := testutils.RevChainhash(t, "6258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000") + blockHash := testutils.RevChainhash(t, "000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9") + blockHash2 := testutils.RevChainhash(t, "0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7") + + expectedTxs := []store.TransactionBlock{ + { + TxHash: txHash1[:], + BlockHash: blockHash[:], + BlockHeight: 822013, + MerklePath: "merkle-path-1", + BlockStatus: blocktx_api.Status_LONGEST, + }, + { + TxHash: 
txHash2[:], + BlockHash: blockHash[:], + BlockHeight: 822013, + MerklePath: "merkle-path-2", + BlockStatus: blocktx_api.Status_LONGEST, + }, + { + TxHash: txHash3[:], + BlockHash: blockHash2[:], + BlockHeight: 822012, + MerklePath: "merkle-path-6", + BlockStatus: blocktx_api.Status_STALE, + }, + } // when - // get mined transaction and corresponding block - minedTxs, err := postgresDB.GetMinedTransactions(ctx, []*chainhash.Hash{txHash1, txHash2, txHash3, txHash4}) + onlyLongestChain := true + actualTxs, err := postgresDB.GetMinedTransactions(ctx, [][]byte{txHash1[:], txHash2[:], txHash3[:]}, onlyLongestChain) + + // then require.NoError(t, err) + require.ElementsMatch(t, expectedTxs[:2], actualTxs) + + // when + onlyLongestChain = false + actualTxs, err = postgresDB.GetMinedTransactions(ctx, [][]byte{txHash1[:], txHash2[:], txHash3[:]}, onlyLongestChain) // then - require.Len(t, minedTxs, 3) + require.NoError(t, err) + require.Equal(t, expectedTxs, actualTxs) + }) + + t.Run("get registered txs by block hashes", func(t *testing.T) { + // given + prepareDb(t, postgresDB, "fixtures/get_transactions") - for _, tx := range minedTxs { - require.True(t, bytes.Equal(tx.TxHash, txHash2[:]) || bytes.Equal(tx.TxHash, txHash3[:]) || bytes.Equal(tx.TxHash, txHash4[:])) - require.Equal(t, tx.BlockHash, blockHash[:]) - require.Equal(t, uint64(826481), tx.BlockHeight) + blockHashLongest := testutils.RevChainhash(t, "000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9") + blockHashStale := testutils.RevChainhash(t, "0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7") + + blockHashes := [][]byte{ + blockHashLongest[:], + blockHashStale[:], + } + + expectedTxs := []store.TransactionBlock{ + { + TxHash: testutils.RevChainhash(t, "21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e")[:], + BlockHash: blockHashLongest[:], + BlockHeight: 822013, + MerklePath: "merkle-path-2", + BlockStatus: blocktx_api.Status_LONGEST, + }, + { + TxHash: testutils.RevChainhash(t, "213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0")[:], + BlockHash: blockHashStale[:], + BlockHeight: 822012, + MerklePath: "merkle-path-6", + BlockStatus: blocktx_api.Status_STALE, + }, + { + TxHash: testutils.RevChainhash(t, "12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357")[:], + BlockHash: blockHashStale[:], + BlockHeight: 822012, + MerklePath: "merkle-path-7", + BlockStatus: blocktx_api.Status_STALE, + }, } + + // when + actualTxs, err := postgresDB.GetRegisteredTxsByBlockHashes(ctx, blockHashes) + + // then + require.NoError(t, err) + require.Equal(t, expectedTxs, actualTxs) }) t.Run("clear data", func(t *testing.T) { - prepareDb(t, postgresDB.db, "fixtures/clear_data") + prepareDb(t, postgresDB, "fixtures/clear_data") resp, err := postgresDB.ClearBlocktxTable(context.Background(), 10, "blocks") require.NoError(t, err) @@ -385,7 +548,7 @@ func TestPostgresDB(t *testing.T) { }) t.Run("set/get/del block processing", func(t *testing.T) { - prepareDb(t, postgresDB.db, "fixtures/block_processing") + prepareDb(t, postgresDB, "fixtures/block_processing") bh1 := testutils.RevChainhash(t, "747468cf7e6639ba9aa277ade1cf27639b0f214cec5719020000000000000000") @@ -418,7 +581,7 @@ func TestPostgresDB(t *testing.T) { t.Run("mark block as done", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/mark_block_as_done") + prepareDb(t, postgresDB, "fixtures/mark_block_as_done") bh1 := testutils.RevChainhash(t, "b71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000") 
@@ -443,7 +606,7 @@ func TestPostgresDB(t *testing.T) { t.Run("verify merkle roots", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/verify_merkle_roots") + prepareDb(t, postgresDB, "fixtures/verify_merkle_roots") merkleRequests := []*blocktx_api.MerkleRootVerificationRequest{ { @@ -498,7 +661,6 @@ func TestPostgresStore_UpsertBlockTransactions(t *testing.T) { name string txsWithMerklePaths []store.TxWithMerklePath - expectedErr error expectedUpdatedResLen int upsertRepeat bool }{ @@ -577,22 +739,22 @@ func TestPostgresStore_UpsertBlockTransactions(t *testing.T) { for _, tc := range tcs { t.Run(tc.name, func(t *testing.T) { // given - prepareDb(t, sut.db, "fixtures/upsert_block_transactions") + prepareDb(t, sut, "fixtures/upsert_block_transactions") testBlockID := uint64(9736) + testBlockHash := testutils.RevChainhash(t, "6258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000") // when - res, err := sut.UpsertBlockTransactions(ctx, testBlockID, tc.txsWithMerklePaths) + err := sut.UpsertBlockTransactions(ctx, testBlockID, tc.txsWithMerklePaths) if tc.upsertRepeat { - res, err = sut.UpsertBlockTransactions(ctx, testBlockID, tc.txsWithMerklePaths) + err = sut.UpsertBlockTransactions(ctx, testBlockID, tc.txsWithMerklePaths) require.NoError(t, err) } // then - if tc.expectedErr != nil { - require.ErrorIs(t, err, tc.expectedErr) - return - } + require.NoError(t, err) + + res, err := sut.GetRegisteredTxsByBlockHashes(ctx, [][]byte{testBlockHash[:]}) require.NoError(t, err) require.Equal(t, tc.expectedUpdatedResLen, len(res)) @@ -600,7 +762,7 @@ func TestPostgresStore_UpsertBlockTransactions(t *testing.T) { // assert correctness of returned values // assume registered transactions are at the beginning of tc.txs for i := 0; i < tc.expectedUpdatedResLen; i++ { - require.True(t, bytes.Equal(tc.txsWithMerklePaths[i].Hash, res[i].Hash)) + require.True(t, bytes.Equal(tc.txsWithMerklePaths[i].Hash, res[i].TxHash)) require.Equal(t, tc.txsWithMerklePaths[i].MerklePath, res[i].MerklePath) } @@ -637,7 +799,7 @@ func TestPostgresStore_UpsertBlockTransactions_CompetingBlocks(t *testing.T) { defer sut.Close() sut.maxPostgresBulkInsertRows = 5 - prepareDb(t, sut.db, "fixtures/upsert_block_transactions") + prepareDb(t, sut, "fixtures/upsert_block_transactions") testBlockID := uint64(9736) competingBlockID := uint64(9737) @@ -658,30 +820,25 @@ func TestPostgresStore_UpsertBlockTransactions_CompetingBlocks(t *testing.T) { }, } - expected := []store.GetMinedTransactionResult{ + expected := []store.TransactionBlock{ { TxHash: txHash[:], BlockHash: testutils.RevChainhash(t, "6258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000")[:], BlockHeight: uint64(826481), MerklePath: "merkle-path-1", - }, - { - TxHash: txHash[:], - BlockHash: testutils.RevChainhash(t, "7258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000")[:], - BlockHeight: uint64(826481), - MerklePath: "merkle-path-2", + BlockStatus: blocktx_api.Status_LONGEST, }, } // when - _, err := sut.UpsertBlockTransactions(ctx, testBlockID, txsWithMerklePaths) + err := sut.UpsertBlockTransactions(ctx, testBlockID, txsWithMerklePaths) require.NoError(t, err) - _, err = sut.UpsertBlockTransactions(ctx, competingBlockID, competingTxsWithMerklePaths) + err = sut.UpsertBlockTransactions(ctx, competingBlockID, competingTxsWithMerklePaths) require.NoError(t, err) // then - actual, err := sut.GetMinedTransactions(ctx, []*chainhash.Hash{txHash}) + actual, err := sut.GetMinedTransactions(ctx, [][]byte{txHash[:]}, true) 
require.NoError(t, err) require.ElementsMatch(t, expected, actual) @@ -732,7 +889,7 @@ func TestPostgresStore_RegisterTransactions(t *testing.T) { for _, tc := range tcs { t.Run(tc.name, func(t *testing.T) { // given - prepareDb(t, sut.db, "fixtures/register_transactions") + prepareDb(t, sut, "fixtures/register_transactions") // when result, err := sut.RegisterTransactions(ctx, tc.txs) @@ -770,3 +927,132 @@ func TestPostgresStore_RegisterTransactions(t *testing.T) { }) } } + +func TestUpsertBlockConditions(t *testing.T) { + tt := []struct { + name string + blockStatus blocktx_api.Status + prevBlockExists bool + prevBlockStatus blocktx_api.Status + + shouldSucceed bool + }{ + { + name: "extend longest chain - success", + blockStatus: blocktx_api.Status_LONGEST, + prevBlockExists: true, + prevBlockStatus: blocktx_api.Status_LONGEST, + shouldSucceed: true, + }, + { + name: "extend stale chain - success", + blockStatus: blocktx_api.Status_STALE, + prevBlockExists: true, + prevBlockStatus: blocktx_api.Status_STALE, + shouldSucceed: true, + }, + { + name: "extend orphaned chain - success", + blockStatus: blocktx_api.Status_ORPHANED, + prevBlockExists: true, + prevBlockStatus: blocktx_api.Status_ORPHANED, + shouldSucceed: true, + }, + { + name: "stale block extends longest - success", + blockStatus: blocktx_api.Status_STALE, + prevBlockExists: true, + prevBlockStatus: blocktx_api.Status_LONGEST, + shouldSucceed: true, + }, + { + name: "orphan block - success", + blockStatus: blocktx_api.Status_ORPHANED, + prevBlockExists: false, + shouldSucceed: true, + }, + { + name: "stale block with no prevBlock - fail", + blockStatus: blocktx_api.Status_STALE, + prevBlockExists: false, + shouldSucceed: false, + }, + { + name: "orphan block extending longest chain - fail", + blockStatus: blocktx_api.Status_ORPHANED, + prevBlockExists: true, + prevBlockStatus: blocktx_api.Status_LONGEST, + shouldSucceed: false, + }, + { + name: "orphan block extending stale chain - fail", + blockStatus: blocktx_api.Status_ORPHANED, + prevBlockExists: true, + prevBlockStatus: blocktx_api.Status_STALE, + shouldSucceed: false, + }, + { + name: "longest block extending stale chain - fail", + blockStatus: blocktx_api.Status_LONGEST, + prevBlockExists: true, + prevBlockStatus: blocktx_api.Status_STALE, + shouldSucceed: false, + }, + } + + // common setup for test cases + ctx, _, sut := setupPostgresTest(t) + defer sut.Close() + + for _, tc := range tt { + t.Run(tc.name, func(t *testing.T) { + // given + prepareDb(t, sut, "fixtures/insert_block") + + blockHashLongest := testutils.RevChainhash(t, "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde") + blockHashStale := testutils.RevChainhash(t, "00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9") + blockHashOrphaned := testutils.RevChainhash(t, "0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7") + randomPrevBlockHash := testutils.RevChainhash(t, "0000000000000000099da871f74c55a6305e6a37ef8bf955ad7d29ca4b44fda9") + + var prevBlockHash []byte + + if tc.prevBlockExists { + switch tc.prevBlockStatus { + case blocktx_api.Status_LONGEST: + prevBlockHash = blockHashLongest[:] + case blocktx_api.Status_STALE: + prevBlockHash = blockHashStale[:] + case blocktx_api.Status_ORPHANED: + prevBlockHash = blockHashOrphaned[:] + } + } else { + prevBlockHash = randomPrevBlockHash[:] + } + + blockHash := testutils.RevChainhash(t, "0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1") + merkleRoot := testutils.RevChainhash(t,
"7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257") + + block := &blocktx_api.Block{ + Hash: blockHash[:], + PreviousHash: prevBlockHash, + MerkleRoot: merkleRoot[:], + Height: 822016, + Processed: true, + Status: tc.blockStatus, + Chainwork: "123", + } + + // when + blockID, err := sut.UpsertBlock(ctx, block) + + // then + if tc.shouldSucceed { + require.NotEqual(t, uint64(0), blockID) + require.NoError(t, err) + } else { + require.Equal(t, uint64(0), blockID) + require.ErrorIs(t, err, store.ErrFailedToInsertBlock) + } + }) + } +} diff --git a/internal/blocktx/store/postgresql/update_block_statuses.go b/internal/blocktx/store/postgresql/update_block_statuses.go index dc5e19dd3..249fe4295 100644 --- a/internal/blocktx/store/postgresql/update_block_statuses.go +++ b/internal/blocktx/store/postgresql/update_block_statuses.go @@ -2,6 +2,7 @@ package postgresql import ( "context" + "errors" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store" @@ -11,22 +12,47 @@ import ( func (p *PostgreSQL) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { q := ` UPDATE blocktx.blocks b - SET status = updates.status - FROM (SELECT * FROM UNNEST($1::BYTEA[], $2::INTEGER[]) AS u(hash, status)) AS updates + SET status = updates.status, is_longest = updates.is_longest + FROM ( + SELECT * FROM UNNEST($1::BYTEA[], $2::INTEGER[], $3::BOOLEAN[]) AS u(hash, status, is_longest) + WHERE is_longest = $4 + ) AS updates WHERE b.hash = updates.hash ` blockHashes := make([][]byte, len(blockStatusUpdates)) statuses := make([]blocktx_api.Status, len(blockStatusUpdates)) + isLongest := make([]bool, len(blockStatusUpdates)) for i, update := range blockStatusUpdates { blockHashes[i] = update.Hash statuses[i] = update.Status + isLongest[i] = update.Status == blocktx_api.Status_LONGEST } - _, err := p.db.ExecContext(ctx, q, pq.Array(blockHashes), pq.Array(statuses)) + tx, err := p.db.Begin() if err != nil { - return err + return errors.Join(store.ErrFailedToUpdateBlockStatuses, err) + } + defer func() { + _ = tx.Rollback() + }() + + // first update blocks that are changing statuses to non-LONGEST + _, err = tx.ExecContext(ctx, q, pq.Array(blockHashes), pq.Array(statuses), pq.Array(isLongest), false) + if err != nil { + return errors.Join(store.ErrFailedToUpdateBlockStatuses, err) + } + + // then update blocks that are changing statuses to LONGEST + _, err = tx.ExecContext(ctx, q, pq.Array(blockHashes), pq.Array(statuses), pq.Array(isLongest), true) + if err != nil { + return errors.Join(store.ErrFailedToUpdateBlockStatuses, err) + } + + err = tx.Commit() + if err != nil { + return errors.Join(store.ErrFailedToUpdateBlockStatuses, err) } return nil diff --git a/internal/blocktx/store/postgresql/upsert_block.go b/internal/blocktx/store/postgresql/upsert_block.go new file mode 100644 index 000000000..cab0188fb --- /dev/null +++ b/internal/blocktx/store/postgresql/upsert_block.go @@ -0,0 +1,57 @@ +package postgresql + +import ( + "context" + "errors" + + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/bitcoin-sv/arc/internal/blocktx/store" + "github.com/bitcoin-sv/arc/internal/tracing" +) + +func (p *PostgreSQL) UpsertBlock(ctx context.Context, block *blocktx_api.Block) (blockID uint64, err error) { + ctx, span := tracing.StartTracing(ctx, "UpsertBlock", p.tracingEnabled, p.tracingAttributes...) 
+ defer func() { + tracing.EndTracing(span, err) + }() + + // This query will insert a block ONLY if one of the 3 conditions is met: + // 1. Block being inserted is `ORPHANED` or `LONGEST` and there's no previous block in the database + // 2. The block being inserted has the same status as its previous block + // 3. The block being inserted has status `STALE` but the previous block was `LONGEST` + // Any other situation would mean an error in block processing + // (probably because of another block being inserted by another blocktx instance at the same time) + // and requires the block to be received and processed again. + qInsert := ` + INSERT INTO blocktx.blocks (hash, prevhash, merkleroot, height, status, chainwork, is_longest) + SELECT v.hash, v.prevhash, v.merkleroot, v.height, v.status, v.chainwork, v.is_longest + FROM (VALUES ($1::BYTEA, $2::BYTEA, $3::BYTEA, $4::BIGINT, $5::INTEGER, $6::TEXT, $7::BOOLEAN)) + AS v(hash, prevhash, merkleroot, height, status, chainwork, is_longest) + LEFT JOIN blocktx.blocks AS prevblock ON prevblock.hash = v.prevhash + WHERE ((v.status = $8 OR v.status = $9) AND prevblock.id IS NULL) + OR prevblock.status = $5 + OR (prevblock.status = $9 AND $5 = $10) + ON CONFLICT (hash) DO UPDATE SET status = EXCLUDED.status + RETURNING id + ` + + row := p.db.QueryRowContext(ctx, qInsert, + block.GetHash(), + block.GetPreviousHash(), + block.GetMerkleRoot(), + block.GetHeight(), + block.GetStatus(), + block.GetChainwork(), + block.GetStatus() == blocktx_api.Status_LONGEST, + blocktx_api.Status_ORPHANED, + blocktx_api.Status_LONGEST, + blocktx_api.Status_STALE, + ) + + err = row.Scan(&blockID) + if err != nil { + return 0, errors.Join(store.ErrFailedToInsertBlock, err) + } + + return blockID, nil +} diff --git a/internal/blocktx/store/postgresql/upsert_block_transactions.go b/internal/blocktx/store/postgresql/upsert_block_transactions.go index 090123aab..3e9e9a2bd 100644 --- a/internal/blocktx/store/postgresql/upsert_block_transactions.go +++ b/internal/blocktx/store/postgresql/upsert_block_transactions.go @@ -3,7 +3,6 @@ package postgresql import ( "context" "errors" - "fmt" "github.com/lib/pq" "go.opentelemetry.io/otel/attribute" @@ -12,8 +11,8 @@ import ( "github.com/bitcoin-sv/arc/internal/tracing" ) -// UpsertBlockTransactions upserts the transaction hashes for a given block hash and returns updated registered transactions hashes. -func (p *PostgreSQL) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) (registeredRows []store.TxWithMerklePath, err error) { +// UpsertBlockTransactions upserts the transaction hashes for a given block hash. +func (p *PostgreSQL) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) (err error) { ctx, span := tracing.StartTracing(ctx, "UpsertBlockTransactions", p.tracingEnabled, append(p.tracingAttributes, attribute.Int("updates", len(txsWithMerklePaths)))...)
defer func() { tracing.EndTracing(span, err) @@ -45,45 +44,10 @@ func (p *PostgreSQL) UpsertBlockTransactions(ctx context.Context, blockID uint64 ON CONFLICT(blockid, txid) DO NOTHING; ` - qRegisteredTransactions := ` - SELECT - t.hash, - m.merkle_path - FROM blocktx.transactions t - JOIN blocktx.block_transactions_map AS m ON t.id = m.txid - WHERE m.blockid = $1 AND t.is_registered = TRUE AND t.hash = ANY($2) - ` - _, err = p.db.ExecContext(ctx, qUpsertTransactions, blockID, pq.Array(txHashesBytes), pq.Array(merklePaths)) if err != nil { - return nil, errors.Join(store.ErrFailedToExecuteTxUpdateQuery, err) - } - - rows, err := p.db.QueryContext(ctx, qRegisteredTransactions, blockID, pq.Array(txHashesBytes)) - if err != nil { - return nil, fmt.Errorf("failed to get registered transactions for block with id %d: %v", blockID, err) - } - defer rows.Close() - - registeredRows = make([]store.TxWithMerklePath, 0) - - for rows.Next() { - var txHash []byte - var merklePath string - err = rows.Scan(&txHash, &merklePath) - if err != nil { - return nil, errors.Join(store.ErrFailedToGetRows, err) - } - - registeredRows = append(registeredRows, store.TxWithMerklePath{ - Hash: txHash, - MerklePath: merklePath, - }) - } - - if err := rows.Err(); err != nil { - return nil, fmt.Errorf("error getting registered transactions for block with id %d: %v", blockID, err) + return errors.Join(store.ErrFailedToExecuteTxUpdateQuery, err) } - return registeredRows, nil + return nil } diff --git a/internal/blocktx/store/postgresql/verify_merkle_roots.go b/internal/blocktx/store/postgresql/verify_merkle_roots.go index 9f303d719..0da1d452b 100644 --- a/internal/blocktx/store/postgresql/verify_merkle_roots.go +++ b/internal/blocktx/store/postgresql/verify_merkle_roots.go @@ -17,7 +17,7 @@ func (p *PostgreSQL) VerifyMerkleRoots( maxAllowedBlockHeightMismatch int, ) (*blocktx_api.MerkleRootVerificationResponse, error) { qTopHeight := ` - SELECT MAX(b.height), MIN(b.height) FROM blocktx.blocks b WHERE b.orphanedyn = false + SELECT MAX(b.height), MIN(b.height) FROM blocktx.blocks b WHERE b.is_longest = true AND b.processed_at IS NOT NULL ` var topHeight uint64 @@ -32,7 +32,7 @@ func (p *PostgreSQL) VerifyMerkleRoots( } qMerkleRoot := ` - SELECT b.height FROM blocktx.blocks b WHERE b.merkleroot = $1 AND b.height = $2 AND b.orphanedyn = false + SELECT b.height FROM blocktx.blocks b WHERE b.merkleroot = $1 AND b.height = $2 AND b.is_longest = true AND b.processed_at IS NOT NULL ` var unverifiedBlockHeights []uint64 diff --git a/internal/blocktx/store/store.go b/internal/blocktx/store/store.go index 97d45b7dd..198bedac1 100644 --- a/internal/blocktx/store/store.go +++ b/internal/blocktx/store/store.go @@ -15,6 +15,7 @@ var ( ErrUnableToPrepareStatement = errors.New("unable to prepare statement") ErrUnableToDeleteRows = errors.New("unable to delete rows") ErrFailedToInsertBlock = errors.New("failed to insert block") + ErrFailedToUpdateBlockStatuses = errors.New("failed to update block statuses") ErrFailedToOpenDB = errors.New("failed to open postgres database") ErrFailedToInsertTransactions = errors.New("failed to bulk insert transactions") ErrFailedToGetRows = errors.New("failed to get rows") @@ -30,16 +31,18 @@ type Stats struct { type BlocktxStore interface { RegisterTransactions(ctx context.Context, txHashes [][]byte) (updatedTxs []*chainhash.Hash, err error) GetBlock(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) - GetBlockByHeight(ctx context.Context, height uint64, status blocktx_api.Status) 
(*blocktx_api.Block, error) + GetLongestBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) GetChainTip(ctx context.Context) (*blocktx_api.Block, error) UpsertBlock(ctx context.Context, block *blocktx_api.Block) (uint64, error) - UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []TxWithMerklePath) (registeredTxs []TxWithMerklePath, err error) + UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []TxWithMerklePath) error MarkBlockAsDone(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error GetBlockGaps(ctx context.Context, heightRange int) ([]*BlockGap, error) ClearBlocktxTable(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) - GetMinedTransactions(ctx context.Context, hashes []*chainhash.Hash) ([]GetMinedTransactionResult, error) + GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]TransactionBlock, error) GetLongestChainFromHeight(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) + GetOrphansBackToNonOrphanAncestor(ctx context.Context, hash []byte) (orphans []*blocktx_api.Block, nonOrphanAncestor *blocktx_api.Block, err error) + GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]TransactionBlock, error) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdates []BlockStatusUpdate) error GetStats(ctx context.Context) (*Stats, error) diff --git a/internal/callbacker/callbacker_api/callbacker_api.pb.go b/internal/callbacker/callbacker_api/callbacker_api.pb.go index 7d5595532..51705d010 100644 --- a/internal/callbacker/callbacker_api/callbacker_api.pb.go +++ b/internal/callbacker/callbacker_api/callbacker_api.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: // protoc-gen-go v1.34.2 -// protoc v5.28.2 +// protoc v5.28.3 // source: internal/callbacker/callbacker_api/callbacker_api.proto package callbacker_api @@ -40,6 +40,7 @@ const ( Status_SEEN_ON_NETWORK Status = 90 Status_DOUBLE_SPEND_ATTEMPTED Status = 100 Status_REJECTED Status = 110 + Status_MINED_IN_STALE_BLOCK Status = 115 Status_MINED Status = 120 ) @@ -58,6 +59,7 @@ var ( 90: "SEEN_ON_NETWORK", 100: "DOUBLE_SPEND_ATTEMPTED", 110: "REJECTED", + 115: "MINED_IN_STALE_BLOCK", 120: "MINED", } Status_value = map[string]int32{ @@ -73,6 +75,7 @@ var ( "SEEN_ON_NETWORK": 90, "DOUBLE_SPEND_ATTEMPTED": 100, "REJECTED": 110, + "MINED_IN_STALE_BLOCK": 115, "MINED": 120, } ) @@ -363,7 +366,7 @@ var file_internal_callbacker_callbacker_api_callbacker_api_proto_rawDesc = []byt 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x42, 0x61, 0x74, 0x63, 0x68, - 0x2a, 0x83, 0x02, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, + 0x2a, 0x9d, 0x02, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x51, 0x55, 0x45, 0x55, 0x45, 0x44, 0x10, 0x0a, 0x12, 0x0c, 0x0a, 0x08, 0x52, 0x45, 0x43, 0x45, 0x49, 0x56, 0x45, 0x44, 0x10, 0x14, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x4f, 0x52, 0x45, 0x44, 0x10, 0x1e, 0x12, 0x18, @@ -378,20 +381,22 @@ var file_internal_callbacker_callbacker_api_callbacker_api_proto_rawDesc = []byt 0x53, 0x45, 0x45, 0x4e, 0x5f, 0x4f, 0x4e, 0x5f, 0x4e, 0x45, 0x54, 0x57, 0x4f, 0x52, 0x4b, 0x10, 0x5a, 0x12, 0x1a, 0x0a, 0x16, 0x44, 0x4f, 0x55, 0x42, 0x4c, 0x45, 0x5f, 0x53, 0x50, 0x45, 0x4e, 0x44, 0x5f, 0x41, 0x54, 0x54, 0x45, 0x4d, 0x50, 0x54, 0x45, 0x44, 0x10, 0x64, 0x12, 0x0c, 0x0a, - 0x08, 0x52, 0x45, 0x4a, 0x45, 0x43, 0x54, 0x45, 0x44, 0x10, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x4d, - 0x49, 0x4e, 0x45, 0x44, 0x10, 0x78, 0x32, 0xa2, 0x01, 0x0a, 0x0d, 0x43, 0x61, 0x6c, 0x6c, 0x62, - 0x61, 0x63, 0x6b, 0x65, 0x72, 0x41, 0x50, 0x49, 0x12, 0x42, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, - 0x74, 0x68, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1e, 0x2e, 0x63, 0x61, 0x6c, - 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x48, 0x65, 0x61, 0x6c, - 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4d, 0x0a, 0x0c, - 0x53, 0x65, 0x6e, 0x64, 0x43, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x12, 0x23, 0x2e, 0x63, - 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x65, - 0x6e, 0x64, 0x43, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x42, 0x12, 0x5a, 0x10, 0x2e, - 0x3b, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x5f, 0x61, 0x70, 0x69, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x08, 0x52, 0x45, 0x4a, 0x45, 0x43, 0x54, 0x45, 0x44, 0x10, 0x6e, 0x12, 0x18, 0x0a, 0x14, 0x4d, + 0x49, 0x4e, 0x45, 0x44, 0x5f, 0x49, 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x4c, 0x45, 0x5f, 0x42, 0x4c, + 0x4f, 0x43, 0x4b, 0x10, 0x73, 0x12, 0x09, 0x0a, 0x05, 0x4d, 0x49, 0x4e, 0x45, 0x44, 
0x10, 0x78, + 0x32, 0xa2, 0x01, 0x0a, 0x0d, 0x43, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x41, + 0x50, 0x49, 0x12, 0x42, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x12, 0x16, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, + 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1e, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x65, + 0x72, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4d, 0x0a, 0x0c, 0x53, 0x65, 0x6e, 0x64, 0x43, 0x61, + 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x12, 0x23, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, + 0x6b, 0x65, 0x72, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x65, 0x6e, 0x64, 0x43, 0x61, 0x6c, 0x6c, + 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, + 0x70, 0x74, 0x79, 0x22, 0x00, 0x42, 0x12, 0x5a, 0x10, 0x2e, 0x3b, 0x63, 0x61, 0x6c, 0x6c, 0x62, + 0x61, 0x63, 0x6b, 0x65, 0x72, 0x5f, 0x61, 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x33, } var ( diff --git a/internal/callbacker/callbacker_api/callbacker_api.proto b/internal/callbacker/callbacker_api/callbacker_api.proto index a0c4bb38b..3a93795c2 100644 --- a/internal/callbacker/callbacker_api/callbacker_api.proto +++ b/internal/callbacker/callbacker_api/callbacker_api.proto @@ -28,6 +28,7 @@ enum Status { SEEN_ON_NETWORK = 90; DOUBLE_SPEND_ATTEMPTED = 100; REJECTED = 110; + MINED_IN_STALE_BLOCK = 115; MINED = 120; } diff --git a/internal/callbacker/callbacker_api/callbacker_api_grpc.pb.go b/internal/callbacker/callbacker_api/callbacker_api_grpc.pb.go index e9bedb75d..c2c597bd8 100644 --- a/internal/callbacker/callbacker_api/callbacker_api_grpc.pb.go +++ b/internal/callbacker/callbacker_api/callbacker_api_grpc.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. // versions: // - protoc-gen-go-grpc v1.5.1 -// - protoc v5.28.2 +// - protoc v5.28.3 // source: internal/callbacker/callbacker_api/callbacker_api.proto package callbacker_api diff --git a/internal/metamorph/integration_test/double_spend_integration_test.go b/internal/metamorph/integration_test/double_spend_integration_test.go index dd6ce5ef6..9d8452c46 100644 --- a/internal/metamorph/integration_test/double_spend_integration_test.go +++ b/internal/metamorph/integration_test/double_spend_integration_test.go @@ -102,7 +102,7 @@ func TestDoubleSpendDetection(t *testing.T) { logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug})) - statusMessageChannel := make(chan *metamorph.PeerTxMessage, 10) + statusMessageChannel := make(chan *metamorph.TxStatusMessage, 10) minedTxChannel := make(chan *blocktx_api.TransactionBlock, 10) mockedZMQ := &mocks.ZMQIMock{ diff --git a/internal/metamorph/metamorph_api/metamorph_api.pb.go b/internal/metamorph/metamorph_api/metamorph_api.pb.go index dab2a0866..a842d084b 100644 --- a/internal/metamorph/metamorph_api/metamorph_api.pb.go +++ b/internal/metamorph/metamorph_api/metamorph_api.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: // protoc-gen-go v1.34.2 -// protoc v5.28.2 +// protoc v5.28.3 // source: internal/metamorph/metamorph_api/metamorph_api.proto package metamorph_api @@ -40,6 +40,7 @@ const ( Status_SEEN_ON_NETWORK Status = 90 Status_DOUBLE_SPEND_ATTEMPTED Status = 100 Status_REJECTED Status = 110 + Status_MINED_IN_STALE_BLOCK Status = 115 Status_MINED Status = 120 ) @@ -58,6 +59,7 @@ var ( 90: "SEEN_ON_NETWORK", 100: "DOUBLE_SPEND_ATTEMPTED", 110: "REJECTED", + 115: "MINED_IN_STALE_BLOCK", 120: "MINED", } Status_value = map[string]int32{ @@ -73,6 +75,7 @@ var ( "SEEN_ON_NETWORK": 90, "DOUBLE_SPEND_ATTEMPTED": 100, "REJECTED": 110, + "MINED_IN_STALE_BLOCK": 115, "MINED": 120, } ) @@ -1164,7 +1167,7 @@ var file_internal_metamorph_metamorph_api_metamorph_api_proto_rawDesc = []byte{ 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x74, 0x72, 0x61, - 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2a, 0x83, 0x02, 0x0a, 0x06, 0x53, 0x74, + 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2a, 0x9d, 0x02, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x51, 0x55, 0x45, 0x55, 0x45, 0x44, 0x10, 0x0a, 0x12, 0x0c, 0x0a, 0x08, 0x52, 0x45, 0x43, 0x45, 0x49, 0x56, 0x45, 0x44, 0x10, 0x14, 0x12, 0x0a, 0x0a, 0x06, 0x53, @@ -1180,55 +1183,57 @@ var file_internal_metamorph_metamorph_api_metamorph_api_proto_rawDesc = []byte{ 0x5f, 0x4e, 0x45, 0x54, 0x57, 0x4f, 0x52, 0x4b, 0x10, 0x5a, 0x12, 0x1a, 0x0a, 0x16, 0x44, 0x4f, 0x55, 0x42, 0x4c, 0x45, 0x5f, 0x53, 0x50, 0x45, 0x4e, 0x44, 0x5f, 0x41, 0x54, 0x54, 0x45, 0x4d, 0x50, 0x54, 0x45, 0x44, 0x10, 0x64, 0x12, 0x0c, 0x0a, 0x08, 0x52, 0x45, 0x4a, 0x45, 0x43, 0x54, - 0x45, 0x44, 0x10, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x4d, 0x49, 0x4e, 0x45, 0x44, 0x10, 0x78, 0x32, - 0xdd, 0x05, 0x0a, 0x0c, 0x4d, 0x65, 0x74, 0x61, 0x4d, 0x6f, 0x72, 0x70, 0x68, 0x41, 0x50, 0x49, - 0x12, 0x41, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, - 0x74, 0x79, 0x1a, 0x1d, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, - 0x70, 0x69, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x22, 0x00, 0x12, 0x57, 0x0a, 0x0e, 0x50, 0x75, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x21, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, - 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, - 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x00, 0x12, 0x5b, 0x0a, 0x0f, - 0x50, 0x75, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, - 0x22, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, + 0x45, 0x44, 0x10, 0x6e, 0x12, 0x18, 0x0a, 0x14, 0x4d, 0x49, 0x4e, 0x45, 0x44, 0x5f, 0x49, 0x4e, + 0x5f, 0x53, 0x54, 0x41, 0x4c, 0x45, 0x5f, 0x42, 0x4c, 0x4f, 0x43, 0x4b, 0x10, 0x73, 0x12, 0x09, + 0x0a, 0x05, 0x4d, 0x49, 0x4e, 
0x45, 0x44, 0x10, 0x78, 0x32, 0xdd, 0x05, 0x0a, 0x0c, 0x4d, 0x65, + 0x74, 0x61, 0x4d, 0x6f, 0x72, 0x70, 0x68, 0x41, 0x50, 0x49, 0x12, 0x41, 0x0a, 0x06, 0x48, 0x65, + 0x61, 0x6c, 0x74, 0x68, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1d, 0x2e, 0x6d, + 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x48, 0x65, 0x61, + 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x57, 0x0a, + 0x0e, 0x50, 0x75, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, + 0x21, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x73, 0x1a, 0x22, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, - 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x22, 0x00, 0x12, 0x57, 0x0a, 0x0e, 0x47, 0x65, 0x74, - 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x2e, 0x6d, 0x65, - 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, - 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, - 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x22, 0x00, 0x12, 0x5a, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x28, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, + 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, + 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, + 0x61, 0x74, 0x75, 0x73, 0x22, 0x00, 0x12, 0x5b, 0x0a, 0x0f, 0x50, 0x75, 0x74, 0x54, 0x72, 0x61, + 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x22, 0x2e, 0x6d, 0x65, 0x74, 0x61, + 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x1a, 0x22, 0x2e, + 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, + 0x73, 0x22, 0x00, 0x12, 0x57, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x1b, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, - 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x00, 0x12, 0x63, - 0x0a, 0x14, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, - 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 
0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x20, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, - 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x22, 0x00, 0x12, 0x68, 0x0a, 0x11, 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, 0x63, 0x6b, - 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, - 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, - 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x28, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, - 0x69, 0x2e, 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, 0x4e, - 0x61, 0x6d, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x50, 0x0a, - 0x09, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x12, 0x1f, 0x2e, 0x6d, 0x65, 0x74, - 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, - 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x6d, 0x65, - 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, - 0x72, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, - 0x11, 0x5a, 0x0f, 0x2e, 0x3b, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, - 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, + 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, + 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x00, 0x12, 0x5a, 0x0a, 0x0f, + 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, + 0x28, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, + 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x6d, 0x65, 0x74, 0x61, + 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x00, 0x12, 0x63, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x54, + 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, + 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x6d, 0x65, 0x74, 0x61, + 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x00, 0x12, 0x68, 0x0a, + 0x11, 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, + 0x6d, 0x65, 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, + 0x70, 0x69, 0x2e, 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, + 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x6d, 0x65, + 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x65, 0x74, 0x55, + 0x6e, 0x6c, 0x6f, 0x63, 
0x6b, 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x50, 0x0a, 0x09, 0x43, 0x6c, 0x65, 0x61, 0x72, + 0x44, 0x61, 0x74, 0x61, 0x12, 0x1f, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, + 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, + 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x11, 0x5a, 0x0f, 0x2e, 0x3b, 0x6d, + 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/internal/metamorph/metamorph_api/metamorph_api.proto b/internal/metamorph/metamorph_api/metamorph_api.proto index 203118e6c..ed24b04b6 100644 --- a/internal/metamorph/metamorph_api/metamorph_api.proto +++ b/internal/metamorph/metamorph_api/metamorph_api.proto @@ -21,8 +21,9 @@ enum Status { ACCEPTED_BY_NETWORK = 70; SEEN_IN_ORPHAN_MEMPOOL = 80; SEEN_ON_NETWORK = 90; - DOUBLE_SPEND_ATTEMPTED = 100; + DOUBLE_SPEND_ATTEMPTED = 100; REJECTED = 110; + MINED_IN_STALE_BLOCK = 115; MINED = 120; } diff --git a/internal/metamorph/metamorph_api/metamorph_api_grpc.pb.go b/internal/metamorph/metamorph_api/metamorph_api_grpc.pb.go index f9257b91d..63ab512ee 100644 --- a/internal/metamorph/metamorph_api/metamorph_api_grpc.pb.go +++ b/internal/metamorph/metamorph_api/metamorph_api_grpc.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. // versions: // - protoc-gen-go-grpc v1.5.1 -// - protoc v5.28.2 +// - protoc v5.28.3 // source: internal/metamorph/metamorph_api/metamorph_api.proto package metamorph_api diff --git a/internal/metamorph/peer_handler.go b/internal/metamorph/peer_handler.go index 9c8f403e6..64ee94e9d 100644 --- a/internal/metamorph/peer_handler.go +++ b/internal/metamorph/peer_handler.go @@ -15,13 +15,13 @@ var ErrTxRejectedByPeer = errors.New("transaction rejected by peer") type PeerHandler struct { store store.MetamorphStore - messageCh chan *PeerTxMessage + messageCh chan *TxStatusMessage cancelAll context.CancelFunc ctx context.Context } -func NewPeerHandler(s store.MetamorphStore, messageCh chan *PeerTxMessage) *PeerHandler { +func NewPeerHandler(s store.MetamorphStore, messageCh chan *TxStatusMessage) *PeerHandler { ph := &PeerHandler{ store: s, messageCh: messageCh, @@ -37,7 +37,7 @@ func NewPeerHandler(s store.MetamorphStore, messageCh chan *PeerTxMessage) *Peer // HandleTransactionSent is called when a transaction is sent to a peer. func (m *PeerHandler) HandleTransactionSent(msg *wire.MsgTx, peer p2p.PeerI) error { hash := msg.TxHash() - m.messageCh <- &PeerTxMessage{ + m.messageCh <- &TxStatusMessage{ Hash: &hash, Status: metamorph_api.Status_SENT_TO_NETWORK, Peer: peer.String(), @@ -49,7 +49,7 @@ func (m *PeerHandler) HandleTransactionSent(msg *wire.MsgTx, peer p2p.PeerI) err // HandleTransactionAnnouncement is a message sent to the PeerHandler when a transaction INV message is received from a peer. func (m *PeerHandler) HandleTransactionAnnouncement(msg *wire.InvVect, peer p2p.PeerI) error { select { - case m.messageCh <- &PeerTxMessage{ + case m.messageCh <- &TxStatusMessage{ Hash: &msg.Hash, Status: metamorph_api.Status_SEEN_ON_NETWORK, Peer: peer.String(), @@ -62,7 +62,7 @@ func (m *PeerHandler) HandleTransactionAnnouncement(msg *wire.InvVect, peer p2p. 
// HandleTransactionRejection is called when a transaction is rejected by a peer. func (m *PeerHandler) HandleTransactionRejection(rejMsg *wire.MsgReject, peer p2p.PeerI) error { - m.messageCh <- &PeerTxMessage{ + m.messageCh <- &TxStatusMessage{ Hash: &rejMsg.Hash, Status: metamorph_api.Status_REJECTED, Peer: peer.String(), @@ -77,7 +77,7 @@ func (m *PeerHandler) HandleTransactionsGet(msgs []*wire.InvVect, peer p2p.PeerI hashes := make([][]byte, len(msgs)) for i, msg := range msgs { - m.messageCh <- &PeerTxMessage{ + m.messageCh <- &TxStatusMessage{ Hash: &msg.Hash, Status: metamorph_api.Status_REQUESTED_BY_NETWORK, Peer: peer.String(), @@ -93,7 +93,7 @@ func (m *PeerHandler) HandleTransactionsGet(msgs []*wire.InvVect, peer p2p.PeerI func (m *PeerHandler) HandleTransaction(msg *wire.MsgTx, peer p2p.PeerI) error { hash := msg.TxHash() - m.messageCh <- &PeerTxMessage{ + m.messageCh <- &TxStatusMessage{ Hash: &hash, Status: metamorph_api.Status_SEEN_ON_NETWORK, Peer: peer.String(), diff --git a/internal/metamorph/peer_handler_test.go b/internal/metamorph/peer_handler_test.go index b6a571451..e9dd4805d 100644 --- a/internal/metamorph/peer_handler_test.go +++ b/internal/metamorph/peer_handler_test.go @@ -17,7 +17,7 @@ import ( ) func TestPeerHandler(t *testing.T) { - messageCh := make(chan *metamorph.PeerTxMessage, 100) + messageCh := make(chan *metamorph.TxStatusMessage, 100) mtmStore := &storeMocks.MetamorphStoreMock{ GetRawTxsFunc: func(_ context.Context, _ [][]byte) ([][]byte, error) { rawTx := []byte("1234") @@ -36,7 +36,7 @@ func TestPeerHandler(t *testing.T) { msgTx := wire.NewMsgTx(70001) hash := msgTx.TxHash() - expectedMsg := &metamorph.PeerTxMessage{ + expectedMsg := &metamorph.TxStatusMessage{ Hash: &hash, Status: metamorph_api.Status_SENT_TO_NETWORK, Peer: "mock_peer", @@ -64,7 +64,7 @@ func TestPeerHandler(t *testing.T) { msgInv := wire.NewInvVect(wire.InvTypeBlock, hash) require.NoError(t, err) - expectedMsg := &metamorph.PeerTxMessage{ + expectedMsg := &metamorph.TxStatusMessage{ Hash: &msgInv.Hash, Status: metamorph_api.Status_SEEN_ON_NETWORK, Peer: "mock_peer", @@ -88,7 +88,7 @@ func TestPeerHandler(t *testing.T) { // given msgReject := wire.NewMsgReject("command", wire.RejectMalformed, "malformed") - expectedMsg := &metamorph.PeerTxMessage{ + expectedMsg := &metamorph.TxStatusMessage{ Hash: &msgReject.Hash, Status: metamorph_api.Status_REJECTED, Peer: "mock_peer", @@ -113,7 +113,7 @@ func TestPeerHandler(t *testing.T) { // given txsCount := 2 invMsgs := make([]*wire.InvVect, txsCount) - expectedMsgs := make([]*metamorph.PeerTxMessage, txsCount) + expectedMsgs := make([]*metamorph.TxStatusMessage, txsCount) for i := 0; i < txsCount; i++ { hash, err := chainhash.NewHashFromStr("1234") @@ -124,7 +124,7 @@ func TestPeerHandler(t *testing.T) { invMsgs[i] = msgInv - expectedMsgs[i] = &metamorph.PeerTxMessage{ + expectedMsgs[i] = &metamorph.TxStatusMessage{ Hash: hash, Status: metamorph_api.Status_REQUESTED_BY_NETWORK, Peer: "mock_peer", @@ -154,7 +154,7 @@ func TestPeerHandler(t *testing.T) { msgTx := wire.NewMsgTx(70001) hash := msgTx.TxHash() - expectedMsg := &metamorph.PeerTxMessage{ + expectedMsg := &metamorph.TxStatusMessage{ Hash: &hash, Status: metamorph_api.Status_SEEN_ON_NETWORK, Peer: "mock_peer", diff --git a/internal/metamorph/processor.go b/internal/metamorph/processor.go index 7be86546d..693fe476b 100644 --- a/internal/metamorph/processor.go +++ b/internal/metamorph/processor.go @@ -74,7 +74,7 @@ type Processor struct { callbackSender CallbackSender responseProcessor 
*ResponseProcessor - statusMessageCh chan *PeerTxMessage + statusMessageCh chan *TxStatusMessage waitGroup *sync.WaitGroup @@ -111,7 +111,7 @@ type CallbackSender interface { SendCallback(ctx context.Context, data *store.Data) } -func NewProcessor(s store.MetamorphStore, c cache.Store, pm p2p.PeerManagerI, statusMessageChannel chan *PeerTxMessage, opts ...Option) (*Processor, error) { +func NewProcessor(s store.MetamorphStore, c cache.Store, pm p2p.PeerManagerI, statusMessageChannel chan *TxStatusMessage, opts ...Option) (*Processor, error) { if s == nil { return nil, ErrStoreNil } diff --git a/internal/metamorph/processor_test.go b/internal/metamorph/processor_test.go index 90bf3af83..de1b0c690 100644 --- a/internal/metamorph/processor_test.go +++ b/internal/metamorph/processor_test.go @@ -498,7 +498,7 @@ func TestStartSendStatusForTransaction(t *testing.T) { }, } - statusMessageChannel := make(chan *metamorph.PeerTxMessage, 10) + statusMessageChannel := make(chan *metamorph.TxStatusMessage, 10) sut, err := metamorph.NewProcessor(metamorphStore, cStore, pm, statusMessageChannel, metamorph.WithNow(func() time.Time { return time.Date(2023, 10, 1, 13, 0, 0, 0, time.UTC) }), metamorph.WithProcessStatusUpdatesInterval(200*time.Millisecond), metamorph.WithProcessStatusUpdatesBatchSize(3), metamorph.WithCallbackSender(callbackSender)) require.NoError(t, err) @@ -509,7 +509,7 @@ func TestStartSendStatusForTransaction(t *testing.T) { assert.Equal(t, 0, sut.GetProcessorMapSize()) for _, testInput := range tc.inputs { - statusMessageChannel <- &metamorph.PeerTxMessage{ + statusMessageChannel <- &metamorph.TxStatusMessage{ Hash: testInput.hash, Status: testInput.newStatus, Err: testInput.statusErr, diff --git a/internal/metamorph/store/postgresql/postgres.go b/internal/metamorph/store/postgresql/postgres.go index 3f79d15fd..46d66a600 100644 --- a/internal/metamorph/store/postgresql/postgres.go +++ b/internal/metamorph/store/postgresql/postgres.go @@ -589,7 +589,6 @@ func (p *PostgreSQL) GetSeenOnNetwork(ctx context.Context, since time.Time, unti defer rows.Close() res, err = getStoreDataFromRows(rows) - if err != nil { if rollBackErr := tx.Rollback(); rollBackErr != nil { return nil, errors.Join(err, fmt.Errorf("failed to rollback: %v", rollBackErr)) @@ -795,7 +794,6 @@ func (p *PostgreSQL) UpdateDoubleSpend(ctx context.Context, updates []store.Upda defer rows.Close() res, err = getStoreDataFromRows(rows) - if err != nil { if rollbackErr := tx.Rollback(); rollbackErr != nil { return nil, errors.Join(err, fmt.Errorf("failed to rollback: %v", rollbackErr)) @@ -825,22 +823,27 @@ func (p *PostgreSQL) UpdateMined(ctx context.Context, txsBlocks []*blocktx_api.T blockHashes := make([][]byte, len(txsBlocks)) blockHeights := make([]uint64, len(txsBlocks)) merklePaths := make([]string, len(txsBlocks)) + statuses := make([]metamorph_api.Status, len(txsBlocks)) for i, tx := range txsBlocks { txHashes[i] = tx.TransactionHash blockHashes[i] = tx.BlockHash blockHeights[i] = tx.BlockHeight merklePaths[i] = tx.MerklePath + statuses[i] = metamorph_api.Status_MINED + if tx.BlockStatus == blocktx_api.Status_STALE { + statuses[i] = metamorph_api.Status_MINED_IN_STALE_BLOCK + } } qBulkUpdate := ` UPDATE metamorph.transactions t SET - status=$1, + status=bulk_query.mined_status, block_hash=bulk_query.block_hash, block_height=bulk_query.block_height, merkle_path=bulk_query.merkle_path, - last_modified=$2, + last_modified=$1, status_history=status_history || json_build_object( 'status', status, 'timestamp', last_modified @@ 
-849,8 +852,8 @@ func (p *PostgreSQL) UpdateMined(ctx context.Context, txsBlocks []*blocktx_api.T ( SELECT * FROM - UNNEST($3::BYTEA[], $4::BYTEA[], $5::BIGINT[], $6::TEXT[]) - AS t(hash, block_hash, block_height, merkle_path) + UNNEST($2::INT[], $3::BYTEA[], $4::BYTEA[], $5::BIGINT[], $6::TEXT[]) + AS t(mined_status, hash, block_hash, block_height, merkle_path) ) AS bulk_query WHERE t.hash=bulk_query.hash @@ -889,7 +892,7 @@ func (p *PostgreSQL) UpdateMined(ctx context.Context, txsBlocks []*blocktx_api.T competingTxsData := getCompetingTxsFromRows(rows) rejectedResponses := updateDoubleSpendRejected(ctx, competingTxsData, tx) - rows, err = tx.QueryContext(ctx, qBulkUpdate, metamorph_api.Status_MINED, p.now(), pq.Array(txHashes), pq.Array(blockHashes), pq.Array(blockHeights), pq.Array(merklePaths)) + rows, err = tx.QueryContext(ctx, qBulkUpdate, p.now(), pq.Array(statuses), pq.Array(txHashes), pq.Array(blockHashes), pq.Array(blockHeights), pq.Array(merklePaths)) if err != nil { if rollBackErr := tx.Rollback(); rollBackErr != nil { return nil, errors.Join(err, fmt.Errorf("failed to rollback: %v", rollBackErr)) diff --git a/internal/metamorph/store/postgresql/postgres_test.go b/internal/metamorph/store/postgresql/postgres_test.go index d03cdabf6..3a29f1292 100644 --- a/internal/metamorph/store/postgresql/postgres_test.go +++ b/internal/metamorph/store/postgresql/postgres_test.go @@ -473,6 +473,7 @@ func TestPostgresDB(t *testing.T) { chainHash2 := testutils.RevChainhash(t, "ee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa") chainHash3 := testutils.RevChainhash(t, "a7fd98bd37f9b387dbef4f1a4e4790b9a0d48fb7bbb77455e8f39df0f8909db7") competingHash := testutils.RevChainhash(t, "67fc757d9ed6d119fc0926ae5c82c1a2cf036ec823257cfaea396e49184ec7ff") + chainhash4 := testutils.RevChainhash(t, "3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd") txBlocks := []*blocktx_api.TransactionBlock{ { @@ -480,27 +481,38 @@ func TestPostgresDB(t *testing.T) { BlockHeight: 100, TransactionHash: unminedHash[:], MerklePath: "merkle-path-1", + BlockStatus: blocktx_api.Status_LONGEST, }, { BlockHash: testdata.Block1Hash[:], BlockHeight: 100, TransactionHash: chainHash2[:], MerklePath: "merkle-path-2", + BlockStatus: blocktx_api.Status_LONGEST, }, { BlockHash: testdata.Block1Hash[:], BlockHeight: 100, TransactionHash: testdata.TX3Hash[:], // hash non-existent in db MerklePath: "merkle-path-3", + BlockStatus: blocktx_api.Status_LONGEST, }, { BlockHash: testdata.Block1Hash[:], BlockHeight: 100, TransactionHash: chainHash3[:], // this one has competing transactions MerklePath: "merkle-path-4", + BlockStatus: blocktx_api.Status_LONGEST, + }, + { + BlockHash: testdata.Block2Hash[:], + BlockHeight: 100, + TransactionHash: chainhash4[:], + MerklePath: "merkle-path-5", + BlockStatus: blocktx_api.Status_STALE, // should have status MINED_IN_STALE_BLOCK }, } - expectedUpdates := 4 // 3 for updates + 1 for rejected competing tx + expectedUpdates := 5 // 4 for updates + 1 for rejected competing tx updated, err := postgresDB.UpdateMined(ctx, txBlocks) require.NoError(t, err) @@ -516,14 +528,19 @@ func TestPostgresDB(t *testing.T) { require.Equal(t, "merkle-path-4", updated[1].MerklePath) require.Equal(t, metamorph_api.Status_MINED, updated[1].Status) - require.True(t, unminedHash.IsEqual(updated[2].Hash)) - require.True(t, testdata.Block1Hash.IsEqual(updated[2].BlockHash)) - require.Equal(t, "merkle-path-1", updated[2].MerklePath) - require.Equal(t, metamorph_api.Status_MINED, updated[2].Status) + 
require.True(t, chainhash4.IsEqual(updated[2].Hash)) + require.True(t, testdata.Block2Hash.IsEqual(updated[2].BlockHash)) + require.Equal(t, "merkle-path-5", updated[2].MerklePath) + require.Equal(t, metamorph_api.Status_MINED_IN_STALE_BLOCK, updated[2].Status) + + require.True(t, unminedHash.IsEqual(updated[3].Hash)) + require.True(t, testdata.Block1Hash.IsEqual(updated[3].BlockHash)) + require.Equal(t, "merkle-path-1", updated[3].MerklePath) + require.Equal(t, metamorph_api.Status_MINED, updated[3].Status) - require.True(t, competingHash.IsEqual(updated[3].Hash)) - require.Equal(t, metamorph_api.Status_REJECTED, updated[3].Status) - require.Equal(t, "double spend attempted", updated[3].RejectReason) + require.True(t, competingHash.IsEqual(updated[4].Hash)) + require.Equal(t, metamorph_api.Status_REJECTED, updated[4].Status) + require.Equal(t, "double spend attempted", updated[4].RejectReason) minedReturned, err := postgresDB.Get(ctx, unminedHash[:]) require.NoError(t, err) @@ -647,6 +664,7 @@ func TestPostgresDB(t *testing.T) { BlockHeight: 100, TransactionHash: unminedHash[:], MerklePath: "merkle-path-1", + BlockStatus: blocktx_api.Status_LONGEST, }, } @@ -667,6 +685,35 @@ func TestPostgresDB(t *testing.T) { unmined.LastModified = postgresDB.now() unmined.Status = metamorph_api.Status_MINED require.Equal(t, &unmined, updatedTx) + + // Fourth update - UpdateMined - MINED_IN_STALE_BLOCK + txBlocks = []*blocktx_api.TransactionBlock{ + { + BlockHash: testdata.Block2Hash[:], + BlockHeight: 100, + TransactionHash: unminedHash[:], + MerklePath: "merkle-path-1", + BlockStatus: blocktx_api.Status_STALE, + }, + } + + updated, err = postgresDB.UpdateMined(ctx, txBlocks) + require.NoError(t, err) + require.Len(t, updated, 1) + + updatedTx, err = postgresDB.Get(ctx, unminedHash[:]) + require.NoError(t, err) + + unmined.BlockHeight = 100 + unmined.BlockHash = testdata.Block2Hash + unmined.MerklePath = "merkle-path-1" + unmined.StatusHistory = append(unmined.StatusHistory, &store.Status{ + Status: unmined.Status, + Timestamp: unmined.LastModified, + }) + unmined.LastModified = postgresDB.now() + unmined.Status = metamorph_api.Status_MINED_IN_STALE_BLOCK + require.Equal(t, &unmined, updatedTx) }) t.Run("clear data", func(t *testing.T) { diff --git a/internal/metamorph/types.go b/internal/metamorph/types.go index 8d6e687c3..d8991cfb8 100644 --- a/internal/metamorph/types.go +++ b/internal/metamorph/types.go @@ -20,7 +20,7 @@ type StatusAndError struct { CompetingTxs []string } -type PeerTxMessage struct { +type TxStatusMessage struct { Start time.Time Hash *chainhash.Hash Status metamorph_api.Status diff --git a/internal/metamorph/zmq.go b/internal/metamorph/zmq.go index ae660d270..c583856dc 100644 --- a/internal/metamorph/zmq.go +++ b/internal/metamorph/zmq.go @@ -37,7 +37,7 @@ type subscriptionRequest struct { type ZMQ struct { url *url.URL - statusMessageCh chan<- *PeerTxMessage + statusMessageCh chan<- *TxStatusMessage handler ZMQI logger *slog.Logger } @@ -82,7 +82,7 @@ type ZMQI interface { Subscribe(string, chan []string) error } -func NewZMQ(zmqURL *url.URL, statusMessageCh chan<- *PeerTxMessage, zmqHandler ZMQI, logger *slog.Logger) (*ZMQ, error) { +func NewZMQ(zmqURL *url.URL, statusMessageCh chan<- *TxStatusMessage, zmqHandler ZMQI, logger *slog.Logger) (*ZMQ, error) { if zmqHandler == nil { return nil, ErrNilZMQHandler } @@ -115,7 +115,7 @@ func (z *ZMQ) Start() error { continue } - z.statusMessageCh <- &PeerTxMessage{ + z.statusMessageCh <- &TxStatusMessage{ Start: time.Now(), Hash: hash, 
Status: metamorph_api.Status_ACCEPTED_BY_NETWORK, @@ -131,7 +131,7 @@ func (z *ZMQ) Start() error { } if len(competingTxs) == 0 { - z.statusMessageCh <- &PeerTxMessage{ + z.statusMessageCh <- &TxStatusMessage{ Start: time.Now(), Hash: hash, Status: status, @@ -154,7 +154,7 @@ func (z *ZMQ) Start() error { continue } - z.statusMessageCh <- &PeerTxMessage{ + z.statusMessageCh <- &TxStatusMessage{ Start: time.Now(), Hash: hash, Status: metamorph_api.Status_REJECTED, @@ -242,8 +242,8 @@ func (z *ZMQ) parseTxInfo(c []string) (*ZMQTxInfo, error) { return &txInfo, nil } -func (z *ZMQ) prepareCompetingTxMsgs(hash *chainhash.Hash, competingTxs []string) []*PeerTxMessage { - msgs := []*PeerTxMessage{{ +func (z *ZMQ) prepareCompetingTxMsgs(hash *chainhash.Hash, competingTxs []string) []*TxStatusMessage { + msgs := []*TxStatusMessage{{ Start: time.Now(), Hash: hash, Status: metamorph_api.Status_DOUBLE_SPEND_ATTEMPTED, @@ -268,7 +268,7 @@ func (z *ZMQ) prepareCompetingTxMsgs(hash *chainhash.Hash, competingTxs []string // and return a copy of the slice txsWithoutSelf := removeCompetingSelf(allCompetingTxs, tx) - msgs = append(msgs, &PeerTxMessage{ + msgs = append(msgs, &TxStatusMessage{ Start: time.Now(), Hash: competingHash, Status: metamorph_api.Status_DOUBLE_SPEND_ATTEMPTED, diff --git a/internal/metamorph/zmq_test.go b/internal/metamorph/zmq_test.go index ab503750a..7ddb3e5eb 100644 --- a/internal/metamorph/zmq_test.go +++ b/internal/metamorph/zmq_test.go @@ -70,7 +70,7 @@ func TestZMQ(t *testing.T) { }, } - statuses := make(chan *metamorph.PeerTxMessage, tc.expectedStatusesCount) + statuses := make(chan *metamorph.TxStatusMessage, tc.expectedStatusesCount) zmqURL, err := url.Parse("https://some-url.com") require.NoError(t, err) @@ -83,7 +83,7 @@ func TestZMQ(t *testing.T) { require.NoError(t, err) // then - var status *metamorph.PeerTxMessage + var status *metamorph.TxStatusMessage sCounter := 0 for i := 0; i < tc.expectedStatusesCount; i++ { select { @@ -118,7 +118,7 @@ func TestZMQDoubleSpend(t *testing.T) { numberOfMsgs := 2 hashes := []string{"8e75ae10f86d8a43044a54c3c57d660d20cdb74e233be4b5c90ba752ebdc7e88", "d64adfce6b105dc6bdf475494925bf06802a41a0582586f33c2b16d537a0b7b6"} - statuses := make(chan *metamorph.PeerTxMessage, numberOfMsgs) + statuses := make(chan *metamorph.TxStatusMessage, numberOfMsgs) zmqURL, err := url.Parse("https://some-url.com") require.NoError(t, err) @@ -132,7 +132,7 @@ func TestZMQDoubleSpend(t *testing.T) { require.NoError(t, err) // then - var status *metamorph.PeerTxMessage + var status *metamorph.TxStatusMessage sCounter := 0 for i := 0; i < numberOfMsgs; i++ { select { diff --git a/internal/node_client/node_client.go b/internal/node_client/node_client.go index 730965646..f450fc4ab 100644 --- a/internal/node_client/node_client.go +++ b/internal/node_client/node_client.go @@ -106,7 +106,6 @@ func (n NodeClient) GetRawTransaction(ctx context.Context, id string) (rt *sdkTx }() nTx, err := n.bitcoinClient.GetRawTransaction(id) - if err != nil { return nil, errors.Join(ErrFailedToGetRawTransaction, err) } diff --git a/internal/node_client/test_utils.go b/internal/node_client/test_utils.go index 1fb2e4833..cb7becf97 100644 --- a/internal/node_client/test_utils.go +++ b/internal/node_client/test_utils.go @@ -1,7 +1,12 @@ package node_client import ( + "bytes" + "encoding/json" + "errors" "fmt" + "io" + "net/http" "testing" "time" @@ -37,6 +42,19 @@ type BlockData struct { MerkleRoot string `json:"merkleroot"` } +type RPCRequest struct { + Method string `json:"method"` + 
Params interface{} `json:"params"` + ID int64 `json:"id"` + JSONRpc string `json:"jsonrpc"` +} + +type RPCResponse struct { + ID int64 `json:"id"` + Result json.RawMessage `json:"result"` + Err interface{} `json:"error"` +} + func GetNewWalletAddress(t *testing.T, bitcoind *bitcoin.Bitcoind) (address, privateKey string) { address, err := bitcoind.GetNewAddress() require.NoError(t, err) @@ -287,3 +305,70 @@ func CreateTxFrom(privateKey string, address string, utxos []UnspentOutput, fee return tx, nil } + +func CustomRPCCall(method string, params []interface{}, nodeHost string, nodePort int, nodeUser, nodePassword string) error { + c := http.Client{} + + rpcRequest := RPCRequest{method, params, time.Now().UnixNano(), "1.0"} + payloadBuffer := &bytes.Buffer{} + jsonEncoder := json.NewEncoder(payloadBuffer) + + err := jsonEncoder.Encode(rpcRequest) + if err != nil { + return err + } + + req, err := http.NewRequest( + "POST", + fmt.Sprintf("%s://%s:%d", "http", nodeHost, nodePort), + payloadBuffer, + ) + if err != nil { + return err + } + + req.SetBasicAuth(nodeUser, nodePassword) + req.Header.Add("Content-Type", "application/json;charset=utf-8") + req.Header.Add("Accept", "application/json") + + resp, err := c.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + + data, err := io.ReadAll(resp.Body) + if err != nil { + return err + } + + var rpcResponse RPCResponse + + if resp.StatusCode != 200 { + _ = json.Unmarshal(data, &rpcResponse) + v, ok := rpcResponse.Err.(map[string]interface{}) + if ok { + err = errors.New(v["message"].(string)) + } else { + err = errors.New("HTTP error: " + resp.Status) + } + if err != nil { + return err + } + } + + err = json.Unmarshal(data, &rpcResponse) + if err != nil { + return err + } + + if rpcResponse.Err != nil { + e, ok := rpcResponse.Err.(error) + if ok { + return e + } + return errors.New("unknown error returned from node in rpc response") + } + + return nil +} diff --git a/test/config/config.yaml b/test/config/config.yaml index 1c32339dd..773166aa7 100644 --- a/test/config/config.yaml +++ b/test/config/config.yaml @@ -94,7 +94,7 @@ blocktx: recordRetentionDays: 28 profilerAddr: localhost:9993 registerTxsInterval: 200ms - fillGapsInterval: 15m + fillGapsInterval: 1s # this is needed, becuase during init, peers don't send us some blocks maxAllowedBlockHeightMismatch: 3 api: diff --git a/test/init_test.go b/test/init_test.go index f41f69cd2..061bbab8c 100644 --- a/test/init_test.go +++ b/test/init_test.go @@ -71,5 +71,7 @@ func setupSut() { break } } + + time.Sleep(5 * time.Second) // wait for fillGaps to fill eventual gaps } } diff --git a/test/submit_05_reorg_test.go b/test/submit_05_reorg_test.go new file mode 100644 index 000000000..03e0ac9ec --- /dev/null +++ b/test/submit_05_reorg_test.go @@ -0,0 +1,178 @@ +//go:build e2e + +package test + +import ( + "fmt" + "net/http" + "testing" + "time" + + "github.com/bitcoin-sv/arc/internal/node_client" + "github.com/stretchr/testify/require" +) + +func TestReorg(t *testing.T) { + address, privateKey := node_client.FundNewWallet(t, bitcoind) + + utxos := node_client.GetUtxos(t, bitcoind, address) + require.True(t, len(utxos) > 0, "No UTXOs available for the address") + + tx1, err := node_client.CreateTx(privateKey, address, utxos[0]) + require.NoError(t, err) + + // submit tx1 + rawTx, err := tx1.EFHex() + require.NoError(t, err) + resp := postRequest[TransactionResponse](t, arcEndpointV1Tx, createPayload(t, TransactionRequest{RawTx: rawTx}), map[string]string{"X-WaitFor": StatusSeenOnNetwork}, 
http.StatusOK) + require.Equal(t, StatusSeenOnNetwork, resp.TxStatus) + + // mine tx1 + invHash := node_client.Generate(t, bitcoind, 1) + + // verify tx1 = MINED + statusURL := fmt.Sprintf("%s/%s", arcEndpointV1Tx, tx1.TxID()) + statusResp := getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResp.TxStatus) + require.Equal(t, invHash, *statusResp.BlockHash) + + // get new UTXO for tx2 + txID := node_client.SendToAddress(t, bitcoind, address, float64(0.002)) + utxos = node_client.GetUtxos(t, bitcoind, address) + require.True(t, len(utxos) > 0, "No UTXOs available for the address") + + // make sure to pick the correct UTXO + var utxo node_client.UnspentOutput + for _, u := range utxos { + if u.Txid == txID { + utxo = u + } + } + + tx2, err := node_client.CreateTx(privateKey, address, utxo) + require.NoError(t, err) + + // prepare a callback server for tx2 + callbackReceivedChan := make(chan *TransactionResponse) + callbackErrChan := make(chan error) + callbackURL, token, shutdown := startCallbackSrv(t, callbackReceivedChan, callbackErrChan, nil) + defer shutdown() + + // submit tx2 + rawTx, err = tx2.EFHex() + require.NoError(t, err) + resp = postRequest[TransactionResponse](t, arcEndpointV1Tx, createPayload(t, TransactionRequest{RawTx: rawTx}), + map[string]string{ + "X-WaitFor": StatusSeenOnNetwork, + "X-CallbackUrl": callbackURL, + "X-CallbackToken": token, + }, http.StatusOK) + require.Equal(t, StatusSeenOnNetwork, resp.TxStatus) + + // mine tx2 + tx2BlockHash := node_client.Generate(t, bitcoind, 1) + + // verify tx2 = MINED + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, tx2.TxID()) + statusResp = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResp.TxStatus) + require.Equal(t, tx2BlockHash, *statusResp.BlockHash) + + select { + case status := <-callbackReceivedChan: + require.Equal(t, tx2.TxID(), status.Txid) + require.Equal(t, StatusMined, status.TxStatus) + case err := <-callbackErrChan: + t.Fatalf("callback error: %v", err) + case <-time.After(1 * time.Second): + t.Fatal("callback exceeded timeout") + } + + // invalidate the chain with tx1 and tx2 + call(t, "invalidateblock", []interface{}{invHash}) + + // prepare txStale + txID = node_client.SendToAddress(t, bitcoind, address, float64(0.003)) + utxos = node_client.GetUtxos(t, bitcoind, address) + require.True(t, len(utxos) > 0, "No UTXOs available for the address") + + // make sure to pick the correct UTXO + for _, u := range utxos { + if u.Txid == txID { + utxo = u + } + } + + txStale, err := node_client.CreateTx(privateKey, address, utxo) + require.NoError(t, err) + + // post a tx to the STALE chain + rawTx, err = txStale.EFHex() + require.NoError(t, err) + resp = postRequest[TransactionResponse](t, arcEndpointV1Tx, createPayload(t, TransactionRequest{RawTx: rawTx}), map[string]string{"X-WaitFor": StatusSeenOnNetwork}, http.StatusOK) + require.Equal(t, StatusSeenOnNetwork, resp.TxStatus) + + // post the previously mined tx1 to a STALE chain + rawTx, err = tx1.EFHex() + require.NoError(t, err) + _ = postRequest[TransactionResponse](t, arcEndpointV1Tx, createPayload(t, TransactionRequest{RawTx: rawTx}), map[string]string{"X-WaitFor": StatusSeenOnNetwork}, http.StatusOK) + + // generate new block that will create a stale chain that includes the txStale and tx1 + staleHash := node_client.Generate(t, bitcoind, 1) + + // verify that stale tx is still SEEN_ON_NETWORK + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, txStale.TxID()) + statusResp = 
getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusSeenOnNetwork, statusResp.TxStatus) + + // verify that nothing changed so far with previous mined txs + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, tx1.TxID()) + statusResp = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResp.TxStatus) + require.Equal(t, invHash, *statusResp.BlockHash) + + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, tx2.TxID()) + statusResp = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResp.TxStatus) + require.Equal(t, tx2BlockHash, *statusResp.BlockHash) + + // make the STALE chain LONGEST by adding 2 new blocks + node_client.Generate(t, bitcoind, 1) + node_client.Generate(t, bitcoind, 1) + + // verify that stale tx is now MINED + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, txStale.TxID()) + statusResp = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResp.TxStatus) + require.Equal(t, staleHash, *statusResp.BlockHash) + + // verify that previous mined tx1 have updated block info + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, tx1.TxID()) + statusResp = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResp.TxStatus) + require.Equal(t, staleHash, *statusResp.BlockHash) + + // verify that tx2 is now MINED_IN_STALE_BLOCK + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, tx2.TxID()) + statusResp = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMinedInStaleBlock, statusResp.TxStatus) + require.Equal(t, tx2BlockHash, *statusResp.BlockHash) + + // verify that callback for tx2 was received with status MINED_IN_STALE_BLOCK + select { + case status := <-callbackReceivedChan: + require.Equal(t, tx2.TxID(), status.Txid) + require.Equal(t, StatusMinedInStaleBlock, status.TxStatus) + case err := <-callbackErrChan: + t.Fatalf("callback error: %v", err) + case <-time.After(1 * time.Second): + t.Fatal("callback exceeded timeout") + } +} + +func call(t *testing.T, method string, params []interface{}) { + err := node_client.CustomRPCCall(method, params, nodeHost, nodePort, nodeUser, nodePassword) + require.NoError(t, err) + + time.Sleep(5 * time.Second) +} diff --git a/test/utils.go b/test/utils.go index 15f2d877d..76da39afa 100644 --- a/test/utils.go +++ b/test/utils.go @@ -30,6 +30,7 @@ const ( StatusDoubleSpendAttempted = "DOUBLE_SPEND_ATTEMPTED" StatusRejected = "REJECTED" StatusMined = "MINED" + StatusMinedInStaleBlock = "MINED_IN_STALE_BLOCK" ) type TransactionResponseBatch []TransactionResponse @@ -120,8 +121,10 @@ func generateRandomString(length int) string { return string(b) } -type callbackResponseFn func(w http.ResponseWriter, rc chan *TransactionResponse, ec chan error, status *TransactionResponse) -type callbackBatchResponseFn func(w http.ResponseWriter, rc chan *CallbackBatchResponse, ec chan error, status *CallbackBatchResponse) +type ( + callbackResponseFn func(w http.ResponseWriter, rc chan *TransactionResponse, ec chan error, status *TransactionResponse) + callbackBatchResponseFn func(w http.ResponseWriter, rc chan *CallbackBatchResponse, ec chan error, status *CallbackBatchResponse) +) // use buffered channels for multiple callbacks func startCallbackSrv(t *testing.T, receivedChan chan *TransactionResponse, errChan chan error, alternativeResponseFn callbackResponseFn) (callbackURL, token string, shutdownFn func()) {